Searched refs:aligned_addr (Results 1 – 4 of 4) sorted by relevance
/Zephyr-Core-3.5.0/drivers/cache/cache_aspeed.c
    163  uint32_t aligned_addr, i, n;                                              in cache_data_invd_range() (local)
    177  n = get_n_cacheline((uint32_t)addr, size, &aligned_addr);                 in cache_data_invd_range()
    181  syscon_write_reg(dev, CACHE_INVALID_REG, DCACHE_INVALID(aligned_addr));   in cache_data_invd_range()
    182  aligned_addr += CACHE_LINE_SIZE;                                          in cache_data_invd_range()
    224  uint32_t aligned_addr, i, n;                                              in cache_instr_invd_range() (local)
    233  n = get_n_cacheline((uint32_t)addr, size, &aligned_addr);                 in cache_instr_invd_range()
    242  syscon_write_reg(dev, CACHE_INVALID_REG, ICACHE_INVALID(aligned_addr));   in cache_instr_invd_range()
    243  aligned_addr += CACHE_LINE_SIZE;                                          in cache_instr_invd_range()
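Both loops above follow one pattern: round the start address down to a cache-line boundary, work out how many lines cover the requested range, then issue one invalidate command per line. A minimal self-contained sketch of that pattern follows; the CACHE_LINE_SIZE value and the cache_line_invd() helper are placeholder assumptions standing in for the aspeed driver's get_n_cacheline() and syscon register writes.

    #include <stdint.h>
    #include <stddef.h>

    #define CACHE_LINE_SIZE 32U  /* assumption: the real value is SoC-specific */

    static void cache_line_invd(uint32_t line_addr)
    {
        /* placeholder for syscon_write_reg(dev, CACHE_INVALID_REG, ...) */
        (void)line_addr;
    }

    static void invd_range(uint32_t addr, size_t size)
    {
        /* Round the start down to a line boundary and count the lines that
         * cover [addr, addr + size), as get_n_cacheline() does in the driver. */
        uint32_t aligned_addr = addr & ~(CACHE_LINE_SIZE - 1U);
        uint32_t end = addr + (uint32_t)size;
        uint32_t n = (end - aligned_addr + CACHE_LINE_SIZE - 1U) / CACHE_LINE_SIZE;

        for (uint32_t i = 0U; i < n; i++) {
            cache_line_invd(aligned_addr);
            aligned_addr += CACHE_LINE_SIZE;
        }
    }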
/Zephyr-Core-3.5.0/soc/xtensa/intel_adsp/common/include/intel_adsp_hda.h
    160  uint32_t aligned_addr = addr & HDA_ALIGN_MASK;                            in intel_adsp_hda_set_buffer() (local)
    163  __ASSERT(aligned_addr == addr, "Buffer must be 128 byte aligned");        in intel_adsp_hda_set_buffer()
    164  __ASSERT(aligned_addr >= L2_SRAM_BASE                                     in intel_adsp_hda_set_buffer()
    165           && aligned_addr < L2_SRAM_BASE + L2_SRAM_SIZE,                   in intel_adsp_hda_set_buffer()
    170  __ASSERT(aligned_addr + aligned_size < L2_SRAM_BASE + L2_SRAM_SIZE,       in intel_adsp_hda_set_buffer()
    181  *DGBBA(base, regblock_size, sid) = aligned_addr;                          in intel_adsp_hda_set_buffer()
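Here aligned_addr is only used to verify the caller's buffer: masking off the low bits must not change the address (128-byte alignment), and the buffer must sit inside L2 SRAM before it is programmed into DGBBA. A sketch of those checks, with illustrative macro values rather than the header's actual definitions:

    #include <stdint.h>
    #include <stdbool.h>

    #define HDA_ALIGN      128U                 /* per the assert message above */
    #define HDA_ALIGN_MASK (~(HDA_ALIGN - 1U))  /* 0xFFFFFF80 */

    static bool hda_buffer_ok(uint32_t addr, uint32_t aligned_size,
                              uint32_t sram_base, uint32_t sram_size)
    {
        uint32_t aligned_addr = addr & HDA_ALIGN_MASK;

        return (aligned_addr == addr) &&                   /* 128-byte aligned      */
               (aligned_addr >= sram_base) &&              /* starts inside L2 SRAM */
               (aligned_addr + aligned_size <              /* ends inside L2 SRAM,  */
                sram_base + sram_size);                    /* strict, as asserted   */
    }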
/Zephyr-Core-3.5.0/include/zephyr/sys/mem_manage.h
    379  size_t k_mem_region_align(uintptr_t *aligned_addr, size_t *aligned_size,
/Zephyr-Core-3.5.0/kernel/mmu.c
    856  size_t k_mem_region_align(uintptr_t *aligned_addr, size_t *aligned_size,  in k_mem_region_align() (argument)
    864  *aligned_addr = ROUND_DOWN(addr, align);                                  in k_mem_region_align()
    865  addr_offset = addr - *aligned_addr;                                       in k_mem_region_align()
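The mmu.c definition rounds the region start down to the requested alignment and records how far the original address sits past that boundary, so the aligned region can be padded to still cover the caller's range. A standalone sketch of that arithmetic; the bit-mask rounding assumes align is a power of two (Zephyr's ROUND_DOWN/ROUND_UP macros do not require that), and whether the real function returns the offset should be checked against the full body in mmu.c.

    #include <stdint.h>
    #include <stddef.h>

    static size_t region_align(uintptr_t *aligned_addr, size_t *aligned_size,
                               uintptr_t addr, size_t size, size_t align)
    {
        size_t addr_offset;

        *aligned_addr = addr & ~((uintptr_t)align - 1U);         /* ROUND_DOWN(addr, align) */
        addr_offset = addr - *aligned_addr;
        *aligned_size = (size + addr_offset + align - 1U)
                        & ~((size_t)align - 1U);                  /* ROUND_UP to align       */

        return addr_offset;
    }

    /* Example: addr = 0x1234, size = 0x100, align = 0x1000 (4 KiB page)
     *   -> aligned_addr = 0x1000, offset = 0x234, aligned_size = 0x1000. */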