
Searched refs: aligned_addr (Results 1 – 6 of 6) sorted by relevance

/Zephyr-latest/drivers/cache/
cache_aspeed.c  163 uint32_t aligned_addr, i, n; in cache_data_invd_range() local
177 n = get_n_cacheline((uint32_t)addr, size, &aligned_addr); in cache_data_invd_range()
181 syscon_write_reg(dev, CACHE_INVALID_REG, DCACHE_INVALID(aligned_addr)); in cache_data_invd_range()
182 aligned_addr += CACHE_LINE_SIZE; in cache_data_invd_range()
224 uint32_t aligned_addr, i, n; in cache_instr_invd_range() local
233 n = get_n_cacheline((uint32_t)addr, size, &aligned_addr); in cache_instr_invd_range()
242 syscon_write_reg(dev, CACHE_INVALID_REG, ICACHE_INVALID(aligned_addr)); in cache_instr_invd_range()
243 aligned_addr += CACHE_LINE_SIZE; in cache_instr_invd_range()
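
Both invalidate-range functions above follow the same pattern: round the start address down to a cache-line boundary, let get_n_cacheline() count how many lines cover the range, then issue one invalidate command per line. A minimal sketch of that align-and-iterate pattern, assuming a 32-byte line size and a hypothetical write_invalidate_reg() in place of the syscon_write_reg() call shown above:

    #include <stdint.h>
    #include <stddef.h>

    #define CACHE_LINE_SIZE 32U  /* assumption; the real size comes from the SoC */

    /* Hypothetical stand-in for syscon_write_reg(dev, CACHE_INVALID_REG, ...). */
    static void write_invalidate_reg(uint32_t line_addr)
    {
        (void)line_addr;  /* hardware register write elided in this sketch */
    }

    /* Round the start down to a line boundary, count the lines covering
     * [addr, addr + size), then invalidate one line per loop iteration. */
    static void invd_range_sketch(uint32_t addr, size_t size)
    {
        uint32_t aligned_addr = addr & ~(CACHE_LINE_SIZE - 1U);
        uint32_t n = (uint32_t)((addr + size - aligned_addr + CACHE_LINE_SIZE - 1U)
                                / CACHE_LINE_SIZE);

        for (uint32_t i = 0; i < n; i++) {
            write_invalidate_reg(aligned_addr);
            aligned_addr += CACHE_LINE_SIZE;  /* the step taken at lines 182 and 243 above */
        }
    }
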
/Zephyr-latest/soc/intel/intel_adsp/common/include/
intel_adsp_hda.h  171 uint32_t aligned_addr = addr & HDA_ALIGN_MASK; in intel_adsp_hda_set_buffer() local
174 __ASSERT(aligned_addr == addr, "Buffer must be 128 byte aligned"); in intel_adsp_hda_set_buffer()
186 __ASSERT(aligned_addr >= _INTEL_ADSP_BASE in intel_adsp_hda_set_buffer()
187 && aligned_addr < _INTEL_ADSP_BASE + _INTEL_ADSP_SIZE, in intel_adsp_hda_set_buffer()
189 __ASSERT(aligned_addr + aligned_size < _INTEL_ADSP_BASE + _INTEL_ADSP_SIZE, in intel_adsp_hda_set_buffer()
200 *DGBBA(base, regblock_size, sid) = aligned_addr; in intel_adsp_hda_set_buffer()
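
Here the code masks the address with HDA_ALIGN_MASK, then asserts that the mask changed nothing (i.e. the buffer was already 128-byte aligned) and that the buffer lies inside the ADSP window before writing DGBBA. A sketch of those checks; hda_buffer_ok() is hypothetical, and base/size stand in for _INTEL_ADSP_BASE/_INTEL_ADSP_SIZE:

    #include <stdint.h>
    #include <stdbool.h>

    #define HDA_ALIGNMENT 128U  /* assumption, taken from the "128 byte aligned" assert above */

    static bool hda_buffer_ok(uint32_t addr, uint32_t aligned_size,
                              uint32_t base, uint32_t size)
    {
        uint32_t aligned_addr = addr & ~(HDA_ALIGNMENT - 1U);  /* HDA_ALIGN_MASK */

        if (aligned_addr != addr) {
            return false;  /* "Buffer must be 128 byte aligned" */
        }

        /* Buffer start and end must both fall inside [base, base + size). */
        return (aligned_addr >= base) &&
               (aligned_addr < base + size) &&
               (aligned_addr + aligned_size < base + size);
    }
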
/Zephyr-latest/arch/xtensa/core/
mpu.c  1012 uintptr_t aligned_addr; in arch_buffer_validate() local
1017 aligned_addr = ROUND_DOWN((uintptr_t)addr, XCHAL_MPU_ALIGN); in arch_buffer_validate()
1018 addr_offset = (uintptr_t)addr - aligned_addr; in arch_buffer_validate()
1023 uint32_t probed = xtensa_pptlb_probe(aligned_addr + offset); in arch_buffer_validate()
1083 uintptr_t aligned_addr; in xtensa_mem_kernel_has_access() local
1088 aligned_addr = ROUND_DOWN((uintptr_t)addr, XCHAL_MPU_ALIGN); in xtensa_mem_kernel_has_access()
1089 addr_offset = (uintptr_t)addr - aligned_addr; in xtensa_mem_kernel_has_access()
1094 uint32_t probed = xtensa_pptlb_probe(aligned_addr + offset); in xtensa_mem_kernel_has_access()
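
Both Xtensa routines round the pointer down to XCHAL_MPU_ALIGN, keep the offset of the original pointer inside that aligned block, and then probe every aligned block that covers the buffer. A sketch of that walk, assuming a power-of-two alignment, with probe() standing in for xtensa_pptlb_probe():

    #include <stdint.h>
    #include <stddef.h>

    #define MPU_ALIGN 4096U  /* stand-in for XCHAL_MPU_ALIGN; the real value is per-SoC */
    #define ROUND_DOWN(x, a) ((x) & ~((uintptr_t)(a) - 1U))

    /* Hypothetical stand-in for xtensa_pptlb_probe(). */
    static uint32_t probe(uintptr_t addr)
    {
        return (uint32_t)addr;
    }

    /* Probe every MPU-aligned block covering [addr, addr + size), mirroring the
     * loop structure in arch_buffer_validate()/xtensa_mem_kernel_has_access(). */
    static void probe_range_sketch(const void *addr, size_t size)
    {
        uintptr_t aligned_addr = ROUND_DOWN((uintptr_t)addr, MPU_ALIGN);
        size_t addr_offset = (uintptr_t)addr - aligned_addr;

        for (size_t offset = 0; offset < size + addr_offset; offset += MPU_ALIGN) {
            uint32_t probed = probe(aligned_addr + offset);

            (void)probed;  /* access-rights checks on the probed entry elided */
        }
    }
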
/Zephyr-latest/include/zephyr/kernel/
mm.h  291 size_t k_mem_region_align(uintptr_t *aligned_addr, size_t *aligned_size,
/Zephyr-latest/tests/kernel/mem_protect/mem_map/src/
main.c  60 uintptr_t aligned_addr; in ZTEST() local
89 k_mem_region_align(&aligned_addr, &aligned_size, (uintptr_t)mapped_rw, in ZTEST()
94 sys_cache_data_flush_and_invd_range((void *)aligned_addr, aligned_size); in ZTEST()
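
The test widens the mapped region to alignment boundaries with k_mem_region_align() and then flushes and invalidates the whole aligned span. A usage sketch; the trailing arguments (region size and alignment) are assumed from the mmu.c implementation below since the hit at line 89 is truncated, and buf/buf_size/page_align are hypothetical:

    #include <zephyr/kernel.h>
    #include <zephyr/kernel/mm.h>
    #include <zephyr/cache.h>

    static void flush_aligned_region(uint8_t *buf, size_t buf_size, size_t page_align)
    {
        uintptr_t aligned_addr;
        size_t aligned_size;

        /* Widen the region to the surrounding alignment boundaries... */
        k_mem_region_align(&aligned_addr, &aligned_size,
                           (uintptr_t)buf, buf_size, page_align);

        /* ...so the cache maintenance covers whole lines around the buffer. */
        sys_cache_data_flush_and_invd_range((void *)aligned_addr, aligned_size);
    }
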
/Zephyr-latest/kernel/
mmu.c  1007 size_t k_mem_region_align(uintptr_t *aligned_addr, size_t *aligned_size, in k_mem_region_align() argument
1015 *aligned_addr = ROUND_DOWN(addr, align); in k_mem_region_align()
1016 addr_offset = addr - *aligned_addr; in k_mem_region_align()
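
The implementation rounds the region start down to the requested alignment and records how far the original address sits inside the aligned region. A sketch of that arithmetic, assuming power-of-two alignment and assuming the size is then rounded up to keep the aligned region covering the caller's buffer (the size handling is not visible in the hit above):

    #include <stdint.h>
    #include <stddef.h>

    #define ROUND_DOWN(x, a) ((x) & ~((uintptr_t)(a) - 1U))
    #define ROUND_UP(x, a)   ROUND_DOWN((uintptr_t)(x) + ((uintptr_t)(a) - 1U), a)

    static size_t region_align_sketch(uintptr_t *aligned_addr, size_t *aligned_size,
                                      uintptr_t addr, size_t size, size_t align)
    {
        size_t addr_offset;

        /* Lines 1015-1016 above: round the start down, keep the offset of the
         * original address within the aligned region. */
        *aligned_addr = ROUND_DOWN(addr, align);
        addr_offset = addr - *aligned_addr;

        /* Assumption: grow the size by that offset and round up so the aligned
         * region still covers [addr, addr + size). */
        *aligned_size = ROUND_UP(size + addr_offset, align);

        return addr_offset;
    }
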