
Searched refs:CONFIG_MMU_PAGE_SIZE (Results 1 – 25 of 54) sorted by relevance

/Zephyr-latest/tests/arch/arm64/arm64_mmu/src/
main.c
92 size_t size = CONFIG_MMU_PAGE_SIZE; in ZTEST()
106 int table_entries = CONFIG_MMU_PAGE_SIZE / sizeof(uint64_t); in ZTEST()
107 size_t block_size = table_entries * CONFIG_MMU_PAGE_SIZE; in ZTEST()
124 int table_entries = CONFIG_MMU_PAGE_SIZE / sizeof(uint64_t); in ZTEST()
125 size_t block_size = table_entries * CONFIG_MMU_PAGE_SIZE; in ZTEST()
128 size_t size = block_size + CONFIG_MMU_PAGE_SIZE; in ZTEST()
143 int table_entries = CONFIG_MMU_PAGE_SIZE / sizeof(uint64_t); in ZTEST()
144 size_t block_size = table_entries * CONFIG_MMU_PAGE_SIZE; in ZTEST()
145 uintptr_t virt = (TEST_VIRT_ADDR & ~(block_size - 1)) - CONFIG_MMU_PAGE_SIZE; in ZTEST()
146 uintptr_t phys = (TEST_PHYS_ADDR & ~(block_size - 1)) - CONFIG_MMU_PAGE_SIZE; in ZTEST()
[all …]
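
The test above derives the span mapped by a single arm64 block entry from the page size. A minimal sketch of that arithmetic, assuming 8-byte (uint64_t) translation-table descriptors as in the snippets; the function name is illustrative, not part of the test:

    #include <stdint.h>
    #include <stddef.h>

    static size_t arm64_block_size(void)
    {
        /* one translation table fills exactly one page, so it holds
         * page_size / 8 descriptors; a block entry maps that many pages
         * in one go (2 MiB when pages are 4 KiB)
         */
        int table_entries = CONFIG_MMU_PAGE_SIZE / sizeof(uint64_t);

        return (size_t)table_entries * CONFIG_MMU_PAGE_SIZE;
    }
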
/Zephyr-latest/tests/kernel/mem_protect/mem_map/src/
main.c
29 #define BUF_SIZE (CONFIG_MMU_PAGE_SIZE + 907)
32 #define TEST_PAGE_SZ ROUND_UP(BUF_OFFSET + BUF_SIZE, CONFIG_MMU_PAGE_SIZE)
35 static uint8_t __aligned(CONFIG_MMU_PAGE_SIZE) test_page[TEST_PAGE_SZ]; in __aligned() argument
90 BUF_SIZE, CONFIG_MMU_PAGE_SIZE); in ZTEST()
303 mapped = k_mem_map(CONFIG_MMU_PAGE_SIZE, K_MEM_PERM_RW); in ZTEST()
314 sys_cache_data_flush_and_invd_range((void *)mapped, CONFIG_MMU_PAGE_SIZE); in ZTEST()
318 for (i = 0; i < CONFIG_MMU_PAGE_SIZE; i++) { in ZTEST()
324 zassert_equal(free_mem, free_mem_after_map + CONFIG_MMU_PAGE_SIZE, in ZTEST()
328 (void)memset(mapped, '\xFF', CONFIG_MMU_PAGE_SIZE); in ZTEST()
331 sys_cache_data_flush_and_invd_range((void *)mapped, CONFIG_MMU_PAGE_SIZE); in ZTEST()
[all …]
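
The mem_map test exercises k_mem_map()/k_mem_unmap() with one page of anonymous memory. A hedged sketch of that pattern (not the test code itself; the function name is illustrative):

    #include <string.h>
    #include <zephyr/kernel.h>

    static void map_one_page(void)
    {
        uint8_t *page = k_mem_map(CONFIG_MMU_PAGE_SIZE, K_MEM_PERM_RW);

        if (page == NULL) {
            return; /* no free page frames or virtual space left */
        }

        /* freshly mapped pages are zero-filled and writable */
        memset(page, 0xFF, CONFIG_MMU_PAGE_SIZE);

        /* return the page frame and virtual range to the free pool */
        k_mem_unmap(page, CONFIG_MMU_PAGE_SIZE);
    }
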
/Zephyr-latest/include/zephyr/arch/x86/
thread_stack.h
19 #define Z_X86_STACK_BASE_ALIGN CONFIG_MMU_PAGE_SIZE
32 #define Z_X86_STACK_SIZE_ALIGN CONFIG_MMU_PAGE_SIZE
91 char guard_page[CONFIG_MMU_PAGE_SIZE];
107 #define ARCH_KERNEL_STACK_RESERVED CONFIG_MMU_PAGE_SIZE
108 #define ARCH_KERNEL_STACK_OBJ_ALIGN CONFIG_MMU_PAGE_SIZE
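
Here CONFIG_MMU_PAGE_SIZE sets both the stack alignment and the reserved guard area, so a single non-present page can catch overflows. An illustrative sketch of how such constants are typically expressed (the macro names below are hypothetical, not the x86 port's):

    #include <zephyr/sys/util.h>

    #define GUARD_SIZE            CONFIG_MMU_PAGE_SIZE
    #define MAPPED_STACK_ALIGN    CONFIG_MMU_PAGE_SIZE
    #define MAPPED_STACK_SIZE(sz) ROUND_UP((sz), CONFIG_MMU_PAGE_SIZE)
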
/Zephyr-latest/subsys/demand_paging/backing_store/
ram.c
54 #define BACKING_STORE_SIZE (CONFIG_BACKING_STORE_RAM_PAGES * CONFIG_MMU_PAGE_SIZE)
61 __ASSERT(location % CONFIG_MMU_PAGE_SIZE == 0, in location_to_slab()
64 (CONFIG_BACKING_STORE_RAM_PAGES * CONFIG_MMU_PAGE_SIZE), in location_to_slab()
79 __ASSERT(offset % CONFIG_MMU_PAGE_SIZE == 0, in slab_to_location()
118 CONFIG_MMU_PAGE_SIZE); in k_mem_paging_backing_store_page_out()
124 CONFIG_MMU_PAGE_SIZE); in k_mem_paging_backing_store_page_in()
141 k_mem_slab_init(&backing_slabs, backing_store, CONFIG_MMU_PAGE_SIZE, in k_mem_paging_backing_store_init()
backing_store_qemu_x86_tiny.c
38 - CONFIG_MMU_PAGE_SIZE)); in location_to_flash()
61 CONFIG_MMU_PAGE_SIZE); in k_mem_paging_backing_store_page_out()
67 CONFIG_MMU_PAGE_SIZE); in k_mem_paging_backing_store_page_in()
backing_store_ondemand_semihost.c
43 long size = CONFIG_MMU_PAGE_SIZE; in k_mem_paging_backing_store_page_in()
63 __ASSERT(file_offset % CONFIG_MMU_PAGE_SIZE == 0, "file_offset = %#lx", file_offset); in k_mem_paging_backing_store_location_query()
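
All three backing stores size their storage in whole MMU pages; the RAM variant carves a static buffer into page-sized memory-slab blocks. A minimal sketch of that idea, with illustrative names (my_backing_store, my_backing_slabs) rather than the actual Zephyr symbols:

    #include <zephyr/kernel.h>

    #define MY_BACKING_PAGES 8 /* illustrative page count */

    static char __aligned(CONFIG_MMU_PAGE_SIZE)
        my_backing_store[MY_BACKING_PAGES * CONFIG_MMU_PAGE_SIZE];
    static struct k_mem_slab my_backing_slabs;

    static int my_backing_store_init(void)
    {
        /* one slab block per evicted page, each exactly one MMU page long */
        return k_mem_slab_init(&my_backing_slabs, my_backing_store,
                               CONFIG_MMU_PAGE_SIZE, MY_BACKING_PAGES);
    }
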
/Zephyr-latest/kernel/include/
mmu.h
124 #define K_MEM_NUM_PAGE_FRAMES (K_MEM_PHYS_RAM_SIZE / (size_t)CONFIG_MMU_PAGE_SIZE)
256 __ASSERT(phys % CONFIG_MMU_PAGE_SIZE == 0U, in k_mem_assert_phys_aligned()
265 return (uintptr_t)((pf - k_mem_page_frames) * CONFIG_MMU_PAGE_SIZE) + in k_mem_page_frame_to_phys()
272 uintptr_t flags_mask = CONFIG_MMU_PAGE_SIZE - 1; in k_mem_page_frame_to_virt()
290 CONFIG_MMU_PAGE_SIZE]; in k_mem_phys_to_page_frame()
295 __ASSERT((uintptr_t)addr % CONFIG_MMU_PAGE_SIZE == 0U, in k_mem_assert_virtual_region()
297 __ASSERT(size % CONFIG_MMU_PAGE_SIZE == 0U, in k_mem_assert_virtual_region()
322 (_phys) += CONFIG_MMU_PAGE_SIZE, (_pageframe)++)
334 #define K_MEM_VM_RESERVED CONFIG_MMU_PAGE_SIZE
341 CONFIG_MMU_PAGE_SIZE))
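
mmu.h converts between physical addresses and page-frame indices by dividing or multiplying by the page size. A simplified sketch of that arithmetic (helper names are illustrative, and the RAM base is passed in rather than taken from the kernel's constants):

    #include <stdint.h>
    #include <stddef.h>

    static inline size_t phys_to_frame_idx(uintptr_t phys, uintptr_t ram_start)
    {
        return (phys - ram_start) / CONFIG_MMU_PAGE_SIZE;
    }

    static inline uintptr_t frame_idx_to_phys(size_t idx, uintptr_t ram_start)
    {
        return ram_start + idx * CONFIG_MMU_PAGE_SIZE;
    }
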
/Zephyr-latest/arch/arm64/core/
header.S
11 #if CONFIG_MMU_PAGE_SIZE == 4096 || defined(CONFIG_ARM_MPU)
13 #elif CONFIG_MMU_PAGE_SIZE == 16384
15 #elif CONFIG_MMU_PAGE_SIZE == 65536
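
header.S selects boot image flags from the configured page size at preprocessing time. The same selection pattern in C form, with an illustrative PAGE_SHIFT macro standing in for the actual flag values:

    #if CONFIG_MMU_PAGE_SIZE == 4096
    #define PAGE_SHIFT 12
    #elif CONFIG_MMU_PAGE_SIZE == 16384
    #define PAGE_SHIFT 14
    #elif CONFIG_MMU_PAGE_SIZE == 65536
    #define PAGE_SHIFT 16
    #else
    #error "Unsupported CONFIG_MMU_PAGE_SIZE"
    #endif
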
/Zephyr-latest/tests/kernel/mem_protect/demand_paging/ondemand_section/src/
main.c
25 void *addr = (void *)ROUND_DOWN(&evictable_function, CONFIG_MMU_PAGE_SIZE); in ZTEST()
40 zassert_ok(k_mem_page_out(addr, CONFIG_MMU_PAGE_SIZE), ""); in ZTEST()
49 zassert_ok(k_mem_page_out(addr, CONFIG_MMU_PAGE_SIZE), ""); in ZTEST()
53 k_mem_page_in(addr, CONFIG_MMU_PAGE_SIZE); in ZTEST()
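
The test evicts and restores a page-aligned region around a function symbol with k_mem_page_out() and k_mem_page_in(). A hedged sketch of those calls (function name illustrative):

    #include <zephyr/kernel.h>
    #include <zephyr/sys/util.h>

    static void evict_and_restore(void *sym)
    {
        /* align down to the page that contains 'sym' */
        void *page = (void *)ROUND_DOWN((uintptr_t)sym, CONFIG_MMU_PAGE_SIZE);

        if (k_mem_page_out(page, CONFIG_MMU_PAGE_SIZE) == 0) {
            /* fault the page back in eagerly instead of waiting for access */
            k_mem_page_in(page, CONFIG_MMU_PAGE_SIZE);
        }
    }
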
/Zephyr-latest/samples/subsys/demand_paging/src/
main.c
52 free_pages_before = k_mem_free_get() / CONFIG_MMU_PAGE_SIZE; in main()
57 free_pages_after = k_mem_free_get() / CONFIG_MMU_PAGE_SIZE; in main()
63 free_pages_before = k_mem_free_get() / CONFIG_MMU_PAGE_SIZE; in main()
68 free_pages_after = k_mem_free_get() / CONFIG_MMU_PAGE_SIZE; in main()
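
The sample converts k_mem_free_get(), which reports bytes still available to k_mem_map(), into a count of free page frames. A small sketch of that bookkeeping (helper name illustrative):

    #include <zephyr/kernel.h>

    static size_t free_page_count(void)
    {
        return k_mem_free_get() / CONFIG_MMU_PAGE_SIZE;
    }
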
/Zephyr-latest/soc/espressif/esp32s3/
soc_cache.c
43 ((_instruction_size + CONFIG_MMU_PAGE_SIZE - 1) / CONFIG_MMU_PAGE_SIZE) * in esp_config_data_cache_mode()
47 ((_rodata_size + CONFIG_MMU_PAGE_SIZE - 1) / CONFIG_MMU_PAGE_SIZE) * in esp_config_data_cache_mode()
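
The ESP32-S3 cache setup rounds section sizes up to a whole number of MMU pages before programming the cache regions. The same rounding as a standalone helper (name illustrative):

    #include <stddef.h>

    static inline size_t round_up_to_pages(size_t bytes)
    {
        return ((bytes + CONFIG_MMU_PAGE_SIZE - 1) / CONFIG_MMU_PAGE_SIZE)
               * CONFIG_MMU_PAGE_SIZE;
    }
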
/Zephyr-latest/kernel/
mmu.c
137 (_pos) < ((uint8_t *)(_base) + (_size)); (_pos) += CONFIG_MMU_PAGE_SIZE)
141 (_pos) < ((uintptr_t)(_base) + (_size)); (_pos) += CONFIG_MMU_PAGE_SIZE)
194 CONFIG_KERNEL_VM_SIZE / CONFIG_MMU_PAGE_SIZE);
204 - (offset * CONFIG_MMU_PAGE_SIZE) - size; in virt_from_bitmap_offset()
210 - POINTER_TO_UINT(vaddr) - size) / CONFIG_MMU_PAGE_SIZE; in virt_to_bitmap_offset()
224 num_bits = K_MEM_VM_RESERVED / CONFIG_MMU_PAGE_SIZE; in virt_region_init()
233 num_bits /= CONFIG_MMU_PAGE_SIZE; in virt_region_init()
264 num_bits = size / CONFIG_MMU_PAGE_SIZE; in virt_region_free()
282 num_bits = adjusted_sz / CONFIG_MMU_PAGE_SIZE; in virt_region_free()
301 num_bits = (size + align - CONFIG_MMU_PAGE_SIZE) / CONFIG_MMU_PAGE_SIZE; in virt_region_alloc()
[all …]
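
mmu.c tracks the virtual region with a bitmap whose bit index is the distance, in pages, from the top of the VM area, since regions are allocated downward. A simplified sketch of that conversion (name illustrative; the real code uses the kernel's VM constants):

    #include <stdint.h>
    #include <stddef.h>

    static inline size_t virt_to_bit(uintptr_t vm_end, uintptr_t vaddr, size_t size)
    {
        /* regions are handed out from the top of the VM area downward */
        return (vm_end - vaddr - size) / CONFIG_MMU_PAGE_SIZE;
    }
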
/Zephyr-latest/soc/intel/intel_socfpga_std/cyclonev/
soc.c
25 uintptr_t pos = ROUND_DOWN(addr, CONFIG_MMU_PAGE_SIZE); in arch_reserved_pages_update()
26 uintptr_t end = ROUND_UP(addr + len, CONFIG_MMU_PAGE_SIZE); in arch_reserved_pages_update()
28 for (; pos < end; pos += CONFIG_MMU_PAGE_SIZE) { in arch_reserved_pages_update()
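
arch_reserved_pages_update() here walks an arbitrary byte range one MMU page at a time after aligning its start down and its end up. A minimal sketch of that loop (name illustrative):

    #include <stdint.h>
    #include <stddef.h>
    #include <zephyr/sys/util.h>

    static void for_each_page(uintptr_t addr, size_t len)
    {
        uintptr_t pos = ROUND_DOWN(addr, CONFIG_MMU_PAGE_SIZE);
        uintptr_t end = ROUND_UP(addr + len, CONFIG_MMU_PAGE_SIZE);

        for (; pos < end; pos += CONFIG_MMU_PAGE_SIZE) {
            /* mark or inspect the page frame at 'pos' here */
        }
    }
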
/Zephyr-latest/arch/x86/core/
userspace.c
17 (CONFIG_PRIVILEGED_STACK_SIZE % CONFIG_MMU_PAGE_SIZE) == 0);
173 stack_start += CONFIG_MMU_PAGE_SIZE; in arch_user_mode_enter()
174 stack_size -= CONFIG_MMU_PAGE_SIZE; in arch_user_mode_enter()
179 CONFIG_MMU_PAGE_SIZE); in arch_user_mode_enter()
x86_mmu.c
207 #define PT_AREA ((uintptr_t)(CONFIG_MMU_PAGE_SIZE * NUM_PT_ENTRIES))
266 ((INITIAL_PTABLE_PAGES * CONFIG_MMU_PAGE_SIZE) + 0x20)
269 (INITIAL_PTABLE_PAGES * CONFIG_MMU_PAGE_SIZE)
451 __ASSERT((addr & (CONFIG_MMU_PAGE_SIZE - 1)) == 0U, in assert_addr_aligned()
461 if ((addr & (CONFIG_MMU_PAGE_SIZE - 1)) == 0U) { in is_addr_aligned()
484 __ASSERT((size & (CONFIG_MMU_PAGE_SIZE - 1)) == 0U, in assert_size_aligned()
494 if ((size & (CONFIG_MMU_PAGE_SIZE - 1)) == 0U) { in is_size_aligned()
1118 for (size_t offset = 0; offset < size; offset += CONFIG_MMU_PAGE_SIZE) { in range_map_ptables()
1385 ret = range_map_unlocked(stack, 0, CONFIG_MMU_PAGE_SIZE, in z_x86_set_stack_guard()
1450 (uintptr_t)addr, size, CONFIG_MMU_PAGE_SIZE); in arch_buffer_validate()
[all …]
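
x86_mmu.c repeatedly checks page alignment by masking the low address bits, which works because CONFIG_MMU_PAGE_SIZE is a power of two. The same check as a standalone helper (name illustrative):

    #include <stdbool.h>
    #include <stdint.h>

    static inline bool is_page_aligned(uintptr_t addr)
    {
        /* valid because CONFIG_MMU_PAGE_SIZE is a power of two */
        return (addr & (CONFIG_MMU_PAGE_SIZE - 1)) == 0U;
    }
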
/Zephyr-latest/include/zephyr/arch/xtensa/
thread_stack.h
23 #define XTENSA_STACK_BASE_ALIGN CONFIG_MMU_PAGE_SIZE
24 #define XTENSA_STACK_SIZE_ALIGN CONFIG_MMU_PAGE_SIZE
/Zephyr-latest/tests/kernel/mem_protect/stackprot/src/
mapped_stack.c
13 #define STACK_SIZE (CONFIG_MMU_PAGE_SIZE + CONFIG_TEST_EXTRA_STACK_SIZE)
44 ptr -= CONFIG_MMU_PAGE_SIZE / 2; in mapped_thread()
48 ptr += CONFIG_MMU_PAGE_SIZE / 2; in mapped_thread()
/Zephyr-latest/soc/espressif/common/
loader.c
178 (app_drom_size + CONFIG_MMU_PAGE_SIZE - 1) / CONFIG_MMU_PAGE_SIZE; in map_rom_segments()
186 (app_irom_size + CONFIG_MMU_PAGE_SIZE - 1) / CONFIG_MMU_PAGE_SIZE; in map_rom_segments()
230 ((app_irom_size + CONFIG_MMU_PAGE_SIZE - 1) / CONFIG_MMU_PAGE_SIZE) * in map_rom_segments()
/Zephyr-latest/tests/arch/arm64/arm64_mmu/
prj.conf
6 CONFIG_MMU_PAGE_SIZE=0x1000
/Zephyr-latest/include/zephyr/arch/arm64/
mm.h
15 #define MEM_DOMAIN_ALIGN_AND_SIZE CONFIG_MMU_PAGE_SIZE
/Zephyr-latest/tests/kernel/mem_protect/mem_map/
custom-sections.ld
3 SECTION_DATA_PROLOGUE(TEST_MEM_MAP,,SUBALIGN(CONFIG_MMU_PAGE_SIZE))
/Zephyr-latest/soc/espressif/esp32s2/
memory.h
64 #define CACHE_ALIGN CONFIG_MMU_PAGE_SIZE
/Zephyr-latest/tests/arch/x86/pagetables/src/
main.c
96 pos += CONFIG_MMU_PAGE_SIZE) { in ZTEST()
167 pos += CONFIG_MMU_PAGE_SIZE) { in ZTEST()
202 pos += CONFIG_MMU_PAGE_SIZE) { in ZTEST()
/Zephyr-latest/soc/espressif/esp32c2/
memory.h
70 #define CACHE_ALIGN CONFIG_MMU_PAGE_SIZE
/Zephyr-latest/soc/espressif/esp32c6/
memory.h
63 #define CACHE_ALIGN CONFIG_MMU_PAGE_SIZE
