/Zephyr-latest/arch/nios2/core/ |
D | crt0.S |
      46  movhi r2, %hi(ALT_CPU_ICACHE_SIZE)
      48  movui r2, ALT_CPU_ICACHE_SIZE
      55  subi r2, r2, 4
      57  subi r2, r2, ALT_CPU_ICACHE_LINE_SIZE
      59  initi r2
      60  bgt r2, zero, 0b
      85  movhi r2, %hi(ALT_CPU_DCACHE_SIZE)
      87  movui r2, ALT_CPU_DCACHE_SIZE
      94  subi r2, r2, 4
      96  subi r2, r2, ALT_CPU_DCACHE_LINE_SIZE
      [all …]
|
D | swap.S |
      75  ldw r2, _kernel_offset_to_ready_q_cache(r10)
      80  stw r2, _kernel_offset_to_current(r10)
      85  ldw r16, _thread_offset_to_r16(r2)
      86  ldw r17, _thread_offset_to_r17(r2)
      87  ldw r18, _thread_offset_to_r18(r2)
      88  ldw r19, _thread_offset_to_r19(r2)
      89  ldw r20, _thread_offset_to_r20(r2)
      90  ldw r21, _thread_offset_to_r21(r2)
      91  ldw r22, _thread_offset_to_r22(r2)
      92  ldw r23, _thread_offset_to_r23(r2)
      [all …]
|
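The crt0.S matches above come from the Nios II cache initialization loops: r2 is loaded with the cache size (ALT_CPU_ICACHE_SIZE / ALT_CPU_DCACHE_SIZE) and stepped down by one cache line per iteration, with "initi r2" touching each instruction-cache line until r2 reaches zero. A minimal C sketch of that countdown, for illustration only; the EXAMPLE_* constants stand in for the generated ALT_CPU_* macros and example_icache_init() is not an actual Zephyr function.

    #include <stdint.h>

    #define EXAMPLE_ICACHE_SIZE      0x4000 /* stand-in for ALT_CPU_ICACHE_SIZE */
    #define EXAMPLE_ICACHE_LINE_SIZE 32     /* stand-in for ALT_CPU_ICACHE_LINE_SIZE */

    static inline void example_icache_init(void)
    {
        /* Same walk the crt0.S loop performs with r2: start at the cache
         * size and step back one line at a time, issuing "initi" for each
         * line until the index reaches zero.
         */
        for (int32_t line = EXAMPLE_ICACHE_SIZE; line > 0;
             line -= EXAMPLE_ICACHE_LINE_SIZE) {
            __asm__ volatile ("initi %0" : : "r"(line));
        }
    }

The data-cache loop visible at source lines 85-96 follows the same size/line-size countdown; only the cache-maintenance instruction differs.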
/Zephyr-latest/soc/nxp/s32/s32k3/ |
D | s32k3xx_startup.S |
      32  ldr r2, [r1, MC_RGM_DES]
      33  cmp r2, 0x0
      35  ldr r2, [r1, MC_RGM_FES]
      36  cmp r2, 0x0
      41  ldr r2, = DT_REG_SIZE(DT_CHOSEN(zephyr_sram))
      43  subs r2, #1
      52  subs r2, 8
      60  ldr r2, = DT_REG_SIZE(DT_CHOSEN(zephyr_itcm))
      62  subs r2, #1
      66  subs r2, 8
      [all …]
|
/Zephyr-latest/arch/arm/core/cortex_a_r/ |
D | switch.S |
      38  ldr r2, =_thread_offset_to_callee_saved
      39  add r2, r1, r2
      41  stm r2, {r4-r11, sp, lr}
      44  get_cpu r2
      45  ldrb r3, [r2, #_cpu_offset_to_exc_depth]
      50  strb r3, [r2, #_cpu_offset_to_exc_depth]
      72  ldr r2, =_thread_offset_to_callee_saved
      73  add r2, r0, r2
      74  ldm r2, {r4-r11, sp, lr}
     133  get_cpu r2
      [all …]
|
D | isr_wrapper.S |
     113  get_cpu r2
     114  ldr r0, [r2, #___cpu_t_fp_ctx_OFFSET]
     116  streq sp, [r2, #___cpu_t_fp_ctx_OFFSET]
     141  push {r2, r3}
     144  get_cpu r2
     145  ldr r0, [r2, #___cpu_t_nested_OFFSET]
     147  str r0, [r2, #___cpu_t_nested_OFFSET]
     164  ldr r2, =_kernel
     166  ldr r0, [r2, #_kernel_offset_to_idle]
     172  str r1, [r2, #_kernel_offset_to_idle]
      [all …]
|
D | swap_helper.S |
      55  ldr r2, [r1, #___cpu_t_current_OFFSET]
      59  strb lr, [r2, #_thread_offset_to_mode_exc_return]
      64  add r0, r2
      72  ldrb r0, [r2, #_thread_offset_to_user_options]
      98  add r0, r2, #_thread_offset_to_preempt_float
     115  ldr r2, [r3, #_kernel_offset_to_ready_q_cache]
     117  str r2, [r1, #___cpu_t_current_OFFSET]
     122  adds r4, r2, r4
     134  ldrsb lr, [r2, #_thread_offset_to_mode_exc_return]
     140  ldr r0, [r2, #_thread_offset_to_basepri]
      [all …]
|
D | exc.S |
      64  mov r2, sp
      65  vstmia r2!, {s0-s15}
      67  vstmia r2!, {d16-d31}
      69  stm r2, {r0, r1}
      92  get_cpu r2
      93  ldr r1, [r2, #___cpu_t_nested_OFFSET]
      95  str r1, [r2, #___cpu_t_nested_OFFSET]
     134  get_cpu r2
     135  ldr r1, [r2, #___cpu_t_nested_OFFSET]
     137  str r1, [r2, #___cpu_t_nested_OFFSET]
      [all …]
|
D | exc_exit.S |
      72  get_cpu r2
      73  ldr r1, [r2, #___cpu_t_fp_ctx_OFFSET]
      83  streq r1, [r2, #___cpu_t_fp_ctx_OFFSET]
      88  ldm r3, {r1, r2}
      89  tst r2, #FPEXC_EN
      92  vmsr fpexc, r2
     150  ldr r2, =_kernel
     151  ldr r0, [r2, #_kernel_offset_to_ready_q_cache]
     158  get_cpu r2
     159  ldr r0, [r2, #___cpu_t_nested_OFFSET]
      [all …]
|
D | vector_table.S |
      50  get_cpu r2
      51  ldrb r1, [r2, #_cpu_offset_to_exc_depth]
      53  strb r1, [r2, #_cpu_offset_to_exc_depth]
|
/Zephyr-latest/arch/arm/core/cortex_m/ |
D | swap_helper.S |
      71  ldr r2, [r1, #_kernel_offset_to_current]
      75  strb lr, [r2, #_thread_offset_to_mode_exc_return]
      80  add r0, r2
     108  add r0, r2, #_thread_offset_to_preempt_float
     144  ldr r2, [r1, #_kernel_offset_to_ready_q_cache]
     146  str r2, [r1, #_kernel_offset_to_current]
     163  adds r4, r2, r4
     176  ldrsb lr, [r2, #_thread_offset_to_mode_exc_return]
     188  adds r4, r2, r4
     194  ldr r0, [r2, #_thread_offset_to_basepri]
      [all …]
|
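In the Cortex-M swap_helper.S entry above, the matches at source lines 144 and 146 are the heart of the context switch: the scheduler's pre-selected next thread (the ready-queue cache) becomes the current thread. A loose C rendering of that single step, using simplified stand-in types rather than Zephyr's real kernel and thread structures:

    struct example_thread;

    struct example_kernel {
        struct {
            /* next thread already chosen by the scheduler */
            struct example_thread *cache;
        } ready_q;
        /* thread that owns the CPU after the switch */
        struct example_thread *current;
    };

    /* Equivalent of: ldr r2, [r1, #_kernel_offset_to_ready_q_cache]
     *                str r2, [r1, #_kernel_offset_to_current]        */
    static inline void example_promote_next_thread(struct example_kernel *k)
    {
        k->current = k->ready_q.cache;
    }

The remaining matches in that entry (mode_exc_return, user_options, preempt_float, basepri) are the per-thread state saved and restored around this assignment.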
D | fault_s.S |
      87  push { r1, r2 }
      90  mov r2, r10
      91  push {r2, r3}
      93  mov r2, r8
      94  push {r2, r3}
     101  mov r2, lr /* EXC_RETURN */
|
/Zephyr-latest/soc/nxp/lpc/lpc54xxx/gcc/ |
D | startup_LPC54114_cm4.S |
      53  ldrh r2, [r6, #16] /* Mask for CPU ID bits */
      54  ands r2, r1, r2 /* r2 = ARM COrtex CPU ID */
      56  cmp r3, r2 /* Core ID matches M4 identifier */
      79  ldr r2, [r0] /* r1 = SYSCON co-processor boot address */
      81  cmp r2, #0 /* Slave boot address = 0 (not set up)? */
      91  bx r2 /* Jump to slave boot address */
|
/Zephyr-latest/soc/ti/lm3s6965/ |
D | reboot.S |
      44  ldreq r2, =z_force_exit_one_nested_irq
      46  ldrne r2, =z_do_software_reboot
      51  and.w r2, r1
      52  str r2, [ip, #(6 * 4)]
      53  ldr r2, =0x01000000
      54  str r2, [ip, #(7 * 4)]
|
/Zephyr-latest/arch/arc/core/ |
D | fast_irq.S |
      55  lr r2, [_ARC_V2_SEC_STAT]
      56  bclr r2, r2, _ARC_V2_SEC_STAT_SSC_BIT
      57  sflag r2
      60  lr r2, [_ARC_V2_STATUS32]
      61  bclr r2, r2, _ARC_V2_STATUS32_SC_BIT
      62  kflag r2
     184  st r2, [r1, -8]
     238  ld r2, [r1, -8]
     241  st _CAUSE_FIRQ, [r2, _thread_offset_to_relinquish_cause]
     247  mov r2, r0
|
D | isr_wrapper.S |
     227  mov_s r2, _firq_enter
     228  j_s [r2]
     232  mov_s r2, _rirq_enter
     233  j_s [r2]
     236  mov.z r2, _firq_enter
     238  mov.nz r2, _rirq_enter
     239  j_s [r2]
     243  MOVR r2, _rirq_enter
     244  j_s [r2]
|
D | reset.S |
      97  mov_s r2, 0
      98  sr r2, [_ARC_V2_IC_IVIC]
     140  mov_s r2, 0
     143  brge r2, r3, done_mpu_regions_reset
     144  sr r2, [_ARC_V2_MPU_INDEX]
     148  add_s r2, r2, 1
     203  mov_s r2, CONFIG_ISR_STACK_SIZE
|
D | fault_s.S |
     124  STR r2, r2, ___thread_t_switch_handle_OFFSET
     126  MOVR r2, r0
     149  mov ilink, r2
     168  push_s r2
     173  pop_s r2
     179  mov r2, ilink
|
D | userspace.S |
      18  mov_s r2, 0
      64  pop_s r2
      74  st.aw r2, [r5, -4]
     109  push_s r2
     132  mov r2, r6
     205  ld_s r2, [sp, ___isf_t_r2_OFFSET]
     279  st_s r1, [r2, 0]
|
/Zephyr-latest/include/zephyr/arch/arm/ |
D | syscall.h |
      46  register uint32_t r2 __asm__("r2") = arg3;  in arch_syscall_invoke6()
      53  : "=r"(ret), "=r"(r1), "=r"(r2), "=r"(r3)  in arch_syscall_invoke6()
      55  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in arch_syscall_invoke6()
      69  register uint32_t r2 __asm__("r2") = arg3;  in arch_syscall_invoke5()
      75  : "=r"(ret), "=r"(r1), "=r"(r2), "=r"(r3)  in arch_syscall_invoke5()
      77  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in arch_syscall_invoke5()
      90  register uint32_t r2 __asm__("r2") = arg3;  in arch_syscall_invoke4()
      95  : "=r"(ret), "=r"(r1), "=r"(r2), "=r"(r3)  in arch_syscall_invoke4()
      97  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in arch_syscall_invoke4()
     110  register uint32_t r2 __asm__("r2") = arg3;  in arch_syscall_invoke3()
      [all …]
|
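The syscall.h matches above (and the arm64, ARC and SecureShield variants further down) all rely on the same GCC technique: a local register variable such as register uint32_t r2 __asm__("r2") = arg3; pins each argument to the register the kernel-side handler expects, and an inline-asm trap instruction then carries the arguments across the privilege boundary. A minimal sketch of the pattern for AArch32; the svc number, the register chosen for the call ID, and example_invoke3() itself are illustrative assumptions, not Zephyr's syscall ABI:

    #include <stdint.h>

    static inline uint32_t example_invoke3(uint32_t arg1, uint32_t arg2,
                                           uint32_t arg3, uint32_t call_id)
    {
        /* Pin each argument to a specific core register before the trap. */
        register uint32_t ret __asm__("r0") = arg1;
        register uint32_t r1  __asm__("r1") = arg2;
        register uint32_t r2  __asm__("r2") = arg3;
        register uint32_t r6  __asm__("r6") = call_id; /* illustrative call-ID register */

        __asm__ volatile ("svc #0"                     /* illustrative svc number */
                          : "=r" (ret)
                          : "r" (ret), "r" (r1), "r" (r2), "r" (r6)
                          : "memory");

        return ret;
    }

Pinning the variables lets the compiler materialize the arguments directly in the required registers instead of emitting extra moves, and the "memory" clobber keeps loads and stores ordered around the trap; the arm64 header shown below applies the same idea with x-registers (e.g. x2 for arg3).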
/Zephyr-latest/arch/arm/core/ |
D | userspace.S |
     111  push {r1,r2,r3,lr}
     160  mov r2, ip
     183  pop {r1,r2,r3,r4}
     188  pop {r1,r2,r3,lr}
     253  push {r0, r1, r2, r3}
     258  movs r2, #1
     259  orrs r1, r1, r2
     261  orrs r3, r3, r2
     264  ldr r2, =_thread_offset_to_mode
     265  str r1, [r0, r2]
      [all …]
|
/Zephyr-latest/arch/arc/include/ |
D | swap_macros.h |
      73  ld_s r13, [r2, ___thread_base_t_user_options_OFFSET]
      94  STR sp, r2, _thread_offset_to_sp
     100  LDR sp, r2, _thread_offset_to_sp
     110  ld_s r13, [r2, ___thread_base_t_user_options_OFFSET]
     199  STR r2, sp, ___isf_t_r2_OFFSET
     215  lr r2, [_ARC_V2_EI_BASE]
     218  st_s r2, [sp, ___isf_t_ei_base_OFFSET]
     234  ld_s r2, [sp, ___isf_t_ei_base_OFFSET]
     237  sr r2, [_ARC_V2_EI_BASE]
     260  LDR r2, sp, ___isf_t_r2_OFFSET
      [all …]
|
/Zephyr-latest/soc/nxp/mcx/mcxw/ |
D | mcxw71_platform_init.S |
      32  ldr r2, =0
      37  stmia r0!, {r2 - r5}
      44  stmia r0!, {r2 - r5}
      51  stmia r0!, {r2 - r5}
|
/Zephyr-latest/include/zephyr/arch/arm64/ |
D | syscall.h |
      46  register uint64_t r2 __asm__("x2") = arg3;  in arch_syscall_invoke6()
      55  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in arch_syscall_invoke6()
      69  register uint64_t r2 __asm__("x2") = arg3;  in arch_syscall_invoke5()
      77  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in arch_syscall_invoke5()
      90  register uint64_t r2 __asm__("x2") = arg3;  in arch_syscall_invoke4()
      97  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in arch_syscall_invoke4()
     110  register uint64_t r2 __asm__("x2") = arg3;  in arch_syscall_invoke3()
     116  "r" (ret), "r" (r1), "r" (r2), "r" (r8)  in arch_syscall_invoke3()
|
/Zephyr-latest/include/zephyr/arch/arc/ |
D | syscall.h |
      47  register uint32_t r2 __asm__("r2") = arg3;  in arch_syscall_invoke6()
      59  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in arch_syscall_invoke6()
      72  register uint32_t r2 __asm__("r2") = arg3;  in arch_syscall_invoke5()
      83  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in arch_syscall_invoke5()
      95  register uint32_t r2 __asm__("r2") = arg3;  in arch_syscall_invoke4()
     105  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in arch_syscall_invoke4()
     117  register uint32_t r2 __asm__("r2") = arg3;  in arch_syscall_invoke3()
     126  "r" (ret), "r" (r1), "r" (r2), "r" (r6));  in arch_syscall_invoke3()
|
/Zephyr-latest/include/zephyr/arch/arc/v2/secureshield/ |
D | arc_secure.h |
      59  register uint32_t r2 __asm__("r2") = arg3;  in _arc_s_call_invoke6()
      71  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in _arc_s_call_invoke6()
      82  register uint32_t r2 __asm__("r2") = arg3;  in _arc_s_call_invoke5()
      93  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in _arc_s_call_invoke5()
     104  register uint32_t r2 __asm__("r2") = arg3;  in _arc_s_call_invoke4()
     114  "r" (ret), "r" (r1), "r" (r2), "r" (r3),  in _arc_s_call_invoke4()
     125  register uint32_t r2 __asm__("r2") = arg3;  in _arc_s_call_invoke3()
     134  "r" (ret), "r" (r1), "r" (r2), "r" (r6));  in _arc_s_call_invoke3()
|