/***************************************************************************
 * Copyright (c) 2024 Microsoft Corporation
 *
 * This program and the accompanying materials are made available under the
 * terms of the MIT License which is available at
 * https://opensource.org/licenses/MIT.
 *
 * SPDX-License-Identifier: MIT
 **************************************************************************/


/**************************************************************************/
/**************************************************************************/
/**                                                                       */
/** ThreadX Component                                                     */
/**                                                                       */
/**   Thread                                                              */
/**                                                                       */
/**************************************************************************/
/**************************************************************************/


/* #define TX_SOURCE_CODE  */


/* Include necessary system files.  */

/*  #include "tx_api.h"
    #include "tx_thread.h"
    #include "tx_timer.h"  */

    EXTERN      _tx_thread_execute_ptr
    EXTERN      _tx_thread_current_ptr
    EXTERN      _tx_timer_time_slice
    EXTERN      _tx_thread_preempt_disable
    EXTERN      _tx_thread_schedule
    EXTERN      _tx_thread_system_state
#ifdef TX_ENABLE_EXECUTION_CHANGE_NOTIFY
    EXTERN      _tx_execution_isr_exit
#endif


    SECTION `.text`:CODE:REORDER:NOROOT(2)
    CODE
/**************************************************************************/
/*                                                                        */
/*  FUNCTION                                               RELEASE        */
/*                                                                        */
/*    _tx_thread_context_restore                        RISC-V32/IAR      */
/*                                                           6.1          */
/*  AUTHOR                                                                */
/*                                                                        */
/*    William E. Lamie, Microsoft Corporation                             */
/*    Tom van Leeuwen, Technolution B.V.                                  */
/*                                                                        */
/*  DESCRIPTION                                                           */
/*                                                                        */
/*    This function restores the interrupt context if it is processing a  */
/*    nested interrupt.  If not, it returns to the interrupted thread if  */
/*    no preemption is necessary.  Otherwise, if preemption is necessary  */
/*    or if no thread was running, the function returns to the scheduler. */
/*                                                                        */
/*  INPUT                                                                 */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  OUTPUT                                                                */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  CALLS                                                                 */
/*                                                                        */
/*    _tx_thread_schedule                   Thread scheduling routine     */
/*                                                                        */
/*  CALLED BY                                                             */
/*                                                                        */
/*    ISRs                                  Interrupt Service Routines    */
/*                                                                        */
/*  RELEASE HISTORY                                                       */
/*                                                                        */
/*    DATE              NAME                      DESCRIPTION             */
/*                                                                        */
/*  09-30-2020     William E. Lamie         Initial Version 6.1           */
/*                                                                        */
/**************************************************************************/
/* VOID   _tx_thread_context_restore(VOID)
{  */
    PUBLIC      _tx_thread_context_restore
_tx_thread_context_restore:

    /* Lockout interrupts.  */

    csrci   mstatus, 0x08                               ; Disable interrupts (clear mstatus.MIE)

#ifdef TX_ENABLE_EXECUTION_CHANGE_NOTIFY
    call    _tx_execution_isr_exit                      ; Call the ISR execution exit function
#endif

    /* Determine if interrupts are nested.  */
    /* if (--_tx_thread_system_state)
    {  */

    la      t0, _tx_thread_system_state                 ; Pickup address of nested interrupt count
    lw      t1, 0(t0)                                   ; Pickup nested interrupt count
    addi    t1, t1, -1                                  ; Decrement the nested interrupt counter
    sw      t1, 0(t0)                                   ; Store new nested count
    beqz    t1, _tx_thread_not_nested_restore           ; If 0, not a nested restore

    /* Interrupts are nested.  */

    /* Just recover the saved registers and return to the point of
       interrupt.  */
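    /* For reference, the interrupt stack frame offsets referenced by this
       routine (inferred from the loads and stores in this file; words this
       routine never touches are omitted):

           0x00        stack frame type (1 = interrupt frame)
           0x04-0x2C   s11 .. s1
           0x30        s0
           0x34-0x4C   t6 .. t0
           0x50-0x6C   a7 .. a0
           0x70        ra
           0x78        mepc
           0x7C-0xF8   ft0-ft7, fs0-fs1, fa0-fa7, fs2-fs11, ft8-ft11  (FP frame only)
           0xFC        fcsr                                           (FP frame only)

       The frame is 260 bytes with the floating point context and 128 bytes
       without it.  */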
#if __iar_riscv_base_isa == rv32e

    /* Recover floating point registers.  */

    flw     f0, 0x7C(sp)                                ; Recover ft0
    flw     f1, 0x80(sp)                                ; Recover ft1
    flw     f2, 0x84(sp)                                ; Recover ft2
    flw     f3, 0x88(sp)                                ; Recover ft3
    flw     f4, 0x8C(sp)                                ; Recover ft4
    flw     f5, 0x90(sp)                                ; Recover ft5
    flw     f6, 0x94(sp)                                ; Recover ft6
    flw     f7, 0x98(sp)                                ; Recover ft7
    flw     f10, 0xA4(sp)                               ; Recover fa0
    flw     f11, 0xA8(sp)                               ; Recover fa1
    flw     f12, 0xAC(sp)                               ; Recover fa2
    flw     f13, 0xB0(sp)                               ; Recover fa3
    flw     f14, 0xB4(sp)                               ; Recover fa4
    flw     f15, 0xB8(sp)                               ; Recover fa5
    flw     f16, 0xBC(sp)                               ; Recover fa6
    flw     f17, 0xC0(sp)                               ; Recover fa7
    flw     f28, 0xEC(sp)                               ; Recover ft8
    flw     f29, 0xF0(sp)                               ; Recover ft9
    flw     f30, 0xF4(sp)                               ; Recover ft10
    flw     f31, 0xF8(sp)                               ; Recover ft11
    lw      t0, 0xFC(sp)                                ; Recover fcsr
    csrw    fcsr, t0                                    ; Restore fcsr
#endif

    /* Recover standard registers.  */

    /* Restore registers, skipping the global pointer because it does not
       change.  Also skip the preserved (s) registers, since they have been
       restored by any function we called -- except s0, since we use it
       ourselves.  */

    lw      t0, 0x78(sp)                                ; Recover mepc
    csrw    mepc, t0                                    ; Setup mepc
    li      t0, 0x1880                                  ; Prepare mstatus: MPP=Machine mode, MPIE=1
    csrw    mstatus, t0                                 ; mret below re-enables interrupts from MPIE

    lw      x1, 0x70(sp)                                ; Recover ra
    lw      x5, 0x4C(sp)                                ; Recover t0
    lw      x6, 0x48(sp)                                ; Recover t1
    lw      x7, 0x44(sp)                                ; Recover t2
    lw      x8, 0x30(sp)                                ; Recover s0
    lw      x10, 0x6C(sp)                               ; Recover a0
    lw      x11, 0x68(sp)                               ; Recover a1
    lw      x12, 0x64(sp)                               ; Recover a2
    lw      x13, 0x60(sp)                               ; Recover a3
    lw      x14, 0x5C(sp)                               ; Recover a4
    lw      x15, 0x58(sp)                               ; Recover a5
    lw      x16, 0x54(sp)                               ; Recover a6
    lw      x17, 0x50(sp)                               ; Recover a7
    lw      x28, 0x40(sp)                               ; Recover t3
    lw      x29, 0x3C(sp)                               ; Recover t4
    lw      x30, 0x38(sp)                               ; Recover t5
    lw      x31, 0x34(sp)                               ; Recover t6

#if __iar_riscv_base_isa == rv32e
    addi    sp, sp, 260                                 ; Recover stack frame - with floating point enabled
#else
    addi    sp, sp, 128                                 ; Recover stack frame - without floating point enabled
#endif
    mret                                                ; Return to point of interrupt

    /* }  */
_tx_thread_not_nested_restore:

    /* Determine if a thread was interrupted and no preemption is required.  */
    /* else if (((_tx_thread_current_ptr) && (_tx_thread_current_ptr == _tx_thread_execute_ptr))
               || (_tx_thread_preempt_disable))
    {  */

    lw      t1, _tx_thread_current_ptr                  ; Pickup current thread pointer
    beqz    t1, _tx_thread_idle_system_restore          ; If NULL, idle system restore

    lw      t2, _tx_thread_preempt_disable              ; Pickup preempt disable flag
    bgtz    t2, _tx_thread_no_preempt_restore           ; If set, restore interrupted thread

    lw      t2, _tx_thread_execute_ptr                  ; Pickup thread execute pointer
    bne     t1, t2, _tx_thread_preempt_restore          ; If a higher-priority thread is ready, preempt


_tx_thread_no_preempt_restore:

    /* Restore interrupted thread or ISR.  */

    /* Pickup the saved stack pointer.  */
    /* SP = _tx_thread_current_ptr -> tx_thread_stack_ptr;  */

    lw      sp, 8(t1)                                   ; Switch back to thread's stack
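    /* sp now points at the frame that was pushed onto this thread's stack
       when the interrupt was taken (by the matching context save at ISR
       entry), so the restore below mirrors the nested case above and ends
       with an mret back into the interrupted thread.  */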
#if __iar_riscv_base_isa == rv32e

    /* Recover floating point registers.  */

    flw     f0, 0x7C(sp)                                ; Recover ft0
    flw     f1, 0x80(sp)                                ; Recover ft1
    flw     f2, 0x84(sp)                                ; Recover ft2
    flw     f3, 0x88(sp)                                ; Recover ft3
    flw     f4, 0x8C(sp)                                ; Recover ft4
    flw     f5, 0x90(sp)                                ; Recover ft5
    flw     f6, 0x94(sp)                                ; Recover ft6
    flw     f7, 0x98(sp)                                ; Recover ft7
    flw     f10, 0xA4(sp)                               ; Recover fa0
    flw     f11, 0xA8(sp)                               ; Recover fa1
    flw     f12, 0xAC(sp)                               ; Recover fa2
    flw     f13, 0xB0(sp)                               ; Recover fa3
    flw     f14, 0xB4(sp)                               ; Recover fa4
    flw     f15, 0xB8(sp)                               ; Recover fa5
    flw     f16, 0xBC(sp)                               ; Recover fa6
    flw     f17, 0xC0(sp)                               ; Recover fa7
    flw     f28, 0xEC(sp)                               ; Recover ft8
    flw     f29, 0xF0(sp)                               ; Recover ft9
    flw     f30, 0xF4(sp)                               ; Recover ft10
    flw     f31, 0xF8(sp)                               ; Recover ft11
    lw      t0, 0xFC(sp)                                ; Recover fcsr
    csrw    fcsr, t0                                    ; Restore fcsr
#endif

    /* Recover the saved context and return to the point of interrupt.  */

    /* Recover standard registers.  */
    /* Restore registers, skipping the global pointer because it does not
       change.  */

    lw      t0, 0x78(sp)                                ; Recover mepc
    csrw    mepc, t0                                    ; Setup mepc
    li      t0, 0x1880                                  ; Prepare mstatus: MPP=Machine mode, MPIE=1
    csrw    mstatus, t0                                 ; mret below re-enables interrupts from MPIE

    lw      x1, 0x70(sp)                                ; Recover ra
    lw      x5, 0x4C(sp)                                ; Recover t0
    lw      x6, 0x48(sp)                                ; Recover t1
    lw      x7, 0x44(sp)                                ; Recover t2
    lw      x8, 0x30(sp)                                ; Recover s0
    lw      x10, 0x6C(sp)                               ; Recover a0
    lw      x11, 0x68(sp)                               ; Recover a1
    lw      x12, 0x64(sp)                               ; Recover a2
    lw      x13, 0x60(sp)                               ; Recover a3
    lw      x14, 0x5C(sp)                               ; Recover a4
    lw      x15, 0x58(sp)                               ; Recover a5
    lw      x16, 0x54(sp)                               ; Recover a6
    lw      x17, 0x50(sp)                               ; Recover a7
    lw      x28, 0x40(sp)                               ; Recover t3
    lw      x29, 0x3C(sp)                               ; Recover t4
    lw      x30, 0x38(sp)                               ; Recover t5
    lw      x31, 0x34(sp)                               ; Recover t6

#if __iar_riscv_base_isa == rv32e
    addi    sp, sp, 260                                 ; Recover stack frame - with floating point enabled
#else
    addi    sp, sp, 128                                 ; Recover stack frame - without floating point enabled
#endif
    mret                                                ; Return to point of interrupt

    /* }
    else
    {  */
_tx_thread_preempt_restore:

    /* Instead of directly activating the thread again, ensure we save the
       entire stack frame by saving the remaining registers.  */

    lw      t0, 8(t1)                                   ; Pickup thread's stack pointer
    ori     t3, x0, 1                                   ; Build interrupt stack type
    sw      t3, 0(t0)                                   ; Store stack type

#if __iar_riscv_base_isa == rv32e

    /* Store floating point preserved registers.  */

    fsw     f8, 0x9C(t0)                                ; Store fs0
    fsw     f9, 0xA0(t0)                                ; Store fs1
    fsw     f18, 0xC4(t0)                               ; Store fs2
    fsw     f19, 0xC8(t0)                               ; Store fs3
    fsw     f20, 0xCC(t0)                               ; Store fs4
    fsw     f21, 0xD0(t0)                               ; Store fs5
    fsw     f22, 0xD4(t0)                               ; Store fs6
    fsw     f23, 0xD8(t0)                               ; Store fs7
    fsw     f24, 0xDC(t0)                               ; Store fs8
    fsw     f25, 0xE0(t0)                               ; Store fs9
    fsw     f26, 0xE4(t0)                               ; Store fs10
    fsw     f27, 0xE8(t0)                               ; Store fs11
#endif

    /* Store standard preserved registers.  */

    sw      x9, 0x2C(t0)                                ; Store s1
    sw      x18, 0x28(t0)                               ; Store s2
    sw      x19, 0x24(t0)                               ; Store s3
    sw      x20, 0x20(t0)                               ; Store s4
    sw      x21, 0x1C(t0)                               ; Store s5
    sw      x22, 0x18(t0)                               ; Store s6
    sw      x23, 0x14(t0)                               ; Store s7
    sw      x24, 0x10(t0)                               ; Store s8
    sw      x25, 0x0C(t0)                               ; Store s9
    sw      x26, 0x08(t0)                               ; Store s10
    sw      x27, 0x04(t0)                               ; Store s11
                                                        ; Note: s0 is already stored!
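    /* The thread's frame now holds its complete context: the scratch
       registers and mepc saved at interrupt entry plus the preserved
       registers stored above, tagged as an interrupt frame by the type word
       at offset 0x00, so _tx_thread_schedule can later resume this thread
       directly from the frame.  */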
    /* Save the remaining time-slice and disable it.  */
    /* if (_tx_timer_time_slice)
    {  */

    la      t0, _tx_timer_time_slice                    ; Pickup time slice variable address
    lw      t2, 0(t0)                                   ; Pickup time slice
    beqz    t2, _tx_thread_dont_save_ts                 ; If 0, skip time slice processing

    /* _tx_thread_current_ptr -> tx_thread_time_slice =  _tx_timer_time_slice;
       _tx_timer_time_slice =  0;  */

    sw      t2, 24(t1)                                  ; Save current time slice
    sw      x0, 0(t0)                                   ; Clear global time slice


    /* }  */
_tx_thread_dont_save_ts:

    /* Clear the current task pointer.  */
    /* _tx_thread_current_ptr =  TX_NULL;  */

    /* Return to the scheduler.  */
    /* _tx_thread_schedule();  */

    sw      x0, _tx_thread_current_ptr, t0              ; Clear current thread pointer
    /* }  */

_tx_thread_idle_system_restore:

    /* Just return back to the scheduler!  */

    j       _tx_thread_schedule                         ; Return to scheduler

/* }  */
    END