/***************************************************************************
 * Copyright (c) 2024 Microsoft Corporation
 *
 * This program and the accompanying materials are made available under the
 * terms of the MIT License which is available at
 * https://opensource.org/licenses/MIT.
 *
 * SPDX-License-Identifier: MIT
 **************************************************************************/


/**************************************************************************/
/**************************************************************************/
/**                                                                       */
/** ThreadX Component                                                     */
/**                                                                       */
/**   Thread                                                              */
/**                                                                       */
/**************************************************************************/
/**************************************************************************/
#ifdef TX_INCLUDE_USER_DEFINE_FILE
#include "tx_user.h"
#endif

    .text
    .align 3
/**************************************************************************/
/*                                                                        */
/*  FUNCTION                                               RELEASE        */
/*                                                                        */
/*    _tx_thread_schedule                             ARMv8-A-SMP         */
/*                                                           6.3.0        */
/*  AUTHOR                                                                */
/*                                                                        */
/*    William E. Lamie, Microsoft Corporation                             */
/*                                                                        */
/*  DESCRIPTION                                                           */
/*                                                                        */
/*    This function waits for a thread control block pointer to appear in */
/*    the _tx_thread_execute_ptr variable.  Once a thread pointer appears */
/*    in the variable, the corresponding thread is resumed.               */
/*                                                                        */
/*  INPUT                                                                 */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  OUTPUT                                                                */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  CALLS                                                                 */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  CALLED BY                                                             */
/*                                                                        */
/*    _tx_initialize_kernel_enter          ThreadX entry function         */
/*    _tx_thread_system_return             Return to system from thread   */
/*    _tx_thread_context_restore           Restore thread's context       */
/*                                                                        */
/*  RELEASE HISTORY                                                       */
/*                                                                        */
/*    DATE              NAME                      DESCRIPTION             */
/*                                                                        */
/*  09-30-2020      William E. Lamie        Initial Version 6.1           */
/*  01-31-2022      Andres Mlinar           Updated comments,             */
/*                                            added ARMv8.2-A support,    */
/*                                            resulting in version 6.1.10 */
/*  10-31-2023      Tiejun Zhou             Modified comment(s), added    */
/*                                            #include tx_user.h,         */
/*                                            added memory barrier,       */
/*                                            resulting in version 6.3.0  */
/*                                                                        */
/**************************************************************************/
// VOID   _tx_thread_schedule(VOID)
// {
    .global _tx_thread_schedule
    .type   _tx_thread_schedule, @function
_tx_thread_schedule:

    /* Enable interrupts.  */

    MSR     DAIFClr, 0x3                        // Enable interrupts

    /* Pickup the CPU ID.  */

    MRS     x20, MPIDR_EL1                      // Pickup the core ID
#ifdef TX_ARMV8_2
#if TX_THREAD_SMP_CLUSTERS > 1
    UBFX    x1, x20, #16, #8                    // Isolate cluster ID
#endif
    UBFX    x20, x20, #8, #8                    // Isolate core ID
#else
#if TX_THREAD_SMP_CLUSTERS > 1
    UBFX    x1, x20, #8, #8                     // Isolate cluster ID
#endif
    UBFX    x20, x20, #0, #8                    // Isolate core ID
#endif
#if TX_THREAD_SMP_CLUSTERS > 1
    ADDS    x20, x20, x1, LSL #2                // Calculate CPU ID
#endif

    /* Wait for a thread to execute.  */
    // do
    // {

    LDR     x1, =_tx_thread_execute_ptr         // Address of thread execute ptr
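    /* Illustrative C sketch of the polling loop below (not part of the
       source; "core_id" merely names the per-core index computed in x20
       above, and "thread" names the pointer picked up into x0):

           do
           {
               thread = _tx_thread_execute_ptr[core_id];   // sampled with interrupts masked
           } while (thread == TX_NULL);

       When TX_ENABLE_WFI is defined, the core parks in WFI with interrupts
       enabled between samples rather than busy-spinning.  */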
#ifdef TX_ENABLE_WFI
__tx_thread_schedule_loop:
    MSR     DAIFSet, 0x3                        // Lockout interrupts
    LDR     x0, [x1, x20, LSL #3]               // Pickup next thread to execute
    CMP     x0, #0                              // Is it NULL?
    BNE     _tx_thread_schedule_thread          //
    MSR     DAIFClr, 0x3                        // Enable interrupts
    WFI                                         //
    B       __tx_thread_schedule_loop           // Keep looking for a thread
_tx_thread_schedule_thread:
#else
    MSR     DAIFSet, 0x3                        // Lockout interrupts
    LDR     x0, [x1, x20, LSL #3]               // Pickup next thread to execute
    CMP     x0, #0                              // Is it NULL?
    BEQ     _tx_thread_schedule                 // Keep looking for a thread
#endif

    // }
    // while(_tx_thread_execute_ptr == TX_NULL);

    /* Get the lock for accessing the thread's ready bit.  */

    MOV     w2, #280                            // Build offset to the lock
    ADD     x2, x0, x2                          // Get the address of the lock
    LDAXR   w3, [x2]                            // Pickup the lock value
    CMP     w3, #0                              // Check if it's available
    BNE     _tx_thread_schedule                 // No, lock not available
    MOV     w3, #1                              // Build the lock set value
    STXR    w4, w3, [x2]                        // Try to get the lock
    CMP     w4, #0                              // Check if we got the lock
    BNE     _tx_thread_schedule                 // No, another core got it first
    DMB     ISH                                 // Ensure write to lock completes

    /* Now make sure the thread's ready bit is set.  */

    LDR     w3, [x0, #260]                      // Pickup the thread ready bit
    CMP     w3, #0                              // Is it set?
    BNE     _tx_thread_ready_for_execution      // Yes, schedule the thread

    /* The ready bit isn't set.  Release the lock and jump back to the scheduler.  */

    MOV     w3, #0                              // Build clear value
    STR     w3, [x2]                            // Release the lock
    DMB     ISH                                 // Ensure write to lock completes
    B       _tx_thread_schedule                 // Jump back to the scheduler

_tx_thread_ready_for_execution:

    /* We have a thread to execute.  */

    /* Clear the ready bit and release the lock.  */

    MOV     w3, #0                              // Build clear value
    STR     w3, [x0, #260]                      // Store it back in the thread control block
    DMB     ISH                                 //
    MOV     w3, #0                              // Build clear value for the lock
    STR     w3, [x2]                            // Release the lock
    DMB     ISH                                 //

    /* Setup the current thread pointer.  */
    // _tx_thread_current_ptr =  _tx_thread_execute_ptr;

    LDR     x2, =_tx_thread_current_ptr         // Pickup address of current thread
    STR     x0, [x2, x20, LSL #3]               // Setup current thread pointer
    DMB     ISH                                 //
    LDR     x1, [x1, x20, LSL #3]               // Reload the execute pointer
    CMP     x0, x1                              // Did it change? (full 64-bit pointer compare)
    BEQ     _execute_pointer_did_not_change     // If not, skip handling

    /* Between reading the execute pointer and assigning it to the current
       pointer, the execute pointer was changed by some external code.  If the
       current pointer was still NULL when that code checked whether a core
       preempt was necessary, it would not have issued one, and the preemption
       would be missed.  To handle this, undo the assignment, re-set the ready
       bit, and jump back to the scheduler so it can schedule the new thread.  */

    MOV     w1, #0                              // Build clear value
    STR     x1, [x2, x20, LSL #3]               // Clear current thread pointer

    MOV     w1, #1                              // Build set value
    STR     w1, [x0, #260]                      // Re-set the ready bit
    DMB     ISH                                 //

    B       _tx_thread_schedule                 // Jump back to the scheduler to schedule the new thread
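    /* Illustrative C sketch of the commit-and-verify sequence above (not part
       of the source; "core_id" again names the index in x20, and
       "tx_thread_ready" is a descriptive stand-in for the flag at offset 260):

           _tx_thread_current_ptr[core_id] = thread;
           if (_tx_thread_execute_ptr[core_id] != thread)
           {
               _tx_thread_current_ptr[core_id] = TX_NULL;  // undo the commit
               thread -> tx_thread_ready = 1;              // re-set the ready bit
               goto reschedule;                            // rescan for the new thread
           }  */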
_execute_pointer_did_not_change:

    /* Increment the run count for this thread.  */
    // _tx_thread_current_ptr -> tx_thread_run_count++;

    LDR     w2, [x0, #4]                        // Pickup run counter
    LDR     w3, [x0, #36]                       // Pickup time-slice for this thread
    ADD     w2, w2, #1                          // Increment thread run-counter
    STR     w2, [x0, #4]                        // Store the new run counter

    /* Setup time-slice, if present.  */
    // _tx_timer_time_slice =  _tx_thread_current_ptr -> tx_thread_time_slice;

    LDR     x2, =_tx_timer_time_slice           // Pickup address of time-slice
                                                //   variable
    LDR     x4, [x0, #8]                        // Switch stack pointers
    MOV     sp, x4                              //
    STR     w3, [x2, x20, LSL #2]               // Setup time-slice

#if (defined(TX_ENABLE_EXECUTION_CHANGE_NOTIFY) || defined(TX_EXECUTION_PROFILE_ENABLE))

    /* Call the thread entry function to indicate the thread is executing.  */

    MOV     x19, x0                             // Save x0
    BL      _tx_execution_thread_enter          // Call the thread execution enter function
    MOV     x0, x19                             // Restore x0
#endif

    /* Switch to the thread's stack.  */
    // sp = _tx_thread_execute_ptr -> tx_thread_stack_ptr;

    /* Determine if an interrupt frame or a synchronous task suspension frame
       is present.  */

    LDP     x4, x5, [sp], #16                   // Pickup saved SPSR/DAIF and ELR_EL1
    CMP     x5, #0                              // Check for synchronous context switch (ELR_EL1 = NULL)
    BEQ     _tx_solicited_return                //
#ifdef EL1
    MSR     SPSR_EL1, x4                        // Setup SPSR for return
    MSR     ELR_EL1, x5                         // Setup point of interrupt
#else
#ifdef EL2
    MSR     SPSR_EL2, x4                        // Setup SPSR for return
    MSR     ELR_EL2, x5                         // Setup point of interrupt
#else
    MSR     SPSR_EL3, x4                        // Setup SPSR for return
    MSR     ELR_EL3, x5                         // Setup point of interrupt
#endif
#endif
#ifdef ENABLE_ARM_FP
    LDR     w1, [x0, #268]                      // Pickup FP enable flag
    CMP     w1, #0                              // Is FP enabled?
    BEQ     _skip_interrupt_fp_restore          // No, skip FP restore
    LDP     x0, x1, [sp], #16                   // Pickup FPSR, FPCR
    MSR     FPSR, x0                            // Recover FPSR
    MSR     FPCR, x1                            // Recover FPCR
    LDP     q30, q31, [sp], #32                 // Recover q30, q31
    LDP     q28, q29, [sp], #32                 // Recover q28, q29
    LDP     q26, q27, [sp], #32                 // Recover q26, q27
    LDP     q24, q25, [sp], #32                 // Recover q24, q25
    LDP     q22, q23, [sp], #32                 // Recover q22, q23
    LDP     q20, q21, [sp], #32                 // Recover q20, q21
    LDP     q18, q19, [sp], #32                 // Recover q18, q19
    LDP     q16, q17, [sp], #32                 // Recover q16, q17
    LDP     q14, q15, [sp], #32                 // Recover q14, q15
    LDP     q12, q13, [sp], #32                 // Recover q12, q13
    LDP     q10, q11, [sp], #32                 // Recover q10, q11
    LDP     q8, q9, [sp], #32                   // Recover q8, q9
    LDP     q6, q7, [sp], #32                   // Recover q6, q7
    LDP     q4, q5, [sp], #32                   // Recover q4, q5
    LDP     q2, q3, [sp], #32                   // Recover q2, q3
    LDP     q0, q1, [sp], #32                   // Recover q0, q1
_skip_interrupt_fp_restore:
#endif
    LDP     x28, x29, [sp], #16                 // Recover x28 (x29 is restored below)
    LDP     x26, x27, [sp], #16                 // Recover x26, x27
    LDP     x24, x25, [sp], #16                 // Recover x24, x25
    LDP     x22, x23, [sp], #16                 // Recover x22, x23
    LDP     x20, x21, [sp], #16                 // Recover x20, x21
    LDP     x18, x19, [sp], #16                 // Recover x18, x19
    LDP     x16, x17, [sp], #16                 // Recover x16, x17
    LDP     x14, x15, [sp], #16                 // Recover x14, x15
    LDP     x12, x13, [sp], #16                 // Recover x12, x13
    LDP     x10, x11, [sp], #16                 // Recover x10, x11
    LDP     x8, x9, [sp], #16                   // Recover x8, x9
    LDP     x6, x7, [sp], #16                   // Recover x6, x7
    LDP     x4, x5, [sp], #16                   // Recover x4, x5
    LDP     x2, x3, [sp], #16                   // Recover x2, x3
    LDP     x0, x1, [sp], #16                   // Recover x0, x1
    LDP     x29, x30, [sp], #16                 // Recover x29, x30
    ERET                                        // Return to point of interrupt

_tx_solicited_return:
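    /* A solicited (cooperative) switch saved only the callee-saved state, so
       the unwind below is much shorter than the interrupt path above.  An
       illustrative view of the frame, in the order it is popped (inferred
       from the restore sequence below; not a structure defined in the
       source):

           [DAIF, 0]       already popped into x4/x5 above; x5 == 0 marks the frame solicited
           [FPSR, FPCR]    only if the thread's FP enable flag (offset 268) is set
           [q14..q8]       callee-saved SIMD register pairs
           [x27..x19]      callee-saved general register pairs
           [x29, x30]      frame pointer and link register  */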
#ifdef ENABLE_ARM_FP
    LDR     w1, [x0, #268]                      // Pickup FP enable flag
    CMP     w1, #0                              // Is FP enabled?
    BEQ     _skip_solicited_fp_restore          // No, skip FP restore
    LDP     x0, x1, [sp], #16                   // Pickup FPSR, FPCR
    MSR     FPSR, x0                            // Recover FPSR
    MSR     FPCR, x1                            // Recover FPCR
    LDP     q14, q15, [sp], #32                 // Recover q14, q15
    LDP     q12, q13, [sp], #32                 // Recover q12, q13
    LDP     q10, q11, [sp], #32                 // Recover q10, q11
    LDP     q8, q9, [sp], #32                   // Recover q8, q9
_skip_solicited_fp_restore:
#endif
    LDP     x27, x28, [sp], #16                 // Recover x27, x28
    LDP     x25, x26, [sp], #16                 // Recover x25, x26
    LDP     x23, x24, [sp], #16                 // Recover x23, x24
    LDP     x21, x22, [sp], #16                 // Recover x21, x22
    LDP     x19, x20, [sp], #16                 // Recover x19, x20
    LDP     x29, x30, [sp], #16                 // Recover x29, x30
    MSR     DAIF, x4                            // Recover DAIF
    RET                                         // Return to caller
// }