/***************************************************************************
 * Copyright (c) 2024 Microsoft Corporation
 *
 * This program and the accompanying materials are made available under the
 * terms of the MIT License which is available at
 * https://opensource.org/licenses/MIT.
 *
 * SPDX-License-Identifier: MIT
 **************************************************************************/


/**************************************************************************/
/**************************************************************************/
/**                                                                       */
/** ThreadX Component                                                     */
/**                                                                       */
/**   Thread                                                              */
/**                                                                       */
/**************************************************************************/
/**************************************************************************/
#ifdef TX_INCLUDE_USER_DEFINE_FILE
#include "tx_user.h"
#endif

    .text
    .align 3
/**************************************************************************/
/*                                                                        */
/*  FUNCTION                                               RELEASE        */
/*                                                                        */
/*    _tx_thread_schedule                                 ARMv8-A         */
/*                                                           6.3.0        */
/*  AUTHOR                                                                */
/*                                                                        */
/*    William E. Lamie, Microsoft Corporation                             */
/*                                                                        */
/*  DESCRIPTION                                                           */
/*                                                                        */
/*    This function waits for a thread control block pointer to appear in */
/*    the _tx_thread_execute_ptr variable.  Once a thread pointer appears */
/*    in the variable, the corresponding thread is resumed.  Two saved    */
/*    context layouts are supported: a full interrupt frame (restored     */
/*    with ERET) and a smaller solicited frame built by a synchronous     */
/*    context switch (restored with RET).  The frame type is encoded in   */
/*    the saved ELR slot: ELR == 0 marks a solicited frame.               */
/*                                                                        */
/*  INPUT                                                                 */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  OUTPUT                                                                */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  CALLS                                                                 */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  CALLED BY                                                             */
/*                                                                        */
/*    _tx_initialize_kernel_enter          ThreadX entry function         */
/*    _tx_thread_system_return             Return to system from thread   */
/*    _tx_thread_context_restore           Restore thread's context       */
/*                                                                        */
/*  RELEASE HISTORY                                                       */
/*                                                                        */
/*    DATE              NAME                      DESCRIPTION             */
/*                                                                        */
/*  09-30-2020      William E. Lamie        Initial Version 6.1           */
/*  01-31-2022      Andres Mlinar           Updated comments,             */
/*                                            added ARMv8.2-A support,    */
/*                                            resulting in version 6.1.10 */
/*  10-31-2023      Tiejun Zhou             Modified comment(s), added    */
/*                                            #include tx_user.h,         */
/*                                            resulting in version 6.3.0  */
/*                                                                        */
/**************************************************************************/
// VOID   _tx_thread_schedule(VOID)
// {
    .global _tx_thread_schedule
    .type _tx_thread_schedule, @function
_tx_thread_schedule:

    /* Enable interrupts.  */

    MSR     DAIFClr, 0x3                        // Enable interrupts (clear I and F masks)

    /* Wait for a thread to execute.  */
    // do
    // {

    LDR     x1, =_tx_thread_execute_ptr         // Address of thread execute ptr

#ifdef TX_ENABLE_WFI
__tx_thread_schedule_loop:
    LDR     x0, [x1, #0]                        // Pickup next thread to execute
    CMP     x0, #0                              // Is it NULL?
    BNE     _tx_thread_schedule_thread          // If not NULL, a thread is ready
    WFI                                         // Idle the core until an interrupt arrives
    B       __tx_thread_schedule_loop           // Keep looking for a thread
_tx_thread_schedule_thread:
#else
__tx_thread_schedule_loop:
    LDR     x0, [x1, #0]                        // Pickup next thread to execute
    CMP     x0, #0                              // Is it NULL?
    BEQ     __tx_thread_schedule_loop           // If so, keep looking for a thread
#endif

    // }
    // while(_tx_thread_execute_ptr == TX_NULL);

    /* Yes! We have a thread to execute.  Lockout interrupts and
       transfer control to it.  */

    MSR     DAIFSet, 0x3                        // Lockout interrupts (set I and F masks)

    /* Setup the current thread pointer.  */
    // _tx_thread_current_ptr =  _tx_thread_execute_ptr;

    LDR     x1, =_tx_thread_current_ptr         // Pickup address of current thread
    STR     x0, [x1, #0]                        // Setup current thread pointer

    /* Increment the run count for this thread.  */
    // _tx_thread_current_ptr -> tx_thread_run_count++;

    LDR     w2, [x0, #4]                        // Pickup run counter (TCB offset 4)
    LDR     w3, [x0, #36]                       // Pickup time-slice for this thread (TCB offset 36)
    ADD     w2, w2, #1                          // Increment thread run-counter
    STR     w2, [x0, #4]                        // Store the new run counter

    /* Setup time-slice, if present.  Also switch to the thread's stack here:
       sp = _tx_thread_execute_ptr -> tx_thread_stack_ptr (TCB offset 8).  */
    // _tx_timer_time_slice =  _tx_thread_current_ptr -> tx_thread_time_slice;

    LDR     x2, =_tx_timer_time_slice           // Pickup address of time slice
                                                //   variable
    LDR     x4, [x0, #8]                        // Switch stack pointers: pickup saved sp
    MOV     sp, x4                              //   and install it
    STR     w3, [x2, #0]                        // Setup time-slice

#if (defined(TX_ENABLE_EXECUTION_CHANGE_NOTIFY) || defined(TX_EXECUTION_PROFILE_ENABLE))

    /* Call the thread entry function to indicate the thread is executing.  */

    MOV     x19, x0                             // Save x0 (thread ptr) in callee-saved x19
    BL      _tx_execution_thread_enter          // Call the thread execution enter function
    MOV     x0, x19                             // Restore x0
#endif

    /* The thread's stack is already current (switched above).  Determine if an
       interrupt frame or a synchronous task suspension frame is present by
       popping the first pair: x4 = saved SPSR (interrupt frame) or saved DAIF
       (solicited frame), x5 = saved ELR, which is NULL for a solicited frame.
       NOTE(review): x4 must stay live until the final MSR DAIF on the
       solicited path below.  */

    LDP     x4, x5, [sp], #16                   // Pickup saved SPSR/DAIF and ELR_EL1
    CMP     x5, #0                              // Check for synchronous context switch (ELR_EL1 = NULL)
    BEQ     _tx_solicited_return
#ifdef EL1
    MSR     SPSR_EL1, x4                        // Setup SPSR for return
    MSR     ELR_EL1, x5                         // Setup point of interrupt
#else
#ifdef EL2
    MSR     SPSR_EL2, x4                        // Setup SPSR for return
    MSR     ELR_EL2, x5                         // Setup point of interrupt
#else
    MSR     SPSR_EL3, x4                        // Setup SPSR for return
    MSR     ELR_EL3, x5                         // Setup point of interrupt
#endif
#endif
#ifdef ENABLE_ARM_FP
    LDR     w1, [x0, #248]                      // Pickup FP enable flag (TCB offset 248)
    CMP     w1, #0                              // Is FP enabled?
    BEQ     _skip_interrupt_fp_restore          // No, skip FP restore
    LDP     x0, x1, [sp], #16                   // Pickup FPSR, FPCR (x0 no longer needed as thread ptr)
    MSR     FPSR, x0                            // Recover FPSR
    MSR     FPCR, x1                            // Recover FPCR
    LDP     q30, q31, [sp], #32                 // Recover q30, q31
    LDP     q28, q29, [sp], #32                 // Recover q28, q29
    LDP     q26, q27, [sp], #32                 // Recover q26, q27
    LDP     q24, q25, [sp], #32                 // Recover q24, q25
    LDP     q22, q23, [sp], #32                 // Recover q22, q23
    LDP     q20, q21, [sp], #32                 // Recover q20, q21
    LDP     q18, q19, [sp], #32                 // Recover q18, q19
    LDP     q16, q17, [sp], #32                 // Recover q16, q17
    LDP     q14, q15, [sp], #32                 // Recover q14, q15
    LDP     q12, q13, [sp], #32                 // Recover q12, q13
    LDP     q10, q11, [sp], #32                 // Recover q10, q11
    LDP     q8, q9, [sp], #32                   // Recover q8, q9
    LDP     q6, q7, [sp], #32                   // Recover q6, q7
    LDP     q4, q5, [sp], #32                   // Recover q4, q5
    LDP     q2, q3, [sp], #32                   // Recover q2, q3
    LDP     q0, q1, [sp], #32                   // Recover q0, q1
_skip_interrupt_fp_restore:
#endif
    LDP     x28, x29, [sp], #16                 // Recover x28 (x29 is reloaded from the final pair below)
    LDP     x26, x27, [sp], #16                 // Recover x26, x27
    LDP     x24, x25, [sp], #16                 // Recover x24, x25
    LDP     x22, x23, [sp], #16                 // Recover x22, x23
    LDP     x20, x21, [sp], #16                 // Recover x20, x21
    LDP     x18, x19, [sp], #16                 // Recover x18, x19
    LDP     x16, x17, [sp], #16                 // Recover x16, x17
    LDP     x14, x15, [sp], #16                 // Recover x14, x15
    LDP     x12, x13, [sp], #16                 // Recover x12, x13
    LDP     x10, x11, [sp], #16                 // Recover x10, x11
    LDP     x8, x9, [sp], #16                   // Recover x8, x9
    LDP     x6, x7, [sp], #16                   // Recover x6, x7
    LDP     x4, x5, [sp], #16                   // Recover x4, x5
    LDP     x2, x3, [sp], #16                   // Recover x2, x3
    LDP     x0, x1, [sp], #16                   // Recover x0, x1
    LDP     x29, x30, [sp], #16                 // Recover x29, x30
    ERET                                        // Return to point of interrupt

_tx_solicited_return:

    /* Solicited (synchronous) frame: only the callee-saved registers were
       stacked by the suspending call, so only those are restored.  x4 still
       holds the saved DAIF popped with the frame header above.  */

#ifdef ENABLE_ARM_FP
    LDR     w1, [x0, #248]                      // Pickup FP enable flag (TCB offset 248)
    CMP     w1, #0                              // Is FP enabled?
    BEQ     _skip_solicited_fp_restore          // No, skip FP restore
    LDP     x0, x1, [sp], #16                   // Pickup FPSR, FPCR
    MSR     FPSR, x0                            // Recover FPSR
    MSR     FPCR, x1                            // Recover FPCR
    LDP     q14, q15, [sp], #32                 // Recover q14, q15 (callee-saved v8-v15 only)
    LDP     q12, q13, [sp], #32                 // Recover q12, q13
    LDP     q10, q11, [sp], #32                 // Recover q10, q11
    LDP     q8, q9, [sp], #32                   // Recover q8, q9
_skip_solicited_fp_restore:
#endif
    LDP     x27, x28, [sp], #16                 // Recover x27, x28
    LDP     x25, x26, [sp], #16                 // Recover x25, x26
    LDP     x23, x24, [sp], #16                 // Recover x23, x24
    LDP     x21, x22, [sp], #16                 // Recover x21, x22
    LDP     x19, x20, [sp], #16                 // Recover x19, x20
    LDP     x29, x30, [sp], #16                 // Recover x29, x30 (frame ptr and return address)
    MSR     DAIF, x4                            // Recover DAIF saved at suspension time
    RET                                         // Return to caller
// }