/**************************************************************************/
/*                                                                        */
/*       Copyright (c) Microsoft Corporation.  All rights reserved.       */
/*                                                                        */
/*       This software is licensed under the Microsoft Software License   */
/*       Terms for Microsoft Azure RTOS. Full text of the license can be  */
/*       found in the LICENSE file at https://aka.ms/AzureRTOS_EULA       */
/*       and in the root directory of this software.                      */
/*                                                                        */
/**************************************************************************/


/**************************************************************************/
/**************************************************************************/
/**                                                                       */
/** ThreadX Component                                                     */
/**                                                                       */
/**   Thread                                                              */
/**                                                                       */
/**************************************************************************/
/**************************************************************************/
#ifdef TX_INCLUDE_USER_DEFINE_FILE
#include "tx_user.h"
#endif

    .text
    .align 3
/**************************************************************************/
/*                                                                        */
/*  FUNCTION                                               RELEASE        */
/*                                                                        */
/*    _tx_thread_schedule                                  ARMv8-A        */
/*                                                           6.3.0        */
/*  AUTHOR                                                                */
/*                                                                        */
/*    William E. Lamie, Microsoft Corporation                             */
/*                                                                        */
/*  DESCRIPTION                                                           */
/*                                                                        */
/*    This function waits for a thread control block pointer to appear in */
/*    the _tx_thread_execute_ptr variable.  Once a thread pointer appears */
/*    in the variable, the corresponding thread is resumed.               */
/*                                                                        */
/*    Two saved-context frame formats are handled (distinguished by the   */
/*    saved ELR word at the top of the thread's stack):                   */
/*      - interrupt frame (ELR != 0): full x0-x30 + optional q0-q31      */
/*        state, resumed via ERET;                                        */
/*      - solicited frame (ELR == 0): callee-saved x19-x30 + optional    */
/*        q8-q15 state, resumed via RET.                                  */
/*                                                                        */
/*  INPUT                                                                 */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  OUTPUT                                                                */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  CALLS                                                                 */
/*                                                                        */
/*    None                                                                */
/*                                                                        */
/*  CALLED BY                                                             */
/*                                                                        */
/*    _tx_initialize_kernel_enter          ThreadX entry function         */
/*    _tx_thread_system_return             Return to system from thread   */
/*    _tx_thread_context_restore           Restore thread's context       */
/*                                                                        */
/*  RELEASE HISTORY                                                       */
/*                                                                        */
/*    DATE              NAME                      DESCRIPTION             */
/*                                                                        */
/*  09-30-2020     William E. Lamie         Initial Version 6.1           */
/*  01-31-2022     Andres Mlinar            Updated comments,             */
/*                                            added ARMv8.2-A support,    */
/*                                            resulting in version 6.1.10 */
/*  10-31-2023     Tiejun Zhou              Modified comment(s), added    */
/*                                            #include tx_user.h,         */
/*                                            resulting in version 6.3.0  */
/*                                                                        */
/**************************************************************************/
// VOID   _tx_thread_schedule(VOID)
// {
    .global _tx_thread_schedule
    .type   _tx_thread_schedule, @function
_tx_thread_schedule:

    /* Enable interrupts.  */

    MSR     DAIFClr, 0x3                        // Enable interrupts (clear I and F masks)

    /* Wait for a thread to execute.  */
    // do
    // {

    LDR     x1, =_tx_thread_execute_ptr         // Address of thread execute ptr

#ifdef TX_ENABLE_WFI
    /* Power-friendly variant: sleep in WFI between polls of the
       execute pointer.  An interrupt (which may ready a thread)
       wakes the core and the pointer is re-examined.  */
__tx_thread_schedule_loop:
    LDR     x0, [x1, #0]                        // Pickup next thread to execute
    CMP     x0, #0                              // Is it NULL?
    BNE     _tx_thread_schedule_thread          // Thread available — go schedule it
    WFI                                         // Otherwise wait for an interrupt
    B       __tx_thread_schedule_loop           // Keep looking for a thread
_tx_thread_schedule_thread:
#else
    /* Busy-poll variant: spin until a thread pointer appears.  */
__tx_thread_schedule_loop:
    LDR     x0, [x1, #0]                        // Pickup next thread to execute
    CMP     x0, #0                              // Is it NULL?
    BEQ     __tx_thread_schedule_loop           // If so, keep looking for a thread
#endif

    // }
    // while(_tx_thread_execute_ptr == TX_NULL);

    /* Yes! We have a thread to execute.  Note that x0 holds the TCB
       pointer from here on.  Lockout interrupts and transfer control
       to the thread.  */

    MSR     DAIFSet, 0x3                        // Lockout interrupts (set I and F masks)

    /* Setup the current thread pointer.  */
    // _tx_thread_current_ptr =  _tx_thread_execute_ptr;

    LDR     x1, =_tx_thread_current_ptr         // Pickup address of current thread
    STR     x0, [x1, #0]                        // Setup current thread pointer

    /* Increment the run count for this thread.  */
    // _tx_thread_current_ptr -> tx_thread_run_count++;

    LDR     w2, [x0, #4]                        // Pickup run counter
    LDR     w3, [x0, #36]                       // Pickup time-slice for this thread
    ADD     w2, w2, #1                          // Increment thread run-counter
    STR     w2, [x0, #4]                        // Store the new run counter

    /* Setup time-slice, if present.  */
    // _tx_timer_time_slice =  _tx_thread_current_ptr -> tx_thread_time_slice;

    LDR     x2, =_tx_timer_time_slice           // Pickup address of time slice
                                                //   variable
    LDR     x4, [x0, #8]                        // Switch stack pointers: pickup the
    MOV     sp, x4                              //   thread's saved stack pointer
    STR     w3, [x2, #0]                        // Setup time-slice

#if (defined(TX_ENABLE_EXECUTION_CHANGE_NOTIFY) || defined(TX_EXECUTION_PROFILE_ENABLE))

    /* Call the thread entry function to indicate the thread is executing.  */

    MOV     x19, x0                             // Save x0 (TCB ptr) across the call;
                                                //   x19 is callee-saved per AAPCS64
    BL      _tx_execution_thread_enter          // Call the thread execution enter function
    MOV     x0, x19                             // Restore x0
#endif

    /* Switch to the thread's stack.  */
    // sp =  _tx_thread_execute_ptr -> tx_thread_stack_ptr;

    /* Determine if an interrupt frame or a synchronous task suspension frame
       is present.  The first two words of the frame are the saved SPSR (or
       DAIF for a solicited frame) and the saved ELR; a NULL ELR marks a
       solicited (synchronous) suspension.  */

    LDP     x4, x5, [sp], #16                   // Pickup saved SPSR/DAIF and ELR_EL1
    CMP     x5, #0                              // Check for synchronous context switch (ELR_EL1 = NULL)
    BEQ     _tx_solicited_return
    /* Interrupt frame: program the exception-return registers for the
       exception level this port was built for (EL1/EL2/EL3).  */
#ifdef EL1
    MSR     SPSR_EL1, x4                        // Setup SPSR for return
    MSR     ELR_EL1, x5                         // Setup point of interrupt
#else
#ifdef EL2
    MSR     SPSR_EL2, x4                        // Setup SPSR for return
    MSR     ELR_EL2, x5                         // Setup point of interrupt
#else
    MSR     SPSR_EL3, x4                        // Setup SPSR for return
    MSR     ELR_EL3, x5                         // Setup point of interrupt
#endif
#endif
#ifdef ENABLE_ARM_FP
    /* Restore the full q0-q31 FP/NEON state only if this thread has FP
       usage enabled in its TCB (saves restore cost for non-FP threads).  */
    LDR     w1, [x0, #248]                      // Pickup FP enable flag
    CMP     w1, #0                              // Is FP enabled?
    BEQ     _skip_interrupt_fp_restore          // No, skip FP restore
    LDP     x0, x1, [sp], #16                   // Pickup FPSR, FPCR
    MSR     FPSR, x0                            // Recover FPSR
    MSR     FPCR, x1                            // Recover FPCR
    LDP     q30, q31, [sp], #32                 // Recover q30, q31
    LDP     q28, q29, [sp], #32                 // Recover q28, q29
    LDP     q26, q27, [sp], #32                 // Recover q26, q27
    LDP     q24, q25, [sp], #32                 // Recover q24, q25
    LDP     q22, q23, [sp], #32                 // Recover q22, q23
    LDP     q20, q21, [sp], #32                 // Recover q20, q21
    LDP     q18, q19, [sp], #32                 // Recover q18, q19
    LDP     q16, q17, [sp], #32                 // Recover q16, q17
    LDP     q14, q15, [sp], #32                 // Recover q14, q15
    LDP     q12, q13, [sp], #32                 // Recover q12, q13
    LDP     q10, q11, [sp], #32                 // Recover q10, q11
    LDP     q8, q9, [sp], #32                   // Recover q8, q9
    LDP     q6, q7, [sp], #32                   // Recover q6, q7
    LDP     q4, q5, [sp], #32                   // Recover q4, q5
    LDP     q2, q3, [sp], #32                   // Recover q2, q3
    LDP     q0, q1, [sp], #32                   // Recover q0, q1
_skip_interrupt_fp_restore:
#endif
    /* Pop the full general-register interrupt frame and return to the
       point of interrupt via exception return.  */
    LDP     x28, x29, [sp], #16                 // Recover x28, x29
    LDP     x26, x27, [sp], #16                 // Recover x26, x27
    LDP     x24, x25, [sp], #16                 // Recover x24, x25
    LDP     x22, x23, [sp], #16                 // Recover x22, x23
    LDP     x20, x21, [sp], #16                 // Recover x20, x21
    LDP     x18, x19, [sp], #16                 // Recover x18, x19
    LDP     x16, x17, [sp], #16                 // Recover x16, x17
    LDP     x14, x15, [sp], #16                 // Recover x14, x15
    LDP     x12, x13, [sp], #16                 // Recover x12, x13
    LDP     x10, x11, [sp], #16                 // Recover x10, x11
    LDP     x8, x9, [sp], #16                   // Recover x8, x9
    LDP     x6, x7, [sp], #16                   // Recover x6, x7
    LDP     x4, x5, [sp], #16                   // Recover x4, x5
    LDP     x2, x3, [sp], #16                   // Recover x2, x3
    LDP     x0, x1, [sp], #16                   // Recover x0, x1
    LDP     x29, x30, [sp], #16                 // Recover x29, x30 (frame ptr, link reg)
    ERET                                        // Return to point of interrupt

_tx_solicited_return:

    /* Solicited (synchronous) suspension frame: only the AAPCS64
       callee-saved registers were preserved, so only those (plus
       q8-q15 when FP is in use) are restored here.  */
#ifdef ENABLE_ARM_FP
    LDR     w1, [x0, #248]                      // Pickup FP enable flag
    CMP     w1, #0                              // Is FP enabled?
    BEQ     _skip_solicited_fp_restore          // No, skip FP restore
    LDP     x0, x1, [sp], #16                   // Pickup FPSR, FPCR
                                                //   (x0/x1 are volatile here — TCB
                                                //   pointer no longer needed)
    MSR     FPSR, x0                            // Recover FPSR
    MSR     FPCR, x1                            // Recover FPCR
    LDP     q14, q15, [sp], #32                 // Recover q14, q15
    LDP     q12, q13, [sp], #32                 // Recover q12, q13
    LDP     q10, q11, [sp], #32                 // Recover q10, q11
    LDP     q8, q9, [sp], #32                   // Recover q8, q9
_skip_solicited_fp_restore:
#endif
    LDP     x27, x28, [sp], #16                 // Recover x27, x28
    LDP     x25, x26, [sp], #16                 // Recover x25, x26
    LDP     x23, x24, [sp], #16                 // Recover x23, x24
    LDP     x21, x22, [sp], #16                 // Recover x21, x22
    LDP     x19, x20, [sp], #16                 // Recover x19, x20
    LDP     x29, x30, [sp], #16                 // Recover x29, x30
    MSR     DAIF, x4                            // Recover DAIF (x4 still holds the saved
                                                //   interrupt-mask word from the frame
                                                //   header — the solicited restore above
                                                //   does not touch x4)
    RET                                         // Return to caller
// }