;/***************************************************************************
; * Copyright (c) 2024 Microsoft Corporation
; *
; * This program and the accompanying materials are made available under the
; * terms of the MIT License which is available at
; * https://opensource.org/licenses/MIT.
; *
; * SPDX-License-Identifier: MIT
; **************************************************************************/
;
;
;/**************************************************************************/
;/**************************************************************************/
;/**                                                                       */
;/** ThreadX Component                                                     */
;/**                                                                       */
;/** Thread - Low Level SMP Support                                        */
;/**                                                                       */
;/**************************************************************************/
;/**************************************************************************/
;
; Macro: _tx_thread_smp_protect_lock_got
;
; Records ownership of the inter-core protection structure after the lock
; has been acquired by this core.
;
; Register inputs (established by the caller):
;   r1 = this core's index
;   r2 = address of _tx_thread_smp_protection
;   r0 = caller's CPSR (stored only in the TX_MPCORE_DEBUG_ENABLE build)
; Clobbers: r3 (and r4 when TX_MPCORE_DEBUG_ENABLE is defined).
;
; Structure offsets used here: +4 current thread, +8 owning core,
; +12 ownership count, +16 caller's return address, +20 CPSR.
;
        MACRO
$label  _tx_thread_smp_protect_lock_got
;
;    /* Set the currently owned core.  */
;    _tx_thread_smp_protection.tx_thread_smp_protect_core = this_core;
;
        STR     r1, [r2, #8]                        ; Store this core
;
;    /* Increment the protection count.  */
;    _tx_thread_smp_protection.tx_thread_smp_protect_count++;
;
        LDR     r3, [r2, #12]                       ; Pickup ownership count
        ADD     r3, r3, #1                          ; Increment ownership count
        STR     r3, [r2, #12]                       ; Store ownership count
        DMB                                         ; Data memory barrier: make ownership updates visible before proceeding
;
;    /* In the debug build, also record the owning thread, the caller's
;       return address, and the caller's CPSR for post-mortem inspection.  */
;
    IF :DEF:TX_MPCORE_DEBUG_ENABLE
        LSL     r3, r1, #2                          ; Build offset to array indexes (core index * 4)
        LDR     r4, =_tx_thread_current_ptr         ; Pickup start of the current thread array
        ADD     r4, r3, r4                          ; Build index into the current thread array
        LDR     r3, [r4]                            ; Pickup current thread for this core
        STR     r3, [r2, #4]                        ; Save current thread pointer
        STR     LR, [r2, #16]                       ; Save caller's return address
        STR     r0, [r2, #20]                       ; Save CPSR
    ENDIF
        MEND
;
; Macro: _tx_thread_smp_protect_remove_from_front_of_list
;
; Removes this core's entry from the front of the protection wait list by
; overwriting the head slot with the invalid-core marker (0xFFFFFFFF) and
; advancing the head index, wrapping at the list size.
;
; Clobbers: r3, r4, r5, r6.
;
; NOTE(review): the pseudocode below compares the head against
; TX_THREAD_SMP_MAX_CORES + 1 while the code compares against the runtime
; variable _tx_thread_smp_protect_wait_list_size — presumably that variable
; is initialized to TX_THREAD_SMP_MAX_CORES + 1; confirm against the
; wait-list initialization code.
;
        MACRO
$label  _tx_thread_smp_protect_remove_from_front_of_list
;
;    /* Remove ourselves from the list.  */
;    _tx_thread_smp_protect_wait_list[_tx_thread_smp_protect_wait_list_head++] =  0xFFFFFFFF;
;
        MOV     r3, #0xFFFFFFFF                     ; Build the invalid core value
        LDR     r4, =_tx_thread_smp_protect_wait_list_head ; Get the address of the head
        LDR     r5, [r4]                            ; Get the value of the head
        LDR     r6, =_tx_thread_smp_protect_wait_list ; Get the address of the list
        STR     r3, [r6, r5, LSL #2]                ; Store the invalid core value
        ADD     r5, r5, #1                          ; Increment the head
;
;    /* Did we wrap?  */
;    if (_tx_thread_smp_protect_wait_list_head == TX_THREAD_SMP_MAX_CORES + 1)
;    {
;
        LDR     r3, =_tx_thread_smp_protect_wait_list_size ; Load address of core list size
        LDR     r3, [r3]                            ; Load the max cores value
        CMP     r5, r3                              ; Compare the head to it
        BNE     $label._store_new_head              ; Are we at the max?
;
;        _tx_thread_smp_protect_wait_list_head =  0;
;
        EOR     r5, r5, r5                          ; We're at the max. Set it to zero
;
;    }
;
$label._store_new_head
        STR     r5, [r4]                            ; Store the new head
;
;    /* We have the lock!  */
;    return;
;
        MEND
;
; Macro: _tx_thread_smp_protect_wait_list_lock_get
;
; Spins until the wait-list lock is acquired, using an LDREX/STREX
; exclusive-access loop on
; _tx_thread_smp_protect_wait_list_lock_protect_in_force.
;
; Clobbers: r1, r2, r3.
; NOTE(review): this is a pure busy-wait (no WFE) — presumably acceptable
; because the wait-list lock is held only briefly; confirm against the
; port's design notes.
;
        MACRO
$label  _tx_thread_smp_protect_wait_list_lock_get
;VOID _tx_thread_smp_protect_wait_list_lock_get()
;{
;    /* We do this until we have the lock.  */
;    while (1)
;    {
;
$label._tx_thread_smp_protect_wait_list_lock_get__try_to_get_lock
;
;    /* Is the list lock available?  */
;    _tx_thread_smp_protect_wait_list_lock_protect_in_force = load_exclusive(&_tx_thread_smp_protect_wait_list_lock_protect_in_force);
;
        LDR     r1, =_tx_thread_smp_protect_wait_list_lock_protect_in_force
        LDREX   r2, [r1]                            ; Pickup the protection flag
;
;    if (protect_in_force == 0)
;    {
;
        CMP     r2, #0
        BNE     $label._tx_thread_smp_protect_wait_list_lock_get__try_to_get_lock ; No, protection not available
;
;    /* Try to get the list.  */
;    int status = store_exclusive(&_tx_thread_smp_protect_wait_list_lock_protect_in_force, 1);
;
        MOV     r2, #1                              ; Build lock value
        STREX   r3, r2, [r1]                        ; Attempt to get the protection
;
;    if (status == SUCCESS)
;
        CMP     r3, #0
        BNE     $label._tx_thread_smp_protect_wait_list_lock_get__try_to_get_lock ; Did it fail? If so, try again.
;
;    /* We have the lock!  */
;    return;
;
        MEND
;
; Macro: _tx_thread_smp_protect_wait_list_add
;
; Appends a core to the tail of the protection wait list, wrapping the tail
; index at the list size.  Acquires the wait-list lock (preserving r1-r2
; across the nested lock_get, which clobbers r1-r3) and releases it when
; done.
;
; Register inputs: r1 = new core to add.
; Clobbers: r3, r4, r5.
;
        MACRO
$label  _tx_thread_smp_protect_wait_list_add
;VOID _tx_thread_smp_protect_wait_list_add(UINT new_core)
;{
;
;    /* We're about to modify the list, so get the list lock.  */
;    _tx_thread_smp_protect_wait_list_lock_get();
;
        PUSH    {r1-r2}                             ; lock_get clobbers r1-r3; preserve caller values
$label  _tx_thread_smp_protect_wait_list_lock_get
        POP     {r1-r2}
;
;    /* Add this core.  */
;    _tx_thread_smp_protect_wait_list[_tx_thread_smp_protect_wait_list_tail++] = new_core;
;
        LDR     r3, =_tx_thread_smp_protect_wait_list_tail ; Get the address of the tail
        LDR     r4, [r3]                            ; Get the value of tail
        LDR     r5, =_tx_thread_smp_protect_wait_list ; Get the address of the list
        STR     r1, [r5, r4, LSL #2]                ; Store the new core value
        ADD     r4, r4, #1                          ; Increment the tail
;
;    /* Did we wrap?  */
;    if (_tx_thread_smp_protect_wait_list_tail == _tx_thread_smp_protect_wait_list_size)
;    {
;
        LDR     r5, =_tx_thread_smp_protect_wait_list_size ; Load max cores address
        LDR     r5, [r5]                            ; Load max cores value
        CMP     r4, r5                              ; Compare max cores to tail
        BNE     $label._tx_thread_smp_protect_wait_list_add__no_wrap ; Did we wrap?
;
;        _tx_thread_smp_protect_wait_list_tail = 0;
;
        MOV     r4, #0
;
;    }
;
$label._tx_thread_smp_protect_wait_list_add__no_wrap
        STR     r4, [r3]                            ; Store the new tail value.
;
;    /* Release the list lock.  */
;    _tx_thread_smp_protect_wait_list_lock_protect_in_force = 0;
;
        MOV     r3, #0                              ; Build lock value
        LDR     r4, =_tx_thread_smp_protect_wait_list_lock_protect_in_force
        STR     r3, [r4]                            ; Store the new value
        MEND
;
; Macro: _tx_thread_smp_protect_wait_list_remove
;
; Removes a core from the protection wait list: finds the core's index by
; linear search, then (under the wait-list lock) shifts the following
; entries forward and decrements the tail, wrapping as needed.  Finally
; decrements the core's entry in _tx_thread_smp_protect_wait_counts.
;
; Register inputs: r10 = core to remove (compared in the search loop below).
; Clobbers: r0, r1, r2, r3.
;
; NOTE(review): the search loop has no bound check — it presumably relies on
; the core always being present in the list when this macro is invoked;
; confirm against the callers.
;
        MACRO
$label  _tx_thread_smp_protect_wait_list_remove
;VOID _tx_thread_smp_protect_wait_list_remove(UINT core)
;{
;
;    /* Get the core index.  */
;    UINT core_index;
;    for (core_index = 0;; core_index++)
;
        EOR     r1, r1, r1                          ; Clear for 'core_index'
        LDR     r2, =_tx_thread_smp_protect_wait_list ; Get the address of the list
;
;    {
;
$label._tx_thread_smp_protect_wait_list_remove__check_cur_core
;
;    /* Is this the core?  */
;    if (_tx_thread_smp_protect_wait_list[core_index] == core)
;    {
;        break;
;
        LDR     r3, [r2, r1, LSL #2]                ; Get the value at the current index
        CMP     r3, r10                             ; Did we find the core?
        BEQ     $label._tx_thread_smp_protect_wait_list_remove__found_core
;
;    }
;
        ADD     r1, r1, #1                          ; Increment cur index
        B       $label._tx_thread_smp_protect_wait_list_remove__check_cur_core ; Restart the loop
;
;    }
;
$label._tx_thread_smp_protect_wait_list_remove__found_core
;
;    /* We're about to modify the list. Get the lock. We need the lock because another
;       core could be simultaneously adding (a core is simultaneously trying to get
;       the inter-core lock) or removing (a core is simultaneously being preempted,
;       like what is currently happening).  */
;    _tx_thread_smp_protect_wait_list_lock_get();
;
        PUSH    {r1}                                ; lock_get clobbers r1-r3; preserve core_index
$label  _tx_thread_smp_protect_wait_list_lock_get
        POP     {r1}
;
;    /* We remove by shifting.  */
;    while (core_index != _tx_thread_smp_protect_wait_list_tail)
;    {
;
$label._tx_thread_smp_protect_wait_list_remove__compare_index_to_tail
        LDR     r2, =_tx_thread_smp_protect_wait_list_tail ; Load tail address
        LDR     r2, [r2]                            ; Load tail value
        CMP     r1, r2                              ; Compare cur index and tail
        BEQ     $label._tx_thread_smp_protect_wait_list_remove__removed
;
;        UINT next_index = core_index + 1;
;
        MOV     r2, r1                              ; Move current index to next index register
        ADD     r2, r2, #1                          ; Add 1
;
;        if (next_index == _tx_thread_smp_protect_wait_list_size)
;        {
;
        LDR     r3, =_tx_thread_smp_protect_wait_list_size
        LDR     r3, [r3]
        CMP     r2, r3
        BNE     $label._tx_thread_smp_protect_wait_list_remove__next_index_no_wrap
;
;            next_index = 0;
;
        MOV     r2, #0
;
;        }
;
$label._tx_thread_smp_protect_wait_list_remove__next_index_no_wrap
;
;        list_cores[core_index] = list_cores[next_index];
;
        LDR     r0, =_tx_thread_smp_protect_wait_list ; Get the address of the list
        LDR     r3, [r0, r2, LSL #2]                ; Get the value at the next index
        STR     r3, [r0, r1, LSL #2]                ; Store the value at the current index
;
;        core_index = next_index;
;
        MOV     r1, r2
        B       $label._tx_thread_smp_protect_wait_list_remove__compare_index_to_tail
;
;    }
;
$label._tx_thread_smp_protect_wait_list_remove__removed
;
;    /* Now update the tail.  */
;    if (_tx_thread_smp_protect_wait_list_tail == 0)
;    {
;
        LDR     r0, =_tx_thread_smp_protect_wait_list_tail ; Load tail address
        LDR     r1, [r0]                            ; Load tail value
        CMP     r1, #0
        BNE     $label._tx_thread_smp_protect_wait_list_remove__tail_not_zero
;
;        _tx_thread_smp_protect_wait_list_tail = _tx_thread_smp_protect_wait_list_size;
;
        LDR     r2, =_tx_thread_smp_protect_wait_list_size
        LDR     r1, [r2]
;
;    }
;
$label._tx_thread_smp_protect_wait_list_remove__tail_not_zero
;
;    _tx_thread_smp_protect_wait_list_tail--;
;
        SUB     r1, r1, #1
        STR     r1, [r0]                            ; Store new tail value
;
;    /* Release the list lock.  */
;    _tx_thread_smp_protect_wait_list_lock_protect_in_force = 0;
;
        MOV     r0, #0                              ; Build lock value
        LDR     r1, =_tx_thread_smp_protect_wait_list_lock_protect_in_force ; Load lock address
        STR     r0, [r1]                            ; Store the new value
;
;    /* We're no longer waiting. Note that this should be zero since, again,
;       this function is only called when a thread preemption is occurring.  */
;    _tx_thread_smp_protect_wait_counts[core]--;
;
        LDR     r1, =_tx_thread_smp_protect_wait_counts ; Load wait list counts
        LDR     r2, [r1, r10, LSL #2]               ; Load waiting value
        SUB     r2, r2, #1                          ; Subtract 1
        STR     r2, [r1, r10, LSL #2]               ; Store new waiting value
        MEND