/*
 * Copyright (c) 2020 Intel Corporation.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <kernel_internal.h>
#include <zephyr/arch/x86/ia32/arch.h>
#include <zephyr/arch/x86/ia32/segmentation.h>

#define ENTRY_NUM	(GS_TLS_SEG >> 3)
12 
z_x86_tls_update_gdt(struct k_thread * thread)13 void z_x86_tls_update_gdt(struct k_thread *thread)
14 {
15 	/*
16 	 * GS is used for thread local storage to pointer to
17 	 * the TLS storage area in stack. Here we update one
18 	 * of the descriptor so GS has the new address.
19 	 *
20 	 * The re-loading of descriptor into GS is taken care
21 	 * of inside the assembly swap code just before
22 	 * swapping into the new thread.
23 	 */
24 
25 	struct segment_descriptor *sd = &_gdt.entries[ENTRY_NUM];
26 
27 	sd->base_low = thread->tls & 0xFFFFU;
28 	sd->base_mid = (thread->tls >> 16) & 0xFFU;
29 	sd->base_hi = (thread->tls >> 24) & 0xFFU;
30 }
31 
32 FUNC_NO_STACK_PROTECTOR
z_x86_early_tls_update_gdt(char * stack_ptr)33 void z_x86_early_tls_update_gdt(char *stack_ptr)
34 {
35 	uintptr_t *self_ptr;
36 	uintptr_t tls_seg = GS_TLS_SEG;
37 	struct segment_descriptor *sd = &_gdt.entries[ENTRY_NUM];
38 
39 	/*
40 	 * Since we are populating things backwards, store
41 	 * the pointer to the TLS area at top of stack.
42 	 */
43 	stack_ptr -= sizeof(uintptr_t);
44 	self_ptr = (void *)stack_ptr;
45 	*self_ptr = POINTER_TO_UINT(stack_ptr);
46 
47 	sd->base_low = POINTER_TO_UINT(self_ptr) & 0xFFFFU;
48 	sd->base_mid = (POINTER_TO_UINT(self_ptr) >> 16) & 0xFFU;
49 	sd->base_hi = (POINTER_TO_UINT(self_ptr) >> 24) & 0xFFU;
50 
51 	__asm__ volatile(
52 		"movl %0, %%eax;\n\t"
53 		"movl %%eax, %%gs;\n\t"
54 		:
55 		: "r"(tls_seg));
56 }
57