#define SYSCALL_NO_TLS 1
#include <elf.h>
#include <limits.h>
#include <sys/mman.h>
#include <string.h>
#include <stddef.h>
#include "pthread_impl.h"
#include "libc.h"
#include "atomic.h"
#include "syscall.h"

volatile int __thread_list_lock;

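/* Set up the initial thread's descriptor at p and install it as the
 * thread pointer. SYS_set_tid_address registers __thread_list_lock as
 * the address the kernel clears and futex-wakes on thread exit, and
 * the descriptor is linked into a one-element circular thread list. */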
int __init_tp(void *p)
{
	pthread_t td = p;
	td->self = td;
	int r = __set_thread_area(TP_ADJ(p));
	if (r < 0) return -1;
	if (!r) libc.can_do_threads = 1;
	td->detach_state = DT_JOINABLE;
	td->tid = __syscall(SYS_set_tid_address, &__thread_list_lock);
	td->locale = &libc.global_locale;
	td->robust_list.head = &td->robust_list.head;
	td->sysinfo = __sysinfo;
	td->next = td->prev = td;
	return 0;
}

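/* Static buffer used for the initial TLS/TCB when the total size fits,
 * so the common case needs no mmap. The offset of the embedded struct
 * pthread doubles as the minimum alignment the TLS block must have. */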
static struct builtin_tls {
	char c;
	struct pthread pt;
	void *space[16];
} builtin_tls[1];
#define MIN_TLS_ALIGN offsetof(struct builtin_tls, pt)

static struct tls_module main_tls;

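/* Lay out the DTV, thread descriptor, and each TLS module's initial
 * image within the caller-provided block at mem, which must be at
 * least libc.tls_size bytes. The layout depends on whether the ABI
 * places TLS above or below the thread pointer. Returns the address
 * of the thread descriptor. */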
void *__copy_tls(unsigned char *mem)
{
	pthread_t td;
	struct tls_module *p;
	size_t i;
	uintptr_t *dtv;

#ifdef TLS_ABOVE_TP
	dtv = (uintptr_t*)(mem + libc.tls_size) - (libc.tls_cnt + 1);

	mem += -((uintptr_t)mem + sizeof(struct pthread)) & (libc.tls_align-1);
	td = (pthread_t)mem;
	mem += sizeof(struct pthread);

	for (i=1, p=libc.tls_head; p; i++, p=p->next) {
		dtv[i] = (uintptr_t)(mem + p->offset) + DTP_OFFSET;
		memcpy(mem + p->offset, p->image, p->len);
	}
#else
	dtv = (uintptr_t *)mem;

	mem += libc.tls_size - sizeof(struct pthread);
	mem -= (uintptr_t)mem & (libc.tls_align-1);
	td = (pthread_t)mem;

	for (i=1, p=libc.tls_head; p; i++, p=p->next) {
		dtv[i] = (uintptr_t)(mem - p->offset) + DTP_OFFSET;
		memcpy(mem - p->offset, p->image, p->len);
	}
#endif
	dtv[0] = libc.tls_cnt;
	td->dtv = dtv;
	return td;
}

#if ULONG_MAX == 0xffffffff
typedef Elf32_Phdr Phdr;
#else
typedef Elf64_Phdr Phdr;
#endif

extern weak hidden const size_t _DYNAMIC[];

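/* Set up static TLS for the initial thread: locate the PT_TLS program
 * header (if any) via the aux vector, record it as the sole TLS
 * module, compute the required block size and alignment, obtain
 * memory for the block, and install the thread pointer. */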
static void static_init_tls(size_t *aux)
{
	unsigned char *p;
	size_t n;
	Phdr *phdr, *tls_phdr=0;
	size_t base = 0;
	void *mem;

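	/* Walk the program headers from the aux vector to find the load
	 * base, the PT_TLS segment, and any larger default stack size
	 * requested via PT_GNU_STACK. */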
	for (p=(void *)aux[AT_PHDR],n=aux[AT_PHNUM]; n; n--,p+=aux[AT_PHENT]) {
		phdr = (void *)p;
		if (phdr->p_type == PT_PHDR)
			base = aux[AT_PHDR] - phdr->p_vaddr;
		if (phdr->p_type == PT_DYNAMIC && _DYNAMIC)
			base = (size_t)_DYNAMIC - phdr->p_vaddr;
		if (phdr->p_type == PT_TLS)
			tls_phdr = phdr;
		if (phdr->p_type == PT_GNU_STACK &&
		    phdr->p_memsz > __default_stacksize)
			__default_stacksize =
				phdr->p_memsz < DEFAULT_STACK_MAX ?
				phdr->p_memsz : DEFAULT_STACK_MAX;
	}

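	/* If a TLS segment exists, it becomes the single member of the
	 * static TLS module list. */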
	if (tls_phdr) {
		main_tls.image = (void *)(base + tls_phdr->p_vaddr);
		main_tls.len = tls_phdr->p_filesz;
		main_tls.size = tls_phdr->p_memsz;
		main_tls.align = tls_phdr->p_align;
		libc.tls_cnt = 1;
		libc.tls_head = &main_tls;
	}

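	/* Round the segment size up so its end address is aligned, and
	 * compute the segment's offset relative to the thread pointer
	 * for the ABI in use. */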
	main_tls.size += (-main_tls.size - (uintptr_t)main_tls.image)
		& (main_tls.align-1);
#ifdef TLS_ABOVE_TP
	main_tls.offset = GAP_ABOVE_TP;
	main_tls.offset += (-GAP_ABOVE_TP + (uintptr_t)main_tls.image)
		& (main_tls.align-1);
#else
	main_tls.offset = main_tls.size;
#endif
	if (main_tls.align < MIN_TLS_ALIGN) main_tls.align = MIN_TLS_ALIGN;

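	/* Total allocation needed for static TLS: DTV, thread
	 * descriptor, the TLS segment itself, and worst-case alignment
	 * padding, rounded up to a multiple of MIN_TLS_ALIGN. */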
	libc.tls_align = main_tls.align;
	libc.tls_size = 2*sizeof(void *) + sizeof(struct pthread)
#ifdef TLS_ABOVE_TP
		+ main_tls.offset
#endif
		+ main_tls.size + main_tls.align
		+ MIN_TLS_ALIGN-1 & -MIN_TLS_ALIGN;

	if (libc.tls_size > sizeof builtin_tls) {
#ifndef SYS_mmap2
#define SYS_mmap2 SYS_mmap
#endif
		mem = (void *)__syscall(
			SYS_mmap2,
			0, libc.tls_size, PROT_READ|PROT_WRITE,
			MAP_ANONYMOUS|MAP_PRIVATE, -1, 0);
		/* -4095...-1 cast to void * will crash on dereference anyway,
		 * so don't bloat the init code checking for error codes and
		 * explicitly calling a_crash(). */
	} else {
		mem = builtin_tls;
	}

	/* Failure to initialize thread pointer is always fatal. */
	if (__init_tp(__copy_tls(mem)) < 0)
		a_crash();
}

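/* Weak alias: a stronger definition of __init_tls (for example, one
 * provided by the dynamic linker) takes precedence over this
 * static-linking setup. */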
weak_alias(static_init_tls, __init_tls);