/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2004 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

/*
 * Define stubs for TLS internals so that programs and libraries can
 * link. These functions will be replaced by functional versions at
 * runtime from ld-elf.so.1.
 */

#include <sys/param.h>
#include <stdlib.h>
#include <string.h>
#include <elf.h>
#include <unistd.h>

#include "rtld.h"
#include "libc_private.h"

#define	tls_assert(cond)	((cond) ? (void) 0 :			\
    (tls_msg(#cond ": assert failed: " __FILE__ ":"			\
      __XSTRING(__LINE__) "\n"), abort()))
#define	tls_msg(s)		write(STDOUT_FILENO, s, strlen(s))

/* Provided by jemalloc to avoid bootstrapping issues. */
void	*__je_bootstrap_malloc(size_t size);
void	*__je_bootstrap_calloc(size_t num, size_t size);
void	__je_bootstrap_free(void *ptr);

__weak_reference(__libc_allocate_tls, _rtld_allocate_tls);
__weak_reference(__libc_free_tls, _rtld_free_tls);

#ifdef __i386__

__weak_reference(___libc_tls_get_addr, ___tls_get_addr);
__attribute__((__regparm__(1))) void * ___libc_tls_get_addr(void *);

#endif

void * __libc_tls_get_addr(void *);
__weak_reference(__libc_tls_get_addr, __tls_get_addr);

void *_rtld_allocate_tls(void *oldtls, size_t tcbsize, size_t tcbalign);
void _rtld_free_tls(void *tls, size_t tcbsize, size_t tcbalign);
void *__libc_allocate_tls(void *oldtls, size_t tcbsize, size_t tcbalign);
void __libc_free_tls(void *tls, size_t tcbsize, size_t tcbalign);

#ifndef PIC

static size_t libc_tls_static_space;
static size_t libc_tls_init_size;
static size_t libc_tls_init_align;
static void *libc_tls_init;
#endif

void *
__libc_tls_get_addr(void *vti)
{
	uintptr_t *dtv;
	tls_index *ti;

	dtv = _tcb_get()->tcb_dtv;
	ti = vti;
	return ((char *)(dtv[ti->ti_module + 1] + ti->ti_offset) +
	    TLS_DTV_OFFSET);
}
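
/*
 * Illustrative note (added commentary, not from the original author): the
 * DTV built by __libc_allocate_tls() below stores the generation count in
 * dtv[0], the segment count in dtv[1], and the base of module M's TLS
 * segment in dtv[M + 1].  For a hypothetical tls_index of { ti_module = 1,
 * ti_offset = 24 }, the lookup above lands 24 bytes into the initial static
 * TLS segment, adjusted by the per-ABI TLS_DTV_OFFSET bias.
 */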

#ifdef __i386__

/* GNU ABI */

__attribute__((__regparm__(1)))
void *
___libc_tls_get_addr(void *vti)
{
	return (__libc_tls_get_addr(vti));
}

#endif

#ifndef PIC

/*
 * Allocate 'size' bytes aligned to 'align' from the bootstrap allocator.
 * The pointer returned by __je_bootstrap_malloc() is stashed in the word
 * immediately below the aligned block so that libc_free_aligned() can
 * recover it.
 */
static void *
libc_malloc_aligned(size_t size, size_t align)
{
	void *mem, *res;

	if (align < sizeof(void *))
		align = sizeof(void *);

	mem = __je_bootstrap_malloc(size + sizeof(void *) + align - 1);
	if (mem == NULL)
		return (NULL);
	res = (void *)roundup2((uintptr_t)mem + sizeof(void *), align);
	*(void **)((uintptr_t)res - sizeof(void *)) = mem;
	return (res);
}

/* Recover the stashed allocation pointer and release the whole block. */
static void
libc_free_aligned(void *ptr)
{
	void *mem;
	uintptr_t x;

	if (ptr == NULL)
		return;

	x = (uintptr_t)ptr;
	x -= sizeof(void *);
	mem = *(void **)x;
	__je_bootstrap_free(mem);
}
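
/*
 * Worked example (purely illustrative numbers): libc_malloc_aligned(100, 64)
 * requests 100 + sizeof(void *) + 63 bytes from __je_bootstrap_malloc(),
 * rounds the address just past the stash slot up to a 64-byte boundary, and
 * records the original allocation pointer in the word immediately below the
 * returned address, which is what libc_free_aligned() reads back and frees.
 */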

#ifdef TLS_VARIANT_I

/*
 * There are two versions of variant I of TLS:
 *
 * - ARM and aarch64 use the original variant I as described in [1] and [2],
 *   where TP points to the start of the TCB, followed by the aligned TLS
 *   segment.  Both the TCB and the TLS segment must be aligned to the
 *   alignment of the TLS section.  TCB[0] points to the DTV vector, and the
 *   DTV values are real addresses (without bias).
 *   Note: for the Local Exec TLS model, the offsets from TP (the TCB in this
 *   case) to TLS variables are computed by the linker, so we cannot
 *   overalign the TLS section.
 *
 * - PowerPC and RISC-V use a modified version of variant I, described in
 *   [3], where TP points (with bias) to the TLS segment and the TCB
 *   immediately precedes the TLS segment without any alignment gap [4].
 *   Only the TLS segment needs to be aligned.  TCB[0] points to the DTV
 *   vector, and the DTV values are biased by a constant (TLS_DTV_OFFSET)
 *   relative to the real addresses [5].
 *
 * [1] Ulrich Drepper: ELF Handling for Thread-Local Storage
 *     www.akkadia.org/drepper/tls.pdf
 *
 * [2] ARM IHI 0045E: Addenda to, and Errata in, the ABI for the ARM(r)
 *     Architecture
 *   infocenter.arm.com/help/topic/com.arm.doc.ihi0045e/IHI0045E_ABI_addenda.pdf
 *
 * [3] OpenPOWER: Power Architecture 64-Bit ELF V2 ABI Specification
 *     https://members.openpowerfoundation.org/document/dl/576
 *
 * [4] It is unclear whether "without any alignment gap" is a hard ABI
 *     requirement, but we must follow this rule due to the suboptimal
 *     _tcb_set() (aka <ARCH>_SET_TP) implementation, which expects a TCB
 *     rather than a TP as its argument.
 *
 * [5] I have not been able to validate the "values are biased" assertion.
 */

/*
 * Return a pointer to the allocated TLS block.
 */
static void *
get_tls_block_ptr(void *tcb, size_t tcbsize)
{
	size_t extra_size, post_size, pre_size, tls_block_size;

	/* Compute fragment sizes. */
	extra_size = tcbsize - TLS_TCB_SIZE;
#if defined(__aarch64__) || defined(__arm__)
	post_size = roundup2(TLS_TCB_SIZE, libc_tls_init_align) - TLS_TCB_SIZE;
#else
	post_size = 0;
#endif
	tls_block_size = tcbsize + post_size;
	pre_size = roundup2(tls_block_size, libc_tls_init_align) -
	    tls_block_size;

	return ((char *)tcb - pre_size - extra_size);
}
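
/*
 * Worked example (hypothetical values, for illustration only): with
 * TLS_TCB_SIZE == 16, libc_tls_init_align == 64 and tcbsize == 32 on an
 * aarch64-like layout, extra_size == 16, post_size == 48, tls_block_size ==
 * 80 and pre_size == 48, so the block start is tcb - 48 - 16 == tcb - 64,
 * matching the layout pictured before __libc_allocate_tls() below.
 */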

/*
 * Free Static TLS using the Variant I method. The tcbsize
 * and tcbalign parameters must be the same as those used to allocate
 * the block.
 */
void
__libc_free_tls(void *tcb, size_t tcbsize, size_t tcbalign __unused)
{
	Elf_Addr *dtv;
	Elf_Addr **tls;

	tls = (Elf_Addr **)tcb;
	dtv = tls[0];
	__je_bootstrap_free(dtv);
	libc_free_aligned(get_tls_block_ptr(tcb, tcbsize));
}

/*
 * Allocate Static TLS using the Variant I method.
 *
 * To handle all of the above requirements, we set up the following layout
 * for the TLS block (the whole memory block is aligned to
 * MAX(TLS_TCB_ALIGN, tls_init_align)):
 *
 * +----------+--------------+--------------+-----------+------------------+
 * | pre gap  | extended TCB |     TCB      | post gap  |    TLS segment   |
 * | pre_size |  extra_size  | TLS_TCB_SIZE | post_size | tls_static_space |
 * +----------+--------------+--------------+-----------+------------------+
 *
 * where:
 *  extra_size is tcbsize - TLS_TCB_SIZE
 *  post_size  is used to adjust the TCB to the TLS alignment for the first
 *             version of the layout and is always 0 for the second version.
 *  pre_size   is used to adjust the TCB alignment for the first version and
 *             the TLS alignment for the second version.
 */
void *
__libc_allocate_tls(void *oldtcb, size_t tcbsize, size_t tcbalign)
{
	Elf_Addr *dtv, **tcb;
	char *tls_block, *tls;
	size_t extra_size, maxalign, post_size, pre_size, tls_block_size;

	if (oldtcb != NULL && tcbsize == TLS_TCB_SIZE)
		return (oldtcb);

	tls_assert(tcbalign >= TLS_TCB_ALIGN);
	maxalign = MAX(tcbalign, libc_tls_init_align);

	/* Compute fragment sizes. */
	extra_size = tcbsize - TLS_TCB_SIZE;
#if defined(__aarch64__) || defined(__arm__)
	post_size = roundup2(TLS_TCB_SIZE, libc_tls_init_align) - TLS_TCB_SIZE;
#else
	post_size = 0;
#endif
	tls_block_size = tcbsize + post_size;
	pre_size = roundup2(tls_block_size, libc_tls_init_align) -
	    tls_block_size;
	tls_block_size += pre_size + libc_tls_static_space;

	/* Allocate whole TLS block */
	tls_block = libc_malloc_aligned(tls_block_size, maxalign);
	if (tls_block == NULL) {
		tls_msg("__libc_allocate_tls: Out of memory.\n");
		abort();
	}
	memset(tls_block, 0, tls_block_size);
	tcb = (Elf_Addr **)(tls_block + pre_size + extra_size);
	tls = (char *)tcb + TLS_TCB_SIZE + post_size;

	if (oldtcb != NULL) {
		memcpy(tls_block, get_tls_block_ptr(oldtcb, tcbsize),
		    tls_block_size);
		libc_free_aligned(oldtcb);

		/* Adjust the DTV. */
		dtv = tcb[0];
		dtv[2] = (Elf_Addr)(tls + TLS_DTV_OFFSET);
	} else {
		dtv = __je_bootstrap_malloc(3 * sizeof(Elf_Addr));
		if (dtv == NULL) {
			tls_msg("__libc_allocate_tls: Out of memory.\n");
			abort();
		}
		/* Build the DTV. */
		tcb[0] = dtv;
		dtv[0] = 1;		/* Generation. */
		dtv[1] = 1;		/* Segments count. */
		dtv[2] = (Elf_Addr)(tls + TLS_DTV_OFFSET);

		if (libc_tls_init_size > 0)
			memcpy(tls, libc_tls_init, libc_tls_init_size);
	}

	return (tcb);
}
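
/*
 * Usage sketch (illustrative, not from the original source): a threads
 * library would typically pair the calls roughly as follows, passing the
 * same tcbsize and tcbalign to both:
 *
 *	void *tcb = _rtld_allocate_tls(NULL, TLS_TCB_SIZE, TLS_TCB_ALIGN);
 *	_tcb_set(tcb);
 *	...
 *	_rtld_free_tls(tcb, TLS_TCB_SIZE, TLS_TCB_ALIGN);
 *
 * which is essentially what _init_tls() at the bottom of this file does for
 * the initial thread of a statically linked program.
 */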

#endif /* TLS_VARIANT_I */

#ifdef TLS_VARIANT_II

/*
 * Free Static TLS using the Variant II method.
 */
void
__libc_free_tls(void *tcb, size_t tcbsize __unused, size_t tcbalign)
{
	size_t size;
	Elf_Addr *dtv;
	Elf_Addr tlsstart, tlsend;

	/*
	 * Figure out the size of the initial TLS block so that we can
	 * find the memory which ___tls_get_addr() allocated dynamically.
	 */
	tcbalign = MAX(tcbalign, libc_tls_init_align);
	size = roundup2(libc_tls_static_space, tcbalign);

	dtv = ((Elf_Addr **)tcb)[1];
	tlsend = (Elf_Addr)tcb;
	tlsstart = tlsend - size;
	libc_free_aligned((void *)tlsstart);
	__je_bootstrap_free(dtv);
}

/*
 * Allocate Static TLS using the Variant II method.
 */
void *
__libc_allocate_tls(void *oldtls, size_t tcbsize, size_t tcbalign)
{
	size_t size;
	char *tls;
	Elf_Addr *dtv;
	Elf_Addr segbase, oldsegbase;

	tcbalign = MAX(tcbalign, libc_tls_init_align);
	size = roundup2(libc_tls_static_space, tcbalign);

	if (tcbsize < 2 * sizeof(Elf_Addr))
		tcbsize = 2 * sizeof(Elf_Addr);
	tls = libc_malloc_aligned(size + tcbsize, tcbalign);
	if (tls == NULL) {
		tls_msg("__libc_allocate_tls: Out of memory.\n");
		abort();
	}
	memset(tls, 0, size + tcbsize);
	dtv = __je_bootstrap_malloc(3 * sizeof(Elf_Addr));
	if (dtv == NULL) {
		tls_msg("__libc_allocate_tls: Out of memory.\n");
		abort();
	}

	segbase = (Elf_Addr)(tls + size);
	((Elf_Addr *)segbase)[0] = segbase;
	((Elf_Addr *)segbase)[1] = (Elf_Addr)dtv;

	dtv[0] = 1;		/* Generation. */
	dtv[1] = 1;		/* Segments count. */
	dtv[2] = segbase - libc_tls_static_space;

	if (oldtls) {
		/*
		 * Copy the old static TLS block over in its entirety.
		 */
		oldsegbase = (Elf_Addr)oldtls;
		memcpy((void *)(segbase - libc_tls_static_space),
		    (const void *)(oldsegbase - libc_tls_static_space),
		    libc_tls_static_space);

		/*
		 * We assume that this block was the one we created with
		 * allocate_initial_tls().
		 */
		_rtld_free_tls(oldtls, 2 * sizeof(Elf_Addr), sizeof(Elf_Addr));
	} else {
		memcpy((void *)(segbase - libc_tls_static_space),
		    libc_tls_init, libc_tls_init_size);
		memset((void *)(segbase - libc_tls_static_space +
		    libc_tls_init_size), 0,
		    libc_tls_static_space - libc_tls_init_size);
	}

	return ((void *)segbase);
}
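
/*
 * Resulting layout for Variant II (added illustration, derived from the
 * code above), where size = roundup2(libc_tls_static_space, tcbalign):
 *
 *   tls                                      segbase = tls + size
 *   +----------------------------------------+------------------------+
 *   | size bytes; the static TLS data fills  | TCB: [0] self pointer  |
 *   | the top libc_tls_static_space bytes    |      [1] DTV pointer   |
 *   +----------------------------------------+------------------------+
 *
 * The thread pointer is set to segbase, static TLS offsets from it are
 * negative, and dtv[2] = segbase - libc_tls_static_space points at the
 * start of the initial TLS data.
 */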

#endif /* TLS_VARIANT_II */

#else

void *
__libc_allocate_tls(void *oldtls __unused, size_t tcbsize __unused,
	size_t tcbalign __unused)
{
	return (0);
}

void
__libc_free_tls(void *tcb __unused, size_t tcbsize __unused,
	size_t tcbalign __unused)
{
}

#endif /* PIC */

void
_init_tls(void)
{
#ifndef PIC
	Elf_Addr *sp;
	Elf_Auxinfo *aux, *auxp;
	Elf_Phdr *phdr;
	size_t phent, phnum;
	int i;
	void *tls;

	sp = (Elf_Addr *)environ;
	while (*sp++ != 0)
		;
	aux = (Elf_Auxinfo *)sp;
	phdr = NULL;
	phent = phnum = 0;
	for (auxp = aux; auxp->a_type != AT_NULL; auxp++) {
		switch (auxp->a_type) {
		case AT_PHDR:
			phdr = auxp->a_un.a_ptr;
			break;

		case AT_PHENT:
			phent = auxp->a_un.a_val;
			break;

		case AT_PHNUM:
			phnum = auxp->a_un.a_val;
			break;
		}
	}
	if (phdr == NULL || phent != sizeof(Elf_Phdr) || phnum == 0)
		return;

	for (i = 0; (unsigned)i < phnum; i++) {
		if (phdr[i].p_type == PT_TLS) {
			libc_tls_static_space = roundup2(phdr[i].p_memsz,
			    phdr[i].p_align);
			libc_tls_init_size = phdr[i].p_filesz;
			libc_tls_init_align = phdr[i].p_align;
			libc_tls_init = (void *)phdr[i].p_vaddr;
			break;
		}
	}
	tls = _rtld_allocate_tls(NULL, TLS_TCB_SIZE, TLS_TCB_ALIGN);

	_tcb_set(tls);
#endif
}
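
/*
 * Illustrative example (added commentary): in a statically linked program,
 * a definition such as
 *
 *	static __thread int counter = 5;
 *
 * contributes its initialization image to the PT_TLS program header located
 * by _init_tls() above; libc_tls_init and libc_tls_init_size describe that
 * image, and libc_tls_static_space is its p_memsz rounded up to p_align.
 */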