--- tls.c  (75dfc66c1b2b44609e5a7c3e1d6a751be4922689)
+++ tls.c  (7f7489eba391a858b3930a34e7749d642b374c5c)

 /*-
  * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
  *
  * Copyright (c) 2004 Doug Rabson
  * All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions

--- 27 unchanged lines hidden ---

 #include <sys/cdefs.h>
 #include <sys/param.h>
 #include <stdlib.h>
 #include <string.h>
 #include <elf.h>
 #include <unistd.h>

+#include "rtld.h"
 #include "libc_private.h"

 #define tls_assert(cond) ((cond) ? (void) 0 : \
         (tls_msg(#cond ": assert failed: " __FILE__ ":" \
             __XSTRING(__LINE__) "\n"), abort()))
 #define tls_msg(s) write(STDOUT_FILENO, s, strlen(s))

 /* Provided by jemalloc to avoid bootstrapping issues. */

--- 39 unchanged lines hidden ---

 #if defined(__mips__) || defined(__powerpc__) || defined(__riscv)
 #define DTV_OFFSET 0x8000
 #else
 #define DTV_OFFSET 0
 #endif

 #ifndef PIC

-static size_t tls_static_space;
-static size_t tls_init_size;
-static size_t tls_init_align;
-static void *tls_init;
+static size_t libc_tls_static_space;
+static size_t libc_tls_init_size;
+static size_t libc_tls_init_align;
+static void *libc_tls_init;
 #endif

 #ifdef __i386__

 /* GNU ABI */

 __attribute__((__regparm__(1)))
 void *

--- 8 unchanged lines hidden ---

 __libc_tls_get_addr(void *ti __unused)
 {
         return (0);
 }

 #ifndef PIC

 static void *
-malloc_aligned(size_t size, size_t align)
+libc_malloc_aligned(size_t size, size_t align)
 {
         void *mem, *res;

         if (align < sizeof(void *))
                 align = sizeof(void *);

         mem = __je_bootstrap_malloc(size + sizeof(void *) + align - 1);
         res = (void *)roundup2((uintptr_t)mem + sizeof(void *), align);
         *(void **)((uintptr_t)res - sizeof(void *)) = mem;
         return (res);
 }

 static void
-free_aligned(void *ptr)
+libc_free_aligned(void *ptr)
 {
         void *mem;
         uintptr_t x;

         if (ptr == NULL)
                 return;

         x = (uintptr_t)ptr;

--- 33 unchanged lines hidden ---
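
Note on the helpers renamed in the hunk above: libc_malloc_aligned() over-allocates by sizeof(void *) + align - 1 bytes, rounds the result up to the requested alignment with roundup2(), and stashes the raw allocation pointer in the word immediately below the address it returns; libc_free_aligned() reads that word back to recover what to hand to the underlying free. A minimal standalone sketch of the same scheme, using plain malloc()/free() instead of the __je_bootstrap_*() hooks and invented demo_* names:

#include <stdint.h>
#include <stdlib.h>

/* Round x up to a multiple of a (a power of two), like roundup2() in <sys/param.h>. */
#define DEMO_ROUNDUP2(x, a) (((x) + ((a) - 1)) & ~((uintptr_t)(a) - 1))

static void *
demo_malloc_aligned(size_t size, size_t align)
{
        void *mem, *res;

        if (align < sizeof(void *))
                align = sizeof(void *);
        /* Room for the payload, the saved raw pointer, and worst-case padding. */
        mem = malloc(size + sizeof(void *) + align - 1);
        if (mem == NULL)
                return (NULL);
        res = (void *)DEMO_ROUNDUP2((uintptr_t)mem + sizeof(void *), align);
        /* Remember the raw pointer in the word just below the aligned block. */
        *(void **)((uintptr_t)res - sizeof(void *)) = mem;
        return (res);
}

static void
demo_free_aligned(void *ptr)
{
        if (ptr == NULL)
                return;
        free(*(void **)((uintptr_t)ptr - sizeof(void *)));
}

int
main(void)
{
        void *p = demo_malloc_aligned(100, 64);

        demo_free_aligned(p);
        return (0);
}

The per-allocation overhead is the same sizeof(void *) + align - 1 bytes that the code in the diff requests from __je_bootstrap_malloc().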

  * [4] Its unclear if "without any alignment gap" is hard ABI requirement,
  *     but we must follow this rule due to suboptimal _set_tp()
  *     (aka <ARCH>_SET_TP) implementation. This function doesn't expect TP but
  *     TCB as argument.
  *
  * [5] I'm not able to validate "values are biased" assertions.
  */

-#define TLS_TCB_SIZE (2 * sizeof(void *))
-
 /*
  * Return pointer to allocated TLS block
  */
 static void *
 get_tls_block_ptr(void *tcb, size_t tcbsize)
 {
         size_t extra_size, post_size, pre_size, tls_block_size;

         /* Compute fragments sizes. */
         extra_size = tcbsize - TLS_TCB_SIZE;
 #if defined(__aarch64__) || defined(__arm__)
-        post_size = roundup2(TLS_TCB_SIZE, tls_init_align) - TLS_TCB_SIZE;
+        post_size = roundup2(TLS_TCB_SIZE, libc_tls_init_align) - TLS_TCB_SIZE;
 #else
         post_size = 0;
 #endif
         tls_block_size = tcbsize + post_size;
-        pre_size = roundup2(tls_block_size, tls_init_align) - tls_block_size;
+        pre_size = roundup2(tls_block_size, libc_tls_init_align) -
+            tls_block_size;

         return ((char *)tcb - pre_size - extra_size);
 }

 /*
  * Free Static TLS using the Variant I method. The tcbsize
  * and tcbalign parameters must be the same as those used to allocate
  * the block.
  */
 void
 __libc_free_tls(void *tcb, size_t tcbsize, size_t tcbalign __unused)
 {
         Elf_Addr *dtv;
         Elf_Addr **tls;

         tls = (Elf_Addr **)tcb;
         dtv = tls[0];
         __je_bootstrap_free(dtv);
-        free_aligned(get_tls_block_ptr(tcb, tcbsize));
+        libc_free_aligned(get_tls_block_ptr(tcb, tcbsize));
 }

 /*
  * Allocate Static TLS using the Variant I method.
  *
  * To handle all above requirements, we setup the following layout for
  * TLS block:
  * (whole memory block is aligned with MAX(TLS_TCB_ALIGN, tls_init_align))

--- 17 unchanged lines hidden ---

         Elf_Addr *dtv, **tcb;
         char *tls_block, *tls;
         size_t extra_size, maxalign, post_size, pre_size, tls_block_size;

         if (oldtcb != NULL && tcbsize == TLS_TCB_SIZE)
                 return (oldtcb);

         tls_assert(tcbalign >= TLS_TCB_ALIGN);
-        maxalign = MAX(tcbalign, tls_init_align);
+        maxalign = MAX(tcbalign, libc_tls_init_align);

         /* Compute fragmets sizes. */
         extra_size = tcbsize - TLS_TCB_SIZE;
 #if defined(__aarch64__) || defined(__arm__)
-        post_size = roundup2(TLS_TCB_SIZE, tls_init_align) - TLS_TCB_SIZE;
+        post_size = roundup2(TLS_TCB_SIZE, libc_tls_init_align) - TLS_TCB_SIZE;
 #else
         post_size = 0;
 #endif
         tls_block_size = tcbsize + post_size;
-        pre_size = roundup2(tls_block_size, tls_init_align) - tls_block_size;
-        tls_block_size += pre_size + tls_static_space;
+        pre_size = roundup2(tls_block_size, libc_tls_init_align) -
+            tls_block_size;
+        tls_block_size += pre_size + libc_tls_static_space;

         /* Allocate whole TLS block */
-        tls_block = malloc_aligned(tls_block_size, maxalign);
+        tls_block = libc_malloc_aligned(tls_block_size, maxalign);
         if (tls_block == NULL) {
                 tls_msg("__libc_allocate_tls: Out of memory.\n");
                 abort();
         }
         memset(tls_block, 0, tls_block_size);
         tcb = (Elf_Addr **)(tls_block + pre_size + extra_size);
         tls = (char *)tcb + TLS_TCB_SIZE + post_size;

         if (oldtcb != NULL) {
                 memcpy(tls_block, get_tls_block_ptr(oldtcb, tcbsize),
                     tls_block_size);
-                free_aligned(oldtcb);
+                libc_free_aligned(oldtcb);

                 /* Adjust the DTV. */
                 dtv = tcb[0];
                 dtv[2] = (Elf_Addr)(tls + DTV_OFFSET);
         } else {
                 dtv = __je_bootstrap_malloc(3 * sizeof(Elf_Addr));
                 if (dtv == NULL) {
                         tls_msg("__libc_allocate_tls: Out of memory.\n");
                         abort();
                 }
                 /* Build the DTV. */
                 tcb[0] = dtv;
                 dtv[0] = 1;     /* Generation. */
                 dtv[1] = 1;     /* Segments count. */
                 dtv[2] = (Elf_Addr)(tls + DTV_OFFSET);

-                if (tls_init_size > 0)
-                        memcpy(tls, tls_init, tls_init_size);
+                if (libc_tls_init_size > 0)
+                        memcpy(tls, libc_tls_init, libc_tls_init_size);
         }

         return (tcb);
 }

 #endif

 #ifdef TLS_VARIANT_II

--- 9 unchanged lines hidden ---
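
The Variant I allocator above lays out one block as: pre_size padding, the caller's extra TCB space (tcbsize - TLS_TCB_SIZE), the two-pointer TCB, post_size padding (aarch64/arm only), and finally the static TLS area; tcb[0] points at a three-slot DTV holding the generation, the segment count, and the address of the static area biased by DTV_OFFSET. Since the fragment arithmetic is easy to misread, here is a small self-contained program that reproduces it with invented input sizes (illustrative numbers only, not values from the diff):

#include <stdio.h>
#include <stddef.h>

/* Same rounding as roundup2() in <sys/param.h> (align must be a power of two). */
#define DEMO_ROUNDUP2(x, a) (((x) + ((a) - 1)) & ~((size_t)(a) - 1))

int
main(void)
{
        size_t tls_tcb_size = 2 * sizeof(void *);       /* TLS_TCB_SIZE */
        size_t tcbsize = 32;                            /* hypothetical caller value */
        size_t init_align = 16;                         /* hypothetical PT_TLS p_align */
        size_t static_space = 128;                      /* hypothetical rounded p_memsz */
        size_t extra_size, post_size, pre_size, block_size;

        /* Mirrors get_tls_block_ptr() / the Variant I __libc_allocate_tls() above. */
        extra_size = tcbsize - tls_tcb_size;
        /* post_size is only nonzero on aarch64/arm in the real code. */
        post_size = DEMO_ROUNDUP2(tls_tcb_size, init_align) - tls_tcb_size;
        block_size = tcbsize + post_size;
        pre_size = DEMO_ROUNDUP2(block_size, init_align) - block_size;
        block_size += pre_size + static_space;

        printf("pre=%zu extra=%zu tcb=%zu post=%zu tls=%zu total=%zu\n",
            pre_size, extra_size, tls_tcb_size, post_size, static_space,
            block_size);
        return (0);
}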

         size_t size;
         Elf_Addr* dtv;
         Elf_Addr tlsstart, tlsend;

         /*
          * Figure out the size of the initial TLS block so that we can
          * find stuff which ___tls_get_addr() allocated dynamically.
          */
-        tcbalign = MAX(tcbalign, tls_init_align);
-        size = roundup2(tls_static_space, tcbalign);
+        tcbalign = MAX(tcbalign, libc_tls_init_align);
+        size = roundup2(libc_tls_static_space, tcbalign);

         dtv = ((Elf_Addr**)tcb)[1];
         tlsend = (Elf_Addr) tcb;
         tlsstart = tlsend - size;
-        free_aligned((void*)tlsstart);
+        libc_free_aligned((void*)tlsstart);
         __je_bootstrap_free(dtv);
 }

 /*
  * Allocate Static TLS using the Variant II method.
  */
 void *
 __libc_allocate_tls(void *oldtls, size_t tcbsize, size_t tcbalign)
 {
         size_t size;
         char *tls;
         Elf_Addr *dtv;
         Elf_Addr segbase, oldsegbase;

-        tcbalign = MAX(tcbalign, tls_init_align);
-        size = roundup2(tls_static_space, tcbalign);
+        tcbalign = MAX(tcbalign, libc_tls_init_align);
+        size = roundup2(libc_tls_static_space, tcbalign);

         if (tcbsize < 2 * sizeof(Elf_Addr))
                 tcbsize = 2 * sizeof(Elf_Addr);
-        tls = malloc_aligned(size + tcbsize, tcbalign);
+        tls = libc_malloc_aligned(size + tcbsize, tcbalign);
         if (tls == NULL) {
                 tls_msg("__libc_allocate_tls: Out of memory.\n");
                 abort();
         }
         memset(tls, 0, size + tcbsize);
         dtv = __je_bootstrap_malloc(3 * sizeof(Elf_Addr));
         if (dtv == NULL) {
                 tls_msg("__libc_allocate_tls: Out of memory.\n");
                 abort();
         }

         segbase = (Elf_Addr)(tls + size);
         ((Elf_Addr*)segbase)[0] = segbase;
         ((Elf_Addr*)segbase)[1] = (Elf_Addr) dtv;

         dtv[0] = 1;
         dtv[1] = 1;
-        dtv[2] = segbase - tls_static_space;
+        dtv[2] = segbase - libc_tls_static_space;

         if (oldtls) {
                 /*
                  * Copy the static TLS block over whole.
                  */
                 oldsegbase = (Elf_Addr) oldtls;
-                memcpy((void *)(segbase - tls_static_space),
-                    (const void *)(oldsegbase - tls_static_space),
-                    tls_static_space);
+                memcpy((void *)(segbase - libc_tls_static_space),
+                    (const void *)(oldsegbase - libc_tls_static_space),
+                    libc_tls_static_space);

                 /*
                  * We assume that this block was the one we created with
                  * allocate_initial_tls().
                  */
                 _rtld_free_tls(oldtls, 2*sizeof(Elf_Addr), sizeof(Elf_Addr));
         } else {
-                memcpy((void *)(segbase - tls_static_space),
-                    tls_init, tls_init_size);
-                memset((void *)(segbase - tls_static_space + tls_init_size),
-                    0, tls_static_space - tls_init_size);
+                memcpy((void *)(segbase - libc_tls_static_space),
+                    libc_tls_init, libc_tls_init_size);
+                memset((void *)(segbase - libc_tls_static_space +
+                    libc_tls_init_size), 0,
+                    libc_tls_static_space - libc_tls_init_size);
         }

         return (void*) segbase;
 }

 #endif /* TLS_VARIANT_II */

 #else

--- 47 unchanged lines hidden ---
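
In the Variant II code above, segbase is the address of the TCB placed at the end of the allocation: word 0 is a self-pointer (where the thread pointer aims on Variant II targets such as amd64 and i386), word 1 points at the DTV, and the static TLS image sits immediately below segbase, which is why dtv[2] is set to segbase - libc_tls_static_space. A sketch of those relationships, with invented demo_* names (not libc API) and the layout taken from the code above:

#include <stddef.h>
#include <stdint.h>

/*
 *   tls ... [ static TLS image ][ TCB: self, dtv ]
 *           ^                   ^
 *           dtv[2]              segbase (thread pointer)
 */
struct demo_tcb {
        uintptr_t        self;  /* ((Elf_Addr *)segbase)[0] = segbase */
        uintptr_t       *dtv;   /* dtv[0] generation, dtv[1] count, dtv[2] base */
};

/*
 * A variable located "off" bytes into the static TLS block is reached from
 * dtv[2], i.e. at segbase - libc_tls_static_space + off.
 */
static inline void *
demo_static_tls_var(const struct demo_tcb *tcb, size_t off)
{
        return ((void *)(tcb->dtv[2] + off));
}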

                         break;
                 }
         }
         if (phdr == NULL || phent != sizeof(Elf_Phdr) || phnum == 0)
                 return;

         for (i = 0; (unsigned) i < phnum; i++) {
                 if (phdr[i].p_type == PT_TLS) {
-                        tls_static_space = roundup2(phdr[i].p_memsz,
-                            phdr[i].p_align);
-                        tls_init_size = phdr[i].p_filesz;
-                        tls_init_align = phdr[i].p_align;
-                        tls_init = (void*) phdr[i].p_vaddr;
+                        libc_tls_static_space = roundup2(phdr[i].p_memsz,
+                            phdr[i].p_align);
+                        libc_tls_init_size = phdr[i].p_filesz;
+                        libc_tls_init_align = phdr[i].p_align;
+                        libc_tls_init = (void *)phdr[i].p_vaddr;
                         break;
                 }
         }
         tls = _rtld_allocate_tls(NULL, TLS_TCB_SIZE, TLS_TCB_ALIGN);

         _set_tp(tls);
 #endif
 }
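
The final hunk is in libc's TLS start-up path: it walks the program headers (located in the hidden context above, apparently via the ELF auxiliary vector given the phent/phnum checks), and when it finds PT_TLS it records the initialization image and derives the static TLS size as roundup2(p_memsz, p_align). The same PT_TLS fields can be inspected from an ordinary program with dl_iterate_phdr(3); the sketch below uses invented demo_* names, assumes FreeBSD-style Elf_Phdr typedefs from <link.h>, and is independent of the change in this diff:

#include <link.h>
#include <stdio.h>

/* Same rounding as roundup2() (p_align is a power of two). */
#define DEMO_ROUNDUP2(x, a) (((x) + ((a) - 1)) & ~((size_t)(a) - 1))

static int
demo_phdr_cb(struct dl_phdr_info *info, size_t size, void *data)
{
        const Elf_Phdr *ph;
        int i;

        (void)size;
        (void)data;
        for (i = 0; i < info->dlpi_phnum; i++) {
                ph = &info->dlpi_phdr[i];
                if (ph->p_type != PT_TLS)
                        continue;
                /* Same derivation of the static TLS size as in the diff. */
                printf("%s: filesz=%zu memsz=%zu align=%zu static_space=%zu\n",
                    info->dlpi_name, (size_t)ph->p_filesz, (size_t)ph->p_memsz,
                    (size_t)ph->p_align,
                    DEMO_ROUNDUP2((size_t)ph->p_memsz, (size_t)ph->p_align));
        }
        return (0);
}

int
main(void)
{
        return (dl_iterate_phdr(demo_phdr_cb, NULL));
}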