akaros/tools/compilers/gcc-glibc/glibc-2.19-akaros/sysdeps/akaros/x86_64/tls.h
/* Definition for thread-local data handling.  nptl/x86_64 version.
   Copyright (C) 2002-2014 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _TLS_H
#define _TLS_H  1

#ifndef __ASSEMBLER__
# include <stdbool.h>
# include <stddef.h>
# include <stdint.h>
# include <stdlib.h>
# include <sysdep.h>
# include <libc-internal.h>
# include <kernel-features.h>
#include <sys/mman.h>
#include <sys/syscall.h>
#include <ros/procinfo.h>
#include <ros/procdata.h>
#include <ros/arch/mmu.h>
#include <parlib/cpu_feat.h>

/* Replacement type for __m128 since this file is included by ld.so,
   which is compiled with -mno-sse.  It must not change the alignment
   of rtld_savespace_sse.  */
typedef struct
{
  int i[4];
} __tlsh128bits;


/* Type for the dtv.  */
typedef union dtv
{
  size_t counter;
  struct
  {
    void *val;
    bool is_static;
  } pointer;
} dtv_t;


typedef struct
{
  void *tcb;            /* Pointer to the TCB.  Not necessarily the
                           thread descriptor used by libpthread.  */
  dtv_t *dtv;
  void *self;           /* Pointer to the thread descriptor.  */
  int multiple_threads;
  int gscope_flag;
  uintptr_t sysinfo;
  uintptr_t stack_guard;
  uintptr_t pointer_guard;
  unsigned long int vgetcpu_cache[2];
# ifndef __ASSUME_PRIVATE_FUTEX
  int private_futex;
# else
  int __glibc_reserved1;
# endif
  int rtld_must_xmm_save;
  /* Reservation of some values for the TM ABI.  */
  void *__private_tm[4];
  /* GCC split stack support.  */
  void *__private_ss;
  long int __glibc_reserved2;
  /* Have space for the post-AVX register size.  */
  __tlsh128bits rtld_savespace_sse[8][4] __attribute__ ((aligned (32)));

  void *__padding[8];
} tcbhead_t;
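
/* A minimal sketch of how this structure is reached at run time: the thread
   pointer on x86_64 is the %fs base and it points at this tcbhead_t, so a
   field can be read with a single %fs-relative load using its offset, e.g.

     void *self;
     asm ("movq %%fs:%c1,%q0"
          : "=r" (self)
          : "i" (offsetof (tcbhead_t, self)));

   The THREAD_* accessor macros below generalize this pattern.  */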

# define TLS_MULTIPLE_THREADS_IN_TCB 1

typedef struct rthread {
    tcbhead_t header;
} rthread_t;

#else /* __ASSEMBLER__ */
# include <tcb-offsets.h>
#endif


/* Alignment requirement for the stack.  */
#define STACK_ALIGN     16


#ifndef __ASSEMBLER__
/* Get system call information.  */
# include <sysdep.h>


/* Get the thread descriptor definition.  */
//# include <nptl/descr.h>

#ifndef LOCK_PREFIX
# ifdef UP
#  define LOCK_PREFIX   /* nothing */
# else
#  define LOCK_PREFIX   "lock;"
# endif
#endif

/* This is the size of the initial TCB.  Can't be just sizeof (tcbhead_t),
   because NPTL getpid, __libc_alloca_cutoff etc. need (almost) the whole
   struct rthread even when not linked with -lpthread.  */
# define TLS_INIT_TCB_SIZE sizeof (struct rthread)

/* Alignment requirements for the initial TCB.  */
# define TLS_INIT_TCB_ALIGN __alignof__ (struct rthread)

/* This is the size of the TCB.  */
# define TLS_TCB_SIZE sizeof (struct rthread)

/* Alignment requirements for the TCB.  */
# define TLS_TCB_ALIGN __alignof__ (struct rthread)

/* The TCB can have any size and the memory following the address the
   thread pointer points to is unspecified.  Allocate the TCB there.  */
# define TLS_TCB_AT_TP  1


/* Install the dtv pointer.  The pointer passed is to the element with
   index -1 which contains the length.  */
# define INSTALL_DTV(descr, dtvp) \
  ((tcbhead_t *) (descr))->dtv = (dtvp) + 1

/* Install new dtv for current thread.  */
# define INSTALL_NEW_DTV(dtvp) \
  ({ struct rthread *__pd;                                                    \
     THREAD_SETMEM (__pd, header.dtv, (dtvp)); })

/* Return dtv of given thread descriptor.  */
# define GET_DTV(descr) \
  (((tcbhead_t *) (descr))->dtv)
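
/* Rough usage sketch (tcb and nslots are placeholders, not how ld.so
   actually sizes its vectors): the pointer handed to INSTALL_DTV is the
   element holding the length, and the TCB stores that pointer plus one, so
   the length remains reachable as dtv[-1].counter:

     dtv_t *dtv = calloc (nslots + 1, sizeof (dtv_t));
     dtv[0].counter = nslots;       // seen as dtv[-1] after installation
     INSTALL_DTV (tcb, dtv);        // TCB now holds dtv + 1
     // GET_DTV (tcb) == dtv + 1, GET_DTV (tcb)[-1].counter == nslots
 */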


/* Code to initially initialize the thread pointer.  This might need
   special attention since 'errno' is not yet available and if the
   operation can cause a failure 'errno' must not be touched.  */
# define TLS_INIT_TP(thrdescr, secondcall) tls_init_tp(thrdescr)

/* Return the address of the dtv for the current thread.  */
# define THREAD_DTV() \
  ({ struct rthread *__pd;                                                    \
     THREAD_GETMEM (__pd, header.dtv); })

/* Return the thread descriptor for the current thread.

   The contained asm must *not* be marked volatile since otherwise
   assignments like
        pthread_descr self = thread_self();
   do not get optimized away.  */
# define THREAD_SELF \
  ({ struct rthread *__self;                                                  \
     asm ("movq %%fs:%c1,%q0" : "=r" (__self)                                 \
          : "i" (offsetof (struct rthread, header.self)));                    \
     __self;})

/* Magic for libthread_db to know how to do THREAD_SELF.  */
# define DB_THREAD_SELF_INCLUDE  <sys/reg.h> /* For the FS constant.  */
# define DB_THREAD_SELF CONST_THREAD_AREA (64, FS)


/* Read member of the thread descriptor directly.  */
# define THREAD_GETMEM(descr, member) \
  ({ __typeof (descr->member) __value;                                        \
     if (sizeof (__value) == 1)                                               \
       asm volatile ("movb %%fs:%P2,%b0"                                      \
                     : "=q" (__value)                                         \
                     : "0" (0), "i" (offsetof (struct rthread, member)));     \
     else if (sizeof (__value) == 4)                                          \
       asm volatile ("movl %%fs:%P1,%0"                                       \
                     : "=r" (__value)                                         \
                     : "i" (offsetof (struct rthread, member)));              \
     else                                                                     \
       {                                                                      \
         if (sizeof (__value) != 8)                                           \
           /* There should not be any value with a size other than 1,         \
              4 or 8.  */                                                     \
           abort ();                                                          \
                                                                              \
         asm volatile ("movq %%fs:%P1,%q0"                                    \
                       : "=r" (__value)                                       \
                       : "i" (offsetof (struct rthread, member)));            \
       }                                                                      \
     __value; })
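
/* Usage sketch: THREAD_GETMEM always reads the *current* thread's descriptor
   through %fs; the descr argument is only consulted for its type, e.g.

     dtv_t *dtv = THREAD_GETMEM (THREAD_SELF, header.dtv);
     int mt = THREAD_GETMEM (THREAD_SELF, header.multiple_threads);
 */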


/* Same as THREAD_GETMEM, but the member offset can be non-constant.  */
# define THREAD_GETMEM_NC(descr, member, idx) \
  ({ __typeof (descr->member[0]) __value;                                     \
     if (sizeof (__value) == 1)                                               \
       asm volatile ("movb %%fs:%P2(%q3),%b0"                                 \
                     : "=q" (__value)                                         \
                     : "0" (0), "i" (offsetof (struct rthread, member[0])),   \
                       "r" (idx));                                            \
     else if (sizeof (__value) == 4)                                          \
       asm volatile ("movl %%fs:%P1(,%q2,4),%0"                               \
                     : "=r" (__value)                                         \
                     : "i" (offsetof (struct rthread, member[0])), "r" (idx));\
     else                                                                     \
       {                                                                      \
         if (sizeof (__value) != 8)                                           \
           /* There should not be any value with a size other than 1,         \
              4 or 8.  */                                                     \
           abort ();                                                          \
                                                                              \
         asm volatile ("movq %%fs:%P1(,%q2,8),%q0"                            \
                       : "=r" (__value)                                       \
                       : "i" (offsetof (struct rthread, member[0])),          \
                         "r" (idx));                                          \
       }                                                                      \
     __value; })


/* Loading addresses of objects on x86-64 needs special treatment when
   generating PIC code.  */
#ifdef __pic__
# define IMM_MODE "nr"
#else
# define IMM_MODE "ir"
#endif


/* Set member of the thread descriptor directly.  */
# define THREAD_SETMEM(descr, member, value) \
  ({ if (sizeof (descr->member) == 1)                                         \
       asm volatile ("movb %b0,%%fs:%P1" :                                    \
                     : "iq" (value),                                          \
                       "i" (offsetof (struct rthread, member)));              \
     else if (sizeof (descr->member) == 4)                                    \
       asm volatile ("movl %0,%%fs:%P1" :                                     \
                     : IMM_MODE (value),                                      \
                       "i" (offsetof (struct rthread, member)));              \
     else                                                                     \
       {                                                                      \
         if (sizeof (descr->member) != 8)                                     \
           /* There should not be any value with a size other than 1,         \
              4 or 8.  */                                                     \
           abort ();                                                          \
                                                                              \
         asm volatile ("movq %q0,%%fs:%P1" :                                  \
                       : IMM_MODE ((uint64_t) cast_to_integer (value)),       \
                         "i" (offsetof (struct rthread, member)));            \
       }})
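
/* Usage sketch: as with THREAD_GETMEM, the store goes to the current
   thread's descriptor via %fs regardless of descr, e.g.

     THREAD_SETMEM (THREAD_SELF, header.multiple_threads, 1);
     THREAD_SETMEM (THREAD_SELF, header.dtv, new_dtv);   // new_dtv: dtv_t *
 */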


/* Same as THREAD_SETMEM, but the member offset can be non-constant.  */
# define THREAD_SETMEM_NC(descr, member, idx, value) \
  ({ if (sizeof (descr->member[0]) == 1)                                      \
       asm volatile ("movb %b0,%%fs:%P1(%q2)" :                               \
                     : "iq" (value),                                          \
                       "i" (offsetof (struct rthread, member[0])),            \
                       "r" (idx));                                            \
     else if (sizeof (descr->member[0]) == 4)                                 \
       asm volatile ("movl %0,%%fs:%P1(,%q2,4)" :                             \
                     : IMM_MODE (value),                                      \
                       "i" (offsetof (struct rthread, member[0])),            \
                       "r" (idx));                                            \
     else                                                                     \
       {                                                                      \
         if (sizeof (descr->member[0]) != 8)                                  \
           /* There should not be any value with a size other than 1,         \
              4 or 8.  */                                                     \
           abort ();                                                          \
                                                                              \
         asm volatile ("movq %q0,%%fs:%P1(,%q2,8)" :                          \
                       : IMM_MODE ((uint64_t) cast_to_integer (value)),       \
                         "i" (offsetof (struct rthread, member[0])),          \
                         "r" (idx));                                          \
       }})


/* Atomic compare and exchange on TLS, returning old value.  */
# define THREAD_ATOMIC_CMPXCHG_VAL(descr, member, newval, oldval) \
  ({ __typeof (descr->member) __ret;                                          \
     __typeof (oldval) __old = (oldval);                                      \
     if (sizeof (descr->member) == 4)                                         \
       asm volatile (LOCK_PREFIX "cmpxchgl %2, %%fs:%P3"                      \
                     : "=a" (__ret)                                           \
                     : "0" (__old), "r" (newval),                             \
                       "i" (offsetof (struct rthread, member)));              \
     else                                                                     \
       /* Not necessary for other sizes at the moment.  */                    \
       abort ();                                                              \
     __ret; })


/* Atomic logical and.  */
# define THREAD_ATOMIC_AND(descr, member, val) \
  (void) ({ if (sizeof ((descr)->member) == 4)                                \
              asm volatile (LOCK_PREFIX "andl %1, %%fs:%P0"                   \
                            :: "i" (offsetof (struct rthread, member)),       \
                               "ir" (val));                                   \
            else                                                              \
              /* Not necessary for other sizes at the moment.  */             \
              abort (); })


/* Atomic set bit.  */
# define THREAD_ATOMIC_BIT_SET(descr, member, bit) \
  (void) ({ if (sizeof ((descr)->member) == 4)                                \
              asm volatile (LOCK_PREFIX "orl %1, %%fs:%P0"                    \
                            :: "i" (offsetof (struct rthread, member)),       \
                               "ir" (1 << (bit)));                            \
            else                                                              \
              /* Not necessary for other sizes at the moment.  */             \
              abort (); })
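
/* Usage sketch: descr again only supplies the member's type; the locked
   read-modify-write itself targets the current thread's TCB through %fs,
   e.g.

     THREAD_ATOMIC_BIT_SET (THREAD_SELF, header.multiple_threads, 0);
 */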


# define CALL_THREAD_FCT(descr) \
  ({ void *__res;                                                             \
     asm volatile ("movq %%fs:%P2, %%rdi\n\t"                                 \
                   "callq *%%fs:%P1"                                          \
                   : "=a" (__res)                                             \
                   : "i" (offsetof (struct rthread, start_routine)),          \
                     "i" (offsetof (struct rthread, arg))                     \
                   : "di", "si", "cx", "dx", "r8", "r9", "r10", "r11",        \
                     "memory", "cc");                                         \
     __res; })


/* Set the stack guard field in TCB head.  */
# define THREAD_SET_STACK_GUARD(value) \
    THREAD_SETMEM (THREAD_SELF, header.stack_guard, value)
# define THREAD_COPY_STACK_GUARD(descr) \
    ((descr)->header.stack_guard                                              \
     = THREAD_GETMEM (THREAD_SELF, header.stack_guard))


/* Set the pointer guard field in the TCB head.  */
# define THREAD_SET_POINTER_GUARD(value) \
  THREAD_SETMEM (THREAD_SELF, header.pointer_guard, value)
# define THREAD_COPY_POINTER_GUARD(descr) \
  ((descr)->header.pointer_guard                                              \
   = THREAD_GETMEM (THREAD_SELF, header.pointer_guard))
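
/* Usage sketch (guard_value and new_descr are placeholders): the running
   thread publishes its guards, and a freshly allocated descriptor inherits
   them before it starts:

     THREAD_SET_STACK_GUARD (guard_value);
     THREAD_COPY_STACK_GUARD (new_descr);
 */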


/* Get and set the global scope generation counter in the TCB head.  */
# define THREAD_GSCOPE_FLAG_UNUSED 0
# define THREAD_GSCOPE_FLAG_USED   1
# define THREAD_GSCOPE_FLAG_WAIT   2
# define THREAD_GSCOPE_RESET_FLAG() \
  do                                                                          \
    { int __res;                                                              \
      asm volatile ("xchgl %0, %%fs:%P1"                                      \
                    : "=r" (__res)                                            \
                    : "i" (offsetof (struct rthread, header.gscope_flag)),    \
                      "0" (THREAD_GSCOPE_FLAG_UNUSED));                       \
      if (__res == THREAD_GSCOPE_FLAG_WAIT)                                   \
        lll_futex_wake (&THREAD_SELF->header.gscope_flag, 1, LLL_PRIVATE);    \
    }                                                                         \
  while (0)
# define THREAD_GSCOPE_SET_FLAG() \
  THREAD_SETMEM (THREAD_SELF, header.gscope_flag, THREAD_GSCOPE_FLAG_USED)
# define THREAD_GSCOPE_WAIT() \
  GL(dl_wait_lookup_done) ()
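
/* Usage sketch: a lookup in the global scope is bracketed by the flag, and
   the reset side wakes anyone (e.g. dlclose) that saw the WAIT state:

     THREAD_GSCOPE_SET_FLAG ();
     ... walk the global search lists ...
     THREAD_GSCOPE_RESET_FLAG ();
 */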


# ifdef SHARED
/* Defined in dl-trampoline.S.  */
extern void _dl_x86_64_save_sse (void);
extern void _dl_x86_64_restore_sse (void);

# define RTLD_CHECK_FOREIGN_CALL \
  (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save) != 0)

/* NB: Don't use the xchg operation because that would imply a lock
   prefix which is expensive and unnecessary.  The cache line is also
   not contested at all.  */
#  define RTLD_ENABLE_FOREIGN_CALL \
  int old_rtld_must_xmm_save = THREAD_GETMEM (THREAD_SELF,                    \
                                              header.rtld_must_xmm_save);     \
  THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, 1)

#  define RTLD_PREPARE_FOREIGN_CALL \
  do if (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save))              \
    {                                                                         \
      _dl_x86_64_save_sse ();                                                 \
      THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, 0);              \
    }                                                                         \
  while (0)

#  define RTLD_FINALIZE_FOREIGN_CALL \
  do {                                                                        \
    if (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save) == 0)          \
      _dl_x86_64_restore_sse ();                                              \
    THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save,                    \
                   old_rtld_must_xmm_save);                                   \
  } while (0)
# endif

static inline void *__get_tls_desc(void)
{
        /* the tcb->self pointer is set to the TLS base address */
        return THREAD_SELF;
}

static inline void __set_tls_desc(void *tls_desc)
{
        if (!cpu_has_feat(CPU_FEAT_X86_FSGSBASE)) {
                __fastcall_setfsbase((uintptr_t)tls_desc);
                return;
        }
        asm volatile ("wrfsbase %0" : : "r"(tls_desc));
}
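
/* Usage sketch (new_tls is a placeholder): user-level code such as a parlib
   second-level scheduler can switch TLS contexts by saving the current
   descriptor and installing another; the helpers above use wrfsbase when the
   CPU and kernel expose FSGSBASE and fall back to the __fastcall_setfsbase
   path otherwise:

     void *old_tls = __get_tls_desc ();
     __set_tls_desc (new_tls);
     ... run on new_tls ...
     __set_tls_desc (old_tls);
 */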

static inline const char* tls_init_tp(void* thrdescr)
{
  // TCB lives at thrdescr.
  // The TCB's head pointer points to itself :-)
  tcbhead_t* head = (tcbhead_t*)thrdescr;
  head->tcb = thrdescr;
  head->self = thrdescr;

  __set_tls_desc(thrdescr);
  return NULL;
}
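
/* Usage sketch (tls_block, a char *, and tls_size are placeholders for
   whatever the allocator provides): with TLS_TCB_AT_TP the TCB sits at the
   address that becomes the thread pointer, with the static TLS data just
   below it:

     void *tcb = tls_block + tls_size - TLS_TCB_SIZE;   // suitably aligned
     TLS_INIT_TP (tcb, 0);   // fills in tcb/self and installs the %fs base
 */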

#endif /* __ASSEMBLER__ */

#endif  /* tls.h */