glibc 2.9
atomic.h
/* Copyright (C) 2003 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */

#include <stdint.h>
typedef int8_t atomic8_t;
typedef uint8_t uatomic8_t;
typedef int_fast8_t atomic_fast8_t;
typedef uint_fast8_t uatomic_fast8_t;

typedef int16_t atomic16_t;
typedef uint16_t uatomic16_t;
typedef int_fast16_t atomic_fast16_t;
typedef uint_fast16_t uatomic_fast16_t;

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
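
/* Editorial note: these typedefs give each operand width a fixed name that
   the generic glibc atomic machinery can refer to; the *_fast* variants
   mirror the <stdint.h> fast types in the same way.  */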

#ifdef UP
# define __MB        /* nothing */
#else
# define __MB        "      mb\n"
#endif

/* Compare and exchange.  For all of the "xxx" routines, we expect a
   "__prev" and a "__cmp" variable to be provided by the enclosing scope,
   in which values are returned (the "bool" and "val" wrappers below
   declare them).  */

#define __arch_compare_and_exchange_xxx_8_int(mem, new, old, mb1, mb2) \
({                                                              \
  unsigned long __tmp, __snew, __addr64;                        \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "      andnot %[__addr8],7,%[__addr64]\n"                \
       "      insbl  %[__new],%[__addr8],%[__snew]\n"           \
       "1:    ldq_l  %[__tmp],0(%[__addr64])\n"                 \
       "      extbl  %[__tmp],%[__addr8],%[__prev]\n"           \
       "      cmpeq  %[__prev],%[__old],%[__cmp]\n"             \
       "      beq    %[__cmp],2f\n"                             \
       "      mskbl  %[__tmp],%[__addr8],%[__tmp]\n"            \
       "      or     %[__snew],%[__tmp],%[__tmp]\n"             \
       "      stq_c  %[__tmp],0(%[__addr64])\n"                 \
       "      beq    %[__tmp],1b\n"                             \
              mb2                                               \
       "2:"                                                     \
       : [__prev] "=&r" (__prev),                               \
         [__snew] "=&r" (__snew),                               \
         [__tmp] "=&r" (__tmp),                                 \
         [__cmp] "=&r" (__cmp),                                 \
         [__addr64] "=&r" (__addr64)                            \
       : [__addr8] "r" (mem),                                   \
         [__old] "Ir" ((uint64_t)(uint8_t)(uint64_t)(old)),     \
         [__new] "r" (new)                                      \
       : "memory");                                             \
})
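
/* Illustrative sketch, not part of the original header: what the 8-bit CAS
   above computes, written in portable C.  Pre-BWX Alpha has no byte store,
   so the macro operates on the aligned quadword containing the byte;
   insbl/extbl/mskbl position, extract, and clear that byte (Alpha glibc
   runs little-endian, so the byte index is the low three address bits).
   Atomicity itself is NOT modeled here: in the real macro the ldq_l/stq_c
   pair succeeds only if the quadword was untouched in between, and the
   final beq loops back to retry.  The 16-bit variant below is identical
   with the word forms inswl/extwl/mskwl.  */
static inline unsigned long
__sketch_cas_8 (unsigned char *mem, unsigned char newval,
                unsigned char oldval)
{
  uintptr_t addr8 = (uintptr_t) mem;
  uint64_t *addr64 = (uint64_t *) (addr8 & ~(uintptr_t) 7); /* andnot ..,7 */
  unsigned int shift = (unsigned int) (addr8 & 7) * 8;
  uint64_t tmp = *addr64;                              /* ldq_l          */
  unsigned long prev = (tmp >> shift) & 0xff;          /* extbl          */
  if (prev == oldval)                                  /* cmpeq + beq 2f */
    {
      tmp &= ~((uint64_t) 0xff << shift);              /* mskbl          */
      tmp |= (uint64_t) newval << shift;               /* insbl + or     */
      *addr64 = tmp;                                   /* stq_c          */
    }
  return prev;
}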

#define __arch_compare_and_exchange_xxx_16_int(mem, new, old, mb1, mb2) \
({                                                              \
  unsigned long __tmp, __snew, __addr64;                        \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "      andnot %[__addr16],7,%[__addr64]\n"               \
       "      inswl  %[__new],%[__addr16],%[__snew]\n"          \
       "1:    ldq_l  %[__tmp],0(%[__addr64])\n"                 \
       "      extwl  %[__tmp],%[__addr16],%[__prev]\n"          \
       "      cmpeq  %[__prev],%[__old],%[__cmp]\n"             \
       "      beq    %[__cmp],2f\n"                             \
       "      mskwl  %[__tmp],%[__addr16],%[__tmp]\n"           \
       "      or     %[__snew],%[__tmp],%[__tmp]\n"             \
       "      stq_c  %[__tmp],0(%[__addr64])\n"                 \
       "      beq    %[__tmp],1b\n"                             \
              mb2                                               \
       "2:"                                                     \
       : [__prev] "=&r" (__prev),                               \
         [__snew] "=&r" (__snew),                               \
         [__tmp] "=&r" (__tmp),                                 \
         [__cmp] "=&r" (__cmp),                                 \
         [__addr64] "=&r" (__addr64)                            \
       : [__addr16] "r" (mem),                                  \
         [__old] "Ir" ((uint64_t)(uint16_t)(uint64_t)(old)),    \
         [__new] "r" (new)                                      \
       : "memory");                                             \
})

#define __arch_compare_and_exchange_xxx_32_int(mem, new, old, mb1, mb2) \
({                                                              \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "1:    ldl_l  %[__prev],%[__mem]\n"                      \
       "      cmpeq  %[__prev],%[__old],%[__cmp]\n"             \
       "      beq    %[__cmp],2f\n"                             \
       "      mov    %[__new],%[__cmp]\n"                       \
       "      stl_c  %[__cmp],%[__mem]\n"                       \
       "      beq    %[__cmp],1b\n"                             \
              mb2                                               \
       "2:"                                                     \
       : [__prev] "=&r" (__prev),                               \
         [__cmp] "=&r" (__cmp)                                  \
       : [__mem] "m" (*(mem)),                                  \
         [__old] "Ir" ((uint64_t)(atomic32_t)(uint64_t)(old)),  \
         [__new] "Ir" (new)                                     \
       : "memory");                                             \
})

#define __arch_compare_and_exchange_xxx_64_int(mem, new, old, mb1, mb2) \
({                                                              \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "1:    ldq_l  %[__prev],%[__mem]\n"                      \
       "      cmpeq  %[__prev],%[__old],%[__cmp]\n"             \
       "      beq    %[__cmp],2f\n"                             \
       "      mov    %[__new],%[__cmp]\n"                       \
       "      stq_c  %[__cmp],%[__mem]\n"                       \
       "      beq    %[__cmp],1b\n"                             \
              mb2                                               \
       "2:"                                                     \
       : [__prev] "=&r" (__prev),                               \
         [__cmp] "=&r" (__cmp)                                  \
       : [__mem] "m" (*(mem)),                                  \
         [__old] "Ir" ((uint64_t)(old)),                        \
         [__new] "Ir" (new)                                     \
       : "memory");                                             \
})

/* For all "bool" routines, we return FALSE if exchange successful.  */

#define __arch_compare_and_exchange_bool_8_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                             \
   __arch_compare_and_exchange_xxx_8_int(mem, new, old, mb1, mb2); \
   !__cmp; })

#define __arch_compare_and_exchange_bool_16_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                             \
   __arch_compare_and_exchange_xxx_16_int(mem, new, old, mb1, mb2); \
   !__cmp; })

#define __arch_compare_and_exchange_bool_32_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                             \
   __arch_compare_and_exchange_xxx_32_int(mem, new, old, mb1, mb2); \
   !__cmp; })

#define __arch_compare_and_exchange_bool_64_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                             \
   __arch_compare_and_exchange_xxx_64_int(mem, new, old, mb1, mb2); \
   !__cmp; })
/* For all "val" routines, return the old value whether exchange
   successful or not.  */

#define __arch_compare_and_exchange_val_8_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                             \
   __arch_compare_and_exchange_xxx_8_int(mem, new, old, mb1, mb2); \
   (typeof (*mem))__prev; })

#define __arch_compare_and_exchange_val_16_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                             \
   __arch_compare_and_exchange_xxx_16_int(mem, new, old, mb1, mb2); \
   (typeof (*mem))__prev; })

#define __arch_compare_and_exchange_val_32_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                             \
   __arch_compare_and_exchange_xxx_32_int(mem, new, old, mb1, mb2); \
   (typeof (*mem))__prev; })

#define __arch_compare_and_exchange_val_64_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                             \
   __arch_compare_and_exchange_xxx_64_int(mem, new, old, mb1, mb2); \
   (typeof (*mem))__prev; })
/* Compare and exchange with "acquire" semantics, i.e. barrier after.  */

#define atomic_compare_and_exchange_bool_acq(mem, new, old)     \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool, int,  \
                        mem, new, old, "", __MB)

#define atomic_compare_and_exchange_val_acq(mem, new, old)      \
  __atomic_val_bysize (__arch_compare_and_exchange_val, int,    \
                       mem, new, old, "", __MB)

/* Compare and exchange with "release" semantics, i.e. barrier before.  */

#define atomic_compare_and_exchange_bool_rel(mem, new, old)     \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool, int,  \
                        mem, new, old, __MB, "")

#define atomic_compare_and_exchange_val_rel(mem, new, old)      \
  __atomic_val_bysize (__arch_compare_and_exchange_val, int,    \
                       mem, new, old, __MB, "")
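
/* Hypothetical usage sketches, not part of the original header.  The
   "bool" form returns zero on success; the "val" form returns the value
   found in memory, which equals the expected old value exactly when the
   exchange happened.  Both rely on the __atomic_*_bysize dispatchers
   supplied by the generic glibc <atomic.h>.  */
static inline void
__sketch_increment_bool (atomic32_t *counter)
{
  atomic32_t oldval;
  do
    oldval = *counter;
  while (atomic_compare_and_exchange_bool_acq (counter, oldval + 1, oldval));
}

static inline atomic32_t
__sketch_increment_val (atomic32_t *counter)
{
  atomic32_t oldval, seen;
  do
    {
      oldval = *counter;
      seen = atomic_compare_and_exchange_val_acq (counter, oldval + 1,
                                                  oldval);
    }
  while (seen != oldval);       /* someone else won the race; retry */
  return oldval + 1;
}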

/* Atomically store value and return the previous value.  */

#define __arch_exchange_8_int(mem, value, mb1, mb2)             \
({                                                              \
  unsigned long __ret, __tmp, __addr64, __sval;                 \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "      andnot %[__addr8],7,%[__addr64]\n"                \
       "      insbl  %[__value],%[__addr8],%[__sval]\n"         \
       "1:    ldq_l  %[__tmp],0(%[__addr64])\n"                 \
       "      extbl  %[__tmp],%[__addr8],%[__ret]\n"            \
       "      mskbl  %[__tmp],%[__addr8],%[__tmp]\n"            \
       "      or     %[__sval],%[__tmp],%[__tmp]\n"             \
       "      stq_c  %[__tmp],0(%[__addr64])\n"                 \
       "      beq    %[__tmp],1b\n"                             \
              mb2                                               \
       : [__ret] "=&r" (__ret),                                 \
         [__sval] "=&r" (__sval),                               \
         [__tmp] "=&r" (__tmp),                                 \
         [__addr64] "=&r" (__addr64)                            \
       : [__addr8] "r" (mem),                                   \
         [__value] "r" (value)                                  \
       : "memory");                                             \
  __ret; })

#define __arch_exchange_16_int(mem, value, mb1, mb2)            \
({                                                              \
  unsigned long __ret, __tmp, __addr64, __sval;                 \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "      andnot %[__addr16],7,%[__addr64]\n"               \
       "      inswl  %[__value],%[__addr16],%[__sval]\n"        \
       "1:    ldq_l  %[__tmp],0(%[__addr64])\n"                 \
       "      extwl  %[__tmp],%[__addr16],%[__ret]\n"           \
       "      mskwl  %[__tmp],%[__addr16],%[__tmp]\n"           \
       "      or     %[__sval],%[__tmp],%[__tmp]\n"             \
       "      stq_c  %[__tmp],0(%[__addr64])\n"                 \
       "      beq    %[__tmp],1b\n"                             \
              mb2                                               \
       : [__ret] "=&r" (__ret),                                 \
         [__sval] "=&r" (__sval),                               \
         [__tmp] "=&r" (__tmp),                                 \
         [__addr64] "=&r" (__addr64)                            \
       : [__addr16] "r" (mem),                                  \
         [__value] "r" (value)                                  \
       : "memory");                                             \
  __ret; })

#define __arch_exchange_32_int(mem, value, mb1, mb2)            \
({                                                              \
  signed int __ret, __tmp;                                      \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "1:    ldl_l  %[__ret],%[__mem]\n"                       \
       "      mov    %[__val],%[__tmp]\n"                       \
       "      stl_c  %[__tmp],%[__mem]\n"                       \
       "      beq    %[__tmp],1b\n"                             \
              mb2                                               \
       : [__ret] "=&r" (__ret),                                 \
         [__tmp] "=&r" (__tmp)                                  \
       : [__mem] "m" (*(mem)),                                  \
         [__val] "Ir" (value)                                   \
       : "memory");                                             \
  __ret; })

#define __arch_exchange_64_int(mem, value, mb1, mb2)            \
({                                                              \
  unsigned long __ret, __tmp;                                   \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "1:    ldq_l  %[__ret],%[__mem]\n"                       \
       "      mov    %[__val],%[__tmp]\n"                       \
       "      stq_c  %[__tmp],%[__mem]\n"                       \
       "      beq    %[__tmp],1b\n"                             \
              mb2                                               \
       : [__ret] "=&r" (__ret),                                 \
         [__tmp] "=&r" (__tmp)                                  \
       : [__mem] "m" (*(mem)),                                  \
         [__val] "Ir" (value)                                   \
       : "memory");                                             \
  __ret; })

#define atomic_exchange_acq(mem, value) \
  __atomic_val_bysize (__arch_exchange, int, mem, value, "", __MB)

#define atomic_exchange_rel(mem, value) \
  __atomic_val_bysize (__arch_exchange, int, mem, value, __MB, "")
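
/* Hypothetical sketch, not part of the original header: a test-and-set
   spinlock on top of the exchange macros.  The acquire form puts the
   barrier after the exchange, so the critical section cannot float above
   the lock acquisition; the release form puts it before, so critical-
   section stores complete before the lock is seen as free.  */
static inline void
__sketch_spin_lock (atomic32_t *lock)
{
  while (atomic_exchange_acq (lock, 1) != 0)
    ;                           /* spin while the previous value was 1 */
}

static inline void
__sketch_spin_unlock (atomic32_t *lock)
{
  (void) atomic_exchange_rel (lock, 0);
}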

/* Atomically add value and return the previous (unincremented) value.  */

#define __arch_exchange_and_add_8_int(mem, value, mb1, mb2) \
  ({ __builtin_trap (); 0; })

#define __arch_exchange_and_add_16_int(mem, value, mb1, mb2) \
  ({ __builtin_trap (); 0; })
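
/* Editorial note: the 8- and 16-bit variants above are deliberately left
   unimplemented; __builtin_trap aborts at run time if the generic size
   dispatcher ever selects them.  */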

#define __arch_exchange_and_add_32_int(mem, value, mb1, mb2)    \
({                                                              \
  signed int __ret, __tmp;                                      \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "1:    ldl_l  %[__ret],%[__mem]\n"                       \
       "      addl   %[__ret],%[__val],%[__tmp]\n"              \
       "      stl_c  %[__tmp],%[__mem]\n"                       \
       "      beq    %[__tmp],1b\n"                             \
              mb2                                               \
       : [__ret] "=&r" (__ret),                                 \
         [__tmp] "=&r" (__tmp)                                  \
       : [__mem] "m" (*(mem)),                                  \
         [__val] "Ir" ((signed int)(value))                     \
       : "memory");                                             \
  __ret; })

#define __arch_exchange_and_add_64_int(mem, value, mb1, mb2)    \
({                                                              \
  unsigned long __ret, __tmp;                                   \
  __asm__ __volatile__ (                                        \
              mb1                                               \
       "1:    ldq_l  %[__ret],%[__mem]\n"                       \
       "      addq   %[__ret],%[__val],%[__tmp]\n"              \
       "      stq_c  %[__tmp],%[__mem]\n"                       \
       "      beq    %[__tmp],1b\n"                             \
              mb2                                               \
       : [__ret] "=&r" (__ret),                                 \
         [__tmp] "=&r" (__tmp)                                  \
       : [__mem] "m" (*(mem)),                                  \
         [__val] "Ir" ((unsigned long)(value))                  \
       : "memory");                                             \
  __ret; })
/* ??? Barrier semantics for atomic_exchange_and_add appear to be
   undefined.  Use full barrier for now, as that's safe.  */
#define atomic_exchange_and_add(mem, value) \
  __atomic_val_bysize (__arch_exchange_and_add, int, mem, value, __MB, __MB)
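
/* Hypothetical sketch, not part of the original header: exchange-and-add
   returns the previous (unincremented) value, so handing out unique ids
   needs no retry loop.  */
static inline atomic32_t
__sketch_next_id (atomic32_t *id_counter)
{
  return atomic_exchange_and_add (id_counter, 1);   /* old value, full mb */
}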

/* ??? Blah, I'm lazy.  Implement these later.  Can do better than the
   compare-and-exchange loop provided by generic code.

#define atomic_decrement_if_positive(mem)
#define atomic_bit_test_set(mem, bit)

*/

#ifndef UP
# define atomic_full_barrier()     __asm ("mb" : : : "memory");
# define atomic_read_barrier()     __asm ("mb" : : : "memory");
# define atomic_write_barrier()    __asm ("wmb" : : : "memory");
#endif
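
/* Hypothetical sketch, not part of the original header: publishing data
   through a flag using the barriers above (defined here only for SMP
   builds; on UP the generic <atomic.h> is assumed to supply no-op
   fallbacks).  The write barrier orders the payload store before the flag
   store; the reader needs the read barrier because Alpha may reorder even
   dependent loads.  */
static inline void
__sketch_publish (int *payload, atomic32_t *ready, int value)
{
  *payload = value;
  atomic_write_barrier ();      /* "wmb": payload visible before flag  */
  *ready = 1;
}

static inline int
__sketch_consume (int *payload, atomic32_t *ready)
{
  while (*(volatile atomic32_t *) ready == 0)
    ;                           /* volatile read: poll the flag        */
  atomic_read_barrier ();       /* "mb": flag read before payload read */
  return *payload;
}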