Back to index

glibc 2.9 — atomic.h (PowerPC32)
Go to the documentation of this file.
00001 /* Atomic operations.  PowerPC32 version.
00002    Copyright (C) 2003, 2004, 2007 Free Software Foundation, Inc.
00003    This file is part of the GNU C Library.
00004    Contributed by Paul Mackerras <paulus@au.ibm.com>, 2003.
00005 
00006    The GNU C Library is free software; you can redistribute it and/or
00007    modify it under the terms of the GNU Lesser General Public
00008    License as published by the Free Software Foundation; either
00009    version 2.1 of the License, or (at your option) any later version.
00010 
00011    The GNU C Library is distributed in the hope that it will be useful,
00012    but WITHOUT ANY WARRANTY; without even the implied warranty of
00013    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
00014    Lesser General Public License for more details.
00015 
00016    You should have received a copy of the GNU Lesser General Public
00017    License along with the GNU C Library; if not, write to the Free
00018    Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
00019    02111-1307 USA.  */
00020 
/*  POWER6 adds a "Mutex Hint" to the Load and Reserve instruction.
    This is a hint to the hardware to expect additional updates adjacent
    to the lock word or not.  If we are acquiring a Mutex, the hint
    should be true.  Otherwise we are releasing a Mutex or doing a simple
    atomic operation.  In that case we don't expect additional updates
    adjacent to the lock word after the Store Conditional and the hint
    should be false.  */

/* These macros are token-pasted directly after the "lwarx %0,0,%1"
   operand list inside the asm templates below.  On POWER6 they append
   the EH hint operand (",1" = acquiring, ",0" = releasing/plain atomic);
   on older processors they expand to nothing, so the ordinary
   two-operand lwarx encoding is emitted.  */
#if defined _ARCH_PWR6 || defined _ARCH_PWR6X
# define MUTEX_HINT_ACQ     ",1"
# define MUTEX_HINT_REL     ",0"
#else
# define MUTEX_HINT_ACQ
# define MUTEX_HINT_REL
#endif
00036 
/*
 * The 32-bit exchange_bool is different on powerpc64 because the subf
 * does signed 64-bit arithmetic while the lwarx is 32-bit unsigned
 * (a load word and zero (high 32) form).  So powerpc64 has a slightly
 * different version in sysdeps/powerpc/powerpc64/bits/atomic.h.
 */
/* Compare-and-swap with ACQUIRE semantics: if *MEM equals OLDVAL, store
   NEWVAL into *MEM.  The expansion yields 0 on success and nonzero on
   failure (i.e. *MEM differed from OLDVAL) -- note the inverted sense
   of this "bool" family.

   Loop structure: lwarx takes a reservation on MEM; "subf." computes
   __tmp = *MEM - OLDVAL and sets cr0; on mismatch we exit at 2: with
   __tmp nonzero.  Otherwise stwcx. attempts the conditional store and
   "bne- 1b" retries from scratch if the reservation was lost.  The
   __ARCH_ACQ_INSTR barrier after label 2 (an isync/sync provided by the
   common powerpc atomic.h) keeps later accesses from being performed
   before the CAS.  cr0 is clobbered by the record-form subf. and by
   stwcx.; "memory" makes the asm a compiler-level barrier.  The "b"
   constraint on MEM excludes r0, which would read as literal 0 in the
   lwarx/stwcx. base-register slot.  */
#define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval)         \
({                                                                   \
  unsigned int __tmp;                                                       \
  __asm __volatile (                                                 \
                  "1:       lwarx  %0,0,%1" MUTEX_HINT_ACQ "\n"                    \
                  "  subf.  %0,%2,%0\n"                              \
                  "  bne    2f\n"                                    \
                  "  stwcx. %3,0,%1\n"                               \
                  "  bne-   1b\n"                                    \
                  "2:       " __ARCH_ACQ_INSTR                              \
                  : "=&r" (__tmp)                                    \
                  : "b" (mem), "r" (oldval), "r" (newval)                   \
                  : "cr0", "memory");                                       \
  __tmp != 0;                                                        \
})
00058 
/* Compare-and-swap with RELEASE semantics: same lwarx/stwcx. retry loop
   as the _acq variant above, but the ordering barrier
   (__ARCH_REL_INSTR, a sync or lwsync supplied by the surrounding
   headers) is issued BEFORE the loop, so all prior stores are visible
   before the lock word is updated; no trailing barrier is needed.
   Expands to 0 on success, nonzero if *MEM did not equal OLDVAL.
   MUTEX_HINT_REL marks the lwarx as a release/plain access on POWER6.  */
#define __arch_compare_and_exchange_bool_32_rel(mem, newval, oldval)        \
({                                                                   \
  unsigned int __tmp;                                                       \
  __asm __volatile (__ARCH_REL_INSTR "\n"                            \
                  "1:       lwarx  %0,0,%1" MUTEX_HINT_REL "\n"                    \
                  "  subf.  %0,%2,%0\n"                              \
                  "  bne    2f\n"                                    \
                  "  stwcx. %3,0,%1\n"                               \
                  "  bne-   1b\n"                                    \
                  "2:       "                                               \
                  : "=&r" (__tmp)                                    \
                  : "b" (mem), "r" (oldval), "r" (newval)                   \
                  : "cr0", "memory");                                       \
  __tmp != 0;                                                        \
})
00074 
/* Powerpc32 processors don't implement the 64-bit (doubleword) forms of
   load and reserve (ldarx) and store conditional (stdcx.) instructions.
   So for powerpc32 we stub out the 64-bit forms.  */

/* Each stub evaluates abort () first, so none of these can ever
   "succeed" at runtime; the trailing expression exists only to give the
   expansion the type callers expect.  The bool/val forms are comma
   expressions (not GNU statement expressions) so they remain usable in
   any expression context.  */

/* Always aborts; nominal result type int (the CAS-failed flag).  */
#define __arch_compare_and_exchange_bool_64_acq(mem, newval, oldval) \
  (abort (), 0)

/* Always aborts; the cast gives the expansion the caller's value type.  */
#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
  (abort (), (__typeof (*mem)) 0)

#define __arch_compare_and_exchange_bool_64_rel(mem, newval, oldval) \
  (abort (), 0)

#define __arch_compare_and_exchange_val_64_rel(mem, newval, oldval) \
  (abort (), (__typeof (*mem)) 0)

/* The exchange/arith stubs use GNU statement expressions; the dead
   assignment/increment after abort () just types the expansion.  */
#define __arch_atomic_exchange_64_acq(mem, value) \
    ({ abort (); (*mem) = (value); })

#define __arch_atomic_exchange_64_rel(mem, value) \
    ({ abort (); (*mem) = (value); })

#define __arch_atomic_exchange_and_add_64(mem, value) \
    ({ abort (); (*mem) = (value); })

#define __arch_atomic_increment_val_64(mem) \
    ({ abort (); (*mem)++; })

#define __arch_atomic_decrement_val_64(mem) \
    ({ abort (); (*mem)--; })

#define __arch_atomic_decrement_if_positive_64(mem) \
    ({ abort (); (*mem)--; })
00107 
#ifdef _ARCH_PWR4
/*
 * Newer powerpc64 processors support the new "light weight" sync (lwsync)
 * So if the build is using -mcpu=[power4,power5,power5+,970] we can
 * safely use lwsync.
 */
/* lwsync orders load/load, load/store and store/store (everything except
   store/load), which is sufficient for a read barrier and cheaper than a
   full sync.  */
# define atomic_read_barrier()     __asm ("lwsync" ::: "memory")
/*
 * "light weight" sync can also be used for the release barrier.
 */
/* Only override for MP builds; UP builds presumably get a cheaper
   default from the common header -- confirm against
   sysdeps/powerpc/bits/atomic.h.  */
# ifndef UP
#  define __ARCH_REL_INSTR  "lwsync"
# endif
#else
/*
 * Older powerpc32 processors don't support the new "light weight"
 * sync (lwsync).  So the only safe option is to use normal sync
 * for all powerpc32 applications.
 */
/* __ARCH_REL_INSTR is deliberately left undefined here; the common
   powerpc atomic.h supplies its full-sync default.  */
# define atomic_read_barrier()     __asm ("sync" ::: "memory")
#endif
00129 
/*
 * Include the rest of the atomic ops macros which are common to both
 * powerpc32 and powerpc64.
 */
/* #include_next (a GNU extension) continues the search for
   <bits/atomic.h> AFTER the directory containing this header, picking up
   the shared powerpc version -- presumably sysdeps/powerpc/bits/atomic.h;
   confirm against the build's include order.  That header layers the
   generic atomic_* API on top of the 32-bit primitives defined above and
   supplies __ARCH_ACQ_INSTR / the default __ARCH_REL_INSTR.  */
#include_next <bits/atomic.h>