00001
00002 #ifndef __DEFAULT_PPCQ_H__
00003 #define __DEFAULT_PPCQ_H__
00004
#include "pami.h"
#include <stdlib.h>
00006
00008
00009
00010
00011
00012
00013
00015
/* Sentinel returned by PPC_AtomicLoadIncrementBounded when the counter is
 * full (incrementing would exceed the bound stored alongside it).  Uses the
 * top bit so it cannot collide with any valid small counter value. */
#define CMI_PPC_ATOMIC_FAIL 0x8000000000000000UL

typedef uint64_t ppc_atomic_type_t;

/* One atomic counter word, padded to 64 bytes (8 + 56) so each counter
 * occupies its own cache line and adjacent counters do not false-share.
 * Matches the 64-byte alignment requested in PPC_AtomicCounterAllocate. */
typedef struct _ppc_atomic_t {
volatile uint64_t val;
char _pad[56];
} ppc_atomic_t;

/* Plain (non-atomic) accessor for the counter value. */
#define PPC_AQVal(x) ((x).val)
00026
/* Allocate atomic_memsize bytes of 64-byte-aligned storage for atomic
 * counters and return it through *atomic_mem.  The 64-byte alignment puts
 * each ppc_atomic_t (which is padded to 64 bytes) on its own cache line.
 *
 * On allocation failure *atomic_mem is set to NULL so the caller can
 * detect it; the original code ignored posix_memalign's return value and
 * left *atomic_mem indeterminate (posix_memalign does not touch the
 * output pointer on error). */
static inline void PPC_AtomicCounterAllocate (void **atomic_mem,
                                              size_t atomic_memsize)
{
  if (posix_memalign(atomic_mem, 64, atomic_memsize) != 0)
    *atomic_mem = NULL;
}
00032
00033
/* Load ptr->val with a reservation (PowerPC ldarx, load-linked).
 * Begins an LL/SC sequence: pair with PPC_AtomicStoreConditional, whose
 * stdcx. succeeds only if the reservation acquired here is still held.
 * Returns the value of ptr->val at the time of the load. */
static inline ppc_atomic_type_t PPC_AtomicLoadReserved ( volatile ppc_atomic_t *ptr )
{
ppc_atomic_type_t val;
__asm__ __volatile__ ("ldarx %[val],0,%[ptr]"
: [val] "=r" (val)
: [ptr] "r" (&ptr->val)
: "cc");

return( val );
}
00044
/* Store-conditional (PowerPC stdcx.) completing an LL/SC sequence started
 * by PPC_AtomicLoadReserved.
 *
 * Returns 1 if the store succeeded (reservation still held: stdcx. sets
 * CR0.EQ, the beq skips the "li rc,0"), or 0 if the reservation was lost
 * and the store did not happen — in which case the caller must retry the
 * whole load-reserved/modify/store-conditional loop. */
static inline int PPC_AtomicStoreConditional( volatile ppc_atomic_t *ptr, ppc_atomic_type_t val )
{
int rc = 1;
__asm__ __volatile__ ("stdcx. %[val],0,%[ptr];\n"
"beq 1f;\n"
"li %[rc], 0;\n"
"1: ;\n"
: [rc] "=r" (rc)
: [ptr] "r" (&ptr->val), [val] "r" (val), "0" (rc)
: "cc", "memory");
return( rc );
}
00057
/* Atomically increment counter[0].val, bounded by counter[1].val.
 *
 * `counter` must point at (at least) TWO consecutive ppc_atomic_t slots:
 * counter[0] holds the value being incremented, counter[1] holds the
 * inclusive upper bound (this layout is established by the
 * `bound = counter[1].val` read below — confirm against the allocator).
 *
 * Returns the pre-increment value on success, or CMI_PPC_ATOMIC_FAIL if
 * the incremented value would exceed the bound (counter left unchanged).
 *
 * NOTE(review): the bound is read once, before the LL/SC retry loop, so a
 * concurrent change to counter[1] during the loop is not observed.  The
 * bound check sits between ldarx and stdcx., so a successful store proves
 * the check was done against the value actually replaced. */
static inline ppc_atomic_type_t PPC_AtomicLoadIncrementBounded (volatile ppc_atomic_t *counter)
{
ppc_atomic_type_t old_val, tmp_val, bound;
bound = counter[1].val;
do
{
old_val = PPC_AtomicLoadReserved( counter );
tmp_val = old_val + 1;

/* Would overflow the bound: give up without storing (the reservation
 * is simply abandoned; no cleanup needed). */
if (tmp_val > bound)
return CMI_PPC_ATOMIC_FAIL;
}
while ( !PPC_AtomicStoreConditional( counter, tmp_val ) );

return( old_val );
}
00074
/* Plain volatile store of val into the counter.  No LL/SC and no memory
 * barrier is issued here — presumably callers that need ordering pair this
 * with PPC_AtomicWriteFence; verify at the call sites. */
static inline void PPC_AtomicStore(volatile ppc_atomic_t *counter, ppc_atomic_type_t val)
{


counter->val = val;
}
00081
/* Consumer-side fence.  On non-BGQ PowerPC this issues `isync`; on
 * Blue Gene/Q it is a no-op because, per the original comment, the
 * producer already executes a full memory barrier (see
 * PPC_AtomicWriteFence, which emits `sync` when CMK_BLUEGENEQ). */
static inline void PPC_AtomicReadFence(void)
{
#if !CMK_BLUEGENEQ //full memory barrier executed on Producer
__asm__ __volatile__ ("isync":::"memory");
#endif
}
00088
/* Producer-side fence.  On Blue Gene/Q a full barrier (`sync`) is used —
 * which is why PPC_AtomicReadFence can be a no-op there; on other PowerPC
 * targets the lighter store-ordering `lwsync` suffices. */
static inline void PPC_AtomicWriteFence(void)
{
#if CMK_BLUEGENEQ //execute full memory barrier
__asm__ __volatile__ ("sync":::"memory");
#else
__asm__ __volatile__ ("lwsync":::"memory");
#endif
}
00097
00098 #endif