#if __cplusplus >= 201103L && \
    (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7))
using std::memory_order_relaxed;
using std::memory_order_acquire;
using std::memory_order_release;
using std::memory_order_seq_cst;
using std::atomic_thread_fence;
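The using-declarations above sit behind the C++11 / GCC 4.7 check; when that check fails, the same names have to come from somewhere else. Here is a sketch of the overall shape, where everything in the #else branch is an assumption rather than code from the listing (though the load/store signatures later on suggest a hand-rolled memory_order of roughly this kind):

// Sketch only: the fallback branch is assumed, not taken from the source.
#if __cplusplus >= 201103L && \
    (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7))
  #include <atomic>
  using std::memory_order;
  using std::memory_order_relaxed;
  using std::memory_order_acquire;
  using std::memory_order_release;
  using std::memory_order_seq_cst;
  using std::atomic_thread_fence;
#else
  // Pre-C++11 stand-ins so the rest of the header can use the same names.
  enum memory_order {
    memory_order_relaxed,
    memory_order_acquire,
    memory_order_release,
    memory_order_seq_cst
  };
  void atomic_thread_fence(memory_order order);  // backed by the asm fences below
#endif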
asm volatile("lwsync" : : : "memory");
#elif defined(_ARCH_PPC)
asm volatile("sync" : : : "memory");
#elif defined(__ARM_ARCH_7A__)
asm volatile("dmb" : : : "memory");
// ...
asm volatile("" : : : "memory");
asm volatile("sync" : : : "memory");
#elif defined(__i386__)
asm volatile("lock; addl $0,0(%%esp)" : : : "memory");
#elif defined(__x86_64__)
asm volatile("mfence" : : : "memory");
#elif defined(__ARM_ARCH_7A__)
asm volatile("dmb" : : : "memory");
// ...
asm volatile("" : : : "memory");
164 "lwz%U1%X1 %0,%1\n\t"
int load(memory_order order=memory_order_seq_cst) const volatile
void store(int m, memory_order order=memory_order_seq_cst) volatile
void atomic_thread_fence(memory_order order)
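Taken together these signatures mirror the C++11 interface: a release store publishes data, an acquire load consumes it, and atomic_thread_fence provides standalone barriers. A hypothetical use, assuming an atomic integer type (called atomic_int here) that exposes the load/store members shown above:

// Illustrative only; atomic_int, g_ready and g_payload are assumed names.
atomic_int g_ready;        // assumed to start at 0
int        g_payload;

void producer() {
  g_payload = 42;                            // ordinary write
  g_ready.store(1, memory_order_release);    // publish: payload becomes visible first
}

void consumer() {
  while (g_ready.load(memory_order_acquire) == 0) {
    // spin until the producer publishes
  }
  // The acquire load pairs with the release store, so g_payload reads 42 here.
}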