… | |
… | |
95 | |
95 | |
/* If the application promises it is single-threaded, it cannot observe
   SMP effects either, so imply ECB_NO_SMP. */
#if ECB_NO_THREADS
# define ECB_NO_SMP 1
#endif

/* Without SMP, no cross-CPU ordering is required, so the fence
   collapses to a no-op statement. */
#if ECB_NO_SMP
  #define ECB_MEMORY_FENCE do { } while (0)
#endif
103 | |
103 | |
/* Prefer C11 <stdatomic.h> fences when no fence has been chosen yet. */
#ifndef ECB_MEMORY_FENCE
  #if ECB_C11 && !defined __STDC_NO_ATOMICS__
    /* we assume that these memory fences work on all variables/all memory accesses, */
    /* not just C11 atomics and atomic accesses */
    #include <stdatomic.h>
    #if 0
    /* disabled: the precise acquire/release fences, kept for reference */
    #define ECB_MEMORY_FENCE         atomic_thread_fence (memory_order_acq_rel)
    #define ECB_MEMORY_FENCE_ACQUIRE atomic_thread_fence (memory_order_acquire)
    #define ECB_MEMORY_FENCE_RELEASE atomic_thread_fence (memory_order_release)
    #else
    /* the above *should* be enough in my book, but after experiences with gcc-4.7 */
    /* and clang, better play safe */
    #define ECB_MEMORY_FENCE         atomic_thread_fence (memory_order_seq_cst)
    #endif
  #endif
#endif
122 | |
120 | |
123 | #ifndef ECB_MEMORY_FENCE |
121 | #ifndef ECB_MEMORY_FENCE |
124 | #if ECB_GCC_VERSION(2,5) || defined __INTEL_COMPILER || (__llvm__ && __GNUC__) || __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
122 | #if ECB_GCC_VERSION(2,5) || defined __INTEL_COMPILER || (__llvm__ && __GNUC__) || __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
… | |
… | |
151 | #endif |
149 | #endif |
152 | #endif |
150 | #endif |
153 | #endif |
151 | #endif |
154 | |
152 | |
155 | #ifndef ECB_MEMORY_FENCE |
153 | #ifndef ECB_MEMORY_FENCE |
|
|
154 | #if ECB_GCC_VERSION(4,7) |
|
|
155 | /* unsolved mystery: ACQ_REL should be enough, but doesn't generate any code */ |
|
|
156 | /* which in turn actually breaks libev */ |
|
|
157 | #define ECB_MEMORY_FENCE __atomic_thread_fence (__ATOMIC_SEQ_CST) |
|
|
158 | #elif defined __clang && __has_feature (cxx_atomic) |
|
|
159 | /* see above */ |
|
|
160 | #define ECB_MEMORY_FENCE __c11_atomic_thread_fence (__ATOMIC_SEQ_CST) |
156 | #if ECB_GCC_VERSION(4,4) || defined __INTEL_COMPILER || defined __clang__ |
161 | #elif ECB_GCC_VERSION(4,4) || defined __INTEL_COMPILER || defined __clang__ |
157 | #define ECB_MEMORY_FENCE __sync_synchronize () |
162 | #define ECB_MEMORY_FENCE __sync_synchronize () |
158 | /*#define ECB_MEMORY_FENCE_ACQUIRE ({ char dummy = 0; __sync_lock_test_and_set (&dummy, 1); }) */ |
163 | /*#define ECB_MEMORY_FENCE_ACQUIRE ({ char dummy = 0; __sync_lock_test_and_set (&dummy, 1); }) */ |
159 | /*#define ECB_MEMORY_FENCE_RELEASE ({ char dummy = 1; __sync_lock_release (&dummy ); }) */ |
164 | /*#define ECB_MEMORY_FENCE_RELEASE ({ char dummy = 1; __sync_lock_release (&dummy ); }) */ |
160 | #elif _MSC_VER >= 1400 /* VC++ 2005 */ |
165 | #elif _MSC_VER >= 1400 /* VC++ 2005 */ |
161 | #pragma intrinsic(_ReadBarrier,_WriteBarrier,_ReadWriteBarrier) |
166 | #pragma intrinsic(_ReadBarrier,_WriteBarrier,_ReadWriteBarrier) |