… | |
… | |
 * or so.
 * we try to detect these and simply assume they are not gcc - if they have
 * an issue with that they should have done it right in the first place.
 */
#ifndef ECB_GCC_VERSION
/* compilers defining __GNUC__ without __GNUC_MINOR__, and known gcc
 * impostors (icc, sunpro, llvm/clang), are treated as "not gcc" */
#if !defined __GNUC_MINOR__ || defined __INTEL_COMPILER || defined __SUNPRO_C || defined __SUNPRO_CC || defined __llvm__ || defined __clang__
#define ECB_GCC_VERSION(major,minor) 0
#else
/* true iff the real gcc version is >= major.minor */
#define ECB_GCC_VERSION(major,minor) (__GNUC__ > (major) || (__GNUC__ == (major) && __GNUC_MINOR__ >= (minor)))
#endif
#endif
… | |
… | |
/*****************************************************************************/

/* ECB_NO_THREADS - ecb is not used by multiple threads, ever */
/* ECB_NO_SMP - ecb might be used in multiple threads, but only on a single cpu */

|
|
/* a process confined to one thread can never race with itself */
#if ECB_NO_THREADS
# define ECB_NO_SMP 1
#endif

/* single-cpu (or single-thread) operation needs no hardware fence at all */
#if ECB_NO_THREADS || ECB_NO_SMP
#define ECB_MEMORY_FENCE do { } while (0)
#endif

75 | #ifndef ECB_MEMORY_FENCE |
79 | #ifndef ECB_MEMORY_FENCE |
76 | #if ECB_GCC_VERSION(2,5) || defined(__INTEL_COMPILER) || defined(__clang__) || __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
80 | #if ECB_GCC_VERSION(2,5) || defined __INTEL_COMPILER || (__llvm__ && __GNUC__) || __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
77 | #if __i386 || __i386__ |
81 | #if __i386 || __i386__ |
78 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("lock; orb $0, -1(%%esp)" : : : "memory") |
82 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("lock; orb $0, -1(%%esp)" : : : "memory") |
79 | #define ECB_MEMORY_FENCE_ACQUIRE ECB_MEMORY_FENCE /* non-lock xchg might be enough */ |
83 | #define ECB_MEMORY_FENCE_ACQUIRE ECB_MEMORY_FENCE /* non-lock xchg might be enough */ |
80 | #define ECB_MEMORY_FENCE_RELEASE do { } while (0) /* unlikely to change in future cpus */ |
84 | #define ECB_MEMORY_FENCE_RELEASE do { } while (0) /* unlikely to change in future cpus */ |
81 | #elif __amd64 || __amd64__ || __x86_64 || __x86_64__ |
85 | #elif __amd64 || __amd64__ || __x86_64 || __x86_64__ |
82 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mfence" : : : "memory") |
86 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mfence" : : : "memory") |
83 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("lfence" : : : "memory") |
87 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("lfence" : : : "memory") |
84 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("sfence") /* play safe - not needed in any current cpu */ |
88 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("sfence") /* play safe - not needed in any current cpu */ |
85 | #elif __powerpc__ || __ppc__ || __powerpc64__ || __ppc64__ |
89 | #elif __powerpc__ || __ppc__ || __powerpc64__ || __ppc64__ |
86 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("sync" : : : "memory") |
90 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("sync" : : : "memory") |
87 | #elif defined(__ARM_ARCH_6__ ) || defined(__ARM_ARCH_6J__ ) \ |
91 | #elif defined __ARM_ARCH_6__ || defined __ARM_ARCH_6J__ \ |
88 | || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6ZK__) |
92 | || defined __ARM_ARCH_6K__ || defined __ARM_ARCH_6ZK__ |
89 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mcr p15,0,%0,c7,c10,5" : : "r" (0) : "memory") |
93 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mcr p15,0,%0,c7,c10,5" : : "r" (0) : "memory") |
90 | #elif defined(__ARM_ARCH_7__ ) || defined(__ARM_ARCH_7A__ ) \ |
94 | #elif defined __ARM_ARCH_7__ || defined __ARM_ARCH_7A__ \ |
91 | || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7R__ ) |
95 | || defined __ARM_ARCH_7M__ || defined __ARM_ARCH_7R__ |
92 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("dmb" : : : "memory") |
96 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("dmb" : : : "memory") |
93 | #elif __sparc || __sparc__ |
97 | #elif __sparc || __sparc__ |
94 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("membar #LoadStore | #LoadLoad | #StoreStore | #StoreLoad | " : : : "memory") |
98 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("membar #LoadStore | #LoadLoad | #StoreStore | #StoreLoad | " : : : "memory") |
95 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("membar #LoadStore | #LoadLoad" : : : "memory") |
99 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("membar #LoadStore | #LoadLoad" : : : "memory") |
96 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("membar #LoadStore | #StoreStore") |
100 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("membar #LoadStore | #StoreStore") |
|
|
101 | #elif defined __s390__ || defined __s390x__ |
|
|
102 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("bcr 15,0" : : : "memory") |
|
|
103 | #elif defined __mips__ |
|
|
104 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("sync" : : : "memory") |
97 | #endif |
105 | #endif |
98 | #endif |
106 | #endif |
99 | #endif |
107 | #endif |
100 | |
108 | |
101 | #ifndef ECB_MEMORY_FENCE |
109 | #ifndef ECB_MEMORY_FENCE |
102 | #if ECB_GCC_VERSION(4,4) || defined(__INTEL_COMPILER) || defined(__clang__) |
110 | #if ECB_GCC_VERSION(4,4) || defined __INTEL_COMPILER || defined __clang__ |
103 | #define ECB_MEMORY_FENCE __sync_synchronize () |
111 | #define ECB_MEMORY_FENCE __sync_synchronize () |
104 | /*#define ECB_MEMORY_FENCE_ACQUIRE ({ char dummy = 0; __sync_lock_test_and_set (&dummy, 1); }) */ |
112 | /*#define ECB_MEMORY_FENCE_ACQUIRE ({ char dummy = 0; __sync_lock_test_and_set (&dummy, 1); }) */ |
105 | /*#define ECB_MEMORY_FENCE_RELEASE ({ char dummy = 1; __sync_lock_release (&dummy ); }) */ |
113 | /*#define ECB_MEMORY_FENCE_RELEASE ({ char dummy = 1; __sync_lock_release (&dummy ); }) */ |
106 | #elif _MSC_VER >= 1400 /* VC++ 2005 */ |
114 | #elif _MSC_VER >= 1400 /* VC++ 2005 */ |
107 | #pragma intrinsic(_ReadBarrier,_WriteBarrier,_ReadWriteBarrier) |
115 | #pragma intrinsic(_ReadBarrier,_WriteBarrier,_ReadWriteBarrier) |
108 | #define ECB_MEMORY_FENCE _ReadWriteBarrier () |
116 | #define ECB_MEMORY_FENCE _ReadWriteBarrier () |
109 | #define ECB_MEMORY_FENCE_ACQUIRE _ReadWriteBarrier () /* according to msdn, _ReadBarrier is not a load fence */ |
117 | #define ECB_MEMORY_FENCE_ACQUIRE _ReadWriteBarrier () /* according to msdn, _ReadBarrier is not a load fence */ |
110 | #define ECB_MEMORY_FENCE_RELEASE _WriteBarrier () |
118 | #define ECB_MEMORY_FENCE_RELEASE _WriteBarrier () |
111 | #elif defined(_WIN32) |
119 | #elif defined _WIN32 |
112 | #include <WinNT.h> |
120 | #include <WinNT.h> |
113 | #define ECB_MEMORY_FENCE MemoryBarrier () /* actually just xchg on x86... scary */ |
121 | #define ECB_MEMORY_FENCE MemoryBarrier () /* actually just xchg on x86... scary */ |
114 | #elif __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
122 | #elif __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
115 | #include <mbarrier.h> |
123 | #include <mbarrier.h> |
116 | #define ECB_MEMORY_FENCE __machine_rw_barrier () |
124 | #define ECB_MEMORY_FENCE __machine_rw_barrier () |
117 | #define ECB_MEMORY_FENCE_ACQUIRE __machine_r_barrier () |
125 | #define ECB_MEMORY_FENCE_ACQUIRE __machine_r_barrier () |
118 | #define ECB_MEMORY_FENCE_RELEASE __machine_w_barrier () |
126 | #define ECB_MEMORY_FENCE_RELEASE __machine_w_barrier () |
|
|
127 | #elif __xlC__ |
|
|
128 | #define ECB_MEMORY_FENCE __sync () |
119 | #endif |
129 | #endif |
120 | #endif |
130 | #endif |
121 | |
131 | |
/* last resort: fake a fence with a pthread mutex lock/unlock pair.
 * NOTE(review): the middle of this region was unreadable in the extract;
 * the explanatory comment and the pthread.h include were reconstructed -
 * the include is required by the pthread_mutex_t below, verify the rest
 * against upstream. if you get undefined references to pthread_mutex_lock,
 * define ECB_AVOID_PTHREADS and supply a cpu/compiler fence above instead. */
#ifndef ECB_MEMORY_FENCE
#if !ECB_AVOID_PTHREADS
#include <pthread.h>

static pthread_mutex_t ecb_mf_lock = PTHREAD_MUTEX_INITIALIZER;
#define ECB_MEMORY_FENCE do { pthread_mutex_lock (&ecb_mf_lock); pthread_mutex_unlock (&ecb_mf_lock); } while (0)
#endif
#endif

/* default the acquire/release variants to the full fence when no
 * cheaper specialised version was defined above */
#if !defined ECB_MEMORY_FENCE_ACQUIRE && defined ECB_MEMORY_FENCE
#define ECB_MEMORY_FENCE_ACQUIRE ECB_MEMORY_FENCE
#endif

#if !defined ECB_MEMORY_FENCE_RELEASE && defined ECB_MEMORY_FENCE
#define ECB_MEMORY_FENCE_RELEASE ECB_MEMORY_FENCE
#endif

/*****************************************************************************/
