… | |
… | |
 * or so.
 * we try to detect these and simply assume they are not gcc - if they have
 * an issue with that they should have done it right in the first place.
 */
/* ECB_GCC_VERSION(major,minor) expands to a true value when the compiler is
 * gcc of at least the given version, and to 0 otherwise.  Compilers that only
 * impersonate gcc (icc, sunpro, llvm/clang) define __GNUC__/__GNUC_MINOR__
 * without matching real gcc behaviour, so they are explicitly excluded here
 * (see the comment above: "assume they are not gcc"). */
#ifndef ECB_GCC_VERSION
#if !defined __GNUC_MINOR__ || defined __INTEL_COMPILER || defined __SUNPRO_C || defined __SUNPRO_CC || defined __llvm__ || defined __clang__
/* not real gcc - never claim any gcc version */
#define ECB_GCC_VERSION(major,minor) 0
#else
/* true when (__GNUC__, __GNUC_MINOR__) >= (major, minor) lexicographically */
#define ECB_GCC_VERSION(major,minor) (__GNUC__ > (major) || (__GNUC__ == (major) && __GNUC_MINOR__ >= (minor)))
#endif
#endif
… | |
… | |
/* When the user compiles with ECB_NO_THREADS or ECB_NO_SMP, no cross-CPU
 * ordering is needed, so every memory fence degrades to a no-op statement. */
#if ECB_NO_THREADS || ECB_NO_SMP
#define ECB_MEMORY_FENCE do { } while (0)
#endif
557 | |
557 | |
558 | #ifndef ECB_MEMORY_FENCE |
558 | #ifndef ECB_MEMORY_FENCE |
559 | #if ECB_GCC_VERSION(2,5) || defined(__INTEL_COMPILER) || (__llvm__ && __GNUC__) || __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
559 | #if ECB_GCC_VERSION(2,5) || defined __INTEL_COMPILER || (__llvm__ && __GNUC__) || __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
560 | #if __i386 || __i386__ |
560 | #if __i386 || __i386__ |
561 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("lock; orb $0, -1(%%esp)" : : : "memory") |
561 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("lock; orb $0, -1(%%esp)" : : : "memory") |
562 | #define ECB_MEMORY_FENCE_ACQUIRE ECB_MEMORY_FENCE /* non-lock xchg might be enough */ |
562 | #define ECB_MEMORY_FENCE_ACQUIRE ECB_MEMORY_FENCE /* non-lock xchg might be enough */ |
563 | #define ECB_MEMORY_FENCE_RELEASE do { } while (0) /* unlikely to change in future cpus */ |
563 | #define ECB_MEMORY_FENCE_RELEASE do { } while (0) /* unlikely to change in future cpus */ |
564 | #elif __amd64 || __amd64__ || __x86_64 || __x86_64__ |
564 | #elif __amd64 || __amd64__ || __x86_64 || __x86_64__ |
565 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mfence" : : : "memory") |
565 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mfence" : : : "memory") |
566 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("lfence" : : : "memory") |
566 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("lfence" : : : "memory") |
567 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("sfence") /* play safe - not needed in any current cpu */ |
567 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("sfence") /* play safe - not needed in any current cpu */ |
568 | #elif __powerpc__ || __ppc__ || __powerpc64__ || __ppc64__ |
568 | #elif __powerpc__ || __ppc__ || __powerpc64__ || __ppc64__ |
569 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("sync" : : : "memory") |
569 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("sync" : : : "memory") |
570 | #elif defined(__ARM_ARCH_6__ ) || defined(__ARM_ARCH_6J__ ) \ |
570 | #elif defined __ARM_ARCH_6__ || defined __ARM_ARCH_6J__ \ |
571 | || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6ZK__) |
571 | || defined __ARM_ARCH_6K__ || defined __ARM_ARCH_6ZK__ |
572 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mcr p15,0,%0,c7,c10,5" : : "r" (0) : "memory") |
572 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mcr p15,0,%0,c7,c10,5" : : "r" (0) : "memory") |
573 | #elif defined(__ARM_ARCH_7__ ) || defined(__ARM_ARCH_7A__ ) \ |
573 | #elif defined __ARM_ARCH_7__ || defined __ARM_ARCH_7A__ \ |
574 | || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7R__ ) |
574 | || defined __ARM_ARCH_7M__ || defined __ARM_ARCH_7R__ |
575 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("dmb" : : : "memory") |
575 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("dmb" : : : "memory") |
576 | #elif __sparc || __sparc__ |
576 | #elif __sparc || __sparc__ |
577 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("membar #LoadStore | #LoadLoad | #StoreStore | #StoreLoad | " : : : "memory") |
577 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("membar #LoadStore | #LoadLoad | #StoreStore | #StoreLoad | " : : : "memory") |
578 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("membar #LoadStore | #LoadLoad" : : : "memory") |
578 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("membar #LoadStore | #LoadLoad" : : : "memory") |
579 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("membar #LoadStore | #StoreStore") |
579 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("membar #LoadStore | #StoreStore") |
580 | #elif defined(__s390__) || defined(__s390x__) |
580 | #elif defined __s390__ || defined __s390x__ |
581 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("bcr 15,0" : : : "memory") |
581 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("bcr 15,0" : : : "memory") |
582 | #elif defined(__mips__) |
582 | #elif defined __mips__ |
583 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("sync" : : : "memory") |
583 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("sync" : : : "memory") |
584 | #endif |
584 | #endif |
585 | #endif |
585 | #endif |
586 | #endif |
586 | #endif |
587 | |
587 | |
/* Fallback fences when no inline-asm variant matched above: compiler
 * intrinsics (__sync_synchronize, MSVC barriers) or OS/vendor headers.
 * NOTE(review): this #ifndef/#elif chain continues past the visible region;
 * its closing #endifs appear after the elided lines below. */
588 | #ifndef ECB_MEMORY_FENCE |
588 | #ifndef ECB_MEMORY_FENCE |
589 | #if ECB_GCC_VERSION(4,4) || defined(__INTEL_COMPILER) || defined(__clang__) |
589 | #if ECB_GCC_VERSION(4,4) || defined __INTEL_COMPILER || defined __clang__ |
/* gcc 4.4+/icc/clang: full fence via the legacy __sync builtin */
590 | #define ECB_MEMORY_FENCE __sync_synchronize () |
590 | #define ECB_MEMORY_FENCE __sync_synchronize () |
591 | /*#define ECB_MEMORY_FENCE_ACQUIRE ({ char dummy = 0; __sync_lock_test_and_set (&dummy, 1); }) */ |
591 | /*#define ECB_MEMORY_FENCE_ACQUIRE ({ char dummy = 0; __sync_lock_test_and_set (&dummy, 1); }) */ |
592 | /*#define ECB_MEMORY_FENCE_RELEASE ({ char dummy = 1; __sync_lock_release (&dummy ); }) */ |
592 | /*#define ECB_MEMORY_FENCE_RELEASE ({ char dummy = 1; __sync_lock_release (&dummy ); }) */ |
593 | #elif _MSC_VER >= 1400 /* VC++ 2005 */ |
593 | #elif _MSC_VER >= 1400 /* VC++ 2005 */ |
/* MSVC compiler-level barriers; these do not emit hardware fences */
594 | #pragma intrinsic(_ReadBarrier,_WriteBarrier,_ReadWriteBarrier) |
594 | #pragma intrinsic(_ReadBarrier,_WriteBarrier,_ReadWriteBarrier) |
595 | #define ECB_MEMORY_FENCE _ReadWriteBarrier () |
595 | #define ECB_MEMORY_FENCE _ReadWriteBarrier () |
596 | #define ECB_MEMORY_FENCE_ACQUIRE _ReadWriteBarrier () /* according to msdn, _ReadBarrier is not a load fence */ |
596 | #define ECB_MEMORY_FENCE_ACQUIRE _ReadWriteBarrier () /* according to msdn, _ReadBarrier is not a load fence */ |
597 | #define ECB_MEMORY_FENCE_RELEASE _WriteBarrier () |
597 | #define ECB_MEMORY_FENCE_RELEASE _WriteBarrier () |
598 | #elif defined(_WIN32) |
598 | #elif defined _WIN32 |
599 | #include <WinNT.h> |
599 | #include <WinNT.h> |
600 | #define ECB_MEMORY_FENCE MemoryBarrier () /* actually just xchg on x86... scary */ |
600 | #define ECB_MEMORY_FENCE MemoryBarrier () /* actually just xchg on x86... scary */ |
601 | #elif __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
601 | #elif __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
/* sunpro 12.2+: mbarrier.h provides machine barrier intrinsics */
602 | #include <mbarrier.h> |
602 | #include <mbarrier.h> |
603 | #define ECB_MEMORY_FENCE __machine_rw_barrier () |
603 | #define ECB_MEMORY_FENCE __machine_rw_barrier () |
… | |
… | |
/* Last-resort fence: emulate a full barrier by locking and immediately
 * unlocking a pthread mutex - slow, but pthread lock/unlock are required
 * to act as memory barriers.  Closes the fallback chain opened above. */
624 | static pthread_mutex_t ecb_mf_lock = PTHREAD_MUTEX_INITIALIZER; |
624 | static pthread_mutex_t ecb_mf_lock = PTHREAD_MUTEX_INITIALIZER; |
625 | #define ECB_MEMORY_FENCE do { pthread_mutex_lock (&ecb_mf_lock); pthread_mutex_unlock (&ecb_mf_lock); } while (0) |
625 | #define ECB_MEMORY_FENCE do { pthread_mutex_lock (&ecb_mf_lock); pthread_mutex_unlock (&ecb_mf_lock); } while (0) |
626 | #endif |
626 | #endif |
627 | #endif |
627 | #endif |
628 | |
628 | |
/* If the platform section above supplied only a full fence, use it for the
 * acquire barrier as well (a full fence is always a valid acquire fence). */
#if !defined ECB_MEMORY_FENCE_ACQUIRE && defined ECB_MEMORY_FENCE
#define ECB_MEMORY_FENCE_ACQUIRE ECB_MEMORY_FENCE
#endif
632 | |
632 | |
/* Likewise, default the release barrier to the full fence when the platform
 * section did not provide a cheaper one. */
#if !defined ECB_MEMORY_FENCE_RELEASE && defined ECB_MEMORY_FENCE
#define ECB_MEMORY_FENCE_RELEASE ECB_MEMORY_FENCE
#endif
636 | |
636 | |
/*****************************************************************************/
