… | |
… | |
74 | #else |
74 | #else |
75 | #define ECB_PTRSIZE 4 |
75 | #define ECB_PTRSIZE 4 |
76 | #endif |
76 | #endif |
77 | #endif |
77 | #endif |
78 | |
78 | |
|
|
79 | #define ECB_GCC_AMD64 (__amd64 || __amd64__ || __x86_64 || __x86_64__) |
|
|
80 | #define ECB_MSVC_AMD64 (_M_AMD64 || _M_X64) |
|
|
81 | |
79 | /* work around x32 idiocy by defining proper macros */ |
82 | /* work around x32 idiocy by defining proper macros */ |
80 | #if __amd64 || __x86_64 || _M_AMD64 || _M_X64 |
83 | #if ECB_GCC_AMD64 || ECB_MSVC_AMD64 |
81 | #if _ILP32 |
84 | #if _ILP32 |
82 | #define ECB_AMD64_X32 1 |
85 | #define ECB_AMD64_X32 1 |
83 | #else |
86 | #else |
84 | #define ECB_AMD64 1 |
87 | #define ECB_AMD64 1 |
85 | #endif |
88 | #endif |
… | |
… | |
153 | #if ECB_GCC_VERSION(2,5) || defined __INTEL_COMPILER || (__llvm__ && __GNUC__) || __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
156 | #if ECB_GCC_VERSION(2,5) || defined __INTEL_COMPILER || (__llvm__ && __GNUC__) || __SUNPRO_C >= 0x5110 || __SUNPRO_CC >= 0x5110 |
154 | #if __i386 || __i386__ |
157 | #if __i386 || __i386__ |
155 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("lock; orb $0, -1(%%esp)" : : : "memory") |
158 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("lock; orb $0, -1(%%esp)" : : : "memory") |
156 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("" : : : "memory") |
159 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("" : : : "memory") |
157 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("") |
160 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("") |
158 | #elif __amd64 || __amd64__ || __x86_64 || __x86_64__ |
161 | #elif ECB_GCC_AMD64 |
159 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mfence" : : : "memory") |
162 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("mfence" : : : "memory") |
160 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("" : : : "memory") |
163 | #define ECB_MEMORY_FENCE_ACQUIRE __asm__ __volatile__ ("" : : : "memory") |
161 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("") |
164 | #define ECB_MEMORY_FENCE_RELEASE __asm__ __volatile__ ("") |
162 | #elif __powerpc__ || __ppc__ || __powerpc64__ || __ppc64__ |
165 | #elif __powerpc__ || __ppc__ || __powerpc64__ || __ppc64__ |
163 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("sync" : : : "memory") |
166 | #define ECB_MEMORY_FENCE __asm__ __volatile__ ("sync" : : : "memory") |
… | |
… | |
349 | #define ecb_deprecated __declspec (deprecated) |
352 | #define ecb_deprecated __declspec (deprecated) |
350 | #else |
353 | #else |
351 | #define ecb_deprecated ecb_attribute ((__deprecated__)) |
354 | #define ecb_deprecated ecb_attribute ((__deprecated__)) |
352 | #endif |
355 | #endif |
353 | |
356 | |
354 | #if __MSC_VER >= 1500 |
357 | #if _MSC_VER >= 1500 |
355 | #define ecb_deprecated_message(msg) __declspec (deprecated (msg)) |
358 | #define ecb_deprecated_message(msg) __declspec (deprecated (msg)) |
356 | #elif ECB_GCC_VERSION(4,5) |
359 | #elif ECB_GCC_VERSION(4,5) |
357 | #define ecb_deprecated_message(msg) ecb_attribute ((__deprecated__ (msg))) |
360 | #define ecb_deprecated_message(msg) ecb_attribute ((__deprecated__ (msg))) |
358 | #else |
361 | #else |
359 | #define ecb_deprecated_message(msg) ecb_deprecated |
362 | #define ecb_deprecated_message(msg) ecb_deprecated |
… | |
… | |
591 | /* but less than using pointers, and always seems to */ |
594 | /* but less than using pointers, and always seems to */ |
592 | /* successfully return a constant. */ |
595 | /* successfully return a constant. */ |
593 | /* the reason why we have this horrible preprocessor mess */ |
596 | /* the reason why we have this horrible preprocessor mess */ |
594 | /* is to avoid it in all cases, at least on common architectures */ |
597 | /* is to avoid it in all cases, at least on common architectures */ |
595 | /* or when using a recent enough gcc version (>= 4.6) */ |
598 | /* or when using a recent enough gcc version (>= 4.6) */ |
596 | #if ((__i386 || __i386__) && !__VOS__) || _M_X86 || __amd64 || __amd64__ || _M_X64 |
599 | #if ((__i386 || __i386__) && !__VOS__) || _M_IX86 || ECB_GCC_AMD64 || ECB_MSVC_AMD64 |
597 | return 0x44; |
600 | return 0x44; |
598 | #elif __BYTE_ORDER__ && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ |
601 | #elif __BYTE_ORDER__ && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ |
599 | return 0x44; |
602 | return 0x44; |
600 | #elif __BYTE_ORDER__ && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ |
603 | #elif __BYTE_ORDER__ && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ |
601 | return 0x11; |
604 | return 0x11; |
… | |
… | |
652 | |
655 | |
653 | /* basically, everything uses "ieee pure-endian" floating point numbers */ |
656 | /* basically, everything uses "ieee pure-endian" floating point numbers */ |
654 | /* the only noteworthy exception is ancient armle, which uses order 43218765 */ |
657 | /* the only noteworthy exception is ancient armle, which uses order 43218765 */ |
655 | #if 0 \ |
658 | #if 0 \ |
656 | || __i386 || __i386__ \ |
659 | || __i386 || __i386__ \ |
657 | || __amd64 || __amd64__ || __x86_64 || __x86_64__ \ |
660 | || ECB_GCC_AMD64 \ |
658 | || __powerpc__ || __ppc__ || __powerpc64__ || __ppc64__ \ |
661 | || __powerpc__ || __ppc__ || __powerpc64__ || __ppc64__ \ |
659 | || defined __s390__ || defined __s390x__ \ |
662 | || defined __s390__ || defined __s390x__ \ |
660 | || defined __mips__ \ |
663 | || defined __mips__ \ |
661 | || defined __alpha__ \ |
664 | || defined __alpha__ \ |
662 | || defined __hppa__ \ |
665 | || defined __hppa__ \ |
663 | || defined __ia64__ \ |
666 | || defined __ia64__ \ |
664 | || defined __m68k__ \ |
667 | || defined __m68k__ \ |
665 | || defined __m88k__ \ |
668 | || defined __m88k__ \ |
666 | || defined __sh__ \ |
669 | || defined __sh__ \ |
667 | || defined _M_IX86 || defined _M_AMD64 || defined _M_IA64 \ |
670 |   || defined _M_IX86 || ECB_MSVC_AMD64 || defined _M_IA64 \ |
668 | || (defined __arm__ && (defined __ARM_EABI__ || defined __EABI__ || defined __VFP_FP__ || defined _WIN32_WCE || defined __ANDROID__)) \ |
671 | || (defined __arm__ && (defined __ARM_EABI__ || defined __EABI__ || defined __VFP_FP__ || defined _WIN32_WCE || defined __ANDROID__)) \ |
669 | || defined __aarch64__ |
672 | || defined __aarch64__ |
670 | #define ECB_STDFP 1 |
673 | #define ECB_STDFP 1 |
671 | #include <string.h> /* for memcpy */ |
674 | #include <string.h> /* for memcpy */ |
672 | #else |
675 | #else |
… | |
… | |
690 | #define ECB_NAN ECB_INFINITY |
693 | #define ECB_NAN ECB_INFINITY |
691 | #endif |
694 | #endif |
692 | |
695 | |
693 | #if ECB_C99 || _XOPEN_VERSION >= 600 || _POSIX_VERSION >= 200112L |
696 | #if ECB_C99 || _XOPEN_VERSION >= 600 || _POSIX_VERSION >= 200112L |
694 | #define ecb_ldexpf(x,e) ldexpf ((x), (e)) |
697 | #define ecb_ldexpf(x,e) ldexpf ((x), (e)) |
|
|
698 | #define ecb_frexpf(x,e) frexpf ((x), (e)) |
695 | #else |
699 | #else |
696 | #define ecb_ldexpf(x,e) (float) ldexp ((x), (e)) |
700 | #define ecb_ldexpf(x,e) (float) ldexp ((double) (x), (e)) |
|
|
701 | #define ecb_frexpf(x,e) (float) frexp ((double) (x), (e)) |
697 | #endif |
702 | #endif |
698 | |
703 | |
699 | /* converts an ieee half/binary16 to a float */ |
704 | /* converts an ieee half/binary16 to a float */ |
700 | ecb_function_ ecb_const float ecb_binary16_to_float (uint16_t x); |
705 | ecb_function_ ecb_const float ecb_binary16_to_float (uint16_t x); |
701 | ecb_function_ ecb_const float |
706 | ecb_function_ ecb_const float |
… | |
… | |
730 | if (x == 0e0f ) return 0x00000000U; |
735 | if (x == 0e0f ) return 0x00000000U; |
731 | if (x > +3.40282346638528860e+38f) return 0x7f800000U; |
736 | if (x > +3.40282346638528860e+38f) return 0x7f800000U; |
732 | if (x < -3.40282346638528860e+38f) return 0xff800000U; |
737 | if (x < -3.40282346638528860e+38f) return 0xff800000U; |
733 | if (x != x ) return 0x7fbfffffU; |
738 | if (x != x ) return 0x7fbfffffU; |
734 | |
739 | |
735 | m = frexpf (x, &e) * 0x1000000U; |
740 | m = ecb_frexpf (x, &e) * 0x1000000U; |
736 | |
741 | |
737 | r = m & 0x80000000U; |
742 | r = m & 0x80000000U; |
738 | |
743 | |
739 | if (r) |
744 | if (r) |
740 | m = -m; |
745 | m = -m; |