Index: lib/Headers/Intrin.h
===================================================================
--- lib/Headers/Intrin.h
+++ lib/Headers/Intrin.h
@@ -30,7 +30,9 @@
 #define __INTRIN_H
 
 /* First include the standard intrinsics. */
+#if defined(__i386__) || defined(__x86_64__)
 #include <x86intrin.h>
+#endif
 
 /* For the definition of jmp_buf. */
 #include <setjmp.h>
@@ -572,6 +574,8 @@
   *a = *a | (1 << b);
   return x;
 }
+
+#if defined(__i386__) || defined(__x86_64__)
 static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
 _interlockedbittestandset(long volatile *__BitBase, long __BitPos) {
   unsigned char __Res;
@@ -582,6 +586,8 @@
            : "Ir"(__BitPos));
   return __Res;
 }
+#endif
+
 #ifdef __x86_64__
 static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
 _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
@@ -813,6 +819,8 @@
 /*----------------------------------------------------------------------------*\
 |* Barriers
 \*----------------------------------------------------------------------------*/
+
+#if defined(__i386__) || defined(__x86_64__)
 static __inline__ void __attribute__((__always_inline__, __nodebug__))
 __attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
 _ReadWriteBarrier(void) {
@@ -828,12 +836,15 @@
 _WriteBarrier(void) {
   __asm__ volatile ("" : : : "memory");
 }
+#endif
+
 #ifdef __x86_64__
 static __inline__ void __attribute__((__always_inline__, __nodebug__))
 __faststorefence(void) {
   __asm__ volatile("lock orq $0, (%%rsp)" : : : "memory");
 }
 #endif
+
 /*----------------------------------------------------------------------------*\
 |* readfs, readgs
 |* (Pointers in address space #256 and #257 are relative to the GS and FS
@@ -883,6 +894,8 @@
 /*----------------------------------------------------------------------------*\
 |* movs, stos
 \*----------------------------------------------------------------------------*/
+
+#if defined(__i386__) || defined(__x86_64__)
 static __inline__ void __attribute__((__always_inline__, __nodebug__))
 __movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
   __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n)
@@ -913,6 +926,8 @@
   __asm__("rep stosh" : : "D"(__dst), "a"(__x), "c"(__n)
                       : "%edi", "%ecx");
 }
+#endif
+
 #ifdef __x86_64__
 static __inline__ void __attribute__((__always_inline__, __nodebug__))
 __movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
@@ -937,6 +952,8 @@
 _ReturnAddress(void) {
   return __builtin_return_address(0);
 }
+
+#if defined(__i386__) || defined(__x86_64__)
 static __inline__ void __attribute__((__always_inline__, __nodebug__))
 __cpuid(int __info[4], int __level) {
   __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
@@ -947,20 +964,24 @@
   __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                    : "a"(__level), "c"(__ecx));
 }
+
 static __inline__ unsigned __int64 __cdecl __attribute__((__always_inline__, __nodebug__))
 _xgetbv(unsigned int __xcr_no) {
   unsigned int __eax, __edx;
   __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
   return ((unsigned __int64)__edx << 32) | __eax;
 }
+
 static __inline__ void __attribute__((__always_inline__, __nodebug__))
 __halt(void) {
   __asm__ volatile ("hlt");
 }
+#endif
 
 /*----------------------------------------------------------------------------*\
 |* Privileged intrinsics
 \*----------------------------------------------------------------------------*/
+#if defined(__i386__) || defined(__x86_64__)
 static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
 __readmsr(unsigned long __register) {
   // Loads the contents of a 64-bit model specific register (MSR) specified in
@@ -986,6 +1007,7 @@
 __writecr3(unsigned int __cr3_val) {
   __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
 }
+#endif
 
 #ifdef __cplusplus
 }
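
Note: a minimal sketch of what the new guards enable, assuming a translation
unit built both for x86 and for a non-x86 target such as ARM. The function
name max_cpuid_leaf is hypothetical; __cpuid and the guard macro are taken
from the hunks above.

  #include <Intrin.h>

  int max_cpuid_leaf(void) {
  #if defined(__i386__) || defined(__x86_64__)
    int info[4];
    __cpuid(info, 0);  /* x86-only intrinsic; compiled only under the guard */
    return info[0];    /* EAX of leaf 0: highest supported standard leaf */
  #else
    return 0;          /* non-x86 targets take this branch */
  #endif
  }

With the guards in place, the #else branch builds cleanly because Intrin.h no
longer defines the x86 inline-asm helpers (or pulls in x86intrin.h) when the
target is not i386 or x86_64.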