Index: libunwind/trunk/include/__libunwind_config.h =================================================================== --- libunwind/trunk/include/__libunwind_config.h +++ libunwind/trunk/include/__libunwind_config.h @@ -18,7 +18,7 @@ #define _LIBUNWIND_HIGHEST_DWARF_REGISTER_X86 8 #define _LIBUNWIND_HIGHEST_DWARF_REGISTER_X86_64 32 #define _LIBUNWIND_HIGHEST_DWARF_REGISTER_PPC 112 -#define _LIBUNWIND_HIGHEST_DWARF_REGISTER_PPC64 110 +#define _LIBUNWIND_HIGHEST_DWARF_REGISTER_PPC64 116 #define _LIBUNWIND_HIGHEST_DWARF_REGISTER_ARM64 95 #define _LIBUNWIND_HIGHEST_DWARF_REGISTER_ARM 287 #define _LIBUNWIND_HIGHEST_DWARF_REGISTER_OR1K 31 @@ -42,8 +42,8 @@ # define _LIBUNWIND_HIGHEST_DWARF_REGISTER _LIBUNWIND_HIGHEST_DWARF_REGISTER_X86_64 # elif defined(__powerpc64__) # define _LIBUNWIND_TARGET_PPC64 1 -# define _LIBUNWIND_CONTEXT_SIZE 136 -# define _LIBUNWIND_CURSOR_SIZE 148 +# define _LIBUNWIND_CONTEXT_SIZE 167 +# define _LIBUNWIND_CURSOR_SIZE 179 # define _LIBUNWIND_HIGHEST_DWARF_REGISTER _LIBUNWIND_HIGHEST_DWARF_REGISTER_PPC64 # elif defined(__ppc__) # define _LIBUNWIND_TARGET_PPC 1 @@ -96,8 +96,8 @@ # define _LIBUNWIND_TARGET_OR1K 1 # define _LIBUNWIND_TARGET_MIPS_O32 1 # define _LIBUNWIND_TARGET_MIPS_NEWABI 1 -# define _LIBUNWIND_CONTEXT_SIZE 136 -# define _LIBUNWIND_CURSOR_SIZE 148 +# define _LIBUNWIND_CONTEXT_SIZE 167 +# define _LIBUNWIND_CURSOR_SIZE 179 # define _LIBUNWIND_HIGHEST_DWARF_REGISTER 287 #endif // _LIBUNWIND_IS_NATIVE_ONLY Index: libunwind/trunk/include/libunwind.h =================================================================== --- libunwind/trunk/include/libunwind.h +++ libunwind/trunk/include/libunwind.h @@ -327,116 +327,186 @@ // 64-bit ppc register numbers enum { - UNW_PPC64_R0 = 0, - UNW_PPC64_R1 = 1, - UNW_PPC64_R2 = 2, - UNW_PPC64_R3 = 3, - UNW_PPC64_R4 = 4, - UNW_PPC64_R5 = 5, - UNW_PPC64_R6 = 6, - UNW_PPC64_R7 = 7, - UNW_PPC64_R8 = 8, - UNW_PPC64_R9 = 9, - UNW_PPC64_R10 = 10, - UNW_PPC64_R11 = 11, - UNW_PPC64_R12 = 12, - UNW_PPC64_R13 = 13, - UNW_PPC64_R14 = 14, - UNW_PPC64_R15 = 15, - UNW_PPC64_R16 = 16, - UNW_PPC64_R17 = 17, - UNW_PPC64_R18 = 18, - UNW_PPC64_R19 = 19, - UNW_PPC64_R20 = 20, - UNW_PPC64_R21 = 21, - UNW_PPC64_R22 = 22, - UNW_PPC64_R23 = 23, - UNW_PPC64_R24 = 24, - UNW_PPC64_R25 = 25, - UNW_PPC64_R26 = 26, - UNW_PPC64_R27 = 27, - UNW_PPC64_R28 = 28, - UNW_PPC64_R29 = 29, - UNW_PPC64_R30 = 30, - UNW_PPC64_R31 = 31, - UNW_PPC64_F0 = 32, - UNW_PPC64_F1 = 33, - UNW_PPC64_F2 = 34, - UNW_PPC64_F3 = 35, - UNW_PPC64_F4 = 36, - UNW_PPC64_F5 = 37, - UNW_PPC64_F6 = 38, - UNW_PPC64_F7 = 39, - UNW_PPC64_F8 = 40, - UNW_PPC64_F9 = 41, - UNW_PPC64_F10 = 42, - UNW_PPC64_F11 = 43, - UNW_PPC64_F12 = 44, - UNW_PPC64_F13 = 45, - UNW_PPC64_F14 = 46, - UNW_PPC64_F15 = 47, - UNW_PPC64_F16 = 48, - UNW_PPC64_F17 = 49, - UNW_PPC64_F18 = 50, - UNW_PPC64_F19 = 51, - UNW_PPC64_F20 = 52, - UNW_PPC64_F21 = 53, - UNW_PPC64_F22 = 54, - UNW_PPC64_F23 = 55, - UNW_PPC64_F24 = 56, - UNW_PPC64_F25 = 57, - UNW_PPC64_F26 = 58, - UNW_PPC64_F27 = 59, - UNW_PPC64_F28 = 60, - UNW_PPC64_F29 = 61, - UNW_PPC64_F30 = 62, - UNW_PPC64_F31 = 63, - UNW_PPC64_LR = 64, - UNW_PPC64_CTR = 65, - UNW_PPC64_CR0 = 66, - UNW_PPC64_CR1 = 67, - UNW_PPC64_CR2 = 68, - UNW_PPC64_CR3 = 69, - UNW_PPC64_CR4 = 70, - UNW_PPC64_CR5 = 71, - UNW_PPC64_CR6 = 72, - UNW_PPC64_CR7 = 73, - UNW_PPC64_XER = 74, - UNW_PPC64_V0 = 75, - UNW_PPC64_V1 = 76, - UNW_PPC64_V2 = 77, - UNW_PPC64_V3 = 78, - UNW_PPC64_V4 = 79, - UNW_PPC64_V5 = 80, - UNW_PPC64_V6 = 81, - UNW_PPC64_V7 = 82, - UNW_PPC64_V8 = 83, - UNW_PPC64_V9 = 84, - 
UNW_PPC64_V10 = 85, - UNW_PPC64_V11 = 86, - UNW_PPC64_V12 = 87, - UNW_PPC64_V13 = 88, - UNW_PPC64_V14 = 89, - UNW_PPC64_V15 = 90, - UNW_PPC64_V16 = 91, - UNW_PPC64_V17 = 92, - UNW_PPC64_V18 = 93, - UNW_PPC64_V19 = 94, - UNW_PPC64_V20 = 95, - UNW_PPC64_V21 = 96, - UNW_PPC64_V22 = 97, - UNW_PPC64_V23 = 98, - UNW_PPC64_V24 = 99, - UNW_PPC64_V25 = 100, - UNW_PPC64_V26 = 101, - UNW_PPC64_V27 = 102, - UNW_PPC64_V28 = 103, - UNW_PPC64_V29 = 104, - UNW_PPC64_V30 = 105, - UNW_PPC64_V31 = 106, - UNW_PPC64_VRSAVE = 107, - UNW_PPC64_VSCR = 108, - UNW_PPC64_FPSCR = 109 + UNW_PPC64_R0 = 0, + UNW_PPC64_R1 = 1, + UNW_PPC64_R2 = 2, + UNW_PPC64_R3 = 3, + UNW_PPC64_R4 = 4, + UNW_PPC64_R5 = 5, + UNW_PPC64_R6 = 6, + UNW_PPC64_R7 = 7, + UNW_PPC64_R8 = 8, + UNW_PPC64_R9 = 9, + UNW_PPC64_R10 = 10, + UNW_PPC64_R11 = 11, + UNW_PPC64_R12 = 12, + UNW_PPC64_R13 = 13, + UNW_PPC64_R14 = 14, + UNW_PPC64_R15 = 15, + UNW_PPC64_R16 = 16, + UNW_PPC64_R17 = 17, + UNW_PPC64_R18 = 18, + UNW_PPC64_R19 = 19, + UNW_PPC64_R20 = 20, + UNW_PPC64_R21 = 21, + UNW_PPC64_R22 = 22, + UNW_PPC64_R23 = 23, + UNW_PPC64_R24 = 24, + UNW_PPC64_R25 = 25, + UNW_PPC64_R26 = 26, + UNW_PPC64_R27 = 27, + UNW_PPC64_R28 = 28, + UNW_PPC64_R29 = 29, + UNW_PPC64_R30 = 30, + UNW_PPC64_R31 = 31, + UNW_PPC64_F0 = 32, + UNW_PPC64_F1 = 33, + UNW_PPC64_F2 = 34, + UNW_PPC64_F3 = 35, + UNW_PPC64_F4 = 36, + UNW_PPC64_F5 = 37, + UNW_PPC64_F6 = 38, + UNW_PPC64_F7 = 39, + UNW_PPC64_F8 = 40, + UNW_PPC64_F9 = 41, + UNW_PPC64_F10 = 42, + UNW_PPC64_F11 = 43, + UNW_PPC64_F12 = 44, + UNW_PPC64_F13 = 45, + UNW_PPC64_F14 = 46, + UNW_PPC64_F15 = 47, + UNW_PPC64_F16 = 48, + UNW_PPC64_F17 = 49, + UNW_PPC64_F18 = 50, + UNW_PPC64_F19 = 51, + UNW_PPC64_F20 = 52, + UNW_PPC64_F21 = 53, + UNW_PPC64_F22 = 54, + UNW_PPC64_F23 = 55, + UNW_PPC64_F24 = 56, + UNW_PPC64_F25 = 57, + UNW_PPC64_F26 = 58, + UNW_PPC64_F27 = 59, + UNW_PPC64_F28 = 60, + UNW_PPC64_F29 = 61, + UNW_PPC64_F30 = 62, + UNW_PPC64_F31 = 63, + // 64: reserved + UNW_PPC64_LR = 65, + UNW_PPC64_CTR = 66, + // 67: reserved + UNW_PPC64_CR0 = 68, + UNW_PPC64_CR1 = 69, + UNW_PPC64_CR2 = 70, + UNW_PPC64_CR3 = 71, + UNW_PPC64_CR4 = 72, + UNW_PPC64_CR5 = 73, + UNW_PPC64_CR6 = 74, + UNW_PPC64_CR7 = 75, + UNW_PPC64_XER = 76, + UNW_PPC64_V0 = 77, + UNW_PPC64_V1 = 78, + UNW_PPC64_V2 = 79, + UNW_PPC64_V3 = 80, + UNW_PPC64_V4 = 81, + UNW_PPC64_V5 = 82, + UNW_PPC64_V6 = 83, + UNW_PPC64_V7 = 84, + UNW_PPC64_V8 = 85, + UNW_PPC64_V9 = 86, + UNW_PPC64_V10 = 87, + UNW_PPC64_V11 = 88, + UNW_PPC64_V12 = 89, + UNW_PPC64_V13 = 90, + UNW_PPC64_V14 = 91, + UNW_PPC64_V15 = 92, + UNW_PPC64_V16 = 93, + UNW_PPC64_V17 = 94, + UNW_PPC64_V18 = 95, + UNW_PPC64_V19 = 96, + UNW_PPC64_V20 = 97, + UNW_PPC64_V21 = 98, + UNW_PPC64_V22 = 99, + UNW_PPC64_V23 = 100, + UNW_PPC64_V24 = 101, + UNW_PPC64_V25 = 102, + UNW_PPC64_V26 = 103, + UNW_PPC64_V27 = 104, + UNW_PPC64_V28 = 105, + UNW_PPC64_V29 = 106, + UNW_PPC64_V30 = 107, + UNW_PPC64_V31 = 108, + // 109, 111-113: OpenPOWER ELF V2 ABI: reserved + // Borrowing VRSAVE number from PPC32. 
+ UNW_PPC64_VRSAVE = 109, + UNW_PPC64_VSCR = 110, + UNW_PPC64_TFHAR = 114, + UNW_PPC64_TFIAR = 115, + UNW_PPC64_TEXASR = 116, + UNW_PPC64_VS0 = UNW_PPC64_F0, + UNW_PPC64_VS1 = UNW_PPC64_F1, + UNW_PPC64_VS2 = UNW_PPC64_F2, + UNW_PPC64_VS3 = UNW_PPC64_F3, + UNW_PPC64_VS4 = UNW_PPC64_F4, + UNW_PPC64_VS5 = UNW_PPC64_F5, + UNW_PPC64_VS6 = UNW_PPC64_F6, + UNW_PPC64_VS7 = UNW_PPC64_F7, + UNW_PPC64_VS8 = UNW_PPC64_F8, + UNW_PPC64_VS9 = UNW_PPC64_F9, + UNW_PPC64_VS10 = UNW_PPC64_F10, + UNW_PPC64_VS11 = UNW_PPC64_F11, + UNW_PPC64_VS12 = UNW_PPC64_F12, + UNW_PPC64_VS13 = UNW_PPC64_F13, + UNW_PPC64_VS14 = UNW_PPC64_F14, + UNW_PPC64_VS15 = UNW_PPC64_F15, + UNW_PPC64_VS16 = UNW_PPC64_F16, + UNW_PPC64_VS17 = UNW_PPC64_F17, + UNW_PPC64_VS18 = UNW_PPC64_F18, + UNW_PPC64_VS19 = UNW_PPC64_F19, + UNW_PPC64_VS20 = UNW_PPC64_F20, + UNW_PPC64_VS21 = UNW_PPC64_F21, + UNW_PPC64_VS22 = UNW_PPC64_F22, + UNW_PPC64_VS23 = UNW_PPC64_F23, + UNW_PPC64_VS24 = UNW_PPC64_F24, + UNW_PPC64_VS25 = UNW_PPC64_F25, + UNW_PPC64_VS26 = UNW_PPC64_F26, + UNW_PPC64_VS27 = UNW_PPC64_F27, + UNW_PPC64_VS28 = UNW_PPC64_F28, + UNW_PPC64_VS29 = UNW_PPC64_F29, + UNW_PPC64_VS30 = UNW_PPC64_F30, + UNW_PPC64_VS31 = UNW_PPC64_F31, + UNW_PPC64_VS32 = UNW_PPC64_V0, + UNW_PPC64_VS33 = UNW_PPC64_V1, + UNW_PPC64_VS34 = UNW_PPC64_V2, + UNW_PPC64_VS35 = UNW_PPC64_V3, + UNW_PPC64_VS36 = UNW_PPC64_V4, + UNW_PPC64_VS37 = UNW_PPC64_V5, + UNW_PPC64_VS38 = UNW_PPC64_V6, + UNW_PPC64_VS39 = UNW_PPC64_V7, + UNW_PPC64_VS40 = UNW_PPC64_V8, + UNW_PPC64_VS41 = UNW_PPC64_V9, + UNW_PPC64_VS42 = UNW_PPC64_V10, + UNW_PPC64_VS43 = UNW_PPC64_V11, + UNW_PPC64_VS44 = UNW_PPC64_V12, + UNW_PPC64_VS45 = UNW_PPC64_V13, + UNW_PPC64_VS46 = UNW_PPC64_V14, + UNW_PPC64_VS47 = UNW_PPC64_V15, + UNW_PPC64_VS48 = UNW_PPC64_V16, + UNW_PPC64_VS49 = UNW_PPC64_V17, + UNW_PPC64_VS50 = UNW_PPC64_V18, + UNW_PPC64_VS51 = UNW_PPC64_V19, + UNW_PPC64_VS52 = UNW_PPC64_V20, + UNW_PPC64_VS53 = UNW_PPC64_V21, + UNW_PPC64_VS54 = UNW_PPC64_V22, + UNW_PPC64_VS55 = UNW_PPC64_V23, + UNW_PPC64_VS56 = UNW_PPC64_V24, + UNW_PPC64_VS57 = UNW_PPC64_V25, + UNW_PPC64_VS58 = UNW_PPC64_V26, + UNW_PPC64_VS59 = UNW_PPC64_V27, + UNW_PPC64_VS60 = UNW_PPC64_V28, + UNW_PPC64_VS61 = UNW_PPC64_V29, + UNW_PPC64_VS62 = UNW_PPC64_V30, + UNW_PPC64_VS63 = UNW_PPC64_V31 }; // 64-bit ARM64 registers Index: libunwind/trunk/src/Registers.hpp =================================================================== --- libunwind/trunk/src/Registers.hpp +++ libunwind/trunk/src/Registers.hpp @@ -1107,7 +1107,7 @@ #endif // _LIBUNWIND_TARGET_PPC #if defined(_LIBUNWIND_TARGET_PPC64) -/// Registers_ppc holds the register state of a thread in a 64-bit PowerPC +/// Registers_ppc64 holds the register state of a thread in a 64-bit PowerPC /// process. 
class _LIBUNWIND_HIDDEN Registers_ppc64 { public: @@ -1134,8 +1134,8 @@ private: struct ppc64_thread_state_t { - uint64_t __srr0; /* Instruction address register (PC) */ - uint64_t __srr1; /* Machine state register (supervisor) */ + uint64_t __srr0; // Instruction address register (PC) + uint64_t __srr1; // Machine state register (supervisor) uint64_t __r0; uint64_t __r1; uint64_t __r2; @@ -1168,21 +1168,25 @@ uint64_t __r29; uint64_t __r30; uint64_t __r31; - uint64_t __cr; /* Condition register */ - uint64_t __xer; /* User's integer exception register */ - uint64_t __lr; /* Link register */ - uint64_t __ctr; /* Count register */ - uint64_t __vrsave; /* Vector Save Register */ + uint64_t __cr; // Condition register + uint64_t __xer; // User's integer exception register + uint64_t __lr; // Link register + uint64_t __ctr; // Count register + uint64_t __vrsave; // Vector Save Register }; - struct ppc64_float_state_t { - double __fpregs[32]; - uint64_t __fpscr; /* floating point status register */ + union ppc64_vsr_t { + struct asfloat_s { + double f; + uint64_t v2; + } asfloat; + v128 v; }; ppc64_thread_state_t _registers; - ppc64_float_state_t _floatRegisters; - v128 _vectorRegisters[32]; + ppc64_vsr_t _vectorScalarRegisters[64]; + + static int getVectorRegNum(int num); }; inline Registers_ppc64::Registers_ppc64(const void *registers) { @@ -1190,32 +1194,29 @@ "ppc64 registers do not fit into unw_context_t"); memcpy(&_registers, static_cast(registers), sizeof(_registers)); - static_assert(sizeof(ppc64_thread_state_t) == 312, - "expected float register offset to be 312"); - memcpy(&_floatRegisters, - static_cast(registers) + sizeof(ppc64_thread_state_t), - sizeof(_floatRegisters)); - static_assert(sizeof(ppc64_thread_state_t) + sizeof(ppc64_float_state_t) == 576, - "expected vector register offset to be 576 bytes"); - memcpy(_vectorRegisters, - static_cast(registers) + sizeof(ppc64_thread_state_t) + - sizeof(ppc64_float_state_t), - sizeof(_vectorRegisters)); + static_assert(sizeof(_registers) == 312, + "expected vector scalar register offset to be 312"); + memcpy(&_vectorScalarRegisters, + static_cast(registers) + sizeof(_registers), + sizeof(_vectorScalarRegisters)); + static_assert(sizeof(_registers) + + sizeof(_vectorScalarRegisters) == 1336, + "expected vector register offset to be 1336 bytes"); } inline Registers_ppc64::Registers_ppc64() { memset(&_registers, 0, sizeof(_registers)); - memset(&_floatRegisters, 0, sizeof(_floatRegisters)); - memset(&_vectorRegisters, 0, sizeof(_vectorRegisters)); + memset(&_vectorScalarRegisters, 0, sizeof(_vectorScalarRegisters)); } inline bool Registers_ppc64::validRegister(int regNum) const { switch (regNum) { case UNW_REG_IP: case UNW_REG_SP: - case UNW_PPC64_VRSAVE: + case UNW_PPC64_XER: case UNW_PPC64_LR: case UNW_PPC64_CTR: + case UNW_PPC64_VRSAVE: return true; } @@ -1231,11 +1232,10 @@ switch (regNum) { case UNW_REG_IP: return _registers.__srr0; - case UNW_REG_SP: - return _registers.__r1; case UNW_PPC64_R0: return _registers.__r0; case UNW_PPC64_R1: + case UNW_REG_SP: return _registers.__r1; case UNW_PPC64_R2: return _registers.__r2; @@ -1297,10 +1297,6 @@ return _registers.__r30; case UNW_PPC64_R31: return _registers.__r31; - case UNW_PPC64_LR: - return _registers.__lr; - case UNW_PPC64_CTR: - return _registers.__ctr; case UNW_PPC64_CR0: return (_registers.__cr & 0xF0000000); case UNW_PPC64_CR1: @@ -1317,10 +1313,14 @@ return (_registers.__cr & 0x000000F0); case UNW_PPC64_CR7: return (_registers.__cr & 0x0000000F); + case UNW_PPC64_XER: + return 
_registers.__xer; + case UNW_PPC64_LR: + return _registers.__lr; + case UNW_PPC64_CTR: + return _registers.__ctr; case UNW_PPC64_VRSAVE: return _registers.__vrsave; - case UNW_PPC64_FPSCR: - return _floatRegisters.__fpscr; } _LIBUNWIND_ABORT("unsupported ppc64 register"); } @@ -1330,13 +1330,11 @@ case UNW_REG_IP: _registers.__srr0 = value; return; - case UNW_REG_SP: - _registers.__r1 = value; - return; case UNW_PPC64_R0: _registers.__r0 = value; return; case UNW_PPC64_R1: + case UNW_REG_SP: _registers.__r1 = value; return; case UNW_PPC64_R2: @@ -1429,12 +1427,6 @@ case UNW_PPC64_R31: _registers.__r31 = value; return; - case UNW_PPC64_LR: - _registers.__lr = value; - return; - case UNW_PPC64_CTR: - _registers.__ctr = value; - return; case UNW_PPC64_CR0: _registers.__cr &= 0x0FFFFFFF; _registers.__cr |= (value & 0xF0000000); @@ -1467,54 +1459,65 @@ _registers.__cr &= 0xFFFFFFF0; _registers.__cr |= (value & 0x0000000F); return; - case UNW_PPC64_VRSAVE: - _registers.__vrsave = value; - return; case UNW_PPC64_XER: _registers.__xer = value; return; - case UNW_PPC64_VSCR: - // not saved + case UNW_PPC64_LR: + _registers.__lr = value; + return; + case UNW_PPC64_CTR: + _registers.__ctr = value; + return; + case UNW_PPC64_VRSAVE: + _registers.__vrsave = value; return; } _LIBUNWIND_ABORT("unsupported ppc64 register"); } inline bool Registers_ppc64::validFloatRegister(int regNum) const { - if (regNum < UNW_PPC64_F0) - return false; - if (regNum > UNW_PPC64_F31) - return false; - return true; + return regNum >= UNW_PPC64_F0 && regNum <= UNW_PPC64_F31; } inline double Registers_ppc64::getFloatRegister(int regNum) const { assert(validFloatRegister(regNum)); - return _floatRegisters.__fpregs[regNum - UNW_PPC64_F0]; + return _vectorScalarRegisters[regNum - UNW_PPC64_F0].asfloat.f; } inline void Registers_ppc64::setFloatRegister(int regNum, double value) { assert(validFloatRegister(regNum)); - _floatRegisters.__fpregs[regNum - UNW_PPC64_F0] = value; + _vectorScalarRegisters[regNum - UNW_PPC64_F0].asfloat.f = value; } inline bool Registers_ppc64::validVectorRegister(int regNum) const { - if (regNum < UNW_PPC64_V0) - return false; - if (regNum > UNW_PPC64_V31) - return false; - return true; +#ifdef PPC64_HAS_VMX + if (regNum >= UNW_PPC64_VS0 && regNum <= UNW_PPC64_VS31) + return true; + if (regNum >= UNW_PPC64_VS32 && regNum <= UNW_PPC64_VS63) + return true; +#else + if (regNum >= UNW_PPC64_V0 && regNum <= UNW_PPC64_V31) + return true; +#endif + return false; +} + +inline int Registers_ppc64::getVectorRegNum(int num) +{ + if (num >= UNW_PPC64_VS0 && num <= UNW_PPC64_VS31) + return num - UNW_PPC64_VS0; + else + return num - UNW_PPC64_VS32 + 32; } inline v128 Registers_ppc64::getVectorRegister(int regNum) const { assert(validVectorRegister(regNum)); - v128 result = _vectorRegisters[regNum - UNW_PPC64_V0]; - return result; + return _vectorScalarRegisters[getVectorRegNum(regNum)].v; } inline void Registers_ppc64::setVectorRegister(int regNum, v128 value) { assert(validVectorRegister(regNum)); - _vectorRegisters[regNum - UNW_PPC64_V0] = value; + _vectorScalarRegisters[getVectorRegNum(regNum)].v = value; } inline const char *Registers_ppc64::getRegisterName(int regNum) { @@ -1587,6 +1590,30 @@ return "r30"; case UNW_PPC64_R31: return "r31"; + case UNW_PPC64_CR0: + return "cr0"; + case UNW_PPC64_CR1: + return "cr1"; + case UNW_PPC64_CR2: + return "cr2"; + case UNW_PPC64_CR3: + return "cr3"; + case UNW_PPC64_CR4: + return "cr4"; + case UNW_PPC64_CR5: + return "cr5"; + case UNW_PPC64_CR6: + return "cr6"; + case 
UNW_PPC64_CR7: + return "cr7"; + case UNW_PPC64_XER: + return "xer"; + case UNW_PPC64_LR: + return "lr"; + case UNW_PPC64_CTR: + return "ctr"; + case UNW_PPC64_VRSAVE: + return "vrsave"; case UNW_PPC64_F0: return "fp0"; case UNW_PPC64_F1: @@ -1651,35 +1678,72 @@ return "fp30"; case UNW_PPC64_F31: return "fp31"; - case UNW_PPC64_LR: - return "lr"; - case UNW_PPC64_CTR: - return "ctr"; - case UNW_PPC64_CR0: - return "cr0"; - case UNW_PPC64_CR1: - return "cr1"; - case UNW_PPC64_CR2: - return "cr2"; - case UNW_PPC64_CR3: - return "cr3"; - case UNW_PPC64_CR4: - return "cr4"; - case UNW_PPC64_CR5: - return "cr5"; - case UNW_PPC64_CR6: - return "cr6"; - case UNW_PPC64_CR7: - return "cr7"; - case UNW_PPC64_XER: - return "xer"; - case UNW_PPC64_VRSAVE: - return "vrsave"; - case UNW_PPC64_FPSCR: - return "fpscr"; - default: - return "unknown register"; + case UNW_PPC64_V0: + return "v0"; + case UNW_PPC64_V1: + return "v1"; + case UNW_PPC64_V2: + return "v2"; + case UNW_PPC64_V3: + return "v3"; + case UNW_PPC64_V4: + return "v4"; + case UNW_PPC64_V5: + return "v5"; + case UNW_PPC64_V6: + return "v6"; + case UNW_PPC64_V7: + return "v7"; + case UNW_PPC64_V8: + return "v8"; + case UNW_PPC64_V9: + return "v9"; + case UNW_PPC64_V10: + return "v10"; + case UNW_PPC64_V11: + return "v11"; + case UNW_PPC64_V12: + return "v12"; + case UNW_PPC64_V13: + return "v13"; + case UNW_PPC64_V14: + return "v14"; + case UNW_PPC64_V15: + return "v15"; + case UNW_PPC64_V16: + return "v16"; + case UNW_PPC64_V17: + return "v17"; + case UNW_PPC64_V18: + return "v18"; + case UNW_PPC64_V19: + return "v19"; + case UNW_PPC64_V20: + return "v20"; + case UNW_PPC64_V21: + return "v21"; + case UNW_PPC64_V22: + return "v22"; + case UNW_PPC64_V23: + return "v23"; + case UNW_PPC64_V24: + return "v24"; + case UNW_PPC64_V25: + return "v25"; + case UNW_PPC64_V26: + return "v26"; + case UNW_PPC64_V27: + return "v27"; + case UNW_PPC64_V28: + return "v28"; + case UNW_PPC64_V29: + return "v29"; + case UNW_PPC64_V30: + return "v30"; + case UNW_PPC64_V31: + return "v31"; } + return "unknown register"; } #endif // _LIBUNWIND_TARGET_PPC64 Index: libunwind/trunk/src/UnwindRegistersRestore.S =================================================================== --- libunwind/trunk/src/UnwindRegistersRestore.S +++ libunwind/trunk/src/UnwindRegistersRestore.S @@ -138,89 +138,259 @@ // thread_state pointer is in r3 // +// load register (GPR) +#define PPC64_LR(n) \ + ld %r##n, (8 * (n + 2))(%r3) + // restore integral registers // skip r0 for now // skip r1 for now - ld %r2, 32(%r3) + PPC64_LR(2) // skip r3 for now // skip r4 for now // skip r5 for now - ld %r6, 64(%r3) - ld %r7, 72(%r3) - ld %r8, 80(%r3) - ld %r9, 88(%r3) - ld %r10, 96(%r3) - ld %r11, 104(%r3) - ld %r12, 112(%r3) - ld %r13, 120(%r3) - ld %r14, 128(%r3) - ld %r15, 136(%r3) - ld %r16, 144(%r3) - ld %r17, 152(%r3) - ld %r18, 160(%r3) - ld %r19, 168(%r3) - ld %r20, 176(%r3) - ld %r21, 184(%r3) - ld %r22, 192(%r3) - ld %r23, 200(%r3) - ld %r24, 208(%r3) - ld %r25, 216(%r3) - ld %r26, 224(%r3) - ld %r27, 232(%r3) - ld %r28, 240(%r3) - ld %r29, 248(%r3) - ld %r30, 256(%r3) - ld %r31, 264(%r3) - - //restore float registers - lfd %f0, 312(%r3) - lfd %f1, 320(%r3) - lfd %f2, 328(%r3) - lfd %f3, 336(%r3) - lfd %f4, 344(%r3) - lfd %f5, 352(%r3) - lfd %f6, 360(%r3) - lfd %f7, 368(%r3) - lfd %f8, 376(%r3) - lfd %f9, 384(%r3) - lfd %f10, 392(%r3) - lfd %f11, 400(%r3) - lfd %f12, 408(%r3) - lfd %f13, 416(%r3) - lfd %f14, 424(%r3) - lfd %f15, 432(%r3) - lfd %f16, 440(%r3) - lfd %f17, 448(%r3) - lfd %f18, 
456(%r3) - lfd %f19, 464(%r3) - lfd %f20, 472(%r3) - lfd %f21, 480(%r3) - lfd %f22, 488(%r3) - lfd %f23, 496(%r3) - lfd %f24, 504(%r3) - lfd %f25, 512(%r3) - lfd %f26, 520(%r3) - lfd %f27, 528(%r3) - lfd %f28, 536(%r3) - lfd %f29, 544(%r3) - lfd %f30, 552(%r3) - lfd %f31, 560(%r3) + PPC64_LR(6) + PPC64_LR(7) + PPC64_LR(8) + PPC64_LR(9) + PPC64_LR(10) + PPC64_LR(11) + PPC64_LR(12) + PPC64_LR(13) + PPC64_LR(14) + PPC64_LR(15) + PPC64_LR(16) + PPC64_LR(17) + PPC64_LR(18) + PPC64_LR(19) + PPC64_LR(20) + PPC64_LR(21) + PPC64_LR(22) + PPC64_LR(23) + PPC64_LR(24) + PPC64_LR(25) + PPC64_LR(26) + PPC64_LR(27) + PPC64_LR(28) + PPC64_LR(29) + PPC64_LR(30) + PPC64_LR(31) + +#ifdef PPC64_HAS_VMX + + // restore VS registers + // (note that this also restores floating point registers and V registers, + // because part of VS is mapped to these registers) + + addi %r4, %r3, PPC64_OFFS_FP + +// load VS register +#define PPC64_LVS(n) \ + lxvd2x %vs##n, 0, %r4 ;\ + addi %r4, %r4, 16 + + // restore the first 32 VS regs (and also all floating point regs) + PPC64_LVS(0) + PPC64_LVS(1) + PPC64_LVS(2) + PPC64_LVS(3) + PPC64_LVS(4) + PPC64_LVS(5) + PPC64_LVS(6) + PPC64_LVS(7) + PPC64_LVS(8) + PPC64_LVS(9) + PPC64_LVS(10) + PPC64_LVS(11) + PPC64_LVS(12) + PPC64_LVS(13) + PPC64_LVS(14) + PPC64_LVS(15) + PPC64_LVS(16) + PPC64_LVS(17) + PPC64_LVS(18) + PPC64_LVS(19) + PPC64_LVS(20) + PPC64_LVS(21) + PPC64_LVS(22) + PPC64_LVS(23) + PPC64_LVS(24) + PPC64_LVS(25) + PPC64_LVS(26) + PPC64_LVS(27) + PPC64_LVS(28) + PPC64_LVS(29) + PPC64_LVS(30) + PPC64_LVS(31) + + // use VRSAVE to conditionally restore the remaining VS regs, + // that are where the V regs are mapped + + ld %r5, PPC64_OFFS_VRSAVE(%r3) // test VRsave + cmpwi %r5, 0 + beq Lnovec + +// conditionally load VS +#define PPC64_CLVS_BOTTOM(n) \ + beq Ldone##n ;\ + addi %r4, %r3, PPC64_OFFS_FP + n * 16 ;\ + lxvd2x %vs##n, 0, %r4 ;\ +Ldone##n: + +#define PPC64_CLVSl(n) \ + andis. %r0, %r5, (1<<(47-n)) ;\ +PPC64_CLVS_BOTTOM(n) + +#define PPC64_CLVSh(n) \ + andi. 
%r0, %r5, (1<<(63-n)) ;\ +PPC64_CLVS_BOTTOM(n) + + PPC64_CLVSl(32) + PPC64_CLVSl(33) + PPC64_CLVSl(34) + PPC64_CLVSl(35) + PPC64_CLVSl(36) + PPC64_CLVSl(37) + PPC64_CLVSl(38) + PPC64_CLVSl(39) + PPC64_CLVSl(40) + PPC64_CLVSl(41) + PPC64_CLVSl(42) + PPC64_CLVSl(43) + PPC64_CLVSl(44) + PPC64_CLVSl(45) + PPC64_CLVSl(46) + PPC64_CLVSl(47) + PPC64_CLVSh(48) + PPC64_CLVSh(49) + PPC64_CLVSh(50) + PPC64_CLVSh(51) + PPC64_CLVSh(52) + PPC64_CLVSh(53) + PPC64_CLVSh(54) + PPC64_CLVSh(55) + PPC64_CLVSh(56) + PPC64_CLVSh(57) + PPC64_CLVSh(58) + PPC64_CLVSh(59) + PPC64_CLVSh(60) + PPC64_CLVSh(61) + PPC64_CLVSh(62) + PPC64_CLVSh(63) + +#else - //TODO: restore vector registers +// load FP register +#define PPC64_LF(n) \ + lfd %f##n, (PPC64_OFFS_FP + n * 16)(%r3) + + // restore float registers + PPC64_LF(0) + PPC64_LF(1) + PPC64_LF(2) + PPC64_LF(3) + PPC64_LF(4) + PPC64_LF(5) + PPC64_LF(6) + PPC64_LF(7) + PPC64_LF(8) + PPC64_LF(9) + PPC64_LF(10) + PPC64_LF(11) + PPC64_LF(12) + PPC64_LF(13) + PPC64_LF(14) + PPC64_LF(15) + PPC64_LF(16) + PPC64_LF(17) + PPC64_LF(18) + PPC64_LF(19) + PPC64_LF(20) + PPC64_LF(21) + PPC64_LF(22) + PPC64_LF(23) + PPC64_LF(24) + PPC64_LF(25) + PPC64_LF(26) + PPC64_LF(27) + PPC64_LF(28) + PPC64_LF(29) + PPC64_LF(30) + PPC64_LF(31) + + // restore vector registers if any are in use + ld %r5, PPC64_OFFS_VRSAVE(%r3) // test VRsave + cmpwi %r5, 0 + beq Lnovec + + subi %r4, %r1, 16 + // r4 is now a 16-byte aligned pointer into the red zone + // the _vectorScalarRegisters may not be 16-byte aligned + // so copy via red zone temp buffer + +#define PPC64_CLV_UNALIGNED_BOTTOM(n) \ + beq Ldone##n ;\ + ld %r0, (PPC64_OFFS_V + n * 16)(%r3) ;\ + std %r0, 0(%r4) ;\ + ld %r0, (PPC64_OFFS_V + n * 16 + 8)(%r3) ;\ + std %r0, 8(%r4) ;\ + lvx %v##n, 0, %r4 ;\ +Ldone ## n: + +#define PPC64_CLV_UNALIGNEDl(n) \ + andis. %r0, %r5, (1<<(15-n)) ;\ +PPC64_CLV_UNALIGNED_BOTTOM(n) + +#define PPC64_CLV_UNALIGNEDh(n) \ + andi. 
%r0, %r5, (1<<(31-n)) ;\ +PPC64_CLV_UNALIGNED_BOTTOM(n) + + PPC64_CLV_UNALIGNEDl(0) + PPC64_CLV_UNALIGNEDl(1) + PPC64_CLV_UNALIGNEDl(2) + PPC64_CLV_UNALIGNEDl(3) + PPC64_CLV_UNALIGNEDl(4) + PPC64_CLV_UNALIGNEDl(5) + PPC64_CLV_UNALIGNEDl(6) + PPC64_CLV_UNALIGNEDl(7) + PPC64_CLV_UNALIGNEDl(8) + PPC64_CLV_UNALIGNEDl(9) + PPC64_CLV_UNALIGNEDl(10) + PPC64_CLV_UNALIGNEDl(11) + PPC64_CLV_UNALIGNEDl(12) + PPC64_CLV_UNALIGNEDl(13) + PPC64_CLV_UNALIGNEDl(14) + PPC64_CLV_UNALIGNEDl(15) + PPC64_CLV_UNALIGNEDh(16) + PPC64_CLV_UNALIGNEDh(17) + PPC64_CLV_UNALIGNEDh(18) + PPC64_CLV_UNALIGNEDh(19) + PPC64_CLV_UNALIGNEDh(20) + PPC64_CLV_UNALIGNEDh(21) + PPC64_CLV_UNALIGNEDh(22) + PPC64_CLV_UNALIGNEDh(23) + PPC64_CLV_UNALIGNEDh(24) + PPC64_CLV_UNALIGNEDh(25) + PPC64_CLV_UNALIGNEDh(26) + PPC64_CLV_UNALIGNEDh(27) + PPC64_CLV_UNALIGNEDh(28) + PPC64_CLV_UNALIGNEDh(29) + PPC64_CLV_UNALIGNEDh(30) + PPC64_CLV_UNALIGNEDh(31) - // Lnovec: - ld %r0, 272(%r3) // __cr +#endif + +Lnovec: + ld %r0, PPC64_OFFS_CR(%r3) mtcr %r0 - ld %r0, 296(%r3) // __ctr - mtctr %r0 - ld %r0, 0(%r3) // __ssr0 + ld %r0, PPC64_OFFS_SRR0(%r3) mtctr %r0 - ld %r0, 16(%r3) - ld %r5, 56(%r3) - ld %r4, 48(%r3) - ld %r1, 24(%r3) - ld %r3, 40(%r3) + PPC64_LR(0) + PPC64_LR(5) + PPC64_LR(4) + PPC64_LR(1) + PPC64_LR(3) bctr #elif defined(__ppc__) Index: libunwind/trunk/src/UnwindRegistersSave.S =================================================================== --- libunwind/trunk/src/UnwindRegistersSave.S +++ libunwind/trunk/src/UnwindRegistersSave.S @@ -246,95 +246,223 @@ // thread_state pointer is in r3 // DEFINE_LIBUNWIND_FUNCTION(unw_getcontext) - std %r0, 16(%r3) + +// store register (GPR) +#define PPC64_STR(n) \ + std %r##n, (8 * (n + 2))(%r3) + + // save GPRs + PPC64_STR(0) mflr %r0 - std %r0, 0(%r3) // store lr as ssr0 - std %r1, 24(%r3) - std %r2, 32(%r3) - std %r3, 40(%r3) - std %r4, 48(%r3) - std %r5, 56(%r3) - std %r6, 64(%r3) - std %r7, 72(%r3) - std %r8, 80(%r3) - std %r9, 88(%r3) - std %r10, 96(%r3) - std %r11, 104(%r3) - std %r12, 112(%r3) - std %r13, 120(%r3) - std %r14, 128(%r3) - std %r15, 136(%r3) - std %r16, 144(%r3) - std %r17, 152(%r3) - std %r18, 160(%r3) - std %r19, 168(%r3) - std %r20, 176(%r3) - std %r21, 184(%r3) - std %r22, 192(%r3) - std %r23, 200(%r3) - std %r24, 208(%r3) - std %r25, 216(%r3) - std %r26, 224(%r3) - std %r27, 232(%r3) - std %r28, 240(%r3) - std %r29, 248(%r3) - std %r30, 256(%r3) - std %r31, 264(%r3) + std %r0, PPC64_OFFS_SRR0(%r3) // store lr as ssr0 + PPC64_STR(1) + PPC64_STR(2) + PPC64_STR(3) + PPC64_STR(4) + PPC64_STR(5) + PPC64_STR(6) + PPC64_STR(7) + PPC64_STR(8) + PPC64_STR(9) + PPC64_STR(10) + PPC64_STR(11) + PPC64_STR(12) + PPC64_STR(13) + PPC64_STR(14) + PPC64_STR(15) + PPC64_STR(16) + PPC64_STR(17) + PPC64_STR(18) + PPC64_STR(19) + PPC64_STR(20) + PPC64_STR(21) + PPC64_STR(22) + PPC64_STR(23) + PPC64_STR(24) + PPC64_STR(25) + PPC64_STR(26) + PPC64_STR(27) + PPC64_STR(28) + PPC64_STR(29) + PPC64_STR(30) + PPC64_STR(31) mfcr %r0 - std %r0, 272(%r3) - + std %r0, PPC64_OFFS_CR(%r3) mfxer %r0 - std %r0, 280(%r3) - + std %r0, PPC64_OFFS_XER(%r3) mflr %r0 - std %r0, 288(%r3) - + std %r0, PPC64_OFFS_LR(%r3) mfctr %r0 - std %r0, 296(%r3) - + std %r0, PPC64_OFFS_CTR(%r3) mfvrsave %r0 - std %r0, 304(%r3) + std %r0, PPC64_OFFS_VRSAVE(%r3) - // save float registers - stfd %f0, 312(%r3) - stfd %f1, 320(%r3) - stfd %f2, 328(%r3) - stfd %f3, 336(%r3) - stfd %f4, 344(%r3) - stfd %f5, 352(%r3) - stfd %f6, 360(%r3) - stfd %f7, 368(%r3) - stfd %f8, 376(%r3) - stfd %f9, 384(%r3) - stfd %f10, 392(%r3) - stfd %f11, 
400(%r3) - stfd %f12, 408(%r3) - stfd %f13, 416(%r3) - stfd %f14, 424(%r3) - stfd %f15, 432(%r3) - stfd %f16, 440(%r3) - stfd %f17, 448(%r3) - stfd %f18, 456(%r3) - stfd %f19, 464(%r3) - stfd %f20, 472(%r3) - stfd %f21, 480(%r3) - stfd %f22, 488(%r3) - stfd %f23, 496(%r3) - stfd %f24, 504(%r3) - stfd %f25, 512(%r3) - stfd %f26, 520(%r3) - stfd %f27, 528(%r3) - stfd %f28, 536(%r3) - stfd %f29, 544(%r3) - stfd %f30, 552(%r3) - stfd %f31, 560(%r3) +#ifdef PPC64_HAS_VMX + // save VS registers + // (note that this also saves floating point registers and V registers, + // because part of VS is mapped to these registers) + + addi %r4, %r3, PPC64_OFFS_FP + +// store VS register +#define PPC64_STVS(n) \ + stxvd2x %vs##n, 0, %r4 ;\ + addi %r4, %r4, 16 + + PPC64_STVS(0) + PPC64_STVS(1) + PPC64_STVS(2) + PPC64_STVS(3) + PPC64_STVS(4) + PPC64_STVS(5) + PPC64_STVS(6) + PPC64_STVS(7) + PPC64_STVS(8) + PPC64_STVS(9) + PPC64_STVS(10) + PPC64_STVS(11) + PPC64_STVS(12) + PPC64_STVS(13) + PPC64_STVS(14) + PPC64_STVS(15) + PPC64_STVS(16) + PPC64_STVS(17) + PPC64_STVS(18) + PPC64_STVS(19) + PPC64_STVS(20) + PPC64_STVS(21) + PPC64_STVS(22) + PPC64_STVS(23) + PPC64_STVS(24) + PPC64_STVS(25) + PPC64_STVS(26) + PPC64_STVS(27) + PPC64_STVS(28) + PPC64_STVS(29) + PPC64_STVS(30) + PPC64_STVS(31) + PPC64_STVS(32) + PPC64_STVS(33) + PPC64_STVS(34) + PPC64_STVS(35) + PPC64_STVS(36) + PPC64_STVS(37) + PPC64_STVS(38) + PPC64_STVS(39) + PPC64_STVS(40) + PPC64_STVS(41) + PPC64_STVS(42) + PPC64_STVS(43) + PPC64_STVS(44) + PPC64_STVS(45) + PPC64_STVS(46) + PPC64_STVS(47) + PPC64_STVS(48) + PPC64_STVS(49) + PPC64_STVS(50) + PPC64_STVS(51) + PPC64_STVS(52) + PPC64_STVS(53) + PPC64_STVS(54) + PPC64_STVS(55) + PPC64_STVS(56) + PPC64_STVS(57) + PPC64_STVS(58) + PPC64_STVS(59) + PPC64_STVS(60) + PPC64_STVS(61) + PPC64_STVS(62) + PPC64_STVS(63) - mffs %f0 - stfd %f0, 568(%r3) +#else - //TODO: save vector registers +// store FP register +#define PPC64_STF(n) \ + stfd %f##n, (PPC64_OFFS_FP + n * 16)(%r3) + // save float registers + PPC64_STF(0) + PPC64_STF(1) + PPC64_STF(2) + PPC64_STF(3) + PPC64_STF(4) + PPC64_STF(5) + PPC64_STF(6) + PPC64_STF(7) + PPC64_STF(8) + PPC64_STF(9) + PPC64_STF(10) + PPC64_STF(11) + PPC64_STF(12) + PPC64_STF(13) + PPC64_STF(14) + PPC64_STF(15) + PPC64_STF(16) + PPC64_STF(17) + PPC64_STF(18) + PPC64_STF(19) + PPC64_STF(20) + PPC64_STF(21) + PPC64_STF(22) + PPC64_STF(23) + PPC64_STF(24) + PPC64_STF(25) + PPC64_STF(26) + PPC64_STF(27) + PPC64_STF(28) + PPC64_STF(29) + PPC64_STF(30) + PPC64_STF(31) + + // save vector registers + + // Use 16-bytes below the stack pointer as an + // aligned buffer to save each vector register. + // Note that the stack pointer is always 16-byte aligned. 
+ subi %r4, %r1, 16 + +#define PPC64_STV_UNALIGNED(n) \ + stvx %v##n, 0, %r4 ;\ + ld %r5, 0(%r4) ;\ + std %r5, (PPC64_OFFS_V + n * 16)(%r3) ;\ + ld %r5, 8(%r4) ;\ + std %r5, (PPC64_OFFS_V + n * 16 + 8)(%r3) + + PPC64_STV_UNALIGNED(0) + PPC64_STV_UNALIGNED(1) + PPC64_STV_UNALIGNED(2) + PPC64_STV_UNALIGNED(3) + PPC64_STV_UNALIGNED(4) + PPC64_STV_UNALIGNED(5) + PPC64_STV_UNALIGNED(6) + PPC64_STV_UNALIGNED(7) + PPC64_STV_UNALIGNED(8) + PPC64_STV_UNALIGNED(9) + PPC64_STV_UNALIGNED(10) + PPC64_STV_UNALIGNED(11) + PPC64_STV_UNALIGNED(12) + PPC64_STV_UNALIGNED(13) + PPC64_STV_UNALIGNED(14) + PPC64_STV_UNALIGNED(15) + PPC64_STV_UNALIGNED(16) + PPC64_STV_UNALIGNED(17) + PPC64_STV_UNALIGNED(18) + PPC64_STV_UNALIGNED(19) + PPC64_STV_UNALIGNED(20) + PPC64_STV_UNALIGNED(21) + PPC64_STV_UNALIGNED(22) + PPC64_STV_UNALIGNED(23) + PPC64_STV_UNALIGNED(24) + PPC64_STV_UNALIGNED(25) + PPC64_STV_UNALIGNED(26) + PPC64_STV_UNALIGNED(27) + PPC64_STV_UNALIGNED(28) + PPC64_STV_UNALIGNED(29) + PPC64_STV_UNALIGNED(30) + PPC64_STV_UNALIGNED(31) + +#endif li %r3, 0 // return UNW_ESUCCESS blr Index: libunwind/trunk/src/assembly.h =================================================================== --- libunwind/trunk/src/assembly.h +++ libunwind/trunk/src/assembly.h @@ -18,6 +18,17 @@ #if defined(__powerpc64__) #define SEPARATOR ; +#define PPC64_OFFS_SRR0 0 +#define PPC64_OFFS_CR 272 +#define PPC64_OFFS_XER 280 +#define PPC64_OFFS_LR 288 +#define PPC64_OFFS_CTR 296 +#define PPC64_OFFS_VRSAVE 304 +#define PPC64_OFFS_FP 312 +#define PPC64_OFFS_V 824 +#ifdef _ARCH_PWR8 +#define PPC64_HAS_VMX +#endif #elif defined(__POWERPC__) || defined(__powerpc__) || defined(__ppc__) #define SEPARATOR @ #elif defined(__arm64__) Index: libunwind/trunk/src/config.h =================================================================== --- libunwind/trunk/src/config.h +++ libunwind/trunk/src/config.h @@ -75,6 +75,10 @@ #define _LIBUNWIND_BUILD_ZERO_COST_APIS #endif +#if defined(__powerpc64__) && defined(_ARCH_PWR8) +#define PPC64_HAS_VMX +#endif + #if defined(NDEBUG) && defined(_LIBUNWIND_IS_BAREMETAL) #define _LIBUNWIND_ABORT(msg) \ do { \
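The renumbered UNW_PPC64_* constants are consumed through the ordinary libunwind cursor API rather than read out of the context directly. A minimal sketch of fetching the saved link register under the new numbering (illustrative only; it assumes the libunwind.h from this tree, and the helper name dumpSavedLR is made up):

  #include <libunwind.h>
  #include <cstdio>

  static void dumpSavedLR(void) {
    unw_context_t ctx;
    unw_cursor_t cursor;
    unw_getcontext(&ctx);             // filled in by the unw_getcontext asm above
    unw_init_local(&cursor, &ctx);
    unw_word_t lr;
    // UNW_PPC64_LR is now 65 (DWARF numbering) instead of the old 64.
    if (unw_get_reg(&cursor, UNW_PPC64_LR, &lr) == UNW_ESUCCESS)
      printf("saved LR: 0x%llx\n", (unsigned long long)lr);
  }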
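Registers_ppc64 keeps all 64 VSX registers in a single _vectorScalarRegisters array because VS0-VS31 alias F0-F31 and VS32-VS63 alias V0-V31. A standalone sketch of the index math behind getVectorRegNum, with the enum values written out as literals for clarity:

  // UNW_PPC64_VS0 == UNW_PPC64_F0 == 32 and UNW_PPC64_VS32 == UNW_PPC64_V0 == 77,
  // so a VS register number maps onto the 64-entry array like this:
  static int vectorSlotFor(int regNum) {
    const int kVS0  = 32;   // UNW_PPC64_VS0 / UNW_PPC64_F0
    const int kVS32 = 77;   // UNW_PPC64_VS32 / UNW_PPC64_V0
    if (regNum >= kVS0 && regNum < kVS0 + 32)
      return regNum - kVS0;           // VS0-VS31 -> slots 0-31 (FP half)
    return regNum - kVS32 + 32;       // VS32-VS63 -> slots 32-63 (Altivec half)
  }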
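The new size and offset constants are mutually consistent: the GPR/SPR block (srr0/srr1, r0-r31, cr, xer, lr, ctr, vrsave) occupies the first 312 bytes of the context and is followed by 64 vector-scalar slots of 16 bytes each. A sketch of the arithmetic using the literal values from __libunwind_config.h and assembly.h above (a hypothetical check, not part of the patch):

  // _LIBUNWIND_CONTEXT_SIZE is counted in 8-byte words, the offsets in bytes.
  static_assert(167 * 8 == 312 + 64 * 16,   // 1336-byte ppc64 context
                "context = 312-byte thread state + 64 x 16-byte VSR slots");
  static_assert(824 == 312 + 32 * 16,       // PPC64_OFFS_V vs. PPC64_OFFS_FP
                "V0-V31 occupy the upper half of the vector-scalar area");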
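The conditional restore macros (PPC64_CLVS* and PPC64_CLV_UNALIGNED*) test one VRSAVE bit per Altivec register before reloading it. VRSAVE uses IBM bit numbering, so V0 corresponds to the most significant of its 32 bits; for VSm with m in 32..63 the same test is applied with n = m - 32. A C++ sketch of the predicate encoded by the andis./andi. masks (illustrative only):

  #include <cstdint>

  // True when VRSAVE marks Vn (n = 0..31) as live: bit (31 - n) counted
  // from the least significant end, i.e. V0 is the most significant bit.
  static bool vrIsLive(uint32_t vrsave, int n) {
    return (vrsave >> (31 - n)) & 1u;
  }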
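getRegister and setRegister treat CR0-CR7 as in-place 4-bit fields of the 32-bit condition register, with CR0 in the most significant nibble; the masked value is returned without shifting it down. For comparison, a shifted extraction would look like this (a variant shown for illustration, not what the patch does):

  #include <cstdint>

  // Extract CR field n (0..7) shifted down to the low 4 bits; the accessors
  // above instead return the field masked but left in its original position.
  static uint32_t crField(uint32_t cr, int n) {
    return (cr >> (28 - 4 * n)) & 0xFu;
  }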