Index: include/llvm/IR/IntrinsicsX86.td
===================================================================
--- include/llvm/IR/IntrinsicsX86.td
+++ include/llvm/IR/IntrinsicsX86.td
@@ -3553,6 +3553,35 @@
 }
 
 //===----------------------------------------------------------------------===//
+// XSAVE
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+  def int_x86_xsave :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xsave64 :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xrstor :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xrstor64 :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xsaveopt :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xsaveopt64 :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xrstors :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xrstors64 :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xsavec :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xsavec64 :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xsaves :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+  def int_x86_xsaves64 :
+      Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty, llvm_i32_ty], []>;
+}
+
+//===----------------------------------------------------------------------===//
 // Half float conversion
 
 let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
Index: lib/Support/Host.cpp
===================================================================
--- lib/Support/Host.cpp
+++ lib/Support/Host.cpp
@@ -769,6 +769,7 @@
   Features["movbe"] = (ECX >> 22) & 1;
   Features["popcnt"] = (ECX >> 23) & 1;
   Features["aes"] = (ECX >> 25) & 1;
+  Features["xsave"] = (ECX >> 26) & 1;
   Features["rdrnd"] = (ECX >> 30) & 1;
 
   // If CPUID indicates support for XSAVE, XRESTORE and AVX, and XGETBV
@@ -819,6 +820,16 @@
   Features["avx512bw"] = HasLeaf7 && ((EBX >> 30) & 1) && HasAVX512Save;
   Features["avx512vl"] = HasLeaf7 && ((EBX >> 31) & 1) && HasAVX512Save;
+
+  bool HasLeafD = MaxLevel >= 0xd &&
+                  !GetX86CpuIDAndInfoEx(0xd, 0x1, &EAX, &EBX, &ECX, &EDX);
+
+  // FIXME: Should these features be conditional on the "xsave" feature?
+ Features["xsaveopt"] = Features["xsave"] && HasLeafd && ((ECX >> 0) & 1); + Features["xsavec"] = Features["xsave"] && HasLeafd && ((ECX >> 1) & 1); + Features["xg1"] = Features["xsave"] && HasLeafd && ((ECX >> 2) & 1); + Features["xsaves"] = Features["xsave"] && HasLeafd && ((ECX >> 3) & 1); + return true; } #elif defined(__linux__) && (defined(__arm__) || defined(__aarch64__)) Index: lib/Target/X86/X86InstrInfo.td =================================================================== --- lib/Target/X86/X86InstrInfo.td +++ lib/Target/X86/X86InstrInfo.td @@ -2634,7 +2634,9 @@ def : MnemonicAlias<"xsaveq", "xsave64", "att">; def : MnemonicAlias<"xrstorq", "xrstor64", "att">; def : MnemonicAlias<"xsaveoptq", "xsaveopt64", "att">; - +def : MnemonicAlias<"xrstorsq", "xrstors64", "att">; +def : MnemonicAlias<"xsavecq", "xsavec64", "att">; +def : MnemonicAlias<"xsavesq", "xsaves64", "att">; class CondCodeAlias Index: lib/Target/X86/X86InstrSystem.td =================================================================== --- lib/Target/X86/X86InstrSystem.td +++ lib/Target/X86/X86InstrSystem.td @@ -484,32 +484,43 @@ let Uses = [EDX, EAX, ECX] in def XSETBV : I<0x01, MRM_D1, (outs), (ins), "xsetbv", []>, TB; -let Uses = [RDX, RAX] in { - def XSAVE : I<0xAE, MRM4m, (outs opaque512mem:$dst), (ins), - "xsave\t$dst", []>, TB; - def XSAVE64 : RI<0xAE, MRM4m, (outs opaque512mem:$dst), (ins), - "xsave64\t$dst", []>, TB, Requires<[In64BitMode]>; +let Uses = [EDX, EAX] in { + def XSAVE : I<0xAE, MRM4m, (outs), (ins opaque512mem:$dst), + "xsave\t$dst", + [(int_x86_xsave addr:$dst, EDX, EAX)]>, TB; + def XSAVE64 : RI<0xAE, MRM4m, (outs), (ins opaque512mem:$dst), + "xsave64\t$dst", + [(int_x86_xsave64 addr:$dst, EDX, EAX)]>, TB, Requires<[In64BitMode]>; def XRSTOR : I<0xAE, MRM5m, (outs), (ins opaque512mem:$dst), - "xrstor\t$dst", []>, TB; + "xrstor\t$dst", + [(int_x86_xrstor addr:$dst, EDX, EAX)]>, TB; def XRSTOR64 : RI<0xAE, MRM5m, (outs), (ins opaque512mem:$dst), - "xrstor64\t$dst", []>, TB, Requires<[In64BitMode]>; - def XSAVEOPT : I<0xAE, MRM6m, (outs opaque512mem:$dst), (ins), - "xsaveopt\t$dst", []>, PS; - def XSAVEOPT64 : RI<0xAE, MRM6m, (outs opaque512mem:$dst), (ins), - "xsaveopt64\t$dst", []>, PS, Requires<[In64BitMode]>; - + "xrstor64\t$dst", + [(int_x86_xrstor64 addr:$dst, EDX, EAX)]>, TB, Requires<[In64BitMode]>; + def XSAVEOPT : I<0xAE, MRM6m, (outs), (ins opaque512mem:$dst), + "xsaveopt\t$dst", + [(int_x86_xsaveopt addr:$dst, EDX, EAX)]>, TB; + def XSAVEOPT64 : RI<0xAE, MRM6m, (outs), (ins opaque512mem:$dst), + "xsaveopt64\t$dst", + [(int_x86_xsaveopt64 addr:$dst, EDX, EAX)]>, TB, Requires<[In64BitMode]>; def XRSTORS : I<0xC7, MRM3m, (outs), (ins opaque512mem:$dst), - "xrstors\t$dst", []>, TB; + "xrstors\t$dst", + [(int_x86_xrstors addr:$dst, EDX, EAX)]>, TB; def XRSTORS64 : RI<0xC7, MRM3m, (outs), (ins opaque512mem:$dst), - "xrstors64\t$dst", []>, TB, Requires<[In64BitMode]>; - def XSAVEC : I<0xC7, MRM4m, (outs opaque512mem:$dst), (ins), - "xsavec\t$dst", []>, TB; - def XSAVEC64 : RI<0xC7, MRM4m, (outs opaque512mem:$dst), (ins), - "xsavec64\t$dst", []>, TB, Requires<[In64BitMode]>; - def XSAVES : I<0xC7, MRM5m, (outs opaque512mem:$dst), (ins), - "xsaves\t$dst", []>, TB; - def XSAVES64 : RI<0xC7, MRM5m, (outs opaque512mem:$dst), (ins), - "xsaves64\t$dst", []>, TB, Requires<[In64BitMode]>; + "xrstors64\t$dst", + [(int_x86_xrstors64 addr:$dst, EDX, EAX)]>, TB, Requires<[In64BitMode]>; + def XSAVEC : I<0xC7, MRM4m, (outs), (ins opaque512mem:$dst), + "xsavec\t$dst", + [(int_x86_xsavec addr:$dst, EDX, 
+  def XSAVEC64 : RI<0xC7, MRM4m, (outs), (ins opaque512mem:$dst),
+                "xsavec64\t$dst",
+                [(int_x86_xsavec64 addr:$dst, EDX, EAX)]>, TB, Requires<[In64BitMode]>;
+  def XSAVES : I<0xC7, MRM5m, (outs), (ins opaque512mem:$dst),
+              "xsaves\t$dst",
+              [(int_x86_xsaves addr:$dst, EDX, EAX)]>, TB;
+  def XSAVES64 : RI<0xC7, MRM5m, (outs), (ins opaque512mem:$dst),
+                "xsaves64\t$dst",
+                [(int_x86_xsaves64 addr:$dst, EDX, EAX)]>, TB, Requires<[In64BitMode]>;
 }
 } // SchedRW
Index: test/CodeGen/X86/system-intrinsics-64.ll
===================================================================
--- test/CodeGen/X86/system-intrinsics-64.ll
+++ test/CodeGen/X86/system-intrinsics-64.ll
@@ -31,3 +31,100 @@
   ret void;
 }
 declare void @llvm.x86.fxrstor64(i8*)
+
+
+define void @test_xsave(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xsave
+; CHECK: xsave
+  call void @llvm.x86.xsave(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xsave(i8*, i32, i32)
+
+define void @test_xsave64(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xsave64
+; CHECK: xsave64
+  call void @llvm.x86.xsave64(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xsave64(i8*, i32, i32)
+
+define void @test_xrstor(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xrstor
+; CHECK: xrstor
+  call void @llvm.x86.xrstor(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xrstor(i8*, i32, i32)
+
+define void @test_xrstor64(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xrstor64
+; CHECK: xrstor64
+  call void @llvm.x86.xrstor64(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xrstor64(i8*, i32, i32)
+
+define void @test_xsaveopt(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xsaveopt
+; CHECK: xsaveopt
+  call void @llvm.x86.xsaveopt(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xsaveopt(i8*, i32, i32)
+
+define void @test_xsaveopt64(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xsaveopt64
+; CHECK: xsaveopt64
+  call void @llvm.x86.xsaveopt64(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xsaveopt64(i8*, i32, i32)
+
+define void @test_xrstors(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xrstors
+; CHECK: xrstors
+  call void @llvm.x86.xrstors(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xrstors(i8*, i32, i32)
+
+define void @test_xrstors64(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xrstors64
+; CHECK: xrstors64
+  call void @llvm.x86.xrstors64(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xrstors64(i8*, i32, i32)
+
+define void @test_xsavec(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xsavec
+; CHECK: xsavec
+  call void @llvm.x86.xsavec(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xsavec(i8*, i32, i32)
+
+define void @test_xsavec64(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xsavec64
+; CHECK: xsavec64
+  call void @llvm.x86.xsavec64(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xsavec64(i8*, i32, i32)
+
+define void @test_xsaves(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xsaves
+; CHECK: xsaves
+  call void @llvm.x86.xsaves(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xsaves(i8*, i32, i32)
+
+define void @test_xsaves64(i8* %ptr, i32 %hi, i32 %lo) {
+; CHECK-LABEL: test_xsaves64
+; CHECK: xsaves64
+  call void @llvm.x86.xsaves64(i8* %ptr, i32 %hi, i32 %lo)
+  ret void;
+}
+declare void @llvm.x86.xsaves64(i8*, i32, i32)
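
---

For reference: each new intrinsic takes the XSAVE requested-feature bitmap already split into its EDX and EAX halves, matching the instructions' implicit register operands. A front end holding the mask as a single 64-bit value would split it before the call. A minimal IR sketch follows; the @save_state wrapper and its i64 %mask parameter are illustrative only and not part of this patch:

define void @save_state(i8* %buf, i64 %mask) {
  ; Split the 64-bit requested-feature mask into the EDX:EAX
  ; halves the intrinsic expects.
  %hi64 = lshr i64 %mask, 32
  %hi = trunc i64 %hi64 to i32
  %lo = trunc i64 %mask to i32
  call void @llvm.x86.xsave(i8* %buf, i32 %hi, i32 %lo)
  ret void
}
declare void @llvm.x86.xsave(i8*, i32, i32)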