diff --git a/compiler-rt/lib/tsan/rtl/tsan_rtl_amd64.S b/compiler-rt/lib/tsan/rtl/tsan_rtl_amd64.S
--- a/compiler-rt/lib/tsan/rtl/tsan_rtl_amd64.S
+++ b/compiler-rt/lib/tsan/rtl/tsan_rtl_amd64.S
@@ -42,6 +42,25 @@
   push %r11
   CFI_ADJUST_CFA_OFFSET(8)
   CFI_REL_OFFSET(%r11, 0)
+  # All XMM registers are caller-saved.
+  sub $0x100, %rsp
+  CFI_ADJUST_CFA_OFFSET(0x100)
+  vmovdqu %xmm0, 0x0(%rsp)
+  vmovdqu %xmm1, 0x10(%rsp)
+  vmovdqu %xmm2, 0x20(%rsp)
+  vmovdqu %xmm3, 0x30(%rsp)
+  vmovdqu %xmm4, 0x40(%rsp)
+  vmovdqu %xmm5, 0x50(%rsp)
+  vmovdqu %xmm6, 0x60(%rsp)
+  vmovdqu %xmm7, 0x70(%rsp)
+  vmovdqu %xmm8, 0x80(%rsp)
+  vmovdqu %xmm9, 0x90(%rsp)
+  vmovdqu %xmm10, 0xa0(%rsp)
+  vmovdqu %xmm11, 0xb0(%rsp)
+  vmovdqu %xmm12, 0xc0(%rsp)
+  vmovdqu %xmm13, 0xd0(%rsp)
+  vmovdqu %xmm14, 0xe0(%rsp)
+  vmovdqu %xmm15, 0xf0(%rsp)
   # Align stack frame.
   push %rbx # non-scratch
   CFI_ADJUST_CFA_OFFSET(8)
@@ -59,6 +78,24 @@
   pop %rbx
   CFI_ADJUST_CFA_OFFSET(-8)
   # Restore scratch registers.
+  vmovdqu 0x0(%rsp), %xmm0
+  vmovdqu 0x10(%rsp), %xmm1
+  vmovdqu 0x20(%rsp), %xmm2
+  vmovdqu 0x30(%rsp), %xmm3
+  vmovdqu 0x40(%rsp), %xmm4
+  vmovdqu 0x50(%rsp), %xmm5
+  vmovdqu 0x60(%rsp), %xmm6
+  vmovdqu 0x70(%rsp), %xmm7
+  vmovdqu 0x80(%rsp), %xmm8
+  vmovdqu 0x90(%rsp), %xmm9
+  vmovdqu 0xa0(%rsp), %xmm10
+  vmovdqu 0xb0(%rsp), %xmm11
+  vmovdqu 0xc0(%rsp), %xmm12
+  vmovdqu 0xd0(%rsp), %xmm13
+  vmovdqu 0xe0(%rsp), %xmm14
+  vmovdqu 0xf0(%rsp), %xmm15
+  add $0x100, %rsp
+  CFI_ADJUST_CFA_OFFSET(-0x100)
   pop %r11
   CFI_ADJUST_CFA_OFFSET(-8)
   pop %r10
@@ -123,6 +160,25 @@
   push %r11
   CFI_ADJUST_CFA_OFFSET(8)
   CFI_REL_OFFSET(%r11, 0)
+  # All XMM registers are caller-saved.
+  sub $0x100, %rsp
+  CFI_ADJUST_CFA_OFFSET(0x100)
+  vmovdqu %xmm0, 0x0(%rsp)
+  vmovdqu %xmm1, 0x10(%rsp)
+  vmovdqu %xmm2, 0x20(%rsp)
+  vmovdqu %xmm3, 0x30(%rsp)
+  vmovdqu %xmm4, 0x40(%rsp)
+  vmovdqu %xmm5, 0x50(%rsp)
+  vmovdqu %xmm6, 0x60(%rsp)
+  vmovdqu %xmm7, 0x70(%rsp)
+  vmovdqu %xmm8, 0x80(%rsp)
+  vmovdqu %xmm9, 0x90(%rsp)
+  vmovdqu %xmm10, 0xa0(%rsp)
+  vmovdqu %xmm11, 0xb0(%rsp)
+  vmovdqu %xmm12, 0xc0(%rsp)
+  vmovdqu %xmm13, 0xd0(%rsp)
+  vmovdqu %xmm14, 0xe0(%rsp)
+  vmovdqu %xmm15, 0xf0(%rsp)
   # Align stack frame.
   push %rbx # non-scratch
   CFI_ADJUST_CFA_OFFSET(8)
@@ -140,6 +196,24 @@
   pop %rbx
   CFI_ADJUST_CFA_OFFSET(-8)
   # Restore scratch registers.
+  vmovdqu 0x0(%rsp), %xmm0
+  vmovdqu 0x10(%rsp), %xmm1
+  vmovdqu 0x20(%rsp), %xmm2
+  vmovdqu 0x30(%rsp), %xmm3
+  vmovdqu 0x40(%rsp), %xmm4
+  vmovdqu 0x50(%rsp), %xmm5
+  vmovdqu 0x60(%rsp), %xmm6
+  vmovdqu 0x70(%rsp), %xmm7
+  vmovdqu 0x80(%rsp), %xmm8
+  vmovdqu 0x90(%rsp), %xmm9
+  vmovdqu 0xa0(%rsp), %xmm10
+  vmovdqu 0xb0(%rsp), %xmm11
+  vmovdqu 0xc0(%rsp), %xmm12
+  vmovdqu 0xd0(%rsp), %xmm13
+  vmovdqu 0xe0(%rsp), %xmm14
+  vmovdqu 0xf0(%rsp), %xmm15
+  add $0x100, %rsp
+  CFI_ADJUST_CFA_OFFSET(-0x100)
   pop %r11
   CFI_ADJUST_CFA_OFFSET(-8)
   pop %r10
diff --git a/compiler-rt/test/sanitizer_common/TestCases/frexp.cpp b/compiler-rt/test/sanitizer_common/TestCases/frexp.cpp
new file mode 100644
--- /dev/null
+++ b/compiler-rt/test/sanitizer_common/TestCases/frexp.cpp
@@ -0,0 +1,20 @@
+// RUN: %clangxx -O2 %s -o %t && %run %t 2>&1 | FileCheck %s
+
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+int main() {
+  for (int i = 0; i < 10000; i++) {
+    volatile double x = 10;
+    int exp = 0;
+    double y = frexp(x, &exp);
+    if (y != 0.625 || exp != 4) {
+      printf("i=%d y=%lf exp=%d\n", i, y, exp);
+      exit(1);
+    }
+  }
+  fprintf(stderr, "DONE\n");
+  // CHECK: DONE
+  return 0;
+}