# Changeset View

Changeset View

# Standalone View

Standalone View

# compiler-rt/trunk/lib/xray/xray_trampoline_x86.S

Show All 18 Lines | 14 | //===----------------------------------------------------------------------===// | |||
---|---|---|---|---|---|

19 | .align 16, 0x90 | 19 | .align 16, 0x90 | ||

20 | .type __xray_FunctionEntry,@function | 20 | .type __xray_FunctionEntry,@function | ||

21 | 21 | | |||

22 | __xray_FunctionEntry: | 22 | __xray_FunctionEntry: | ||

23 | .cfi_startproc | 23 | .cfi_startproc | ||

24 | // Save caller provided registers before doing any actual work. | 24 | // Save caller provided registers before doing any actual work. | ||

25 | pushq %rbp | 25 | pushq %rbp | ||

26 | .cfi_def_cfa_offset 16 | 26 | .cfi_def_cfa_offset 16 | ||

27 | subq $72, %rsp | 27 | subq $200, %rsp | ||

28 | movupd %xmm0, 184(%rsp) | ||||

29 | movupd %xmm1, 168(%rsp) | ||||

30 | movupd %xmm2, 152(%rsp) | ||||

31 | movupd %xmm3, 136(%rsp) | ||||

32 | movupd %xmm4, 120(%rsp) | ||||

33 | movupd %xmm5, 104(%rsp) | ||||

34 | movupd %xmm6, 88(%rsp) | ||||

35 | movupd %xmm7, 72(%rsp) | ||||

28 | movq %rdi, 64(%rsp) | 36 | movq %rdi, 64(%rsp) | ||

29 | movq %rax, 56(%rsp) | 37 | movq %rax, 56(%rsp) | ||

30 | movq %rdx, 48(%rsp) | 38 | movq %rdx, 48(%rsp) | ||

31 | movq %rsi, 40(%rsp) | 39 | movq %rsi, 40(%rsp) | ||

32 | movq %rcx, 32(%rsp) | 40 | movq %rcx, 32(%rsp) | ||

33 | movq %r8, 24(%rsp) | 41 | movq %r8, 24(%rsp) | ||

34 | movq %r9, 16(%rsp) | 42 | movq %r9, 16(%rsp) | ||

35 | 43 | | |||

36 | // de-mangled, that's __xray::XRayPatchedFunction, and we're doing an acquire | 44 | // de-mangled, that's __xray::XRayPatchedFunction, and we're doing an acquire | ||

37 | // load (on x86 is a normal mov instruction). | 45 | // load (on x86 is a normal mov instruction). | ||

38 | movq _ZN6__xray19XRayPatchedFunctionE(%rip), %rax | 46 | movq _ZN6__xray19XRayPatchedFunctionE(%rip), %rax | ||

39 | testq %rax, %rax | 47 | testq %rax, %rax | ||

40 | je .Ltmp0 | 48 | je .Ltmp0 | ||

41 | 49 | | |||

42 | // assume that %r10d has the function id. | 50 | // assume that %r10d has the function id. | ||

43 | movl %r10d, %edi | 51 | movl %r10d, %edi | ||

44 | xor %esi,%esi | 52 | xor %esi,%esi | ||

45 | callq *%rax | 53 | callq *%rax | ||

46 | .Ltmp0: | 54 | .Ltmp0: | ||

47 | // restore the registers | 55 | // restore the registers | ||

56 | movupd 184(%rsp), %xmm0 | ||||

57 | movupd 168(%rsp), %xmm1 | ||||

58 | movupd 152(%rsp), %xmm2 | ||||

59 | movupd 136(%rsp), %xmm3 | ||||

60 | movupd 120(%rsp), %xmm4 | ||||

61 | movupd 104(%rsp), %xmm5 | ||||

62 | movupd 88(%rsp), %xmm6 | ||||

63 | movupd 72(%rsp), %xmm7 | ||||

48 | movq 64(%rsp), %rdi | 64 | movq 64(%rsp), %rdi | ||

49 | movq 56(%rsp), %rax | 65 | movq 56(%rsp), %rax | ||

50 | movq 48(%rsp), %rdx | 66 | movq 48(%rsp), %rdx | ||

51 | movq 40(%rsp), %rsi | 67 | movq 40(%rsp), %rsi | ||

52 | movq 32(%rsp), %rcx | 68 | movq 32(%rsp), %rcx | ||

53 | movq 24(%rsp), %r8 | 69 | movq 24(%rsp), %r8 | ||

54 | movq 16(%rsp), %r9 | 70 | movq 16(%rsp), %r9 | ||

55 | addq $72, %rsp | 71 | addq $200, %rsp | ||

56 | popq %rbp | 72 | popq %rbp | ||

57 | retq | 73 | retq | ||

58 | .Ltmp1: | 74 | .Ltmp1: | ||

59 | .size __xray_FunctionEntry, .Ltmp1-__xray_FunctionEntry | 75 | .size __xray_FunctionEntry, .Ltmp1-__xray_FunctionEntry | ||

60 | .cfi_endproc | 76 | .cfi_endproc | ||

61 | 77 | | |||

62 | .globl __xray_FunctionExit | 78 | .globl __xray_FunctionExit | ||

63 | .align 16, 0x90 | 79 | .align 16, 0x90 | ||

64 | .type __xray_FunctionExit,@function | 80 | .type __xray_FunctionExit,@function | ||

65 | __xray_FunctionExit: | 81 | __xray_FunctionExit: | ||

66 | .cfi_startproc | 82 | .cfi_startproc | ||

67 | // Save the important registers first. Since we're assuming that this | 83 | // Save the important registers first. Since we're assuming that this | ||

68 | // function is only jumped into, we only preserve the registers for | 84 | // function is only jumped into, we only preserve the registers for | ||

69 | // returning. | 85 | // returning. | ||

70 | // FIXME: Figure out whether this is sufficient. | | |||

71 | pushq %rbp | 86 | pushq %rbp | ||

72 | .cfi_def_cfa_offset 16 | 87 | .cfi_def_cfa_offset 16 | ||

73 | subq $24, %rsp | 88 | subq $56, %rsp | ||

74 | .cfi_def_cfa_offset 32 | 89 | .cfi_def_cfa_offset 32 | ||

90 | movupd %xmm0, 40(%rsp) | ||||

91 | movupd %xmm1, 24(%rsp) | ||||

75 | movq %rax, 16(%rsp) | 92 | movq %rax, 16(%rsp) | ||

76 | movq %rdx, 8(%rsp) | 93 | movq %rdx, 8(%rsp) | ||

77 | movq _ZN6__xray19XRayPatchedFunctionE(%rip), %rax | 94 | movq _ZN6__xray19XRayPatchedFunctionE(%rip), %rax | ||

78 | testq %rax,%rax | 95 | testq %rax,%rax | ||

79 | je .Ltmp2 | 96 | je .Ltmp2 | ||

80 | 97 | | |||

81 | movl %r10d, %edi | 98 | movl %r10d, %edi | ||

82 | movl $1, %esi | 99 | movl $1, %esi | ||

83 | callq *%rax | 100 | callq *%rax | ||

84 | .Ltmp2: | 101 | .Ltmp2: | ||

85 | // Restore the important registers. | 102 | // Restore the important registers. | ||

103 | movupd 40(%rsp), %xmm0 | ||||

104 | movupd 24(%rsp), %xmm1 | ||||

86 | movq 16(%rsp), %rax | 105 | movq 16(%rsp), %rax | ||

87 | movq 8(%rsp), %rdx | 106 | movq 8(%rsp), %rdx | ||

88 | addq $24, %rsp | 107 | addq $56, %rsp | ||

89 | popq %rbp | 108 | popq %rbp | ||

90 | retq | 109 | retq | ||

91 | .Ltmp3: | 110 | .Ltmp3: | ||

92 | .size __xray_FunctionExit, .Ltmp3-__xray_FunctionExit | 111 | .size __xray_FunctionExit, .Ltmp3-__xray_FunctionExit | ||

93 | .cfi_endproc | 112 | .cfi_endproc |