Index: lib/Target/X86/X86CallLowering.cpp =================================================================== --- lib/Target/X86/X86CallLowering.cpp +++ lib/Target/X86/X86CallLowering.cpp @@ -210,6 +210,28 @@ MIRBuilder.buildLoad(ValVReg, Addr, *MMO); } + void assignValueToReg(unsigned ValVReg, unsigned PhysReg, + CCValAssign &VA) override { + markPhysRegUsed(PhysReg); + switch (VA.getLocInfo()) { + default: + MIRBuilder.buildCopy(ValVReg, PhysReg); + break; + case CCValAssign::LocInfo::SExt: + case CCValAssign::LocInfo::ZExt: + case CCValAssign::LocInfo::AExt: { + auto Copy = MIRBuilder.buildCopy(LLT{VA.getLocVT()}, PhysReg); + MIRBuilder.buildTrunc(ValVReg, Copy); + break; + } + } + } + + /// How the physical register gets marked varies between formal + /// parameters (it's a basic-block live-in), and a call instruction + /// (it's an implicit-def of the call instruction). + virtual void markPhysRegUsed(unsigned PhysReg) = 0; + protected: const DataLayout &DL; }; @@ -219,10 +241,8 @@ CCAssignFn *AssignFn) : IncomingValueHandler(MIRBuilder, MRI, AssignFn) {} - void assignValueToReg(unsigned ValVReg, unsigned PhysReg, - CCValAssign &VA) override { + void markPhysRegUsed(unsigned PhysReg) override { MIRBuilder.getMBB().addLiveIn(PhysReg); - MIRBuilder.buildCopy(ValVReg, PhysReg); } }; @@ -231,10 +251,8 @@ CCAssignFn *AssignFn, MachineInstrBuilder &MIB) : IncomingValueHandler(MIRBuilder, MRI, AssignFn), MIB(MIB) {} - void assignValueToReg(unsigned ValVReg, unsigned PhysReg, - CCValAssign &VA) override { + void markPhysRegUsed(unsigned PhysReg) override { MIB.addDef(PhysReg, RegState::Implicit); - MIRBuilder.buildCopy(ValVReg, PhysReg); } protected: Index: test/CodeGen/X86/GlobalISel/add-scalar.ll =================================================================== --- test/CodeGen/X86/GlobalISel/add-scalar.ll +++ test/CodeGen/X86/GlobalISel/add-scalar.ll @@ -48,8 +48,8 @@ define i16 @test_add_i16(i16 %arg1, i16 %arg2) { ; X64-LABEL: test_add_i16: ; X64: # BB#0: -; X64-NEXT: # kill: 
%DI %DI %RDI -; X64-NEXT: # kill: %SI %SI %RSI +; X64-NEXT: # kill: %EDI %EDI %RDI +; X64-NEXT: # kill: %ESI %ESI %RSI ; X64-NEXT: leal (%rsi,%rdi), %eax ; X64-NEXT: # kill: %AX %AX %EAX ; X64-NEXT: retq Index: test/CodeGen/X86/GlobalISel/callingconv.ll =================================================================== --- test/CodeGen/X86/GlobalISel/callingconv.ll +++ test/CodeGen/X86/GlobalISel/callingconv.ll @@ -314,22 +314,28 @@ ; X32-NEXT: pushl %ebx ; X32-NEXT: .Lcfi7: ; X32-NEXT: .cfi_def_cfa_offset 8 -; X32-NEXT: subl $8, %esp +; X32-NEXT: pushl %esi ; X32-NEXT: .Lcfi8: -; X32-NEXT: .cfi_def_cfa_offset 16 +; X32-NEXT: .cfi_def_cfa_offset 12 +; X32-NEXT: pushl %eax ; X32-NEXT: .Lcfi9: +; X32-NEXT: .cfi_def_cfa_offset 16 +; X32-NEXT: .Lcfi10: +; X32-NEXT: .cfi_offset %esi, -12 +; X32-NEXT: .Lcfi11: ; X32-NEXT: .cfi_offset %ebx, -8 ; X32-NEXT: movl 16(%esp), %eax ; X32-NEXT: movb (%eax), %bl -; X32-NEXT: movb %bl, (%esp) +; X32-NEXT: movzbl %bl, %esi +; X32-NEXT: movl %esi, (%esp) ; X32-NEXT: calll take_char ; X32-NEXT: movsbl %bl, %eax ; X32-NEXT: movl %eax, (%esp) ; X32-NEXT: calll take_char -; X32-NEXT: movzbl %bl, %eax -; X32-NEXT: movl %eax, (%esp) +; X32-NEXT: movl %esi, (%esp) ; X32-NEXT: calll take_char -; X32-NEXT: addl $8, %esp +; X32-NEXT: addl $4, %esp +; X32-NEXT: popl %esi ; X32-NEXT: popl %ebx ; X32-NEXT: retl ; @@ -340,13 +346,13 @@ ; X64-NEXT: .cfi_def_cfa_offset 16 ; X64-NEXT: .Lcfi7: ; X64-NEXT: .cfi_offset %rbx, -16 -; X64-NEXT: movb (%rdi), %bl +; X64-NEXT: movb (%rdi), %al +; X64-NEXT: movzbl %al, %ebx ; X64-NEXT: movl %ebx, %edi ; X64-NEXT: callq take_char -; X64-NEXT: movsbl %bl, %ebx -; X64-NEXT: movl %ebx, %edi +; X64-NEXT: movsbl %bl, %edi ; X64-NEXT: callq take_char -; X64-NEXT: movzbl %bl, %edi +; X64-NEXT: movl %ebx, %edi ; X64-NEXT: callq take_char ; X64-NEXT: popq %rbx ; X64-NEXT: retq @@ -362,7 +368,7 @@ ; X32-LABEL: test_variadic_call_1: ; X32: # BB#0: ; X32-NEXT: subl $12, %esp -; X32-NEXT: .Lcfi10: +; X32-NEXT: .Lcfi12: ; 
X32-NEXT: .cfi_def_cfa_offset 16 ; X32-NEXT: movl 16(%esp), %eax ; X32-NEXT: movl 20(%esp), %ecx @@ -396,7 +402,7 @@ ; X32-LABEL: test_variadic_call_2: ; X32: # BB#0: ; X32-NEXT: subl $12, %esp -; X32-NEXT: .Lcfi11: +; X32-NEXT: .Lcfi13: ; X32-NEXT: .cfi_def_cfa_offset 16 ; X32-NEXT: movl 16(%esp), %eax ; X32-NEXT: movl 20(%esp), %ecx Index: test/CodeGen/X86/GlobalISel/ext-x86-64.ll =================================================================== --- test/CodeGen/X86/GlobalISel/ext-x86-64.ll +++ test/CodeGen/X86/GlobalISel/ext-x86-64.ll @@ -6,7 +6,7 @@ define i64 @test_zext_i1(i8 %a) { ; X64-LABEL: test_zext_i1: ; X64: # BB#0: -; X64-NEXT: # kill: %DIL %DIL %RDI +; X64-NEXT: # kill: %EDI %EDI %RDI ; X64-NEXT: andq $1, %rdi ; X64-NEXT: movq %rdi, %rax ; X64-NEXT: retq Index: test/CodeGen/X86/GlobalISel/irtranslator-callingconv.ll =================================================================== --- test/CodeGen/X86/GlobalISel/irtranslator-callingconv.ll +++ test/CodeGen/X86/GlobalISel/irtranslator-callingconv.ll @@ -18,12 +18,18 @@ ; X64-NEXT: isImmutable: true, ; X64: liveins: %ecx, %edi, %edx, %esi, %r8d, %r9d -; X64: [[ARG1:%[0-9]+]](s8) = COPY %edi -; X64-NEXT: %{{[0-9]+}}(s8) = COPY %esi -; X64-NEXT: %{{[0-9]+}}(s8) = COPY %edx -; X64-NEXT: %{{[0-9]+}}(s8) = COPY %ecx -; X64-NEXT: %{{[0-9]+}}(s8) = COPY %r8d -; X64-NEXT: %{{[0-9]+}}(s8) = COPY %r9d +; X64: [[ARG1_TMP:%[0-9]+]](s32) = COPY %edi +; X64: [[ARG1:%[0-9]+]](s8) = G_TRUNC [[ARG1_TMP]](s32) +; X64-NEXT: %{{[0-9]+}}(s32) = COPY %esi +; X64-NEXT: %{{[0-9]+}}(s8) = G_TRUNC %{{[0-9]+}}(s32) +; X64-NEXT: %{{[0-9]+}}(s32) = COPY %edx +; X64-NEXT: %{{[0-9]+}}(s8) = G_TRUNC %{{[0-9]+}}(s32) +; X64-NEXT: %{{[0-9]+}}(s32) = COPY %ecx +; X64-NEXT: %{{[0-9]+}}(s8) = G_TRUNC %{{[0-9]+}}(s32) +; X64-NEXT: %{{[0-9]+}}(s32) = COPY %r8d +; X64-NEXT: %{{[0-9]+}}(s8) = G_TRUNC %{{[0-9]+}}(s32) +; X64-NEXT: %{{[0-9]+}}(s32) = COPY %r9d +; X64-NEXT: %{{[0-9]+}}(s8) = G_TRUNC %{{[0-9]+}}(s32) ; X64-NEXT: 
[[ARG7_ADDR:%[0-9]+]](p0) = G_FRAME_INDEX %fixed-stack.[[STACK0]] ; X64-NEXT: [[ARG7:%[0-9]+]](s8) = G_LOAD [[ARG7_ADDR]](p0) :: (invariant load 1 from %fixed-stack.[[STACK0]], align 0) ; X64-NEXT: [[ARG8_ADDR:%[0-9]+]](p0) = G_FRAME_INDEX %fixed-stack.[[STACK8]] @@ -651,23 +657,24 @@ ; X32-NEXT: %3(p0) = COPY %esp ; X32-NEXT: %4(s32) = G_CONSTANT i32 0 ; X32-NEXT: %5(p0) = G_GEP %3, %4(s32) -; X32-NEXT: G_STORE %2(s8), %5(p0) :: (store 4 into stack, align 0) +; X32-NEXT: %6(s32) = G_ANYEXT %2(s8) +; X32-NEXT: G_STORE %6(s32), %5(p0) :: (store 4 into stack, align 0) ; X32-NEXT: CALLpcrel32 @take_char, csr_32, implicit %esp ; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit %esp ; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp -; X32-NEXT: %6(p0) = COPY %esp -; X32-NEXT: %7(s32) = G_CONSTANT i32 0 -; X32-NEXT: %8(p0) = G_GEP %6, %7(s32) -; X32-NEXT: %9(s32) = G_SEXT %2(s8) -; X32-NEXT: G_STORE %9(s32), %8(p0) :: (store 4 into stack, align 0) +; X32-NEXT: %7(p0) = COPY %esp +; X32-NEXT: %8(s32) = G_CONSTANT i32 0 +; X32-NEXT: %9(p0) = G_GEP %7, %8(s32) +; X32-NEXT: %10(s32) = G_SEXT %2(s8) +; X32-NEXT: G_STORE %10(s32), %9(p0) :: (store 4 into stack, align 0) ; X32-NEXT: CALLpcrel32 @take_char, csr_32, implicit %esp ; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit %esp ; X32-NEXT: ADJCALLSTACKDOWN32 4, 0, 0, implicit-def %esp, implicit-def %eflags, implicit %esp -; X32-NEXT: %10(p0) = COPY %esp -; X32-NEXT: %11(s32) = G_CONSTANT i32 0 -; X32-NEXT: %12(p0) = G_GEP %10, %11(s32) -; X32-NEXT: %13(s32) = G_ZEXT %2(s8) -; X32-NEXT: G_STORE %13(s32), %12(p0) :: (store 4 into stack, align 0) +; X32-NEXT: %11(p0) = COPY %esp +; X32-NEXT: %12(s32) = G_CONSTANT i32 0 +; X32-NEXT: %13(p0) = G_GEP %11, %12(s32) +; X32-NEXT: %14(s32) = G_ZEXT %2(s8) +; X32-NEXT: G_STORE %14(s32), %13(p0) :: (store 4 into stack, align 0) ; X32-NEXT: CALLpcrel32 @take_char, csr_32, 
implicit %esp ; X32-NEXT: ADJCALLSTACKUP32 4, 0, implicit-def %esp, implicit-def %eflags, implicit %esp ; X32-NEXT: RET 0 @@ -675,17 +682,18 @@ ; X64: %0(p0) = COPY %rdi ; X64-NEXT: %1(s8) = G_LOAD %0(p0) :: (load 1 from %ir.addr) ; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp -; X64-NEXT: %edi = COPY %1(s8) +; X64-NEXT: %2(s32) = G_ANYEXT %1(s8) +; X64-NEXT: %edi = COPY %2(s32) ; X64-NEXT: CALL64pcrel32 @take_char, csr_64, implicit %rsp, implicit %edi ; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp ; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp -; X64-NEXT: %2(s32) = G_SEXT %1(s8) -; X64-NEXT: %edi = COPY %2(s32) +; X64-NEXT: %3(s32) = G_SEXT %1(s8) +; X64-NEXT: %edi = COPY %3(s32) ; X64-NEXT: CALL64pcrel32 @take_char, csr_64, implicit %rsp, implicit %edi ; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp ; X64-NEXT: ADJCALLSTACKDOWN64 0, 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp -; X64-NEXT: %3(s32) = G_ZEXT %1(s8) -; X64-NEXT: %edi = COPY %3(s32) +; X64-NEXT: %4(s32) = G_ZEXT %1(s8) +; X64-NEXT: %edi = COPY %4(s32) ; X64-NEXT: CALL64pcrel32 @take_char, csr_64, implicit %rsp, implicit %edi ; X64-NEXT: ADJCALLSTACKUP64 0, 0, implicit-def %rsp, implicit-def %eflags, implicit %rsp ; X64-NEXT: RET 0