Index: llvm/trunk/include/llvm/CodeGen/GlobalISel/MachineIRBuilder.h
===================================================================
--- llvm/trunk/include/llvm/CodeGen/GlobalISel/MachineIRBuilder.h
+++ llvm/trunk/include/llvm/CodeGen/GlobalISel/MachineIRBuilder.h
@@ -521,6 +521,18 @@
   MachineInstrBuilder buildLoad(unsigned Res, unsigned Addr,
                                 MachineMemOperand &MMO);
 
+  /// Build and insert `Res = <opcode> Addr, MMO`.
+  ///
+  /// Loads the value stored at \p Addr. Puts the result in \p Res.
+  ///
+  /// \pre setBasicBlock or setMI must have been called.
+  /// \pre \p Res must be a generic virtual register.
+  /// \pre \p Addr must be a generic virtual register with pointer type.
+  ///
+  /// \return a MachineInstrBuilder for the newly created instruction.
+  MachineInstrBuilder buildLoadInstr(unsigned Opcode, unsigned Res,
+                                     unsigned Addr, MachineMemOperand &MMO);
+
   /// Build and insert `G_STORE Val, Addr, MMO`.
   ///
   /// Stores the value \p Val to \p Addr.
Index: llvm/trunk/include/llvm/Support/TargetOpcodes.def
===================================================================
--- llvm/trunk/include/llvm/Support/TargetOpcodes.def
+++ llvm/trunk/include/llvm/Support/TargetOpcodes.def
@@ -265,9 +265,15 @@
 /// COPY is the relevant instruction.
 HANDLE_TARGET_OPCODE(G_BITCAST)
 
-/// Generic load.
+/// Generic load (including anyext load)
 HANDLE_TARGET_OPCODE(G_LOAD)
 
+/// Generic signext load
+HANDLE_TARGET_OPCODE(G_SEXTLOAD)
+
+/// Generic zeroext load
+HANDLE_TARGET_OPCODE(G_ZEXTLOAD)
+
 /// Generic store.
 HANDLE_TARGET_OPCODE(G_STORE)
 
Index: llvm/trunk/include/llvm/Target/GenericOpcodes.td
===================================================================
--- llvm/trunk/include/llvm/Target/GenericOpcodes.td
+++ llvm/trunk/include/llvm/Target/GenericOpcodes.td
@@ -482,6 +482,22 @@
   let mayLoad = 1;
 }
 
+// Generic sign-extended load. Expects a MachineMemOperand in addition to explicit operands.
+def G_SEXTLOAD : GenericInstruction { + let OutOperandList = (outs type0:$dst); + let InOperandList = (ins ptype1:$addr); + let hasSideEffects = 0; + let mayLoad = 1; +} + +// Generic zero-extended load. Expects a MachineMemOperand in addition to explicit operands. +def G_ZEXTLOAD : GenericInstruction { + let OutOperandList = (outs type0:$dst); + let InOperandList = (ins ptype1:$addr); + let hasSideEffects = 0; + let mayLoad = 1; +} + // Generic store. Expects a MachineMemOperand in addition to explicit operands. def G_STORE : GenericInstruction { let OutOperandList = (outs); Index: llvm/trunk/lib/CodeGen/GlobalISel/LegalizerHelper.cpp =================================================================== --- llvm/trunk/lib/CodeGen/GlobalISel/LegalizerHelper.cpp +++ llvm/trunk/lib/CodeGen/GlobalISel/LegalizerHelper.cpp @@ -732,17 +732,19 @@ MI.eraseFromParent(); return Legalized; } - case TargetOpcode::G_LOAD: { + case TargetOpcode::G_LOAD: // For some types like i24, we might try to widen to i32. To properly handle // this we should be using a dedicated extending load, until then avoid // trying to legalize. 
if (alignTo(MRI.getType(MI.getOperand(0).getReg()).getSizeInBits(), 8) != WideTy.getSizeInBits()) return UnableToLegalize; - + LLVM_FALLTHROUGH; + case TargetOpcode::G_SEXTLOAD: + case TargetOpcode::G_ZEXTLOAD: { unsigned DstExt = MRI.createGenericVirtualRegister(WideTy); - MIRBuilder.buildLoad(DstExt, MI.getOperand(1).getReg(), - **MI.memoperands_begin()); + MIRBuilder.buildLoadInstr(MI.getOpcode(), DstExt, MI.getOperand(1).getReg(), + **MI.memoperands_begin()); MIRBuilder.buildTrunc(MI.getOperand(0).getReg(), DstExt); MI.eraseFromParent(); return Legalized; @@ -1030,6 +1032,44 @@ MI.eraseFromParent(); return Legalized; } + case TargetOpcode::G_LOAD: + case TargetOpcode::G_SEXTLOAD: + case TargetOpcode::G_ZEXTLOAD: { + // Lower to a memory-width G_LOAD and a G_SEXT/G_ZEXT/G_ANYEXT + unsigned DstReg = MI.getOperand(0).getReg(); + unsigned PtrReg = MI.getOperand(1).getReg(); + LLT DstTy = MRI.getType(DstReg); + auto &MMO = **MI.memoperands_begin(); + + if (DstTy.getSizeInBits() == MMO.getSize() /* in bytes */ * 8) { + MIRBuilder.buildLoad(DstReg, PtrReg, MMO); + MI.eraseFromParent(); + return Legalized; + } + + if (DstTy.isScalar()) { + unsigned TmpReg = MRI.createGenericVirtualRegister( + LLT::scalar(MMO.getSize() /* in bytes */ * 8)); + MIRBuilder.buildLoad(TmpReg, PtrReg, MMO); + switch (MI.getOpcode()) { + default: + llvm_unreachable("Unexpected opcode"); + case TargetOpcode::G_LOAD: + MIRBuilder.buildAnyExt(DstReg, TmpReg); + break; + case TargetOpcode::G_SEXTLOAD: + MIRBuilder.buildSExt(DstReg, TmpReg); + break; + case TargetOpcode::G_ZEXTLOAD: + MIRBuilder.buildZExt(DstReg, TmpReg); + break; + } + MI.eraseFromParent(); + return Legalized; + } + + return UnableToLegalize; + } } } Index: llvm/trunk/lib/CodeGen/GlobalISel/MachineIRBuilder.cpp =================================================================== --- llvm/trunk/lib/CodeGen/GlobalISel/MachineIRBuilder.cpp +++ llvm/trunk/lib/CodeGen/GlobalISel/MachineIRBuilder.cpp @@ -278,10 +278,16 @@ 
MachineInstrBuilder MachineIRBuilderBase::buildLoad(unsigned Res, unsigned Addr, MachineMemOperand &MMO) { + return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO); +} + +MachineInstrBuilder +MachineIRBuilderBase::buildLoadInstr(unsigned Opcode, unsigned Res, + unsigned Addr, MachineMemOperand &MMO) { assert(getMRI()->getType(Res).isValid() && "invalid operand type"); assert(getMRI()->getType(Addr).isPointer() && "invalid operand type"); - return buildInstr(TargetOpcode::G_LOAD) + return buildInstr(Opcode) .addDef(Res) .addUse(Addr) .addMemOperand(&MMO); Index: llvm/trunk/lib/Target/AArch64/AArch64LegalizerInfo.cpp =================================================================== --- llvm/trunk/lib/Target/AArch64/AArch64LegalizerInfo.cpp +++ llvm/trunk/lib/Target/AArch64/AArch64LegalizerInfo.cpp @@ -135,6 +135,9 @@ .maxScalarIf(typeInSet(1, {s64}), 0, s32) .widenScalarToNextPow2(0); + getActionDefinitionsBuilder({G_SEXTLOAD, G_ZEXTLOAD}) + .lower(); + getActionDefinitionsBuilder({G_LOAD, G_STORE}) .legalForTypesWithMemSize({{s8, p0, 8}, {s16, p0, 16}, @@ -147,6 +150,9 @@ .unsupportedIfMemSizeNotPow2() .clampScalar(0, s8, s64) .widenScalarToNextPow2(0) + .lowerIf([=](const LegalityQuery &Query) { + return Query.Types[0].getSizeInBits() != Query.MMODescrs[0].Size * 8; + }) .clampNumElements(0, v2s32, v2s32); // Constants Index: llvm/trunk/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir =================================================================== --- llvm/trunk/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir +++ llvm/trunk/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir @@ -0,0 +1,25 @@ +# RUN: llc -O0 -run-pass=legalizer -global-isel %s -o - -verify-machineinstrs | FileCheck %s + +--- | + target datalayout = "e-m:o-i64:64-i128:128-n32:64-S128" + target triple = "aarch64--" + define void @test_extload(i8* %addr) { + entry: + ret void + } +... 
+
+---
+name: test_extload
+body: |
+  bb.0.entry:
+    liveins: $x0
+    ; CHECK-LABEL: name: test_extload
+    ; CHECK: [[T0:%[0-9]+]]:_(p0) = COPY $x0
+    ; CHECK: [[T1:%[0-9]+]]:_(s8) = G_LOAD [[T0]](p0) :: (load 1 from %ir.addr)
+    ; CHECK: [[T2:%[0-9]+]]:_(s32) = G_ANYEXT [[T1]](s8)
+    ; CHECK: $w0 = COPY [[T2]](s32)
+    %0:_(p0) = COPY $x0
+    %1:_(s32) = G_LOAD %0 :: (load 1 from %ir.addr)
+    $w0 = COPY %1
+...
Index: llvm/trunk/test/CodeGen/AArch64/GlobalISel/legalize-sextload.mir
===================================================================
--- llvm/trunk/test/CodeGen/AArch64/GlobalISel/legalize-sextload.mir
+++ llvm/trunk/test/CodeGen/AArch64/GlobalISel/legalize-sextload.mir
@@ -0,0 +1,25 @@
+# RUN: llc -O0 -run-pass=legalizer -global-isel %s -o - -verify-machineinstrs | FileCheck %s
+
+--- |
+  target datalayout = "e-m:o-i64:64-i128:128-n32:64-S128"
+  target triple = "aarch64--"
+  define void @test_sextload(i8* %addr) {
+  entry:
+    ret void
+  }
+...
+
+---
+name: test_sextload
+body: |
+  bb.0.entry:
+    liveins: $x0
+    ; CHECK-LABEL: name: test_sextload
+    ; CHECK: [[T0:%[0-9]+]]:_(p0) = COPY $x0
+    ; CHECK: [[T1:%[0-9]+]]:_(s8) = G_LOAD [[T0]](p0) :: (load 1 from %ir.addr)
+    ; CHECK: [[T2:%[0-9]+]]:_(s32) = G_SEXT [[T1]](s8)
+    ; CHECK: $w0 = COPY [[T2]](s32)
+    %0:_(p0) = COPY $x0
+    %1:_(s32) = G_SEXTLOAD %0 :: (load 1 from %ir.addr)
+    $w0 = COPY %1
+...
Index: llvm/trunk/test/CodeGen/AArch64/GlobalISel/legalize-zextload.mir
===================================================================
--- llvm/trunk/test/CodeGen/AArch64/GlobalISel/legalize-zextload.mir
+++ llvm/trunk/test/CodeGen/AArch64/GlobalISel/legalize-zextload.mir
@@ -0,0 +1,25 @@
+# RUN: llc -O0 -run-pass=legalizer -global-isel %s -o - -verify-machineinstrs | FileCheck %s
+
+--- |
+  target datalayout = "e-m:o-i64:64-i128:128-n32:64-S128"
+  target triple = "aarch64--"
+  define void @test_zextload(i8* %addr) {
+  entry:
+    ret void
+  }
+...
+
+---
+name: test_zextload
+body: |
+  bb.0.entry:
+    liveins: $x0
+    ; CHECK-LABEL: name: test_zextload
+    ; CHECK: [[T0:%[0-9]+]]:_(p0) = COPY $x0
+    ; CHECK: [[T1:%[0-9]+]]:_(s8) = G_LOAD [[T0]](p0) :: (load 1 from %ir.addr)
+    ; CHECK: [[T2:%[0-9]+]]:_(s32) = G_ZEXT [[T1]](s8)
+    ; CHECK: $w0 = COPY [[T2]](s32)
+    %0:_(p0) = COPY $x0
+    %1:_(s32) = G_ZEXTLOAD %0 :: (load 1 from %ir.addr)
+    $w0 = COPY %1
+...