diff --git a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
--- a/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
+++ b/llvm/lib/Target/RISCV/GISel/RISCVLegalizerInfo.cpp
@@ -11,6 +11,8 @@
 //===----------------------------------------------------------------------===//
 
 #include "RISCVLegalizerInfo.h"
+#include "RISCVSubtarget.h"
+#include "llvm/CodeGen/GlobalISel/LegalizerHelper.h"
 #include "llvm/CodeGen/TargetOpcodes.h"
 #include "llvm/CodeGen/ValueTypes.h"
 #include "llvm/IR/DerivedTypes.h"
@@ -19,5 +21,60 @@
 using namespace llvm;
 
 RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST) {
+  const LLT s1 = LLT::scalar(1);
+  const LLT s32 = LLT::scalar(32);
+  const LLT s64 = LLT::scalar(64);
+  const LLT XLenLLT = ST.is64Bit() ? s64 : s32;
+
+  using namespace TargetOpcode;
+
+  getActionDefinitionsBuilder({G_ADD, G_SUB})
+      .legalFor({XLenLLT})
+      .clampScalar(0, XLenLLT, XLenLLT);
+
+  getActionDefinitionsBuilder({G_SHL, G_ASHR, G_LSHR})
+      .legalFor({{XLenLLT, XLenLLT}})
+      .clampScalar(0, XLenLLT, XLenLLT)
+      .clampScalar(1, XLenLLT, XLenLLT);
+
+  if (ST.hasStdExtM()) {
+    getActionDefinitionsBuilder({G_MUL, G_SDIV, G_SREM, G_UDIV, G_UREM})
+        .legalFor({XLenLLT})
+        .customFor({s32})
+        .clampScalar(0, XLenLLT, XLenLLT);
+  } else {
+    getActionDefinitionsBuilder({G_MUL, G_SDIV, G_SREM, G_UDIV, G_UREM})
+        .legalFor({XLenLLT})
+        .libcall()
+        .clampScalar(0, XLenLLT, XLenLLT);
+  }
+
+  getActionDefinitionsBuilder({G_AND, G_OR, G_XOR})
+      .legalFor({XLenLLT})
+      .clampScalar(0, XLenLLT, XLenLLT);
+
+  getActionDefinitionsBuilder({G_UADDO, G_UADDE, G_USUBO, G_USUBE})
+      .lowerFor({{XLenLLT, s1}});
+  if (ST.hasStdExtM()) {
+    getActionDefinitionsBuilder(G_UMULO).lowerFor({{XLenLLT, s1}});
+
+    getActionDefinitionsBuilder(G_UMULH).legalFor({XLenLLT}).clampScalar(
+        0, XLenLLT, XLenLLT);
+  }
+
+  getActionDefinitionsBuilder(G_ICMP)
+      .legalFor({{XLenLLT, XLenLLT}})
+      .clampScalar(0, XLenLLT, XLenLLT)
+      .clampScalar(1, XLenLLT, XLenLLT);
+
+  getActionDefinitionsBuilder(G_CONSTANT)
+      .legalFor({XLenLLT})
+      .clampScalar(0, XLenLLT, XLenLLT);
+
+  getActionDefinitionsBuilder({G_ZEXT, G_SEXT, G_ANYEXT})
+      .legalFor({XLenLLT})
+      .clampScalar(0, XLenLLT, XLenLLT)
+      .maxScalar(1, XLenLLT);
+
   getLegacyLegalizerInfo().computeTables();
 }
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu32.mir
new file mode 100644
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu32.mir
@@ -0,0 +1,826 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -O0 -mtriple=riscv32 --global-isel -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefix=RV32
+
+--- |
+
+  define i32 @addi(i32 %a) {
+    %1 = add i32 %a, 1
+    ret i32 %1
+  }
+
+  define i32 @slti(i32 %a) {
+    %1 = icmp slt i32 %a, 2
+    %2 = zext i1 %1 to i32
+    ret i32 %2
+  }
+
+  define i32 @sltiu(i32 %a) {
+    %1 = icmp ult i32 %a, 3
+    %2 = zext i1 %1 to i32
+    ret i32 %2
+  }
+
+  define i32 @xori(i32 %a) {
+    %1 = xor i32 %a, 4
+    ret i32 %1
+  }
+
+  define i32 @ori(i32 %a) {
+    %1 = or i32 %a, 5
+    ret i32 %1
+  }
+
+  define i32 @andi(i32 %a) {
+    %1 = and i32 %a, 6
+    ret i32 %1
+  }
+
+  define i32 @slli(i32 %a) {
+    %1 = shl i32 %a, 7
+    ret i32 %1
+  }
+
+  define i32 @srli(i32 %a) {
+    %1 = lshr i32 %a, 8
+    ret i32 %1
+  }
+
+  define i32 @srli_demandedbits(i32 %0) {
+    %2 = lshr i32 %0, 3
+    %3 = or i32 %2, 1
+    ret i32 %3
+  }
+
+  define i32 @srai(i32 %a) {
+    %1 = ashr i32 %a, 9
+    ret i32 %1
+  }
+
+  define i32
@add(i32 %a, i32 %b) { + %1 = add i32 %a, %b + ret i32 %1 + } + + define i32 @sub(i32 %a, i32 %b) { + %1 = sub i32 %a, %b + ret i32 %1 + } + + define i32 @sub_negative_constant_lhs(i32 %a) { + %1 = sub i32 -2, %a + ret i32 %1 + } + + define i32 @sll(i32 %a, i32 %b) { + %1 = shl i32 %a, %b + ret i32 %1 + } + + define i32 @sll_negative_constant_lhs(i32 %a) { + %1 = shl i32 -1, %a + ret i32 %1 + } + + define i32 @slt(i32 %a, i32 %b) { + %1 = icmp slt i32 %a, %b + %2 = zext i1 %1 to i32 + ret i32 %2 + } + + define i32 @sltu(i32 %a, i32 %b) { + %1 = icmp ult i32 %a, %b + %2 = zext i1 %1 to i32 + ret i32 %2 + } + + define i32 @xor(i32 %a, i32 %b) { + %1 = xor i32 %a, %b + ret i32 %1 + } + + define i32 @srl(i32 %a, i32 %b) { + %1 = lshr i32 %a, %b + ret i32 %1 + } + + define i32 @srl_negative_constant_lhs(i32 %a) { + %1 = lshr i32 -1, %a + ret i32 %1 + } + + define i32 @sra(i32 %a, i32 %b) { + %1 = ashr i32 %a, %b + ret i32 %1 + } + + define i32 @sra_negative_constant_lhs(i32 %a) { + %1 = ashr i32 -2147483648, %a + ret i32 %1 + } + + define i32 @or(i32 %a, i32 %b) { + %1 = or i32 %a, %b + ret i32 %1 + } + + define i32 @and(i32 %a, i32 %b) { + %1 = and i32 %a, %b + ret i32 %1 + } + +... +--- +name: addi +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: addi + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 1 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[ADD]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 1 + %2:_(s32) = G_ADD %0, %1 + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: slti +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: slti + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2 + ; RV32-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(slt), [[COPY]](s32), [[C]] + ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[ICMP]], [[C1]] + ; RV32-NEXT: $x10 = COPY [[AND]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 2 + %2:_(s1) = G_ICMP intpred(slt), %0(s32), %1 + %3:_(s32) = G_ZEXT %2(s1) + $x10 = COPY %3(s32) + PseudoRET implicit $x10 + +... 
+--- +name: sltiu +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: sltiu + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 3 + ; RV32-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ult), [[COPY]](s32), [[C]] + ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[ICMP]], [[C1]] + ; RV32-NEXT: $x10 = COPY [[AND]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 3 + %2:_(s1) = G_ICMP intpred(ult), %0(s32), %1 + %3:_(s32) = G_ZEXT %2(s1) + $x10 = COPY %3(s32) + PseudoRET implicit $x10 + +... +--- +name: xori +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: xori + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4 + ; RV32-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[XOR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 4 + %2:_(s32) = G_XOR %0, %1 + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: ori +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: ori + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 5 + ; RV32-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[OR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 5 + %2:_(s32) = G_OR %0, %1 + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: andi +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: andi + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 6 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[AND]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 6 + %2:_(s32) = G_AND %0, %1 + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... 
+--- +name: slli +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: slli + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 7 + ; RV32-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[C]](s32) + ; RV32-NEXT: $x10 = COPY [[SHL]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 7 + %2:_(s32) = G_SHL %0, %1(s32) + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: srli +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: srli + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8 + ; RV32-NEXT: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32) + ; RV32-NEXT: $x10 = COPY [[LSHR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 8 + %2:_(s32) = G_LSHR %0, %1(s32) + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: srli_demandedbits +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.1): + liveins: $x10 + + ; RV32-LABEL: name: srli_demandedbits + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 3 + ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1 + ; RV32-NEXT: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[C]](s32) + ; RV32-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR [[LSHR]], [[C1]] + ; RV32-NEXT: $x10 = COPY [[OR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 3 + %3:_(s32) = G_CONSTANT i32 1 + %2:_(s32) = G_LSHR %0, %1(s32) + %4:_(s32) = G_OR %2, %3 + $x10 = COPY %4(s32) + PseudoRET implicit $x10 + +... +--- +name: srai +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: srai + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 9 + ; RV32-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY]], [[C]](s32) + ; RV32-NEXT: $x10 = COPY [[ASHR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 9 + %2:_(s32) = G_ASHR %0, %1(s32) + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... 
+--- +name: add +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: add + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY]], [[COPY1]] + ; RV32-NEXT: $x10 = COPY [[ADD]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = COPY $x11 + %2:_(s32) = G_ADD %0, %1 + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: sub +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sub + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11 + ; RV32-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY]], [[COPY1]] + ; RV32-NEXT: $x10 = COPY [[SUB]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = COPY $x11 + %2:_(s32) = G_SUB %0, %1 + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: sub_negative_constant_lhs +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: sub_negative_constant_lhs + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2 + ; RV32-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[COPY]] + ; RV32-NEXT: $x10 = COPY [[SUB]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 -2 + %2:_(s32) = G_SUB %1, %0 + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: sll +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sll + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11 + ; RV32-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY]], [[COPY1]](s32) + ; RV32-NEXT: $x10 = COPY [[SHL]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = COPY $x11 + %2:_(s32) = G_SHL %0, %1(s32) + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... 
+--- +name: sll_negative_constant_lhs +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: sll_negative_constant_lhs + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1 + ; RV32-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[C]], [[COPY]](s32) + ; RV32-NEXT: $x10 = COPY [[SHL]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 -1 + %2:_(s32) = G_SHL %1, %0(s32) + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: slt +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: slt + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11 + ; RV32-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(slt), [[COPY]](s32), [[COPY1]] + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 1 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[ICMP]], [[C]] + ; RV32-NEXT: $x10 = COPY [[AND]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = COPY $x11 + %2:_(s1) = G_ICMP intpred(slt), %0(s32), %1 + %3:_(s32) = G_ZEXT %2(s1) + $x10 = COPY %3(s32) + PseudoRET implicit $x10 + +... +--- +name: sltu +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sltu + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11 + ; RV32-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ult), [[COPY]](s32), [[COPY1]] + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 1 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[ICMP]], [[C]] + ; RV32-NEXT: $x10 = COPY [[AND]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = COPY $x11 + %2:_(s1) = G_ICMP intpred(ult), %0(s32), %1 + %3:_(s32) = G_ZEXT %2(s1) + $x10 = COPY %3(s32) + PseudoRET implicit $x10 + +... +--- +name: xor +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: xor + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11 + ; RV32-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[COPY]], [[COPY1]] + ; RV32-NEXT: $x10 = COPY [[XOR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = COPY $x11 + %2:_(s32) = G_XOR %0, %1 + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... 
+--- +name: srl +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: srl + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11 + ; RV32-NEXT: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[COPY]], [[COPY1]](s32) + ; RV32-NEXT: $x10 = COPY [[LSHR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = COPY $x11 + %2:_(s32) = G_LSHR %0, %1(s32) + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: srl_negative_constant_lhs +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: srl_negative_constant_lhs + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1 + ; RV32-NEXT: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[C]], [[COPY]](s32) + ; RV32-NEXT: $x10 = COPY [[LSHR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 -1 + %2:_(s32) = G_LSHR %1, %0(s32) + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: sra +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sra + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11 + ; RV32-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY]], [[COPY1]](s32) + ; RV32-NEXT: $x10 = COPY [[ASHR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = COPY $x11 + %2:_(s32) = G_ASHR %0, %1(s32) + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... +--- +name: sra_negative_constant_lhs +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: sra_negative_constant_lhs + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -2147483648 + ; RV32-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[C]], [[COPY]](s32) + ; RV32-NEXT: $x10 = COPY [[ASHR]](s32) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s32) = COPY $x10 + %1:_(s32) = G_CONSTANT i32 -2147483648 + %2:_(s32) = G_ASHR %1, %0(s32) + $x10 = COPY %2(s32) + PseudoRET implicit $x10 + +... 
+---
+name: or
+alignment: 4
+legalized: false
+regBankSelected: false
+registers:
+  - { id: 0, class: _, preferred-register: '' }
+  - { id: 1, class: _, preferred-register: '' }
+  - { id: 2, class: _, preferred-register: '' }
+body: |
+  bb.1 (%ir-block.0):
+    liveins: $x10, $x11
+
+    ; RV32-LABEL: name: or
+    ; RV32: liveins: $x10, $x11
+    ; RV32-NEXT: {{ $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR [[COPY]], [[COPY1]]
+    ; RV32-NEXT: $x10 = COPY [[OR]](s32)
+    ; RV32-NEXT: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_OR %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name: and
+alignment: 4
+legalized: false
+regBankSelected: false
+registers:
+  - { id: 0, class: _, preferred-register: '' }
+  - { id: 1, class: _, preferred-register: '' }
+  - { id: 2, class: _, preferred-register: '' }
+body: |
+  bb.1 (%ir-block.0):
+    liveins: $x10, $x11
+
+    ; RV32-LABEL: name: and
+    ; RV32: liveins: $x10, $x11
+    ; RV32-NEXT: {{ $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[COPY1]]
+    ; RV32-NEXT: $x10 = COPY [[AND]](s32)
+    ; RV32-NEXT: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_AND %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu64.mir
new file mode 100644
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu64.mir
@@ -0,0 +1,1270 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -O0 -mtriple=riscv64 --global-isel -run-pass=legalizer %s -o - \
+# RUN:   | FileCheck %s --check-prefix=RV32
+
+--- |
+
+  define i64 @addi(i64 %a) {
+    %1 = add i64 %a, 1
+    ret i64 %1
+  }
+
+  define i64 @slti(i64 %a) {
+    %1 = icmp slt i64 %a, 2
+    %2 = zext i1 %1 to i64
+    ret i64 %2
+  }
+
+  define i64 @sltiu(i64 %a) {
+    %1 = icmp ult i64 %a, 3
+    %2 = zext i1 %1 to i64
+    ret i64 %2
+  }
+
+  define i64 @xori(i64 %a) {
+    %1 = xor i64 %a, 4
+    ret i64 %1
+  }
+
+  define i64 @ori(i64 %a) {
+    %1 = or i64 %a, 5
+    ret i64 %1
+  }
+
+  define i64 @andi(i64 %a) {
+    %1 = and i64 %a, 6
+    ret i64 %1
+  }
+
+  define i64 @slli(i64 %a) {
+    %1 = shl i64 %a, 7
+    ret i64 %1
+  }
+
+  define i64 @srli(i64 %a) {
+    %1 = lshr i64 %a, 8
+    ret i64 %1
+  }
+
+  define i64 @srai(i64 %a) {
+    %1 = ashr i64 %a, 9
+    ret i64 %1
+  }
+
+  define i64 @add(i64 %a, i64 %b) {
+    %1 = add i64 %a, %b
+    ret i64 %1
+  }
+
+  define i64 @sub(i64 %a, i64 %b) {
+    %1 = sub i64 %a, %b
+    ret i64 %1
+  }
+
+  define i64 @sll(i64 %a, i64 %b) {
+    %1 = shl i64 %a, %b
+    ret i64 %1
+  }
+
+  define i64 @slt(i64 %a, i64 %b) {
+    %1 = icmp slt i64 %a, %b
+    %2 = zext i1 %1 to i64
+    ret i64 %2
+  }
+
+  define i64 @sltu(i64 %a, i64 %b) {
+    %1 = icmp ult i64 %a, %b
+    %2 = zext i1 %1 to i64
+    ret i64 %2
+  }
+
+  define i64 @xor(i64 %a, i64 %b) {
+    %1 = xor i64 %a, %b
+    ret i64 %1
+  }
+
+  define i64 @srl(i64 %a, i64 %b) {
+    %1 = lshr i64 %a, %b
+    ret i64 %1
+  }
+
+  define i64 @sra(i64 %a, i64 %b) {
+    %1 = ashr i64 %a, %b
+    ret i64 %1
+  }
+
+  define i64 @or(i64 %a, i64 %b) {
+    %1 = or i64 %a, %b
+    ret i64 %1
+  }
+
+  define i64 @and(i64 %a, i64 %b) {
+    %1 = and i64 %a, %b
+    ret i64 %1
+  }
+
+  define signext i32 @addiw(i32
signext %a) { + %1 = add i32 %a, 123 + ret i32 %1 + } + + define signext i32 @slliw(i32 signext %a) { + %1 = shl i32 %a, 17 + ret i32 %1 + } + + define signext i32 @srliw(i32 %a) { + %1 = lshr i32 %a, 8 + ret i32 %1 + } + + define signext i32 @sraiw(i32 %a) { + %1 = ashr i32 %a, 9 + ret i32 %1 + } + + define i64 @sraiw_i64(i64 %a) { + %1 = shl i64 %a, 32 + %2 = ashr i64 %1, 41 + ret i64 %2 + } + + define signext i32 @sextw(i32 zeroext %a) { + ret i32 %a + } + + define signext i32 @addw(i32 signext %a, i32 signext %b) { + %1 = add i32 %a, %b + ret i32 %1 + } + + define signext i32 @subw(i32 signext %a, i32 signext %b) { + %1 = sub i32 %a, %b + ret i32 %1 + } + + define signext i32 @sllw(i32 signext %a, i32 zeroext %b) { + %1 = shl i32 %a, %b + ret i32 %1 + } + + define signext i32 @srlw(i32 signext %a, i32 zeroext %b) { + %1 = lshr i32 %a, %b + ret i32 %1 + } + + define signext i32 @sraw(i64 %a, i32 zeroext %b) { + %1 = trunc i64 %a to i32 + %2 = ashr i32 %1, %b + ret i32 %2 + } + + define i64 @add_hi_and_lo_negone(i64 %0) { + %2 = add nsw i64 %0, -1 + ret i64 %2 + } + + define i64 @add_hi_zero_lo_negone(i64 %0) { + %2 = add i64 %0, 4294967295 + ret i64 %2 + } + + define i64 @add_lo_negone(i64 %0) { + %2 = add nsw i64 %0, -4294967297 + ret i64 %2 + } + + define i64 @add_hi_one_lo_negone(i64 %0) { + %2 = add nsw i64 %0, 8589934591 + ret i64 %2 + } + +... +--- +name: addi +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: addi + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[ADD]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 1 + %2:_(s64) = G_ADD %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: slti +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: slti + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 2 + ; RV32-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[COPY]](s64), [[C]] + ; RV32-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ICMP]], [[C1]] + ; RV32-NEXT: $x10 = COPY [[AND]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 2 + %2:_(s1) = G_ICMP intpred(slt), %0(s64), %1 + %3:_(s64) = G_ZEXT %2(s1) + $x10 = COPY %3(s64) + PseudoRET implicit $x10 + +... 
+--- +name: sltiu +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: sltiu + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 3 + ; RV32-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[COPY]](s64), [[C]] + ; RV32-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ICMP]], [[C1]] + ; RV32-NEXT: $x10 = COPY [[AND]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 3 + %2:_(s1) = G_ICMP intpred(ult), %0(s64), %1 + %3:_(s64) = G_ZEXT %2(s1) + $x10 = COPY %3(s64) + PseudoRET implicit $x10 + +... +--- +name: xori +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: xori + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4 + ; RV32-NEXT: [[XOR:%[0-9]+]]:_(s64) = G_XOR [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[XOR]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 4 + %2:_(s64) = G_XOR %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: ori +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: ori + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 5 + ; RV32-NEXT: [[OR:%[0-9]+]]:_(s64) = G_OR [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[OR]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 5 + %2:_(s64) = G_OR %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: andi +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: andi + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 6 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[AND]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 6 + %2:_(s64) = G_AND %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... 
+--- +name: slli +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: slli + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 7 + ; RV32-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64) + ; RV32-NEXT: $x10 = COPY [[SHL]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 7 + %2:_(s64) = G_SHL %0, %1(s64) + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: srli +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: srli + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 8 + ; RV32-NEXT: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[COPY]], [[C]](s64) + ; RV32-NEXT: $x10 = COPY [[LSHR]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 8 + %2:_(s64) = G_LSHR %0, %1(s64) + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: srai +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: srai + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 9 + ; RV32-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[C]](s64) + ; RV32-NEXT: $x10 = COPY [[ASHR]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 9 + %2:_(s64) = G_ASHR %0, %1(s64) + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: add +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: add + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY]], [[COPY1]] + ; RV32-NEXT: $x10 = COPY [[ADD]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s64) = G_ADD %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... 
+--- +name: sub +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sub + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[COPY]], [[COPY1]] + ; RV32-NEXT: $x10 = COPY [[SUB]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s64) = G_SUB %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: sll +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sll + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[COPY1]](s64) + ; RV32-NEXT: $x10 = COPY [[SHL]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s64) = G_SHL %0, %1(s64) + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: slt +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: slt + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(slt), [[COPY]](s64), [[COPY1]] + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ICMP]], [[C]] + ; RV32-NEXT: $x10 = COPY [[AND]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s1) = G_ICMP intpred(slt), %0(s64), %1 + %3:_(s64) = G_ZEXT %2(s1) + $x10 = COPY %3(s64) + PseudoRET implicit $x10 + +... +--- +name: sltu +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sltu + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[COPY]](s64), [[COPY1]] + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ICMP]], [[C]] + ; RV32-NEXT: $x10 = COPY [[AND]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s1) = G_ICMP intpred(ult), %0(s64), %1 + %3:_(s64) = G_ZEXT %2(s1) + $x10 = COPY %3(s64) + PseudoRET implicit $x10 + +... 
+--- +name: xor +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: xor + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[XOR:%[0-9]+]]:_(s64) = G_XOR [[COPY]], [[COPY1]] + ; RV32-NEXT: $x10 = COPY [[XOR]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s64) = G_XOR %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: srl +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: srl + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[COPY]], [[COPY1]](s64) + ; RV32-NEXT: $x10 = COPY [[LSHR]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s64) = G_LSHR %0, %1(s64) + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: sra +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sra + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[COPY1]](s64) + ; RV32-NEXT: $x10 = COPY [[ASHR]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s64) = G_ASHR %0, %1(s64) + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: or +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: or + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[OR:%[0-9]+]]:_(s64) = G_OR [[COPY]], [[COPY1]] + ; RV32-NEXT: $x10 = COPY [[OR]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s64) = G_OR %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... 
+--- +name: and +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: and + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[COPY1]] + ; RV32-NEXT: $x10 = COPY [[AND]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = COPY $x11 + %2:_(s64) = G_AND %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: addiw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } + - { id: 5, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: addiw + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 123 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[ASSERT_SEXT]], [[C]] + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ADD]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %1:_(s64) = COPY $x10 + %2:_(s64) = G_ASSERT_SEXT %1, 32 + %0:_(s32) = G_TRUNC %2(s64) + %3:_(s32) = G_CONSTANT i32 123 + %4:_(s32) = G_ADD %0, %3 + %5:_(s64) = G_SEXT %4(s32) + $x10 = COPY %5(s64) + PseudoRET implicit $x10 + +... +--- +name: slliw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } + - { id: 5, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: slliw + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 17 + ; RV32-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[ASSERT_SEXT]], [[C]](s64) + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SHL]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %1:_(s64) = COPY $x10 + %2:_(s64) = G_ASSERT_SEXT %1, 32 + %0:_(s32) = G_TRUNC %2(s64) + %3:_(s32) = G_CONSTANT i32 17 + %4:_(s32) = G_SHL %0, %3(s32) + %5:_(s64) = G_SEXT %4(s32) + $x10 = COPY %5(s64) + PseudoRET implicit $x10 + +... 
+--- +name: srliw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: srliw + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]] + ; RV32-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8 + ; RV32-NEXT: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[AND]], [[C1]](s64) + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[LSHR]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %1:_(s64) = COPY $x10 + %0:_(s32) = G_TRUNC %1(s64) + %2:_(s32) = G_CONSTANT i32 8 + %3:_(s32) = G_LSHR %0, %2(s32) + %4:_(s64) = G_SEXT %3(s32) + $x10 = COPY %4(s64) + PseudoRET implicit $x10 + +... +--- +name: sraiw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: sraiw + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 9 + ; RV32-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT]], [[C]](s64) + ; RV32-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[ASHR]](s64) + ; RV32-NEXT: [[SEXT1:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC1]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT1]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %1:_(s64) = COPY $x10 + %0:_(s32) = G_TRUNC %1(s64) + %2:_(s32) = G_CONSTANT i32 9 + %3:_(s32) = G_ASHR %0, %2(s32) + %4:_(s64) = G_SEXT %3(s32) + $x10 = COPY %4(s64) + PseudoRET implicit $x10 + +... +--- +name: sraiw_i64 +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: sraiw_i64 + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 32 + ; RV32-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 41 + ; RV32-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64) + ; RV32-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C1]](s64) + ; RV32-NEXT: $x10 = COPY [[ASHR]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 32 + %3:_(s64) = G_CONSTANT i64 41 + %2:_(s64) = G_SHL %0, %1(s64) + %4:_(s64) = G_ASHR %2, %3(s64) + $x10 = COPY %4(s64) + PseudoRET implicit $x10 + +... 
+--- +name: sextw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10 + + ; RV32-LABEL: name: sextw + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 32 + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_ZEXT]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %1:_(s64) = COPY $x10 + %2:_(s64) = G_ASSERT_ZEXT %1, 32 + %0:_(s32) = G_TRUNC %2(s64) + %3:_(s64) = G_SEXT %0(s32) + $x10 = COPY %3(s64) + PseudoRET implicit $x10 + +... +--- +name: addw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } + - { id: 5, class: _, preferred-register: '' } + - { id: 6, class: _, preferred-register: '' } + - { id: 7, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: addw + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 32 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[ASSERT_SEXT]], [[ASSERT_SEXT1]] + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ADD]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %2:_(s64) = COPY $x10 + %3:_(s64) = G_ASSERT_SEXT %2, 32 + %0:_(s32) = G_TRUNC %3(s64) + %4:_(s64) = COPY $x11 + %5:_(s64) = G_ASSERT_SEXT %4, 32 + %1:_(s32) = G_TRUNC %5(s64) + %6:_(s32) = G_ADD %0, %1 + %7:_(s64) = G_SEXT %6(s32) + $x10 = COPY %7(s64) + PseudoRET implicit $x10 + +... 
+--- +name: subw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } + - { id: 5, class: _, preferred-register: '' } + - { id: 6, class: _, preferred-register: '' } + - { id: 7, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: subw + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[ASSERT_SEXT1:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY1]], 32 + ; RV32-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[ASSERT_SEXT]], [[ASSERT_SEXT1]] + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SUB]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %2:_(s64) = COPY $x10 + %3:_(s64) = G_ASSERT_SEXT %2, 32 + %0:_(s32) = G_TRUNC %3(s64) + %4:_(s64) = COPY $x11 + %5:_(s64) = G_ASSERT_SEXT %4, 32 + %1:_(s32) = G_TRUNC %5(s64) + %6:_(s32) = G_SUB %0, %1 + %7:_(s64) = G_SEXT %6(s32) + $x10 = COPY %7(s64) + PseudoRET implicit $x10 + +... +--- +name: sllw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } + - { id: 5, class: _, preferred-register: '' } + - { id: 6, class: _, preferred-register: '' } + - { id: 7, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sllw + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 32 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ASSERT_ZEXT]], [[C]] + ; RV32-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[ASSERT_SEXT]], [[AND]](s64) + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SHL]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %2:_(s64) = COPY $x10 + %3:_(s64) = G_ASSERT_SEXT %2, 32 + %0:_(s32) = G_TRUNC %3(s64) + %4:_(s64) = COPY $x11 + %5:_(s64) = G_ASSERT_ZEXT %4, 32 + %1:_(s32) = G_TRUNC %5(s64) + %6:_(s32) = G_SHL %0, %1(s32) + %7:_(s64) = G_SEXT %6(s32) + $x10 = COPY %7(s64) + PseudoRET implicit $x10 + +... 
+--- +name: srlw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } + - { id: 5, class: _, preferred-register: '' } + - { id: 6, class: _, preferred-register: '' } + - { id: 7, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: srlw + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 32 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ASSERT_SEXT]], [[C]] + ; RV32-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295 + ; RV32-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[ASSERT_ZEXT]], [[C1]] + ; RV32-NEXT: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[AND]], [[AND1]](s64) + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[LSHR]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %2:_(s64) = COPY $x10 + %3:_(s64) = G_ASSERT_SEXT %2, 32 + %0:_(s32) = G_TRUNC %3(s64) + %4:_(s64) = COPY $x11 + %5:_(s64) = G_ASSERT_ZEXT %4, 32 + %1:_(s32) = G_TRUNC %5(s64) + %6:_(s32) = G_LSHR %0, %1(s32) + %7:_(s64) = G_SEXT %6(s32) + $x10 = COPY %7(s64) + PseudoRET implicit $x10 + +... +--- +name: sraw +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } + - { id: 3, class: _, preferred-register: '' } + - { id: 4, class: _, preferred-register: '' } + - { id: 5, class: _, preferred-register: '' } + - { id: 6, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.0): + liveins: $x10, $x11 + + ; RV32-LABEL: name: sraw + ; RV32: liveins: $x10, $x11 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11 + ; RV32-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY1]], 32 + ; RV32-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64) + ; RV32-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC]](s32) + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295 + ; RV32-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ASSERT_ZEXT]], [[C]] + ; RV32-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXT]], [[AND]](s64) + ; RV32-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[ASHR]](s64) + ; RV32-NEXT: [[SEXT1:%[0-9]+]]:_(s64) = G_SEXT [[TRUNC1]](s32) + ; RV32-NEXT: $x10 = COPY [[SEXT1]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %2:_(s64) = COPY $x11 + %3:_(s64) = G_ASSERT_ZEXT %2, 32 + %1:_(s32) = G_TRUNC %3(s64) + %4:_(s32) = G_TRUNC %0(s64) + %5:_(s32) = G_ASHR %4, %1(s32) + %6:_(s64) = G_SEXT %5(s32) + $x10 = COPY %6(s64) + PseudoRET implicit $x10 + +... 
+--- +name: add_hi_and_lo_negone +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.1): + liveins: $x10 + + ; RV32-LABEL: name: add_hi_and_lo_negone + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s64) = nsw G_ADD [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[ADD]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 -1 + %2:_(s64) = nsw G_ADD %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: add_hi_zero_lo_negone +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.1): + liveins: $x10 + + ; RV32-LABEL: name: add_hi_zero_lo_negone + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[ADD]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 4294967295 + %2:_(s64) = G_ADD %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: add_lo_negone +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.1): + liveins: $x10 + + ; RV32-LABEL: name: add_lo_negone + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 -4294967297 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s64) = nsw G_ADD [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[ADD]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 -4294967297 + %2:_(s64) = nsw G_ADD %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +... +--- +name: add_hi_one_lo_negone +alignment: 4 +legalized: false +regBankSelected: false +registers: + - { id: 0, class: _, preferred-register: '' } + - { id: 1, class: _, preferred-register: '' } + - { id: 2, class: _, preferred-register: '' } +body: | + bb.1 (%ir-block.1): + liveins: $x10 + + ; RV32-LABEL: name: add_hi_one_lo_negone + ; RV32: liveins: $x10 + ; RV32-NEXT: {{ $}} + ; RV32-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10 + ; RV32-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 8589934591 + ; RV32-NEXT: [[ADD:%[0-9]+]]:_(s64) = nsw G_ADD [[COPY]], [[C]] + ; RV32-NEXT: $x10 = COPY [[ADD]](s64) + ; RV32-NEXT: PseudoRET implicit $x10 + %0:_(s64) = COPY $x10 + %1:_(s64) = G_CONSTANT i64 8589934591 + %2:_(s64) = nsw G_ADD %0, %1 + $x10 = COPY %2(s64) + PseudoRET implicit $x10 + +...