diff --git a/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp b/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
--- a/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
+++ b/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
@@ -403,6 +403,8 @@
   assert((Size == 32 || Size == 64 || Size == 128) && "Unsupported size");
 
   switch (Opcode) {
+  case TargetOpcode::G_MUL:
+    RTLIBCASE(MUL_I);
   case TargetOpcode::G_SDIV:
     RTLIBCASE(SDIV_I);
   case TargetOpcode::G_UDIV:
@@ -639,6 +641,7 @@
   switch (MI.getOpcode()) {
   default:
     return UnableToLegalize;
+  case TargetOpcode::G_MUL:
   case TargetOpcode::G_SDIV:
   case TargetOpcode::G_UDIV:
   case TargetOpcode::G_SREM:
diff --git a/llvm/lib/Target/RISCV/RISCVLegalizerInfo.cpp b/llvm/lib/Target/RISCV/RISCVLegalizerInfo.cpp
--- a/llvm/lib/Target/RISCV/RISCVLegalizerInfo.cpp
+++ b/llvm/lib/Target/RISCV/RISCVLegalizerInfo.cpp
@@ -11,6 +11,7 @@
 //===----------------------------------------------------------------------===//
 
 #include "RISCVLegalizerInfo.h"
+#include "RISCVSubtarget.h"
 #include "llvm/CodeGen/TargetOpcodes.h"
 #include "llvm/CodeGen/ValueTypes.h"
 #include "llvm/IR/DerivedTypes.h"
@@ -19,5 +20,48 @@
 using namespace llvm;
 
 RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST) {
+  const LLT s32 = LLT::scalar(32);
+  const LLT s64 = LLT::scalar(64);
+
+  bool IsRV64 = ST.is64Bit();
+  const LLT &XLenLLT = IsRV64 ? s64 : s32;
+
+  using namespace TargetOpcode;
+
+  getActionDefinitionsBuilder({G_ADD, G_SUB})
+      .legalFor({XLenLLT})
+      .clampScalar(0, XLenLLT, XLenLLT);
+
+  if (ST.hasStdExtM())
+    getActionDefinitionsBuilder({G_MUL, G_SDIV, G_SREM, G_UDIV, G_UREM})
+        .legalFor({XLenLLT})
+        .clampScalar(0, XLenLLT, XLenLLT);
+  else
+    getActionDefinitionsBuilder({G_MUL, G_SDIV, G_SREM, G_UDIV, G_UREM})
+        .libcallFor({XLenLLT})
+        .clampScalar(0, XLenLLT, XLenLLT);
+
+  getActionDefinitionsBuilder({G_AND, G_OR, G_XOR})
+      .legalFor({XLenLLT})
+      .clampScalar(0, XLenLLT, XLenLLT);
+
+  // After clamping to XLen, the leftover extend artifacts are replaced:
+  // G_ZEXT becomes a G_AND mask and G_SEXT becomes G_SEXT_INREG.
+  getActionDefinitionsBuilder({G_ZEXT, G_SEXT, G_ANYEXT})
+      .clampScalar(0, XLenLLT, XLenLLT);
+
+  getActionDefinitionsBuilder({G_ASHR, G_LSHR, G_SHL})
+      .legalFor({{XLenLLT, XLenLLT}})
+      .clampScalar(0, XLenLLT, XLenLLT)
+      .clampScalar(1, XLenLLT, XLenLLT);
+
+  getActionDefinitionsBuilder(G_CONSTANT)
+      .legalFor({XLenLLT})
+      .clampScalar(0, XLenLLT, XLenLLT);
+
+  // G_SEXT_INREG is lowered to a G_SHL/G_ASHR pair (see the *_signext tests).
+  // TODO: We have better patterns for this depending on the operand.
+  getActionDefinitionsBuilder(G_SEXT_INREG).lower();
+
   computeTables();
 }
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu32.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu32.mir
new file mode 100644
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu32.mir
@@ -0,0 +1,580 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -march=riscv32 -run-pass=legalizer -simplify-mir -verify-machineinstrs < %s \
+# RUN: | FileCheck -check-prefix=RV32I %s
+# RUN: llc -march=riscv32 -mattr=+m -run-pass=legalizer -simplify-mir -verify-machineinstrs < %s \
+# RUN: | FileCheck -check-prefix=RV32IM %s
+
+--- |
+
+  ; Extensions are only tested exhaustively for add to avoid excessive tests.
+  define void @add_i8() { entry: ret void }
+  define void @add_i8_signext() { entry: ret void }
+  define void @add_i8_zeroext() { entry: ret void }
+  define void @add_i16() { entry: ret void }
+  define void @add_i16_signext() { entry: ret void }
+  define void @add_i16_zeroext() { entry: ret void }
+  define void @add_i32() { entry: ret void }
+
+  define void @sub_i32() { entry: ret void }
+  define void @mul_i32() { entry: ret void }
+  define void @sdiv_i32() { entry: ret void }
+  define void @srem_i32() { entry: ret void }
+  define void @udiv_i32() { entry: ret void }
+  define void @urem_i32() { entry: ret void }
+  define void @and_i32() { entry: ret void }
+  define void @or_i32() { entry: ret void }
+  define void @xor_i32() { entry: ret void }
+...
+---
+name:            add_i8
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: add_i8
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32I: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32I: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32I: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32I: $x10 = COPY [[COPY4]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: add_i8
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32IM: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32IM: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32IM: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32IM: $x10 = COPY [[COPY4]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %2:_(s32) = COPY $x10
+    %0:_(s8) = G_TRUNC %2(s32)
+    %3:_(s32) = COPY $x11
+    %1:_(s8) = G_TRUNC %3(s32)
+    %4:_(s8) = G_ADD %0, %1
+    %5:_(s32) = G_ANYEXT %4(s8)
+    $x10 = COPY %5(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            add_i8_signext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: add_i8_signext
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32I: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32I: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32I: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32I: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32I: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY4]], [[C]](s32)
+    ; RV32I: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32I: $x10 = COPY [[ASHR]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: add_i8_signext
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32IM: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32IM: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32IM: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32IM: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+    ; RV32IM: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY4]], [[C]](s32)
+    ; RV32IM: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32IM: $x10 = COPY [[ASHR]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %2:_(s32) = COPY $x10
+    %0:_(s8) = G_TRUNC %2(s32)
+    %3:_(s32) = COPY $x11
+    %1:_(s8) = G_TRUNC %3(s32)
+    %4:_(s8) = G_ADD %0, %1
+    %5:_(s32) = G_SEXT %4(s8)
+    $x10 = COPY %5(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            add_i8_zeroext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: add_i8_zeroext
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32I: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32I: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32I: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32I: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32I: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C]]
+    ; RV32I: $x10 = COPY [[AND]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: add_i8_zeroext
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32IM: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32IM: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32IM: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
+    ; RV32IM: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32IM: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C]]
+    ; RV32IM: $x10 = COPY [[AND]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %2:_(s32) = COPY $x10
+    %0:_(s8) = G_TRUNC %2(s32)
+    %3:_(s32) = COPY $x11
+    %1:_(s8) = G_TRUNC %3(s32)
+    %4:_(s8) = G_ADD %0, %1
+    %5:_(s32) = G_ZEXT %4(s8)
+    $x10 = COPY %5(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            add_i16
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: add_i16
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32I: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32I: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32I: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32I: $x10 = COPY [[COPY4]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: add_i16
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32IM: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32IM: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32IM: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32IM: $x10 = COPY [[COPY4]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %2:_(s32) = COPY $x10
+    %0:_(s16) = G_TRUNC %2(s32)
+    %3:_(s32) = COPY $x11
+    %1:_(s16) = G_TRUNC %3(s32)
+    %4:_(s16) = G_ADD %0, %1
+    %5:_(s32) = G_ANYEXT %4(s16)
+    $x10 = COPY %5(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            add_i16_signext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: add_i16_signext
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32I: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32I: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32I: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32I: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32I: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY4]], [[C]](s32)
+    ; RV32I: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32I: $x10 = COPY [[ASHR]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: add_i16_signext
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32IM: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32IM: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32IM: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32IM: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+    ; RV32IM: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[COPY4]], [[C]](s32)
+    ; RV32IM: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C]](s32)
+    ; RV32IM: $x10 = COPY [[ASHR]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %2:_(s32) = COPY $x10
+    %0:_(s16) = G_TRUNC %2(s32)
+    %3:_(s32) = COPY $x11
+    %1:_(s16) = G_TRUNC %3(s32)
+    %4:_(s16) = G_ADD %0, %1
+    %5:_(s32) = G_SEXT %4(s16)
+    $x10 = COPY %5(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            add_i16_zeroext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: add_i16_zeroext
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32I: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32I: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32I: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32I: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32I: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C]]
+    ; RV32I: $x10 = COPY [[AND]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: add_i16_zeroext
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY]](s32)
+    ; RV32IM: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
+    ; RV32IM: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV32IM: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+    ; RV32IM: [[COPY4:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
+    ; RV32IM: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C]]
+    ; RV32IM: $x10 = COPY [[AND]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %2:_(s32) = COPY $x10
+    %0:_(s16) = G_TRUNC %2(s32)
+    %3:_(s32) = COPY $x11
+    %1:_(s16) = G_TRUNC %3(s32)
+    %4:_(s16) = G_ADD %0, %1
+    %5:_(s32) = G_ZEXT %4(s16)
+    $x10 = COPY %5(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            add_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: add_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY]], [[COPY1]]
+    ; RV32I: $x10 = COPY [[ADD]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: add_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[ADD]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_ADD %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            sub_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: sub_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY]], [[COPY1]]
+    ; RV32I: $x10 = COPY [[SUB]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: sub_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[SUB]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_SUB %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            mul_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: mul_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: $x10 = COPY [[COPY]](s32)
+    ; RV32I: $x11 = COPY [[COPY1]](s32)
+    ; RV32I: PseudoCALL &__mulsi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: $x10 = COPY [[COPY2]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: mul_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[MUL:%[0-9]+]]:_(s32) = G_MUL [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[MUL]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_MUL %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            sdiv_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: sdiv_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: $x10 = COPY [[COPY]](s32)
+    ; RV32I: $x11 = COPY [[COPY1]](s32)
+    ; RV32I: PseudoCALL &__divsi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: $x10 = COPY [[COPY2]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: sdiv_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[SDIV:%[0-9]+]]:_(s32) = G_SDIV [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[SDIV]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_SDIV %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            srem_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: srem_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: $x10 = COPY [[COPY]](s32)
+    ; RV32I: $x11 = COPY [[COPY1]](s32)
+    ; RV32I: PseudoCALL &__modsi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: $x10 = COPY [[COPY2]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: srem_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[SREM:%[0-9]+]]:_(s32) = G_SREM [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[SREM]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_SREM %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            udiv_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: udiv_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: $x10 = COPY [[COPY]](s32)
+    ; RV32I: $x11 = COPY [[COPY1]](s32)
+    ; RV32I: PseudoCALL &__udivsi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: $x10 = COPY [[COPY2]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: udiv_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[UDIV:%[0-9]+]]:_(s32) = G_UDIV [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[UDIV]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_UDIV %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            urem_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: urem_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: $x10 = COPY [[COPY]](s32)
+    ; RV32I: $x11 = COPY [[COPY1]](s32)
+    ; RV32I: PseudoCALL &__umodsi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV32I: [[COPY2:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: $x10 = COPY [[COPY2]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: urem_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[UREM:%[0-9]+]]:_(s32) = G_UREM [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[UREM]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_UREM %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            and_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: and_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[COPY1]]
+    ; RV32I: $x10 = COPY [[AND]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: and_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[AND]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_AND %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            or_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: or_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[OR:%[0-9]+]]:_(s32) = G_OR [[COPY]], [[COPY1]]
+    ; RV32I: $x10 = COPY [[OR]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: or_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[OR:%[0-9]+]]:_(s32) = G_OR [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[OR]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_OR %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
+---
+name:            xor_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV32I-LABEL: name: xor_i32
+    ; RV32I: liveins: $x10, $x11
+    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32I: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32I: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[COPY]], [[COPY1]]
+    ; RV32I: $x10 = COPY [[XOR]](s32)
+    ; RV32I: PseudoRET implicit $x10
+    ; RV32IM-LABEL: name: xor_i32
+    ; RV32IM: liveins: $x10, $x11
+    ; RV32IM: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
+    ; RV32IM: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
+    ; RV32IM: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[COPY]], [[COPY1]]
+    ; RV32IM: $x10 = COPY [[XOR]](s32)
+    ; RV32IM: PseudoRET implicit $x10
+    %0:_(s32) = COPY $x10
+    %1:_(s32) = COPY $x11
+    %2:_(s32) = G_XOR %0, %1
+    $x10 = COPY %2(s32)
+    PseudoRET implicit $x10
+
+...
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu64.mir b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu64.mir
new file mode 100644
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/legalizer/alu64.mir
@@ -0,0 +1,709 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -march=riscv64 -run-pass=legalizer -simplify-mir -verify-machineinstrs < %s \
+# RUN: | FileCheck -check-prefix=RV64I %s
+# RUN: llc -march=riscv64 -mattr=+m -run-pass=legalizer -simplify-mir -verify-machineinstrs < %s \
+# RUN: | FileCheck -check-prefix=RV64IM %s
+
+--- |
+
+  ; Extensions are only tested exhaustively for add to avoid excessive tests.
+  define void @add_i8() { entry: ret void }
+  define void @add_i8_signext() { entry: ret void }
+  define void @add_i8_zeroext() { entry: ret void }
+  define void @add_i16() { entry: ret void }
+  define void @add_i16_signext() { entry: ret void }
+  define void @add_i16_zeroext() { entry: ret void }
+  define void @add_i32() { entry: ret void }
+  define void @add_i32_signext() { entry: ret void }
+  define void @add_i32_zeroext() { entry: ret void }
+  define void @add_i64() { entry: ret void }
+
+  define void @sub_i64() { entry: ret void }
+  define void @mul_i64() { entry: ret void }
+  define void @sdiv_i64() { entry: ret void }
+  define void @srem_i64() { entry: ret void }
+  define void @udiv_i64() { entry: ret void }
+  define void @urem_i64() { entry: ret void }
+  define void @and_i64() { entry: ret void }
+  define void @or_i64() { entry: ret void }
+  define void @xor_i64() { entry: ret void }
+
+...
+---
+name:            add_i8
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i8
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64I: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64I: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64I: $x10 = COPY [[COPY4]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i8
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64IM: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64IM: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64IM: $x10 = COPY [[COPY4]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %2:_(s64) = COPY $x10
+    %0:_(s8) = G_TRUNC %2(s64)
+    %3:_(s64) = COPY $x11
+    %1:_(s8) = G_TRUNC %3(s64)
+    %4:_(s8) = G_ADD %0, %1
+    %5:_(s64) = G_ANYEXT %4(s8)
+    $x10 = COPY %5(s64)
+    PseudoRET implicit $x10
+
+...
+---
+name:            add_i8_signext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i8_signext
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64I: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64I: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64I: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64I: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY4]], [[C]](s64)
+    ; RV64I: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64I: $x10 = COPY [[ASHR]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i8_signext
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64IM: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64IM: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64IM: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
+    ; RV64IM: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY4]], [[C]](s64)
+    ; RV64IM: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64IM: $x10 = COPY [[ASHR]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %2:_(s64) = COPY $x10
+    %0:_(s8) = G_TRUNC %2(s64)
+    %3:_(s64) = COPY $x11
+    %1:_(s8) = G_TRUNC %3(s64)
+    %4:_(s8) = G_ADD %0, %1
+    %5:_(s64) = G_SEXT %4(s8)
+    $x10 = COPY %5(s64)
+    PseudoRET implicit $x10
+
+...
+---
+name:            add_i8_zeroext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i8_zeroext
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64I: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64I: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64I: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64I: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY4]], [[C]]
+    ; RV64I: $x10 = COPY [[AND]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i8_zeroext
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64IM: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64IM: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
+    ; RV64IM: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64IM: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY4]], [[C]]
+    ; RV64IM: $x10 = COPY [[AND]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %2:_(s64) = COPY $x10
+    %0:_(s8) = G_TRUNC %2(s64)
+    %3:_(s64) = COPY $x11
+    %1:_(s8) = G_TRUNC %3(s64)
+    %4:_(s8) = G_ADD %0, %1
+    %5:_(s64) = G_ZEXT %4(s8)
+    $x10 = COPY %5(s64)
+    PseudoRET implicit $x10
+
+...
+---
+name:            add_i16
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i16
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64I: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64I: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64I: $x10 = COPY [[COPY4]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i16
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64IM: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64IM: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64IM: $x10 = COPY [[COPY4]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %2:_(s64) = COPY $x10
+    %0:_(s16) = G_TRUNC %2(s64)
+    %3:_(s64) = COPY $x11
+    %1:_(s16) = G_TRUNC %3(s64)
+    %4:_(s16) = G_ADD %0, %1
+    %5:_(s64) = G_ANYEXT %4(s16)
+    $x10 = COPY %5(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s16 G_ADD is widened to the s64 XLen type (clampScalar); the G_SEXT of the
+# result is lowered to a shift-left / arithmetic-shift-right pair by 48 bits.
+# Output is identical with and without the M extension.
+name:            add_i16_signext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i16_signext
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64I: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64I: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64I: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64I: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY4]], [[C]](s64)
+    ; RV64I: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64I: $x10 = COPY [[ASHR]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i16_signext
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64IM: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64IM: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64IM: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
+    ; RV64IM: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY4]], [[C]](s64)
+    ; RV64IM: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64IM: $x10 = COPY [[ASHR]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %2:_(s64) = COPY $x10
+    %0:_(s16) = G_TRUNC %2(s64)
+    %3:_(s64) = COPY $x11
+    %1:_(s16) = G_TRUNC %3(s64)
+    %4:_(s16) = G_ADD %0, %1
+    %5:_(s64) = G_SEXT %4(s16)
+    $x10 = COPY %5(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s16 G_ADD widened to s64; the G_ZEXT of the result is lowered to an AND with
+# the 0xFFFF mask (65535). Identical output for RV64I and RV64IM.
+name:            add_i16_zeroext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i16_zeroext
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64I: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64I: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64I: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64I: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY4]], [[C]]
+    ; RV64I: $x10 = COPY [[AND]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i16_zeroext
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64IM: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64IM: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
+    ; RV64IM: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64IM: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY4]], [[C]]
+    ; RV64IM: $x10 = COPY [[AND]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %2:_(s64) = COPY $x10
+    %0:_(s16) = G_TRUNC %2(s64)
+    %3:_(s64) = COPY $x11
+    %1:_(s16) = G_TRUNC %3(s64)
+    %4:_(s16) = G_ADD %0, %1
+    %5:_(s64) = G_ZEXT %4(s16)
+    $x10 = COPY %5(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s32 G_ADD widened to s64; the surrounding G_TRUNC/G_ANYEXT become plain
+# copies, so no masking or shifting is needed. Same for RV64I and RV64IM.
+name:            add_i32
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i32
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64I: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64I: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64I: $x10 = COPY [[COPY4]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i32
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64IM: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64IM: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64IM: $x10 = COPY [[COPY4]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %2:_(s64) = COPY $x10
+    %0:_(s32) = G_TRUNC %2(s64)
+    %3:_(s64) = COPY $x11
+    %1:_(s32) = G_TRUNC %3(s64)
+    %4:_(s32) = G_ADD %0, %1
+    %5:_(s64) = G_ANYEXT %4(s32)
+    $x10 = COPY %5(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s32 G_ADD widened to s64; G_SEXT of the result becomes a shl/ashr pair by 32
+# bits. Identical output for RV64I and RV64IM.
+name:            add_i32_signext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i32_signext
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64I: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64I: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64I: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
+    ; RV64I: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY4]], [[C]](s64)
+    ; RV64I: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64I: $x10 = COPY [[ASHR]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i32_signext
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64IM: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64IM: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64IM: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
+    ; RV64IM: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY4]], [[C]](s64)
+    ; RV64IM: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C]](s64)
+    ; RV64IM: $x10 = COPY [[ASHR]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %2:_(s64) = COPY $x10
+    %0:_(s32) = G_TRUNC %2(s64)
+    %3:_(s64) = COPY $x11
+    %1:_(s32) = G_TRUNC %3(s64)
+    %4:_(s32) = G_ADD %0, %1
+    %5:_(s64) = G_SEXT %4(s32)
+    $x10 = COPY %5(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s32 G_ADD widened to s64; G_ZEXT of the result becomes an AND with the
+# 0xFFFFFFFF mask (4294967295). Identical output for RV64I and RV64IM.
+name:            add_i32_zeroext
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i32_zeroext
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64I: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64I: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64I: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64I: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY4]], [[C]]
+    ; RV64I: $x10 = COPY [[AND]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i32_zeroext
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[COPY2:%[0-9]+]]:_(s64) = COPY [[COPY]](s64)
+    ; RV64IM: [[COPY3:%[0-9]+]]:_(s64) = COPY [[COPY1]](s64)
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY2]], [[COPY3]]
+    ; RV64IM: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
+    ; RV64IM: [[COPY4:%[0-9]+]]:_(s64) = COPY [[ADD]](s64)
+    ; RV64IM: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY4]], [[C]]
+    ; RV64IM: $x10 = COPY [[AND]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %2:_(s64) = COPY $x10
+    %0:_(s32) = G_TRUNC %2(s64)
+    %3:_(s64) = COPY $x11
+    %1:_(s32) = G_TRUNC %3(s64)
+    %4:_(s32) = G_ADD %0, %1
+    %5:_(s64) = G_ZEXT %4(s32)
+    $x10 = COPY %5(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s64 G_ADD is legal at XLen on RV64, so the instruction passes through
+# unchanged on both RV64I and RV64IM.
+name:            add_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: add_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY]], [[COPY1]]
+    ; RV64I: $x10 = COPY [[ADD]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: add_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[ADD]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_ADD %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s64 G_SUB is legal at XLen on RV64 and passes through unchanged on both
+# RV64I and RV64IM.
+name:            sub_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: sub_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[COPY]], [[COPY1]]
+    ; RV64I: $x10 = COPY [[SUB]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: sub_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[SUB]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_SUB %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# Without the M extension, s64 G_MUL is legalized to a libcall to __muldi3
+# (args in $x10/$x11, result in $x10, per the RISC-V calling convention);
+# with M it stays a legal G_MUL. This exercises the new G_MUL libcall path.
+name:            mul_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: mul_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: $x10 = COPY [[COPY]](s64)
+    ; RV64I: $x11 = COPY [[COPY1]](s64)
+    ; RV64I: PseudoCALL &__muldi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: $x10 = COPY [[COPY2]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: mul_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[MUL:%[0-9]+]]:_(s64) = G_MUL [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[MUL]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_MUL %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# Without M, s64 G_SDIV becomes a libcall to __divdi3; with M it stays a
+# legal G_SDIV.
+name:            sdiv_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: sdiv_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: $x10 = COPY [[COPY]](s64)
+    ; RV64I: $x11 = COPY [[COPY1]](s64)
+    ; RV64I: PseudoCALL &__divdi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: $x10 = COPY [[COPY2]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: sdiv_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[SDIV:%[0-9]+]]:_(s64) = G_SDIV [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[SDIV]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_SDIV %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# Without M, s64 G_SREM becomes a libcall to __moddi3; with M it stays a
+# legal G_SREM.
+name:            srem_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: srem_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: $x10 = COPY [[COPY]](s64)
+    ; RV64I: $x11 = COPY [[COPY1]](s64)
+    ; RV64I: PseudoCALL &__moddi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: $x10 = COPY [[COPY2]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: srem_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[SREM:%[0-9]+]]:_(s64) = G_SREM [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[SREM]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_SREM %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# Without M, s64 G_UDIV becomes a libcall to __udivdi3; with M it stays a
+# legal G_UDIV.
+name:            udiv_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: udiv_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: $x10 = COPY [[COPY]](s64)
+    ; RV64I: $x11 = COPY [[COPY1]](s64)
+    ; RV64I: PseudoCALL &__udivdi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: $x10 = COPY [[COPY2]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: udiv_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[UDIV:%[0-9]+]]:_(s64) = G_UDIV [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[UDIV]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_UDIV %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# Without M, s64 G_UREM becomes a libcall to __umoddi3; with M it stays a
+# legal G_UREM.
+name:            urem_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: urem_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: $x10 = COPY [[COPY]](s64)
+    ; RV64I: $x11 = COPY [[COPY1]](s64)
+    ; RV64I: PseudoCALL &__umoddi3, implicit-def $x1, implicit $x10, implicit $x11, implicit-def $x10
+    ; RV64I: [[COPY2:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: $x10 = COPY [[COPY2]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: urem_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[UREM:%[0-9]+]]:_(s64) = G_UREM [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[UREM]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_UREM %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s64 G_AND is legal at XLen on RV64 and passes through unchanged on both
+# RV64I and RV64IM.
+name:            and_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: and_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[COPY1]]
+    ; RV64I: $x10 = COPY [[AND]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: and_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[AND]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_AND %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s64 G_OR is legal at XLen on RV64 and passes through unchanged on both
+# RV64I and RV64IM.
+name:            or_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: or_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[OR:%[0-9]+]]:_(s64) = G_OR [[COPY]], [[COPY1]]
+    ; RV64I: $x10 = COPY [[OR]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: or_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[OR:%[0-9]+]]:_(s64) = G_OR [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[OR]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_OR %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+---
+# s64 G_XOR is legal at XLen on RV64 and passes through unchanged on both
+# RV64I and RV64IM.
+name:            xor_i64
+tracksRegLiveness: true
+body:             |
+  bb.0.entry:
+    liveins: $x10, $x11
+
+
+    ; RV64I-LABEL: name: xor_i64
+    ; RV64I: liveins: $x10, $x11
+    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64I: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64I: [[XOR:%[0-9]+]]:_(s64) = G_XOR [[COPY]], [[COPY1]]
+    ; RV64I: $x10 = COPY [[XOR]](s64)
+    ; RV64I: PseudoRET implicit $x10
+    ; RV64IM-LABEL: name: xor_i64
+    ; RV64IM: liveins: $x10, $x11
+    ; RV64IM: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
+    ; RV64IM: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
+    ; RV64IM: [[XOR:%[0-9]+]]:_(s64) = G_XOR [[COPY]], [[COPY1]]
+    ; RV64IM: $x10 = COPY [[XOR]](s64)
+    ; RV64IM: PseudoRET implicit $x10
+    %0:_(s64) = COPY $x10
+    %1:_(s64) = COPY $x11
+    %2:_(s64) = G_XOR %0, %1
+    $x10 = COPY %2(s64)
+    PseudoRET implicit $x10
+
+...
+