diff --git a/llvm/lib/Target/AArch64/GISel/AArch64InstructionSelector.cpp b/llvm/lib/Target/AArch64/GISel/AArch64InstructionSelector.cpp
--- a/llvm/lib/Target/AArch64/GISel/AArch64InstructionSelector.cpp
+++ b/llvm/lib/Target/AArch64/GISel/AArch64InstructionSelector.cpp
@@ -5781,9 +5781,9 @@
     assert(Size != 64 && "Extend from 64 bits?");
     switch (Size) {
     case 8:
-      return AArch64_AM::SXTB;
+      return IsLoadStore ? AArch64_AM::InvalidShiftExtend : AArch64_AM::SXTB;
     case 16:
-      return AArch64_AM::SXTH;
+      return IsLoadStore ? AArch64_AM::InvalidShiftExtend : AArch64_AM::SXTH;
     case 32:
       return AArch64_AM::SXTW;
     default:
@@ -5796,9 +5796,9 @@
     assert(Size != 64 && "Extend from 64 bits?");
     switch (Size) {
     case 8:
-      return AArch64_AM::UXTB;
+      return IsLoadStore ? AArch64_AM::InvalidShiftExtend : AArch64_AM::UXTB;
     case 16:
-      return AArch64_AM::UXTH;
+      return IsLoadStore ? AArch64_AM::InvalidShiftExtend : AArch64_AM::UXTH;
     case 32:
       return AArch64_AM::UXTW;
     default:
diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/select-arith-extended-reg.mir b/llvm/test/CodeGen/AArch64/GlobalISel/select-arith-extended-reg.mir
--- a/llvm/test/CodeGen/AArch64/GlobalISel/select-arith-extended-reg.mir
+++ b/llvm/test/CodeGen/AArch64/GlobalISel/select-arith-extended-reg.mir
@@ -632,3 +632,144 @@
     %res:gpr(s32) = G_SUB %sub_lhs, %shl
     $w3 = COPY %res(s32)
     RET_ReallyLR implicit $w3
+...
+---
+name: store_16b_zext
+alignment: 4
+legalized: true
+regBankSelected: true
+tracksRegLiveness: true
+liveins:
+  - { reg: '$x0' }
+  - { reg: '$w1' }
+  - { reg: '$x2' }
+body: |
+  bb.1.entry:
+    liveins: $x0, $x1, $x2
+
+    ; CHECK-LABEL: name: store_16b_zext
+    ; CHECK: liveins: $x0, $x1, $x2
+    ; CHECK: [[COPY:%[0-9]+]]:gpr64sp = COPY $x0
+    ; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY $w1
+    ; CHECK: [[COPY2:%[0-9]+]]:gpr64all = COPY $x2
+    ; CHECK: [[SUBREG_TO_REG:%[0-9]+]]:gpr64 = SUBREG_TO_REG 0, [[COPY1]], %subreg.sub_32
+    ; CHECK: %zext:gpr64 = UBFMXri [[SUBREG_TO_REG]], 0, 15
+    ; CHECK: [[COPY3:%[0-9]+]]:gpr64 = COPY [[COPY2]]
+    ; CHECK: STRXroX [[COPY3]], [[COPY]], %zext, 0, 1 :: (store 8)
+    ; CHECK: RET_ReallyLR
+    %0:gpr(p0) = COPY $x0
+    %1:gpr(s32) = COPY $w1
+    %2:gpr(p0) = COPY $x2
+    %small:gpr(s16) = G_TRUNC %1
+    %zext:gpr(s64) = G_ZEXT %small(s16)
+    %cst:gpr(s64) = G_CONSTANT i64 3
+    %shl:gpr(s64) = G_SHL %zext, %cst(s64)
+    %gep:gpr(p0) = G_PTR_ADD %0, %shl(s64)
+    G_STORE %2(p0), %gep(p0) :: (store 8)
+    RET_ReallyLR
+...
+---
+name: store_8b_zext
+alignment: 4
+legalized: true
+regBankSelected: true
+tracksRegLiveness: true
+liveins:
+  - { reg: '$x0' }
+  - { reg: '$w1' }
+  - { reg: '$x2' }
+body: |
+  bb.1.entry:
+    liveins: $x0, $x1, $x2
+
+    ; CHECK-LABEL: name: store_8b_zext
+    ; CHECK: liveins: $x0, $x1, $x2
+    ; CHECK: [[COPY:%[0-9]+]]:gpr64sp = COPY $x0
+    ; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY $w1
+    ; CHECK: [[COPY2:%[0-9]+]]:gpr64all = COPY $x2
+    ; CHECK: [[SUBREG_TO_REG:%[0-9]+]]:gpr64 = SUBREG_TO_REG 0, [[COPY1]], %subreg.sub_32
+    ; CHECK: %zext:gpr64 = UBFMXri [[SUBREG_TO_REG]], 0, 7
+    ; CHECK: [[COPY3:%[0-9]+]]:gpr64 = COPY [[COPY2]]
+    ; CHECK: STRXroX [[COPY3]], [[COPY]], %zext, 0, 1 :: (store 8)
+    ; CHECK: RET_ReallyLR
+    %0:gpr(p0) = COPY $x0
+    %1:gpr(s32) = COPY $w1
+    %2:gpr(p0) = COPY $x2
+    %small:gpr(s8) = G_TRUNC %1
+    %zext:gpr(s64) = G_ZEXT %small(s8)
+    %cst:gpr(s64) = G_CONSTANT i64 3
+    %shl:gpr(s64) = G_SHL %zext, %cst(s64)
+    %gep:gpr(p0) = G_PTR_ADD %0, %shl(s64)
+    G_STORE %2(p0), %gep(p0) :: (store 8)
+    RET_ReallyLR
+...
+---
+name: store_16b_sext
+alignment: 4
+legalized: true
+regBankSelected: true
+tracksRegLiveness: true
+liveins:
+  - { reg: '$x0' }
+  - { reg: '$w1' }
+  - { reg: '$x2' }
+body: |
+  bb.1.entry:
+    liveins: $x0, $x1, $x2
+
+    ; CHECK-LABEL: name: store_16b_sext
+    ; CHECK: liveins: $x0, $x1, $x2
+    ; CHECK: [[COPY:%[0-9]+]]:gpr64sp = COPY $x0
+    ; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY $w1
+    ; CHECK: [[COPY2:%[0-9]+]]:gpr64all = COPY $x2
+    ; CHECK: [[SUBREG_TO_REG:%[0-9]+]]:gpr64 = SUBREG_TO_REG 0, [[COPY1]], %subreg.sub_32
+    ; CHECK: %zext:gpr64 = SBFMXri [[SUBREG_TO_REG]], 0, 15
+    ; CHECK: [[COPY3:%[0-9]+]]:gpr64 = COPY [[COPY2]]
+    ; CHECK: STRXroX [[COPY3]], [[COPY]], %zext, 0, 1 :: (store 8)
+    ; CHECK: RET_ReallyLR
+    %0:gpr(p0) = COPY $x0
+    %1:gpr(s32) = COPY $w1
+    %2:gpr(p0) = COPY $x2
+    %small:gpr(s16) = G_TRUNC %1
+    %zext:gpr(s64) = G_SEXT %small(s16)
+    %cst:gpr(s64) = G_CONSTANT i64 3
+    %shl:gpr(s64) = G_SHL %zext, %cst(s64)
+    %gep:gpr(p0) = G_PTR_ADD %0, %shl(s64)
+    G_STORE %2(p0), %gep(p0) :: (store 8)
+    RET_ReallyLR
+...
+---
+name: store_8b_sext
+alignment: 4
+legalized: true
+regBankSelected: true
+tracksRegLiveness: true
+liveins:
+  - { reg: '$x0' }
+  - { reg: '$w1' }
+  - { reg: '$x2' }
+body: |
+  bb.1.entry:
+    liveins: $x0, $x1, $x2
+
+    ; CHECK-LABEL: name: store_8b_sext
+    ; CHECK: liveins: $x0, $x1, $x2
+    ; CHECK: [[COPY:%[0-9]+]]:gpr64sp = COPY $x0
+    ; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY $w1
+    ; CHECK: [[COPY2:%[0-9]+]]:gpr64all = COPY $x2
+    ; CHECK: [[SUBREG_TO_REG:%[0-9]+]]:gpr64 = SUBREG_TO_REG 0, [[COPY1]], %subreg.sub_32
+    ; CHECK: %zext:gpr64 = SBFMXri [[SUBREG_TO_REG]], 0, 7
+    ; CHECK: [[COPY3:%[0-9]+]]:gpr64 = COPY [[COPY2]]
+    ; CHECK: STRXroX [[COPY3]], [[COPY]], %zext, 0, 1 :: (store 8)
+    ; CHECK: RET_ReallyLR
+    %0:gpr(p0) = COPY $x0
+    %1:gpr(s32) = COPY $w1
+    %2:gpr(p0) = COPY $x2
+    %small:gpr(s8) = G_TRUNC %1
+    %zext:gpr(s64) = G_SEXT %small(s8)
+    %cst:gpr(s64) = G_CONSTANT i64 3
+    %shl:gpr(s64) = G_SHL %zext, %cst(s64)
+    %gep:gpr(p0) = G_PTR_ADD %0, %shl(s64)
+    G_STORE %2(p0), %gep(p0) :: (store 8)
+    RET_ReallyLR
+...