Index: llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
===================================================================
--- llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
+++ llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
@@ -275,8 +275,8 @@
                                  {s64, p0, s64, 8},
                                  {p0, p0, s64, 8},
                                  {v2s32, p0, s64, 8}})
-      .clampScalar(0, s32, s64)
       .widenScalarToNextPow2(0)
+      .clampScalar(0, s32, s64)
       // TODO: We could support sum-of-pow2's but the lowering code doesn't know
       // how to do that yet.
       .unsupportedIfMemSizeNotPow2()
Index: llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir
===================================================================
--- llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir
+++ llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir
@@ -1,50 +1,5 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
 # RUN: llc -march=aarch64 -run-pass=legalizer %s -o - -verify-machineinstrs | FileCheck %s
 
---- |
-  target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
-  target triple = "aarch64"
-
-  define void @test_extload() {
-  entry:
-    ret void
-  }
-
-  define i64 @sext_i32_i64(i32* %ptr) {
-    %ld = load i32, i32* %ptr, align 4
-    %v = sext i32 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @sext_i16_i64(i16* %ptr) {
-    %ld = load i16, i16* %ptr, align 2
-    %v = sext i16 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @sext_i8_i64(i8* %ptr) {
-    %ld = load i8, i8* %ptr, align 1
-    %v = sext i8 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @zext_i32_i64(i32* %ptr) {
-    %ld = load i32, i32* %ptr, align 4
-    %v = zext i32 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @zext_i16_i64(i16* %ptr) {
-    %ld = load i16, i16* %ptr, align 2
-    %v = zext i16 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @zext_i8_i64(i8* %ptr) {
-    %ld = load i8, i8* %ptr, align 1
-    %v = zext i8 %ld to i64
-    ret i64 %v
-  }
-...
 ---
 name: test_extload
@@ -67,11 +22,11 @@
 
     ; CHECK-LABEL: name: sext_i32_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s32) from %ir.ptr)
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s32))
     ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s32) from %ir.ptr)
+    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s32))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -84,11 +39,11 @@
 
     ; CHECK-LABEL: name: sext_i16_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s16) from %ir.ptr)
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s16))
     ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s16) from %ir.ptr)
+    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s16))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -101,11 +56,11 @@
 
     ; CHECK-LABEL: name: sext_i8_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s8) from %ir.ptr)
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s8))
     ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s8) from %ir.ptr)
+    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s8))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -118,11 +73,11 @@
 
     ; CHECK-LABEL: name: zext_i32_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s32) from %ir.ptr)
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s32))
     ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s32) from %ir.ptr)
+    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s32))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -135,11 +90,11 @@
 
     ; CHECK-LABEL: name: zext_i16_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s16) from %ir.ptr)
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s16))
     ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s16) from %ir.ptr)
+    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s16))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -152,12 +107,47 @@
 
     ; CHECK-LABEL: name: zext_i8_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8) from %ir.ptr)
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8))
     ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s8) from %ir.ptr)
+    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s8))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
 ...
+---
+name: zext_i8_i88
+body: |
+  bb.1:
+    liveins: $x0
+    ; CHECK-LABEL: name: zext_i8_i88
+    ; CHECK: %ptr:_(p0) = COPY $x0
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD %ptr(p0) :: (load (s8))
+    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
+    ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %ptr:_(p0) = COPY $x0
+    %load:_(s88) = G_ZEXTLOAD %ptr(p0) :: (load (s8))
+    %trunc:_(s64) = G_TRUNC %load
+    $x0 = COPY %trunc(s64)
+    RET_ReallyLR implicit $x0
+...
+---
+name: sext_i8_i88
+body: |
+  bb.1:
+    liveins: $x0
+    ; CHECK-LABEL: name: sext_i8_i88
+    ; CHECK: %ptr:_(p0) = COPY $x0
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD %ptr(p0) :: (load (s8))
+    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
+    ; CHECK: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXTLOAD]], [[C]](s64)
+    ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %ptr:_(p0) = COPY $x0
+    %load:_(s88) = G_SEXTLOAD %ptr(p0) :: (load (s8))
+    %trunc:_(s64) = G_TRUNC %load
+    $x0 = COPY %trunc(s64)
+    RET_ReallyLR implicit $x0
+...
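
Note on the change (a reviewer-style sketch, not part of the patch): the only
change to the legalizer rules is the swapped order of widenScalarToNextPow2(0)
and clampScalar(0, s32, s64). With clamping first, an odd result type such as
s88 is asked to narrow straight to s64, a non-pow-2 split the extending-load
lowering presumably cannot do yet (compare the sum-of-pow2 TODO in the same
hunk). With widening first, s88 becomes s128, and the s128 -> s64 clamp is an
even split the lowering does handle. The new zext_i8_i88/sext_i8_i88 tests
check exactly that shape: an s64 extending load for the low half, plus a
G_CONSTANT 0 (zext) or a G_ASHR by 63 (sext) for the dead high half. Below is
a minimal C++ sketch of the resulting rule order; it assumes the hunk sits in
the {G_SEXTLOAD, G_ZEXTLOAD} action builder that the new tests exercise, and
it abbreviates the type list rather than quoting this diff:

  // Sketch only: widen an odd scalar result to the next power of two
  // *before* clamping, so s88 legalizes as s88 -> s128 -> s64 instead of
  // requesting an unsupported direct s88 -> s64 narrow.
  getActionDefinitionsBuilder({G_SEXTLOAD, G_ZEXTLOAD})
      .legalForTypesWithMemDesc({/* ...type list as in the hunk above... */})
      .widenScalarToNextPow2(0)   // s88 result widens to s128 first
      .clampScalar(0, s32, s64)   // then s128 narrows evenly to s64 halves
      // TODO: We could support sum-of-pow2's but the lowering code doesn't
      // know how to do that yet.
      .unsupportedIfMemSizeNotPow2();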