diff --git a/llvm/lib/IR/ConstantFold.cpp b/llvm/lib/IR/ConstantFold.cpp
--- a/llvm/lib/IR/ConstantFold.cpp
+++ b/llvm/lib/IR/ConstantFold.cpp
@@ -1013,10 +1013,14 @@
     return C1;
   }
 
-  // Handle scalar UndefValue. Vectors are always evaluated per element.
-  bool HasScalarUndef = !C1->getType()->isVectorTy() &&
-                        (isa<UndefValue>(C1) || isa<UndefValue>(C2));
-  if (HasScalarUndef) {
+  // Handle scalar UndefValue and scalable vector UndefValue. Fixed-length
+  // vectors are always evaluated per element.
+  bool IsScalableVector =
+      C1->getType()->isVectorTy() && C1->getType()->getVectorIsScalable();
+  bool HasScalarUndefOrScalableVectorUndef =
+      (!C1->getType()->isVectorTy() || IsScalableVector) &&
+      (isa<UndefValue>(C1) || isa<UndefValue>(C2));
+  if (HasScalarUndefOrScalableVectorUndef) {
     switch (static_cast<Instruction::BinaryOps>(Opcode)) {
     case Instruction::Xor:
       if (isa<UndefValue>(C1) && isa<UndefValue>(C2))
@@ -1119,7 +1123,7 @@
   }
 
   // Neither constant should be UndefValue, unless these are vector constants.
-  assert(!HasScalarUndef && "Unexpected UndefValue");
+  assert((!HasScalarUndefOrScalableVectorUndef) && "Unexpected UndefValue");
 
   // Handle simplifications when the RHS is a constant int.
   if (ConstantInt *CI2 = dyn_cast<ConstantInt>(C2)) {
@@ -1330,6 +1334,11 @@
       }
     }
   } else if (VectorType *VTy = dyn_cast<VectorType>(C1->getType())) {
+    // Do not iterate on scalable vector. The number of elements is unknown at
+    // compile-time.
+    if (IsScalableVector)
+      return nullptr;
+
     // Fold each element and create a vector constant from those constants.
     SmallVector<Constant *, 16> Result;
     Type *Ty = IntegerType::get(VTy->getContext(), 32);
diff --git a/llvm/test/Analysis/ConstantFolding/vscale.ll b/llvm/test/Analysis/ConstantFolding/vscale.ll
new file mode 100644
--- /dev/null
+++ b/llvm/test/Analysis/ConstantFolding/vscale.ll
@@ -0,0 +1,155 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
+; RUN: opt < %s -constprop -S | FileCheck %s
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Binary Operations
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+define <vscale x 4 x i32> @add() {
+; CHECK-LABEL: @add(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = add <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x float> @fadd() {
+; CHECK-LABEL: @fadd(
+; CHECK-NEXT:    ret <vscale x 4 x float> undef
+;
+  %r = fadd <vscale x 4 x float> undef, undef
+  ret <vscale x 4 x float> %r
+}
+
+define <vscale x 4 x i32> @sub() {
+; CHECK-LABEL: @sub(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = sub <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x float> @fsub() {
+; CHECK-LABEL: @fsub(
+; CHECK-NEXT:    ret <vscale x 4 x float> undef
+;
+  %r = fsub <vscale x 4 x float> undef, undef
+  ret <vscale x 4 x float> %r
+}
+
+define <vscale x 4 x i32> @mul() {
+; CHECK-LABEL: @mul(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = mul <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x float> @fmul() {
+; CHECK-LABEL: @fmul(
+; CHECK-NEXT:    ret <vscale x 4 x float> undef
+;
+  %r = fmul <vscale x 4 x float> undef, undef
+  ret <vscale x 4 x float> %r
+}
+
+define <vscale x 4 x i32> @udiv() {
+; CHECK-LABEL: @udiv(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = udiv <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x i32> @sdiv() {
+; CHECK-LABEL: @sdiv(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = sdiv <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x float> @fdiv() {
+; CHECK-LABEL: @fdiv(
+; CHECK-NEXT:    ret <vscale x 4 x float> undef
+;
+  %r = fdiv <vscale x 4 x float> undef, undef
+  ret <vscale x 4 x float> %r
+}
+
+define <vscale x 4 x i32> @urem() {
+; CHECK-LABEL: @urem(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = urem <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x i32> @srem() {
+; CHECK-LABEL: @srem(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = srem <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x float> @frem() {
+; CHECK-LABEL: @frem(
+; CHECK-NEXT:    ret <vscale x 4 x float> undef
+;
+  %r = frem <vscale x 4 x float> undef, undef
+  ret <vscale x 4 x float> %r
+}
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Bitwise Binary Operations
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+define <vscale x 4 x i32> @shl() {
+; CHECK-LABEL: @shl(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = shl <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x i32> @lshr() {
+; CHECK-LABEL: @lshr(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = lshr <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x i32> @ashr() {
+; CHECK-LABEL: @ashr(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = ashr <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x i32> @and() {
+; CHECK-LABEL: @and(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = and <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x i32> @or() {
+; CHECK-LABEL: @or(
+; CHECK-NEXT:    ret <vscale x 4 x i32> undef
+;
+  %r = or <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}
+
+define <vscale x 4 x i32> @xor() {
+; CHECK-LABEL: @xor(
+; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
+;
+  %r = xor <vscale x 4 x i32> undef, undef
+  ret <vscale x 4 x i32> %r
+}