diff --git a/clang/include/clang/Analysis/FlowSensitive/DataflowAnalysis.h b/clang/include/clang/Analysis/FlowSensitive/DataflowAnalysis.h
--- a/clang/include/clang/Analysis/FlowSensitive/DataflowAnalysis.h
+++ b/clang/include/clang/Analysis/FlowSensitive/DataflowAnalysis.h
@@ -23,6 +23,7 @@
 #include "clang/Analysis/CFG.h"
 #include "clang/Analysis/FlowSensitive/ControlFlowContext.h"
 #include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
+#include "clang/Analysis/FlowSensitive/DataflowLattice.h"
 #include "clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h"
 #include "llvm/ADT/Any.h"
 #include "llvm/ADT/Optional.h"
@@ -32,16 +33,6 @@
 namespace clang {
 namespace dataflow {
 
-template <typename AnalysisT, typename LatticeT, typename = std::void_t<>>
-struct HasTransferBranchFor : std::false_type {};
-
-template <typename AnalysisT, typename LatticeT>
-struct HasTransferBranchFor<
-    AnalysisT, LatticeT,
-    std::void_t<decltype(std::declval<AnalysisT>().transferBranch(
-        std::declval<bool>(), std::declval<const Stmt *>(),
-        std::declval<LatticeT &>(), std::declval<Environment &>()))>>
-    : std::true_type {};
 /// Base class template for dataflow analyses built on a single lattice type.
 ///
 /// Requirements:
@@ -54,6 +45,11 @@
 ///    the analysis transfer function for a given CFG element and lattice
 ///    element.
 ///
+/// `Derived` can optionally provide the following members:
+///  * `void transferBranch(bool Branch, const Stmt *Stmt, TypeErasedLattice &E,
+///    Environment &Env)` - applies the analysis transfer
+///    function for a given edge from a CFG block of a conditional statement.
+///
 /// `Derived` can optionally override the following members:
 ///  * `bool merge(QualType, const Value &, const Value &, Value &,
 ///    Environment &)` - joins distinct values. This could be a strict
@@ -67,6 +63,15 @@
 ///    made to it;
 ///  * `bool operator==(const LatticeT &) const` - returns true if and only if
 ///    the object is equal to the argument.
+///
+/// `LatticeT` can optionally provide the following members:
+///  * `LatticeJoinEffect widen(const LatticeT &Previous)` - replaces the
+///    lattice element with an approximation that can reach a fixed point more
+///    quickly than iterated application of the transfer function alone. The
+///    previous value is provided to inform the choice of widened value. The
+///    function must also serve as a comparison operation, by indicating whether
+///    the widened value is equivalent to the previous value with the returned
+///    `LatticeJoinEffect`.
 template <typename Derived, typename LatticeT>
 class DataflowAnalysis : public TypeErasedDataflowAnalysis {
 public:
@@ -98,6 +103,13 @@
     return L1.join(L2);
   }
 
+  LatticeJoinEffect widenTypeErased(TypeErasedLattice &Current,
+                                    const TypeErasedLattice &Previous) final {
+    Lattice &C = llvm::any_cast<Lattice &>(Current.Value);
+    const Lattice &P = llvm::any_cast<const Lattice &>(Previous.Value);
+    return widenInternal(Rank0{}, C, P);
+  }
+
   bool isEqualTypeErased(const TypeErasedLattice &E1,
                          const TypeErasedLattice &E2) final {
     const Lattice &L1 = llvm::any_cast<const Lattice &>(E1.Value);
@@ -113,16 +125,47 @@
   void transferBranchTypeErased(bool Branch, const Stmt *Stmt,
                                 TypeErasedLattice &E, Environment &Env) final {
-    if constexpr (HasTransferBranchFor<Derived, Lattice>::value) {
-      Lattice &L = llvm::any_cast<Lattice &>(E.Value);
-      static_cast<Derived *>(this)->transferBranch(Branch, Stmt, L, Env);
-    }
-    // Silence unused parameter warnings.
-    (void)Branch;
-    (void)Stmt;
+    transferBranchInternal(Rank0{}, *static_cast<Derived *>(this), Branch, Stmt,
+                           E, Env);
   }
 
 private:
+  // These `Rank` structs are used for template metaprogramming to choose
+  // between overloads.
+  struct Rank1 {};
+  struct Rank0 : Rank1 {};
+
+  // The first-choice implementation: use `widen` when it is available.
+  template <typename T>
+  static auto widenInternal(Rank0, T &Current, const T &Prev)
+      -> decltype(Current.widen(Prev)) {
+    return Current.widen(Prev);
+  }
+
+  // The second-choice implementation: `widen` is unavailable. Widening is
+  // merged with equality checking, so when widening is unimplemented, we
+  // default to equality checking.
+  static LatticeJoinEffect widenInternal(Rank1, const Lattice &Current,
+                                         const Lattice &Prev) {
+    return Prev == Current ? LatticeJoinEffect::Unchanged
+                           : LatticeJoinEffect::Changed;
+  }
+
+  // The first-choice implementation: `transferBranch` is implemented.
+  template <typename Analysis>
+  static auto transferBranchInternal(Rank0, Analysis &A, bool Branch,
+                                     const Stmt *Stmt, TypeErasedLattice &L,
+                                     Environment &Env)
+      -> std::void_t<decltype(A.transferBranch(
          Branch, Stmt, std::declval<LatticeT &>(), Env))> {
+    A.transferBranch(Branch, Stmt, llvm::any_cast<Lattice &>(L.Value), Env);
+  }
+
+  // The second-choice implementation: `transferBranch` is unimplemented. No-op.
+  template <typename Analysis>
+  static void transferBranchInternal(Rank1, Analysis &A, bool, const Stmt *,
+                                     TypeErasedLattice &, Environment &) {}
+
   ASTContext &Context;
 };
diff --git a/clang/include/clang/Analysis/FlowSensitive/DataflowEnvironment.h b/clang/include/clang/Analysis/FlowSensitive/DataflowEnvironment.h
--- a/clang/include/clang/Analysis/FlowSensitive/DataflowEnvironment.h
+++ b/clang/include/clang/Analysis/FlowSensitive/DataflowEnvironment.h
@@ -26,6 +26,7 @@
 #include "clang/Analysis/FlowSensitive/Value.h"
 #include "llvm/ADT/DenseMap.h"
 #include "llvm/ADT/DenseSet.h"
+#include "llvm/Support/ErrorHandling.h"
 #include <memory>
 #include <type_traits>
 #include <utility>
@@ -88,11 +89,7 @@
                                    const Environment &Env2) {
     // FIXME: Consider adding QualType to StructValue and removing the Type
     // argument here.
-    //
-    // FIXME: default to a sound comparison (`Unknown`) and/or expand the
-    // comparison logic built into the framework to support broader forms of
-    // equivalence than strict pointer equality.
-    return ComparisonResult::Same;
+    return ComparisonResult::Unknown;
   }
 
   /// Modifies `MergedVal` to approximate both `Val1` and `Val2`. This could
@@ -118,6 +115,50 @@
                      Environment &MergedEnv) {
     return true;
   }
+
+  /// This function may widen the current value -- replace it with an
+  /// approximation that can reach a fixed point more quickly than iterated
+  /// application of the transfer function alone. The previous value is
+  /// provided to inform the choice of widened value. The function must also
+  /// serve as a comparison operation, by indicating whether the widened value
+  /// is equivalent to the previous value.
+  ///
+  /// Returns either:
+  ///
+  ///   `nullptr`, if this value is not of interest to the model, or
+  ///
+  ///   `&Prev`, if the widened value is equivalent to `Prev`, or
+  ///
+  ///   A non-null value that approximates `Current`. `Prev` is available to
+  ///   inform the chosen approximation.
+  ///
+  /// `PrevEnv` and `CurrentEnv` can be used to query child values and path
+  /// condition implications of `Prev` and `Current`, respectively.
+  ///
+  /// Requirements:
+  ///
+  ///  `Prev` must precede `Current` in the value ordering. Widening is *not*
+  ///  called when the current value is already equivalent to the previous
+  ///  value.
+  ///
+  ///  `Prev` and `Current` must model values of type `Type`.
+  ///
+  ///  `Prev` and `Current` must be assigned to the same storage location in
+  ///  `PrevEnv` and `CurrentEnv`, respectively.
+  virtual Value *widen(QualType Type, Value &Prev, const Environment &PrevEnv,
+                       Value &Current, Environment &CurrentEnv) {
+    // The default implementation reduces to just comparison, since comparison
+    // is required by the API, even if no widening is performed.
+    switch (compare(Type, Prev, PrevEnv, Current, CurrentEnv)) {
+    case ComparisonResult::Same:
+      return &Prev;
+    case ComparisonResult::Different:
+      return &Current;
+    case ComparisonResult::Unknown:
+      return nullptr;
+    }
+    llvm_unreachable("all cases in switch covered");
+  }
 };
 
 /// Creates an environment that uses `DACtx` to store objects that encompass
@@ -182,6 +223,17 @@
   LatticeJoinEffect join(const Environment &Other,
                          Environment::ValueModel &Model);
 
+  /// Widens the environment point-wise, using `PrevEnv` as needed to inform the
+  /// approximation.
+  ///
+  /// Requirements:
+  ///
+  ///  `PrevEnv` must precede `this` in the environment ordering.
+  ///  `PrevEnv` and `this` must use the same `DataflowAnalysisContext`.
+  LatticeJoinEffect widen(const Environment &PrevEnv,
+                          Environment::ValueModel &Model);
+
   // FIXME: Rename `createOrGetStorageLocation` to `getOrCreateStorageLocation`,
   // `getStableStorageLocation`, or something more appropriate.
@@ -407,6 +459,11 @@
   // `DACtx` is not null and not owned by this object.
   DataflowAnalysisContext *DACtx;
 
+  // FIXME: move the fields `CallStack`, `ReturnLoc` and `ThisPointeeLoc` into a
+  // separate call-context object, shared between environments in the same call.
+  // https://github.com/llvm/llvm-project/issues/59005
+
   // `DeclContext` of the block being analysed if provided.
   std::vector<const DeclContext *> CallStack;
diff --git a/clang/include/clang/Analysis/FlowSensitive/DataflowLattice.h b/clang/include/clang/Analysis/FlowSensitive/DataflowLattice.h
--- a/clang/include/clang/Analysis/FlowSensitive/DataflowLattice.h
+++ b/clang/include/clang/Analysis/FlowSensitive/DataflowLattice.h
@@ -18,6 +18,8 @@
 namespace dataflow {
 
 /// Effect indicating whether a lattice join operation resulted in a new value.
+// FIXME: Rename to `LatticeEffect` since `widen` uses it as well, and we are
+// likely removing it from `join`.
 enum class LatticeJoinEffect {
   Unchanged,
   Changed,
diff --git a/clang/include/clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h b/clang/include/clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h
--- a/clang/include/clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h
+++ b/clang/include/clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h
@@ -74,6 +74,20 @@
   virtual LatticeJoinEffect joinTypeErased(TypeErasedLattice &,
                                            const TypeErasedLattice &) = 0;
 
+  /// Chooses a lattice element that approximates the current element at a
+  /// program point, given the previous element at that point. Places the
+  /// widened result in the current element (`Current`). Widening is optional --
+  /// it is only needed to either accelerate convergence (for lattices with
+  /// non-trivial height) or guarantee convergence (for lattices with infinite
+  /// height).
+  ///
+  /// Returns an indication of whether any changes were made to `Current` in
+  /// order to widen. This saves a separate call to `isEqualTypeErased` after
+  /// the widening.
+  virtual LatticeJoinEffect
+  widenTypeErased(TypeErasedLattice &Current,
+                  const TypeErasedLattice &Previous) = 0;
+
   /// Returns true if and only if the two given type-erased lattice elements are
   /// equal.
   virtual bool isEqualTypeErased(const TypeErasedLattice &,
diff --git a/clang/include/clang/Analysis/FlowSensitive/Value.h b/clang/include/clang/Analysis/FlowSensitive/Value.h
--- a/clang/include/clang/Analysis/FlowSensitive/Value.h
+++ b/clang/include/clang/Analysis/FlowSensitive/Value.h
@@ -106,9 +106,6 @@
 
 /// Models the trivially true formula, which is Top in the lattice of boolean
 /// formulas.
-///
-/// FIXME: Given the subtlety of comparison involving `TopBoolValue`, define
-/// `operator==` for `Value`.
 class TopBoolValue final : public BoolValue {
 public:
   TopBoolValue() : BoolValue(Kind::TopBool) {}
diff --git a/clang/lib/Analysis/FlowSensitive/DataflowEnvironment.cpp b/clang/lib/Analysis/FlowSensitive/DataflowEnvironment.cpp
--- a/clang/lib/Analysis/FlowSensitive/DataflowEnvironment.cpp
+++ b/clang/lib/Analysis/FlowSensitive/DataflowEnvironment.cpp
@@ -49,19 +49,52 @@
   return Result;
 }
 
+static bool compareDistinctValues(QualType Type, Value &Val1,
+                                  const Environment &Env1, Value &Val2,
+                                  const Environment &Env2,
+                                  Environment::ValueModel &Model) {
+  // Note: Potentially costly, but, for booleans, we could check whether both
+  // can be proven equivalent in their respective environments.
+
+  // FIXME: move the reference/pointers logic from `areEquivalentValues` to here
+  // and implement separate, join/widen specific handling for
+  // reference/pointers.
+  switch (Model.compare(Type, Val1, Env1, Val2, Env2)) {
+  case ComparisonResult::Same:
+    return true;
+  case ComparisonResult::Different:
+    return false;
+  case ComparisonResult::Unknown:
+    switch (Val1.getKind()) {
+    case Value::Kind::Integer:
+    case Value::Kind::Reference:
+    case Value::Kind::Pointer:
+    case Value::Kind::Struct:
+      // FIXME: this choice intentionally introduces unsoundness to allow
+      // for convergence. Once we have widening support for the
+      // reference/pointer and struct built-in models, this should be
+      // `false`.
+      return true;
+    default:
+      return false;
+    }
+  }
+  llvm_unreachable("All cases covered in switch");
+}
+
 /// Attempts to merge distinct values `Val1` and `Val2` in `Env1` and `Env2`,
 /// respectively, of the same type `Type`. Merging generally produces a single
 /// value that (soundly) approximates the two inputs, although the actual
 /// meaning depends on `Model`.
-static Value *mergeDistinctValues(QualType Type, Value *Val1,
-                                  const Environment &Env1, Value *Val2,
+static Value *mergeDistinctValues(QualType Type, Value &Val1,
+                                  const Environment &Env1, Value &Val2,
                                   const Environment &Env2,
                                   Environment &MergedEnv,
                                   Environment::ValueModel &Model) {
   // Join distinct boolean values preserving information about the constraints
   // in the respective path conditions.
-  if (auto *Expr1 = dyn_cast<BoolValue>(Val1)) {
-    auto *Expr2 = cast<BoolValue>(Val2);
+  if (auto *Expr1 = dyn_cast<BoolValue>(&Val1)) {
+    auto *Expr2 = cast<BoolValue>(&Val2);
     auto &MergedVal = MergedEnv.makeAtomicBoolValue();
     MergedEnv.addToFlowCondition(MergedEnv.makeOr(
         MergedEnv.makeAnd(Env1.getFlowConditionToken(),
@@ -74,12 +107,37 @@
   // FIXME: Consider destroying `MergedValue` immediately if `ValueModel::merge`
   // returns false to avoid storing unneeded values in `DACtx`.
   if (Value *MergedVal = MergedEnv.createValue(Type))
-    if (Model.merge(Type, *Val1, Env1, *Val2, Env2, *MergedVal, MergedEnv))
+    if (Model.merge(Type, Val1, Env1, Val2, Env2, *MergedVal, MergedEnv))
      return MergedVal;
 
   return nullptr;
 }
 
+// When widening does not change `Current`, the return value will equal `&Prev`.
+static Value &widenDistinctValues(QualType Type, Value &Prev,
+                                  const Environment &PrevEnv, Value &Current,
+                                  Environment &CurrentEnv,
+                                  Environment::ValueModel &Model) {
+  // Boolean-model widening.
+  if (isa<BoolValue>(&Prev)) {
+    assert(isa<BoolValue>(Current));
+    // Widen to Top, because we know they are different values. If previous was
+    // already Top, re-use that to (implicitly) indicate that no change occurred.
+    if (isa<TopBoolValue>(Prev))
+      return Prev;
+    return CurrentEnv.makeTopBoolValue();
+  }
+
+  // FIXME: Add other built-in model widening.
+
+  // Custom-model widening.
+  if (auto *W = Model.widen(Type, Prev, PrevEnv, Current, CurrentEnv))
+    return *W;
+
+  // Default of widening is a no-op: leave the current value unchanged.
+  return Current;
+}
+
 /// Initializes a global storage value.
 static void initGlobalVar(const VarDecl &D, Environment &Env) {
   if (!D.hasGlobalStorage() ||
@@ -295,14 +353,72 @@
     assert(It->second != nullptr);
 
     if (!areEquivalentValues(*Val, *It->second) &&
-        Model.compare(Loc->getType(), *Val, *this, *It->second, Other) !=
-            ComparisonResult::Same)
+        !compareDistinctValues(Loc->getType(), *Val, *this, *It->second, Other,
+                               Model))
       return false;
   }
 
   return true;
 }
 
+LatticeJoinEffect Environment::widen(const Environment &PrevEnv,
+                                     Environment::ValueModel &Model) {
+  assert(DACtx == PrevEnv.DACtx);
+  assert(ReturnLoc == PrevEnv.ReturnLoc);
+  assert(ThisPointeeLoc == PrevEnv.ThisPointeeLoc);
+  assert(CallStack == PrevEnv.CallStack);
+
+  auto Effect = LatticeJoinEffect::Unchanged;
+
+  // By the API, `PrevEnv` <= `*this`, meaning `join(PrevEnv, *this) =
+  // *this`. That guarantees that these maps are subsets of the maps in
+  // `PrevEnv`. So, we don't need to change their current values to widen (in
+  // contrast to `join`).
+  //
+  // FIXME: The above is violated for `MemberLocToStruct`, because `join` can
+  // cause the map size to increase (when we add fresh data in places of
+  // conflict). Once this issue with join is resolved, re-enable the assertion
+  // below or replace with something that captures the desired invariant.
+  assert(DeclToLoc.size() <= PrevEnv.DeclToLoc.size());
+  assert(ExprToLoc.size() <= PrevEnv.ExprToLoc.size());
+  // assert(MemberLocToStruct.size() <= PrevEnv.MemberLocToStruct.size());
+
+  llvm::DenseMap<const StorageLocation *, Value *> WidenedLocToVal;
+  for (auto &Entry : LocToVal) {
+    const StorageLocation *Loc = Entry.first;
+    assert(Loc != nullptr);
+
+    Value *Val = Entry.second;
+    assert(Val != nullptr);
+
+    auto PrevIt = PrevEnv.LocToVal.find(Loc);
+    if (PrevIt == PrevEnv.LocToVal.end())
+      continue;
+    assert(PrevIt->second != nullptr);
+
+    if (areEquivalentValues(*Val, *PrevIt->second)) {
+      WidenedLocToVal.insert({Loc, Val});
+      continue;
+    }
+
+    Value &WidenedVal = widenDistinctValues(Loc->getType(), *PrevIt->second,
+                                            PrevEnv, *Val, *this, Model);
+    WidenedLocToVal.insert({Loc, &WidenedVal});
+    if (&WidenedVal != PrevIt->second)
+      Effect = LatticeJoinEffect::Changed;
+  }
+  LocToVal = std::move(WidenedLocToVal);
+  // FIXME: update the equivalence calculation for `MemberLocToStruct`, once we
+  // have a systematic way of soundly comparing this map.
+  if (DeclToLoc.size() != PrevEnv.DeclToLoc.size() ||
+      ExprToLoc.size() != PrevEnv.ExprToLoc.size() ||
+      LocToVal.size() != PrevEnv.LocToVal.size() ||
+      MemberLocToStruct.size() != PrevEnv.MemberLocToStruct.size())
+    Effect = LatticeJoinEffect::Changed;
+
+  return Effect;
+}
+
 LatticeJoinEffect Environment::join(const Environment &Other,
                                     Environment::ValueModel &Model) {
   assert(DACtx == Other.DACtx);
@@ -328,10 +444,16 @@
   JoinedEnv.MemberLocToStruct =
       intersectDenseMaps(MemberLocToStruct, Other.MemberLocToStruct);
+  assert(JoinedEnv.MemberLocToStruct.size() <=
+         std::min(MemberLocToStruct.size(), Other.MemberLocToStruct.size()));
+
+  intersectDenseMaps(MemberLocToStruct, Other.MemberLocToStruct);
   if (MemberLocToStruct.size() != JoinedEnv.MemberLocToStruct.size())
     Effect = LatticeJoinEffect::Changed;
 
   // FIXME: set `Effect` as needed.
+  // FIXME: update join to detect backedges and simplify the flow condition
+  // accordingly.
   JoinedEnv.FlowConditionToken = &DACtx->joinFlowConditions(
       *FlowConditionToken, *Other.FlowConditionToken);
 
@@ -352,9 +474,12 @@
       continue;
     }
 
-    if (Value *MergedVal = mergeDistinctValues(
-            Loc->getType(), Val, *this, It->second, Other, JoinedEnv, Model))
+    if (Value *MergedVal =
+            mergeDistinctValues(Loc->getType(), *Val, *this, *It->second, Other,
+                                JoinedEnv, Model)) {
       JoinedEnv.LocToVal.insert({Loc, MergedVal});
+      Effect = LatticeJoinEffect::Changed;
+    }
   }
   if (LocToVal.size() != JoinedEnv.LocToVal.size())
     Effect = LatticeJoinEffect::Changed;
@@ -498,6 +623,9 @@
   }
 
   if (Type->isIntegerType()) {
+    // FIXME: consider instead `return nullptr`, given that we do nothing useful
+    // with integers, and so distinguishing them serves no purpose, but could
+    // prevent convergence.
     CreatedValuesCount++;
     return &takeOwnership(std::make_unique<IntegerValue>());
   }
diff --git a/clang/lib/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.cpp b/clang/lib/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.cpp
--- a/clang/lib/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.cpp
+++ b/clang/lib/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.cpp
@@ -23,6 +23,7 @@
 #include "clang/Analysis/Analyses/PostOrderCFGView.h"
 #include "clang/Analysis/CFG.h"
 #include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
+#include "clang/Analysis/FlowSensitive/DataflowLattice.h"
 #include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
 #include "clang/Analysis/FlowSensitive/Transfer.h"
 #include "clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h"
@@ -69,6 +70,20 @@
   return BlockPos - Pred.succ_begin();
 }
 
+static bool isLoopHead(const CFGBlock &B) {
+  if (const auto *T = B.getTerminatorStmt())
+    switch (T->getStmtClass()) {
+    case Stmt::WhileStmtClass:
+    case Stmt::DoStmtClass:
+    case Stmt::ForStmtClass:
+      return true;
+    default:
+      return false;
+    }
+
+  return false;
+}
+
 // The return type of the visit functions in TerminatorVisitor. The first
 // element represents the terminator expression (that is the conditional
 // expression in case of a path split in the CFG). The second element
@@ -435,13 +450,24 @@
     TypeErasedDataflowAnalysisState NewBlockState =
         transferCFGBlock(*Block, AC);
 
-    if (OldBlockState &&
-        Analysis.isEqualTypeErased(OldBlockState.value().Lattice,
-                                   NewBlockState.Lattice) &&
-        OldBlockState->Env.equivalentTo(NewBlockState.Env, Analysis)) {
-      // The state of `Block` didn't change after transfer so there's no need to
-      // revisit its successors.
-      continue;
+    if (OldBlockState) {
+      if (isLoopHead(*Block)) {
+        LatticeJoinEffect Effect1 = Analysis.widenTypeErased(
+            NewBlockState.Lattice, OldBlockState.value().Lattice);
+        LatticeJoinEffect Effect2 =
+            NewBlockState.Env.widen(OldBlockState->Env, Analysis);
+        if (Effect1 == LatticeJoinEffect::Unchanged &&
+            Effect2 == LatticeJoinEffect::Unchanged)
+          // The state of `Block` didn't change from widening so there's no need
+          // to revisit its successors.
+          continue;
+      } else if (Analysis.isEqualTypeErased(OldBlockState.value().Lattice,
+                                            NewBlockState.Lattice) &&
+                 OldBlockState->Env.equivalentTo(NewBlockState.Env, Analysis)) {
+        // The state of `Block` didn't change after transfer so there's no need
+        // to revisit its successors.
+        continue;
+      }
     }
 
     BlockStates[Block->getBlockID()] = std::move(NewBlockState);
diff --git a/clang/unittests/Analysis/FlowSensitive/TransferTest.cpp b/clang/unittests/Analysis/FlowSensitive/TransferTest.cpp
--- a/clang/unittests/Analysis/FlowSensitive/TransferTest.cpp
+++ b/clang/unittests/Analysis/FlowSensitive/TransferTest.cpp
@@ -9,7 +9,6 @@
 #include "TestingSupport.h"
 #include "clang/AST/ASTContext.h"
 #include "clang/AST/Decl.h"
-#include "clang/ASTMatchers/ASTMatchFinder.h"
 #include "clang/ASTMatchers/ASTMatchers.h"
 #include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
 #include "clang/Analysis/FlowSensitive/NoopAnalysis.h"
@@ -63,7 +62,7 @@
                  LangStandard::Kind Std = LangStandard::lang_cxx17,
                  bool ApplyBuiltinTransfer = true,
                  llvm::StringRef TargetFun = "target") {
-  runDataflow(Code, Match,
+  runDataflow(Code, std::move(Match),
               {ApplyBuiltinTransfer ? TransferOptions{}
                                     : llvm::Optional<TransferOptions>()},
               Std, TargetFun);
@@ -3255,8 +3254,7 @@
 
 TEST(TransferTest, LoopWithAssignmentConverges) {
   std::string Code = R"(
-
-    bool &foo();
+    bool foo();
 
     void target() {
      do {
@@ -3285,9 +3283,45 @@
       });
 }
 
-TEST(TransferTest, LoopWithReferenceAssignmentConverges) {
+TEST(TransferTest, LoopWithStagedAssignments) {
   std::string Code = R"(
+    bool foo();
+
+    void target() {
+      bool Bar = false;
+      bool Err = false;
+      while (foo()) {
+        if (Bar)
+          Err = true;
+        Bar = true;
+        /*[[p]]*/
+      }
+    }
+  )";
+  runDataflow(
+      Code,
+      [](const llvm::StringMap<DataflowAnalysisState<NoopLattice>> &Results,
+         ASTContext &ASTCtx) {
+        ASSERT_THAT(Results.keys(), UnorderedElementsAre("p"));
+        const Environment &Env = getEnvironmentAtAnnotation(Results, "p");
+
+        const ValueDecl *BarDecl = findValueDecl(ASTCtx, "Bar");
+        ASSERT_THAT(BarDecl, NotNull());
+        const ValueDecl *ErrDecl = findValueDecl(ASTCtx, "Err");
+        ASSERT_THAT(ErrDecl, NotNull());
+
+        auto &BarVal = *cast<BoolValue>(Env.getValue(*BarDecl, SkipPast::None));
+        auto &ErrVal = *cast<BoolValue>(Env.getValue(*ErrDecl, SkipPast::None));
+        EXPECT_TRUE(Env.flowConditionImplies(BarVal));
+        // An unsound analysis, for example only evaluating the loop once, can
+        // conclude that `Err` is false. So, we test that this conclusion is not
+        // reached.
+        EXPECT_FALSE(Env.flowConditionImplies(Env.makeNot(ErrVal)));
+      });
+}
+TEST(TransferTest, LoopWithReferenceAssignmentConverges) {
+  std::string Code = R"(
     bool &foo();
 
    void target() {
@@ -3299,9 +3333,8 @@
      } while (true);
    }
  )";
-  // The key property that we are verifying is implicit in `runDataflow` --
-  // namely, that the analysis succeeds, rather than hitting the maximum number
-  // of iterations.
+  // The key property that we are verifying is that the analysis succeeds,
+  // rather than hitting the maximum number of iterations.
   runDataflow(
       Code,
       [](const llvm::StringMap<DataflowAnalysisState<NoopLattice>> &Results,
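
For illustration, here is a minimal sketch -- not part of the patch -- of how a client lattice might opt into the `widen` hook that the change above documents for `LatticeT`. The type `CounterLattice` and its `Bound` constant are hypothetical, invented for this example; only the member signatures (`join`, `operator==`, and a `widen` that also reports equivalence via the returned `LatticeJoinEffect`) follow the documented requirements.

// Hypothetical example -- not from the patch. A lattice that counts loop
// iterations and uses `widen` to jump to a designated Top once a bound is
// exceeded, so the analysis converges at loop heads.
#include "clang/Analysis/FlowSensitive/DataflowLattice.h"
#include <algorithm>

using clang::dataflow::LatticeJoinEffect;

struct CounterLattice {
  static constexpr unsigned Bound = 3; // Assumed bound, chosen arbitrarily.
  unsigned Count = 0;
  bool IsTop = false;

  bool operator==(const CounterLattice &Other) const {
    return IsTop == Other.IsTop && (IsTop || Count == Other.Count);
  }

  // `join` takes the pointwise maximum; Top absorbs everything.
  LatticeJoinEffect join(const CounterLattice &Other) {
    const CounterLattice Old = *this;
    IsTop = IsTop || Other.IsTop;
    Count = std::max(Count, Other.Count);
    return *this == Old ? LatticeJoinEffect::Unchanged
                        : LatticeJoinEffect::Changed;
  }

  // `widen` replaces the element with Top once `Bound` is exceeded and, per
  // the documented contract, also reports whether the widened element is
  // equivalent to `Previous`.
  LatticeJoinEffect widen(const CounterLattice &Previous) {
    if (Count > Bound)
      IsTop = true;
    return *this == Previous ? LatticeJoinEffect::Unchanged
                             : LatticeJoinEffect::Changed;
  }
};

With the change to TypeErasedDataflowAnalysis.cpp above, `widenTypeErased` and `Environment::widen` are invoked only at loop heads, so an element like this stabilizes at Top after a bounded number of iterations rather than growing without bound.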