Index: include/llvm/CodeGen/FunctionLoweringInfo.h
===================================================================
--- include/llvm/CodeGen/FunctionLoweringInfo.h
+++ include/llvm/CodeGen/FunctionLoweringInfo.h
@@ -72,6 +72,37 @@
   /// MBBMap - A mapping from LLVM basic blocks to their machine code entry.
   DenseMap<const BasicBlock*, MachineBasicBlock *> MBBMap;
 
+  typedef SmallVector<unsigned, 1> SwiftErrorVRegs;
+  typedef SmallVector<const Value*, 1> SwiftErrorValues;
+  /// A function can only have a single swifterror argument. And if it does
+  /// have a swifterror argument, it must be the first entry in
+  /// SwiftErrorVals.
+  SwiftErrorValues SwiftErrorVals;
+
+  /// Track the virtual register for each swifterror value in a given basic
+  /// block. Entries in SwiftErrorVRegs have the same ordering as entries
+  /// in SwiftErrorVals.
+  /// Note that a more straightforward choice is to use
+  /// Map<const MachineBasicBlock*, Map<Value*, unsigned/*VReg*/>>, which
+  /// maintains a map from swifterror values to virtual registers for each
+  /// machine basic block. That choice does not require a one-to-one
+  /// correspondence between SwiftErrorValues and SwiftErrorVRegs, but we do
+  /// not use it for efficiency reasons.
+  llvm::DenseMap<const MachineBasicBlock*, SwiftErrorVRegs> SwiftErrorMap;
+
+  /// Track the virtual register for each swifterror value at the end of a
+  /// basic block when we need the assignment of a virtual register before
+  /// the basic block is visited. When we actually visit the basic block, we
+  /// will make sure the swifterror value is in the correct virtual register.
+  llvm::DenseMap<const MachineBasicBlock*, SwiftErrorVRegs>
+      SwiftErrorWorklist;
+
+  /// Find the swifterror virtual register in SwiftErrorMap. We assert when
+  /// the value does not exist in the swifterror map.
+  unsigned findSwiftErrorVReg(const MachineBasicBlock*, const Value*) const;
+  /// Set the swifterror virtual register in SwiftErrorMap.
+  void setSwiftErrorVReg(const MachineBasicBlock *MBB, const Value*, unsigned);
+
   /// ValueMap - Since we emit code for the function a basic block at a time,
   /// we must remember which virtual registers hold the values for
   /// cross-basic-block values.
Index: include/llvm/Target/TargetLowering.h
===================================================================
--- include/llvm/Target/TargetLowering.h
+++ include/llvm/Target/TargetLowering.h
@@ -2267,6 +2267,12 @@
     return false;
   }
 
+  /// Return true if the target supports the swifterror attribute. It
+  /// optimizes loads and stores to reading and writing a specific register.
+  virtual bool supportSwiftError() const {
+    return false;
+  }
+
   /// Return true if the target supports that a subset of CSRs for the given
   /// machine function is handled explicitly via copies.
   virtual bool supportSplitCSR(MachineFunction *MF) const {
@@ -2369,6 +2375,7 @@
     SmallVector<ISD::OutputArg, 32> Outs;
     SmallVector<SDValue, 32> OutVals;
     SmallVector<ISD::InputArg, 32> Ins;
+    SmallVector<SDValue, 4> InVals;
 
     CallLoweringInfo(SelectionDAG &DAG)
         : RetTy(nullptr), RetSExt(false), RetZExt(false), IsVarArg(false),
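For context, nothing in this patch is active until a backend opts in through the new supportSwiftError() hook. A minimal sketch of such an override follows; the target class name is hypothetical, and a real target would also teach its calling-convention lowering which physical register carries swifterror:

    #include "llvm/Target/TargetLowering.h"

    // Hypothetical target opting in to swifterror lowering; illustration only.
    class MyTargetLowering : public llvm::TargetLowering {
    public:
      explicit MyTargetLowering(const llvm::TargetMachine &TM)
          : TargetLowering(TM) {}

      // With this returning true, loads and stores of swifterror values are
      // lowered to register copies instead of memory operations.
      bool supportSwiftError() const override { return true; }
    };
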
Index: lib/CodeGen/SelectionDAG/FastISel.cpp
===================================================================
--- lib/CodeGen/SelectionDAG/FastISel.cpp
+++ lib/CodeGen/SelectionDAG/FastISel.cpp
@@ -1324,6 +1324,15 @@
   return true;
 }
 
+// Return true if we should copy from swifterror to the final vreg as
+// specified by SwiftErrorWorklist.
+static bool shouldCopySwiftErrorsToFinalVRegs(const TargetLowering &TLI,
+                                              FunctionLoweringInfo &FuncInfo) {
+  if (!TLI.supportSwiftError())
+    return false;
+  return FuncInfo.SwiftErrorWorklist.count(FuncInfo.MBB);
+}
+
 // Remove local value instructions starting from the instruction after
 // SavedLastLocalValue to the current function insert point.
 void FastISel::removeDeadLocalValueCode(MachineInstr *SavedLastLocalValue)
@@ -1347,7 +1356,9 @@
   MachineInstr *SavedLastLocalValue = getLastLocalValue();
   // Just before the terminator instruction, insert instructions to
   // feed PHI nodes in successor blocks.
-  if (isa<TerminatorInst>(I))
+  if (isa<TerminatorInst>(I)) {
+    // Fall back to SelectionDAG when copies into the final swifterror vregs
+    // are needed; FastISel does not emit them itself.
+    if (shouldCopySwiftErrorsToFinalVRegs(TLI, FuncInfo))
+      return false;
     if (!handlePHINodesInSuccessorBlocks(I->getParent())) {
       // PHI node handling may have generated local value instructions,
       // even though it failed to handle all PHI nodes.
@@ -1356,6 +1367,7 @@
       removeDeadLocalValueCode(SavedLastLocalValue);
       return false;
     }
+  }
 
   // FastISel does not handle any operand bundles except OB_funclet.
   if (ImmutableCallSite CS = ImmutableCallSite(I))
Index: lib/CodeGen/SelectionDAG/FunctionLoweringInfo.cpp
===================================================================
--- lib/CodeGen/SelectionDAG/FunctionLoweringInfo.cpp
+++ lib/CodeGen/SelectionDAG/FunctionLoweringInfo.cpp
@@ -595,3 +595,21 @@
     }
   }
 }
+
+unsigned FunctionLoweringInfo::findSwiftErrorVReg(const MachineBasicBlock *MBB,
+                                                  const Value* Val) const {
+  // Find the index in SwiftErrorVals.
+  SwiftErrorValues::const_iterator I =
+      std::find(SwiftErrorVals.begin(), SwiftErrorVals.end(), Val);
+  assert(I != SwiftErrorVals.end() && "Can't find value in SwiftErrorVals");
+  return SwiftErrorMap.lookup(MBB)[I - SwiftErrorVals.begin()];
+}
+
+void FunctionLoweringInfo::setSwiftErrorVReg(const MachineBasicBlock *MBB,
+                                             const Value* Val, unsigned VReg) {
+  // Find the index in SwiftErrorVals.
+  SwiftErrorValues::iterator I =
+      std::find(SwiftErrorVals.begin(), SwiftErrorVals.end(), Val);
+  assert(I != SwiftErrorVals.end() && "Can't find value in SwiftErrorVals");
+  SwiftErrorMap[MBB][I - SwiftErrorVals.begin()] = VReg;
+}
Index: lib/CodeGen/SelectionDAG/SelectionDAGBuilder.h
===================================================================
--- lib/CodeGen/SelectionDAG/SelectionDAGBuilder.h
+++ lib/CodeGen/SelectionDAG/SelectionDAGBuilder.h
@@ -895,6 +895,8 @@
   bool visitBinaryFloatCall(const CallInst &I, unsigned Opcode);
   void visitAtomicLoad(const LoadInst &I);
   void visitAtomicStore(const StoreInst &I);
+  void visitLoadFromSwiftError(const LoadInst &I);
+  void visitStoreToSwiftError(const StoreInst &I);
   void visitInlineAsm(ImmutableCallSite CS);
   const char *visitIntrinsicCall(const CallInst &I, unsigned Intrinsic);
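The two helpers above rely on an index-based invariant: SwiftErrorMap[MBB] is a vector parallel to SwiftErrorVals, so a value's position in SwiftErrorVals is also its vreg's position in every per-block vector. A self-contained model of that bookkeeping using standard containers (names mirror the patch, but this is illustration only):

    #include <algorithm>
    #include <cassert>
    #include <map>
    #include <vector>

    struct Value {};              // stand-in for llvm::Value
    struct MachineBasicBlock {};  // stand-in for llvm::MachineBasicBlock

    static std::vector<const Value *> SwiftErrorVals;
    static std::map<const MachineBasicBlock *, std::vector<unsigned>>
        SwiftErrorMap;

    // Mirrors FunctionLoweringInfo::findSwiftErrorVReg: the vreg for Val in
    // MBB lives at the index Val occupies in SwiftErrorVals.
    static unsigned findSwiftErrorVReg(const MachineBasicBlock *MBB,
                                       const Value *Val) {
      auto I = std::find(SwiftErrorVals.begin(), SwiftErrorVals.end(), Val);
      assert(I != SwiftErrorVals.end() && "Can't find value in SwiftErrorVals");
      return SwiftErrorMap.at(MBB)[I - SwiftErrorVals.begin()];
    }

    // Mirrors setSwiftErrorVReg; assumes the per-block vector has already
    // been sized, as mergeIncomingSwiftErrors guarantees in the patch.
    static void setSwiftErrorVReg(const MachineBasicBlock *MBB,
                                  const Value *Val, unsigned VReg) {
      auto I = std::find(SwiftErrorVals.begin(), SwiftErrorVals.end(), Val);
      assert(I != SwiftErrorVals.end() && "Can't find value in SwiftErrorVals");
      SwiftErrorMap[MBB][I - SwiftErrorVals.begin()] = VReg;
    }

The map-of-maps alternative discussed in FunctionLoweringInfo.h would avoid this parallel-index invariant, at the cost of an extra per-value lookup in every block.
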
Index: lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
===================================================================
--- lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
+++ lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
@@ -929,10 +929,48 @@
   return Root;
 }
 
+/// Copy swifterror values to their final virtual registers at the end of a
+/// basic block, as specified by SwiftErrorWorklist, if necessary.
+static void copySwiftErrorsToFinalVRegs(SelectionDAGBuilder &SDB) {
+  const TargetLowering &TLI = SDB.DAG.getTargetLoweringInfo();
+  if (!TLI.supportSwiftError())
+    return;
+
+  if (!SDB.FuncInfo.SwiftErrorWorklist.count(SDB.FuncInfo.MBB))
+    return;
+
+  // Go through entries in SwiftErrorWorklist, and create copies as necessary.
+  FunctionLoweringInfo::SwiftErrorVRegs &WorklistEntry =
+      SDB.FuncInfo.SwiftErrorWorklist[SDB.FuncInfo.MBB];
+  FunctionLoweringInfo::SwiftErrorVRegs &MapEntry =
+      SDB.FuncInfo.SwiftErrorMap[SDB.FuncInfo.MBB];
+  for (unsigned I = 0, E = WorklistEntry.size(); I < E; I++) {
+    unsigned WorkReg = WorklistEntry[I];
+
+    // Find the swifterror virtual register for the value in SwiftErrorMap.
+    unsigned MapReg = MapEntry[I];
+    assert(TargetRegisterInfo::isVirtualRegister(MapReg) &&
+           "Entries in SwiftErrorMap should be virtual registers");
+
+    if (WorkReg == MapReg)
+      continue;
+
+    // Create a copy from the SwiftErrorMap register to the SwiftErrorWorklist
+    // register.
+    auto &DL = SDB.DAG.getDataLayout();
+    SDValue CopyNode = SDB.DAG.getCopyToReg(
+        SDB.getRoot(), SDB.getCurSDLoc(), WorkReg,
+        SDB.DAG.getRegister(MapReg, EVT(TLI.getPointerTy(DL))));
+    MapEntry[I] = WorkReg;
+    SDB.DAG.setRoot(CopyNode);
+  }
+}
+
 void SelectionDAGBuilder::visit(const Instruction &I) {
   // Set up outgoing PHI node register values before emitting the terminator.
-  if (isa<TerminatorInst>(&I))
+  if (isa<TerminatorInst>(&I)) {
+    copySwiftErrorsToFinalVRegs(*this);
     HandlePHINodesInSuccessorBlocks(I.getParent());
+  }
 
   ++SDNodeOrder;
@@ -1434,6 +1472,23 @@
     }
   }
 
+  // Push the swifterror virtual register as the last element of Outs. This
+  // makes sure the swifterror virtual register will be returned in the
+  // swifterror physical register.
+  const Function *F = I.getParent()->getParent();
+  if (TLI.supportSwiftError() &&
+      F->getAttributes().hasAttrSomewhere(Attribute::SwiftError)) {
+    ISD::ArgFlagsTy Flags = ISD::ArgFlagsTy();
+    Flags.setSwiftError();
+    Outs.push_back(ISD::OutputArg(Flags, EVT(TLI.getPointerTy(DL)) /*vt*/,
+                                  EVT(TLI.getPointerTy(DL)) /*argvt*/,
+                                  true /*isfixed*/, 1 /*origidx*/,
+                                  0 /*partOffs*/));
+    // Create SDNode for the swifterror virtual register.
+    OutVals.push_back(DAG.getRegister(FuncInfo.SwiftErrorMap[FuncInfo.MBB][0],
+                                      EVT(TLI.getPointerTy(DL))));
+  }
+
   bool isVarArg = DAG.getMachineFunction().getFunction()->isVarArg();
   CallingConv::ID CallConv =
     DAG.getMachineFunction().getFunction()->getCallingConv();
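copySwiftErrorsToFinalVRegs above is one half of the SwiftErrorWorklist handshake: a block visited before its predecessor "promises" vregs for that predecessor, and the predecessor must end with its swifterror values in exactly those vregs. A standalone sketch of the reconciliation (standard containers; illustrative only):

    #include <vector>

    // Reconcile the vregs this block currently holds (MapEntry) with the
    // vregs a successor already promised (WorklistEntry). The real patch
    // emits a CopyToReg node; here the update is just an assignment.
    static void syncSwiftErrorVRegs(std::vector<unsigned> &MapEntry,
                                    const std::vector<unsigned> &WorklistEntry) {
      for (unsigned I = 0, E = WorklistEntry.size(); I < E; ++I) {
        if (WorklistEntry[I] == MapEntry[I])
          continue; // value already lives in the promised vreg
        // Real code: DAG.setRoot(DAG.getCopyToReg(Root, Loc, WorkReg, MapReg))
        MapEntry[I] = WorklistEntry[I];
      }
    }
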
@@ -3308,7 +3363,22 @@
   if (I.isAtomic())
     return visitAtomicLoad(I);
 
+  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
   const Value *SV = I.getOperand(0);
+  if (TLI.supportSwiftError()) {
+    // Swifterror values can come from either a function parameter with the
+    // swifterror attribute or an alloca with the swifterror attribute.
+    if (const Argument *Arg = dyn_cast<Argument>(SV)) {
+      if (Arg->hasSwiftErrorAttr())
+        return visitLoadFromSwiftError(I);
+    }
+
+    if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(SV)) {
+      if (Alloca->isSwiftError())
+        return visitLoadFromSwiftError(I);
+    }
+  }
+
   SDValue Ptr = getValue(SV);
 
   Type *Ty = I.getType();
@@ -3332,7 +3402,6 @@
   I.getAAMetadata(AAInfo);
   const MDNode *Ranges = I.getMetadata(LLVMContext::MD_range);
 
-  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
   SmallVector<EVT, 4> ValueVTs;
   SmallVector<uint64_t, 4> Offsets;
   ComputeValueVTs(TLI, DAG.getDataLayout(), Ty, ValueVTs, &Offsets);
@@ -3409,6 +3478,64 @@
                            DAG.getVTList(ValueVTs), Values));
 }
 
+void SelectionDAGBuilder::visitStoreToSwiftError(const StoreInst &I) {
+  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
+  assert(TLI.supportSwiftError() &&
+         "call visitStoreToSwiftError when backend supports swifterror");
+
+  SmallVector<EVT, 4> ValueVTs;
+  SmallVector<uint64_t, 4> Offsets;
+  const Value *SrcV = I.getOperand(0);
+  ComputeValueVTs(DAG.getTargetLoweringInfo(), DAG.getDataLayout(),
+                  SrcV->getType(), ValueVTs, &Offsets);
+  assert(ValueVTs.size() == 1 && Offsets[0] == 0 &&
+         "expect a single EVT for swifterror");
+
+  SDValue Src = getValue(SrcV);
+  // Create a virtual register, then update the virtual register.
+  auto &DL = DAG.getDataLayout();
+  const TargetRegisterClass *RC = TLI.getRegClassFor(TLI.getPointerTy(DL));
+  unsigned VReg = FuncInfo.MF->getRegInfo().createVirtualRegister(RC);
+  // Chain, DL, Reg, N or Chain, DL, Reg, N, Glue
+  // Chain can be getRoot or getControlRoot.
+  SDValue CopyNode = DAG.getCopyToReg(getRoot(), getCurSDLoc(), VReg,
+                                      SDValue(Src.getNode(), Src.getResNo()));
+  DAG.setRoot(CopyNode);
+  FuncInfo.setSwiftErrorVReg(FuncInfo.MBB, I.getOperand(1), VReg);
+}
+
+void SelectionDAGBuilder::visitLoadFromSwiftError(const LoadInst &I) {
+  assert(DAG.getTargetLoweringInfo().supportSwiftError() &&
+         "call visitLoadFromSwiftError when backend supports swifterror");
+
+  assert(!I.isVolatile() &&
+         I.getMetadata(LLVMContext::MD_nontemporal) == nullptr &&
+         I.getMetadata(LLVMContext::MD_invariant_load) == nullptr &&
+         "Support volatile, non temporal, invariant for load_from_swift_error");
+
+  const Value *SV = I.getOperand(0);
+  Type *Ty = I.getType();
+  AAMDNodes AAInfo;
+  I.getAAMetadata(AAInfo);
+  assert(!AA->pointsToConstantMemory(MemoryLocation(
+             SV, DAG.getDataLayout().getTypeStoreSize(Ty), AAInfo)) &&
+         "load_from_swift_error should not be constant memory");
+
+  SmallVector<EVT, 4> ValueVTs;
+  SmallVector<uint64_t, 4> Offsets;
+  ComputeValueVTs(DAG.getTargetLoweringInfo(), DAG.getDataLayout(), Ty,
+                  ValueVTs, &Offsets);
+  assert(ValueVTs.size() == 1 && Offsets[0] == 0 &&
+         "expect a single EVT for swifterror");
+
+  // Chain, DL, Reg, VT, Glue or Chain, DL, Reg, VT
+  SDValue L = DAG.getCopyFromReg(getRoot(), getCurSDLoc(),
+                                 FuncInfo.findSwiftErrorVReg(FuncInfo.MBB, SV),
+                                 ValueVTs[0]);
+
+  setValue(&I, L);
+}
+
 void SelectionDAGBuilder::visitStore(const StoreInst &I) {
   if (I.isAtomic())
     return visitAtomicStore(I);
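Note that visitStoreToSwiftError and visitLoadFromSwiftError never touch memory: a store defines a fresh vreg and records it as the value's current vreg, and a load reads whatever vreg is current. A toy model of that renaming (standard containers; names are illustrative):

    #include <map>

    struct Value {}; // stand-in for the swifterror Argument or AllocaInst

    static unsigned NextVReg = 1;
    // The current block's swifterror vreg per value, i.e. SwiftErrorMap[MBB].
    static std::map<const Value *, unsigned> CurVReg;

    // Models visitStoreToSwiftError: CopyToReg into a fresh vreg, record it.
    static void storeToSwiftError(const Value *Err, unsigned /*SrcVReg*/) {
      unsigned VReg = NextVReg++;
      CurVReg[Err] = VReg; // setSwiftErrorVReg(MBB, Err, VReg)
    }

    // Models visitLoadFromSwiftError: CopyFromReg out of the current vreg.
    static unsigned loadFromSwiftError(const Value *Err) {
      return CurVReg.at(Err); // findSwiftErrorVReg(MBB, Err)
    }
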
@@ -3416,6 +3543,21 @@
   const Value *SrcV = I.getOperand(0);
   const Value *PtrV = I.getOperand(1);
 
+  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
+  if (TLI.supportSwiftError()) {
+    // Swifterror values can come from either a function parameter with the
+    // swifterror attribute or an alloca with the swifterror attribute.
+    if (const Argument *Arg = dyn_cast<Argument>(PtrV)) {
+      if (Arg->hasSwiftErrorAttr())
+        return visitStoreToSwiftError(I);
+    }
+
+    if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(PtrV)) {
+      if (Alloca->isSwiftError())
+        return visitStoreToSwiftError(I);
+    }
+  }
+
   SmallVector<EVT, 4> ValueVTs;
   SmallVector<uint64_t, 4> Offsets;
   ComputeValueVTs(DAG.getTargetLoweringInfo(), DAG.getDataLayout(),
@@ -5552,6 +5694,7 @@
 void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
                                       bool isTailCall,
                                       const BasicBlock *EHPadBB) {
+  auto &DL = DAG.getDataLayout();
   FunctionType *FTy = CS.getFunctionType();
   Type *RetTy = CS.getType();
@@ -5559,6 +5702,8 @@
   TargetLowering::ArgListEntry Entry;
   Args.reserve(CS.arg_size());
 
+  bool HasSwiftError = false;
+  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
   for (ImmutableCallSite::arg_iterator i = CS.arg_begin(), e = CS.arg_end();
        i != e; ++i) {
     const Value *V = *i;
@@ -5572,6 +5717,17 @@
     // Skip the first return-type Attribute to get to params.
     Entry.setAttributes(&CS, i - CS.arg_begin() + 1);
+
+    // Use the swifterror virtual register as an input to the call.
+    if (Entry.isSwiftError && TLI.supportSwiftError()) {
+      HasSwiftError = true;
+      // A function can only have a single swifterror argument. And if it does
+      // have a swifterror argument, it must be the first entry in
+      // SwiftErrorVals.
+      Entry.Node = DAG.getRegister(FuncInfo.SwiftErrorMap[FuncInfo.MBB][0],
+                                   EVT(TLI.getPointerTy(DL)));
+    }
+
     Args.push_back(Entry);
 
     // If we have an explicit sret argument that is an Instruction, (i.e., it
@@ -5598,6 +5754,19 @@
     Result.first = lowerRangeToAssertZExt(DAG, *Inst, Result.first);
     setValue(Inst, Result.first);
   }
+
+  // The last element of CLI.InVals has the SDValue for the swifterror return.
+  // Here we copy it to a virtual register and update SwiftErrorMap for
+  // book-keeping.
+  if (HasSwiftError && TLI.supportSwiftError()) {
+    // Get the last element of InVals.
+    SDValue Src = CLI.InVals.back();
+    const TargetRegisterClass *RC = TLI.getRegClassFor(TLI.getPointerTy(DL));
+    unsigned VReg = FuncInfo.MF->getRegInfo().createVirtualRegister(RC);
+    SDValue CopyNode = CLI.DAG.getCopyToReg(Result.second, CLI.DL, VReg, Src);
+    FuncInfo.SwiftErrorMap[FuncInfo.MBB][0] = VReg;
+    DAG.setRoot(CopyNode);
+  }
 }
 
 /// IsOnlyUsedInZeroEqualityComparison - Return true if it only matters that the
@@ -7311,10 +7480,23 @@
     }
   }
 
+  // Push the swifterror return as the last element of CLI.Ins.
+  ArgListTy &Args = CLI.getArgs();
+  if (supportSwiftError()) {
+    for (unsigned i = 0, e = Args.size(); i != e; ++i) {
+      if (Args[i].isSwiftError) {
+        ISD::InputArg MyFlags;
+        MyFlags.VT = getPointerTy(DL);
+        MyFlags.ArgVT = EVT(getPointerTy(DL));
+        MyFlags.Flags.setSwiftError();
+        CLI.Ins.push_back(MyFlags);
+      }
+    }
+  }
+
   // Handle all of the outgoing arguments.
   CLI.Outs.clear();
   CLI.OutVals.clear();
-  ArgListTy &Args = CLI.getArgs();
   for (unsigned i = 0, e = Args.size(); i != e; ++i) {
     SmallVector<EVT, 4> ValueVTs;
     ComputeValueVTs(*this, DL, Args[i].Ty, ValueVTs);
@@ -7432,6 +7614,9 @@
   SmallVector<SDValue, 4> InVals;
   CLI.Chain = LowerCall(CLI, InVals);
 
+  // Update CLI.InVals for use outside of this function.
+  CLI.InVals = InVals;
+
   // Verify that the target's LowerCall behaved as expected.
   assert(CLI.Chain.getNode() && CLI.Chain.getValueType() == MVT::Other &&
          "LowerCall didn't return a valid chain!");
@@ -7793,6 +7978,14 @@
     FuncInfo->setArgumentFrameIndex(&*I, FI->getIndex());
   }
 
+  // Update SwiftErrorMap.
+  if (Res.getOpcode() == ISD::CopyFromReg && TLI->supportSwiftError() &&
+      F.getAttributes().hasAttribute(Idx, Attribute::SwiftError)) {
+    unsigned Reg = cast<RegisterSDNode>(Res.getOperand(1))->getReg();
+    if (TargetRegisterInfo::isVirtualRegister(Reg))
+      FuncInfo->SwiftErrorMap[FuncInfo->MBB][0] = Reg;
+  }
+
   // If this argument is live outside of the entry block, insert a copy from
   // wherever we got it to the vreg that other BB's will reference it as.
   if (!TM.Options.EnableFastISel && Res.getOpcode() == ISD::CopyFromReg) {
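Taken together, the SelectionDAGBuilder changes thread a single swifterror value through a call: the entry block seeds SwiftErrorMap from the incoming argument copy, the block's current vreg feeds each call as its swifterror operand, and the value coming back in CLI.InVals.back() defines a fresh vreg that becomes current. A standalone sketch of that caller-side flow (standard containers; names are illustrative):

    #include <map>

    struct MachineBasicBlock {};

    static unsigned NextVReg = 1;
    // SwiftErrorMap[MBB][0] for the single swifterror value.
    static std::map<const MachineBasicBlock *, unsigned> CurSwiftErrorVReg;

    // Models LowerCallTo for a call that takes and returns swifterror.
    static void lowerCallWithSwiftError(const MachineBasicBlock *MBB) {
      unsigned ArgVReg = CurSwiftErrorVReg.at(MBB); // the call's swifterror input
      (void)ArgVReg;                                // consumed by the call
      unsigned RetVReg = NextVReg++;                // CopyToReg of CLI.InVals.back()
      CurSwiftErrorVReg[MBB] = RetVReg;             // SwiftErrorMap[MBB][0] = VReg
    }
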
Index: lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
===================================================================
--- lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
+++ lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
@@ -1159,12 +1159,132 @@
 }
 #endif // NDEBUG
 
+/// Set up SwiftErrorVals by going through the function. If the function has
+/// a swifterror argument, it will be the first entry.
+static void setupSwiftErrorVals(const Function &Fn, const TargetLowering *TLI,
+                                FunctionLoweringInfo *FuncInfo) {
+  if (!TLI->supportSwiftError())
+    return;
+
+  FuncInfo->SwiftErrorVals.clear();
+  FuncInfo->SwiftErrorMap.clear();
+  FuncInfo->SwiftErrorWorklist.clear();
+
+  // Check if the function has a swifterror argument.
+  for (Function::const_arg_iterator AI = Fn.arg_begin(), AE = Fn.arg_end();
+       AI != AE; ++AI)
+    if (AI->hasSwiftErrorAttr())
+      FuncInfo->SwiftErrorVals.push_back(&*AI);
+
+  for (const auto &LLVMBB : Fn)
+    for (const auto &Inst : LLVMBB) {
+      if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(&Inst))
+        if (Alloca->isSwiftError())
+          FuncInfo->SwiftErrorVals.push_back(Alloca);
+    }
+}
+
+/// For each basic block, merge incoming swifterror values or simply propagate
+/// them. The merged results will be saved in SwiftErrorMap. For predecessors
+/// that are not yet visited, we create virtual registers to hold the
+/// swifterror values and save them in SwiftErrorWorklist.
+static void mergeIncomingSwiftErrors(FunctionLoweringInfo *FuncInfo,
+                                     const TargetLowering *TLI,
+                                     const TargetInstrInfo *TII,
+                                     const BasicBlock *LLVMBB,
+                                     SelectionDAGBuilder *SDB) {
+  if (!TLI->supportSwiftError())
+    return;
+
+  // We should only do this when we have a swifterror parameter or a
+  // swifterror alloca.
+  if (FuncInfo->SwiftErrorVals.empty())
+    return;
+
+  // At the beginning of a basic block, insert PHI nodes or get the virtual
+  // register from the only predecessor, and update SwiftErrorMap; if one
+  // of the predecessors is not visited, update SwiftErrorWorklist.
+  // At the end of a basic block, if the block is in SwiftErrorWorklist,
+  // insert copies to sync up the virtual register assignment.
+
+  // Always create a virtual register for each swifterror value in the entry
+  // block.
+  auto &DL = SDB->DAG.getDataLayout();
+  const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
+  if (pred_begin(LLVMBB) == pred_end(LLVMBB)) {
+    for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
+      unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
+      // Assign Undef to VReg. We construct the MI directly to make sure it
+      // works with FastISel.
+      BuildMI(*FuncInfo->MBB, FuncInfo->InsertPt, SDB->getCurDebugLoc(),
+              TII->get(TargetOpcode::IMPLICIT_DEF), VReg);
+      FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
+    }
+    return;
+  }
+
+  if (auto *UniquePred = LLVMBB->getUniquePredecessor()) {
+    auto *UniquePredMBB = FuncInfo->MBBMap[UniquePred];
+    if (!FuncInfo->SwiftErrorMap.count(UniquePredMBB)) {
+      // Update SwiftErrorWorklist with a new virtual register.
+      for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
+        unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
+        FuncInfo->SwiftErrorWorklist[UniquePredMBB].push_back(VReg);
+        // Propagate the information from the single predecessor.
+        FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
+      }
+      return;
+    }
+    // Propagate the information from the single predecessor.
+    FuncInfo->SwiftErrorMap[FuncInfo->MBB] =
+        FuncInfo->SwiftErrorMap[UniquePredMBB];
+    return;
+  }
+
+  // For the case of multiple predecessors, update SwiftErrorWorklist.
+  // Handle the case where two or more predecessors are the same.
+  for (const_pred_iterator PI = pred_begin(LLVMBB), PE = pred_end(LLVMBB);
+       PI != PE; ++PI) {
+    auto *PredMBB = FuncInfo->MBBMap[*PI];
+    if (!FuncInfo->SwiftErrorMap.count(PredMBB) &&
+        !FuncInfo->SwiftErrorWorklist.count(PredMBB)) {
+      for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
+        unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
+        // When we actually visit the basic block PredMBB, we will materialize
+        // the virtual register assignment in copySwiftErrorsToFinalVRegs.
+        FuncInfo->SwiftErrorWorklist[PredMBB].push_back(VReg);
+      }
+    }
+  }
+
+  // For the case of multiple predecessors, create a virtual register for
+  // each swifterror value and generate a PHI node.
+  for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
+    unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
+    FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
+
+    MachineInstrBuilder SwiftErrorPHI =
+        BuildMI(*FuncInfo->MBB, FuncInfo->MBB->begin(), SDB->getCurDebugLoc(),
+                TII->get(TargetOpcode::PHI), VReg);
+    for (const_pred_iterator PI = pred_begin(LLVMBB), PE = pred_end(LLVMBB);
+         PI != PE; ++PI) {
+      auto *PredMBB = FuncInfo->MBBMap[*PI];
+      unsigned SwiftErrorReg = FuncInfo->SwiftErrorMap.count(PredMBB)
+                                   ? FuncInfo->SwiftErrorMap[PredMBB][I]
+                                   : FuncInfo->SwiftErrorWorklist[PredMBB][I];
+      SwiftErrorPHI.addReg(SwiftErrorReg).addMBB(PredMBB);
+    }
+  }
+}
+
 void SelectionDAGISel::SelectAllBasicBlocks(const Function &Fn) {
   // Initialize the Fast-ISel state, if needed.
   FastISel *FastIS = nullptr;
   if (TM.Options.EnableFastISel)
     FastIS = TLI->createFastISel(*FuncInfo, LibInfo);
 
+  setupSwiftErrorVals(Fn, TLI, FuncInfo);
+
   // Iterate over all basic blocks in the function.
   ReversePostOrderTraversal<const Function*> RPOT(&Fn);
   for (ReversePostOrderTraversal<const Function*>::rpo_iterator
@@ -1203,6 +1323,7 @@
       if (!FuncInfo->MBB)
         continue; // Some blocks like catchpads have no code or MBB.
       FuncInfo->InsertPt = FuncInfo->MBB->getFirstNonPHI();
+      mergeIncomingSwiftErrors(FuncInfo, TLI, TII, LLVMBB, SDB);
 
       // Setup an EH landing-pad block.
       FuncInfo->ExceptionPointerVirtReg = 0;
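Finally, mergeIncomingSwiftErrors decides per block where the swifterror vregs come from: fresh implicitly-defined vregs in the entry block, straight propagation from a unique visited predecessor, promised worklist vregs for unvisited predecessors, and PHIs when there are several predecessors. A compilable model of that decision tree over a toy CFG (illustrative only; the real code also deduplicates predecessors and builds machine PHI instructions):

    #include <map>
    #include <vector>

    struct Block { std::vector<Block *> Preds; }; // toy basic block

    static unsigned NextVReg = 1;
    static const unsigned NumSwiftErrorVals = 1;
    static std::map<Block *, std::vector<unsigned>> SwiftErrorMap;      // visited
    static std::map<Block *, std::vector<unsigned>> SwiftErrorWorklist; // promised

    static void mergeIncomingSwiftErrors(Block *B) {
      auto &Out = SwiftErrorMap[B];
      // Entry block: fresh vregs (IMPLICIT_DEF in the real code).
      if (B->Preds.empty()) {
        for (unsigned I = 0; I < NumSwiftErrorVals; ++I)
          Out.push_back(NextVReg++);
        return;
      }
      // Unique predecessor: propagate, or promise vregs if it is unvisited.
      if (B->Preds.size() == 1) {
        Block *P = B->Preds[0];
        if (!SwiftErrorMap.count(P)) {
          for (unsigned I = 0; I < NumSwiftErrorVals; ++I) {
            unsigned VReg = NextVReg++;
            SwiftErrorWorklist[P].push_back(VReg); // pred must end in this vreg
            Out.push_back(VReg);
          }
          return;
        }
        Out = SwiftErrorMap[P];
        return;
      }
      // Multiple predecessors: promise vregs for unvisited preds, then a PHI
      // (modeled here as a fresh vreg) merges all the incoming vregs.
      for (Block *P : B->Preds)
        if (!SwiftErrorMap.count(P) && !SwiftErrorWorklist.count(P))
          for (unsigned I = 0; I < NumSwiftErrorVals; ++I)
            SwiftErrorWorklist[P].push_back(NextVReg++);
      for (unsigned I = 0; I < NumSwiftErrorVals; ++I)
        Out.push_back(NextVReg++);
    }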