200 changes: 197 additions & 3 deletions llvm/lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
@@ -929,10 +929,48 @@ SDValue SelectionDAGBuilder::getControlRoot() {
return Root;
}

/// Copy the swifterror value to the final virtual register at the end of a
/// basic block, as specified by SwiftErrorWorklist, if necessary.
static void copySwiftErrorsToFinalVRegs(SelectionDAGBuilder &SDB) {
const TargetLowering &TLI = SDB.DAG.getTargetLoweringInfo();
if (!TLI.supportSwiftError())
return;

if (!SDB.FuncInfo.SwiftErrorWorklist.count(SDB.FuncInfo.MBB))
return;

// Go through the entries in SwiftErrorWorklist and create copies as necessary.
FunctionLoweringInfo::SwiftErrorVRegs &WorklistEntry =
SDB.FuncInfo.SwiftErrorWorklist[SDB.FuncInfo.MBB];
FunctionLoweringInfo::SwiftErrorVRegs &MapEntry =
SDB.FuncInfo.SwiftErrorMap[SDB.FuncInfo.MBB];
for (unsigned I = 0, E = WorklistEntry.size(); I < E; I++) {
unsigned WorkReg = WorklistEntry[I];

// Find the swifterror virtual register for the value in SwiftErrorMap.
unsigned MapReg = MapEntry[I];
assert(TargetRegisterInfo::isVirtualRegister(MapReg) &&
"Entries in SwiftErrorMap should be virtual registers");

if (WorkReg == MapReg)
continue;

// Copy the value from the SwiftErrorMap register into the SwiftErrorWorklist register.
auto &DL = SDB.DAG.getDataLayout();
SDValue CopyNode = SDB.DAG.getCopyToReg(
SDB.getRoot(), SDB.getCurSDLoc(), WorkReg,
SDB.DAG.getRegister(MapReg, EVT(TLI.getPointerTy(DL))));
MapEntry[I] = WorkReg;
SDB.DAG.setRoot(CopyNode);
}
}
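
The copy-sync above is self-contained enough to model on its own: SwiftErrorWorklist records the registers this block has already promised to successors, SwiftErrorMap records the registers currently holding each swifterror value, and a copy is needed wherever the two disagree. Below is a minimal standalone sketch of that reconciliation; VReg, BlockState, and syncFinalVRegs are hypothetical stand-ins, not LLVM types.

// Standalone sketch, not part of the patch: VReg and BlockState are
// hypothetical stand-ins for the SwiftErrorWorklist / SwiftErrorMap pair.
#include <cstdio>
#include <utility>
#include <vector>

using VReg = unsigned;

struct BlockState {
  std::vector<VReg> Worklist; // registers promised to successor blocks
  std::vector<VReg> Map;      // registers currently holding each value
};

// Return the (dest, src) copies that must be emitted at the block end.
static std::vector<std::pair<VReg, VReg>> syncFinalVRegs(BlockState &B) {
  std::vector<std::pair<VReg, VReg>> Copies;
  for (size_t I = 0, E = B.Worklist.size(); I < E; ++I) {
    if (B.Worklist[I] == B.Map[I])
      continue;                             // already in the promised register
    Copies.push_back({B.Worklist[I], B.Map[I]});
    B.Map[I] = B.Worklist[I];               // the promised register now holds it
  }
  return Copies;
}

int main() {
  BlockState B{/*Worklist=*/{10, 11}, /*Map=*/{10, 42}};
  for (auto &[Dst, Src] : syncFinalVRegs(B))
    std::printf("COPY vreg%u <- vreg%u\n", Dst, Src); // COPY vreg11 <- vreg42
  return 0;
}
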

void SelectionDAGBuilder::visit(const Instruction &I) {
// Set up outgoing PHI node register values before emitting the terminator.
if (isa<TerminatorInst>(&I))
if (isa<TerminatorInst>(&I)) {
copySwiftErrorsToFinalVRegs(*this);
HandlePHINodesInSuccessorBlocks(I.getParent());
}

++SDNodeOrder;

@@ -1434,6 +1472,23 @@ void SelectionDAGBuilder::visitRet(const ReturnInst &I) {
}
}

// Push the swifterror virtual register as the last element of Outs. This
// ensures the swifterror virtual register is returned in the swifterror
// physical register.
const Function *F = I.getParent()->getParent();
if (TLI.supportSwiftError() &&
F->getAttributes().hasAttrSomewhere(Attribute::SwiftError)) {
ISD::ArgFlagsTy Flags = ISD::ArgFlagsTy();
Flags.setSwiftError();
Outs.push_back(ISD::OutputArg(Flags, EVT(TLI.getPointerTy(DL)) /*vt*/,
EVT(TLI.getPointerTy(DL)) /*argvt*/,
true /*isfixed*/, 1 /*origidx*/,
0 /*partOffs*/));
// Create SDNode for the swifterror virtual register.
OutVals.push_back(DAG.getRegister(FuncInfo.SwiftErrorMap[FuncInfo.MBB][0],
EVT(TLI.getPointerTy(DL))));
}

bool isVarArg = DAG.getMachineFunction().getFunction()->isVarArg();
CallingConv::ID CallConv =
DAG.getMachineFunction().getFunction()->getCallingConv();
@@ -3308,7 +3363,22 @@ void SelectionDAGBuilder::visitLoad(const LoadInst &I) {
if (I.isAtomic())
return visitAtomicLoad(I);

const TargetLowering &TLI = DAG.getTargetLoweringInfo();
const Value *SV = I.getOperand(0);
if (TLI.supportSwiftError()) {
// Swifterror values can come from either a function parameter with
// swifterror attribute or an alloca with swifterror attribute.
if (const Argument *Arg = dyn_cast<Argument>(SV)) {
if (Arg->hasSwiftErrorAttr())
return visitLoadFromSwiftError(I);
}

if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(SV)) {
if (Alloca->isSwiftError())
return visitLoadFromSwiftError(I);
}
}

SDValue Ptr = getValue(SV);

Type *Ty = I.getType();
@@ -3332,7 +3402,6 @@ void SelectionDAGBuilder::visitLoad(const LoadInst &I) {
I.getAAMetadata(AAInfo);
const MDNode *Ranges = I.getMetadata(LLVMContext::MD_range);

const TargetLowering &TLI = DAG.getTargetLoweringInfo();
SmallVector<EVT, 4> ValueVTs;
SmallVector<uint64_t, 4> Offsets;
ComputeValueVTs(TLI, DAG.getDataLayout(), Ty, ValueVTs, &Offsets);
@@ -3409,13 +3478,86 @@ void SelectionDAGBuilder::visitLoad(const LoadInst &I) {
DAG.getVTList(ValueVTs), Values));
}

void SelectionDAGBuilder::visitStoreToSwiftError(const StoreInst &I) {
const TargetLowering &TLI = DAG.getTargetLoweringInfo();
assert(TLI.supportSwiftError() &&
"call visitStoreToSwiftError when backend supports swifterror");

SmallVector<EVT, 4> ValueVTs;
SmallVector<uint64_t, 4> Offsets;
const Value *SrcV = I.getOperand(0);
ComputeValueVTs(DAG.getTargetLoweringInfo(), DAG.getDataLayout(),
SrcV->getType(), ValueVTs, &Offsets);
assert(ValueVTs.size() == 1 && Offsets[0] == 0 &&
"expect a single EVT for swifterror");

SDValue Src = getValue(SrcV);
// Copy the source value into a fresh virtual register, which becomes the
// current definition of the swifterror value.
auto &DL = DAG.getDataLayout();
const TargetRegisterClass *RC = TLI.getRegClassFor(TLI.getPointerTy(DL));
unsigned VReg = FuncInfo.MF->getRegInfo().createVirtualRegister(RC);
// Chain, DL, Reg, N or Chain, DL, Reg, N, Glue
// Chain can be getRoot or getControlRoot.
SDValue CopyNode = DAG.getCopyToReg(getRoot(), getCurSDLoc(), VReg,
SDValue(Src.getNode(), Src.getResNo()));
DAG.setRoot(CopyNode);
FuncInfo.setSwiftErrorVReg(FuncInfo.MBB, I.getOperand(1), VReg);
}

void SelectionDAGBuilder::visitLoadFromSwiftError(const LoadInst &I) {
assert(DAG.getTargetLoweringInfo().supportSwiftError() &&
"call visitLoadFromSwiftError when backend supports swifterror");

assert(!I.isVolatile() &&
I.getMetadata(LLVMContext::MD_nontemporal) == nullptr &&
I.getMetadata(LLVMContext::MD_invariant_load) == nullptr &&
"volatile, non-temporal, and invariant loads from a swifterror value "
"are not supported");

const Value *SV = I.getOperand(0);
Type *Ty = I.getType();
AAMDNodes AAInfo;
I.getAAMetadata(AAInfo);
assert(!AA->pointsToConstantMemory(MemoryLocation(
SV, DAG.getDataLayout().getTypeStoreSize(Ty), AAInfo)) &&
"a swifterror load should not come from constant memory");

SmallVector<EVT, 4> ValueVTs;
SmallVector<uint64_t, 4> Offsets;
ComputeValueVTs(DAG.getTargetLoweringInfo(), DAG.getDataLayout(), Ty,
ValueVTs, &Offsets);
assert(ValueVTs.size() == 1 && Offsets[0] == 0 &&
"expect a single EVT for swifterror");

// Chain, DL, Reg, VT, Glue or Chain, DL, Reg, VT
SDValue L = DAG.getCopyFromReg(getRoot(), getCurSDLoc(),
FuncInfo.findSwiftErrorVReg(FuncInfo.MBB, SV),
ValueVTs[0]);

setValue(&I, L);
}
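
Together with the dispatch added to visitLoad above and visitStore below, these two handlers mean a swifterror slot never touches memory: a store rebinds the value to a fresh virtual register, and a load reads whichever register is currently bound. The following standalone sketch models that per-block bookkeeping; Value and SwiftErrorBindings are hypothetical stand-ins for llvm::Value and the setSwiftErrorVReg/findSwiftErrorVReg machinery.

// Standalone sketch, not part of the patch: Value stands in for llvm::Value,
// and Bound plays the role of setSwiftErrorVReg / findSwiftErrorVReg for one
// basic block.
#include <cassert>
#include <cstdio>
#include <map>

using VReg = unsigned;
struct Value {}; // hypothetical stand-in

struct SwiftErrorBindings {
  std::map<const Value *, VReg> Bound; // swifterror value -> current register
  VReg NextVReg = 100;

  // A store becomes a CopyToReg into a brand-new virtual register.
  VReg store(const Value *Err) {
    VReg Fresh = NextVReg++;
    Bound[Err] = Fresh; // later loads in this block see the new register
    return Fresh;
  }

  // A load becomes a CopyFromReg from the current virtual register.
  VReg load(const Value *Err) {
    assert(Bound.count(Err) && "swifterror value has no current register");
    return Bound[Err];
  }
};

int main() {
  Value Err;
  SwiftErrorBindings B;
  VReg S = B.store(&Err);
  std::printf("store -> vreg%u, load -> vreg%u\n", S, B.load(&Err));
  return 0;
}
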

void SelectionDAGBuilder::visitStore(const StoreInst &I) {
if (I.isAtomic())
return visitAtomicStore(I);

const Value *SrcV = I.getOperand(0);
const Value *PtrV = I.getOperand(1);

const TargetLowering &TLI = DAG.getTargetLoweringInfo();
if (TLI.supportSwiftError()) {
// Swifterror values can come from either a function parameter with
// swifterror attribute or an alloca with swifterror attribute.
if (const Argument *Arg = dyn_cast<Argument>(PtrV)) {
if (Arg->hasSwiftErrorAttr())
return visitStoreToSwiftError(I);
}

if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(PtrV)) {
if (Alloca->isSwiftError())
return visitStoreToSwiftError(I);
}
}

SmallVector<EVT, 4> ValueVTs;
SmallVector<uint64_t, 4> Offsets;
ComputeValueVTs(DAG.getTargetLoweringInfo(), DAG.getDataLayout(),
@@ -5552,13 +5694,16 @@ SelectionDAGBuilder::lowerInvokable(TargetLowering::CallLoweringInfo &CLI,
void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
bool isTailCall,
const BasicBlock *EHPadBB) {
auto &DL = DAG.getDataLayout();
FunctionType *FTy = CS.getFunctionType();
Type *RetTy = CS.getType();

TargetLowering::ArgListTy Args;
TargetLowering::ArgListEntry Entry;
Args.reserve(CS.arg_size());

const Value *SwiftErrorVal = nullptr;
const TargetLowering &TLI = DAG.getTargetLoweringInfo();
for (ImmutableCallSite::arg_iterator i = CS.arg_begin(), e = CS.arg_end();
i != e; ++i) {
const Value *V = *i;
@@ -5572,6 +5717,17 @@ void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,

// Skip the first return-type Attribute to get to params.
Entry.setAttributes(&CS, i - CS.arg_begin() + 1);

// Use the swifterror virtual register as an input to the call.
if (Entry.isSwiftError && TLI.supportSwiftError()) {
SwiftErrorVal = V;
// Find the virtual register for the actual swifterror argument and use
// it, rather than the Value itself, as the call operand.
Entry.Node = DAG.getRegister(
FuncInfo.findSwiftErrorVReg(FuncInfo.MBB, V),
EVT(TLI.getPointerTy(DL)));
}

Args.push_back(Entry);

// If we have an explicit sret argument that is an Instruction, (i.e., it
@@ -5598,6 +5754,20 @@ void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
Result.first = lowerRangeToAssertZExt(DAG, *Inst, Result.first);
setValue(Inst, Result.first);
}

// The last element of CLI.InVals holds the SDValue for the swifterror
// return. Copy it to a virtual register and update SwiftErrorMap for
// book-keeping.
if (SwiftErrorVal && TLI.supportSwiftError()) {
// Get the last element of InVals.
SDValue Src = CLI.InVals.back();
const TargetRegisterClass *RC = TLI.getRegClassFor(TLI.getPointerTy(DL));
unsigned VReg = FuncInfo.MF->getRegInfo().createVirtualRegister(RC);
SDValue CopyNode = CLI.DAG.getCopyToReg(Result.second, CLI.DL, VReg, Src);
// We update the virtual register for the actual swifterror argument.
FuncInfo.setSwiftErrorVReg(FuncInfo.MBB, SwiftErrorVal, VReg);
DAG.setRoot(CopyNode);
}
}
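
The call-site protocol here is symmetric: the current swifterror register goes in as the argument's operand, and the callee's returned error (the last element of CLI.InVals) comes back out into a fresh register that becomes the new binding. A minimal sketch of that round trip, again with hypothetical stand-in types rather than real DAG nodes:

// Standalone sketch, not part of the patch: models only the register
// bookkeeping around a call, not the actual CopyToReg/CopyFromReg nodes.
#include <cstdio>
#include <map>

using VReg = unsigned;
struct Value {}; // hypothetical stand-in for llvm::Value

struct CallSiteBindings {
  std::map<const Value *, VReg> Bound;
  VReg NextVReg = 200;

  // Before the call: the current register is the swifterror operand.
  VReg argument(const Value *Err) { return Bound.at(Err); }

  // After the call: the returned error lands in a fresh register, which
  // becomes the new binding (setSwiftErrorVReg in the real code).
  VReg result(const Value *Err) {
    VReg Fresh = NextVReg++;
    Bound[Err] = Fresh;
    return Fresh;
  }
};

int main() {
  Value Err;
  CallSiteBindings CS;
  CS.Bound[&Err] = 7; // binding established before the call
  VReg In = CS.argument(&Err);
  VReg Out = CS.result(&Err);
  std::printf("pass vreg%u into the call, receive into vreg%u\n", In, Out);
  return 0;
}
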

/// IsOnlyUsedInZeroEqualityComparison - Return true if it only matters that the
@@ -7311,10 +7481,23 @@ TargetLowering::LowerCallTo(TargetLowering::CallLoweringInfo &CLI) const {
}
}

// Push the swifterror return as the last element of CLI.Ins.
ArgListTy &Args = CLI.getArgs();
if (supportSwiftError()) {
for (unsigned i = 0, e = Args.size(); i != e; ++i) {
if (Args[i].isSwiftError) {
ISD::InputArg MyFlags;
MyFlags.VT = getPointerTy(DL);
MyFlags.ArgVT = EVT(getPointerTy(DL));
MyFlags.Flags.setSwiftError();
CLI.Ins.push_back(MyFlags);
}
}
}

// Handle all of the outgoing arguments.
CLI.Outs.clear();
CLI.OutVals.clear();
ArgListTy &Args = CLI.getArgs();
for (unsigned i = 0, e = Args.size(); i != e; ++i) {
SmallVector<EVT, 4> ValueVTs;
ComputeValueVTs(*this, DL, Args[i].Ty, ValueVTs);
@@ -7432,6 +7615,9 @@ TargetLowering::LowerCallTo(TargetLowering::CallLoweringInfo &CLI) const {
SmallVector<SDValue, 4> InVals;
CLI.Chain = LowerCall(CLI, InVals);

// Update CLI.InVals so it can be used outside of this function.
CLI.InVals = InVals;

// Verify that the target's LowerCall behaved as expected.
assert(CLI.Chain.getNode() && CLI.Chain.getValueType() == MVT::Other &&
"LowerCall didn't return a valid chain!");
@@ -7793,6 +7979,14 @@ void SelectionDAGISel::LowerArguments(const Function &F) {
FuncInfo->setArgumentFrameIndex(&*I, FI->getIndex());
}

// Update SwiftErrorMap.
if (Res.getOpcode() == ISD::CopyFromReg && TLI->supportSwiftError() &&
F.getAttributes().hasAttribute(Idx, Attribute::SwiftError)) {
unsigned Reg = cast<RegisterSDNode>(Res.getOperand(1))->getReg();
if (TargetRegisterInfo::isVirtualRegister(Reg))
FuncInfo->SwiftErrorMap[FuncInfo->MBB][0] = Reg;
}

// If this argument is live outside of the entry block, insert a copy from
// wherever we got it to the vreg that other BB's will reference it as.
if (!TM.Options.EnableFastISel && Res.getOpcode() == ISD::CopyFromReg) {
2 changes: 2 additions & 0 deletions llvm/lib/CodeGen/SelectionDAG/SelectionDAGBuilder.h
@@ -895,6 +895,8 @@ class SelectionDAGBuilder {
bool visitBinaryFloatCall(const CallInst &I, unsigned Opcode);
void visitAtomicLoad(const LoadInst &I);
void visitAtomicStore(const StoreInst &I);
void visitLoadFromSwiftError(const LoadInst &I);
void visitStoreToSwiftError(const StoreInst &I);

void visitInlineAsm(ImmutableCallSite CS);
const char *visitIntrinsicCall(const CallInst &I, unsigned Intrinsic);
121 changes: 121 additions & 0 deletions llvm/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
@@ -1159,12 +1159,132 @@ static void collectFailStats(const Instruction *I) {
}
#endif // NDEBUG

/// Set up SwiftErrorVals by going through the function. If the function has
/// a swifterror argument, it will be the first entry.
static void setupSwiftErrorVals(const Function &Fn, const TargetLowering *TLI,
FunctionLoweringInfo *FuncInfo) {
if (!TLI->supportSwiftError())
return;

FuncInfo->SwiftErrorVals.clear();
FuncInfo->SwiftErrorMap.clear();
FuncInfo->SwiftErrorWorklist.clear();

// Check if the function has a swifterror argument.
for (Function::const_arg_iterator AI = Fn.arg_begin(), AE = Fn.arg_end();
AI != AE; ++AI)
if (AI->hasSwiftErrorAttr())
FuncInfo->SwiftErrorVals.push_back(&*AI);

for (const auto &LLVMBB : Fn)
for (const auto &Inst : LLVMBB) {
if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(&Inst))
if (Alloca->isSwiftError())
FuncInfo->SwiftErrorVals.push_back(Alloca);
}
}
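
One invariant worth spelling out: a swifterror argument, if present, is always SwiftErrorVals[0], followed by swifterror allocas in program order; LowerArguments depends on exactly this when it writes SwiftErrorMap[FuncInfo->MBB][0]. A toy sketch of the collection order, with hypothetical stand-in types:

// Standalone sketch, not part of the patch: the only point is the ordering
// invariant (argument first, then allocas).
#include <cstdio>
#include <vector>

struct SwiftErrorValue { const char *Name; };

static std::vector<const SwiftErrorValue *>
collectSwiftErrorVals(const std::vector<SwiftErrorValue> &Args,
                      const std::vector<SwiftErrorValue> &Allocas) {
  std::vector<const SwiftErrorValue *> Vals;
  for (const auto &A : Args)    // the swifterror argument, if any, comes first
    Vals.push_back(&A);
  for (const auto &A : Allocas) // then swifterror allocas, in program order
    Vals.push_back(&A);
  return Vals;
}

int main() {
  std::vector<SwiftErrorValue> Args = {{"%err.arg"}};
  std::vector<SwiftErrorValue> Allocas = {{"%err.local"}};
  for (const SwiftErrorValue *V : collectSwiftErrorVals(Args, Allocas))
    std::printf("%s\n", V->Name); // prints %err.arg, then %err.local
  return 0;
}
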

/// For each basic block, merge incoming swifterror values or simply propagate
/// them. The merged results will be saved in SwiftErrorMap. For predecessors
/// that are not yet visited, we create virtual registers to hold the swifterror
/// values and save them in SwiftErrorWorklist.
static void mergeIncomingSwiftErrors(FunctionLoweringInfo *FuncInfo,
const TargetLowering *TLI,
const TargetInstrInfo *TII,
const BasicBlock *LLVMBB,
SelectionDAGBuilder *SDB) {
if (!TLI->supportSwiftError())
return;

// Only do this when the function has a swifterror parameter or a swifterror
// alloca.
if (FuncInfo->SwiftErrorVals.empty())
return;

// At beginning of a basic block, insert PHI nodes or get the virtual
// register from the only predecessor, and update SwiftErrorMap; if one
// of the predecessors is not visited, update SwiftErrorWorklist.
// At end of a basic block, if a block is in SwiftErrorWorklist, insert copy
// to sync up the virtual register assignment.

// Always create a virtual register for each swifterror value in the entry block.
auto &DL = SDB->DAG.getDataLayout();
const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
if (pred_begin(LLVMBB) == pred_end(LLVMBB)) {
for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
// Assign undef to the VReg. We construct the MI directly to make sure it
// works with FastISel.
BuildMI(*FuncInfo->MBB, FuncInfo->InsertPt, SDB->getCurDebugLoc(),
TII->get(TargetOpcode::IMPLICIT_DEF), VReg);
FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
}
return;
}

if (auto *UniquePred = LLVMBB->getUniquePredecessor()) {
auto *UniquePredMBB = FuncInfo->MBBMap[UniquePred];
if (!FuncInfo->SwiftErrorMap.count(UniquePredMBB)) {
// Update SwiftErrorWorklist with a new virtual register.
for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
FuncInfo->SwiftErrorWorklist[UniquePredMBB].push_back(VReg);
// Propagate the information from the single predecessor.
FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
}
return;
}
// Propagate the information from the single predecessor.
FuncInfo->SwiftErrorMap[FuncInfo->MBB] =
FuncInfo->SwiftErrorMap[UniquePredMBB];
return;
}

// For the case of multiple predecessors, update SwiftErrorWorklist.
// Note that two or more of the predecessors may be the same block.
for (const_pred_iterator PI = pred_begin(LLVMBB), PE = pred_end(LLVMBB);
PI != PE; ++PI) {
auto *PredMBB = FuncInfo->MBBMap[*PI];
if (!FuncInfo->SwiftErrorMap.count(PredMBB) &&
!FuncInfo->SwiftErrorWorklist.count(PredMBB)) {
for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
// When we actually visit the basic block PredMBB, we will materialize
// the virtual register assignment in copySwiftErrorsToFinalVRegs.
FuncInfo->SwiftErrorWorklist[PredMBB].push_back(VReg);
}
}
}

// For the case of multiple predecessors, create a virtual register for
// each swifterror value and generate a PHI node.
for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);

MachineInstrBuilder SwiftErrorPHI = BuildMI(*FuncInfo->MBB,
FuncInfo->MBB->begin(), SDB->getCurDebugLoc(),
TII->get(TargetOpcode::PHI), VReg);
for (const_pred_iterator PI = pred_begin(LLVMBB), PE = pred_end(LLVMBB);
PI != PE; ++PI) {
auto *PredMBB = FuncInfo->MBBMap[*PI];
unsigned SwiftErrorReg = FuncInfo->SwiftErrorMap.count(PredMBB) ?
FuncInfo->SwiftErrorMap[PredMBB][I] :
FuncInfo->SwiftErrorWorklist[PredMBB][I];
SwiftErrorPHI.addReg(SwiftErrorReg)
.addMBB(PredMBB);
}
}
}
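
The merge above has three shapes: no predecessors (a fresh register defined by IMPLICIT_DEF), a unique predecessor (propagate its register, or promise one through the worklist if it is unvisited), and multiple predecessors (a PHI fed by each predecessor's map or worklist entry). The standalone sketch below models the three cases for a single swifterror value; Block, Merger, and atBlockEntry are hypothetical stand-ins.

// Standalone sketch, not part of the patch: tracks one swifterror value
// across blocks (the real code keeps a vector of values per block).
#include <cstdio>
#include <map>
#include <vector>

using VReg = unsigned;

struct Block {
  std::vector<Block *> Preds;
};

struct Merger {
  std::map<Block *, VReg> Map;      // block -> register live at its exit
  std::map<Block *, VReg> Worklist; // promised register, block unvisited
  VReg NextVReg = 300;

  VReg atBlockEntry(Block *B) {
    if (B->Preds.empty())                 // entry block: fresh IMPLICIT_DEF
      return Map[B] = NextVReg++;
    if (B->Preds.size() == 1) {
      Block *P = B->Preds.front();
      if (!Map.count(P))                  // unvisited pred: promise a register
        return Map[B] = Worklist[P] = NextVReg++;
      return Map[B] = Map[P];             // visited pred: propagate
    }
    VReg Phi = NextVReg++;                // several preds: build a PHI
    std::printf("vreg%u = PHI(", Phi);
    for (Block *P : B->Preds) {
      if (!Map.count(P) && !Worklist.count(P))
        Worklist[P] = NextVReg++;         // synced later by the copy pass
      std::printf(" vreg%u", Map.count(P) ? Map[P] : Worklist[P]);
    }
    std::printf(" )\n");
    return Map[B] = Phi;
  }
};

int main() {
  Block Entry, Left, Right, Join;
  Left.Preds = {&Entry};
  Right.Preds = {&Entry};
  Join.Preds = {&Left, &Right};
  Merger M;
  M.atBlockEntry(&Entry);
  M.atBlockEntry(&Left);
  M.atBlockEntry(&Right);
  M.atBlockEntry(&Join); // prints: vreg301 = PHI( vreg300 vreg300 )
  return 0;
}
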

void SelectionDAGISel::SelectAllBasicBlocks(const Function &Fn) {
// Initialize the Fast-ISel state, if needed.
FastISel *FastIS = nullptr;
if (TM.Options.EnableFastISel)
FastIS = TLI->createFastISel(*FuncInfo, LibInfo);

setupSwiftErrorVals(Fn, TLI, FuncInfo);

// Iterate over all basic blocks in the function.
ReversePostOrderTraversal<const Function*> RPOT(&Fn);
for (ReversePostOrderTraversal<const Function*>::rpo_iterator
@@ -1203,6 +1323,7 @@ void SelectionDAGISel::SelectAllBasicBlocks(const Function &Fn) {
if (!FuncInfo->MBB)
continue; // Some blocks like catchpads have no code or MBB.
FuncInfo->InsertPt = FuncInfo->MBB->getFirstNonPHI();
mergeIncomingSwiftErrors(FuncInfo, TLI, TII, LLVMBB, SDB);

// Setup an EH landing-pad block.
FuncInfo->ExceptionPointerVirtReg = 0;