@@ -929,10 +929,48 @@ SDValue SelectionDAGBuilder::getControlRoot() {
  return Root;
}
/// Copy swift error to the final virtual register at end of a basic block, as
/// specified by SwiftErrorWorklist, if necessary.
static void copySwiftErrorsToFinalVRegs(SelectionDAGBuilder &SDB) {
  const TargetLowering &TLI = SDB.DAG.getTargetLoweringInfo();
  if (!TLI.supportSwiftError())
    return;

  if (!SDB.FuncInfo.SwiftErrorWorklist.count(SDB.FuncInfo.MBB))
    return;

  // Go through entries in SwiftErrorWorklist, and create copy as necessary.
  FunctionLoweringInfo::SwiftErrorVRegs &WorklistEntry =
      SDB.FuncInfo.SwiftErrorWorklist[SDB.FuncInfo.MBB];
  FunctionLoweringInfo::SwiftErrorVRegs &MapEntry =
      SDB.FuncInfo.SwiftErrorMap[SDB.FuncInfo.MBB];
  for (unsigned I = 0, E = WorklistEntry.size(); I < E; I++) {
    unsigned WorkReg = WorklistEntry[I];

    // Find the swifterror virtual register for the value in SwiftErrorMap.
    unsigned MapReg = MapEntry[I];
    assert(TargetRegisterInfo::isVirtualRegister(MapReg) &&
           "Entries in SwiftErrorMap should be virtual registers");
    if (WorkReg == MapReg)
      continue;

    // Create a copy from the SwiftErrorMap entry to the SwiftErrorWorklist entry.
    auto &DL = SDB.DAG.getDataLayout();
    SDValue CopyNode = SDB.DAG.getCopyToReg(
        SDB.getRoot(), SDB.getCurSDLoc(), WorkReg,
        SDB.DAG.getRegister(MapReg, EVT(TLI.getPointerTy(DL))));
    MapEntry[I] = WorkReg;
    SDB.DAG.setRoot(CopyNode);
  }
}
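For orientation, here is a tiny self-contained model of the bookkeeping this helper performs. The container and type names below are invented for illustration only and are not the real FunctionLoweringInfo members: each basic block tracks a vector of swifterror vregs, and when the vreg a successor was told to read (the worklist entry) differs from the vreg currently holding the value (the map entry), a copy is emitted at the end of the block.

#include <cstdio>
#include <map>
#include <vector>

using BlockId = int;   // stand-in for MachineBasicBlock *
using VReg = unsigned; // stand-in for a virtual register number

struct SwiftErrorModel {
  std::map<BlockId, std::vector<VReg>> Worklist; // vregs successors expect
  std::map<BlockId, std::vector<VReg>> Map;      // vregs currently holding the value

  // Rough analogue of copySwiftErrorsToFinalVRegs above.
  void finalizeBlock(BlockId BB) {
    auto It = Worklist.find(BB);
    if (It == Worklist.end())
      return;
    std::vector<VReg> &Want = It->second;
    std::vector<VReg> &Have = Map[BB];
    for (size_t I = 0, E = Want.size(); I < E; ++I) {
      if (Want[I] == Have[I])
        continue;
      // Corresponds to the CopyToReg node built in the real code.
      std::printf("COPY vreg%u <- vreg%u\n", Want[I], Have[I]);
      Have[I] = Want[I];
    }
  }
};

int main() {
  SwiftErrorModel M;
  M.Map[0] = {5};      // this block tracks the error value in vreg5
  M.Worklist[0] = {7}; // but a successor expects to read it from vreg7
  M.finalizeBlock(0);  // prints: COPY vreg7 <- vreg5
}
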
void SelectionDAGBuilder::visit(const Instruction &I) {
  // Set up outgoing PHI node register values before emitting the terminator.
  if (isa<TerminatorInst>(&I)) {
    copySwiftErrorsToFinalVRegs(*this);
    HandlePHINodesInSuccessorBlocks(I.getParent());
  }

  ++SDNodeOrder;
@@ -1434,6 +1472,23 @@ void SelectionDAGBuilder::visitRet(const ReturnInst &I) {
}
}
  // Push in swifterror virtual register as the last element of Outs. This makes
  // sure swifterror virtual register will be returned in the swifterror
  // physical register.
  const Function *F = I.getParent()->getParent();
  if (TLI.supportSwiftError() &&
      F->getAttributes().hasAttrSomewhere(Attribute::SwiftError)) {
    ISD::ArgFlagsTy Flags = ISD::ArgFlagsTy();
    Flags.setSwiftError();
    Outs.push_back(ISD::OutputArg(Flags, EVT(TLI.getPointerTy(DL)) /*vt*/,
                                  EVT(TLI.getPointerTy(DL)) /*argvt*/,
                                  true /*isfixed*/, 1 /*origidx*/,
                                  0 /*partOffs*/));
    // Create SDNode for the swifterror virtual register.
    OutVals.push_back(DAG.getRegister(FuncInfo.SwiftErrorMap[FuncInfo.MBB][0],
                                      EVT(TLI.getPointerTy(DL))));
  }

  bool isVarArg = DAG.getMachineFunction().getFunction()->isVarArg();
  CallingConv::ID CallConv =
      DAG.getMachineFunction().getFunction()->getCallingConv();
@@ -3308,7 +3363,22 @@ void SelectionDAGBuilder::visitLoad(const LoadInst &I) {
  if (I.isAtomic())
    return visitAtomicLoad(I);

  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
  const Value *SV = I.getOperand(0);
  if (TLI.supportSwiftError()) {
    // Swifterror values can come from either a function parameter with
    // swifterror attribute or an alloca with swifterror attribute.
    if (const Argument *Arg = dyn_cast<Argument>(SV)) {
      if (Arg->hasSwiftErrorAttr())
        return visitLoadFromSwiftError(I);
    }

    if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(SV)) {
      if (Alloca->isSwiftError())
        return visitLoadFromSwiftError(I);
    }
  }

  SDValue Ptr = getValue(SV);
  Type *Ty = I.getType();
@@ -3332,7 +3402,6 @@ void SelectionDAGBuilder::visitLoad(const LoadInst &I) {
  I.getAAMetadata(AAInfo);
  const MDNode *Ranges = I.getMetadata(LLVMContext::MD_range);

  SmallVector<EVT, 4> ValueVTs;
  SmallVector<uint64_t, 4> Offsets;
  ComputeValueVTs(TLI, DAG.getDataLayout(), Ty, ValueVTs, &Offsets);
@@ -3409,13 +3478,86 @@ void SelectionDAGBuilder::visitLoad(const LoadInst &I) {
                           DAG.getVTList(ValueVTs), Values));
}
void SelectionDAGBuilder::visitStoreToSwiftError(const StoreInst &I) {
  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
  assert(TLI.supportSwiftError() &&
         "call visitStoreToSwiftError when backend supports swifterror");

  SmallVector<EVT, 4> ValueVTs;
  SmallVector<uint64_t, 4> Offsets;
  const Value *SrcV = I.getOperand(0);
  ComputeValueVTs(DAG.getTargetLoweringInfo(), DAG.getDataLayout(),
                  SrcV->getType(), ValueVTs, &Offsets);
  assert(ValueVTs.size() == 1 && Offsets[0] == 0 &&
         "expect a single EVT for swifterror");

  SDValue Src = getValue(SrcV);
  // Create a virtual register, then update the virtual register.
  auto &DL = DAG.getDataLayout();
  const TargetRegisterClass *RC = TLI.getRegClassFor(TLI.getPointerTy(DL));
  unsigned VReg = FuncInfo.MF->getRegInfo().createVirtualRegister(RC);
  // Chain, DL, Reg, N or Chain, DL, Reg, N, Glue
  // Chain can be getRoot or getControlRoot.
  SDValue CopyNode = DAG.getCopyToReg(getRoot(), getCurSDLoc(), VReg,
                                      SDValue(Src.getNode(), Src.getResNo()));
  DAG.setRoot(CopyNode);
  FuncInfo.setSwiftErrorVReg(FuncInfo.MBB, I.getOperand(1), VReg);
}
void SelectionDAGBuilder::visitLoadFromSwiftError(const LoadInst &I) {
  assert(DAG.getTargetLoweringInfo().supportSwiftError() &&
         "call visitLoadFromSwiftError when backend supports swifterror");

  assert(!I.isVolatile() &&
         I.getMetadata(LLVMContext::MD_nontemporal) == nullptr &&
         I.getMetadata(LLVMContext::MD_invariant_load) == nullptr &&
         "Support volatile, non temporal, invariant for load_from_swift_error");

  const Value *SV = I.getOperand(0);
  Type *Ty = I.getType();
  AAMDNodes AAInfo;
  I.getAAMetadata(AAInfo);
  assert(!AA->pointsToConstantMemory(MemoryLocation(
             SV, DAG.getDataLayout().getTypeStoreSize(Ty), AAInfo)) &&
         "load_from_swift_error should not be constant memory");

  SmallVector<EVT, 4> ValueVTs;
  SmallVector<uint64_t, 4> Offsets;
  ComputeValueVTs(DAG.getTargetLoweringInfo(), DAG.getDataLayout(), Ty,
                  ValueVTs, &Offsets);
  assert(ValueVTs.size() == 1 && Offsets[0] == 0 &&
         "expect a single EVT for swifterror");

  // Chain, DL, Reg, VT, Glue or Chain, DL, Reg, VT
  SDValue L = DAG.getCopyFromReg(getRoot(), getCurSDLoc(),
                                 FuncInfo.findSwiftErrorVReg(FuncInfo.MBB, SV),
                                 ValueVTs[0]);

  setValue(&I, L);
}
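Taken together, the two helpers above mean that swifterror loads and stores never become memory operations: a store defines a fresh virtual register and records it for the swifterror pointer, and a later load reads back whichever vreg was recorded last. The standalone sketch below models that contract; SwiftErrorVRegModel and its store/load members are invented names, not LLVM API.

#include <cassert>
#include <cstdio>
#include <map>

using VReg = unsigned;

// Invented stand-in for the per-block swifterror vreg table kept in
// FunctionLoweringInfo (setSwiftErrorVReg / findSwiftErrorVReg).
struct SwiftErrorVRegModel {
  std::map<const void *, VReg> Current; // swifterror pointer -> live vreg
  VReg Next = 100;

  VReg store(const void *ErrPtr) { // analogue of visitStoreToSwiftError
    VReg V = Next++;               // createVirtualRegister + CopyToReg
    Current[ErrPtr] = V;           // setSwiftErrorVReg
    return V;
  }
  VReg load(const void *ErrPtr) const { // analogue of visitLoadFromSwiftError
    return Current.at(ErrPtr);          // findSwiftErrorVReg + CopyFromReg
  }
};

int main() {
  int ErrorSlot; // stands in for an alloca marked swifterror
  SwiftErrorVRegModel M;
  VReg Def = M.store(&ErrorSlot); // a store of the error value becomes a register def
  VReg Use = M.load(&ErrorSlot);  // a load of the error value becomes a register read
  assert(Def == Use);
  std::printf("store defined vreg%u, load read vreg%u\n", Def, Use);
}
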
void SelectionDAGBuilder::visitStore(const StoreInst &I) {
  if (I.isAtomic())
    return visitAtomicStore(I);

  const Value *SrcV = I.getOperand(0);
  const Value *PtrV = I.getOperand(1);

  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
  if (TLI.supportSwiftError()) {
    // Swifterror values can come from either a function parameter with
    // swifterror attribute or an alloca with swifterror attribute.
    if (const Argument *Arg = dyn_cast<Argument>(PtrV)) {
      if (Arg->hasSwiftErrorAttr())
        return visitStoreToSwiftError(I);
    }

    if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(PtrV)) {
      if (Alloca->isSwiftError())
        return visitStoreToSwiftError(I);
    }
  }

  SmallVector<EVT, 4> ValueVTs;
  SmallVector<uint64_t, 4> Offsets;
  ComputeValueVTs(DAG.getTargetLoweringInfo(), DAG.getDataLayout(),
@@ -5552,13 +5694,16 @@ SelectionDAGBuilder::lowerInvokable(TargetLowering::CallLoweringInfo &CLI,
void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
                                      bool isTailCall,
                                      const BasicBlock *EHPadBB) {
  auto &DL = DAG.getDataLayout();
  FunctionType *FTy = CS.getFunctionType();
  Type *RetTy = CS.getType();

  TargetLowering::ArgListTy Args;
  TargetLowering::ArgListEntry Entry;
  Args.reserve(CS.arg_size());

  const Value *SwiftErrorVal = nullptr;
  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
  for (ImmutableCallSite::arg_iterator i = CS.arg_begin(), e = CS.arg_end();
       i != e; ++i) {
    const Value *V = *i;
@@ -5572,6 +5717,17 @@ void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
    // Skip the first return-type Attribute to get to params.
    Entry.setAttributes(&CS, i - CS.arg_begin() + 1);

    // Use swifterror virtual register as input to the call.
    if (Entry.isSwiftError && TLI.supportSwiftError()) {
      SwiftErrorVal = V;
      // Find the virtual register that holds the swifterror argument and use
      // that register, rather than the Value itself, as the call operand.
      Entry.Node = DAG.getRegister(
          FuncInfo.findSwiftErrorVReg(FuncInfo.MBB, V),
          EVT(TLI.getPointerTy(DL)));
    }

    Args.push_back(Entry);

    // If we have an explicit sret argument that is an Instruction, (i.e., it
@@ -5598,6 +5754,20 @@ void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
    Result.first = lowerRangeToAssertZExt(DAG, *Inst, Result.first);
    setValue(Inst, Result.first);
  }

  // The last element of CLI.InVals has the SDValue for the swifterror return.
  // Here we copy it to a virtual register and update SwiftErrorMap for
  // book-keeping.
  if (SwiftErrorVal && TLI.supportSwiftError()) {
    // Get the last element of InVals.
    SDValue Src = CLI.InVals.back();
    const TargetRegisterClass *RC = TLI.getRegClassFor(TLI.getPointerTy(DL));
    unsigned VReg = FuncInfo.MF->getRegInfo().createVirtualRegister(RC);
    SDValue CopyNode = CLI.DAG.getCopyToReg(Result.second, CLI.DL, VReg, Src);
    // We update the virtual register for the actual swifterror argument.
    FuncInfo.setSwiftErrorVReg(FuncInfo.MBB, SwiftErrorVal, VReg);
    DAG.setRoot(CopyNode);
  }
}
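Around a call, the pieces above compose as follows: the caller feeds its current swifterror vreg into the call as an extra operand, and after the call it copies the extra result (CLI.InVals.back()) into a brand-new vreg that becomes the tracked value from then on. A minimal model, again with invented names rather than the real lowering API:

#include <cstdio>

using VReg = unsigned;

// Invented helper: models the swifterror flow around one lowered call.
// InVReg plays the role of DAG.getRegister(findSwiftErrorVReg(MBB, V), ...),
// and the returned vreg plays the role of the fresh register that
// setSwiftErrorVReg records after CLI.InVals.back() is copied into it.
static VReg lowerCallWithSwiftError(VReg InVReg, VReg &NextVReg) {
  std::printf("call consumes swifterror value from vreg%u\n", InVReg);
  VReg OutVReg = NextVReg++; // createVirtualRegister + CopyToReg of the result
  std::printf("swifterror result copied into vreg%u\n", OutVReg);
  return OutVReg;
}

int main() {
  VReg NextVReg = 200;
  VReg Tracked = 42; // vreg tracked before the call
  Tracked = lowerCallWithSwiftError(Tracked, NextVReg);
  std::printf("swifterror now tracked in vreg%u\n", Tracked);
}
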
/// IsOnlyUsedInZeroEqualityComparison - Return true if it only matters that the
@@ -7311,10 +7481,23 @@ TargetLowering::LowerCallTo(TargetLowering::CallLoweringInfo &CLI) const {
}
}
  // We push in swifterror return as the last element of CLI.Ins.
  ArgListTy &Args = CLI.getArgs();
  if (supportSwiftError()) {
    for (unsigned i = 0, e = Args.size(); i != e; ++i) {
      if (Args[i].isSwiftError) {
        ISD::InputArg MyFlags;
        MyFlags.VT = getPointerTy(DL);
        MyFlags.ArgVT = EVT(getPointerTy(DL));
        MyFlags.Flags.setSwiftError();
        CLI.Ins.push_back(MyFlags);
      }
    }
  }
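  // Because of this extra InputArg, the lowered call produces one more result
  // value; SelectionDAGBuilder::LowerCallTo reads it back as CLI.InVals.back()
  // and copies it into a fresh swifterror virtual register.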
  // Handle all of the outgoing arguments.
  CLI.Outs.clear();
  CLI.OutVals.clear();
  for (unsigned i = 0, e = Args.size(); i != e; ++i) {
    SmallVector<EVT, 4> ValueVTs;
    ComputeValueVTs(*this, DL, Args[i].Ty, ValueVTs);
@@ -7432,6 +7615,9 @@ TargetLowering::LowerCallTo(TargetLowering::CallLoweringInfo &CLI) const {
  SmallVector<SDValue, 4> InVals;
  CLI.Chain = LowerCall(CLI, InVals);

  // Update CLI.InVals to use outside of this function.
  CLI.InVals = InVals;

  // Verify that the target's LowerCall behaved as expected.
  assert(CLI.Chain.getNode() && CLI.Chain.getValueType() == MVT::Other &&
         "LowerCall didn't return a valid chain!");
@@ -7793,6 +7979,14 @@ void SelectionDAGISel::LowerArguments(const Function &F) {
        FuncInfo->setArgumentFrameIndex(&*I, FI->getIndex());
    }
    // Update SwiftErrorMap.
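    // In the entry block, the incoming swifterror argument has just been
    // lowered to a CopyFromReg of a virtual register; remember that vreg as
    // the block's current swifterror value (slot 0 of the map).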
    if (Res.getOpcode() == ISD::CopyFromReg && TLI->supportSwiftError() &&
        F.getAttributes().hasAttribute(Idx, Attribute::SwiftError)) {
      unsigned Reg = cast<RegisterSDNode>(Res.getOperand(1))->getReg();
      if (TargetRegisterInfo::isVirtualRegister(Reg))
        FuncInfo->SwiftErrorMap[FuncInfo->MBB][0] = Reg;
    }
    // If this argument is live outside of the entry block, insert a copy from
    // wherever we got it to the vreg that other BB's will reference it as.
    if (!TM.Options.EnableFastISel && Res.getOpcode() == ISD::CopyFromReg) {