diff --git a/src/coreclr/jit/codegen.h b/src/coreclr/jit/codegen.h
index c36a6776a8cd9..4d17e291d0c8a 100644
--- a/src/coreclr/jit/codegen.h
+++ b/src/coreclr/jit/codegen.h
@@ -1183,6 +1183,9 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
void genCodeForCpBlkRepMovs(GenTreeBlk* cpBlkNode);
void genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode);
void genCodeForPhysReg(GenTreePhysReg* tree);
+#ifdef SWIFT_SUPPORT
+ void genCodeForSwiftErrorReg(GenTree* tree);
+#endif // SWIFT_SUPPORT
void genCodeForNullCheck(GenTreeIndir* tree);
void genCodeForCmpXchg(GenTreeCmpXchg* tree);
void genCodeForReuseVal(GenTree* treeNode);
diff --git a/src/coreclr/jit/codegenarmarch.cpp b/src/coreclr/jit/codegenarmarch.cpp
index 8eb8d71b617a5..1e517e77c2e9b 100644
--- a/src/coreclr/jit/codegenarmarch.cpp
+++ b/src/coreclr/jit/codegenarmarch.cpp
@@ -441,6 +441,12 @@ void CodeGen::genCodeForTreeNode(GenTree* treeNode)
break;
#endif // TARGET_ARM64
+#ifdef SWIFT_SUPPORT
+ case GT_SWIFT_ERROR:
+ genCodeForSwiftErrorReg(treeNode);
+ break;
+#endif // SWIFT_SUPPORT
+
case GT_RELOAD:
// do nothing - reload is just a marker.
// The parent node will call genConsumeReg on this which will trigger the unspill of this node's child
@@ -3369,6 +3375,17 @@ void CodeGen::genCall(GenTreeCall* call)
genDefineTempLabel(genCreateTempLabel());
}
+#ifdef SWIFT_SUPPORT
+ // Clear the Swift error register before calling a Swift method,
+ // so we can check if it set the error register after returning.
+ // (Flag is only set if we know we need to check the error register)
+ if ((call->gtCallMoreFlags & GTF_CALL_M_SWIFT_ERROR_HANDLING) != 0)
+ {
+ assert(call->unmgdCallConv == CorInfoCallConvExtension::Swift);
+ instGen_Set_Reg_To_Zero(EA_PTRSIZE, REG_SWIFT_ERROR);
+ }
+#endif // SWIFT_SUPPORT
+
genCallInstruction(call);
genDefinePendingCallLabel(call);
diff --git a/src/coreclr/jit/codegencommon.cpp b/src/coreclr/jit/codegencommon.cpp
index d965d5c91f42e..e5239b5b057e3 100644
--- a/src/coreclr/jit/codegencommon.cpp
+++ b/src/coreclr/jit/codegencommon.cpp
@@ -8567,3 +8567,31 @@ void CodeGen::genCodeForReuseVal(GenTree* treeNode)
genDefineTempLabel(genCreateTempLabel());
}
}
+
+#ifdef SWIFT_SUPPORT
+//---------------------------------------------------------------------
+// genCodeForSwiftErrorReg - generate code for a GT_SWIFT_ERROR node
+//
+// Arguments
+// tree - the GT_SWIFT_ERROR node
+//
+// Return value:
+// None
+//
+void CodeGen::genCodeForSwiftErrorReg(GenTree* tree)
+{
+ assert(tree->OperIs(GT_SWIFT_ERROR));
+
+ var_types targetType = tree->TypeGet();
+ regNumber targetReg = tree->GetRegNum();
+
+ // LSRA should have picked REG_SWIFT_ERROR as the destination register, too
+ // (see LinearScan::BuildNode for an explanation of why we want this)
+ assert(targetReg == REG_SWIFT_ERROR);
+
+ inst_Mov(targetType, targetReg, REG_SWIFT_ERROR, /* canSkip */ true);
+ genTransferRegGCState(targetReg, REG_SWIFT_ERROR);
+
+ genProduceReg(tree);
+}
+#endif // SWIFT_SUPPORT
diff --git a/src/coreclr/jit/codegenxarch.cpp b/src/coreclr/jit/codegenxarch.cpp
index 884ba901e5d83..0cd02f2924309 100644
--- a/src/coreclr/jit/codegenxarch.cpp
+++ b/src/coreclr/jit/codegenxarch.cpp
@@ -2107,6 +2107,12 @@ void CodeGen::genCodeForTreeNode(GenTree* treeNode)
case GT_NOP:
break;
+#ifdef SWIFT_SUPPORT
+ case GT_SWIFT_ERROR:
+ genCodeForSwiftErrorReg(treeNode);
+ break;
+#endif // SWIFT_SUPPORT
+
case GT_KEEPALIVE:
genConsumeRegs(treeNode->AsOp()->gtOp1);
break;
@@ -6035,6 +6041,17 @@ void CodeGen::genCall(GenTreeCall* call)
instGen(INS_vzeroupper);
}
+#ifdef SWIFT_SUPPORT
+ // Clear the Swift error register before calling a Swift method,
+ // so we can check if it set the error register after returning.
+ // (Flag is only set if we know we need to check the error register)
+ if ((call->gtCallMoreFlags & GTF_CALL_M_SWIFT_ERROR_HANDLING) != 0)
+ {
+ assert(call->unmgdCallConv == CorInfoCallConvExtension::Swift);
+ instGen_Set_Reg_To_Zero(EA_PTRSIZE, REG_SWIFT_ERROR);
+ }
+#endif // SWIFT_SUPPORT
+
genCallInstruction(call X86_ARG(stackArgBytes));
genDefinePendingCallLabel(call);
diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h
index 854cda9b9ca90..d70e1f7c77b63 100644
--- a/src/coreclr/jit/compiler.h
+++ b/src/coreclr/jit/compiler.h
@@ -4368,7 +4368,11 @@ class Compiler
void impCheckForPInvokeCall(
GenTreeCall* call, CORINFO_METHOD_HANDLE methHnd, CORINFO_SIG_INFO* sig, unsigned mflags, BasicBlock* block);
GenTreeCall* impImportIndirectCall(CORINFO_SIG_INFO* sig, const DebugInfo& di = DebugInfo());
- void impPopArgsForUnmanagedCall(GenTreeCall* call, CORINFO_SIG_INFO* sig);
+ void impPopArgsForUnmanagedCall(GenTreeCall* call, CORINFO_SIG_INFO* sig, /* OUT */ CallArg** swiftErrorArg, /* OUT */ CallArg** swiftSelfArg);
+
+#ifdef SWIFT_SUPPORT
+ void impAppendSwiftErrorStore(GenTreeCall* call, CallArg* const swiftErrorArg);
+#endif // SWIFT_SUPPORT
void impInsertHelperCall(CORINFO_HELPER_DESC* helperCall);
void impHandleAccessAllowed(CorInfoIsAccessAllowedResult result, CORINFO_HELPER_DESC* helperCall);
@@ -11325,6 +11329,7 @@ class GenTreeVisitor
case GT_PINVOKE_EPILOG:
case GT_IL_OFFSET:
case GT_NOP:
+ case GT_SWIFT_ERROR:
break;
// Lclvar unary operators
diff --git a/src/coreclr/jit/compiler.hpp b/src/coreclr/jit/compiler.hpp
index 67f5c59d93265..a8ef2c7086c9b 100644
--- a/src/coreclr/jit/compiler.hpp
+++ b/src/coreclr/jit/compiler.hpp
@@ -4246,6 +4246,7 @@ void GenTree::VisitOperands(TVisitor visitor)
case GT_PINVOKE_EPILOG:
case GT_IL_OFFSET:
case GT_NOP:
+ case GT_SWIFT_ERROR:
return;
// Unary operators with an optional operand
diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp
index 298fd1074a4f1..d19a857bca161 100644
--- a/src/coreclr/jit/gentree.cpp
+++ b/src/coreclr/jit/gentree.cpp
@@ -7066,6 +7066,9 @@ bool GenTree::OperRequiresCallFlag(Compiler* comp) const
case GT_KEEPALIVE:
return true;
+ case GT_SWIFT_ERROR:
+ return true;
+
case GT_INTRINSIC:
return comp->IsIntrinsicImplementedByUserCall(this->AsIntrinsic()->gtIntrinsicName);
@@ -7362,6 +7365,7 @@ bool GenTree::OperRequiresGlobRefFlag(Compiler* comp) const
case GT_CMPXCHG:
case GT_MEMORYBARRIER:
case GT_KEEPALIVE:
+ case GT_SWIFT_ERROR:
return true;
case GT_CALL:
@@ -7420,6 +7424,7 @@ bool GenTree::OperSupportsOrderingSideEffect() const
case GT_CMPXCHG:
case GT_MEMORYBARRIER:
case GT_CATCH_ARG:
+ case GT_SWIFT_ERROR:
return true;
default:
return false;
@@ -8778,7 +8783,7 @@ GenTreeStoreDynBlk* Compiler::gtNewStoreDynBlkNode(GenTree* addr,
//
// Arguments:
// type - Type of the store
-// addr - Destionation address
+// addr - Destination address
// data - Value to store
// indirFlags - Indirection flags
//
@@ -10324,6 +10329,7 @@ GenTreeUseEdgeIterator::GenTreeUseEdgeIterator(GenTree* node)
case GT_PINVOKE_EPILOG:
case GT_IL_OFFSET:
case GT_NOP:
+ case GT_SWIFT_ERROR:
m_state = -1;
return;
@@ -12451,6 +12457,7 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack)
case GT_MEMORYBARRIER:
case GT_PINVOKE_PROLOG:
case GT_JMPTABLE:
+ case GT_SWIFT_ERROR:
break;
case GT_RET_EXPR:
diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h
index 26ab5b2c705a3..b648ba167dad7 100644
--- a/src/coreclr/jit/gentree.h
+++ b/src/coreclr/jit/gentree.h
@@ -4113,6 +4113,10 @@ enum GenTreeCallFlags : unsigned int
GTF_CALL_M_CAST_CAN_BE_EXPANDED = 0x04000000, // this cast (helper call) can be expanded if it's profitable. To be removed.
GTF_CALL_M_CAST_OBJ_NONNULL = 0x08000000, // if we expand this specific cast we don't need to check the input object for null
// NOTE: if needed, this flag can be removed, and we can introduce new _NONNUL cast helpers
+
+#ifdef SWIFT_SUPPORT
+ GTF_CALL_M_SWIFT_ERROR_HANDLING = 0x10000000, // call uses the Swift calling convention, and error register will be checked after it returns.
+#endif // SWIFT_SUPPORT
};
inline constexpr GenTreeCallFlags operator ~(GenTreeCallFlags a)
diff --git a/src/coreclr/jit/gtlist.h b/src/coreclr/jit/gtlist.h
index 00696e6398fcd..2d9208403b363 100644
--- a/src/coreclr/jit/gtlist.h
+++ b/src/coreclr/jit/gtlist.h
@@ -37,6 +37,7 @@ GTNODE(LABEL , GenTree ,0,0,GTK_LEAF) // Jump-
GTNODE(JMP , GenTreeVal ,0,0,GTK_LEAF|GTK_NOVALUE) // Jump to another function
GTNODE(FTN_ADDR , GenTreeFptrVal ,0,0,GTK_LEAF) // Address of a function
GTNODE(RET_EXPR , GenTreeRetExpr ,0,0,GTK_LEAF|DBK_NOTLIR) // Place holder for the return expression from an inline candidate
+GTNODE(SWIFT_ERROR , GenTree ,0,0,GTK_LEAF) // Error register value post-Swift call
//-----------------------------------------------------------------------------
// Constant nodes:
diff --git a/src/coreclr/jit/importercalls.cpp b/src/coreclr/jit/importercalls.cpp
index b557c629e3558..b8370c0f867a6 100644
--- a/src/coreclr/jit/importercalls.cpp
+++ b/src/coreclr/jit/importercalls.cpp
@@ -98,6 +98,11 @@ var_types Compiler::impImportCall(OPCODE opcode,
CORINFO_SIG_INFO calliSig;
NewCallArg extraArg;
+ // Swift calls may use special register types that require additional IR to handle,
+ // so if we're importing a Swift call, look for these types in the signature
+ CallArg* swiftErrorArg = nullptr;
+ CallArg* swiftSelfArg = nullptr;
+
/*-------------------------------------------------------------------------
* First create the call node
*/
@@ -651,6 +656,8 @@ var_types Compiler::impImportCall(OPCODE opcode,
if (call->gtFlags & GTF_CALL_UNMANAGED)
{
+ assert(call->IsCall());
+
// We set up the unmanaged call by linking the frame, disabling GC, etc
// This needs to be cleaned up on return.
// In addition, native calls have different normalization rules than managed code
@@ -663,7 +670,7 @@ var_types Compiler::impImportCall(OPCODE opcode,
checkForSmallType = true;
- impPopArgsForUnmanagedCall(call->AsCall(), sig);
+ impPopArgsForUnmanagedCall(call->AsCall(), sig, &swiftErrorArg, &swiftSelfArg);
goto DONE;
}
@@ -1485,6 +1492,15 @@ var_types Compiler::impImportCall(OPCODE opcode,
impPushOnStack(call, tiRetVal);
}
+#ifdef SWIFT_SUPPORT
+ // If call is a Swift call with error handling, append additional IR
+ // to handle storing the error register's value post-call.
+ if (swiftErrorArg != nullptr)
+ {
+ impAppendSwiftErrorStore(call->AsCall(), swiftErrorArg);
+ }
+#endif // SWIFT_SUPPORT
+
return callRetTyp;
}
#ifdef _PREFAST_
@@ -1822,7 +1838,10 @@ GenTreeCall* Compiler::impImportIndirectCall(CORINFO_SIG_INFO* sig, const DebugI
/*****************************************************************************/
-void Compiler::impPopArgsForUnmanagedCall(GenTreeCall* call, CORINFO_SIG_INFO* sig)
+void Compiler::impPopArgsForUnmanagedCall(GenTreeCall* call,
+ CORINFO_SIG_INFO* sig,
+ /* OUT */ CallArg** swiftErrorArg,
+ /* OUT */ CallArg** swiftSelfArg)
{
assert(call->gtFlags & GTF_CALL_UNMANAGED);
@@ -1842,10 +1861,74 @@ void Compiler::impPopArgsForUnmanagedCall(GenTreeCall* call, CORINFO_SIG_INFO* s
if (call->unmgdCallConv == CorInfoCallConvExtension::Thiscall)
{
- assert(argsToReverse);
+ assert(argsToReverse != 0);
argsToReverse--;
}
+#ifdef SWIFT_SUPPORT
+ unsigned short swiftErrorIndex = sig->numArgs;
+
+ // We are importing an unmanaged Swift call, which might require special parameter handling
+ if (call->unmgdCallConv == CorInfoCallConvExtension::Swift)
+ {
+ bool checkEntireStack = false;
+
+ // Check the signature of the Swift call for the special types
+ CORINFO_ARG_LIST_HANDLE sigArg = sig->args;
+
+ for (unsigned short argIndex = 0; argIndex < sig->numArgs;
+ sigArg = info.compCompHnd->getArgNext(sigArg), argIndex++)
+ {
+ CORINFO_CLASS_HANDLE argClass;
+ CorInfoType argType = strip(info.compCompHnd->getArgType(sig, sigArg, &argClass));
+ bool argIsByrefOrPtr = false;
+
+ if ((argType == CORINFO_TYPE_BYREF) || (argType == CORINFO_TYPE_PTR))
+ {
+ argClass = info.compCompHnd->getArgClass(sig, sigArg);
+ argType = info.compCompHnd->getChildType(argClass, &argClass);
+ argIsByrefOrPtr = true;
+ }
+
+ if ((argType != CORINFO_TYPE_VALUECLASS) || !info.compCompHnd->isIntrinsicType(argClass))
+ {
+ continue;
+ }
+
+ const char* namespaceName;
+ const char* className = info.compCompHnd->getClassNameFromMetadata(argClass, &namespaceName);
+
+ if ((strcmp(className, "SwiftError") == 0) &&
+ (strcmp(namespaceName, "System.Runtime.InteropServices.Swift") == 0))
+ {
+ // For error handling purposes, we expect a pointer/reference to a SwiftError to be passed
+ if (!argIsByrefOrPtr)
+ {
+ BADCODE("Expected SwiftError pointer/reference, got struct");
+ }
+
+ if (swiftErrorIndex != sig->numArgs)
+ {
+ BADCODE("Duplicate SwiftError* parameter");
+ }
+
+ swiftErrorIndex = argIndex;
+ checkEntireStack = true;
+ }
+ // TODO: Handle SwiftSelf, SwiftAsync
+ }
+
+ // Don't need to reverse args for Swift calls
+ argsToReverse = 0;
+
+ // If using one of the Swift register types, check entire stack for side effects
+ if (checkEntireStack)
+ {
+ impSpillSideEffects(true, CHECK_SPILL_ALL DEBUGARG("Spill for swift calls"));
+ }
+ }
+#endif // SWIFT_SUPPORT
+
#ifndef TARGET_X86
// Don't reverse args on ARM or x64 - first four args always placed in regs in order
argsToReverse = 0;
@@ -1892,6 +1975,7 @@ void Compiler::impPopArgsForUnmanagedCall(GenTreeCall* call, CORINFO_SIG_INFO* s
assert(thisPtr->TypeGet() == TYP_I_IMPL || thisPtr->TypeGet() == TYP_BYREF);
}
+ unsigned short argIndex = 0;
for (CallArg& arg : call->gtArgs.Args())
{
GenTree* argNode = arg.GetEarlyNode();
@@ -1914,9 +1998,55 @@ void Compiler::impPopArgsForUnmanagedCall(GenTreeCall* call, CORINFO_SIG_INFO* s
assert(!"*** invalid IL: gc ref passed to unmanaged call");
}
}
+
+#ifdef SWIFT_SUPPORT
+ if (argIndex == swiftErrorIndex)
+ {
+ // Found the SwiftError* arg
+ assert(swiftErrorArg != nullptr);
+ *swiftErrorArg = &arg;
+ }
+// TODO: SwiftSelf, SwiftAsync
+#endif // SWIFT_SUPPORT
+
+ argIndex++;
}
}
+#ifdef SWIFT_SUPPORT
+//------------------------------------------------------------------------
+// impAppendSwiftErrorStore: Append IR to store the Swift error register value
+// to the SwiftError* argument specified by swiftErrorArg, post-Swift call
+//
+// Arguments:
+// call - the Swift call
+// swiftErrorArg - the SwiftError* argument passed to call
+//
+void Compiler::impAppendSwiftErrorStore(GenTreeCall* call, CallArg* const swiftErrorArg)
+{
+ assert(call != nullptr);
+ assert(call->unmgdCallConv == CorInfoCallConvExtension::Swift);
+ assert(swiftErrorArg != nullptr);
+
+ GenTree* const argNode = swiftErrorArg->GetNode();
+ assert(argNode != nullptr);
+
+ // Store the error register value to where the SwiftError* points to
+ GenTree* errorRegNode = new (this, GT_SWIFT_ERROR) GenTree(GT_SWIFT_ERROR, TYP_I_IMPL);
+ errorRegNode->SetHasOrderingSideEffect();
+ errorRegNode->gtFlags |= (GTF_CALL | GTF_GLOB_REF);
+
+ GenTreeStoreInd* swiftErrorStore = gtNewStoreIndNode(argNode->TypeGet(), argNode, errorRegNode);
+ impAppendTree(swiftErrorStore, CHECK_SPILL_ALL, impCurStmtDI, false);
+
+ // Indicate the error register will be checked after this call returns
+ call->gtCallMoreFlags |= GTF_CALL_M_SWIFT_ERROR_HANDLING;
+
+ // Swift call isn't going to use the SwiftError* arg, so don't bother emitting it
+ call->gtArgs.Remove(swiftErrorArg);
+}
+#endif // SWIFT_SUPPORT
+
//------------------------------------------------------------------------
// impInitializeArrayIntrinsic: Attempts to replace a call to InitializeArray
// with a GT_COPYBLK node.
@@ -5618,8 +5748,7 @@ void Compiler::impCheckForPInvokeCall(
// return here without inlining the native call.
if (unmanagedCallConv == CorInfoCallConvExtension::Managed ||
unmanagedCallConv == CorInfoCallConvExtension::Fastcall ||
- unmanagedCallConv == CorInfoCallConvExtension::FastcallMemberFunction ||
- unmanagedCallConv == CorInfoCallConvExtension::Swift)
+ unmanagedCallConv == CorInfoCallConvExtension::FastcallMemberFunction)
{
return;
}
diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp
index 18d7f388e908a..a01500c5c4932 100644
--- a/src/coreclr/jit/lower.cpp
+++ b/src/coreclr/jit/lower.cpp
@@ -5819,6 +5819,26 @@ GenTree* Lowering::LowerNonvirtPinvokeCall(GenTreeCall* call)
InsertPInvokeCallEpilog(call);
}
+#ifdef SWIFT_SUPPORT
+ // For Swift calls that require error handling, ensure the GT_SWIFT_ERROR node
+ // that consumes the error register is the call node's successor.
+ // This is to simplify logic for marking the error register as busy in LSRA.
+ if ((call->gtCallMoreFlags & GTF_CALL_M_SWIFT_ERROR_HANDLING) != 0)
+ {
+ GenTree* swiftErrorNode = call->gtNext;
+ assert(swiftErrorNode != nullptr);
+
+ while (!swiftErrorNode->OperIs(GT_SWIFT_ERROR))
+ {
+ swiftErrorNode = swiftErrorNode->gtNext;
+ assert(swiftErrorNode != nullptr);
+ }
+
+ BlockRange().Remove(swiftErrorNode);
+ BlockRange().InsertAfter(call, swiftErrorNode);
+ }
+#endif // SWIFT_SUPPORT
+
return result;
}
diff --git a/src/coreclr/jit/lsra.cpp b/src/coreclr/jit/lsra.cpp
index 9ca1d59ed7a5e..9a2d493a8df90 100644
--- a/src/coreclr/jit/lsra.cpp
+++ b/src/coreclr/jit/lsra.cpp
@@ -5109,6 +5109,13 @@ void LinearScan::allocateRegistersMinimal()
}
regsInUseThisLocation |= currentRefPosition.registerAssignment;
INDEBUG(dumpLsraAllocationEvent(LSRA_EVENT_FIXED_REG, nullptr, currentRefPosition.assignedReg()));
+
+#ifdef SWIFT_SUPPORT
+ if (currentRefPosition.delayRegFree)
+ {
+ regsInUseNextLocation |= currentRefPosition.registerAssignment;
+ }
+#endif // SWIFT_SUPPORT
}
else
{
@@ -5818,6 +5825,13 @@ void LinearScan::allocateRegisters()
}
regsInUseThisLocation |= currentRefPosition.registerAssignment;
INDEBUG(dumpLsraAllocationEvent(LSRA_EVENT_FIXED_REG, nullptr, currentRefPosition.assignedReg()));
+
+#ifdef SWIFT_SUPPORT
+ if (currentRefPosition.delayRegFree)
+ {
+ regsInUseNextLocation |= currentRefPosition.registerAssignment;
+ }
+#endif // SWIFT_SUPPORT
}
else
{
diff --git a/src/coreclr/jit/lsraarm64.cpp b/src/coreclr/jit/lsraarm64.cpp
index ea3bc9d7fb37e..066b9d4e2b7bb 100644
--- a/src/coreclr/jit/lsraarm64.cpp
+++ b/src/coreclr/jit/lsraarm64.cpp
@@ -1282,6 +1282,20 @@ int LinearScan::BuildNode(GenTree* tree)
srcCount = BuildSelect(tree->AsOp());
break;
+#ifdef SWIFT_SUPPORT
+ case GT_SWIFT_ERROR:
+ srcCount = 0;
+ assert(dstCount == 1);
+
+ // Any register should do here, but the error register value should immediately
+ // be moved from GT_SWIFT_ERROR's destination register to the SwiftError struct,
+ // and we know REG_SWIFT_ERROR should be busy up to this point, anyway.
+ // By forcing LSRA to use REG_SWIFT_ERROR as both the source and destination register,
+ // we can ensure the redundant move is elided.
+ BuildDef(tree, RBM_SWIFT_ERROR);
+ break;
+#endif // SWIFT_SUPPORT
+
} // end switch (tree->OperGet())
if (tree->IsUnusedValue() && (dstCount != 0))
diff --git a/src/coreclr/jit/lsraarmarch.cpp b/src/coreclr/jit/lsraarmarch.cpp
index ad112a817220f..c424a4d6bc620 100644
--- a/src/coreclr/jit/lsraarmarch.cpp
+++ b/src/coreclr/jit/lsraarmarch.cpp
@@ -393,6 +393,30 @@ int LinearScan::BuildCall(GenTreeCall* call)
regMaskTP killMask = getKillSetForCall(call);
BuildDefsWithKills(call, dstCount, dstCandidates, killMask);
+#ifdef SWIFT_SUPPORT
+ if ((call->gtCallMoreFlags & GTF_CALL_M_SWIFT_ERROR_HANDLING) != 0)
+ {
+ // Tree is a Swift call with error handling; error register should have been killed
+ assert(call->unmgdCallConv == CorInfoCallConvExtension::Swift);
+ assert((killMask & RBM_SWIFT_ERROR) != 0);
+
+ // After a Swift call that might throw returns, we expect the error register to be consumed
+ // by a GT_SWIFT_ERROR node. However, we want to ensure the error register won't be trashed
+ // before GT_SWIFT_ERROR can consume it.
+ // (For example, the PInvoke epilog comes before the error register store.)
+ // To do so, delay the freeing of the error register until the next node.
+ // This only works if the next node after the call is the GT_SWIFT_ERROR node.
+    // (LowerNonvirtPinvokeCall should have moved the GT_SWIFT_ERROR node during lowering.)
+ assert(call->gtNext != nullptr);
+ assert(call->gtNext->OperIs(GT_SWIFT_ERROR));
+
+ // We could use RefTypeKill, but RefTypeFixedReg is used less commonly, so the check for delayRegFree
+ // during register allocation should be cheaper in terms of TP.
+ RefPosition* pos = newRefPosition(REG_SWIFT_ERROR, currentLoc, RefTypeFixedReg, call, RBM_SWIFT_ERROR);
+ setDelayFree(pos);
+ }
+#endif // SWIFT_SUPPORT
+
// No args are placed in registers anymore.
placedArgRegs = RBM_NONE;
numPlacedArgLocals = 0;
diff --git a/src/coreclr/jit/lsrabuild.cpp b/src/coreclr/jit/lsrabuild.cpp
index 3b9ec7f388aec..b3c1e2df4e1da 100644
--- a/src/coreclr/jit/lsrabuild.cpp
+++ b/src/coreclr/jit/lsrabuild.cpp
@@ -880,6 +880,19 @@ regMaskTP LinearScan::getKillSetForCall(GenTreeCall* call)
assert(!call->IsVirtualStub() ||
((killMask & compiler->virtualStubParamInfo->GetRegMask()) == compiler->virtualStubParamInfo->GetRegMask()));
#endif // !TARGET_ARM
+
+#ifdef SWIFT_SUPPORT
+ // Swift calls that throw may trash the callee-saved error register,
+ // so don't use the register post-call until it is consumed by SwiftError.
+ // GTF_CALL_M_SWIFT_ERROR_HANDLING indicates the call has a SwiftError* argument,
+ // so the error register value will eventually be consumed post-call.
+ if ((call->gtCallMoreFlags & GTF_CALL_M_SWIFT_ERROR_HANDLING) != 0)
+ {
+ assert(call->unmgdCallConv == CorInfoCallConvExtension::Swift);
+ killMask |= RBM_SWIFT_ERROR;
+ }
+#endif // SWIFT_SUPPORT
+
return killMask;
}
diff --git a/src/coreclr/jit/lsraxarch.cpp b/src/coreclr/jit/lsraxarch.cpp
index 41121ee9bed28..b108659f50ef5 100644
--- a/src/coreclr/jit/lsraxarch.cpp
+++ b/src/coreclr/jit/lsraxarch.cpp
@@ -633,6 +633,20 @@ int LinearScan::BuildNode(GenTree* tree)
}
break;
+#ifdef SWIFT_SUPPORT
+ case GT_SWIFT_ERROR:
+ srcCount = 0;
+ assert(dstCount == 1);
+
+ // Any register should do here, but the error register value should immediately
+ // be moved from GT_SWIFT_ERROR's destination register to the SwiftError struct,
+ // and we know REG_SWIFT_ERROR should be busy up to this point, anyway.
+ // By forcing LSRA to use REG_SWIFT_ERROR as both the source and destination register,
+ // we can ensure the redundant move is elided.
+ BuildDef(tree, RBM_SWIFT_ERROR);
+ break;
+#endif // SWIFT_SUPPORT
+
} // end switch (tree->OperGet())
// We need to be sure that we've set srcCount and dstCount appropriately.
@@ -1357,6 +1371,30 @@ int LinearScan::BuildCall(GenTreeCall* call)
regMaskTP killMask = getKillSetForCall(call);
BuildDefsWithKills(call, dstCount, dstCandidates, killMask);
+#ifdef SWIFT_SUPPORT
+ if ((call->gtCallMoreFlags & GTF_CALL_M_SWIFT_ERROR_HANDLING) != 0)
+ {
+ // Tree is a Swift call with error handling; error register should have been killed
+ assert(call->unmgdCallConv == CorInfoCallConvExtension::Swift);
+ assert((killMask & RBM_SWIFT_ERROR) != 0);
+
+ // After a Swift call that might throw returns, we expect the error register to be consumed
+ // by a GT_SWIFT_ERROR node. However, we want to ensure the error register won't be trashed
+ // before GT_SWIFT_ERROR can consume it.
+ // (For example, the PInvoke epilog comes before the error register store.)
+ // To do so, delay the freeing of the error register until the next node.
+ // This only works if the next node after the call is the GT_SWIFT_ERROR node.
+    // (LowerNonvirtPinvokeCall should have moved the GT_SWIFT_ERROR node during lowering.)
+ assert(call->gtNext != nullptr);
+ assert(call->gtNext->OperIs(GT_SWIFT_ERROR));
+
+ // We could use RefTypeKill, but RefTypeFixedReg is used less commonly, so the check for delayRegFree
+ // during register allocation should be cheaper in terms of TP.
+ RefPosition* pos = newRefPosition(REG_SWIFT_ERROR, currentLoc, RefTypeFixedReg, call, RBM_SWIFT_ERROR);
+ setDelayFree(pos);
+ }
+#endif // SWIFT_SUPPORT
+
// No args are placed in registers anymore.
placedArgRegs = RBM_NONE;
numPlacedArgLocals = 0;
diff --git a/src/coreclr/jit/targetamd64.h b/src/coreclr/jit/targetamd64.h
index 4abe71984b57c..147355c2474a2 100644
--- a/src/coreclr/jit/targetamd64.h
+++ b/src/coreclr/jit/targetamd64.h
@@ -563,4 +563,9 @@
#define RBM_STACK_PROBE_HELPER_TRASH RBM_RAX
#endif // !UNIX_AMD64_ABI
+ #define SWIFT_SUPPORT
+ #define REG_SWIFT_ERROR REG_R12
+ #define RBM_SWIFT_ERROR RBM_R12
+ #define SWIFT_SELF_REG REG_R13
+
// clang-format on
diff --git a/src/coreclr/jit/targetarm64.h b/src/coreclr/jit/targetarm64.h
index 3646ecb4407bf..74a14535f1507 100644
--- a/src/coreclr/jit/targetarm64.h
+++ b/src/coreclr/jit/targetarm64.h
@@ -370,4 +370,9 @@
#define REG_ZERO_INIT_FRAME_REG2 REG_R10
#define REG_ZERO_INIT_FRAME_SIMD REG_V16
+ #define SWIFT_SUPPORT
+ #define REG_SWIFT_ERROR REG_R21
+ #define RBM_SWIFT_ERROR RBM_R21
+ #define SWIFT_SELF_REG REG_R20
+
// clang-format on
diff --git a/src/coreclr/jit/valuenum.cpp b/src/coreclr/jit/valuenum.cpp
index cf4954762cebd..e86b2aff6eb45 100644
--- a/src/coreclr/jit/valuenum.cpp
+++ b/src/coreclr/jit/valuenum.cpp
@@ -11324,7 +11324,9 @@ void Compiler::fgValueNumberTree(GenTree* tree)
break;
case GT_CATCH_ARG:
+ case GT_SWIFT_ERROR:
// We know nothing about the value of a caught expression.
+ // We also know nothing about the error register's value post-Swift call.
tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
break;
diff --git a/src/coreclr/vm/dllimport.cpp b/src/coreclr/vm/dllimport.cpp
index d395b8e32cf5c..542ec14ad198d 100644
--- a/src/coreclr/vm/dllimport.cpp
+++ b/src/coreclr/vm/dllimport.cpp
@@ -4263,8 +4263,7 @@ static void CreateNDirectStubAccessMetadata(
{
if (unmgdCallConv == CorInfoCallConvExtension::Managed ||
unmgdCallConv == CorInfoCallConvExtension::Fastcall ||
- unmgdCallConv == CorInfoCallConvExtension::FastcallMemberFunction ||
- unmgdCallConv == CorInfoCallConvExtension::Swift)
+ unmgdCallConv == CorInfoCallConvExtension::FastcallMemberFunction)
{
COMPlusThrow(kTypeLoadException, IDS_INVALID_PINVOKE_CALLCONV);
}
diff --git a/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.cs b/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.cs
index 67a398d357e11..d4b81bafcd4c8 100644
--- a/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.cs
+++ b/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.cs
@@ -26,6 +26,9 @@ public class ErrorHandlingTests
[DllImport(SwiftLib, EntryPoint = "$s18SwiftErrorHandling05getMyB7Message4from13messageLengthSPys6UInt16VGSgs0B0_p_s5Int32VztF")]
public unsafe static extern void* GetErrorMessage(void* handle, out int length);
+ [DllImport(SwiftLib, EntryPoint = "$s18SwiftErrorHandling16freeStringBuffer6bufferySpys6UInt16VG_tF")]
+ public unsafe static extern void FreeErrorMessageBuffer(void* stringPtr);
+
[Fact]
public unsafe static void TestSwiftErrorThrown()
{
@@ -99,7 +102,7 @@ private unsafe static string GetErrorMessageFromSwift(SwiftError error)
{
void* pointer = GetErrorMessage(error.Value, out int messageLength);
string errorMessage = Marshal.PtrToStringUni((IntPtr)pointer, messageLength);
- NativeMemory.Free((void*)pointer);
+ FreeErrorMessageBuffer(pointer);
return errorMessage;
}
}
diff --git a/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.csproj b/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.csproj
index 49be10b939391..89eda99352fd2 100644
--- a/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.csproj
+++ b/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.csproj
@@ -5,8 +5,6 @@
true
true
-
- true
diff --git a/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.swift b/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.swift
index 20022c0dba3e2..5058014a42ce3 100644
--- a/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.swift
+++ b/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.swift
@@ -33,3 +33,7 @@ public func getMyErrorMessage(from error: Error, messageLength: inout Int32) ->
}
return nil
}
+
+public func freeStringBuffer(buffer: UnsafeMutablePointer) {
+ buffer.deallocate()
+}
diff --git a/src/tests/issues.targets b/src/tests/issues.targets
index 473e82bc3e969..a2dfa9b1581a3 100644
--- a/src/tests/issues.targets
+++ b/src/tests/issues.targets
@@ -75,7 +75,10 @@
https://github.com/dotnet/runtime/issues/88586
-
+
+ https://github.com/dotnet/runtime/issues/93631
+
+
https://github.com/dotnet/runtime/issues/93631