diff --git a/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp b/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp
index 5d5c4ea57ed56..fcfa60c4081b1 100644
--- a/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp
+++ b/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp
@@ -43,6 +43,7 @@
 #include "llvm/IR/DebugInfoMetadata.h"
 #include "llvm/IR/DebugLoc.h"
 #include "llvm/IR/DerivedTypes.h"
+#include "llvm/IR/EHPersonalities.h"
 #include "llvm/IR/Function.h"
 #include "llvm/IR/GlobalAlias.h"
 #include "llvm/IR/GlobalValue.h"
@@ -787,6 +788,10 @@ struct AddressSanitizer {
   FunctionCallee AMDGPUAddressPrivate;
   int InstrumentationWithCallsThreshold;
   uint32_t MaxInlinePoisoningSize;
+
+  // Block coloring for inserting "funclet" op bundle to calls in case of
+  // scoped EH functionality
+  DenseMap<BasicBlock *, ColorVector> BlockColors;
 };
 
 class ModuleAddressSanitizer {
@@ -900,6 +905,39 @@ class ModuleAddressSanitizer {
   Function *AsanDtorFunction = nullptr;
 };
 
+// Returns a "funclet" operand bundle in case the BB is within a funclet.
+static SmallVector<OperandBundleDef, 1>
+getEHFuncletBundle(const BasicBlock *BB,
+                   const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
+  if (BlockColors.empty())
+    return {};
+  SmallVector<OperandBundleDef, 1> OpBundle;
+  auto *CurBB = BB;
+  // AddressSanitizer creates BBs on the fly, especially with
+  // SplitBlockAndInsertIfThenElse or its variants, in which case the BB does
+  // not exist in BlockColors. If this BB is one of those newly created BBs,
+  // navigating to its predecessors until we find one of the original BBs will
+  // pick the right funclet.
+  auto It = BlockColors.find(CurBB);
+  while (It == BlockColors.end()) {
+    if (pred_empty(CurBB))
+      // This BB cannot be reached from the entry BB, because the entry BB has
+      // to be in BlockColors. Skip it.
+      return {};
+    CurBB = *pred_begin(CurBB);
+    It = BlockColors.find(CurBB);
+  }
+
+  const ColorVector &CV = BlockColors.find(CurBB)->second;
+  if (CV.size() != 1)
+    // Invalid funclet nest. Can't pick a single funclet bundle.
+    return {};
+  Instruction *Pad = CV.front()->getFirstNonPHI();
+  if (Pad->isEHPad())
+    OpBundle.emplace_back("funclet", Pad);
+  return OpBundle;
+}
+
 // Stack poisoning does not play well with exception handling.
 // When an exception is thrown, we essentially bypass the code
 // that unpoisones the stack. This is why the run-time library has
@@ -948,12 +986,17 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
   bool HasReturnsTwiceCall = false;
   bool PoisonStack;
 
+  // Block coloring for inserting "funclet" op bundle to calls in case of
+  // scoped EH functionality
+  const DenseMap<BasicBlock *, ColorVector> &BlockColors;
+
   FunctionStackPoisoner(Function &F, AddressSanitizer &ASan)
       : F(F), ASan(ASan), DIB(*F.getParent(), /*AllowUnresolved*/ false),
         C(ASan.C), IntptrTy(ASan.IntptrTy),
         IntptrPtrTy(PointerType::get(IntptrTy, 0)), Mapping(ASan.Mapping),
         PoisonStack(ClStack &&
-                    !Triple(F.getParent()->getTargetTriple()).isAMDGPU()) {}
+                    !Triple(F.getParent()->getTargetTriple()).isAMDGPU()),
+        BlockColors(ASan.BlockColors) {}
 
   bool runOnFunction() {
     if (!PoisonStack)
@@ -1036,7 +1079,8 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
 
     IRB.CreateCall(
         AsanAllocasUnpoisonFunc,
-        {IRB.CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
+        {IRB.CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr},
+        getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
   }
 
   // Unpoison dynamic allocas redzones.
@@ -1257,13 +1301,15 @@ void AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
     IRB.CreateCall(isa<MemMoveInst>(MI) ? AsanMemmove : AsanMemcpy,
                    {IRB.CreateAddrSpaceCast(MI->getOperand(0), PtrTy),
                     IRB.CreateAddrSpaceCast(MI->getOperand(1), PtrTy),
-                    IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
+                    IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)},
+                   getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
   } else if (isa<MemSetInst>(MI)) {
     IRB.CreateCall(
         AsanMemset,
         {IRB.CreateAddrSpaceCast(MI->getOperand(0), PtrTy),
          IRB.CreateIntCast(MI->getOperand(1), IRB.getInt32Ty(), false),
-         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
+         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)},
+        getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
   }
   MI->eraseFromParent();
 }
@@ -1506,7 +1552,8 @@ void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
     if (i->getType()->isPointerTy())
       i = IRB.CreatePointerCast(i, IntptrTy);
   }
-  IRB.CreateCall(F, Param);
+  IRB.CreateCall(F, Param,
+                 getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
 }
 
 static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I,
@@ -1667,18 +1714,22 @@ Instruction *AddressSanitizer::generateCrashCode(Instruction *InsertBefore,
   CallInst *Call = nullptr;
   if (SizeArgument) {
     if (Exp == 0)
-      Call = IRB.CreateCall(AsanErrorCallbackSized[IsWrite][0],
-                            {Addr, SizeArgument});
+      Call = IRB.CreateCall(
+          AsanErrorCallbackSized[IsWrite][0], {Addr, SizeArgument},
+          getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
     else
-      Call = IRB.CreateCall(AsanErrorCallbackSized[IsWrite][1],
-                            {Addr, SizeArgument, ExpVal});
+      Call = IRB.CreateCall(
+          AsanErrorCallbackSized[IsWrite][1], {Addr, SizeArgument, ExpVal},
+          getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
   } else {
     if (Exp == 0)
       Call =
-          IRB.CreateCall(AsanErrorCallback[IsWrite][0][AccessSizeIndex], Addr);
+          IRB.CreateCall(AsanErrorCallback[IsWrite][0][AccessSizeIndex], Addr,
+                         getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
     else
-      Call = IRB.CreateCall(AsanErrorCallback[IsWrite][1][AccessSizeIndex],
-                            {Addr, ExpVal});
+      Call = IRB.CreateCall(
+          AsanErrorCallback[IsWrite][1][AccessSizeIndex], {Addr, ExpVal},
+          getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
   }
 
   Call->setCannotMerge();
@@ -1715,8 +1766,12 @@ Instruction *AddressSanitizer::instrumentAMDGPUAddress(
     return InsertBefore;
   // Instrument generic addresses in supported addressspaces.
   IRBuilder<> IRB(InsertBefore);
-  Value *IsShared = IRB.CreateCall(AMDGPUAddressShared, {Addr});
-  Value *IsPrivate = IRB.CreateCall(AMDGPUAddressPrivate, {Addr});
+  Value *IsShared =
+      IRB.CreateCall(AMDGPUAddressShared, {Addr},
+                     getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
+  Value *IsPrivate =
+      IRB.CreateCall(AMDGPUAddressPrivate, {Addr},
+                     getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
   Value *IsSharedOrPrivate = IRB.CreateOr(IsShared, IsPrivate);
   Value *Cmp = IRB.CreateNot(IsSharedOrPrivate);
   Value *AddrSpaceZeroLanding =
@@ -1732,7 +1787,8 @@ Instruction *AddressSanitizer::genAMDGPUReportBlock(IRBuilder<> &IRB,
   if (!Recover) {
     auto Ballot = M.getOrInsertFunction(kAMDGPUBallotName, IRB.getInt64Ty(),
                                         IRB.getInt1Ty());
-    ReportCond = IRB.CreateIsNotNull(IRB.CreateCall(Ballot, {Cond}));
+    ReportCond = IRB.CreateIsNotNull(IRB.CreateCall(
+        Ballot, {Cond}, getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors)));
   }
 
   auto *Trm =
@@ -1746,7 +1802,8 @@ Instruction *AddressSanitizer::genAMDGPUReportBlock(IRBuilder<> &IRB,
   Trm = SplitBlockAndInsertIfThen(Cond, Trm, false);
   IRB.SetInsertPoint(Trm);
   return IRB.CreateCall(
-      M.getOrInsertFunction(kAMDGPUUnreachableName, IRB.getVoidTy()), {});
+      M.getOrInsertFunction(kAMDGPUUnreachableName, IRB.getVoidTy()), {},
+      getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
 }
 
 void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
@@ -1780,10 +1837,12 @@ void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
   if (UseCalls) {
     if (Exp == 0)
       IRB.CreateCall(AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex],
-                     AddrLong);
+                     AddrLong,
+                     getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
     else
       IRB.CreateCall(AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
-                     {AddrLong, ConstantInt::get(IRB.getInt32Ty(), Exp)});
+                     {AddrLong, ConstantInt::get(IRB.getInt32Ty(), Exp)},
+                     getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
     return;
   }
 
@@ -1851,10 +1910,12 @@ void AddressSanitizer::instrumentUnusualSizeOrAlignment(
   if (UseCalls) {
     if (Exp == 0)
       IRB.CreateCall(AsanMemoryAccessCallbackSized[IsWrite][0],
-                     {AddrLong, Size});
+                     {AddrLong, Size},
+                     getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
     else
       IRB.CreateCall(AsanMemoryAccessCallbackSized[IsWrite][1],
-                     {AddrLong, Size, ConstantInt::get(IRB.getInt32Ty(), Exp)});
+                     {AddrLong, Size, ConstantInt::get(IRB.getInt32Ty(), Exp)},
+                     getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
   } else {
     Value *SizeMinusOne = IRB.CreateSub(Size, ConstantInt::get(IntptrTy, 1));
     Value *LastByte = IRB.CreateIntToPtr(
@@ -2861,6 +2922,12 @@ bool AddressSanitizer::instrumentFunction(Function &F,
   if (!ClDebugFunc.empty() && ClDebugFunc == F.getName()) return false;
   if (F.getName().starts_with("__asan_")) return false;
 
+  if (F.hasPersonalityFn() &&
+      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
+    BlockColors = colorEHFunclets(F);
+  else
+    BlockColors.clear();
+
   bool FunctionModified = false;
 
   // If needed, insert __asan_init before checking for SanitizeAddress attr.
@@ -2892,7 +2959,7 @@ bool AddressSanitizer::instrumentFunction(Function &F,
   SmallPtrSet<Value *, 16> TempsToInstrument;
   SmallVector<InterestingMemoryOperand, 16> OperandsToInstrument;
   SmallVector<MemIntrinsic *, 16> IntrinToInstrument;
-  SmallVector<Instruction *, 8> NoReturnCalls;
+  SmallVector<CallBase *, 8> NoReturnCalls;
   SmallVector<BasicBlock *, 16> AllBlocks;
   SmallVector<Instruction *, 8> PointerComparisonsOrSubtracts;
 
@@ -2979,7 +3046,11 @@ bool AddressSanitizer::instrumentFunction(Function &F,
   // See e.g. https://github.com/google/sanitizers/issues/37
   for (auto *CI : NoReturnCalls) {
     IRBuilder<> IRB(CI);
-    IRB.CreateCall(AsanHandleNoReturnFunc, {});
+    auto Bundle = CI->getOperandBundle(LLVMContext::OB_funclet);
+    if (Bundle)
+      IRB.CreateCall(AsanHandleNoReturnFunc, {}, OperandBundleDef(*Bundle));
+    else
+      IRB.CreateCall(AsanHandleNoReturnFunc, {});
   }
 
   for (auto *Inst : PointerComparisonsOrSubtracts) {
@@ -3130,7 +3201,8 @@ void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
       copyToShadowInline(ShadowMask, ShadowBytes, Done, i, IRB, ShadowBase);
       IRB.CreateCall(AsanSetShadowFunc[Val],
                      {IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
-                      ConstantInt::get(IntptrTy, j - i)});
+                      ConstantInt::get(IntptrTy, j - i)},
+                     getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
       Done = j;
     }
   }
@@ -3416,9 +3488,10 @@ void FunctionStackPoisoner::processStaticAllocas() {
     IRBuilder<> IRBIf(Term);
     StackMallocIdx = StackMallocSizeClass(LocalStackSize);
     assert(StackMallocIdx <= kMaxAsanStackMallocSizeClass);
-    Value *FakeStackValue =
-        IRBIf.CreateCall(AsanStackMallocFunc[StackMallocIdx],
-                         ConstantInt::get(IntptrTy, LocalStackSize));
+    Value *FakeStackValue = IRBIf.CreateCall(
+        AsanStackMallocFunc[StackMallocIdx],
+        ConstantInt::get(IntptrTy, LocalStackSize),
+        getEHFuncletBundle(IRBIf.GetInsertBlock(), BlockColors));
     IRB.SetInsertPoint(InsBefore);
     FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
                           ConstantInt::get(IntptrTy, 0));
@@ -3428,8 +3501,10 @@ void FunctionStackPoisoner::processStaticAllocas() {
     // void *LocalStackBase = (FakeStack) ? FakeStack :
     //                        alloca(LocalStackSize);
     StackMallocIdx = StackMallocSizeClass(LocalStackSize);
-    FakeStack = IRB.CreateCall(AsanStackMallocFunc[StackMallocIdx],
-                               ConstantInt::get(IntptrTy, LocalStackSize));
+    FakeStack =
+        IRB.CreateCall(AsanStackMallocFunc[StackMallocIdx],
+                       ConstantInt::get(IntptrTy, LocalStackSize),
+                       getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
   }
   Value *NoFakeStack =
       IRB.CreateICmpEQ(FakeStack, Constant::getNullValue(IntptrTy));
@@ -3565,7 +3640,8 @@ void FunctionStackPoisoner::processStaticAllocas() {
       // For larger frames call __asan_stack_free_*.
      IRBPoison.CreateCall(
           AsanStackFreeFunc[StackMallocIdx],
-          {FakeStack, ConstantInt::get(IntptrTy, LocalStackSize)});
+          {FakeStack, ConstantInt::get(IntptrTy, LocalStackSize)},
+          getEHFuncletBundle(IRBPoison.GetInsertBlock(), BlockColors));
     }
 
     IRBuilder<> IRBElse(ElseTerm);
@@ -3585,9 +3661,10 @@ void FunctionStackPoisoner::poisonAlloca(Value *V, uint64_t Size,
   // For now just insert the call to ASan runtime.
   Value *AddrArg = IRB.CreatePointerCast(V, IntptrTy);
   Value *SizeArg = ConstantInt::get(IntptrTy, Size);
-  IRB.CreateCall(
-      DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
-      {AddrArg, SizeArg});
+  IRB.CreateCall(DoPoison ? AsanPoisonStackMemoryFunc
+                          : AsanUnpoisonStackMemoryFunc,
+                 {AddrArg, SizeArg},
+                 getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
 }
 
 // Handling llvm.lifetime intrinsics for a given %alloca:
@@ -3647,7 +3724,8 @@ void FunctionStackPoisoner::handleDynamicAllocaCall(AllocaInst *AI) {
                     ConstantInt::get(IntptrTy, Alignment.value()));
 
   // Insert __asan_alloca_poison call for new created alloca.
-  IRB.CreateCall(AsanAllocaPoisonFunc, {NewAddress, OldSize});
+  IRB.CreateCall(AsanAllocaPoisonFunc, {NewAddress, OldSize},
+                 getEHFuncletBundle(IRB.GetInsertBlock(), BlockColors));
 
   // Store the last alloca's address to DynamicAllocaLayout. We'll need this
   // for unpoisoning stuff.
diff --git a/llvm/lib/Transforms/Instrumentation/PGOInstrumentation.cpp b/llvm/lib/Transforms/Instrumentation/PGOInstrumentation.cpp
index c20fc942eaf0d..d361e062cc151 100644
--- a/llvm/lib/Transforms/Instrumentation/PGOInstrumentation.cpp
+++ b/llvm/lib/Transforms/Instrumentation/PGOInstrumentation.cpp
@@ -920,7 +920,7 @@ static void instrumentOneFunc(
   // on the instrumentation call based on the funclet coloring.
   DenseMap<BasicBlock *, ColorVector> BlockColors;
   if (F.hasPersonalityFn() &&
-      isFuncletEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
+      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
     BlockColors = colorEHFunclets(F);
 
   // For each VP Kind, walk the VP candidates and instrument each one.
diff --git a/llvm/test/Instrumentation/AddressSanitizer/funclet.ll b/llvm/test/Instrumentation/AddressSanitizer/funclet.ll
new file mode 100644
index 0000000000000..773b54e55c10e
--- /dev/null
+++ b/llvm/test/Instrumentation/AddressSanitizer/funclet.ll
@@ -0,0 +1,75 @@
+; RUN: opt < %s -passes=asan -S | FileCheck %s
+
+target datalayout = "e-m:e-p:32:32-i64:64-n32:64-S128"
+target triple = "wasm32-unknown-unknown"
+
+define void @test(ptr %p) sanitize_address personality ptr @__gxx_wasm_personality_v0 {
+entry:
+  invoke void @foo()
+          to label %try.cont unwind label %catch.dispatch
+
+catch.dispatch:                                   ; preds = %entry
+  %0 = catchswitch within none [label %catch.start] unwind to caller
+
+catch.start:                                      ; preds = %catch.dispatch
+  %1 = catchpad within %0 [ptr null]
+; CHECK: catch.start:
+; CHECK: %[[CATCHPAD0:.*]] = catchpad
+  %2 = call ptr @llvm.wasm.get.exception(token %1)
+  %3 = call i32 @llvm.wasm.get.ehselector(token %1)
+  %4 = call ptr @__cxa_begin_catch(ptr %2) #2 [ "funclet"(token %1) ]
+  %5 = load i32, ptr %p, align 4
+; This __asan_report_load4 is generated within a newly created BB, but it
+; has the correct "funclet" op bundle.
+; CHECK: {{.*}}:
+; CHECK: call void @__asan_report_load4(i32 %{{.*}}) {{.*}} [ "funclet"(token %[[CATCHPAD0]]) ]
+  invoke void @foo() [ "funclet"(token %1) ]
+          to label %invoke.cont1 unwind label %ehcleanup
+
+invoke.cont1:                                     ; preds = %catch.start
+  call void @__cxa_end_catch() [ "funclet"(token %1) ]
+  catchret from %1 to label %try.cont
+
+try.cont:                                         ; preds = %entry, %invoke.cont1
+  ret void
+
+ehcleanup:                                        ; preds = %catch.start
+  %6 = cleanuppad within %1 []
+; CHECK: ehcleanup:
+; CHECK: %[[CLEANUPPAD0:.*]] = cleanuppad
+  store i32 42, ptr %p, align 4
+; This __asan_report_store4 is generated within a newly created BB, but it
+; has the correct "funclet" op bundle.
+; CHECK: {{.*}}:
+; CHECK: call void @__asan_report_store4(i32 %{{.*}}) {{.*}} [ "funclet"(token %[[CLEANUPPAD0]]) ]
+  invoke void @__cxa_end_catch() [ "funclet"(token %6) ]
+          to label %invoke.cont2 unwind label %terminate
+
+invoke.cont2:                                     ; preds = %ehcleanup
+  cleanupret from %6 unwind to caller
+
+terminate:                                        ; preds = %ehcleanup
+  %7 = cleanuppad within %6 []
+; CHECK: terminate:
+; CHECK: %[[CLEANUPPAD1:.*]] = cleanuppad
+  call void @_ZSt9terminatev() #3 [ "funclet"(token %7) ]
+; CHECK: call void @__asan_handle_no_return() [ "funclet"(token %[[CLEANUPPAD1]]) ]
+  unreachable
+}
+
+declare void @foo()
+declare i32 @__gxx_wasm_personality_v0(...)
+; Function Attrs: nocallback nofree nosync nounwind willreturn
+declare ptr @llvm.wasm.get.exception(token) #0
+; Function Attrs: nocallback nofree nosync nounwind willreturn
+declare i32 @llvm.wasm.get.ehselector(token) #0
+; Function Attrs: nounwind memory(none)
+declare i32 @llvm.eh.typeid.for(ptr) #1
+declare ptr @__cxa_begin_catch(ptr)
+declare void @__cxa_end_catch()
+declare void @_ZSt9terminatev()
+
+attributes #0 = { nocallback nofree nosync nounwind willreturn }
+attributes #1 = { nounwind memory(none) }
+attributes #2 = { nounwind }
+attributes #3 = { noreturn nounwind }