diff --git a/llvm/docs/LangRef.rst b/llvm/docs/LangRef.rst
--- a/llvm/docs/LangRef.rst
+++ b/llvm/docs/LangRef.rst
@@ -12103,6 +12103,65 @@
 preallocated call corresponding to the '``llvm.call.preallocated.setup``'
 has already been called.
 
+.. _int_call_preallocated_teardown:
+
+'``llvm.call.preallocated.teardown``' Intrinsic
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Syntax:
+"""""""
+
+::
+
+      declare void @llvm.call.preallocated.teardown(token %setup_token)
+
+Overview:
+"""""""""
+
+The '``llvm.call.preallocated.teardown``' intrinsic cleans up the stack
+space allocated by a '``llvm.call.preallocated.setup``'.
+
+Semantics:
+""""""""""
+
+The token argument must be a '``llvm.call.preallocated.setup``'.
+
+The '``llvm.call.preallocated.teardown``' intrinsic cleans up the stack
+space allocated by the corresponding '``llvm.call.preallocated.setup``'.
+Exactly one of this intrinsic or the preallocated call must be called to
+prevent stack leaks. It is undefined behavior to call both a
+'``llvm.call.preallocated.teardown``' and the preallocated call for a
+given '``llvm.call.preallocated.setup``'.
+
+For example, if the stack is allocated for a preallocated call by a
+'``llvm.call.preallocated.setup``', and an initializer function called on
+an allocated argument throws an exception, there should be a
+'``llvm.call.preallocated.teardown``' in the exception handler to prevent
+stack leaks.
+
+Following the nesting rules in '``llvm.call.preallocated.setup``', nested
+calls to '``llvm.call.preallocated.setup``' and
+'``llvm.call.preallocated.teardown``' are allowed but must be properly
+nested.
+
+Example:
+""""""""
+
+.. code-block:: llvm
+
+      %cs = call token @llvm.call.preallocated.setup(i32 1)
+      %x = call i8* @llvm.call.preallocated.arg(token %cs, i32 0) preallocated(i32)
+      %y = bitcast i8* %x to i32*
+      invoke void @constructor(i32* %y) to label %conta unwind label %contb
+  conta:
+      call void @foo1(i32* preallocated(i32) %y) ["preallocated"(token %cs)]
+      ret void
+  contb:
+      %s = catchswitch within none [label %catch] unwind to caller
+  catch:
+      %p = catchpad within %s []
+      call void @llvm.call.preallocated.teardown(token %cs)
+      ret void
+
 Standard C Library Intrinsics
 -----------------------------
 
diff --git a/llvm/include/llvm/IR/Intrinsics.td b/llvm/include/llvm/IR/Intrinsics.td
--- a/llvm/include/llvm/IR/Intrinsics.td
+++ b/llvm/include/llvm/IR/Intrinsics.td
@@ -534,6 +534,7 @@
 
 def int_call_preallocated_setup : Intrinsic<[llvm_token_ty], [llvm_i32_ty]>;
 def int_call_preallocated_arg : Intrinsic<[llvm_ptr_ty], [llvm_token_ty, llvm_i32_ty]>;
+def int_call_preallocated_teardown : Intrinsic<[], [llvm_token_ty]>;
 
 //===------------------- Standard C Library Intrinsics --------------------===//
 //
diff --git a/llvm/lib/IR/Verifier.cpp b/llvm/lib/IR/Verifier.cpp
--- a/llvm/lib/IR/Verifier.cpp
+++ b/llvm/lib/IR/Verifier.cpp
@@ -4566,6 +4566,9 @@
                "llvm.call.preallocated.alloc arg index must be between 0 and "
                "corresponding "
                "llvm.call.preallocated.setup's argument count");
+      } else if (Fn && Fn->getIntrinsicID() ==
+                           Intrinsic::call_preallocated_teardown) {
+        // nothing to do
       } else {
         Assert(!FoundCall, "Can have at most one call corresponding to a "
                            "llvm.call.preallocated.setup");
@@ -4614,6 +4617,14 @@
            "call site attribute");
     break;
   }
+  case Intrinsic::call_preallocated_teardown: {
+    auto *Token = dyn_cast<CallBase>(Call.getArgOperand(0));
+    Assert(Token && Token->getCalledFunction()->getIntrinsicID() ==
+                        Intrinsic::call_preallocated_setup,
+           "llvm.call.preallocated.teardown token argument must be a "
+           "llvm.call.preallocated.setup");
+    break;
+  }
   case Intrinsic::gcroot:
   case Intrinsic::gcwrite:
   case Intrinsic::gcread:
diff --git a/llvm/test/Verifier/preallocated-invalid.ll b/llvm/test/Verifier/preallocated-invalid.ll
--- a/llvm/test/Verifier/preallocated-invalid.ll
+++ b/llvm/test/Verifier/preallocated-invalid.ll
@@ -2,6 +2,7 @@
 
 declare token @llvm.call.preallocated.setup(i32)
 declare i8* @llvm.call.preallocated.arg(token, i32)
+declare void @llvm.call.preallocated.teardown(token)
 
 ; Fake LLVM intrinsic to return a token
 declare token @llvm.what()
@@ -136,3 +137,10 @@
   musttail call void @musttail_and_bundle(i32* %a)
   ret void
 }
+
+; CHECK: token argument must be a llvm.call.preallocated.setup
+define void @teardown_token_not_from_setup() {
+  %cs = call token @llvm.what()
+  call void @llvm.call.preallocated.teardown(token %cs)
+  ret void
+}
diff --git a/llvm/test/Verifier/preallocated-valid.ll b/llvm/test/Verifier/preallocated-valid.ll
--- a/llvm/test/Verifier/preallocated-valid.ll
+++ b/llvm/test/Verifier/preallocated-valid.ll
@@ -2,11 +2,16 @@
 
 declare token @llvm.call.preallocated.setup(i32)
 declare i8* @llvm.call.preallocated.arg(token, i32)
+declare void @llvm.call.preallocated.teardown(token)
+
+declare i32 @__CxxFrameHandler3(...)
 
 declare void @foo1(i32* preallocated(i32))
 declare i64 @foo1_i64(i32* preallocated(i32))
 declare void @foo2(i32* preallocated(i32), i32*, i32* preallocated(i32))
 
+declare void @constructor(i32*)
+
 define void @preallocated() {
   %cs = call token @llvm.call.preallocated.setup(i32 1)
   %x = call i8* @llvm.call.preallocated.arg(token %cs, i32 0) preallocated(i32)
@@ -40,12 +45,34 @@
   ret void
 }
 
-define void @preallocate_musttail(i32* preallocated(i32) %a) {
+define void @preallocated_musttail(i32* preallocated(i32) %a) {
   musttail call void @foo1(i32* preallocated(i32) %a)
   ret void
 }
 
-define i64 @preallocate_musttail_i64(i32* preallocated(i32) %a) {
+define i64 @preallocated_musttail_i64(i32* preallocated(i32) %a) {
   %r = musttail call i64 @foo1_i64(i32* preallocated(i32) %a)
   ret i64 %r
 }
+
+define void @preallocated_teardown() {
+  %cs = call token @llvm.call.preallocated.setup(i32 1)
+  call void @llvm.call.preallocated.teardown(token %cs)
+  ret void
+}
+
+define void @preallocated_teardown_invoke() personality i8* bitcast (i32 (...)* @__CxxFrameHandler3 to i8*) {
+  %cs = call token @llvm.call.preallocated.setup(i32 1)
+  %x = call i8* @llvm.call.preallocated.arg(token %cs, i32 0) preallocated(i32)
+  %y = bitcast i8* %x to i32*
+  invoke void @constructor(i32* %y) to label %conta unwind label %contb
+conta:
+  call void @foo1(i32* preallocated(i32) %y) ["preallocated"(token %cs)]
+  ret void
+contb:
+  %s = catchswitch within none [label %catch] unwind to caller
+catch:
+  %p = catchpad within %s []
+  call void @llvm.call.preallocated.teardown(token %cs)
+  ret void
+}