Commit 505833f: fix leak
Tabrizian committed May 15, 2023 (1 parent: 4884105)
Showing 1 changed file with 5 additions and 5 deletions.

src/request_executor.cc (5 additions, 5 deletions)
@@ -77,8 +77,8 @@ void
 InferResponseComplete(
     TRITONSERVER_InferenceResponse* response, const uint32_t flags, void* userp)
 {
-  auto infer_payload =
-      *(reinterpret_cast<std::shared_ptr<InferPayload>*>(userp));
+  auto linfer_payload = reinterpret_cast<std::shared_ptr<InferPayload>*>(userp);
+  std::unique_ptr<std::shared_ptr<InferPayload>> infer_payload(linfer_payload);
   std::unique_ptr<InferResponse> infer_response;
   std::vector<std::shared_ptr<PbTensor>> output_tensors;
   std::shared_ptr<PbError> pb_error;
@@ -147,7 +147,7 @@ InferResponseComplete(
     output_tensors.clear();
   }

-  if (!infer_payload->IsDecoupled()) {
+  if (!(*infer_payload)->IsDecoupled()) {
     infer_response = std::make_unique<InferResponse>(
         output_tensors, pb_error, true /* is_last_response */);
   } else {
@@ -168,7 +168,7 @@
         TRITONSERVER_InferenceResponseDelete(response),
         "Failed to release BLS inference response.");
   } else if (
-      infer_payload->IsDecoupled() &&
+      (*infer_payload)->IsDecoupled() &&
       (flags & TRITONSERVER_RESPONSE_COMPLETE_FINAL) != 0) {
     // An empty response may be the last response for decoupled models.
     infer_response = std::make_unique<InferResponse>(
@@ -179,7 +179,7 @@
         output_tensors, pb_error, true /* is_last_response */, userp /* id */);
   }

-  infer_payload->SetValue(std::move(infer_response));
+  (*infer_payload)->SetValue(std::move(infer_response));
 }

 TRITONSERVER_Error*
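Editor's note on the fix (a sketch, not part of the commit): the response-complete callback receives its state only through the void* userp visible in the signature above, so the caller heap-allocates a std::shared_ptr<InferPayload> copy and passes its address. Before this change, InferResponseComplete copied the shared_ptr out of that wrapper but never freed the wrapper itself, leaking one heap-allocated shared_ptr (and pinning a payload reference) per response. Adopting the raw pointer into a std::unique_ptr deletes the wrapper on every return path. A minimal, self-contained illustration of the pattern, using a stand-in InferPayload struct rather than Triton's types:

// leak_fix_sketch.cc -- illustrative only; stand-in types, not Triton's.
#include <iostream>
#include <memory>

struct InferPayload {
  bool decoupled = false;
  bool IsDecoupled() const { return decoupled; }
};

// A C-style callback can only carry state through a void*.
void InferResponseComplete(void* userp) {
  // Before the fix (leaks the heap-allocated wrapper):
  //   auto infer_payload =
  //       *(reinterpret_cast<std::shared_ptr<InferPayload>*>(userp));
  //
  // After the fix: adopt the wrapper into a unique_ptr so it is deleted
  // when the callback returns, dropping its shared_ptr reference.
  std::unique_ptr<std::shared_ptr<InferPayload>> infer_payload(
      reinterpret_cast<std::shared_ptr<InferPayload>*>(userp));
  std::cout << "decoupled: " << (*infer_payload)->IsDecoupled() << "\n";
}  // wrapper (and the reference it held) released here

int main() {
  auto payload = std::make_shared<InferPayload>();
  // Producer side: heap-allocate a shared_ptr copy to smuggle through void*.
  void* userp = new std::shared_ptr<InferPayload>(payload);
  InferResponseComplete(userp);
  // With the fix, only the local reference remains (use_count == 1);
  // without it, the leaked wrapper would hold use_count at 2 forever.
  std::cout << "use_count: " << payload.use_count() << "\n";
  return 0;
}

A plain delete at the end of the callback would close the same leak, but the unique_ptr releases the wrapper on every exit path, which is why the commit takes that route.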
