From fbcab58b075899d9c6df3a0a123c16b0b8a548be Mon Sep 17 00:00:00 2001
From: Vipin Sharma
Date: Mon, 9 Jun 2025 10:46:23 +0400
Subject: [PATCH] Fix TensorRT runtime destruction

---
 .../include/TRT_InferenceEngine/TensorRT_InferenceEngine.h | 2 ++
 .../src/TRT_InferenceEngine/TensorRT_InferenceEngine.cpp   | 6 +++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/botsort/include/TRT_InferenceEngine/TensorRT_InferenceEngine.h b/botsort/include/TRT_InferenceEngine/TensorRT_InferenceEngine.h
index b11488f..f253a2f 100644
--- a/botsort/include/TRT_InferenceEngine/TensorRT_InferenceEngine.h
+++ b/botsort/include/TRT_InferenceEngine/TensorRT_InferenceEngine.h
@@ -98,6 +98,8 @@ class TensorRTInferenceEngine
     nvinfer1::ILogger::Severity _logSeverity =
             nvinfer1::ILogger::Severity::kWARNING;
     TRTOptimizerParams _optimization_params;
+    // Runtime must outlive the engine and execution context.
+    TRTUniquePtr<nvinfer1::IRuntime> _runtime{nullptr};
     TRTUniquePtr<nvinfer1::ICudaEngine> _engine{nullptr};
     TRTUniquePtr<nvinfer1::IExecutionContext> _context{nullptr};
     std::unique_ptr<nvinfer1::ILogger> _logger{nullptr};
diff --git a/botsort/src/TRT_InferenceEngine/TensorRT_InferenceEngine.cpp b/botsort/src/TRT_InferenceEngine/TensorRT_InferenceEngine.cpp
index 18f0b9b..fb8ba9a 100644
--- a/botsort/src/TRT_InferenceEngine/TensorRT_InferenceEngine.cpp
+++ b/botsort/src/TRT_InferenceEngine/TensorRT_InferenceEngine.cpp
@@ -312,9 +312,9 @@ bool inference_backend::TensorRTInferenceEngine::_deserialize_engine(
     engine_file.close();

     // Deserialize engine
-    std::unique_ptr<nvinfer1::IRuntime> runtime{
-            nvinfer1::createInferRuntime(*_logger)};
-    _engine = makeUnique(runtime->deserializeCudaEngine(
+    // Runtime must outlive the engine. Keep it as a member
+    _runtime = makeUnique(nvinfer1::createInferRuntime(*_logger));
+    _engine = makeUnique(_runtime->deserializeCudaEngine(
             trt_model_stream.data(), trt_model_stream.size()));
     if (!_engine)
     {