Skip to content

Migrate apps from Tensorrt5.0 to TensorRT 8.0 #2358

@hitbuyi

Description

@hitbuyi

In TensorRT 5.0, there are two files:
NvOnnxParserRuntime.h
NvOnnxParserRuntime.cpp,

My application needs to add custom plugins; the code is as follows:
`
#include
#include
#include
#include
#include "NvInferPlugin.h"
#include "NvOnnxParser.h"
#include "ctdetConfig.h"
#include "utils.h"
#include "NvOnnxParserRuntime.h"

namespace ctdet
{
    // Numeric precision the network is built and executed with.
    enum class RUN_MODE
    {
        FLOAT32 = 0,  // full-precision FP32
        FLOAT16 = 1,  // half-precision FP16
        INT8    = 2   // quantized INT8 (needs a calibration file)
    };

    // Wraps a TensorRT engine for ctdet (CenterNet-style) inference:
    // builds or deserializes an engine, owns the CUDA stream and the
    // device-side input/output buffers, and runs inference.
    //
    // NOTE(review): this header targets TensorRT 5.x. When migrating to
    // TensorRT 8.x: NvOnnxParserRuntime.h and nvonnxparser::IPluginFactory
    // were removed (register plugins via REGISTER_TENSORRT_PLUGIN or
    // initLibNvInferPlugins instead), and the destroy() methods are
    // deprecated in favor of plain `delete` on the interface pointers.
    class ctdetNet
    {
    public:
        // Build an engine from an ONNX model. calibFile supplies INT8
        // calibration data and is only used when mode == RUN_MODE::INT8.
        ctdetNet(const std::string& onnxFile,
                 const std::string& calibFile,
                 RUN_MODE mode = RUN_MODE::FLOAT32);

        // Deserialize a previously serialized engine from disk.
        ctdetNet(const std::string& engineFile);

        ~ctdetNet(){
            // Drain in-flight work before tearing down resources.
            cudaStreamSynchronize(mCudaStream);
            cudaStreamDestroy(mCudaStream);
            for(auto& item : mCudaBuffers)
                cudaFree(item);
            cudaFree(cudaOutputBuffer);
            // BUG FIX: the original guards were inverted (`if(!ptr)`), so
            // destroy() was called only on null pointers (a crash) and every
            // valid object was leaked. Guard must be `if(ptr)`.
            if(mRunTime)
                mRunTime->destroy();
            if(mContext)
                mContext->destroy();
            if(mEngine)
                mEngine->destroy();
            if(mPlugins)
                mPlugins->destroy();
        }

        // Serialize the built engine to fileName for later reuse.
        void saveEngine(const std::string& fileName);

        // Run one inference pass: inputData is copied to the device,
        // outputData receives the host-side result.
        void doInference(const void* inputData, void* outputData);

        // Print per-layer timing accumulated over runIters inferences.
        void printTime()
        {
            mProfiler.printTime(runIters);
        }

        // Size in bytes of binding 0 (the network input).
        inline size_t getInputSize() {
            return mBindBufferSizes[0];
        }

        int64_t outputBufferSize;
        // bool forwardFace;
        int forwardFace;
    private:

        // Allocates bindings/stream and prepares mContext for inference.
        void InitEngine();

        nvinfer1::IExecutionContext* mContext;
        nvinfer1::ICudaEngine* mEngine;
        nvinfer1::IRuntime* mRunTime;

        RUN_MODE runMode;

        // Removed in TensorRT 8 — plugin creators are registered globally.
        nvonnxparser::IPluginFactory *mPlugins;
        std::vector<void*> mCudaBuffers;      // one device buffer per binding
        std::vector<int64_t> mBindBufferSizes; // byte size per binding
        void * cudaOutputBuffer;

        cudaStream_t mCudaStream;

        int runIters;       // number of inferences profiled so far
        Profiler mProfiler;
    };

}

`

If I want to migrate my app from TensorRT 5.0 to TensorRT 8.0.1.6, how should I deal with these two files? Should I simply remove them? If other files need to be included instead, which files should be included?

Metadata

Metadata

Assignees

Labels

triagedIssue has been triaged by maintainers

Type

No type

Projects

No projects

Milestone

No milestone

Relationships

None yet

Development

No branches or pull requests

Issue actions