Description
I am using the IPluginV2DynamicExt interface to create a custom TensorRT plugin. I have generated the dynamic library libyolo_plugin.so. When serializing the model, I can load the plugin successfully. However, when deserializing the engine, it shows an error message indicating that the plugin is not registered.
Environment
TensorRT Version: 8.2
GPU Type: Jetson gpu
Nvidia Driver Version:
CUDA Version: 10.2
CUDNN Version: 8.2
Operating System + Version: Jetpack 4.6.2 Ubuntu 18.04
Relevant Files
When deserializing, using the getPluginCreator() function can find the plugin, but deserializeCudaEngine() still prompts that the plugin is not registered.
CODE:
const char* pluginName = "YoloLayer_TRT";
const char* pluginVersion = "1";
nvinfer1::IPluginCreator* pluginCreator = pluginRegistry->getPluginCreator(pluginName, pluginVersion);
if (pluginCreator)
{
// Plugin found
std::cout << "Plugin Found: " << pluginName << ", VERSION: " << pluginVersion << std::endl;
}
else
{
// Plugin not found
std::cout << "Plugin Not Found" << std::endl;
}
//////////////////////////////////
auto engine_file = argv[1]; // model file (serialized TensorRT engine path)
auto input_video_path = argv[2]; // video file
// Registers NVIDIA's built-in plugins; NOTE(review): this does NOT register
// custom plugins from libyolo_plugin.so — those rely on
// REGISTER_TENSORRT_PLUGIN running when the shared library is loaded.
initLibNvInferPlugins(&sample::gLogger.getTRTLogger(), "");
// Create inference runtime
auto runtime = std::unique_ptr<nvinfer1::IRuntime>(nvinfer1::createInferRuntime(sample::gLogger.getTRTLogger()));
if (!runtime)
{
std::cout << "runtime create failed" << std::endl;
return -1;
}
// Deserialization generates engine
// Load Model File (reads the engine plan into a byte buffer)
auto plan = load_engine_file(engine_file);
// Deserialization generates engine; fails here if a plugin serialized in the
// plan has no matching IPluginCreator (name + version) in the registry.
auto mEngine = std::shared_ptr<nvinfer1::ICudaEngine>(runtime->deserializeCudaEngine(plan.data(), plan.size()));
if (!mEngine)
{
return -1;
}
output
Plugin Found: YoloLayer_TRT, VERSION: 1
[11/13/2023-08:49:37] [I] [TRT] [MemUsageChange] Init CUDA: CPU +363, GPU +0, now: CPU 384, GPU 7011 (MiB)
[11/13/2023-08:49:37] [I] [TRT] Loaded engine size: 17 MiB
[11/13/2023-08:49:37] [E] [TRT] 1: [pluginV2Runner.cpp::load::290] Error Code 1: Serialization (Serialization assertion creator failed.Cannot deserialize plugin since corresponding IPluginCreator not found in Plugin Registry)
[11/13/2023-08:49:37] [E] [TRT] 4: [runtime.cpp::deserializeCudaEngine::50] Error Code 4: Internal Error (Engine deserialization failed.)
Plugins can be found during serialization
[11/13/2023-09:33:16] [I] [TRT] ONNX IR version: 0.0.9
[11/13/2023-09:33:16] [I] [TRT] Opset version: 13
[11/13/2023-09:33:16] [I] [TRT] Producer name: pytorch
[11/13/2023-09:33:16] [I] [TRT] Producer version: 1.10
[11/13/2023-09:33:16] [I] [TRT] Domain:
[11/13/2023-09:33:16] [I] [TRT] Model version: 0
[11/13/2023-09:33:16] [I] [TRT] Doc string:
[11/13/2023-09:33:16] [I] [TRT] ----------------------------------------------------------------
[11/13/2023-09:33:16] [I] [TRT] No importer registered for op: YoloLayer_TRT. Attempting to import as plugin.
[11/13/2023-09:33:16] [I] [TRT] Searching for plugin: YoloLayer_TRT, plugin_version: 1, plugin_namespace:
[11/13/2023-09:33:16] [I] [TRT] Successfully created plugin: YoloLayer_TRT
[11/13/2023-09:33:17] [I] [TRT] ---------- Layers Running on DLA ----------
[11/13/2023-09:33:17] [I] [TRT] ---------- Layers Running on GPU ----------
When I use "trtexec --plugins=./libyolo_plugin.so --loadEngine=../weights/model.engine", the same problem occurs
[11/13/2023-09:26:00] [I] TensorRT version: 8.2.1
[11/13/2023-09:26:00] [I] Loading supplied plugin library: ./libyolo_plugin.so
[11/13/2023-09:26:02] [I] [TRT] [MemUsageChange] Init CUDA: CPU +362, GPU +0, now: CPU 398, GPU 7258 (MiB)
[11/13/2023-09:26:02] [I] [TRT] Loaded engine size: 17 MiB
[11/13/2023-09:26:02] [E] Error[1]: [pluginV2Runner.cpp::load::290] Error Code 1: Serialization (Serialization assertion creator failed.Cannot deserialize plugin since corresponding IPluginCreator not found in Plugin Registry)
[11/13/2023-09:26:02] [E] Error[4]: [runtime.cpp::deserializeCudaEngine::50] Error Code 4: Internal Error (Engine deserialization failed.)
[11/13/2023-09:26:02] [E] Failed to create engine from model.
[11/13/2023-09:26:02] [E] Engine set up failed
Plugin Definition:
// Dynamic-shape YOLO decode layer implemented as a TensorRT
// IPluginV2DynamicExt plugin.
//
// NOTE: TensorRT serializes the string returned by getPluginType() into
// the engine plan and, during deserializeCudaEngine(), looks up an
// IPluginCreator whose getPluginName() equals that string. The two
// values MUST be identical; returning kNMS_PLUGIN_NAMES[1] here while
// the creator returns kNMS_PLUGIN_NAMES[0] is what produced
// "Cannot deserialize plugin since corresponding IPluginCreator not
// found in Plugin Registry".
class YoloDecodePlugin : public nvinfer1::IPluginV2DynamicExt
{
public:
    // Build-time constructor: decode parameters plus anchor values.
    YoloDecodePlugin(const YoloDecodeParam &param, const std::vector<float>& anchors);
    // Deserialization constructor: reconstructs state from a serialized blob.
    YoloDecodePlugin(void const* data, size_t length);
    ~YoloDecodePlugin() override = default;

    // IPluginV2 methods
    // FIX: was kNMS_PLUGIN_NAMES[1]; must match the creator's
    // getPluginName(), which returns kNMS_PLUGIN_NAMES[0].
    char const* getPluginType() const noexcept override { return kNMS_PLUGIN_NAMES[0]; }
    char const* getPluginVersion() const noexcept override { return kNMS_PLUGIN_VERSION; }
    // The decode step emits four output tensors.
    int32_t getNbOutputs() const noexcept override { return 4; }
    int32_t initialize() noexcept override;
    void terminate() noexcept override {}
    size_t getSerializationSize() const noexcept override;
    void serialize(void* buffer) const noexcept override;
    void destroy() noexcept override;
    void setPluginNamespace(const char *libNamespace) noexcept override;
    const char *getPluginNamespace() const noexcept override;

    // IPluginV2Ext methods
    nvinfer1::DataType getOutputDataType(
        int32_t index, nvinfer1::DataType const* inputType, int32_t nbInputs) const noexcept override;

    // IPluginV2DynamicExt methods
    IPluginV2DynamicExt* clone() const noexcept override;
    nvinfer1::DimsExprs getOutputDimensions(
        int32_t outputIndex, nvinfer1::DimsExprs const* inputs, int32_t nbInputs, nvinfer1::IExprBuilder& exprBuilder) noexcept override;
    bool supportsFormatCombination(
        int32_t pos, nvinfer1::PluginTensorDesc const* inOut, int32_t nbInputs, int32_t nbOutputs) noexcept override;
    void configurePlugin(nvinfer1::DynamicPluginTensorDesc const* in, int32_t nbInputs, nvinfer1::DynamicPluginTensorDesc const* out,
        int32_t nbOutputs) noexcept override;
    // No scratch memory needed by this plugin.
    size_t getWorkspaceSize(nvinfer1::PluginTensorDesc const* inputs, int32_t nbInputs, nvinfer1::PluginTensorDesc const* outputs,
        int32_t nbOutputs) const noexcept override { return 0; }
    int32_t enqueue(const PluginTensorDesc* inputDesc, const PluginTensorDesc* outputDesc,
        const void* const* inputs, void* const* outputs, void* workspace, cudaStream_t stream) noexcept override;

private:
    YoloDecodeParam m_Param{};       // decode configuration
    std::vector<float> m_Anchors{};  // anchor box values
    pluginStatus_t m_PluginStatus{}; // status tracked by plugin operations
    std::string mNamespace;          // namespace set via setPluginNamespace()
};
// Factory for the YOLO decode plugin. The plugin registry matches a
// serialized engine's plugin entries against getPluginName() +
// getPluginVersion(), so getPluginName() must return exactly the same
// string as the plugin's getPluginType().
class YoloDecodeDynamicPluginCreator : public nvinfer1::IPluginCreator
{
public:
YoloDecodeDynamicPluginCreator() noexcept;
~YoloDecodeDynamicPluginCreator() noexcept {}
// Name and version used for registry lookup (build and deserialization).
const char *getPluginName() const noexcept override { return kNMS_PLUGIN_NAMES[0]; }
const char *getPluginVersion() const noexcept override { return kNMS_PLUGIN_VERSION; }
// Invoked at network-build time (e.g. by the ONNX parser) to create a fresh plugin.
nvinfer1::IPluginV2DynamicExt *createPlugin(const char *name, const nvinfer1::PluginFieldCollection *fc) noexcept override;
// Invoked by the runtime to rebuild the plugin from an engine's serialized bytes.
nvinfer1::IPluginV2DynamicExt *deserializePlugin(const char *name, const void *serialData, size_t serialLength) noexcept override;
void setPluginNamespace(const char *libNamespace) noexcept override;
const char *getPluginNamespace() const noexcept override;
const nvinfer1::PluginFieldCollection *getFieldNames() noexcept override;
private:
// Static field collection describing the attributes accepted by createPlugin().
static nvinfer1::PluginFieldCollection mFC;
static std::vector<nvinfer1::PluginField> mPluginAttributes;
std::string mNamespace;
};
// Registers the creator with the global plugin registry when the shared
// library is loaded, making the plugin discoverable at deserialization time.
REGISTER_TENSORRT_PLUGIN(YoloDecodeDynamicPluginCreator);
Hi @a214451293 ,
Can you please try trtexec --loadEngine=yolov5.plan --plugins=myCustomplugin.so
and share the output?
Thanks
Thanks for your reply. I think I have identified the problem: the plugin type and the plugin name were inconsistent. TensorRT records the value of getPluginType() in the serialized engine and uses it to look up a creator by getPluginName() during deserialization, so when getPluginType() and getPluginName() return the same value, the custom plugin can be found.
char const* getPluginType() const noexcept override { return kNMS_PLUGIN_NAMES[0]; }
const char *getPluginName() const noexcept override { return kNMS_PLUGIN_NAMES[0]; }