An assertion error occurs when building an engine with multiple optimization profiles for dynamic shapes. Building with a single profile works fine.
bool onnxToTRTModel(const std::string& modelFile, // name of the ONNX model
                    IHostMemory*& trtModelStream) // output buffer for the TensorRT model
{
    // Create the builder
    IBuilder* builder = createInferBuilder(gLogger.getTRTLogger());
    assert(builder != nullptr);

    nvinfer1::IBuilderConfig* config = builder->createBuilderConfig();
    config->setMaxWorkspaceSize(8000000000); // ~8 GB

    // Multiple optimization profiles for the dynamic input "data"
    IOptimizationProfile* profile = builder->createOptimizationProfile();
    profile->setDimensions("data", OptProfileSelector::kMIN, Dims4(1, 1, 32, 320));
    profile->setDimensions("data", OptProfileSelector::kOPT, Dims4(32, 1, 32, 640));
    profile->setDimensions("data", OptProfileSelector::kMAX, Dims4(32, 1, 32, 960));
    config->addOptimizationProfile(profile);

#if 1 // the build succeeds when this second profile is disabled
    IOptimizationProfile* profile1 = builder->createOptimizationProfile();
    profile1->setDimensions("data", OptProfileSelector::kMIN, Dims4(1, 1, 32, 960));
    profile1->setDimensions("data", OptProfileSelector::kOPT, Dims4(32, 1, 32, 1280));
    profile1->setDimensions("data", OptProfileSelector::kMAX, Dims4(32, 1, 32, 1600));
    config->addOptimizationProfile(profile1);
#endif

    // Parse the network with an explicit batch dimension
    const auto explicitBatch = 1U << static_cast<uint32_t>(NetworkDefinitionCreationFlag::kEXPLICIT_BATCH);
    nvinfer1::INetworkDefinition* network = builder->createNetworkV2(explicitBatch);
    auto parser = nvonnxparser::createParser(*network, gLogger.getTRTLogger());

    // Optional - uncomment the lines below to view network layer information
    //config->setPrintLayerInfo(true);
    //parser->reportParsingInfo();

    if (!parser->parseFromFile(locateFile(modelFile, gArgs.dataDirs).c_str(),
                               static_cast<int>(gLogger.getReportableSeverity())))
    {
        gLogError << "Failure while parsing ONNX file" << std::endl;
        return false;
    }

    // Build the engine
    ICudaEngine* engine = builder->buildEngineWithConfig(*network, *config);
    assert(engine);

    // We can destroy the parser now
    parser->destroy();

    // Serialize the engine, then close everything down
    trtModelStream = engine->serialize();
    engine->destroy();
    network->destroy();
    config->destroy();
    builder->destroy();
    return true;
}
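For reference, this is roughly how I intend to select between the two profiles at inference time once the engine builds. This is only a minimal sketch, assuming the engine has been deserialized into `engine` and the input tensor is still named "data"; the profile index and input shape below are just example values, not part of the failing build code above.

    // Sketch of per-profile selection at inference time (TensorRT 7):
    nvinfer1::IExecutionContext* context = engine->createExecutionContext();

    int profileIndex = 1; // e.g. the wider-width profile
    int bindingsPerProfile = engine->getNbBindings() / engine->getNbOptimizationProfiles();

    // The profile must be selected before setting binding dimensions.
    context->setOptimizationProfile(profileIndex);

    // With multiple profiles, each profile owns its own block of binding slots,
    // so the "data" binding for profile 1 is offset by bindingsPerProfile.
    int inputIndex = engine->getBindingIndex("data") + profileIndex * bindingsPerProfile;
    context->setBindingDimensions(inputIndex, nvinfer1::Dims4(8, 1, 32, 1280)); // actual shape for this batch

    // ... then enqueueV2(...) with device buffers laid out in the same per-profile order.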
The error messages are:
[02/28/2020-10:14:54] [F] [TRT] Assertion failed: (r->bindType == BindType::kNONE || r->bindType == BindType::kINPUTSUBGRAPH || r->bindType == BindType::kOUTPUTSUBGRAPH) && "should not be an nvm/input/output region at this point"
…/builder/tacticOptimizer.cpp:3323
Aborting...
[02/28/2020-10:14:54] [E] [TRT] …/builder/tacticOptimizer.cpp (3323) - Assertion Error in fixOutputIfConstantNode: 0 ((r->bindType == BindType::kNONE || r->bindType == BindType::kINPUTSUBGRAPH || r->bindType == BindType::kOUTPUTSUBGRAPH) && "should not be an nvm/input/output region at this point")
sample_onnx_mnist: sampleOnnxMNIST.cpp:148: bool onnxToTRTModel(const string&, nvinfer1::IHostMemory*&): Assertion `engine' failed.
Platform details:
- OS: Ubuntu 16.04.5 LTS
- GPU: Tesla M40 24GB
- NVIDIA driver: 418.39
- CUDA: 10.0
- cuDNN: 7.6.5
- TensorRT: 7.0.0.11
Any suggestions would be greatly appreciated.
Thanks.