Hi everyone!
I ran into the following problem after converting my .pb file to a .uff file and then running inference. My ssd_mobilenet_v2 was trained on the VOC dataset with an old version of the TensorFlow Object Detection API.
[TensorRT] INFO: Detected 1 input and 2 output network tensors.
python: nmsPlugin.cpp:140: virtual void nvinfer1::plugin::DetectionOutput::configureWithFormat(const nvinfer1::Dims*, int, const nvinfer1::Dims*, int, nvinfer1::DataType, nvinfer1::PluginFormat, int): Assertion `numPriors * param.numClasses == inputDims[param.inputOrder[1]].d[0]' failed.
Aborted (core dumped)
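If I read the assertion right, the plugin checks that the tensor it picks as the confidence input has numPriors * numClasses elements in its leading dimension. A rough sketch of that arithmetic (the 1917 prior count is my assumption for a 300x300 SSD-MobileNet; numClasses = 20 is the value from my config.py, though I am not sure whether the background class should make it 21):

# Rough sketch of the arithmetic I think the assertion is checking
# (assumptions: 1917 priors for a 300x300 SSD-MobileNet, 4 box coordinates).
num_priors = 1917
num_classes = 20   # value from my config.py; maybe this should be 21 with background?
box_coords = 4

expected_conf_dim = num_priors * num_classes   # what inputDims[inputOrder[1]].d[0] must equal
loc_dim = num_priors * box_coords              # leading dim of the flattened box locations

print(expected_conf_dim, loc_dim)              # 38340 vs 7668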
This is the NMS part of the .pbtxt file that was generated when I converted to .uff:
graphs {
  id: "main"
  nodes {
    id: "NMS"
    inputs: "Squeeze"
    inputs: "concat_priorbox"
    inputs: "concat_box_loc"
    operation: "_NMS_TRT"
    fields {
      key: "backgroundLabelId_u_int"
      value {
        i: 0
      }
    }
    fields {
      key: "confSigmoid_u_int"
      value {
        i: 1
      }
    }
    fields {
      key: "confidenceThreshold_u_float"
      value {
        d: 1e-08
      }
    }
    fields {
      key: "dtype"
      value {
        dtype: DT_FLOAT32
      }
    }
    fields {
      key: "inputOrder_u_ilist"
      value {
        i_list {
          val: 0
          val: 2
          val: 1
        }
      }
    }
    fields {
      key: "isNormalized_u_int"
      value {
        i: 1
      }
    }
    fields {
      key: "keepTopK_u_int"
      value {
        i: 100
      }
    }
    fields {
      key: "nmsThreshold_u_float"
      value {
        d: 0.6
      }
    }
    fields {
      key: "numClasses_u_int"
      value {
        i: 20
      }
    }
    fields {
      key: "shareLocation_u_int"
      value {
        i: 1
      }
    }
    fields {
      key: "topK_u_int"
      value {
        i: 100
      }
    }
    fields {
      key: "varianceEncodedInTarget_u_int"
      value {
        i: 0
      }
    }
  }
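As far as I understand, inputOrder is supposed to give the positions of the loc, conf and priorbox tensors among the node's inputs. With the inputs listed above and inputOrder=[0, 2, 1], index 2 would point at concat_box_loc rather than a confidence tensor, which might be why the dimension check on inputOrder[1] fails. A small sketch of that mapping (just my interpretation, not verified against the plugin source):

# My interpretation of how inputOrder = [loc, conf, priorbox] is resolved
# against the inputs listed in my .pbtxt (not verified against nmsPlugin.cpp).
nms_inputs = ["Squeeze", "concat_priorbox", "concat_box_loc"]  # order from my .pbtxt
input_order = [0, 2, 1]                                        # value from config.py

loc_name = nms_inputs[input_order[0]]    # -> "Squeeze"
conf_name = nms_inputs[input_order[1]]   # -> "concat_box_loc" (should this be confidences?)
prior_name = nms_inputs[input_order[2]]  # -> "concat_priorbox"

print(loc_name, conf_name, prior_name)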
And my config.py file looks like this:
import graphsurgeon as gs
import tensorflow as tf

path = 'model/ssd_mobilenet_v2_coco_2018_03_29/frozen_inference_graph.pb'
TRTbin = 'TRT_ssd_mobilenet_v2_coco_2018_03_29.bin'
output_name = ['NMS']
dims = [3, 300, 300]
layout = 7

def add_plugin(graph):
    all_assert_nodes = graph.find_nodes_by_op("Assert")
    graph.remove(all_assert_nodes, remove_exclusive_dependencies=True)

    all_identity_nodes = graph.find_nodes_by_op("Identity")
    graph.forward_inputs(all_identity_nodes)

    Input = gs.create_node(
        name="Input",
        op="Placeholder",
        dtype=tf.float32,
        shape=[1, 3, 300, 300]
    )

    PriorBox = gs.create_plugin_node(
        name="GridAnchor",
        op="GridAnchor_TRT",
        dtype=tf.float32,
        minSize=0.2,
        maxSize=0.95,
        aspectRatios=[1.0, 2.0, 0.5, 3.0, 0.33],
        variance=[0.1, 0.1, 0.2, 0.2],
        featureMapShapes=[19, 10, 5, 3, 2, 1],
        numLayers=6
    )

    NMS = gs.create_plugin_node(
        name="NMS",
        op="NMS_TRT",
        dtype=tf.float32,
        shareLocation=1,
        varianceEncodedInTarget=0,
        backgroundLabelId=0,
        confidenceThreshold=1e-8,
        nmsThreshold=0.6,
        topK=100,
        keepTopK=100,
        numClasses=20,
        inputOrder=[0, 2, 1],
        #inputOrder=[1, 0, 2],
        confSigmoid=1,
        isNormalized=1,
        #scoreConverter="SIGMOID"
    )

    concat_priorbox = gs.create_node(
        name="concat_priorbox",
        op="ConcatV2",
        dtype=tf.float32,
        axis=2
    )

    concat_box_loc = gs.create_plugin_node(
        "concat_box_loc",
        op="FlattenConcat_TRT",
        dtype=tf.float32,
        axis=1,
        ignoreBatch=0
    )

    concat_box_conf = gs.create_plugin_node(
        "concat_box_conf",
        op="FlattenConcat_TRT",
        dtype=tf.float32,
        axis=1,
        ignoreBatch=0
    )

    namespace_plugin_map = {
        "MultipleGridAnchorGenerator": PriorBox,
        "Postprocessor": NMS,
        "Preprocessor": Input,
        "ToFloat": Input,
        "image_tensor": Input,
        "Concatenate": concat_priorbox,
        "concat": concat_box_loc,
        "concat_1": concat_box_conf
    }

    graph.collapse_namespaces(namespace_plugin_map)
    graph.remove(graph.graph_outputs, remove_exclusive_dependencies=False)
    graph.find_nodes_by_op("NMS_TRT")[0].input.remove("Input")

    return graph
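For completeness, the conversion itself follows the usual graphsurgeon + uff flow, roughly like this (just a sketch of my script; 'tmp.uff' is a placeholder name):

import graphsurgeon as gs
import uff
from config import add_plugin, path, output_name

# Apply the surgery from config.py to the frozen graph, then convert to UFF.
# text=True also writes the human-readable .pbtxt quoted above.
dynamic_graph = add_plugin(gs.DynamicGraph(path))
uff.from_tensorflow(
    dynamic_graph.as_graph_def(),
    output_nodes=output_name,
    output_filename='tmp.uff',   # placeholder name
    text=True
)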
What differs from other people's results is that the NMS part of my .pbtxt has these inputs:
inputs: "Squeeze"
inputs: "concat_priorbox"
inputs: "concat_box_loc"
while other people's .pbtxt files have:
inputs: "concat_box_conf"
inputs: "Squeeze"
inputs: "concat_priorbox"
Also, where can I find nmsPlugin.cpp?
Can anyone help me? Thanks!