TopKLayer's two outputs looks the same, and when I mark the second output as network's output,the en...

TensorRT version:5.0.2.6
cuda version:9.0
cudnn version:7.3
python version:3.5

def build_engine(deploy_file, model_file, chars_top_k=5):
    with trt.Builder(TRT_LOGGER) as builder, builder.create_network() as network, trt.CaffeParser() as parser:
        builder.max_workspace_size = common.MB(200)
        builder.max_batch_size = ModelData.MAX_BATCH_SIZE
        # Set the parser's plugin factory. Note that we bind the factory to a reference so
        # that we can destroy it later. (parser.plugin_factory_ext is a write-only attribute)
        parser.plugin_factory_ext = fc_factory

        # Parse the model and build the engine.
        model_tensors = parser.parse(deploy=deploy_file, model=model_file, network=network, dtype=ModelData.DTYPE)

        prob_tensor = model_tensors.find(ModelData.OUTPUT_NAME)

        topk = network.add_topk(prob_tensor, trt.TopKOperation.MAX, chars_top_k, 2)
        topk.get_output(0).name = 'top%d_prob' % chars_top_k
        topk.get_output(1).name = 'top%d_chars' % chars_top_k

print("output1",topk.get_output(0))
        print("output2",topk.get_output(1))
        # topk.get_output(1).name = 'top%d_chari' % chars_top_k
        network.mark_output(topk.get_output(0))
        network.mark_output(topk.get_output(1))
        # network.mark_output(topk.get_output(1))
        # network.mark_output(prob_tensor)
        return builder.build_cuda_engine(network)

[TensorRT] ERROR: Output tensor top5_chars of type Float produced from output of incompatible type Int32
[TensorRT] ERROR: Could not compute dimensions for top5_prob, because the network is not valid
output1 <tensorrt.tensorrt.ITensor object at 0x7fde7408bc70>
output2 <tensorrt.tensorrt.ITensor object at 0x7fde7408bc70>
Traceback (most recent call last):
File “/home/jiachx/gitcode/ThunisoftOCR/tensorrt/sample.py”, line 192, in
main()
File “/home/jiachx/gitcode/ThunisoftOCR/tensorrt/sample.py”, line 161, in main
with build_engine(deploy_file, model_file, chars_top_k=top_k) as engine:
AttributeError: exit

Hello,

to help us debug, can you please share a small repro that contains the model and build source that exhibit the error you are seeing?

here is the code,the deploy_file and model_file is in the tensorrt data/mnist

import tensorrt as trt

TRT_LOGGER = trt.Logger(trt.Logger.WARNING)


def build_engine(deploy_file, model_file):
    with trt.Builder(TRT_LOGGER) as builder, builder.create_network() as network, trt.CaffeParser() as parser:
        builder.max_workspace_size = 200 * 1 << 20
        builder.max_batch_size = 128
        # Set the parser's plugin factory. Note that we bind the factory to a reference so
        # that we can destroy it later. (parser.plugin_factory_ext is a write-only attribute)

        # Parse the model and build the engine.
        model_tensors = parser.parse(deploy=deploy_file, model=model_file, network=network, dtype=trt.float32)

        prob_tensor = model_tensors.find("prob")
        network.mark_output(prob_tensor)
        return builder.build_cuda_engine(network)


def build_engine_with_topk(deploy_file, model_file, chars_top_k=5):
    with trt.Builder(TRT_LOGGER) as builder, builder.create_network() as network, trt.CaffeParser() as parser:
        builder.max_workspace_size = 200 * 1 << 20
        builder.max_batch_size = 128
        # Set the parser's plugin factory. Note that we bind the factory to a reference so
        # that we can destroy it later. (parser.plugin_factory_ext is a write-only attribute)

        # Parse the model and build the engine.
        model_tensors = parser.parse(deploy=deploy_file, model=model_file, network=network, dtype=trt.float32)

        prob_tensor = model_tensors.find("prob")

        topk = network.add_topk(prob_tensor, trt.TopKOperation.MAX, chars_top_k, 1)
        topk.get_output(0).name = 'top%d_prob' % chars_top_k
        topk.get_output(1).name = 'top%d_chars' % chars_top_k

        print("output1", topk.get_output(0))
        print("output2", topk.get_output(1))
        network.mark_output(topk.get_output(0))
        network.mark_output(topk.get_output(1))  # if you comment this line,no error produce
        return builder.build_cuda_engine(network)


deploy_file = '/home/jiachx/driver/TensorRT-5.0.2.6/data/mnist/deploy.prototxt'
model_file = '/home/jiachx/driver/TensorRT-5.0.2.6/data/mnist/mnist.caffemodel'

engine = build_engine(deploy_file, model_file)
# build_engine_with_topk produce error
engine_with_topk = build_engine_with_topk(deploy_file, model_file, chars_top_k=5)

Is anybody solving this problem?

the case is being reviewed. thank you for your patience.

Per engineering, this is fixed in TensorRT-5.1.5 and Container-19.05.