Convert TF1 model to ONNX, inference in TensorRT error

Hi, all

  • convert the TensorFlow 1 model to an ONNX model
python nvidia_xavier_nx_converter.py -ot onnx -if /home/yons/workspace/models/tf1/shuffleNet/ckpt/shufflene_v2_1.0.ckpt.meta -op /home/yons/workspace/models/tf1/shuffleNet/ckpt/onnx -in Placeholder:0 -on shuffle_netv2/dense/MatMul:0 --inputs-as-nchw Placeholder:0
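For reference, for a checkpoint input the script below boils down to a plain tf2onnx call, roughly like this (same paths and node names as above; the exact command line is assembled in to_onnx):

python -m tf2onnx.convert --checkpoint /home/yons/workspace/models/tf1/shuffleNet/ckpt/shufflene_v2_1.0.ckpt.meta --output /home/yons/workspace/models/tf1/shuffleNet/ckpt/onnx/shufflene_v2_1.0.ckpt.meta.onnx --opset 9 --inputs Placeholder:0 --outputs shuffle_netv2/dense/MatMul:0 --inputs-as-nchw Placeholder:0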
  • script nvidia_xavier_nx_converter.py
try:
    import tensorflow as tf
    from tensorflow.contrib import *  # this import isn't used, but it works around some common TF1 import errors
except ImportError:
    print("please install the tensorflow library")
try:
    import tf2onnx
except ImportError:
    print("please install tf2onnx model")
import uff # to convert the graph from a serialized frozen TensorFlow model to UFF.
import os
# import tf2onnx
import subprocess
import argparse

class tensorrt_converter(): 
    def __init__(self, input_type, output_type, input_nodes, output_nodes, input_file, output_path):  
        self.output_type = output_type
        self.input_type = input_type
        self.input_nodes = input_nodes
        self.output_nodes = output_nodes
        self.input_file = input_file
        self.output_path = output_path

    def convert(self, kwargs):
        """
        convert the input model trained by an AI framework to an inference model supported by tensorrt
        """
        if self.input_type == "caffe":
            if os.path.isdir(self.input_file):
                # collect the unique file extensions found in the directory
                filename_extension = list({filename.split('.')[-1] for filename in os.listdir(self.input_file)})
                if 'caffemodel' in filename_extension and 'prototxt' in filename_extension:
                    return True
                print("the directory must contain both a .caffemodel file and a .prototxt file.")
                return False
            else:
                print("the caffe model needs multiple files, please input the file directory.")
                return False
        if self.input_type == "pytorch":
            pass
        if self.input_type == "tensorflow":
            if self.output_type == 'uff':
                if not self.tf2uff(): 
                    return False
            if self.output_type == 'onnx':
                if not self.to_onnx('tensorflow',kwargs):
                    return False
        if self.input_type == "paddlepaddle":
            pass
        if self.input_type == "mindspore":
            pass
        return True

    def tf2uff(self):
        """
        convert the tensorflow model to uff, according to the tensorflow model file format
        """
        if(self.get_tf_file_type(self.input_file) == 'frozen_file'):
            print("convert pb to uff")
            # process_result = uff.from_tensorflow_frozen_model(frozen_file = self.input_file, output_nodes = self.output_nodes,preprocessor=None, output_filename = os.path.join(self.output_path, os.path.basename(self.input_file).split('.')[0] + ".uff"))
            process_result = uff.from_tensorflow_frozen_model(frozen_file = self.input_file, preprocessor=None, output_filename = os.path.join(self.output_path, os.path.basename(self.input_file).split('.')[0] + ".uff"))
            if not process_result:
                return False
        elif(self.get_tf_file_type(self.input_file) == 'checkpoint'):

            #Load checkpoint
            checkpoint = tf.train.get_checkpoint_state(self.input_file) 
            input_checkpoint = checkpoint.model_checkpoint_path

            #Devices should be cleared to allow Tensorflow to control placement of graph when loading on different machines
            saver = tf.train.import_meta_graph(input_checkpoint + '.meta',  clear_devices=True)
            if not saver:
                return False
            #Get the graph_def
            graph = tf.get_default_graph()
            input_graph_def = graph.as_graph_def()

            #output names array
            output_nodes_names = self.output_nodes

            with tf.Session(graph=graph) as sess:
                saver.restore(sess, input_checkpoint)
                frozen_graph = tf.graph_util.convert_variables_to_constants(sess, input_graph_def, output_nodes_names)
                frozen_graph = tf.graph_util.remove_training_nodes(frozen_graph)
                uff_model = uff.from_tensorflow(frozen_graph, output_nodes_names, output_filename = os.path.join(self.output_path, os.path.split(self.input_file)[1] + ".uff"))

                if not uff_model:
                    return False
        else:
            print("Error: converter tensorflow model to uff format")
            return False
        return True
            
    def to_onnx(self, input_type, kwargs):
        """
        convert the input type model to an onnx model
        """
        # pair up the unparsed command line arguments into a dictionary,
        # e.g. ['--inputs-as-nchw', 'Placeholder:0'] -> {'--inputs-as-nchw': 'Placeholder:0'}
        kwargs_dict = dict(zip(kwargs[0::2], kwargs[1::2]))
        if input_type == 'tensorflow':
            inputs_as_nchw = kwargs_dict.get('--inputs-as-nchw', '')
            print(self.get_tf_file_type(self.input_file))
            if self.get_tf_file_type(self.input_file) == 'checkpoint':
                cmdl = ['-m', 'tf2onnx.convert', '--checkpoint',
                self.input_file, '--output', os.path.join(self.output_path, os.path.split(self.input_file)[1] + ".onnx"),
                '--opset', str(9),
                '--inputs', self.input_nodes,
                '--outputs', self.output_nodes,
                ]
            elif self.get_tf_file_type(self.input_file) == 'frozen_file':
                cmdl = ['-m', 'tf2onnx.convert', '--graphdef',
                self.input_file, '--output', os.path.join(self.output_path, os.path.basename(self.input_file).split('.')[0] + ".onnx"),
                '--opset', str(9),
                '--inputs', self.input_nodes,
                '--outputs', self.output_nodes,
                ]
            else:
                print("the input must be a checkpoint (.meta) or a frozen graph (.pb) file")
                return False
            # --inputs-as-nchw applies to both checkpoint and frozen graph inputs
            if inputs_as_nchw != '':
                cmdl.append('--inputs-as-nchw')
                cmdl.append(inputs_as_nchw)
            final_cmdl = ['python']
            final_cmdl.extend(cmdl)
            # run the assembled tf2onnx command; subprocess is already imported above
            return subprocess.run(final_cmdl).returncode == 0
        if input_type == 'paddlepaddle':
            pass
        if input_type == 'pytorch':
            pass
        return True
    
    def get_tf_file_type(self, input): 
        """
        get the type of the input tensorflow model file
        """
        # if not os.path.exists(input):
        #     print("the files not exist")
        #     return False
        # if os.path.isdir(input):
        #     filelist = os.listdir(input)
        #     filename_extension = []
        #     for filename in filelist:
        #         filename_extension.append(filename.split('.')[-1])

        #     # duplicate removal
        #     file_ext_unique= list(set(filename_extension))    

        #     for file_ext_name in file_ext_unique:
        #         if 'data' in file_ext_name:
        #             if 'meta' in file_ext_unique:
        #                 return "checkpoint"


        if os.path.isfile(input):
            if 'pb' == os.path.basename(input).split(".")[-1]:
                return 'frozen_file'
            if 'meta' == os.path.basename(input).split('.')[-1]: 
                return 'checkpoint'

def parse_args():
    """ Parses command line arguments. """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-it',
        "--input_type",
        type=str,
        default='tensorflow',
        help="model trained format "
    )
    parser.add_argument(
        '-ot',
        "--output_type",
        type=str,
        default='uff',
        help="model inferenced format in nvidia tensorrt"
    )
    parser.add_argument(
        '-in',
        "--input_nodes",
        type=str,
        default='',
        help="input node names (comma separated list)"
    )
    parser.add_argument(
        '-on',
        "--output_nodes",
        type=str,
        default='',
        help="output node names (comma separated list)"
    )
    parser.add_argument(
        '-if',
        "--input_file",
        type=str,
        required=True,
        help="input model fils. if the model format needs multiple files to save model infromation, please input the directory of model files. otherwise, plese input the absolute file name."
    )
    parser.add_argument(
        '-op',
        "--output_path",
        type=str,
        required=True,
        help="output inference model path. The inference model genereated file name prefix is same as the input model file name. If the input_flie is directory name, the generated file name prefix is the directory name. "
    )
    return parser.parse_known_args()


if __name__ == "__main__":
    convert_args, unparsed = parse_args()
    to = tensorrt_converter(convert_args.input_type, convert_args.output_type,
                      convert_args.input_nodes, convert_args.output_nodes, 
                      convert_args.input_file, convert_args.output_path
    )
    to.convert(unparsed)

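  • verify the generated onnx model with onnxruntime (a quick check with a random input that the model loads and runs)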
import onnxruntime as rt 
import numpy as np 

input_tensor = np.random.random((1,224,224,3)).astype(np.float32)

session = rt.InferenceSession("/home/yons/workspace/models/tf1/shuffleNet/ckpt/onnx/shufflene_v2_1.0.ckpt.meta.onnx")

input_name = session.get_inputs()[0].name
print("input_name : ", input_name)
input_shape = session.get_inputs()[0].shape
print("input_shape : ", input_shape)
output_name = session.get_outputs()[0].name 
print("output_name : ", output_name)
output_shape = session.get_outputs()[0].shape
print("output_shape : ", output_shape)

res = session.run([output_name], {input_name: input_tensor})

out = np.array(res)

print(out)
  • inference the onnx model in TensorRT, error:
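The warnings below were printed while building the engine with DLA enabled. A minimal sketch of a trtexec invocation that produces this kind of output, assuming the default JetPack location of trtexec (my exact flags may differ):

/usr/src/tensorrt/bin/trtexec --onnx=/home/yons/workspace/models/tf1/shuffleNet/ckpt/onnx/shufflene_v2_1.0.ckpt.meta.onnx --useDLACore=0 --allowGPUFallback --fp16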
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/conv2d_bn__re_lu/conv2d/Conv2D__7 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__558 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit2/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit2/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit2/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 19) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 20) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 21) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 21) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 22) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 23) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_4/conv2d_4/Conv2D__67 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_4/conv2d_4/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_4/conv2d_4/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_4/activation_4/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_4/activation_4/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/depthwise_conv2d_bn_2/depthwise_conv2d_2/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/depthwise_conv2d_bn_2/depthwise_conv2d_2/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_5/conv2d_5/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_5/conv2d_5/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_5/batch_normalization_8/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_5/batch_normalization_8/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_5/activation_5/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/conv2d_bn__re_lu_5/activation_5/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__560 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__566 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 39) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 40) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 41) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 41) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 42) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 43) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_6/conv2d_6/Conv2D__96 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_6/conv2d_6/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_6/conv2d_6/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_6/activation_6/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_6/activation_6/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/depthwise_conv2d_bn_3/depthwise_conv2d_3/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/depthwise_conv2d_bn_3/depthwise_conv2d_3/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_7/conv2d_7/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_7/conv2d_7/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_7/batch_normalization_11/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_7/batch_normalization_11/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_7/activation_7/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/conv2d_bn__re_lu_7/activation_7/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__568 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__574 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_1/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 59) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 60) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 61) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 61) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 62) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 63) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_8/conv2d_8/Conv2D__125 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_8/conv2d_8/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_8/conv2d_8/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_8/activation_8/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_8/activation_8/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/depthwise_conv2d_bn_4/depthwise_conv2d_4/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/depthwise_conv2d_bn_4/depthwise_conv2d_4/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_9/conv2d_9/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_9/conv2d_9/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_9/batch_normalization_14/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_9/batch_normalization_14/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_9/activation_9/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/conv2d_bn__re_lu_9/activation_9/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__576 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__582 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage/shufflenet_unit1_2/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__732 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__590 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit2_1/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit2_1/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit2_1/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 95) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 96) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 97) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 97) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 98) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 99) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_13/conv2d_13/Conv2D__202 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_13/conv2d_13/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_13/conv2d_13/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_13/activation_13/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_13/activation_13/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/depthwise_conv2d_bn_7/depthwise_conv2d_7/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/depthwise_conv2d_bn_7/depthwise_conv2d_7/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_14/conv2d_14/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_14/conv2d_14/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_14/batch_normalization_22/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_14/batch_normalization_22/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_14/activation_14/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/conv2d_bn__re_lu_14/activation_14/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__592 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__598 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_3/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 115) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 116) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 117) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 117) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 118) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 119) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_15/conv2d_15/Conv2D__231 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_15/conv2d_15/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_15/conv2d_15/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_15/activation_15/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_15/activation_15/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/depthwise_conv2d_bn_8/depthwise_conv2d_8/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/depthwise_conv2d_bn_8/depthwise_conv2d_8/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_16/conv2d_16/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_16/conv2d_16/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_16/batch_normalization_25/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_16/batch_normalization_25/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_16/activation_16/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/conv2d_bn__re_lu_16/activation_16/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__600 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__606 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_4/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 135) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 136) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 137) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 137) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 138) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 139) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_17/conv2d_17/Conv2D__260 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_17/conv2d_17/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_17/conv2d_17/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_17/activation_17/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_17/activation_17/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/depthwise_conv2d_bn_9/depthwise_conv2d_9/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/depthwise_conv2d_bn_9/depthwise_conv2d_9/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_18/conv2d_18/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_18/conv2d_18/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_18/batch_normalization_28/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_18/batch_normalization_28/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_18/activation_18/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/conv2d_bn__re_lu_18/activation_18/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__608 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__614 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_5/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 155) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 156) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 157) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 157) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 158) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 159) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_19/conv2d_19/Conv2D__289 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_19/conv2d_19/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_19/conv2d_19/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_19/activation_19/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_19/activation_19/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/depthwise_conv2d_bn_10/depthwise_conv2d_10/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/depthwise_conv2d_bn_10/depthwise_conv2d_10/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_20/conv2d_20/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_20/conv2d_20/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_20/batch_normalization_31/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_20/batch_normalization_31/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_20/activation_20/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/conv2d_bn__re_lu_20/activation_20/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__616 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__622 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_6/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 175) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 176) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 177) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 177) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 178) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 179) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_21/conv2d_21/Conv2D__318 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_21/conv2d_21/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_21/conv2d_21/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_21/activation_21/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_21/activation_21/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/depthwise_conv2d_bn_11/depthwise_conv2d_11/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/depthwise_conv2d_bn_11/depthwise_conv2d_11/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_22/conv2d_22/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_22/conv2d_22/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_22/batch_normalization_34/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_22/batch_normalization_34/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_22/activation_22/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/conv2d_bn__re_lu_22/activation_22/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__624 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__630 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_7/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 195) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 196) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 197) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 197) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 198) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 199) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_23/conv2d_23/Conv2D__347 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_23/conv2d_23/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_23/conv2d_23/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_23/activation_23/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_23/activation_23/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/depthwise_conv2d_bn_12/depthwise_conv2d_12/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/depthwise_conv2d_bn_12/depthwise_conv2d_12/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_24/conv2d_24/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_24/conv2d_24/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_24/batch_normalization_37/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_24/batch_normalization_37/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_24/activation_24/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/conv2d_bn__re_lu_24/activation_24/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__632 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__638 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_8/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 215) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 216) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 217) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 217) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 218) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 219) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_25/conv2d_25/Conv2D__376 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_25/conv2d_25/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_25/conv2d_25/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_25/activation_25/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_25/activation_25/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/depthwise_conv2d_bn_13/depthwise_conv2d_13/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/depthwise_conv2d_bn_13/depthwise_conv2d_13/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_26/conv2d_26/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_26/conv2d_26/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_26/batch_normalization_40/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_26/batch_normalization_40/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_26/activation_26/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/conv2d_bn__re_lu_26/activation_26/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__640 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__646 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_1/shufflenet_unit1_9/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__736 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__654 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit2_2/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit2_2/transpose is not supported on DLA, falling back to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit2_2/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 251) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 252) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 253) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 253) [Concatenation] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 254) [Constant] device type to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 255) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_30/conv2d_30/Conv2D__453 is not supported on DLA, falling back to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_30/conv2d_30/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_30/conv2d_30/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_30/activation_30/Relu does not support dynamic shapes in any dimension.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_30/activation_30/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/depthwise_conv2d_bn_16/depthwise_conv2d_16/depthwise does not support dynamic shapes in any dimension.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/depthwise_conv2d_bn_16/depthwise_conv2d_16/depthwise is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_31/conv2d_31/Conv2D does not support dynamic shapes in any dimension.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_31/conv2d_31/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_31/batch_normalization_48/FusedBatchNorm does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_31/batch_normalization_48/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_31/activation_31/Relu does not support dynamic shapes in any dimension.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/conv2d_bn__re_lu_31/activation_31/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__656 is not supported on DLA, falling back to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__662 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/Reshape is not supported on DLA, falling back to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_10/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 271) [Shape] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 272) [Constant] device type to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] (Unnamed Layer* 273) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 273) [Concatenation] device type to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 274) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 275) [Gather] device type to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/split is not supported on DLA, falling back to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_32/conv2d_32/Conv2D__482 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_32/conv2d_32/Conv2D does not support dynamic shapes in any dimension.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_32/conv2d_32/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_32/activation_32/Relu does not support dynamic shapes in any dimension.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_32/activation_32/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/depthwise_conv2d_bn_17/depthwise_conv2d_17/depthwise does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/depthwise_conv2d_bn_17/depthwise_conv2d_17/depthwise is not supported on DLA, falling back to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_33/conv2d_33/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_33/conv2d_33/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_33/batch_normalization_51/FusedBatchNorm does not support dynamic shapes in any dimension.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_33/batch_normalization_51/FusedBatchNorm is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_33/activation_33/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/conv2d_bn__re_lu_33/activation_33/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__664 is not supported on DLA, falling back to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/concat does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/concat is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer Transpose__670 is not supported on DLA, falling back to GPU.
[W] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/Reshape is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/transpose is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_11/Reshape_1 is not supported on DLA, falling back to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 291) [Shape] device type to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 292) [Constant] device type to GPU.
[W] [TRT] (Unnamed Layer* 293) [Concatenation]: DLA only supports concatenation on the C dimension.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 293) [Concatenation] device type to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 294) [Constant] device type to GPU.
[W] [TRT] DLA only supports FP16 and Int8 precision type. Switching (Unnamed Layer* 295) [Gather] device type to GPU.
[W] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_12/split is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_12/split_1 is not supported on DLA, falling back to GPU.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_12/conv2d_bn__re_lu_34/conv2d_34/Conv2D__511 is not supported on DLA, falling back to GPU.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_12/conv2d_bn__re_lu_34/conv2d_34/Conv2D does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_12/conv2d_bn__re_lu_34/conv2d_34/Conv2D is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_12/conv2d_bn__re_lu_34/activation_34/Relu does not support dynamic shapes in any dimension.
[W] [TRT] Default DLA is enabled but layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_12/conv2d_bn__re_lu_34/activation_34/Relu is not supported on DLA, falling back to GPU.
[W] [TRT] DLA Layer shuffle_netv2/shufflenet_stage_2/shufflenet_unit1_12/depthwise_conv2d_bn_18/depthwise_conv2d_18/depthwise does not support dynamic shapes in any dimension.
[07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] [07/29/2021-19:44:10] The engine create failed.

Thanks for your time!

Hi,

Could you share more of the TensorRT log?
We don’t see an error in it, only some warning messages.
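For context, the [W] lines only mean that DLA cannot run those layers (dynamic shapes, unsupported ops, or FP32 precision), so TensorRT falls back to the GPU for them; they do not by themselves stop the engine build. As a rough sketch only (we are assuming here that the engine is built with trtexec, and the model path is a placeholder), an invocation like the one below produces exactly this kind of warning while still letting the build continue:

```
# Assumed trtexec invocation (not taken from the original post): build on
# DLA core 0, let unsupported layers fall back to the GPU, and use FP16
# since DLA only supports FP16/INT8.
trtexec --onnx=model.onnx --useDLACore=0 --allowGPUFallback --fp16
```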

Thanks.

Hi,

  • Sorry, I copied and pasted too little of the log; the messages above are TensorRT’s log.
  • I have pasted the missing part below:
[07/29/2021-19:44:10] [W] [TRT] Default DLA is enabled but layer (Unnamed Layer* 330) [Shuffle] is not supported on DLA, falling back to GPU.
[07/29/2021-19:44:10] [E] [TRT] Network has dynamic or shape inputs, but no optimization profile has been defined.
[07/29/2021-19:44:10] [E] [TRT] Network validation failed.
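The decisive message is the [E] line: the exported ONNX model still has a dynamic input dimension, so TensorRT refuses to build an engine unless an optimization profile tells it the allowed shape range. A minimal sketch of that alternative fix with the TensorRT Python API (the file name is a placeholder, and the 1x3x224x224 NCHW shape is an assumption, since the model appears to have been exported with --inputs-as-nchw):

```python
import tensorrt as trt

logger = trt.Logger(trt.Logger.WARNING)
builder = trt.Builder(logger)
# Explicit-batch network, as required for ONNX models.
network = builder.create_network(
    1 << int(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH))
parser = trt.OnnxParser(network, logger)
with open("model.onnx", "rb") as f:  # placeholder path
    parser.parse(f.read())

# The profile gives TensorRT the min/opt/max shapes of the dynamic input;
# here all three are pinned to a single 1x3x224x224 tensor.
config = builder.create_builder_config()
profile = builder.create_optimization_profile()
profile.set_shape("Placeholder:0",
                  (1, 3, 224, 224), (1, 3, 224, 224), (1, 3, 224, 224))
config.add_optimization_profile(profile)
engine = builder.build_engine(network, config)
```

With trtexec, the equivalent would be its --minShapes/--optShapes/--maxShapes options.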

Hi,
Adding the input shape fixed the bug:

'Placeholder:0[1,224,224,3]'
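For anyone who finds this later: the [1,224,224,3] suffix is tf2onnx’s syntax for overriding an unknown input shape (given in the original NHWC layout of the TensorFlow placeholder). Fixing the shape at conversion time makes the exported ONNX model fully static, so TensorRT no longer asks for an optimization profile. A minimal sketch with the stock tf2onnx command line (file names and the output node are placeholders, not the poster’s exact script):

```
python -m tf2onnx.convert --checkpoint model.ckpt.meta \
    --inputs 'Placeholder:0[1,224,224,3]' \
    --outputs output_node:0 \
    --inputs-as-nchw Placeholder:0 \
    --output model.onnx
```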
