TensorRT 7 batch inference problem: only the first image's results are correct

I updated TensorRT from 5.x to 7.x. All of my code ran correctly with TensorRT 5, for both single images and batches of images. With TensorRT 7, however, only single-image inference is correct: in batch inference only the first image's result is correct, and the remaining results are all zeros. Is there something wrong with my batch inference setup? How can I solve this problem?
system: Ubuntu 16.04
driver version: 410.93
CUDA 10, cuDNN 7.6.5
TensorRT 7.x
image preprocessing: OpenCV
code: C++
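
For reference, a minimal diagnostic sketch (assuming the mEngine member that serialize() below creates): with an explicit-batch network the batch size is part of each binding's dimensions, so dumping them should show what batch the engine was actually built for.

// Dump every binding's name and dimensions; for an explicit-batch engine,
// d.d[0] is the batch size that was baked in at build time.
for (int i = 0; i < mEngine->getNbBindings(); ++i)
{
    nvinfer1::Dims d = mEngine->getBindingDimensions(i);
    std::cout << mEngine->getBindingName(i) << ":";
    for (int j = 0; j < d.nbDims; ++j)
        std::cout << " " << d.d[j];
    std::cout << std::endl;
}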

void Net::serialize()
{
    IBuilder *builder = createInferBuilder(mLogger);
    INetworkDefinition *network = builder->createNetworkV2(1U << static_cast<int32_t>(NetworkDefinitionCreationFlag::kEXPLICIT_BATCH));
    ICaffeParser *parser = createCaffeParser();
    const IBlobNameToTensor *blobNameToTensor = parser->parse(mPrototxtFile.c_str(), mCaffemodelFile.c_str(), *network, DataType::kFLOAT);
    // specify which tensors are outputs
    for (const std::string &s : mOutputLayerNames)
        network->markOutput(*blobNameToTensor->find(s.c_str()));
    // record the network input name and dims (by default there is one input layer)
    for (int i = 0; i < network->getNbInputs(); i++)
    {
        mInputLayerName = network->getInput(i)->getName();
        mInputDims = network->getInput(i)->getDimensions();
    }
    nvinfer1::IBuilderConfig *config = builder->createBuilderConfig();
    builder->setMaxBatchSize(mMaxBatchSize);
    config->setMaxWorkspaceSize(10 << 20);
    mEngine = builder->buildEngineWithConfig(*network, *config);
    assert(mEngine);
    mContext = mEngine->createExecutionContext();
    assert(mContext != nullptr);
    mInputIdx = mEngine->getBindingIndex(mInputLayerName.c_str());
    mInputDims = mEngine->getBindingDimensions(mInputIdx);
    // per-image input size: d[0] is the batch dim, d[1..3] are C, H, W
    mInputSize = mInputDims.d[1] * mInputDims.d[2] * mInputDims.d[3];
    mOutputNum = mOutputLayerNames.size();
    for (int i = 0; i < mOutputNum; ++i)
    {
        int idx = mEngine->getBindingIndex(mOutputLayerNames[i].c_str());
        mOutputIdx.push_back(idx);
        nvinfer1::Dims dims_out = mEngine->getBindingDimensions(idx);
        mOutputDims.push_back(dims_out);
        mOutputSizes.push_back(dims_out.d[1] * dims_out.d[2] * dims_out.d[3]);
    }
    builder->destroy();
    network->destroy();
    config->destroy();
    parser->destroy();
}
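
If I understand the TensorRT 7 migration notes correctly, setMaxBatchSize() only applies to implicit-batch networks; with kEXPLICIT_BATCH the batch size is part of the network's input dimensions, and the Caffe parser takes it from the prototxt (typically 1), so enqueueV2() might only be computing one image. For comparison, a minimal sketch of the implicit-batch variant that matches the TensorRT 5 behaviour (serializeImplicitBatch is a hypothetical helper assuming the same members as serialize() above; I have not verified this is the intended migration path for the Caffe parser):

// Hypothetical variant of serialize() (sketch only): build the network in
// implicit-batch mode, where setMaxBatchSize() is honoured and the bindings
// are sized per sample, as in TensorRT 5.
void Net::serializeImplicitBatch()
{
    IBuilder *builder = createInferBuilder(mLogger);
    INetworkDefinition *network = builder->createNetworkV2(0U); // 0U = implicit batch
    ICaffeParser *parser = createCaffeParser();
    const IBlobNameToTensor *blobNameToTensor = parser->parse(mPrototxtFile.c_str(), mCaffemodelFile.c_str(), *network, DataType::kFLOAT);
    for (const std::string &s : mOutputLayerNames)
        network->markOutput(*blobNameToTensor->find(s.c_str()));
    nvinfer1::IBuilderConfig *config = builder->createBuilderConfig();
    builder->setMaxBatchSize(mMaxBatchSize); // honoured in implicit-batch mode
    config->setMaxWorkspaceSize(10 << 20);
    mEngine = builder->buildEngineWithConfig(*network, *config);
    mContext = mEngine->createExecutionContext();
    // ...binding lookup and cleanup as in serialize() above;
    // forward() would then call the batched enqueue:
    //     mContext->enqueue(batchSize, buffers, stream, nullptr);
}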

// batch inference
void Net::forward(float *input, float **outputs, int batchSize)
{
    assert(batchSize <= mMaxBatchSize);
    void **buffers = new void *[1 + mOutputNum];

    int inputByteSize = batchSize * mInputSize * sizeof(float);
    std::vector<int> outputByteSizes(mOutputNum);
    for (int i = 0; i < mOutputNum; ++i)
    {
        outputByteSizes[i] = batchSize * mOutputSizes[i] * sizeof(float);
    }
    // create GPU buffers and a stream
    cudaMalloc(&buffers[mInputIdx], inputByteSize);
    for (int i = 0; i < mOutputNum; ++i)
    {
        cudaMalloc(&buffers[mOutputIdx[i]], outputByteSizes[i]);
    }
    cudaStream_t stream;
    cudaStreamCreate(&stream);
    // DMA the input to the GPU, execute the batch asynchronously, and DMA the results back
    cudaMemcpyAsync(buffers[mInputIdx], input, inputByteSize, cudaMemcpyHostToDevice, stream);
    // mContext->enqueue(batchSize, buffers, stream, nullptr);  // TensorRT 5 path
    mContext->enqueueV2(buffers, stream, nullptr);
    for (int i = 0; i < mOutputNum; ++i)
    {
        cudaMemcpyAsync(outputs[i], buffers[mOutputIdx[i]], outputByteSizes[i], cudaMemcpyDeviceToHost, stream);
    }
    cudaStreamSynchronize(stream);
    // debug print of the first output (element count, not byte count)
    for (int i = 0; i < batchSize * mOutputSizes[0]; ++i)
    {
        std::cout << " " << outputs[0][i];
    }
    // release the stream and device buffers
    cudaStreamDestroy(stream);
    cudaFree(buffers[mInputIdx]);
    for (int i = 0; i < mOutputNum; ++i)
    {
        cudaFree(buffers[mOutputIdx[i]]);
    }
    delete[] buffers;
}
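
A small guard sketch for forward() (assuming the members above; hasImplicitBatchDimension() and getMaxBatchSize() are ICudaEngine methods): with an explicit-batch engine, enqueueV2() runs whatever batch size is baked into the input binding, so a larger batchSize would silently leave the remaining outputs at zero.

// Detect which batch mode the engine was built in, and assert that the
// requested batch actually fits before launching inference.
if (mEngine->hasImplicitBatchDimension())
{
    // implicit batch: the run-time batch size is passed to enqueue()
    assert(batchSize <= mEngine->getMaxBatchSize());
}
else
{
    // explicit batch: the batch size is d[0] of the input binding
    assert(batchSize == mEngine->getBindingDimensions(mInputIdx).d[0]);
}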

// inference results with batch = 2 (one line per image)
0.406353 -0.320382 1.5007 0.457494 0.0969265 1.50484 -0.886063 1.35644 0.245721 -0.121499 0.654094 0.441087 -0.0382616 0.438509 0.366007 0.687773 0.0161026 -0.937535 -0.152266 -0.808002 -1.0469 0.0923725 -0.0164187 -1.25007 -0.481815 -0.582204 0.163657 0.867953 0.869949 -0.551438 -0.458272 0.0660124 -1.14261 1.65907 -0.2369 -0.997435 -0.0952726 0.89377 1.01454 -0.236411 0.553196 0.154934 -0.653818 1.0422 -0.842711 -0.705634 1.79741 0.0375392 -0.844775 0.281589 1.48117 -0.668131 -0.00936072 0.405825 -0.0640269 -0.521449 -0.710661 0.0812242 -1.43366 0.781751 0.307415 -1.96069 0.175185 -0.834108 0.562669 -1.11267 0.225003 0.0494112 -0.113228 -0.737181 0.517156 0.27629 -2.01072 0.548 0.113696 0.20527 -1.06622 0.285509 0.733295 -1.97158 0.463969 -1.01816 -0.13495 -0.599245 -0.0708429 -0.527434 0.241324 -0.237106 -0.585738 -0.927675 -1.83781 -0.0975834 -0.229163 1.10344 0.992267 -0.807182 0.366262 -0.0900305 -0.183515 0.0356093 -0.92432 -2.05609 -0.027872 1.36764 -0.502808 -1.3497 -0.436312 -1.1614 -1.24928 0.295046 2.11463 -0.204547 0.510229 -0.913338 -1.47305 -0.464439 1.41911 -0.322183 0.12843 0.0386554 0.403745 0.274886 -1.51517 -0.425126 -0.144974 -1.19989 -0.492305 0.971405 -0.980981 0.226729 -0.962253 -1.11271 0.0819307 0.180224 0.548219 -1.19641 0.969759 -1.63952 -0.976513 -3.16699 0.0459704 0.815024 0.271491 -1.74563 0.0700583 -0.448591 -0.0798372 0.249779 0.339626 -1.28199 1.61843 -0.308285 1.13836 0.511789 -0.54121 -1.19866 0.129429 -0.173854 -0.358251 -0.55828 0.750858 -1.22212 -0.295723 -0.398226 -1.39757 -0.706725 -0.404669 -0.468153 -0.50701 -1.29585 0.994023 0.0881927 0.860236 -1.8756 -1.63142 -1.72427 0.54279 0.5903 0.409991 0.903088 -0.161422 -0.387014 1.30063 -0.609124 -2.22828 -0.480831 -0.728926 -0.578115 0.646032 0.596666 0.608126 -1.54893 -0.295164 -0.143483 -0.21124 1.0247 -0.376839 -0.694354 -0.590893 -0.837382 -0.444814 0.45696 -1.73967 -1.02547 -0.0128817 -0.613873 1.30854 -1.10418 1.15423 -0.315316 -2.0799 -0.478532 0.742416 0.16364 -0.382626 0.31556 0.17189 -1.33718 0.810278 -0.0816591 1.00955 -0.331771 -1.36839 -0.527535 2.24079 0.205742 0.807573 0.76596 -0.908307 0.387546 0.645354 -0.278659 -0.255232 1.709 -1.56609 0.652879 -0.634007 -0.667714 -0.164865 1.16015 -0.158875 0.20278 0.217261 -1.09209 0.594923 -0.274994 1.25857 -1.2823 0.256867 0.405666 0.860042 -0.231883 -0.616637 0.971031 0.274744 -0.399103 1.43199 0.82662 0.514961 0.842954 0.437559 0.565982 1.1632 0.808834 -1.87737 -0.370885 0.337025 0.831043 0.688389 0.595813 -0.460657 0.272426 -0.758543 -0.905068 0.235525 1.89656 -0.080765 -0.640284 -0.219413 0.140819 0.330152 1.03225 0.451335 1.73993 -0.0900418 0.452018 1.93943 0.821069 0.124759 -0.521162 0.0419835 -1.08423 -0.665381 -0.554943 -1.32223 -0.157837 -0.142827 0.384018 -1.4739 -0.89246 1.26983 -1.2753 -0.218849 -0.0530898 0.160818 0.141393 -0.417113 0.170634 -0.897994 -0.680975 0.279027 0.422929 -1.10184 1.02312 1.24519 -1.03106 0.635554 -0.815788 -1.06496 0.587361 -0.0302848 0.299898 -0.664726 0.523555 0.196908 -0.257223 0.638833 -0.839395 -1.4021 0.634566 -0.30287 0.467005 -0.212698 -0.918735 1.35413 -0.619448 1.67605 0.720621 -0.854841 1.79506 -0.392298 1.11476 1.9123 0.170381 0.364481 0.335852 0.513654 -1.57827 0.772346 -0.12321 -0.401701 0.637391 -0.336177 -0.613069 0.384869 -1.57921 0.557065 -0.557831 0.0483764 0.0879191 0.771807 1.10381 -0.0495069 -1.01708 -0.398865 -1.0098 0.323706 0.564023 -0.507627 0.485689 0.271754 -1.05546 -1.04395 -1.02332 -0.158644 -1.89779 0.706836 0.404797 0.19262 1.45922 1.01591 0.318038 0.0846937 0.423788 1.67103 
-0.377224 1.43248 -1.68664 0.0705906 -1.44281 1.61475 -1.77278 0.278337 -0.387826 -1.1676 0.47104 -0.0523239 1.73991 0.612604 -1.35199 1.32312 -0.670404 0.617603 -0.939258 -0.816016 -0.140886 -0.347843 -1.16134 -0.137986 1.10472 -2.10966 -1.57219 0.759368 -0.603187 -0.911311 2.07928 -0.109702 -1.4553 0.148527 0.926771 1.01266 -1.53547 -0.64518 0.545981 -0.848883 -0.0265279 -0.539555 -0.575804 -0.333965 -0.479666 -0.307982 -1.15911 0.547309 0.433744 0.580764 -0.618615 1.15845 0.935896 0.40413 1.23144 -0.650113 0.57803 1.08238 1.14253 1.28084 -0.200051 -1.02007 -2.45203 0.294061 -0.0764502 -0.959355 0.306128 -0.665136 0.536571 -0.443366 1.16706 -0.0970458 0.0426857 1.29729 -0.734713 -0.585136 0.69516 -0.124344 -0.414979 0.986587 -0.0787226 -1.21431 1.71252 0.291973 -0.117967 -0.167183 0.246977 0.334573 -1.06436 -0.935838 -1.05446 -0.100502 -0.358832 1.82056 1.11902 0.798935 1.96519 -0.898557 0.139052 -1.01531 -1.34037 -1.12453 1.7604 0.0943001 0.838453 0.28292 -0.139534 0.165661 1.37864 -0.180487 -0.728953 1.49748 -0.448245 0.497511 1.00661 -0.403384 0.792762 -0.389688 -0.77524 0.551162 -1.72863 -0.0433265 -0.177881 1.2181 1.0489 -1.43918 0.654463 0 0 0 0 0 0 0 0 (all remaining values are 0)