Description
Building the engine throws “cudnnRNNBaseLayer.cpp (297) - Cuda Error in RNNDescriptorState: 8” when I construct my GRU network with TensorRT 4.0.
Environment
TensorRT Version: 4.0.2
GPU Type: gtx2080ti
Nvidia Driver Version: 465.27
CUDA Version: 9.0
CUDNN Version: 7.5.1.10
Operating System + Version: ubuntu16.04
Python Version (if applicable): 3.7
TensorFlow Version (if applicable):
PyTorch Version (if applicable): 1.6
Baremetal or Container (if container which image + tag):
Could you give me some tips to help resolve this issue? Thanks!
My code:
// Adds a single-layer, unidirectional GRU (encoder) to the network and loads
// PyTorch-style weights/biases from weightMap.
//
// NOTE(review): the original signature took INetworkDefinition and the
// optional hidden state by value, yet the body used `network->`, compared
// hiddenIn against NULL, and dereferenced `*hiddenIn` — that cannot compile.
// Both must be pointers. The pasted code also used typographic quotes
// (“ ”) in several string literals; they are replaced with ASCII quotes.
//
// PyTorch packs GRU parameters as W_ir|W_iz|W_in (reset, update, new);
// after splitLstmWeights, the "...l00/l01/l02" suffixes are mapped to
// kRESET, kUPDATE, kHIDDEN respectively — TODO confirm this matches
// splitLstmWeights' split order.
//
// network:   network definition being built (non-owning, must not be null)
// weightMap: named weight blobs; splitLstmWeights adds per-gate sub-entries
// input:     GRU input tensor; dims.d[1] is passed as maxSeqLen below —
//            NOTE(review): verify the tensor layout, a wrong maxSeqLen is a
//            common cause of cuDNN RNN descriptor errors like the one reported
// index:     caller-supplied id, used only in the debug log line
// nHidden:   hidden state size
// hiddenIn:  optional initial hidden state (may be nullptr)
// lname:     weight-name prefix, e.g. "encoder.gru"
// returns:   the created RNN layer as ILayer*
ILayer* addGRUEncoder(INetworkDefinition* network, std::map<std::string, Weights>& weightMap, ITensor& input, int index, int nHidden, ITensor* hiddenIn, std::string lname) {
    // Split the fused PyTorch parameter blobs into per-gate entries
    // ("...l00", "...l01", "...l02") inside weightMap.
    splitLstmWeights(weightMap, lname + ".weight_ih_l0");
    splitLstmWeights(weightMap, lname + ".weight_hh_l0");
    splitLstmWeights(weightMap, lname + ".bias_ih_l0");
    splitLstmWeights(weightMap, lname + ".bias_hh_l0");

    Dims dims = input.getDimensions();
    std::cout << index << "encoder gru input shape: " << dims.nbDims << " ["
              << dims.d[0] << " " << dims.d[1] << " " << dims.d[2] << "]" << std::endl;

    // layerCount = 1, maxSeqLen = dims.d[1].
    auto gru = network->addRNNv2(input, 1, nHidden, dims.d[1], RNNOperation::kGRU);
    gru->setDirection(RNNDirection::kUNIDIRECTION);

    // Input-to-hidden weights (isW = true).
    gru->setWeightsForGate(0, RNNGateType::kRESET,  true, weightMap[lname + ".weight_ih_l00"]);
    gru->setWeightsForGate(0, RNNGateType::kUPDATE, true, weightMap[lname + ".weight_ih_l01"]);
    gru->setWeightsForGate(0, RNNGateType::kHIDDEN, true, weightMap[lname + ".weight_ih_l02"]);
    // Hidden-to-hidden (recurrent) weights (isW = false).
    gru->setWeightsForGate(0, RNNGateType::kRESET,  false, weightMap[lname + ".weight_hh_l00"]);
    gru->setWeightsForGate(0, RNNGateType::kUPDATE, false, weightMap[lname + ".weight_hh_l01"]);
    gru->setWeightsForGate(0, RNNGateType::kHIDDEN, false, weightMap[lname + ".weight_hh_l02"]);
    // Input and recurrent biases, same gate mapping.
    gru->setBiasForGate(0, RNNGateType::kRESET,  true, weightMap[lname + ".bias_ih_l00"]);
    gru->setBiasForGate(0, RNNGateType::kUPDATE, true, weightMap[lname + ".bias_ih_l01"]);
    gru->setBiasForGate(0, RNNGateType::kHIDDEN, true, weightMap[lname + ".bias_ih_l02"]);
    gru->setBiasForGate(0, RNNGateType::kRESET,  false, weightMap[lname + ".bias_hh_l00"]);
    gru->setBiasForGate(0, RNNGateType::kUPDATE, false, weightMap[lname + ".bias_hh_l01"]);
    gru->setBiasForGate(0, RNNGateType::kHIDDEN, false, weightMap[lname + ".bias_hh_l02"]);

    // Seed the initial hidden state only when the caller supplied one.
    if (hiddenIn != nullptr) gru->setHiddenState(*hiddenIn);
    return gru;
}
// Adds a single-layer, unidirectional GRU (decoder) to the network and loads
// PyTorch-style weights/biases from weightMap. Mirrors addGRUEncoder except
// for the debug log text.
//
// NOTE(review): same compile-breaking defects as the encoder were fixed —
// INetworkDefinition and the optional hidden state must be pointers (the
// body uses `network->`, a NULL comparison, and `*hiddenIn`), and the
// typographic quotes (“ ”) in string literals were replaced with ASCII ones.
//
// network:   network definition being built (non-owning, must not be null)
// weightMap: named weight blobs; splitLstmWeights adds per-gate sub-entries
// input:     GRU input tensor; dims.d[1] is passed as maxSeqLen —
//            NOTE(review): verify layout; mismatches surface as cuDNN RNN
//            descriptor errors at build time
// index:     caller-supplied id; kept for signature parity with the encoder
//            but not used in this function
// nHidden:   hidden state size
// hiddenIn:  optional initial hidden state (may be nullptr)
// lname:     weight-name prefix, e.g. "decoder.gru"
// returns:   the created RNN layer as ILayer*
ILayer* addGRUDecoder(INetworkDefinition* network, std::map<std::string, Weights>& weightMap, ITensor& input, int index, int nHidden, ITensor* hiddenIn, std::string lname) {
    // Split the fused PyTorch parameter blobs into per-gate entries
    // ("...l00", "...l01", "...l02") inside weightMap.
    splitLstmWeights(weightMap, lname + ".weight_ih_l0");
    splitLstmWeights(weightMap, lname + ".weight_hh_l0");
    splitLstmWeights(weightMap, lname + ".bias_ih_l0");
    splitLstmWeights(weightMap, lname + ".bias_hh_l0");

    Dims dims = input.getDimensions();
    std::cout << "decoder gru input shape: " << dims.nbDims << " ["
              << dims.d[0] << " " << dims.d[1] << " " << dims.d[2] << "]" << std::endl;

    // layerCount = 1, maxSeqLen = dims.d[1].
    auto gru = network->addRNNv2(input, 1, nHidden, dims.d[1], RNNOperation::kGRU);
    gru->setDirection(RNNDirection::kUNIDIRECTION);

    // Input-to-hidden weights (isW = true); l00/l01/l02 -> reset/update/new,
    // matching PyTorch's W_ir|W_iz|W_in packing — TODO confirm split order.
    gru->setWeightsForGate(0, RNNGateType::kRESET,  true, weightMap[lname + ".weight_ih_l00"]);
    gru->setWeightsForGate(0, RNNGateType::kUPDATE, true, weightMap[lname + ".weight_ih_l01"]);
    gru->setWeightsForGate(0, RNNGateType::kHIDDEN, true, weightMap[lname + ".weight_ih_l02"]);
    // Hidden-to-hidden (recurrent) weights (isW = false).
    gru->setWeightsForGate(0, RNNGateType::kRESET,  false, weightMap[lname + ".weight_hh_l00"]);
    gru->setWeightsForGate(0, RNNGateType::kUPDATE, false, weightMap[lname + ".weight_hh_l01"]);
    gru->setWeightsForGate(0, RNNGateType::kHIDDEN, false, weightMap[lname + ".weight_hh_l02"]);
    // Input and recurrent biases, same gate mapping.
    gru->setBiasForGate(0, RNNGateType::kRESET,  true, weightMap[lname + ".bias_ih_l00"]);
    gru->setBiasForGate(0, RNNGateType::kUPDATE, true, weightMap[lname + ".bias_ih_l01"]);
    gru->setBiasForGate(0, RNNGateType::kHIDDEN, true, weightMap[lname + ".bias_ih_l02"]);
    gru->setBiasForGate(0, RNNGateType::kRESET,  false, weightMap[lname + ".bias_hh_l00"]);
    gru->setBiasForGate(0, RNNGateType::kUPDATE, false, weightMap[lname + ".bias_hh_l01"]);
    gru->setBiasForGate(0, RNNGateType::kHIDDEN, false, weightMap[lname + ".bias_hh_l02"]);

    // Seed the initial hidden state only when the caller supplied one.
    if (hiddenIn != nullptr) gru->setHiddenState(*hiddenIn);
    return gru;
}