TensorRT 5.1.5.0: Engine Refitter should copy weights during the call to refitter.set_weights

I find that the refitter only reads the weight buffers inside the refit_cuda_engine call; if we don't keep the arrays alive until refit_cuda_engine is called, the refitter will silently read invalid memory.
See the function below (it refits an engine from a PyTorch module):

def refit_engine(self, net):
        """Refit ``self.engine`` in place with weights taken from a PyTorch module.

        Walks ``self.refit_weight_dict`` (layer name -> descriptor dict with a
        "type" key and state-dict key names) and pushes the corresponding
        tensors from ``net.state_dict()`` into a TensorRT Refitter.

        Args:
            net: PyTorch module whose state_dict supplies the new weights.
                Its parameter names must match the keys recorded in
                ``self.refit_weight_dict``.

        Raises:
            RuntimeError: if weights are still missing after all
                ``set_weights`` calls, or if ``refit_cuda_engine`` fails.
            NotImplementedError: on an unsupported layer type descriptor.
        """
        with trt.Refitter(self.engine, self.logger) as refitter:
            state_dict = net.state_dict()
            # TensorRT 5.1.5.0's Refitter does NOT copy weight data in
            # set_weights(); it only dereferences the buffers later, inside
            # refit_cuda_engine(). Every numpy array handed to set_weights()
            # must therefore be kept alive (in `variables`) until that call,
            # otherwise the refitter silently reads freed memory.
            variables = []

            def _prepare(tensor, transpose=False, cast_float=False):
                # Convert a torch tensor to a numpy array safe for set_weights:
                # - .t() yields a strided (non-contiguous) view, and .numpy()
                #   preserves those strides; TensorRT reads raw memory and
                #   expects C-contiguous data, so force a contiguous copy.
                # - appending to `variables` keeps the buffer alive until
                #   refit_cuda_engine() (see note above).
                if transpose:
                    tensor = tensor.t()
                if cast_float:
                    tensor = tensor.float()
                arr = np.ascontiguousarray(tensor.detach().cpu().numpy())
                variables.append(arr)
                return arr

            for k, v in self.refit_weight_dict.items():
                if v["type"] == "Linear":
                    # PyTorch Linear stores (out, in); TensorRT expects the
                    # transpose for a fully-connected kernel.
                    refitter.set_weights(k, trt.WeightsRole.KERNEL,
                            _prepare(state_dict[v["weight"]], transpose=True))
                    if "bias" in v:
                        refitter.set_weights(k, trt.WeightsRole.BIAS,
                                _prepare(state_dict[v["bias"]]))
                elif v["type"] == "Convolution":
                    # NOTE(review): only the conv kernel was cast to float32
                    # in the original code (not the bias, nor Linear weights);
                    # preserved as-is — confirm intent for non-fp32 models.
                    refitter.set_weights(k, trt.WeightsRole.KERNEL,
                            _prepare(state_dict[v["weight"]], cast_float=True))
                    if "bias" in v:
                        refitter.set_weights(k, trt.WeightsRole.BIAS,
                                _prepare(state_dict[v["bias"]]))
                elif v["type"] == "BatchNorm":
                    # Fold BatchNorm into TensorRT's Scale layer form:
                    #   y = scale * x + shift
                    # scale = gamma / sqrt(var + eps)
                    # shift = beta - mean * scale
                    running_mean = state_dict[v["running_mean"]].detach().cpu().numpy()
                    running_var = state_dict[v["running_var"]].detach().cpu().numpy()
                    weight = state_dict[v["weight"]].detach().cpu().numpy()
                    bias = state_dict[v["bias"]].detach().cpu().numpy()
                    eps = v["eps"]
                    std = np.sqrt(running_var + eps)
                    scale = np.ascontiguousarray(weight / std)
                    shift = np.ascontiguousarray((-running_mean / std) * weight + bias)
                    variables.append(scale)
                    variables.append(shift)
                    refitter.set_weights(k, trt.WeightsRole.SCALE,
                            scale)
                    refitter.set_weights(k, trt.WeightsRole.SHIFT,
                            shift)
                else:
                    raise NotImplementedError(
                            "unsupported refit layer type: {}".format(v["type"]))
            # Any still-missing weights mean refit_weight_dict is incomplete.
            # Use explicit raises, not assert: asserts are stripped under -O,
            # and the original `assert refitter.refit_cuda_engine()` would
            # skip the refit call entirely in optimized mode.
            missingLayers, weightRoles = refitter.get_missing()
            if missingLayers:
                raise RuntimeError(
                        "Refitter found missing weights. Call set_weights() "
                        "for all missing weights: {}".format(
                                list(zip(missingLayers, weightRoles))))
            if not refitter.refit_cuda_engine():
                raise RuntimeError("refit_cuda_engine() failed")