NVDEC with Unity3D: the Unity editor cannot repaint!

I plugged NVDEC into Unity3D, and I do get the picture in Unity.
But after starting and stopping this plugin several times in a row during testing, the Unity editor can no longer repaint itself — it seems the editor's UI thread is blocked!

It is my implementation below.

I start a worker thread that captures packets and decodes them, then copies the BGRA data into a char array with cuMemcpyDtoH.
The Unity3D main thread invokes a display function once per Unity frame, uploading the data from the char array to D3D11 for rendering.

After testing several times, the error occurs.
From the log file I can see that FFmpeg is still capturing packets and NvDecoder is still decoding frame data.
But the editor definitely never repaints.

This is my code below:

#include "NVDEC_UnityPlugin.h"

#include <atomic>
#include <cstdio>
#include <memory>
#include <mutex>
#include <string>
#include <thread>

using namespace std;
namespace NVDEC_UnityPlugin
{
	// Unity's D3D11 device, captured on the graphics-device initialize event.
	// Stays NULL until then; every exported entry point bails out while it is NULL.
	ID3D11Device* g_D3D11Device = NULL;

	// --------------------------------------------------------------------------
	// Unity plugin-interface state, filled in by UnityPluginLoad.
	static IUnityInterfaces* s_UnityInterfaces = NULL;
	static IUnityGraphics* s_Graphics = NULL;
	// Active renderer backend; kUnityGfxRendererNull until the device initializes.
	static UnityGfxRenderer s_DeviceType = kUnityGfxRendererNull;
	// Handle D3D11-specific device events. Only the initialize event matters:
	// it is when Unity's ID3D11Device becomes available to cache.
	static void DoEventGraphicsDeviceD3D11(UnityGfxDeviceEventType eventType)
	{
		if (eventType != kUnityGfxDeviceEventInitialize)
			return;

		IUnityGraphicsD3D11* d3d11Interface = s_UnityInterfaces->Get<IUnityGraphicsD3D11>();
		g_D3D11Device = d3d11Interface->GetDevice();
	}

	// Central graphics-device lifecycle callback registered with Unity.
	// Tracks the active renderer type and forwards events to the D3D11 handler.
	static void UNITY_INTERFACE_API OnGraphicsDeviceEvent(UnityGfxDeviceEventType eventType)
	{
		// Snapshot the renderer type BEFORE updating it: on shutdown the D3D11
		// handler must still be reached even though s_DeviceType is being reset.
		UnityGfxRenderer currentDeviceType = s_DeviceType;

		if (eventType == kUnityGfxDeviceEventInitialize)
		{
			s_DeviceType = s_Graphics->GetRenderer();
			currentDeviceType = s_DeviceType;
		}
		else if (eventType == kUnityGfxDeviceEventShutdown)
		{
			s_DeviceType = kUnityGfxRendererNull;
		}
		// BeforeReset / AfterReset need no handling here.

#if SUPPORT_D3D11
		if (currentDeviceType == kUnityGfxRendererD3D11)
			DoEventGraphicsDeviceD3D11(eventType);
#endif
	}

	// Called by Unity when the native plugin is loaded. Caches the interface
	// registry and subscribes to graphics-device lifecycle events.
	extern "C" void	UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API UnityPluginLoad(IUnityInterfaces* unityInterfaces)
	{
		s_UnityInterfaces = unityInterfaces;
		s_Graphics = unityInterfaces->Get<IUnityGraphics>();
		s_Graphics->RegisterDeviceEventCallback(OnGraphicsDeviceEvent);

		// The graphics device may already exist by the time the plugin loads,
		// so fire the initialize event manually once.
		OnGraphicsDeviceEvent(kUnityGfxDeviceEventInitialize);
	}

	// Called by Unity just before the plugin unloads; stop receiving device
	// events. NOTE(review): s_Graphics is dereferenced unchecked — assumes
	// UnityPluginLoad always ran first (Unity guarantees this ordering).
	extern "C" void UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API UnityPluginUnload()
	{
		s_Graphics->UnregisterDeviceEventCallback(OnGraphicsDeviceEvent);
	}

	// --------------------------------------------------------------------------

	// Forward declarations for the render-event handlers defined below.
	void DoRendering(int eventID);

	void test(int eventID);

	// Render-thread callback handed to Unity via GetRenderEventFunc().
	static void UNITY_INTERFACE_API OnRenderEvent(int eventID)
	{
		// Bug fix: s_DeviceType is a UnityGfxRenderer enum and is never -1.
		// The "no device yet" sentinel is kUnityGfxRendererNull (its initial
		// value, see the declaration above), so the old `== -1` guard was dead.
		if (s_DeviceType == kUnityGfxRendererNull)
			return;

		// Actual functions defined below
		DoRendering(eventID);
	}

	// Dispatch a render-thread event to its handler.
	void DoRendering(int eventID)
	{
		test(eventID);
	}
	// --------------------------------------------------------------------------

	void test(int eventID)
	{
		DEBUG_LOG("here:" + to_string(eventID));
	}

	// --------------------------------------------------------------------------
	// --------------------------------------------------------------------------
	// State shared between the decode thread (DecodeFrame) and Unity's main
	// thread (Init / Display / Release).
	std::thread t;
	// Fix: these flags are written on one thread and read on another; a plain
	// bool is a data race (UB — the decode loop may never observe the stop
	// request). std::atomic<bool> keeps the exact same read/write syntax.
	std::atomic<bool> tRunning{ false };
	int DecodeFrame();

	FFmpegDemuxer* demuxer = NULL;              // owned raw pointer; freed in Release()
	unsigned char* hostBgra = NULL;             // host staging frame, width*height*4 bytes
	unique_ptr<DX11TextureObj> texObj = NULL;   // D3D11 texture wrapper
	mutex m;                                    // guards hostBgra between the two threads
	// Set by the decode thread once the first full frame landed in hostBgra.
	std::atomic<bool> canDisplay{ false };

	char m_filePath[256];                       // copy of the source path from Init()

	int Init(char* filePath, int* videoWidth, int* videoHeight, void*& texBGRA)
	{
		if (g_D3D11Device == NULL)
		{
			return -999;
		}

		strcpy(m_filePath, filePath);

		//
		//demuxer = new FFmpegDemuxer(filePath);
		//DEBUG_LOG("Create FFmpegDemuxer");
		
		//绘制
		//texObj = make_unique<DX11TextureObj>();
		//texObj->create(g_D3D11Device, demuxer->GetWidth(), demuxer->GetHeight());
		////返回值
		//texObj->getResourcePointers(texBGRA);
		//*videoWidth = demuxer->GetWidth();
		//*videoHeight = demuxer->GetHeight();


		int w = 1920;
		int h = 1080;

		demuxer = new FFmpegDemuxer(filePath);
		w = demuxer->GetWidth();
		h = demuxer->GetHeight();
		//delete demuxer;
		//demuxer = NULL;

		texObj = make_unique<DX11TextureObj>();
		texObj->create(g_D3D11Device, w, h);
		//返回值
		texObj->getResourcePointers(texBGRA);
		*videoWidth = w;
		*videoHeight = h;

		
		hostBgra = new unsigned char[w*h * 4];
		

		//启动解码线程
		//std::thread t = std::thread([]() {});
		tRunning = true;
		t = thread(DecodeFrame);

		DEBUG_LOG("DecoderInit End");
		return 0;
	}

	// Decode-thread entry point (started by Init on a worker thread).
	// Pipeline per iteration: FFmpeg demux -> NVDEC decode -> GPU color-convert
	// to BGRA into dpFrame -> cuMemcpyDtoH into hostBgra (under mutex m) for the
	// Unity thread's Display() to upload.
	// Runs until tRunning is cleared by Release() or the demuxer fails/ends.
	// Returns 0 on normal shutdown, negative on setup failure.
	int DecodeFrame()
	{
		if (g_D3D11Device == NULL)
		{
			return -999;
		}

		//
		/*ID3D11DeviceContext* ctx = NULL;
		g_D3D11Device->GetImmediateContext(&ctx);

		ID3D11RenderTargetView* rtView;
		ID3D11DepthStencilView* dsView;
		ctx->OMGetRenderTargets(1, &rtView, &dsView);*/

		//FFmpegDemuxer*demuxer = new FFmpegDemuxer(m_filePath);
		//DEBUG_LOG("Create FFmpegDemuxer");

		// One BGRA frame: 4 bytes per pixel.
		size_t frameByteSize = demuxer->GetWidth() * demuxer->GetHeight() * 4;

		DEBUG_LOG("DecodeFrame");

		int iGpu = 0;
		int iD3d = 11;

		// Initialize the CUDA driver API on this thread.
		ck(cuInit(0));
		// Validate the GPU ordinal against the device count.
		int nGpu = 0;
		ck(cuDeviceGetCount(&nGpu));
		if (iGpu < 0 || iGpu >= nGpu)
		{
			DEBUG_ERROR("DecoderInit Error: GPU ordinal out of range. Should be within [0," + to_string(nGpu - 1) + "]");
			return -3;
		}

		// Select the CUDA device.
		CUdevice cuDevice = 0;
		ck(cuDeviceGet(&cuDevice, iGpu));

		// Log the device name for diagnostics.
		char szDeviceName[80];
		ck(cuDeviceGetName(szDeviceName, sizeof(szDeviceName), cuDevice));
		DEBUG_LOG("GPU in use:" + ((string)szDeviceName) + ",cuDevice:" + to_string(cuDevice));

		// Create a CUDA context owned by this decode thread.
		// NOTE(review): flag 0 means CU_CTX_SCHED_AUTO, which may spin-wait;
		// CU_CTX_SCHED_BLOCKING_SYNC (commented out) could reduce CPU load.
		CUcontext cuContext;
		ck(cuCtxCreate(&cuContext, /*CU_CTX_SCHED_BLOCKING_SYNC*/0, cuDevice));

		DEBUG_LOG("cuCtxCreate");

		// device frame and host frame
		NvDecoder* dec = new NvDecoder(cuContext, demuxer->GetWidth(), demuxer->GetHeight(), true, FFmpeg2NvCodecId(demuxer->GetVideoCodec()), NULL, false);
		DEBUG_LOG("Create NvDecoder");

		// Device-side scratch buffer for the color-converted BGRA frame.
		CUdeviceptr dpFrame = 0;
		ck(cuMemAlloc(&dpFrame, frameByteSize));
		DEBUG_LOG("dpFrame malloc");

		//
		int nVideoBytes = 0, nFrameReturned = 0;// , nFrame = 0;
		uint8_t *pVideo = NULL, **ppFrame;

		int n = 0;
		int64_t *pTimestamp;

		// Main demux/decode loop; Release() clears tRunning to request exit.
		while (tRunning)
		{
			bool demuxRet = demuxer->Demux(&pVideo, &nVideoBytes);
			if (!demuxRet)
			{
				DEBUG_LOG("Demux Failed");
				break;
			}

			// nVideoBytes == 0 means no packet this round; poll again.
			if (nVideoBytes > 0)
			{
				//dec.Decode(pVideo, nVideoBytes, &ppFrame, &nFrameReturned);
				dec->Decode(pVideo, nVideoBytes, &ppFrame, &nFrameReturned, CUVID_PKT_ENDOFPICTURE, &pTimestamp, n++);

				//DEBUG_LOG("nFrameReturned:" + to_string(nFrameReturned) + ",nVideoBytes:" + to_string(nVideoBytes));

				for (int i = 0; i < nFrameReturned; i++)
				{
					// Convert the decoder's native surface format to 32-bit
					// BGRA on the GPU, writing into dpFrame.
					if (dec->GetBitDepth() == 8)
					{
						if (dec->GetOutputFormat() == cudaVideoSurfaceFormat_YUV444)
							YUV444ToColor32<BGRA32>((uint8_t *)ppFrame[i], dec->GetWidth(), (uint8_t *)dpFrame, 4 * dec->GetWidth(), dec->GetWidth(), dec->GetHeight());
						else    // default assumed as NV12
							Nv12ToColor32<BGRA32>((uint8_t *)ppFrame[i], dec->GetWidth(), (uint8_t *)dpFrame, 4 * dec->GetWidth(), dec->GetWidth(), dec->GetHeight());
					}
					else
					{
						if (dec->GetOutputFormat() == cudaVideoSurfaceFormat_YUV444_16Bit)
							YUV444P16ToColor32<BGRA32>((uint8_t *)ppFrame[i], 2 * dec->GetWidth(), (uint8_t *)dpFrame, 4 * dec->GetWidth(), dec->GetWidth(), dec->GetHeight());
						else // default assumed as P016
							P016ToColor32<BGRA32>((uint8_t *)ppFrame[i], 2 * dec->GetWidth(), (uint8_t *)dpFrame, 4 * dec->GetWidth(), dec->GetWidth(), dec->GetHeight());
					}

					// Copy the finished frame to host memory; the mutex keeps
					// Display() from uploading a half-written buffer.
					m.lock();
					ck(cuMemcpyDtoH(hostBgra, dpFrame, frameByteSize));
					m.unlock();

					// First complete frame: allow Display() to start uploading.
					if (!canDisplay)
					{
						canDisplay = true;
					}
				}
			}
		}

		delete dec;
		dec = NULL;

		ck(cuMemFree(dpFrame));
		ck(cuCtxDestroy(cuContext));

		DEBUG_LOG("Decode Thread exit");

		return 0;
	}

	int Display()
	{
		if (g_D3D11Device == NULL)
		{
			return -999;
		}

		if (!tRunning)
		{
			DEBUG_ERROR("Decoder Thread is not Running !");
			return -1;
		}

		if (texObj == NULL)
		{
			return -2;
		}

		if (!canDisplay)
		{
			return 0;
		}

		m.lock();
		texObj->upload(hostBgra);
		m.unlock();

		return 0;
	}

	// Tear down the pipeline: stop and join the decode thread, then free the
	// demuxer, texture object and host buffer. Safe to call more than once.
	// Returns 0 on success, -999 if no D3D11 device was ever acquired.
	int Release()
	{
		if (g_D3D11Device == NULL)
		{
			return -999;
		}

		canDisplay = false;

		// Signal the decode thread to exit and wait for it — the buffers it
		// reads/writes are freed below, so the join must happen first.
		tRunning = false;
		if (t.joinable())
		{
			t.join();
		}

		// Free the demuxer (raw owning pointer, so explicit delete).
		if (demuxer != NULL)
		{
			delete demuxer;
			demuxer = NULL;
		}

		// Release the texture object (unique_ptr handles the destruction).
		if (texObj != NULL)
		{
			texObj = NULL;
		}

		// Fix: null the pointer after delete[] — the old code left a dangling
		// pointer, so a second Release() was a double free.
		delete[] hostBgra;
		hostBgra = NULL;

		DEBUG_LOG("DecoderRelease");
		return 0;
	}
}

#pragma region ForUnity

// Exported C-ABI entry points for the managed (C#) side to bind against.
#define _DLLExport __declspec (dllexport)

// Sanity-check export: lets the managed side verify the DLL loaded and the
// binding works by checking for the magic return value.
extern "C" _DLLExport int NativeTest() {
	DEBUG_LOG("NativeTest start");
	const int kMagicValue = 123456789;
	return kMagicValue;
}

// Exported wrapper around Init().
extern "C" _DLLExport int NativeInit(char* filePath, int* videoWidth, int* videoHeight, void*& texBGRA) {
	DEBUG_LOG("NativeDecoderInit==============================================================================================");
	// Fix: the namespace declared in this file is NVDEC_UnityPlugin, not
	// ZCamE2_NVDEC_UnityPlugin — the old qualifier does not compile here.
	return NVDEC_UnityPlugin::Init(filePath, videoWidth, videoHeight, texBGRA);
}

// Exported wrapper around Display(); called once per Unity frame.
extern "C" _DLLExport int NativeDisplay()
{
	// Fix: qualify with the namespace actually declared in this file
	// (NVDEC_UnityPlugin), not ZCamE2_NVDEC_UnityPlugin.
	return NVDEC_UnityPlugin::Display();
}

// Exported wrapper around Release().
extern "C" _DLLExport int NativeRelease()
{
	DEBUG_LOG("NativeRelease==============================================================================================");
	// Fix: qualify with the namespace actually declared in this file
	// (NVDEC_UnityPlugin), not ZCamE2_NVDEC_UnityPlugin.
	return NVDEC_UnityPlugin::Release();
}



// Hands Unity the render-thread callback to invoke via GL.IssuePluginEvent /
// CommandBuffer.IssuePluginEvent.
extern "C" UnityRenderingEvent UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API GetRenderEventFunc()
{
	// Fix: qualify with the namespace actually declared in this file
	// (NVDEC_UnityPlugin), not ZCamE2_NVDEC_UnityPlugin.
	return NVDEC_UnityPlugin::OnRenderEvent;
}
#pragma endregion

I know the host copy could also be replaced by copying the CUdeviceptr into an ID3D11Texture2D directly (CUDA–D3D11 interop), but I do not know how to do that — and Unity already owns an ID3D11Device.

In total, two questions — I look forward to your reply!