void Update()
{
    // Reinitialize if the connection was lost last frame or is not present on start.
    if (cameraState == CameraState.disconnected || lastError < 0)
    {
        bool initialized = Initialize();
        if (!initialized)
        {
            return;
        }
    }

    // Main call into the native dll: runs face detection on an OpenCV frame and
    // returns the data needed to build output in Unity.
    cvFrame = GetFaceDetectionResults();

    // Handle error cases returned by the dll, e.g. a disconnected usb device.
    if (cvFrame.error < 0)
    {
        OpenCVInterop.Close();
        cameraState = CameraState.disconnected;
        lastError = cvFrame.error; // so we know to reinitialize rather than run demo next frame
        return;
    }

    // Guard against a degenerate frame: Marshal.Copy throws on a null pointer,
    // and a non-positive buffer size means there is nothing to display.
    if (cvFrame.memPtr == IntPtr.Zero || cvFrame.bufferSize <= 0)
    {
        return;
    }

    // Marshal the pixel data out of native memory, then always free the native
    // buffer — even if OutputFrame throws — to avoid leaking it.
    byte[] bytes = new byte[cvFrame.bufferSize];
    Marshal.Copy(cvFrame.memPtr, bytes, 0, cvFrame.bufferSize);
    try
    {
        OutputFrame(cvFrame, bytes);
    }
    finally
    {
        OpenCVInterop.FreeMemory();
    }
}
/// <summary>
/// Uploads a raw RGB24 frame into a Texture2D and displays it on the RawImage.
/// Recreates the texture (main-thread only) when the frame dimensions change.
/// </summary>
/// <param name="cvf">Frame metadata; x/y are the pixel dimensions of <paramref name="bytes"/>.</param>
/// <param name="bytes">Raw RGB24 pixel data, expected length cvf.x * cvf.y * 3.</param>
public void OutputFrame(CvFrame cvf, byte[] bytes)
{
    if (cvf.x != oldFrameSize.x || cvf.y != oldFrameSize.y)
    {
        // Unity native texture memory is not garbage-collected: explicitly
        // destroy the old texture before replacing it, or repeated resolution
        // changes leak GPU memory.
        if (tex != null)
        {
            Destroy(tex);
        }
        tex = new Texture2D(cvf.x, cvf.y, TextureFormat.RGB24, false); // must be called on main thread
        oldFrameSize.x = cvf.x;
        oldFrameSize.y = cvf.y;
    }
    tex.LoadRawTextureData(bytes);
    tex.Apply(); // push the CPU-side pixel data to the GPU
    rawImage.texture = tex;
    rawImage.SetNativeSize();
}