/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// Sets up the preview texture, the detection processing area, the quad material's
/// shader parameters (including a hard-coded HoloLens camera projection matrix),
/// and loads the face-detection cascade before starting the worker thread.
/// </summary>
public void OnWebCamTextureToMatHelperInited()
{
    Debug.Log("OnWebCamTextureToMatHelperInited");

    // Work on a down-scaled copy of the camera frame to reduce per-frame processing cost.
    Mat webCamTextureMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

    // The preview texture matches the down-scaled mat's dimensions (cols x rows).
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Central sub-rectangle of the frame used for detection: outsideClippingRatio trims the
    // borders, clippingOffset shifts the area, and the result is clamped to the frame bounds
    // via intersect() so it never exceeds the image.
    processingAreaRect = new OpenCVForUnity.Rect((int)(webCamTextureMat.cols() * (outsideClippingRatio.x - clippingOffset.x)), (int)(webCamTextureMat.rows() * (outsideClippingRatio.y + clippingOffset.y)),
        (int)(webCamTextureMat.cols() * (1f - outsideClippingRatio.x * 2)), (int)(webCamTextureMat.rows() * (1f - outsideClippingRatio.y * 2)));
    processingAreaRect = processingAreaRect.intersect(new OpenCVForUnity.Rect(0, 0, webCamTextureMat.cols(), webCamTextureMat.rows()));

    quad_renderer = gameObject.GetComponent <Renderer> () as Renderer;
    // NOTE: sharedMaterial is modified here, so these shader values affect every object
    // using this material, not just this instance.
    quad_renderer.sharedMaterial.SetTexture("_MainTex", texture);
    quad_renderer.sharedMaterial.SetVector("_VignetteOffset", new Vector4(clippingOffset.x, clippingOffset.y));

    // Hard-coded projection matrix obtained from PhotoCapture's TryGetProjectionMatrix().
    // It is not certain this approach is correct; see the discussion at:
    // https://forums.hololens.com/discussion/782/live-stream-of-locatable-camera-webcam-in-unity
    Matrix4x4 projectionMatrix = Matrix4x4.identity;
    projectionMatrix.m00 = 2.31029f;
    projectionMatrix.m01 = 0.00000f;
    projectionMatrix.m02 = 0.09614f;
    projectionMatrix.m03 = 0.00000f;
    projectionMatrix.m10 = 0.00000f;
    projectionMatrix.m11 = 4.10427f;
    projectionMatrix.m12 = -0.06231f;
    projectionMatrix.m13 = 0.00000f;
    projectionMatrix.m20 = 0.00000f;
    projectionMatrix.m21 = 0.00000f;
    projectionMatrix.m22 = -1.00000f;
    projectionMatrix.m23 = 0.00000f;
    projectionMatrix.m30 = 0.00000f;
    projectionMatrix.m31 = 0.00000f;
    projectionMatrix.m32 = -1.00000f;
    projectionMatrix.m33 = 0.00000f;
    quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
    // Scale 0 appears to disable the vignette effect in the shader — TODO confirm against the shader source.
    quad_renderer.sharedMaterial.SetFloat("_VignetteScale", 0.0f);

#if !UNITY_WEBGL || UNITY_EDITOR
    // Detection resources are only created where threading is available (not on WebGL player).
    grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
    regionCascade = new CascadeClassifier();
    regionCascade.load(Utils.getFilePath("lbpcascade_frontalface.xml"));
    if (regionCascade.empty()) {
        Debug.LogError("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    initThread();
#endif
}
/// <summary>
/// Initializes a new tracked object from its first detected rectangle.
/// </summary>
/// <param name="rect">Rectangle of the initial detection that seeds the position history.</param>
public TrackedObject(OpenCVForUnity.Rect rect)
{
    // Seed the position history with the first detection.
    lastPositions = new PositionsVector();
    lastPositions.Add(rect);

    // Exactly one frame observed so far; no frames missed yet.
    numDetectedFrames = 1;
    numFramesNotDetected = 0;

    // Assign a fresh unique identifier to this tracked object.
    id = _id = getNextId();
}
// Update is called once per frame.
// Drains main-thread callbacks queued by the detection worker, preprocesses the latest
// camera frame, hands a copy to the background cascade detector, and runs Dlib landmark
// detection + AR head-pose update on the best tracked face rectangle.
void Update()
{
    // Run actions queued by the background detection thread on the main thread.
    lock (sync) {
        while (ExecuteOnMainThread.Count > 0) {
            ExecuteOnMainThread.Dequeue().Invoke();
        }
    }

    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {

        Mat rgbaMat = imageOptimizationHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

        // Common preprocessing for cascade detection: grayscale + histogram equalization.
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        // Kick off cascade detection on a worker thread; the isDetecting flag prevents
        // overlapping runs, and the frame is copied so the worker owns its own buffer.
        if (enableDetection && !isDetecting) {
            isDetecting = true;
            grayMat.copyTo(grayMat4Thread);
            StartThread(ThreadWorker);
        }

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

        Rect[] rects;
        if (!useSeparateDetection) {
            // Whole-frame mode: feed full detection results to the tracker and use
            // speed-corrected rectangles between detections.
            if (hasUpdatedDetectionResult) {
                hasUpdatedDetectionResult = false;
                rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
            }
            rectangleTracker.GetObjects(resultObjects, true);
            rects = rectangleTracker.CreateCorrectionBySpeedOfRects();

            if (rects.Length > 0) {
                // Only the first (primary) face rectangle is processed.
                OpenCVForUnity.Rect rect = rects [0];

                // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
                rect.y += (int)(rect.height * 0.1f);

                // detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                if (enableOpticalFlowFilter) {
                    // Smooth landmark jitter between frames; filters `points` in place.
                    opticalFlowFilter.Process(rgbaMat, points, points, false);
                }

                UpdateARHeadTransform(points, arCamera.cameraToWorldMatrix);

                if (displayCameraPreview) {
                    // draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                }
            }
        } else {
            // Separate-detection mode: re-run the cascade only inside candidate regions,
            // taken from the latest full detection result when available, otherwise
            // extrapolated from the tracker's speed estimates.
            if (hasUpdatedDetectionResult) {
                hasUpdatedDetectionResult = false;

                //Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                rectsWhereRegions = detectionResult.toArray();
            } else {
                //Debug.Log("process: get rectsWhereRegions from previous positions");
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
            }

            detectedObjectsInRegions.Clear();
            if (rectsWhereRegions.Length > 0) {
                int len = rectsWhereRegions.Length;
                for (int i = 0; i < len; i++) {
                    DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                }
            }

            rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
            rectangleTracker.GetObjects(resultObjects, true);

            if (resultObjects.Count > 0) {
                // Only the first (primary) face rectangle is processed.
                OpenCVForUnity.Rect rect = resultObjects [0];

                // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
                rect.y += (int)(rect.height * 0.1f);

                // detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                if (enableOpticalFlowFilter) {
                    // Smooth landmark jitter between frames; filters `points` in place.
                    opticalFlowFilter.Process(rgbaMat, points, points, false);
                }

                UpdateARHeadTransform(points, arCamera.cameraToWorldMatrix);

                if (displayCameraPreview) {
                    // draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                }
            }
        }

        if (displayCameraPreview) {
            OpenCVForUnity.Utils.fastMatToTexture2D(rgbaMat, texture);
        }
    }
}
/// <summary>
/// Frame callback invoked off the Unity main thread with the latest locatable-camera frame.
/// Performs detection/tracking on a down-scaled copy, then marshals the preview upload,
/// AR head-pose update, and mat disposal back onto the app (main) thread.
/// </summary>
/// <param name="bgraMat">Full-resolution BGRA camera frame. Ownership passes to this method;
/// it is disposed on the app thread at the end.</param>
/// <param name="projectionMatrix">Camera projection matrix for this frame (unused here).</param>
/// <param name="cameraToWorldMatrix">Camera-to-world transform used for the AR head pose.</param>
public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
{
    Mat downScaleFrameMat = imageOptimizationHelper.GetDownScaleMat(bgraMat);

    // Common preprocessing for cascade detection: grayscale + histogram equalization.
    Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
    Imgproc.equalizeHist(grayMat, grayMat);

    // Run cascade detection on the thread pool; the isDetecting flag prevents overlapping
    // runs, and the frame is copied so the task owns its own buffer.
    if (enableDetection && !isDetecting) {
        isDetecting = true;
        grayMat.copyTo(grayMat4Thread);
        System.Threading.Tasks.Task.Run(() => {
            isThreadRunning = true;
            DetectObject();
            isThreadRunning = false;
            OnDetectionDone();
        });
    }

    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

    // Copy the down-scaled frame for preview drawing so landmark overlays do not touch
    // the frame used for processing.
    Mat bgraMat4preview = null;
    if (displayCameraPreview) {
        bgraMat4preview = new Mat();
        downScaleFrameMat.copyTo(bgraMat4preview);
    }

    List <Vector2> points = null;
    Rect[] rects;
    if (!useSeparateDetection) {
        // Whole-frame mode: feed full detection results to the tracker.
        // rectangleTracker is shared with the detection task, hence the locks.
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;
            lock (rectangleTracker) {
                rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
            }
        }
        lock (rectangleTracker) {
            rectangleTracker.GetObjects(resultObjects, true);
        }
        rects = resultObjects.ToArray();

        if (rects.Length > 0) {
            // Only the first (primary) face rectangle is processed.
            OpenCVForUnity.Rect rect = rects [0];

            // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
            rect.y += (int)(rect.height * 0.1f);

            // detect landmark points
            points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

            if (enableOpticalFlowFilter) {
                // NOTE(review): this passes the full-resolution bgraMat while the landmarks were
                // detected on the down-scaled mat — verify the filter expects this resolution
                // (the Update() variant passes the down-scaled frame instead).
                opticalFlowFilter.Process(bgraMat, points, points, false);
            }

            if (displayCameraPreview && bgraMat4preview != null) {
                // draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(bgraMat4preview, points, new Scalar(0, 255, 0, 255), 2);
            }
        }
    } else {
        // Separate-detection mode: re-run the cascade only inside candidate regions,
        // taken from the latest full detection result when available, otherwise
        // extrapolated from the tracker's speed estimates.
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;

            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log("process: get rectsWhereRegions were got from detectionResult");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = detectionResult.toArray();
            }
        } else {
            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log("process: get rectsWhereRegions from previous positions");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
            }
        }

        detectedObjectsInRegions.Clear();
        if (rectsWhereRegions.Length > 0) {
            int len = rectsWhereRegions.Length;
            for (int i = 0; i < len; i++) {
                DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
            }
        }

        lock (rectangleTracker) {
            rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
            rectangleTracker.GetObjects(resultObjects, true);
        }

        if (resultObjects.Count > 0) {
            // Only the first (primary) face rectangle is processed.
            OpenCVForUnity.Rect rect = resultObjects [0];

            // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
            rect.y += (int)(rect.height * 0.1f);

            // detect landmark points
            points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

            if (enableOpticalFlowFilter) {
                // NOTE(review): full-resolution bgraMat vs. down-scaled landmarks — see note above
                // in this method; verify the expected resolution.
                opticalFlowFilter.Process(bgraMat, points, points, false);
            }

            if (displayCameraPreview && bgraMat4preview != null) {
                // draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(bgraMat4preview, points, new Scalar(0, 255, 0, 255), 2);
            }
        }
    }

    // Marshal Unity API work back to the app thread: texture upload, head-pose update,
    // and disposal of the mats captured by this closure. The early return skips the pose
    // update once the camera stops, but note it also skips disposing the captured mats.
    UnityEngine.WSA.Application.InvokeOnAppThread(() => {

        if (!webCamTextureToMatHelper.IsPlaying()) {
            return;
        }

        if (displayCameraPreview && bgraMat4preview != null) {
            OpenCVForUnity.Utils.fastMatToTexture2D(bgraMat4preview, texture);
        }

        if (points != null) {
            UpdateARHeadTransform(points, cameraToWorldMatrix);
        }

        bgraMat.Dispose();
        if (bgraMat4preview != null) {
            bgraMat4preview.Dispose();
        }
    }, false);
}
// Update is called once per frame.
// Simpler variant without optical-flow filtering or preview landmark drawing: drains
// main-thread callbacks from the detection worker, preprocesses the frame, dispatches
// background detection, and updates the AR head transform from Dlib landmarks.
void Update()
{
    // Run actions queued by the background detection thread on the main thread.
    lock (sync) {
        while (ExecuteOnMainThread.Count > 0) {
            ExecuteOnMainThread.Dequeue().Invoke();
        }
    }

    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {

        Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

        // Common preprocessing for cascade detection: grayscale + histogram equalization.
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        // Kick off cascade detection on a worker thread; the detecting flag prevents
        // overlapping runs, and the frame is copied so the worker owns its own buffer.
        if (enable && !detecting) {
            detecting = true;
            grayMat.copyTo(grayMat4Thread);
            StartThread(ThreadWorker);
        }

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

        Rect[] rects;
        if (!isUsingSeparateDetection) {
            // Whole-frame mode: feed full detection results to the tracker and use
            // speed-corrected rectangles between detections.
            if (didUpdateTheDetectionResult) {
                didUpdateTheDetectionResult = false;
                rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
            }
            rectangleTracker.GetObjects(resultObjects, true);
            rects = rectangleTracker.CreateCorrectionBySpeedOfRects();

            if (rects.Length > 0) {
                // Only the first (primary) face rectangle is processed.
                OpenCVForUnity.Rect rect = rects [0];

                // Adjust to Dilb's result.
                rect.y += (int)(rect.height * 0.1f);

                // detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                UpdateARHeadTransform(points);
            }
        } else {
            // Separate-detection mode: re-run the cascade only inside candidate regions,
            // taken from the latest full detection result when available, otherwise
            // extrapolated from the tracker's speed estimates.
            if (didUpdateTheDetectionResult) {
                didUpdateTheDetectionResult = false;

                //Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                rectsWhereRegions = detectionResult.toArray();
            } else {
                //Debug.Log("process: get rectsWhereRegions from previous positions");
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
            }

            detectedObjectsInRegions.Clear();
            if (rectsWhereRegions.Length > 0) {
                int len = rectsWhereRegions.Length;
                for (int i = 0; i < len; i++) {
                    detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                }
            }

            rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
            rectangleTracker.GetObjects(resultObjects, true);

            if (resultObjects.Count > 0) {
                // Only the first (primary) face rectangle is processed.
                OpenCVForUnity.Rect rect = resultObjects [0];

                // Adjust to Dilb's result.
                rect.y += (int)(rect.height * 0.1f);

                // detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                UpdateARHeadTransform(points);
            }
        }
    }
}