// Reads the OpenCV Rect wrapped by the "owner" FSM variable and stores its area in storeResult.
void DoProcess()
{
    // Guard: owner must already hold a wrapped Rect (created by the "newRect" action).
    bool ownerIsRect = owner.Value is OpenCVForUnityPlayMakerActions.Rect;
    if (!ownerIsRect)
    {
        LogError("owner is not initialized. Add Action \"newRect\".");
        return;
    }

    // Unwrap the PlayMaker wrapper to get the underlying OpenCV rect.
    OpenCVForUnity.CoreModule.Rect rect =
        OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject<OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(owner);

    storeResult.Value = (float)rect.area();
}
// tracker
/// <summary>
/// Fills <paramref name="result"/> with the current display rectangle of every
/// tracked object, skipping objects whose computed rectangle is empty.
/// </summary>
private void GetObjects(List<Rect> result)
{
    result.Clear();
    for (int index = 0; index < trackedObjects.Count; index++)
    {
        Rect position = CalcTrackedObjectPositionToShow(index);
        // An empty rectangle means there is nothing to show for this object.
        if (position.area() != 0)
        {
            result.Add(position);
        }
    }
}
/// <summary>
/// Processes points by filter (sparse Lucas-Kanade optical flow).
/// Tracks the previous frame's points into the current frame; when the measured
/// movement exceeds an adaptive threshold the raw detector points are used
/// instead of the optical-flow estimates, and the tracker is resynced to them.
/// </summary>
/// <param name="img">Image mat (RGBA, RGB, or already single-channel).</param>
/// <param name="srcPoints">Input points; must hold exactly numberOfElements entries, or be null.</param>
/// <param name="dstPoints">Output points; allocated/resized here when null or wrongly sized.</param>
/// <param name="drawDebugPoints">if true, draws debug points and logs which source was used.</param>
/// <returns>Output points.</returns>
public override List<Vector2> Process(Mat img, List<Vector2> srcPoints, List<Vector2> dstPoints = null, bool drawDebugPoints = false)
{
    if (srcPoints != null && srcPoints.Count != numberOfElements)
    {
        throw new ArgumentException("The number of elements is different.");
    }

    // No input this frame: hand back whichever list the caller provided.
    if (srcPoints == null)
    {
        return (dstPoints == null ? srcPoints : dstPoints);
    }

    // First call: seed prevgray and prevTrackPts from the current frame so the
    // optical-flow step below has a previous state to track from.
    if (!flag)
    {
        if (img.channels() == 4)
        {
            Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGBA2GRAY);
        }
        else if (img.channels() == 3)
        {
            Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGB2GRAY);
        }
        else
        {
            // Input is already single-channel: clone on first use, copy afterwards.
            if (prevgray.total() == 0)
            {
                prevgray = img.clone();
            }
            else
            {
                img.copyTo(prevgray);
            }
        }
        for (int i = 0; i < numberOfElements; i++)
        {
            prevTrackPts[i] = new Point(srcPoints[i].x, srcPoints[i].y);
        }
        flag = true;
    }

    if (srcPoints != null)
    {
        // Ensure dstPoints exists and holds exactly numberOfElements slots.
        if (dstPoints == null)
        {
            dstPoints = new List<Vector2>();
        }
        if (dstPoints != null && dstPoints.Count != numberOfElements)
        {
            dstPoints.Clear();
            for (int i = 0; i < numberOfElements; i++)
            {
                dstPoints.Add(new Vector2());
            }
        }

        // Current frame in grayscale (same channel handling as the seeding step).
        if (img.channels() == 4)
        {
            Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGBA2GRAY);
        }
        else if (img.channels() == 3)
        {
            Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGB2GRAY);
        }
        else
        {
            if (gray.total() == 0)
            {
                gray = img.clone();
            }
            else
            {
                img.copyTo(gray);
            }
        }

        if (prevgray.total() > 0)
        {
            // Track prevTrackPts from prevgray into gray; results land in nextTrackPts.
            mOP2fPrevTrackPts.fromList(prevTrackPts);
            mOP2fNextTrackPts.fromList(nextTrackPts);
            Video.calcOpticalFlowPyrLK(prevgray, gray, mOP2fPrevTrackPts, mOP2fNextTrackPts, status, err);
            prevTrackPts = mOP2fPrevTrackPts.toList();
            nextTrackPts = mOP2fNextTrackPts.toList();

            // calc diffDlib: movement threshold scaled by the tracked region's area
            // (40000 presumably corresponds to a 200x200 reference region — TODO confirm)
            // and the sensitivity setting.
            prevTrackPtsMat.fromList(prevTrackPts);
            OpenCVForUnity.CoreModule.Rect rect = Imgproc.boundingRect(prevTrackPtsMat);
            double diffDlib = this.diffDlib * rect.area() / 40000.0 * diffCheckSensitivity;

            // if the face is moving so fast, use dlib to detect the face
            double diff = calDistanceDiff(prevTrackPts, nextTrackPts);
            if (drawDebugPoints)
            {
                Debug.Log("variance:" + diff);
            }
            if (diff > diffDlib)
            {
                // Fast motion: trust the detector points and resync the tracker to them.
                for (int i = 0; i < numberOfElements; i++)
                {
                    nextTrackPts[i].x = srcPoints[i].x;
                    nextTrackPts[i].y = srcPoints[i].y;
                    dstPoints[i] = srcPoints[i];
                }
                if (drawDebugPoints)
                {
                    Debug.Log("DLIB");
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        Imgproc.circle(img, new Point(srcPoints[i].x, srcPoints[i].y), 2, new Scalar(255, 0, 0, 255), -1);
                    }
                }
            }
            else
            {
                // In this case, use Optical Flow
                for (int i = 0; i < numberOfElements; i++)
                {
                    dstPoints[i] = new Vector2((float)nextTrackPts[i].x, (float)nextTrackPts[i].y);
                }
                if (drawDebugPoints)
                {
                    Debug.Log("Optical Flow");
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        Imgproc.circle(img, nextTrackPts[i], 2, new Scalar(0, 0, 255, 255), -1);
                    }
                }
            }
        }

        // Current state becomes the previous state for the next frame.
        Swap(ref prevTrackPts, ref nextTrackPts);
        Swap(ref prevgray, ref gray);
    }
    return (dstPoints);
}
/// <summary>
/// Matches freshly detected rectangles against the tracked objects, updates each
/// object's position history and display state, and prunes dead tracks.
/// correspondence[j] holds either the matched track index (>= 0) or one of the
/// negative TrackedRectState sentinels (NEW_RECTANGLE / INTERSECTED_RECTANGLE).
/// </summary>
/// <param name="detectedObjects">Rectangles produced by the detector this frame.</param>
/// <exception cref="ArgumentNullException">detectedObjects is null.</exception>
public void UpdateTrackedObjects(List<Rect> detectedObjects)
{
    if (detectedObjects == null)
    {
        throw new ArgumentNullException("detectedObjects");
    }

    // Predicted positions: last known rects corrected by each object's speed.
    Rect[] correctionRects = CreateCorrectionBySpeedOfRects();

    int N1 = (int)_trackedObjects.Count;
    int N2 = (int)detectedObjects.Count;

    // Every existing track has survived one more frame.
    for (int i = 0; i < N1; i++)
    {
        _trackedObjects[i].numDetectedFrames++;
    }

    // All detections start out as candidate new rectangles.
    int[] correspondence = Enumerable.Repeat<int>((int)TrackedRectState.NEW_RECTANGLE, N2).ToArray();

    // First cycle: for each track, find the detection with the largest overlap.
    for (int i = 0; i < N1; i++)
    {
        TrackedObject curObject = _trackedObjects[i];
        int bestIndex = -1;
        int bestArea = -1;
        Rect prevRect = correctionRects[i];

        for (int j = 0; j < N2; j++)
        {
            // Already assigned to an earlier track.
            if (correspondence[j] >= 0)
            {
                continue;
            }
            // Already consumed as an intersection of an earlier match.
            if (correspondence[j] != (int)TrackedRectState.NEW_RECTANGLE)
            {
                continue;
            }
            if (IsCollideByRectangle(prevRect, detectedObjects[j], _trackerParameters.coeffRectangleOverlap))
            {
                Rect r = Intersect(prevRect, detectedObjects[j]);
                if ((r.width > 0) && (r.height > 0))
                {
                    // Mark as intersected; the best one is promoted to a match below.
                    correspondence[j] = (int)TrackedRectState.INTERSECTED_RECTANGLE;
                    if (r.area() > bestArea)
                    {
                        bestIndex = j;
                        bestArea = (int)r.area();
                    }
                }
            }
        }

        if (bestIndex >= 0)
        {
            // Assign the best detection to this track, then absorb any remaining
            // detections that overlap the winner so they don't spawn new tracks.
            correspondence[bestIndex] = i;
            Rect bestRect = detectedObjects[bestIndex];
            for (int j = 0; j < N2; j++)
            {
                if (correspondence[j] >= 0)
                {
                    continue;
                }
                if (IsCollideByRectangle(detectedObjects[j], bestRect, _trackerParameters.coeffRectangleOverlap))
                {
                    Rect r = Intersect(detectedObjects[j], bestRect);
                    if ((r.width > 0) && (r.height > 0))
                    {
                        correspondence[j] = (int)TrackedRectState.INTERSECTED_RECTANGLE;
                    }
                }
            }
        }
        else
        {
            // No detection matched this track this frame.
            curObject.numFramesNotDetected++;
        }
    }

    // Second cycle: apply the correspondence to the tracks.
    for (int j = 0; j < N2; j++)
    {
        int i = correspondence[j];
        if (i >= 0)
        {
            // Matched: append the position, capped to numLastPositionsToTrack entries.
            _trackedObjects[i].lastPositions.Add(detectedObjects[j]);
            while ((int)_trackedObjects[i].lastPositions.Count > (int)_trackerParameters.numLastPositionsToTrack)
            {
                _trackedObjects[i].lastPositions.Remove(_trackedObjects[i].lastPositions[0]);
            }
            _trackedObjects[i].numFramesNotDetected = 0;
            if (_trackedObjects[i].state != TrackedState.DELETED)
            {
                _trackedObjects[i].state = TrackedState.DISPLAYED;
            }
        }
        else if (i == (int)TrackedRectState.NEW_RECTANGLE)
        {
            // Unmatched and not absorbed by another match: start a new track.
            _trackedObjects.Add(new TrackedObject(detectedObjects[j]));
        }
        else
        {
            // Was an auxiliary intersection of a matched rectangle; ignore.
        }
    }

    // Third cycle: lifecycle pass — remove DELETED tracks and update display states.
    int t = 0;
    TrackedObject it;
    while (t < _trackedObjects.Count)
    {
        it = _trackedObjects[t];
        if (it.state == TrackedState.DELETED)
        {
            _trackedObjects.Remove(it);
        }
        else if ((it.numFramesNotDetected > _trackerParameters.maxTrackLifetime)//ALL
                 || ((it.numDetectedFrames <= _trackerParameters.numStepsToWaitBeforeFirstShow)
                     && (it.numFramesNotDetected > _trackerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown)))
        {
            // Too stale (or never shown and lost): mark DELETED; it is removed on
            // the next pass over this list, not immediately.
            it.state = TrackedState.DELETED;
            t++;
        }
        else if (it.state >= TrackedState.DISPLAYED)
        {//DISPLAYED, NEW_DISPLAYED, HIDED
            if (it.numDetectedFrames < _trackerParameters.numStepsToWaitBeforeFirstShow)
            {
                it.state = TrackedState.PENDING;
            }
            else if (it.numDetectedFrames == _trackerParameters.numStepsToWaitBeforeFirstShow)
            {
                // First frame the object becomes visible.
                it.state = TrackedState.NEW_DISPLAYED;
            }
            else if (it.numFramesNotDetected == _trackerParameters.numStepsToShowWithoutDetecting)
            {
                it.state = TrackedState.NEW_HIDED;
            }
            else if (it.numFramesNotDetected > _trackerParameters.numStepsToShowWithoutDetecting)
            {
                it.state = TrackedState.HIDED;
            }
            t++;
        }
        else
        {//NEW
            t++;
        }
    }
}
/// <summary>
/// Ported from OpenCV's DetectionBasedTracker: matches detections against tracked
/// objects by rectangle intersection, updates position histories, and drops
/// tracks that have not been detected for too long.
/// correspondence[j] holds the matched track index (>= 0) or a negative
/// TrackedState sentinel (NEW_RECTANGLE / INTERSECTED_RECTANGLE).
/// </summary>
/// <param name="detectedObjects">Rectangles produced by the detector this frame.</param>
private void UpdateTrackedObjects(List<Rect> detectedObjects)
{
    int N1 = (int)trackedObjects.Count;
    int N2 = (int)detectedObjects.Count;

    // Every existing track has survived one more frame.
    for (int i = 0; i < N1; i++)
    {
        trackedObjects [i].numDetectedFrames++;
    }

    // All detections start out as candidate new rectangles.
    int[] correspondence = new int[N2];
    for (int i = 0; i < N2; i++)
    {
        correspondence [i] = (int)TrackedState.NEW_RECTANGLE;
    }

    // First cycle: for each track, find the detection with the largest intersection.
    for (int i = 0; i < N1; i++)
    {
        TrackedObject curObject = trackedObjects [i];
        int bestIndex = -1;
        int bestArea = -1;
        int numpositions = (int)curObject.lastPositions.Count;
        // NOTE(review): assumes every track has at least one recorded position
        // (numpositions > 0); otherwise this indexing throws — TODO confirm invariant.
        Rect prevRect = curObject.lastPositions [numpositions - 1];

        for (int j = 0; j < N2; j++)
        {
            // Already assigned to an earlier track.
            if (correspondence [j] >= 0)
            {
                continue;
            }
            // Already consumed as an intersection of an earlier match.
            if (correspondence [j] != (int)TrackedState.NEW_RECTANGLE)
            {
                continue;
            }
            Rect r = Rect.intersect(prevRect, detectedObjects [j]);
            if (r != null && (r.width > 0) && (r.height > 0))
            {
                // Mark as intersected; the largest overlap is promoted to a match below.
                correspondence [j] = (int)TrackedState.INTERSECTED_RECTANGLE;
                if (r.area() > bestArea)
                {
                    bestIndex = j;
                    bestArea = (int)r.area();
                }
            }
        }

        if (bestIndex >= 0)
        {
            // Assign the best detection to this track, then absorb any remaining
            // detections that overlap the winner so they don't spawn new tracks.
            correspondence [bestIndex] = i;
            for (int j = 0; j < N2; j++)
            {
                if (correspondence [j] >= 0)
                {
                    continue;
                }
                Rect r = Rect.intersect(detectedObjects [j], detectedObjects [bestIndex]);
                if (r != null && (r.width > 0) && (r.height > 0))
                {
                    correspondence [j] = (int)TrackedState.INTERSECTED_RECTANGLE;
                }
            }
        }
        else
        {
            // No detection matched this track this frame.
            curObject.numFramesNotDetected++;
        }
    }

    // Second cycle: apply the correspondence to the tracks.
    for (int j = 0; j < N2; j++)
    {
        int i = correspondence [j];
        if (i >= 0)
        {
            // Matched: append the position, capped to numLastPositionsToTrack entries.
            trackedObjects [i].lastPositions.Add(detectedObjects [j]);
            while ((int)trackedObjects [i].lastPositions.Count > (int)innerParameters.numLastPositionsToTrack)
            {
                trackedObjects [i].lastPositions.Remove(trackedObjects [i].lastPositions [0]);
            }
            trackedObjects [i].numFramesNotDetected = 0;
        }
        else if (i == (int)TrackedState.NEW_RECTANGLE)
        {
            // Unmatched and not absorbed by another match: start a new track.
            trackedObjects.Add(new TrackedObject(detectedObjects [j]));
        }
        else
        {
            // Was an auxiliary intersection of a matched rectangle; ignore.
        }
    }

    // Final pass: drop tracks that are too stale, or were never shown and lost.
    int t = 0;
    TrackedObject it;
    while (t < trackedObjects.Count)
    {
        it = trackedObjects [t];
        if ((it.numFramesNotDetected > parameters.maxTrackLifetime)
            || ((it.numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
                && (it.numFramesNotDetected > innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown)))
        {
            trackedObjects.Remove(it);
        }
        else
        {
            t++;
        }
    }
}
// Update is called once per frame.
// Per-frame pipeline: grab the camera frame, hand a grayscale copy to the
// detection thread when it is idle, pick the regions to search (fresh detector
// output or predicted positions of tracked objects), run per-region detection,
// update the tracker, and draw the results.
// Rectangle colors: detector output (0,0,255,255), predicted regions
// (0,255,0,255), final tracked objects (255,0,0,255) — RGBA, since the frame is RGBA.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        // Detection thread is idle: give it a fresh grayscale frame and let it run.
        if (!shouldDetectInMultiThread)
        {
            grayMat.copyTo(grayMat4Thread);
            shouldDetectInMultiThread = true;
        }

        OpenCVForUnity.CoreModule.Rect[] rects;
        if (didUpdateTheDetectionResult)
        {
            // The background detector finished: search around its fresh results.
            didUpdateTheDetectionResult = false;
            rectsWhereRegions = detectionResult.toArray();

            rects = rectsWhereRegions;
            for (int i = 0; i < rects.Length; i++)
            {
                Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 2);
            }
        }
        else
        {
            // No fresh detection: predict regions from each track's last positions.
            rectsWhereRegions = new Rect[trackedObjects.Count];
            for (int i = 0; i < trackedObjects.Count; i++)
            {
                int n = trackedObjects [i].lastPositions.Count;
                // NOTE(review): assumes n > 0 for every track — TODO confirm invariant.
                Rect r = trackedObjects [i].lastPositions [n - 1].clone();
                if (r.area() == 0)
                {
                    Debug.Log("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                    continue;
                }
                // Correction by speed of rectangle: extrapolate along the motion of
                // the last two centers, scaled by coeffObjectSpeedUsingInPrediction.
                if (n > 1)
                {
                    Point center = CenterRect(r);
                    Point center_prev = CenterRect(trackedObjects [i].lastPositions [n - 2]);
                    Point shift = new Point((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
                        (center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);
                    r.x += (int)Math.Round(shift.x);
                    r.y += (int)Math.Round(shift.y);
                }
                rectsWhereRegions [i] = r;
            }

            rects = rectsWhereRegions;
            for (int i = 0; i < rects.Length; i++)
            {
                Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 2);
            }
        }

        // Run the detector only inside the chosen regions (cheaper than full-frame).
        detectedObjectsInRegions.Clear();
        if (rectsWhereRegions.Length > 0)
        {
            int len = rectsWhereRegions.Length;
            for (int i = 0; i < len; i++)
            {
                DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
            }
        }

        UpdateTrackedObjects(detectedObjectsInRegions);
        GetObjects(resultObjects);

        // Draw the final tracked rectangles.
        rects = resultObjects.ToArray();
        for (int i = 0; i < rects.Length; i++)
        {
            Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
        }

#if UNITY_WEBGL
        Imgproc.putText(rgbaMat, "WebGL platform does not support multi-threading.", new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
#endif

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}
/// <summary>
/// Processes points by filter (Kalman filter smoothing).
/// When the points move more than an adaptive threshold the raw detector points
/// are passed through and the filter is flagged for re-initialization; otherwise
/// the Kalman predict/correct cycle produces the smoothed output points.
/// </summary>
/// <param name="img">Image mat (used only for debug drawing).</param>
/// <param name="srcPoints">Input points; must hold exactly numberOfElements entries, or be null.</param>
/// <param name="dstPoints">Output points; allocated/resized here when null or wrongly sized.</param>
/// <param name="drawDebugPoints">if true, draws debug points and logs which source was used.</param>
/// <returns>Output points.</returns>
public override List<Vector2> Process(Mat img, List<Vector2> srcPoints, List<Vector2> dstPoints = null, bool drawDebugPoints = false)
{
    if (srcPoints != null && srcPoints.Count != numberOfElements)
    {
        throw new ArgumentException("The number of elements is different.");
    }

    if (srcPoints != null)
    {
        // Ensure dstPoints exists and holds exactly numberOfElements slots.
        if (dstPoints == null)
        {
            dstPoints = new List<Vector2>();
        }
        if (dstPoints != null && dstPoints.Count != numberOfElements)
        {
            dstPoints.Clear();
            for (int i = 0; i < numberOfElements; i++)
            {
                dstPoints.Add(new Vector2());
            }
        }

        // Copy the input into the reusable Point buffer.
        for (int i = 0; i < numberOfElements; i++)
        {
            src_points[i].x = srcPoints[i].x;
            src_points[i].y = srcPoints[i].y;
        }

        // calc diffDlib: movement threshold scaled by the points' bounding-box area
        // (40000 presumably corresponds to a 200x200 reference region — TODO confirm)
        // and the sensitivity setting.
        prevTrackPtsMat.fromList(src_points);
        OpenCVForUnity.CoreModule.Rect rect = Imgproc.boundingRect(prevTrackPtsMat);
        double diffDlib = this.diffDlib * rect.area() / 40000.0 * diffCheckSensitivity;

        // if the face is moving so fast, use dlib to detect the face
        double diff = calDistanceDiff(src_points, last_points);
        if (drawDebugPoints)
        {
            Debug.Log("variance:" + diff);
        }
        if (diff > diffDlib)
        {
            // Fast motion: pass the detector points straight through and force the
            // Kalman state to be re-seeded on the next slow frame.
            for (int i = 0; i < numberOfElements; i++)
            {
                dstPoints[i] = srcPoints[i];
            }
            if (drawDebugPoints)
            {
                Debug.Log("DLIB");
                for (int i = 0; i < numberOfElements; i++)
                {
                    Imgproc.circle(img, new Point(srcPoints[i].x, srcPoints[i].y), 2, new Scalar(255, 0, 0, 255), -1);
                }
            }
            flag = false;
        }
        else
        {
            // Set initial state estimate: seed statePre/statePost with the current
            // points (interleaved x,y per element) the first time after a reset.
            if (!flag)
            {
                Mat statePreMat = KF.get_statePre();
                float[] tmpStatePre = new float[statePreMat.total()];
                for (int i = 0; i < numberOfElements; i++)
                {
                    tmpStatePre[i * 2] = (float)srcPoints[i].x;
                    tmpStatePre[i * 2 + 1] = (float)srcPoints[i].y;
                }
                statePreMat.put(0, 0, tmpStatePre);
                Mat statePostMat = KF.get_statePost();
                float[] tmpStatePost = new float[statePostMat.total()];
                for (int i = 0; i < numberOfElements; i++)
                {
                    tmpStatePost[i * 2] = (float)srcPoints[i].x;
                    tmpStatePost[i * 2 + 1] = (float)srcPoints[i].y;
                }
                statePostMat.put(0, 0, tmpStatePost);
                flag = true;
            }

            // Kalman Prediction
            KF.predict();

            // Update Measurement with the current points (interleaved x,y).
            float[] tmpMeasurement = new float[measurement.total()];
            for (int i = 0; i < numberOfElements; i++)
            {
                tmpMeasurement[i * 2] = (float)srcPoints[i].x;
                tmpMeasurement[i * 2 + 1] = (float)srcPoints[i].y;
            }
            measurement.put(0, 0, tmpMeasurement);

            // Correct Measurement: the corrected state is the smoothed output.
            Mat estimated = KF.correct(measurement);
            float[] tmpEstimated = new float[estimated.total()];
            estimated.get(0, 0, tmpEstimated);
            for (int i = 0; i < numberOfElements; i++)
            {
                predict_points[i].x = tmpEstimated[i * 2];
                predict_points[i].y = tmpEstimated[i * 2 + 1];
            }
            estimated.Dispose();

            for (int i = 0; i < numberOfElements; i++)
            {
                dstPoints[i] = new Vector2((float)predict_points[i].x, (float)predict_points[i].y);
            }
            if (drawDebugPoints)
            {
                Debug.Log("Kalman Filter");
                for (int i = 0; i < numberOfElements; i++)
                {
                    Imgproc.circle(img, predict_points[i], 2, new Scalar(0, 255, 0, 255), -1);
                }
            }
        }

        // Remember this frame's input for next frame's movement check.
        for (int i = 0; i < numberOfElements; i++)
        {
            last_points[i].x = src_points[i].x;
            last_points[i].y = src_points[i].y;
        }
        return (dstPoints);
    }
    else
    {
        // No input this frame: hand back whichever list the caller provided.
        return (dstPoints == null ? srcPoints : dstPoints);
    }
}