/// <summary>
/// Finds the largest contour in the thresholded mask and stores its centroid
/// on <paramref name="book"/> (book.x / book.y), setting isTracked accordingly.
/// Position is reset to (-1, -1) and isTracked to false when nothing qualifies.
/// </summary>
/// <param name="book">Target object; position and tracked flag are written in place.</param>
/// <param name="threshold">Binary mask for the book's colour; not modified (work is done on a copy).</param>
void trackFilteredObject(SpellBook book, Mat threshold) {
    book.x = -1;
    book.y = -1;
    book.isTracked = false;
    Debug.Log("tracking " + book.color.ToString());

    Mat temp = new Mat();
    Mat hierarchy = new Mat();
    //these two vectors needed for output of findContours
    List<MatOfPoint> contours = new List<MatOfPoint>();
    try {
        threshold.copyTo(temp);
        // Edge-detect first so findContours sees object outlines only.
        Imgproc.Canny(temp, temp, 50, 100);
        //find contours of filtered image using openCV findContours function
        Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        //use moments method to find our filtered object; anything at or below
        //MIN_OBJECT_AREA is treated as noise.
        double max_area = MIN_OBJECT_AREA; // initialize
        if (hierarchy.rows() > 0) {
            int numObjects = contours.Count;
            //we only want the object with the largest area so we save a reference area each
            //iteration and compare it to the area in the next iteration.
            for (int i = 0; i < numObjects; i++) {
                Moments moment = Imgproc.moments(contours[i]);
                double area = moment.get_m00(); // m00 is the contour area
                if (area > max_area) {
                    // Centroid = (m10 / m00, m01 / m00)
                    book.x = (int)(moment.get_m10() / area);
                    book.y = (int)(moment.get_m01() / area);
                    max_area = area;
                }
            }
        }
        // Tracked iff a centroid was written (x left at -1 means nothing found).
        book.isTracked = book.x != -1;
    } finally {
        // BUGFIX: Mats wrap native OpenCV memory that the GC does not reclaim
        // promptly; release explicitly to avoid leaking every call.
        temp.release();
        hierarchy.release();
        foreach (MatOfPoint c in contours) {
            c.release();
        }
    }
}
/// <summary>
/// Finds blobs in the thresholded mask and, for each one larger than 10x10 px,
/// projects the pixel centroid into a world-space ray and repositions the
/// tracker's object via a raycast against its sphere collider.
/// </summary>
/// <param name="ot">Tracker whose Sphere / gameObject are repositioned on the app thread.</param>
/// <param name="threshold">Binary mask; processed via a copy, not modified.</param>
private void trackFilteredObject(ObjectTracker ot, Mat threshold) {
    Mat temp = new Mat();
    threshold.copyTo(temp);
    List <MatOfPoint> contours = new List <MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);
    if (hierarchy.rows() > 0) {
        // Walk top-level contours via the hierarchy "next sibling" links:
        // hierarchy[0][index][0] is the next contour at the same level, -1 ends the walk.
        for (int index = 0; index >= 0; index = (int)hierarchy.get(0, index)[0]) {
            Moments moment = Imgproc.moments(contours[index]);
            double area = moment.m00; // m00 is the contour area
            // Blobs of 10x10 px or smaller are treated as noise and skipped.
            if (area > 10 * 10) {
                // Centroid of the blob in pixel coordinates: (m10/m00, m01/m00).
                int x = (int)(moment.get_m10() / area);
                int y = (int)(moment.get_m01() / area);
                Vector2 point = new Vector2(x, y);
                Vector3 dirRay = LocatableCameraUtils.PixelCoordToWorldCoord(_cameraToWorldMatrix, _projectionMatrix, _resolution, point);
                // Unity scene objects must be touched on the app thread; this method
                // presumably runs on a camera/processing thread — TODO confirm.
                // NOTE(review): dirRay is declared inside the loop, so each closure
                // captures its own copy — safe.
                Application.InvokeOnAppThread(() => {
                    ot.Sphere.transform.position = Camera.main.transform.position + new Vector3(0, ot.offset, 0);
                    SphereCollider collider = ot.Sphere.GetComponent <SphereCollider>();
                    // We inverse the ray source and dir to make the sphere collider work
                    Vector3 newPosRay = Camera.main.transform.position + dirRay * (collider.radius * 2);
                    Ray ray = new Ray(newPosRay, -dirRay);
                    RaycastHit hit;
                    if (Physics.Raycast(ray, out hit, collider.radius * 3)) {
                        Vector3 pos = hit.point;
                        ot.gameObject.transform.position = pos;
                    }
                }, false);
            }
        }
    }
    // NOTE(review): temp, hierarchy and the contour Mats wrap native memory and are
    // never released here — consider calling release() on them to avoid a leak.
}
/// <summary>
/// Tracks the filtered object: collects every contour in the thresholded mask
/// whose area exceeds MIN_OBJECT_AREA and draws the detections onto the camera
/// feed; prints a noise warning instead when too many contours are present.
/// </summary>
/// <param name="theColorObject">Template supplying the type/colour for each detection.</param>
/// <param name="threshold">Binary mask; processed via a copy, not modified.</param>
/// <param name="HSV">HSV image (unused here; kept for signature compatibility).</param>
/// <param name="cameraFeed">Frame that detections (or the noise warning) are drawn onto.</param>
void trackFilteredObject(ColorObject theColorObject, Mat threshold, Mat HSV, Mat cameraFeed) {
    List<ColorObject> colorObjects = new List<ColorObject>();
    Mat temp = new Mat();
    threshold.copyTo(temp);
    //these two vectors needed for output of findContours
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    //find contours of filtered image using openCV findContours function
    Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);
    //use moments method to find our filtered object
    bool colorObjectFound = false;
    if (hierarchy.rows() > 0) {
        // BUGFIX: count actual contours. findContours returns hierarchy as a 1xN
        // Mat, so hierarchy.rows() is 1 and the old noise check could never fire.
        int numObjects = contours.Count;
        //if number of objects greater than MAX_NUM_OBJECTS we have a noisy filter
        if (numObjects < MAX_NUM_OBJECTS) {
            // Walk top-level contours via the hierarchy "next sibling" links
            // (hierarchy[0][index][0]; -1 terminates the walk).
            for (int index = 0; index >= 0; index = (int)hierproc_get(hierarchy, index)) {
                Moments moment = Imgproc.moments(contours[index]);
                double area = moment.get_m00(); // m00 is the contour area
                //if the area is less than MIN_OBJECT_AREA it is probably just noise
                if (area > MIN_OBJECT_AREA) {
                    ColorObject colorObject = new ColorObject();
                    colorObject.setXPos((int)(moment.get_m10() / area)); // centroid x
                    colorObject.setYPos((int)(moment.get_m01() / area)); // centroid y
                    colorObject.setType(theColorObject.getType());
                    colorObject.setColor(theColorObject.getColor());
                    colorObjects.Add(colorObject);
                    // BUGFIX: do not reset this flag when a later contour is noise —
                    // one valid detection is enough to draw the collected results.
                    colorObjectFound = true;
                }
            }
            //let user know you found an object
            if (colorObjectFound) {
                //draw object location on screen
                drawObject(colorObjects, cameraFeed, temp, contours, hierarchy);
            }
        } else {
            Core.putText(cameraFeed, "TOO MUCH NOISE!", new Point(5, cameraFeed.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
        }
    }
    // Release native OpenCV memory (Mats are not reclaimed promptly by the GC).
    temp.release();
    hierarchy.release();
    foreach (MatOfPoint c in contours) {
        c.release();
    }
}

// Reads the "next sibling" index for a contour from the findContours hierarchy.
private static int hierproc_get(Mat hierarchy, int index) {
    return (int)hierarchy.get(0, index)[0];
}
// Update is called once per frame
/// <summary>
/// Per-frame pipeline: pulls the latest webcam frame into rgbMat, corrects its
/// orientation, (re)starts the tracking thread when idle, converts to HSV,
/// applies morphology, draws tracked objects, and pushes the result to the texture.
/// </summary>
void Update() {
    // Nothing to do until initialization has completed elsewhere.
    if (!initDone) {
        return;
    }
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    // Workaround for old iOS/Unity versions where didUpdateThisFrame is unreliable:
    // treat any texture larger than the 16x16 placeholder as a real frame.
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat(webCamTexture, rgbMat, colors);
        // Undo the device/camera rotation so the Mat is upright.
        if (webCamDevice.isFrontFacing) {
            if (webCamTexture.videoRotationAngle == 0) {
                Core.flip(rgbMat, rgbMat, 1);  // mirror horizontally
            } else if (webCamTexture.videoRotationAngle == 90) {
                Core.flip(rgbMat, rgbMat, 0);  // flip vertically
            }
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip(rgbMat, rgbMat, 0);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip(rgbMat, rgbMat, 1);
            }
        } else if (webCamTexture.videoRotationAngle == 180 || webCamTexture.videoRotationAngle == 270) {
            Core.flip(rgbMat, rgbMat, -1);     // rotate 180 (flip both axes)
        }
        // Kick off a background tracking pass whenever one is not already running.
        // NOTE(review): isTracking is presumably set inside trackingFunc; if not,
        // this spawns a new thread every frame — verify.
        if (!isTracking) {
            trackingThread = new Thread(trackingFunc);
            trackingThread.Start();
        }
        Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
        morphOps(thresholdMat);
        drawObjectsInMemory(thresholdMat, hsvMat, rgbMat);
        Utils.matToTexture2D(rgbMat, texture, colors);
    }
}

/// <summary>
/// Stops the webcam when this behaviour is disabled.
/// </summary>
void OnDisable() {
    webCamTexture.Stop();
}

// Structuring-element sizes for morphOps: small erode kernel to strip speckle
// noise, large dilate kernel to make the remaining object nicely visible.
Size erodeSizeParam = new Size(3, 3);
Size dilateSizeParam = new Size(20, 20);

/// <summary>
/// Applies erode-then-dilate morphology to the threshold mask in place,
/// removing small noise and enlarging the surviving blobs.
/// </summary>
/// <param name="thresh">Binary threshold image, modified in place.</param>
void morphOps(Mat thresh) {
    //create structuring element that will be used to "dilate" and "erode" image.
    //the element chosen here is a 3px by 3px rectangle
    Mat erodeElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, erodeSizeParam);
    //dilate with larger element so make sure object is nicely visible
    Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, dilateSizeParam);
    // NOTE(review): erodeElement/dilateElement wrap native memory and are never
    // released — consider calling release() after use.
    Imgproc.erode(thresh, thresh, erodeElement);
    //Imgproc.erode(thresh, thresh, erodeElement);
    Imgproc.dilate(thresh, thresh, dilateElement);
    //Imgproc.dilate(thresh, thresh, dilateElement);
}

/// <summary>
/// Tracks the filtered object.
/// </summary>
/// <param name="theColorObject">Template supplying the type/colour of the tracked object.</param>
/// <param name="threshold">Binary mask; processed via a copy, not modified.</param>
/// <param name="HSV">HSV image (unused here; kept for signature compatibility).</param>
/// <param name="cameraFeed">Frame the noise warning is drawn onto.</param>
/// <returns>
/// The largest detected object above MIN_OBJECT_AREA, or a ColorObject whose
/// type is "null" when nothing qualifies.
/// </returns>
ColorObject trackFilteredObject(ColorObject theColorObject, Mat threshold, Mat HSV, Mat cameraFeed) {
    ColorObject tmpForMemory = new ColorObject();
    tmpForMemory.setType("null"); // sentinel meaning "nothing found"
    Mat temp = new Mat();
    threshold.copyTo(temp);
    //these two vectors needed for output of findContours
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    //find contours of filtered image using openCV findContours function
    Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);
    //use moments method to find our filtered object
    if (hierarchy.rows() > 0) {
        // BUGFIX: count actual contours. findContours returns hierarchy as a 1xN
        // Mat, so hierarchy.rows() is 1 and the old noise check could never fire.
        int numObjects = contours.Count;
        //if number of objects greater than MAX_NUM_OBJECTS we have a noisy filter
        if (numObjects < MAX_NUM_OBJECTS) {
            double areaMemory = 0;
            // Walk top-level contours via the hierarchy "next sibling" links
            // (hierarchy[0][index][0]; -1 terminates the walk), keeping the largest.
            for (int index = 0; index >= 0; index = (int)hierarchy.get(0, index)[0]) {
                Moments moment = Imgproc.moments(contours[index]);
                double area = moment.get_m00(); // m00 is the contour area
                //areas at or below MIN_OBJECT_AREA are treated as noise; only
                //materialize a ColorObject when this is the new largest blob.
                if (area > MIN_OBJECT_AREA && area > areaMemory) {
                    areaMemory = area;
                    ColorObject colorObject = new ColorObject();
                    colorObject.setXPos((int)(moment.get_m10() / area)); // centroid x
                    colorObject.setYPos((int)(moment.get_m01() / area)); // centroid y
                    colorObject.setType(theColorObject.getType());
                    colorObject.setColor(theColorObject.getColor());
                    tmpForMemory = colorObject;
                }
            }
        } else {
            Core.putText(cameraFeed, "TOO MUCH NOISE!", new Point(5, cameraFeed.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
        }
    }
    // Release native OpenCV memory before returning (not reclaimed promptly by the GC).
    temp.release();
    hierarchy.release();
    foreach (MatOfPoint c in contours) {
        c.release();
    }
    return (tmpForMemory);
}