// Update is called once per frame.
// Runs Inception-5h classification on each webcam frame and overlays the
// best-matching class name on the preview texture.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        if (net.empty() || classes == null)
        {
            // Model assets missing: show download instructions instead of running inference.
            Imgproc.putText(rgbaMat, "model file or class names list file is not loaded.", new Point(5, rgbaMat.rows() - 50), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(rgbaMat, "The model and class names list can be downloaded here:", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(rgbaMat, "https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        }
        else
        {
            // BUGFIX: the blob field was reassigned every frame without Dispose(),
            // leaking native Mat memory. Release the previous blob first.
            if (blob != null)
                blob.Dispose();
            blob = Dnn.blobFromImage(rgbaMat, 1, new Size(224, 224), new Scalar(104, 117, 123), false);
            net.setInput(blob);

            Mat prob = net.forward();

            // The index of the maximum of the flattened probability row vector
            // is the best-matching class id.
            Core.MinMaxLocResult minmax = Core.minMaxLoc(prob.reshape(1, 1));
            // Debug.Log ("Best match " + (int)minmax.maxLoc.x);
            // Debug.Log ("Best match class " + classes [(int)minmax.maxLoc.x]);
            // Debug.Log ("Probability: " + minmax.maxVal * 100 + "%");
            prob.Dispose();

            Imgproc.putText(rgbaMat, "Best match class " + classes [(int)minmax.maxLoc.x], new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        }

        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
// Update is called once per frame.
// Thresholds the frame in HSV for each tracked color and overlays the
// tracking results plus a size/orientation readout.
void Update()
{
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat rgbaMat = webCamTextureToMatHelper.GetMat();
    Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

    // Threshold and track each color in turn: blue, yellow, red, green.
    var trackedColors = new[] { blue, yellow, red, green };
    foreach (var color in trackedColors)
    {
        Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
        Core.inRange(hsvMat, color.getHSVmin(), color.getHSVmax(), thresholdMat);
        morphOps(thresholdMat);
        trackFilteredObject(color, thresholdMat, hsvMat, rgbMat);
    }

    Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

    Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
}
// Update is called once per frame.
// Detects circles in the grayscale frame with the Hough transform and
// draws each one onto the preview.
void Update()
{
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat rgbaMat = webCamTextureToMatHelper.GetMat();
    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

    using (Mat detectedCircles = new Mat())
    {
        Imgproc.HoughCircles(grayMat, detectedCircles, Imgproc.CV_HOUGH_GRADIENT, 2, 10, 160, 50, 10, 40);

        // Each column holds one circle as (x, y, radius).
        Point center = new Point();
        for (int col = 0; col < detectedCircles.cols(); col++)
        {
            double[] circle = detectedCircles.get(0, col);
            center.x = circle [0];
            center.y = circle [1];
            double radius = circle [2];
            Imgproc.circle(rgbaMat, center, (int)radius, new Scalar(255, 0, 0, 255), 5);
        }
    }

    Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
}
// Update is called once per frame.
// In camera mode, optionally captures the current frame for calibration
// and renders the annotated preview.
void Update()
{
    // Nothing to do when input comes from still images instead of the camera.
    if (isImagesInputMode)
        return;

    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat rgbaMat = webCamTextureToMatHelper.GetMat();
    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

    if (shouldCaptureFrame)
    {
        shouldCaptureFrame = false;

        // Capture a snapshot of the gray frame; keep the reprojection error if valid.
        Mat capturedFrame = grayMat.clone();
        double reprojectionError = CaptureFrame(capturedFrame);
        if (reprojectionError > 0)
            repErr = reprojectionError;
    }

    DrawFrame(grayMat, bgrMat);
    Imgproc.cvtColor(bgrMat, rgbaMat, Imgproc.COLOR_BGR2RGBA);
    Utils.fastMatToTexture2D(rgbaMat, texture);
}
// Update is called once per frame.
// Runs SSD object detection on each frame and draws labeled boxes for
// detections above the confidence threshold.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        if (net == null)
        {
            Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        }
        else
        {
            // The model expects a BGR input blob.
            Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

            // BUGFIX: the blob field was reassigned every frame without Dispose(),
            // leaking native Mat memory. Release the previous blob first.
            if (blob != null)
                blob.Dispose();
            blob = Dnn.blobFromImage(bgrMat, inScaleFactor, new Size(inWidth, inHeight), new Scalar(meanVal, meanVal, meanVal), false, false);
            net.setInput(blob);

            Mat prob = net.forward();
            // Each detection is a row of 7 floats:
            // [batchId, classId, confidence, left, top, right, bottom].
            prob = prob.reshape(1, (int)prob.total() / 7);

            float[] data = new float[7];
            float confidenceThreshold = 0.2f;
            for (int i = 0; i < prob.rows(); i++)
            {
                prob.get(i, 0, data);
                float confidence = data [2];
                if (confidence > confidenceThreshold)
                {
                    int class_id = (int)(data [1]);

                    // Box coordinates are normalized [0,1]; scale to the display frame.
                    float left = data [3] * rgbaMat.cols();
                    float top = data [4] * rgbaMat.rows();
                    float right = data [5] * rgbaMat.cols();
                    float bottom = data [6] * rgbaMat.rows();

                    Imgproc.rectangle(rgbaMat, new Point(left, top), new Point(right, bottom), new Scalar(0, 255, 0, 255), 2);

                    string label = classNames [class_id] + ": " + confidence;
                    int[] baseLine = new int[1];
                    Size labelSize = Imgproc.getTextSize(label, Core.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

                    // White background behind the label so it stays readable.
                    Imgproc.rectangle(rgbaMat, new Point(left, top), new Point(left + labelSize.width, top + labelSize.height + baseLine [0]), new Scalar(255, 255, 255, 255), Core.FILLED);
                    Imgproc.putText(rgbaMat, label, new Point(left, top + labelSize.height), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0, 255));
                }
            }
            prob.Dispose();
        }

        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
// Update is called once per frame.
// Green-screens the live frame against a stored background image; the
// background reference can be re-captured with space or a touch.
void Update()
{
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat frame = webCamTextureToMatHelper.GetMat();

    // Re-capture the reference background on space key or any touch.
    bool resetRequested = Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0;
    if (resetRequested)
    {
        frame.copyTo(bgMat);
        setBgTexture(bgMat);
    }

    // Foreground mask: pixels differing from the stored background beyond thresh.
    findFgMaskMat(frame, bgMat, thresh);

    // Background mask is the inverse of the foreground mask.
    Core.bitwise_not(fgMaskMat, bgMaskMat);

    // Paint green over everything classified as background.
    greenMat.copyTo(frame, bgMaskMat);

    Imgproc.putText(frame, "SPACE KEY or TOUCH SCREEN: Reset backgroud img", new Point(5, frame.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.6, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

    Utils.matToTexture2D(frame, texture, webCamTextureToMatHelper.GetBufferColors());
}
// Update is called once per frame.
// Runs the loaded detection network (with Faster-RCNN/R-FCN im_info support)
// on each frame and post-processes the outputs onto the preview.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        if (net == null)
        {
            Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        }
        else
        {
            Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

            // Create a 4D blob from a frame; fall back to the frame size when
            // inpWidth/inpHeight are unset (<= 0).
            Size inpSize = new Size(inpWidth > 0 ? inpWidth : bgrMat.cols(), inpHeight > 0 ? inpHeight : bgrMat.rows());
            Mat blob = Dnn.blobFromImage(bgrMat, scale, inpSize, mean, swapRB, false);

            // Run a model.
            net.setInput(blob);

            Mat imInfo = null;
            if (net.getLayer(new DictValue(0)).outputNameToIndex("im_info") != -1) // Faster-RCNN or R-FCN
            {
                Imgproc.resize(bgrMat, bgrMat, inpSize);
                imInfo = new Mat(1, 3, CvType.CV_32FC1);
                imInfo.put(0, 0, new float[] { (float)inpSize.height, (float)inpSize.width, 1.6f });
                net.setInput(imInfo, "im_info");
            }

            TickMeter tm = new TickMeter();
            tm.start();

            List <Mat> outs = new List <Mat> ();
            net.forward(outs, outBlobNames);

            tm.stop();
            // Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());

            postprocess(rgbaMat, outs, net);

            for (int i = 0; i < outs.Count; i++)
            {
                outs [i].Dispose();
            }
            blob.Dispose();
            // BUGFIX: the im_info Mat was previously never disposed, leaking
            // native memory on every Faster-RCNN/R-FCN frame.
            if (imInfo != null)
                imInfo.Dispose();
        }

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}
// Update is called once per frame.
// Runs image classification on each frame (BGR input, 224x224, mean
// subtraction) and overlays the best-matching class name.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        if (net.empty() || classes == null)
        {
            Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        }
        else
        {
            Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

            // BUGFIX: the blob field was reassigned every frame without Dispose(),
            // leaking native Mat memory. Release the previous blob first.
            if (blob != null)
                blob.Dispose();
            blob = Dnn.blobFromImage(bgrMat, 1, new Size(224, 224), new Scalar(104, 117, 123), false, true);
            net.setInput(blob);

            Mat prob = net.forward();

            // The index of the maximum of the flattened probability row vector
            // is the best-matching class id.
            Core.MinMaxLocResult minmax = Core.minMaxLoc(prob.reshape(1, 1));
            // Debug.Log ("Best match " + (int)minmax.maxLoc.x);
            // Debug.Log ("Best match class " + classes [(int)minmax.maxLoc.x]);
            // Debug.Log ("Probability: " + minmax.maxVal * 100 + "%");
            prob.Dispose();

            Imgproc.putText(rgbaMat, "Best match class " + classes [(int)minmax.maxLoc.x], new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        }

        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
// Update is called once per frame.
// Detects faces with a Haar cascade and outlines each detection.
void Update()
{
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat frame = webCamTextureToMatHelper.GetMat();

    // Haar cascades run on equalized grayscale input.
    Imgproc.cvtColor(frame, grayMat, Imgproc.COLOR_RGBA2GRAY);
    Imgproc.equalizeHist(grayMat, grayMat);

    if (cascade != null)
    {
        // Minimum face size is 20% of the frame in each dimension.
        cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            new Size(grayMat.cols() * 0.2, grayMat.rows() * 0.2), new Size());
    }

    foreach (OpenCVForUnity.Rect face in faces.toArray())
    {
        Imgproc.rectangle(frame, new Point(face.x, face.y), new Point(face.x + face.width, face.y + face.height), new Scalar(255, 0, 0, 255), 2);
    }

    Utils.matToTexture2D(frame, texture, webCamTextureToMatHelper.GetBufferColors());
}
// Update is called once per frame.
// Detects a QR code, outlines its four corners and overlays the decoded text.
void Update()
{
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat rgbaMat = webCamTextureToMatHelper.GetMat();
    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

    if (detector.detect(grayMat, points))
    {
        string decode_info = detector.decode(grayMat, points);

        // The four detected corners arrive as a flat (x0,y0,...,x3,y3) array;
        // connect consecutive corners (wrapping around) to outline the code.
        float[] corners = new float[8];
        points.get(0, 0, corners);
        for (int i = 0; i < 8; i += 2)
        {
            Point from = new Point(corners [i], corners [i + 1]);
            Point to = new Point(corners [(i + 2) % 8], corners [(i + 3) % 8]);
            Imgproc.line(rgbaMat, from, to, new Scalar(255, 0, 0, 255), 2);
        }

        Imgproc.putText(rgbaMat, "DECODE INFO: " + decode_info, new Point(5, grayMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
    }

    Utils.fastMatToTexture2D(rgbaMat, texture);
}
// Update is called once per frame.
// Comic-style filter: posterizes the grayscale frame into three tones,
// composites it over a background pattern, and overlays Canny edges.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        // Utils.webCamTextureToMat (webCamTexture, grayMat, colors);

        // Start from the background pattern image.
        bgMat.copyTo(dstMat);

        // Blurred copy used for edge extraction below.
        Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);

        // Posterize into three tones: <70 -> 0, [70,120) -> 100, else -> 255.
        // The mask marks only the dark (0) and bright (255) pixels, so the
        // mid-tone (100) pixels keep the background pattern underneath.
        grayMat.get(0, 0, grayPixels);

        for (int i = 0; i < grayPixels.Length; i++)
        {
            maskPixels [i] = 0;

            if (grayPixels [i] < 70)
            {
                grayPixels [i] = 0;
                maskPixels [i] = 1;
            }
            else if (70 <= grayPixels [i] && grayPixels [i] < 120)
            {
                grayPixels [i] = 100;
            }
            else
            {
                grayPixels [i] = 255;
                maskPixels [i] = 1;
            }
        }

        grayMat.put(0, 0, grayPixels);
        maskMat.put(0, 0, maskPixels);
        // Copy the posterized pixels over the background, masked.
        grayMat.copyTo(dstMat, maskMat);

        // Overlay Canny edges as black lines: the edge map becomes the copy
        // mask, and the inverted edges (black on white) are copied through it.
        Imgproc.Canny(lineMat, lineMat, 20, 120);
        lineMat.copyTo(maskMat);
        Core.bitwise_not(lineMat, lineMat);
        lineMat.copyTo(dstMat, maskMat);

        // Imgproc.putText (dstMat, "W:" + dstMat.width () + " H:" + dstMat.height () + " SO:" + Screen.orientation, new Point (5, dstMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (0), 2, Imgproc.LINE_AA, false);

        // Imgproc.cvtColor(dstMat,rgbaMat,Imgproc.COLOR_GRAY2RGBA);
        // Utils.matToTexture2D (rgbaMat, texture);

        Utils.matToTexture2D(dstMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
// Update is called once per frame.
// Detects faces and blurs the background around them; when no face is found,
// keeps blurring with the last known faces for a short grace period.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // BUGFIX: dispose the previous frame's clone before replacing it;
        // the old clone was leaked every frame.
        if (copyMat != null)
            copyMat.Dispose();
        copyMat = rgbaMat.clone();

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        if (cascade != null)
        {
            cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, new Size(grayMat.cols() * 0.2, grayMat.rows() * 0.2), new Size());
        }

        OpenCVForUnity.Rect[] rects = faces.toArray();
        if (rects.Length == 0)
        {
            noFaceFrameCount++;
            // Keep blurring with the last detections briefly to avoid flicker.
            if (rectsLast != null && noFaceFrameCount <= maxNegativeFrames && rectsLast.Length > 0)
            {
                blurBackground(rectsLast, rgbaMat);
            }
            else
            {
                // clear last rect fields
                rowRangeTopLast = null;
                rowRangeButtomLast = null;
                colRangeleftLast = null;
                colRangeRightLast = null;
            }
        }

        if (rects.Length > 0)
        {
            blurBackground(rects, rgbaMat);
            noFaceFrameCount = 0;

            // Remember the current detections for the no-face grace period.
            rectsLast = new OpenCVForUnity.Rect[rects.Length];
            for (int i = 0; i < rects.Length; i++)
            {
                rectsLast[i] = rects[i];
            }
        }

        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
// Update is called once per frame.
// Detects faces, fits facial landmarks on each, and draws both the face
// rectangles and the landmark points.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        // BUGFIX: guard against a missing cascade (consistent with the other
        // detection handlers in this file); previously a null cascade would
        // throw on every frame.
        if (cascade != null)
        {
            // detect faces
            cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size(grayMat.cols() * 0.2, grayMat.rows() * 0.2), new Size());
        }

        if (faces.total() > 0)
        {
            // fit landmarks for each found face
            List <MatOfPoint2f> landmarks = new List <MatOfPoint2f>();
            facemark.fit(grayMat, faces, landmarks);

            Rect[] rects = faces.toArray();
            for (int i = 0; i < rects.Length; i++)
            {
                Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
            }

            // draw the landmarks for each face
            for (int i = 0; i < landmarks.Count; i++)
            {
                MatOfPoint2f lm = landmarks[i];
                float[] lm_float = new float[lm.total() * lm.channels()];
                MatUtils.copyFromMat <float>(lm, lm_float);
                DrawFaceLandmark(rgbaMat, ConvertArrayToPointList(lm_float), new Scalar(0, 255, 0, 255), 2);
            }
        }

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}
// Update is called once per frame.
// Copies the latest webcam frame straight to the display texture.
void Update()
{
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat frame = webCamTextureToMatHelper.GetMat();
    Utils.fastMatToTexture2D(frame, texture);
}
// Update is called once per frame.
// Records touches/clicks as hand-color sampling points, then runs face
// landmark and hand pose estimation on the current frame (optionally
// skipping frames to reduce load).
void Update()
{
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
    // Touch: record the lift-off position unless it landed on a UI element.
    int touchCount = Input.touchCount;
    if (touchCount == 1)
    {
        Touch t = Input.GetTouch(0);
        if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
        {
            storedTouchPoint = new Point(t.position.x, t.position.y);
        }
    }
#else
    // Mouse: record the release position unless it landed on a UI element.
    if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
    {
        storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
    }
#endif

    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            // BUGFIX: mRgba was reassigned to a new Mat every frame without
            // Dispose(), leaking native memory. Release the old one first.
            if (mRgba != null)
                mRgba.Dispose();
            mRgba = new Mat();

            // Apply any pending touch as a sampling point in texture coordinates.
            if (storedTouchPoint != null)
            {
                ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbaMat.cols(), rgbaMat.rows());
                OnTouch(rgbaMat, storedTouchPoint);
                storedTouchPoint = null;
            }

            FaceLM(rgbaMat);
            HandPoseEstimationProcess(rgbaMat);

            Utils.fastMatToTexture2D(rgbaMat, texture);
        }
    }
}
// Update is called once per frame.
// Uses MOG2 background subtraction to black out the static background,
// leaving only moving foreground pixels visible.
void Update()
{
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat frame = webCamTextureToMatHelper.GetMat();
    Imgproc.cvtColor(frame, rgbMat, Imgproc.COLOR_RGBA2RGB);

    // MOG2 marks moving pixels as foreground; invert the mask so it
    // selects the static background instead.
    backgroundSubstractorMOG2.apply(rgbMat, fgmaskMat);
    Core.bitwise_not(fgmaskMat, fgmaskMat);

    // Blank out everything selected by the (background) mask.
    frame.setTo(new Scalar(0, 0, 0, 0), fgmaskMat);

    Utils.matToTexture2D(frame, texture, webCamTextureToMatHelper.GetBufferColors());
}
// Update is called once per frame.
// Thresholds the frame for red and blue hues in HSV, cleans the combined
// mask with morphological opening, then finds and draws circles in it.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Work in HSV so red/blue hues can be thresholded robustly.
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGB2HSV);

        Scalar lower_red = new Scalar(145, 42, 154);
        Scalar lower_blue = new Scalar(90, 50, 50);
        Scalar upper_red = new Scalar(255, 255, 255);
        Scalar upper_blue = new Scalar(130, 255, 255);

        // Combine the red and blue masks into a single binary image.
        Core.inRange(grayMat, lower_red, upper_red, redframe_threshold);
        Core.inRange(grayMat, lower_blue, upper_blue, blueframe_threshold);
        Core.bitwise_or(redframe_threshold, blueframe_threshold, frame_threshold);

        // Three open (erode + dilate) passes to remove speckle noise.
        // BUGFIX: build the structuring element once and dispose it; the
        // previous code allocated six kernel Mats per frame and leaked them.
        Size size = new Size(5, 5);
        using (Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, size))
        {
            for (int pass = 0; pass < 3; pass++)
            {
                Imgproc.erode(frame_threshold, frame_threshold, kernel);
                Imgproc.dilate(frame_threshold, frame_threshold, kernel);
            }
        }

        using (Mat circles = new Mat())
        {
            Imgproc.HoughCircles(frame_threshold, circles, Imgproc.CV_HOUGH_GRADIENT, 1, frame_threshold.rows() / 2, 20, 15, 15, 100);

            // Each column holds one circle as (x, y, radius).
            Point pt = new Point();
            for (int i = 0; i < circles.cols(); i++)
            {
                double[] data = circles.get(0, i);
                pt.x = data [0];
                pt.y = data [1];
                double rho = data [2];
                Imgproc.circle(rgbaMat, pt, (int)rho, new Scalar(255, 0, 0, 255), 5);
            }
        }

        Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
// Update is called once per frame.
// Records screen touches/clicks as hand sampling points and runs hand
// pose estimation on the current camera frame.
void Update()
{
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
    // Touch: record where the finger was lifted, ignoring UI elements.
    int touchCount = Input.touchCount;
    if (touchCount == 1)
    {
        Touch t = Input.GetTouch(0);
        if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
        {
            storedTouchPoint = new Point(t.position.x, t.position.y);
        }
    }
#else
    // Mouse: record where the button was released, ignoring UI elements.
    if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
    {
        storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
        // Once the hand has been selected, move the webcam display out of
        // the camera's field of view.
        transform.position = new Vector3(0, 0, -1);
    }
#endif

    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat rgbaMat = webCamTextureToMatHelper.GetMat();

    // Apply any pending touch as a sampling point in texture coordinates.
    if (storedTouchPoint != null)
    {
        ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbaMat.cols(), rgbaMat.rows());
        OnTouch(rgbaMat, storedTouchPoint);
        storedTouchPoint = null;
    }

    HandPoseEstimationProcess(rgbaMat);

    Utils.fastMatToTexture2D(rgbaMat, texture);
}
// Update is called once per frame.
// Neural style transfer: feeds the BGR frame through the loaded network and
// reassembles the network output back into a displayable RGBA image.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        if (net == null)
        {
            Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        }
        else
        {
            Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

            // Mean values subtracted from the input blob and added back to the
            // network output below.
            Scalar mean = new Scalar(103.939, 116.779, 123.68);
            Mat blob = Dnn.blobFromImage(bgrMat, 1.0, new Size(bgrMat.width(), bgrMat.height()), mean, false, false);
            net.setInput(blob);

            Mat prob = net.forward();

            // Slice each color plane out of the 4D output blob (dims 2 and 3 are
            // height and width) as a 2D Mat and merge them into a 3-channel image.
            int[] newshape = new int[] { prob.size(2), prob.size(3) };
            using (Mat B_channel = new Mat(prob, new Range[] { new Range(0, 1), new Range(0, 1), Range.all(), Range.all() }).reshape(1, newshape))
            using (Mat G_channel = new Mat(prob, new Range[] { new Range(0, 1), new Range(1, 2), Range.all(), Range.all() }).reshape(1, newshape))
            using (Mat R_channel = new Mat(prob, new Range[] { new Range(0, 1), new Range(2, 3), Range.all(), Range.all() }).reshape(1, newshape))
            {
                Core.merge(new List <Mat>() { B_channel, G_channel, R_channel }, outMat);
            }

            // Undo the mean subtraction and convert back to 8-bit for display.
            Core.add(outMat, mean, outMat);
            outMat.convertTo(bgrMat, CvType.CV_8U);
            Imgproc.cvtColor(bgrMat, rgbaMat, Imgproc.COLOR_BGR2RGBA);

            prob.Dispose();
            blob.Dispose();
        }

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}
// Update is called once per frame.
// Detects a QR code, outlines it, and decodes it only when every corner
// lies inside the image area.
void Update()
{
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat rgbaMat = webCamTextureToMatHelper.GetMat();
    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

    if (detector.detect(grayMat, points))
    {
        // The four detected corners arrive as a flat (x0,y0,...,x3,y3) array.
        float[] corners = new float[8];
        points.get(0, 0, corners);

        // Only attempt decoding when every corner lies inside the image area.
        bool decode = true;
        for (int i = 0; i < 8; i += 2)
        {
            if (!imageSizeRect.contains(new Point(corners [i], corners [i + 1])))
            {
                decode = false;
                break;
            }
        }

        // Outline the detection regardless of whether it can be decoded.
        for (int i = 0; i < 8; i += 2)
        {
            Imgproc.line(rgbaMat, new Point(corners [i], corners [i + 1]), new Point(corners [(i + 2) % 8], corners [(i + 3) % 8]), new Scalar(255, 0, 0, 255), 2);
        }

        if (decode)
        {
            string decode_info = detector.decode(grayMat, points);
            Imgproc.putText(rgbaMat, "DECODE INFO: " + decode_info, new Point(5, grayMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        }
    }

    Utils.fastMatToTexture2D(rgbaMat, texture);
}
// Update is called once per frame.
// Green-screens the live frame against a stored background image; the
// reference background is re-captured on a finished touch (mobile) or the
// space key (desktop/editor).
void Update()
{
    bool resetRequested = false;
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
    if (Input.touchCount == 1)
    {
        Touch t = Input.GetTouch(0);
        if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
        {
            resetRequested = true;
        }
    }
#else
    if (Input.GetKeyUp(KeyCode.Space))
    {
        resetRequested = true;
    }
#endif

    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat frame = webCamTextureToMatHelper.GetMat();

    if (resetRequested)
    {
        frame.copyTo(bgMat);
        setBgTexture(bgMat);
    }

    // Split the frame into foreground/background against the stored reference.
    findFgMaskMat(frame, bgMat, thresh);
    Core.bitwise_not(fgMaskMat, bgMaskMat);

    // Replace everything classified as background with green.
    greenMat.copyTo(frame, bgMaskMat);

    Utils.fastMatToTexture2D(frame, texture);
}
// Update is called once per frame.
// Records screen touches/clicks as hand sampling points, runs hand pose
// estimation, and overlays an instruction prompt.
void Update()
{
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
    // Touch: record the lift-off position unless it landed on a UI element.
    int touchCount = Input.touchCount;
    if (touchCount == 1)
    {
        Touch t = Input.GetTouch(0);
        if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
        {
            storedTouchPoint = new Point(t.position.x, t.position.y);
        }
    }
#else
    // Mouse: record the release position unless it landed on a UI element.
    if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
    {
        storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
    }
#endif

    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat rgbaMat = webCamTextureToMatHelper.GetMat();

    // Apply any pending touch, converted from screen to texture coordinates.
    if (storedTouchPoint != null)
    {
        onTouch(rgbaMat, convertScreenPoint(storedTouchPoint, gameObject, Camera.main));
        storedTouchPoint = null;
    }

    handPoseEstimationProcess(rgbaMat);

    Imgproc.putText(rgbaMat, "Please touch the area of the open hand.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

    Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
}
// Update is called once per frame.
// Template handler: converts the current webcam frame to grayscale as a
// starting point for custom OpenCV processing.
void Update()
{
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat rgbaMat = webCamTextureToMatHelper.GetMat();
    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

    // Here starts the OpenCV script
    /*
     * // ENTER YOUR OPENCV CODE HERE
     */

    // Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
}
// Update is called once per frame.
// Document scanner: finds the largest 4-point contour in the frame,
// perspective-corrects it into the output display area, and (in debug mode)
// visualizes edges, contours and corner indices.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Change the color space to YUV.
        Imgproc.cvtColor(rgbaMat, yuvMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.cvtColor(yuvMat, yuvMat, Imgproc.COLOR_RGB2YUV);
        // Grab only the Y (luma) component.
        Core.extractChannel(yuvMat, yMat, 0);

        // Blur the image to reduce high frequency noises.
        Imgproc.GaussianBlur(yMat, yMat, new Size(3, 3), 0);
        // Find edges in the image.
        Imgproc.Canny(yMat, yMat, 50, 200, 3);

        // Find candidate quadrilateral contours.
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Find4PointContours(yMat, contours);

        // Pick the contour of the largest area and rearrange the points in a
        // consistent order.
        MatOfPoint maxAreaContour = GetMaxAreaContour(contours);
        maxAreaContour = OrderCornerPoints(maxAreaContour);

        bool found = (maxAreaContour.size().area() > 0);
        if (found)
        {
            // Transform the perspective of the original image.
            using (Mat transformedMat = PerspectiveTransform(rgbaMat, maxAreaContour))
            {
                outputDisplayAreaMat.setTo(new Scalar(0, 0, 0, 255));

                // Only show results that fit the display area and are not
                // degenerately small (at least 1/16 of the area's pixels).
                if (transformedMat.width() <= outputDisplayAreaMat.width() && transformedMat.height() <= outputDisplayAreaMat.height()
                    && transformedMat.total() >= outputDisplayAreaMat.total() / 16)
                {
                    // Center the corrected document inside the output area.
                    int x = outputDisplayAreaMat.width() / 2 - transformedMat.width() / 2;
                    int y = outputDisplayAreaMat.height() / 2 - transformedMat.height() / 2;
                    using (Mat dstAreaMat = new Mat(outputDisplayAreaMat, new OpenCVForUnity.CoreModule.Rect(x, y, transformedMat.width(), transformedMat.height())))
                    {
                        transformedMat.copyTo(dstAreaMat);
                    }
                }
            }
        }

        if (isDebugMode)
        {
            // Draw the edge image.
            Imgproc.cvtColor(yMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);
            // Draw all found contours.
            Imgproc.drawContours(rgbaMat, contours, -1, DEBUG_CONTOUR_COLOR, 1);
        }

        if (found)
        {
            // Draw the max area contour.
            Imgproc.drawContours(rgbaMat, new List <MatOfPoint> { maxAreaContour }, -1, CONTOUR_COLOR, 2);

            if (isDebugMode)
            {
                // Draw the corner numbers.
                for (int i = 0; i < maxAreaContour.toArray().Length; i++)
                {
                    var pt = maxAreaContour.get(i, 0);
                    Imgproc.putText(rgbaMat, i.ToString(), new Point(pt[0], pt[1]), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, DEBUG_CORNER_NUMBER_COLOR, 1, Imgproc.LINE_AA, false);
                }
            }
        }

        rgbaMat.copyTo(inputDisplayAreaMat);

        Utils.fastMatToTexture2D(displayMat, texture, true, 0, true);
    }
}
// Update is called once per frame.
// Detects ArUco markers in the camera frame, optionally estimates their pose,
// and aligns either the AR camera or the AR game object with the first
// detected marker.
//
// Fixes vs. the previous version:
//  1. The debug text was drawn on rgbaMat, but the Mat actually displayed is
//     rgbMat — the overlay was never visible. It is now drawn on rgbMat.
//  2. The per-frame temporary rvec Mat was never disposed; it is now wrapped
//     in a using block to avoid per-frame native-memory churn.
void Update() {
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

        // detect markers.
        Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

        // estimate pose of all detected markers.
        if (applyEstimationPose && ids.total() > 0) {
            Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
        }

        if (ids.total() > 0) {
            Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

            if (applyEstimationPose) {
                for (int i = 0; i < ids.total(); i++) {
                    // NOTE(review): passing the whole rvecs/tvecs Mats draws the axis
                    // from their first row only — per-marker submats would be needed
                    // to draw an axis on every marker; confirm against the Aruco API.
                    Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);

                    // This example can display the ARObject on only the first detected marker.
                    if (i == 0) {
                        // position (translation vector).
                        double[] tvec = tvecs.get(i, 0);

                        // rotation: convert the Rodrigues vector into a 3x3 rotation matrix.
                        double[] rv = rvecs.get(i, 0);
                        using (Mat rvec = new Mat(3, 1, CvType.CV_64FC1)) {
                            rvec.put(0, 0, rv [0]);
                            rvec.put(1, 0, rv [1]);
                            rvec.put(2, 0, rv [2]);
                            Calib3d.Rodrigues(rvec, rotMat);
                        }

                        // build a 4x4 homogeneous transform from rotation + translation.
                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // right-handed coordinates system (OpenCV) to left-handed one (Unity).
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        if (shouldMoveARCamera) {
                            // keep the marker object fixed and move the camera instead.
                            ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                        } else {
                            // place the AR object relative to the camera.
                            ARM = ARCamera.transform.localToWorldMatrix * ARM;
                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                        }
                    }
                }
            }
        }

        if (showRejected && rejected.Count > 0) {
            Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
        }

        // FIX: draw the debug text on rgbMat — the Mat that is actually displayed.
        Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
// Update is called once per frame.
// Runs a YOLO-style object detector on the camera frame: builds an input blob,
// forwards it through the network, then draws a box and class label for every
// detection above the confidence threshold.
void Update() {
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        if (net == null) {
            // the model failed to load; show a hint instead of crashing.
            Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        } else {
            // the network expects BGR input.
            Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

            //! [Resizing without keeping aspect ratio]
            Imgproc.resize(bgrMat, resized, new Size(network_width, network_height));
            //! [Resizing without keeping aspect ratio]

            //! [Prepare blob]
            // scale pixels to [0,1], no mean subtraction, swap R/B channels.
            inputBlob = Dnn.blobFromImage(resized, 1 / 255.0, new Size(), new Scalar(0), true, true); //Convert Mat to batch of images
            //! [Prepare blob]

            //! [Set input blob]
            net.setInput(inputBlob, "data"); //set the network input
            //! [Set input blob]

            // TickMeter tm = new TickMeter ();
            // tm.start ();

            //! [Make forward pass]
            // one row per candidate detection:
            // cols 0-4 = x, y, w, h, objectness; cols 5+ = per-class scores.
            Mat detectionMat = net.forward("detection_out"); //compute output
            //! [Make forward pass]

            // tm.stop ();
            // Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());

            // Debug.Log ("detectionMat.ToString(): " + detectionMat.ToString ());

            float[] position = new float[5];
            // NOTE(review): assumes exactly 80 classes (COCO) — confirm against the model.
            float[] confidences = new float[80];

            float confidenceThreshold = 0.24f;
            for (int i = 0; i < detectionMat.rows(); i++) {
                // read box geometry and the per-class score vector for this row.
                detectionMat.get(i, 0, position);
                detectionMat.get(i, 5, confidences);

                // argmax over class scores.
                int maxIdx = confidences.Select((val, idx) => new { V = val, I = idx }).Aggregate((max, working) => (max.V > working.V) ? max : working).I;

                float confidence = confidences [maxIdx];
                if (confidence > confidenceThreshold) {
                    // box center + size are normalized [0,1]; convert to pixel corners.
                    float x = position [0];
                    float y = position [1];
                    float width = position [2];
                    float height = position [3];
                    int xLeftBottom = (int)((x - width / 2) * rgbaMat.cols());
                    int yLeftBottom = (int)((y - height / 2) * rgbaMat.rows());
                    int xRightTop = (int)((x + width / 2) * rgbaMat.cols());
                    int yRightTop = (int)((y + height / 2) * rgbaMat.rows());

                    // Debug.Log ("confidence: " + confidence);
                    //
                    // Debug.Log (" " + xLeftBottom
                    // + " " + yLeftBottom
                    // + " " + xRightTop
                    // + " " + yRightTop);

                    Imgproc.rectangle(rgbaMat, new Point(xLeftBottom, yLeftBottom), new Point(xRightTop, yRightTop), new Scalar(0, 255, 0, 255), 2);

                    if (maxIdx < classNames.Count) {
                        // draw the class label on a filled white background strip.
                        string label = classNames [maxIdx] + ": " + confidence;
                        int[] baseLine = new int[1];
                        Size labelSize = Imgproc.getTextSize(label, Core.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

                        Imgproc.rectangle(rgbaMat, new Point(xLeftBottom, yLeftBottom), new Point(xLeftBottom + labelSize.width, yLeftBottom + labelSize.height + baseLine [0]), new Scalar(255, 255, 255, 255), Core.FILLED);
                        Imgproc.putText(rgbaMat, label, new Point(xLeftBottom, yLeftBottom + labelSize.height), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0, 255));
                    }
                }
            }

            detectionMat.Dispose();
        }

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}
// Update is called once per frame.
// Sparse Lucas-Kanade optical flow: tracks goodFeaturesToTrack corners from
// the previous frame to the current one and draws a circle + motion line for
// every successfully tracked feature.
//
// Fix vs. the previous version: the drawing loop used
// `int y = byteStatus.Count - 1; for (x = 0; x < y; x++)`, which silently
// skipped the LAST tracked feature (off-by-one inherited from the old
// OpenCV4Android sample). The loop now covers every feature.
void Update() {
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        if (mMOP2fptsPrev.rows() == 0) {
            // first time through the loop so we need prev and this mats
            // plus prev points
            // get this mat
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            // copy that to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get prev corners
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());

            // get safe copy of this corners
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
        } else {
            // we've been through before so
            // this mat is valid. Copy it to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get this mat
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            // get the corners for this mat
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsThis.fromArray(MOPcorners.toArray());

            // retrieve the corners from the prev mat
            // (saves calculating them again)
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

            // and save this corners for next time through
            mMOP2fptsThis.copyTo(mMOP2fptsSafe);
        }

        /*
         * Parameters:
         *   prevImg  first 8-bit input image
         *   nextImg  second input image
         *   prevPts  vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
         *   nextPts  output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
         *   status   output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
         *   err      output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
         */
        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

        if (mMOBStatus.rows() > 0) {
            List <Point> cornersPrev = mMOP2fptsPrev.toList();
            List <Point> cornersThis = mMOP2fptsThis.toList();
            List <byte> byteStatus = mMOBStatus.toList();

            // guard against any length mismatch between the parallel lists,
            // then draw every successfully tracked feature (status == 1).
            int count = System.Math.Min(byteStatus.Count, System.Math.Min(cornersPrev.Count, cornersThis.Count));
            for (int x = 0; x < count; x++) {
                if (byteStatus [x] == 1) {
                    Point pt = cornersThis [x];
                    Point pt2 = cornersPrev [x];

                    Imgproc.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);
                    Imgproc.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                }
            }
        }

        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
//public RawImage document;
// Per-frame driver for the document-AR example.
// Phase 1 (!selectTarget): locate a paper sheet by contour detection.
// Phase 2 (selectTarget): track the paper's corners with Lucas-Kanade optical
// flow and perspective-warp a document texture onto the tracked quad.
void Update() {
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat mainMat = webCamTextureToMatHelper.GetMat();

        if (!selectTarget) //find paper by contours
        {
            grayMat = new Mat();

            // convert texture to matrix
            mainMat.copyTo(grayMat);

            mainMat = findPaper(mainMat);

            // display matrix on the screen
            Utils.fastMatToTexture2D(mainMat, texture);
        } else {
            // using optical flow
            // set the currentGrayMat mat
            // NOTE(review): Imgproc.COLOR_RGB2GRAY is a color-conversion code, not a
            // CvType — harmless here only because the cvtColor below reallocates the
            // Mat, but CvType.CV_8UC1 is what was presumably intended; confirm.
            currentGrayMat = new Mat(mainMat.rows(), mainMat.cols(), Imgproc.COLOR_RGB2GRAY);
            Imgproc.cvtColor(mainMat, currentGrayMat, Imgproc.COLOR_RGBA2GRAY);

            if (initOpticalFlow == true) // doing the init setting for optical flow
            {
                // create 40 points: a cluster of 10 jittered points around each
                // of the 4 paper corners, so LK has redundant features per corner.
                Point[] points = new Point[40];

                // set those points near the corner
                // paperCornerMatOfPoint is the corner of the paper
                for (int i = 0; i < 4; i++) {
                    points[i * 10] = new Point(paperCornerMatOfPoint.toList()[i].x, paperCornerMatOfPoint.toList()[i].y);
                    points[i * 10 + 1] = new Point(paperCornerMatOfPoint.toList()[i].x + 1, paperCornerMatOfPoint.toList()[i].y);
                    points[i * 10 + 2] = new Point(paperCornerMatOfPoint.toList()[i].x, paperCornerMatOfPoint.toList()[i].y + 1);
                    points[i * 10 + 3] = new Point(paperCornerMatOfPoint.toList()[i].x + 1, paperCornerMatOfPoint.toList()[i].y + 1);
                    points[i * 10 + 4] = new Point(paperCornerMatOfPoint.toList()[i].x, paperCornerMatOfPoint.toList()[i].y - 1);
                    points[i * 10 + 5] = new Point(paperCornerMatOfPoint.toList()[i].x - 1, paperCornerMatOfPoint.toList()[i].y);
                    points[i * 10 + 6] = new Point(paperCornerMatOfPoint.toList()[i].x - 2, paperCornerMatOfPoint.toList()[i].y - 1);
                    points[i * 10 + 7] = new Point(paperCornerMatOfPoint.toList()[i].x, paperCornerMatOfPoint.toList()[i].y - 2);
                    points[i * 10 + 8] = new Point(paperCornerMatOfPoint.toList()[i].x - 2, paperCornerMatOfPoint.toList()[i].y - 2);
                    points[i * 10 + 9] = new Point(paperCornerMatOfPoint.toList()[i].x + 2, paperCornerMatOfPoint.toList()[i].y + 2);
                }

                // make the points closer to the corners (Harris Corner Detection )
                //Imgproc.goodFeaturesToTrack(currentGrayMat, corners, 40, qualityLevel, minDistance, none, blockSize, false, 0.04);
                //Imgproc.goodFeaturesToTrack(currentGrayMat, corners, 40,0.05,20);
                corners.fromArray(points);

                prevFeatures.fromList(corners.toList());
                currentFeatures.fromList(corners.toList());
                prevGrayMat = currentGrayMat.clone();

                // do not run the init branch again
                initOpticalFlow = false;

                // create random colors (currently unused by the drawing code below)
                for (int i = 0; i < maxCorners; i++) {
                    color.Add(new Scalar((int)(Random.value * 255), (int)(Random.value * 255),
                        (int)(Random.value * 255), 255));
                }
            } else {
                // Don't want ball move
                //currentFeatures.fromArray(prevFeatures.toArray());

                // want ball move: seed this frame's search from last frame's result
                prevFeatures.fromArray(currentFeatures.toArray());

                // optical flow: updates currentFeatures in place with the new positions
                Video.calcOpticalFlowPyrLK(prevGrayMat, currentGrayMat, prevFeatures, currentFeatures, mMOBStatus, err);
                //Debug.Log(st.rows());

                // change to points list
                List <Point> prevList = prevFeatures.toList(),
                nextList = currentFeatures.toList();
                List <byte> byteStatus = mMOBStatus.toList();

                // NOTE(review): the last feature is skipped (x < Count - 1) — looks
                // like an off-by-one carried over from the sample this is based on.
                int x = 0;
                int y = byteStatus.Count - 1;

                for (x = 0; x < y; x++) {
                    if (byteStatus[x] == 1) {
                        Point pt = nextList[x];
                        Point pt2 = prevList[x];

                        Imgproc.circle(mainMat, pt, 10, new Scalar(0, 0, 255), -1);

                        Imgproc.line(mainMat, pt, pt2, new Scalar(0, 0, 255));
                    }
                }

                // draw the data
                //for (int i = 0; i < prevList.Count; i++)
                //{
                //    //Imgproc.circle(frame, prevList[i], 5, color[10]);
                //    Imgproc.circle(mainMat, nextList[i], 10, new Scalar(0, 0, 255), -1);
                //    Imgproc.line(mainMat, prevList[i], nextList[i], color[20]);
                //}

                // group tracked points into clusters: consecutive points within 10px
                // of each other are assumed to belong to the same paper corner.
                List <List <Point>> cornersFeatures = new List <List <Point>>(40);
                cornersFeatures.Add(new List <Point>(10));

                // put the corners features data into the list
                int tmp = 0;
                bool last = true;
                for (int i = 0; i < nextList.Count - 1; i++) {
                    if (Mathf.Abs((float)(nextList[i].x - nextList[i + 1].x)) < 10 && Mathf.Abs((float)(nextList[i].y - nextList[i + 1].y)) < 10) {
                        if (last == true) {
                            cornersFeatures[tmp].Add(nextList[i]);
                        } else {
                            cornersFeatures.Add(new List <Point>(10));
                            tmp = tmp + 1;
                            cornersFeatures[tmp].Add(nextList[i]);
                        }
                        last = true;
                    } else {
                        last = false;
                    }
                }

                // count clusters with at least 5 surviving features; drop the rest.
                // NOTE(review): RemoveAt inside a forward index loop skips the element
                // that slides into the removed slot — confirm whether intentional.
                int manyCornersFeatures = 0;
                for (int i = 0; i < cornersFeatures.Count; i++) {
                    Debug.Log(cornersFeatures[i].Count);
                    if (cornersFeatures[i].Count < 5) {
                        cornersFeatures.RemoveAt(i);
                    } else {
                        manyCornersFeatures++;
                    }
                }

                //Debug.Log("Length" + manyCornersFeatures);

                // if corners equal 4 then display the virtual document in the frame
                // doing the perspective transform
                if (manyCornersFeatures == 4) {
                    Mat documentMat = new Mat(document.height, document.width, CvType.CV_8UC3);
                    Utils.texture2DToMat(document, documentMat);

                    // source quad: the full document texture, corner order TL,TR,BR,BL.
                    List <Point> srcPoints = new List <Point>();
                    srcPoints.Add(new Point(0, 0));
                    srcPoints.Add(new Point(documentMat.cols(), 0));
                    srcPoints.Add(new Point(documentMat.cols(), documentMat.rows()));
                    srcPoints.Add(new Point(0, documentMat.rows()));

                    Mat srcPointsMat = Converters.vector_Point_to_Mat(srcPoints, CvType.CV_32F);

                    // destination quad: first point of each tracked corner cluster.
                    List <Point> dstPoints = new List <Point>() { cornersFeatures[0][0], cornersFeatures[1][0], cornersFeatures[2][0], cornersFeatures[3][0] };
                    Mat dstPointsMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);

                    //Make perspective transform
                    Mat m = Imgproc.getPerspectiveTransform(srcPointsMat, dstPointsMat);
                    Mat warpedMat = new Mat(new Size(), documentMat.type());
                    Debug.Log((cornersFeatures[1][0].x - cornersFeatures[0][0].x) + " " + (cornersFeatures[2][0].y - cornersFeatures[1][0].y));
                    Imgproc.warpPerspective(documentMat, warpedMat, m, mainMat.size(), Imgproc.INTER_LINEAR);
                    //warpedMat.convertTo(warpedMat, CvType.CV_32F);

                    //warpedMat.convertTo(warpedMat, CvType.CV_8UC3);
                    warpedMat.convertTo(warpedMat, CvType.CV_8UC3);

                    // same size as frame
                    Mat dst = new Mat(mainMat.size(), CvType.CV_8UC3);
                    //Mat dst = new Mat(frame.size(), CvType.CV_8UC3);
                    //Mat dst2 = new Mat();

                    Imgproc.cvtColor(mainMat, dst, Imgproc.COLOR_RGBA2RGB);

                    //dst.setTo(new Scalar(0, 255, 0));
                    //currentGrayMat.copyTo(dst);
                    //dst.convertTo(dst, CvType.CV_8UC3);

                    //Imgproc.cvtColor(currentGrayMat, frame, Imgproc.COLOR_GRAY2RGBA);

                    // build a mask covering the warped document's outline so only
                    // the document pixels are composited onto the camera frame.
                    Mat img1 = new Mat();
                    Mat mask = new Mat(mainMat.size(), CvType.CV_8UC1, new Scalar(0));
                    Imgproc.cvtColor(warpedMat, img1, Imgproc.COLOR_RGB2GRAY);
                    Imgproc.Canny(img1, img1, 100, 200);
                    List <MatOfPoint> doc_contours = new List <MatOfPoint>();;
                    Imgproc.findContours(img1, doc_contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
                    Imgproc.drawContours(mask, doc_contours, -1, new Scalar(255), Core.FILLED);

                    warpedMat.copyTo(dst, mask);

                    dst.convertTo(dst, CvType.CV_8UC3);

                    Debug.Log("dst" + dst.size());
                    Imgproc.cvtColor(dst, mainMat, Imgproc.COLOR_RGB2RGBA);

                    // display on the right
                    Texture2D finalTextue = new Texture2D(dst.width(), dst.height(), TextureFormat.RGB24, false);
                    Utils.matToTexture2D(dst, finalTextue);

                    targetRawImage.texture = finalTextue;
                }

                // current frame to old frame
                prevGrayMat = currentGrayMat.clone();

                //Imgproc.cvtColor(currentGrayMat, frame, Imgproc.COLOR_GRAY2RGBA);

                // display matrix on the screen
                Utils.fastMatToTexture2D(mainMat, texture);
            }
        }
    }
}
// Update is called once per frame.
// Low-poly camera effect: finds edge pixels on a downscaled frame, inserts a
// subsample of them into a Delaunay subdivision, then fills every resulting
// triangle (scaled back up) with the color sampled at its centroid.
void Update() {
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame() && !imageOptimizationHelper.IsCurrentFrameSkipped()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        //get downScaleMat (processing runs on the reduced-size image for speed)
        Mat downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat((rgbaMat));

        //grayscale
        Imgproc.cvtColor(downScaleRgbaMat, gray1Mat, Imgproc.COLOR_RGBA2GRAY);

        //blur
        Imgproc.blur(gray1Mat, gray2Mat, new Size(5, 5));

        //edge filter
        Imgproc.filter2D(gray2Mat, gray1Mat, gray1Mat.depth(), kernel);

        //blur
        Imgproc.blur(gray1Mat, gray2Mat, new Size(3, 3));

        //detect edge (binary: edge pixels become 255)
        Imgproc.threshold(gray2Mat, gray2Mat, EDGE_DETECT_VALUE, 255, Imgproc.THRESH_BINARY);

        //copy Mat to byteArray for fast per-pixel access
        Utils.copyFromMat <byte> (gray2Mat, byteArray);

        //set edge pointList: collect the coordinates of all edge pixels
        List <Point> pointList = new List <Point> ();
        int w = gray1Mat.width();
        int h = gray1Mat.height();
        for (int y = 0; y < h; y++) {
            for (int x = 0; x < w; x++) {
                if (byteArray [x + w * y] == 255) {
                    pointList.Add(new Point(x, y));
                }
            }
        }

        // thin the point set: keep at most POINT_RATE of the edge pixels,
        // capped at POINT_MAX_NUM, discarding randomly chosen points.
        int limit = Mathf.RoundToInt((float)(pointList.Count * POINT_RATE));
        if (limit > POINT_MAX_NUM) {
            limit = POINT_MAX_NUM;
        }
        while (pointList.Count > limit) {
            pointList.RemoveAt(Random.Range(0, pointList.Count));
        }
        // Debug.Log ("pointList.Count " + pointList.Count);

        //init subdiv over the downscaled image bounds
        subdiv.initDelaunay(new OpenCVForUnity.CoreModule.Rect(0, 0, downScaleRgbaMat.width(), downScaleRgbaMat.height()));
        for (int i = 0; i < pointList.Count; i++) {
            subdiv.insert(pointList [i]);
        }

        // seed the border with fixed points so the triangulation covers the
        // whole frame even when few edges are detected.
        subdiv.insert(new Point(0, 0));
        subdiv.insert(new Point(gray1Mat.width() / 2 - 1, 0));
        subdiv.insert(new Point(gray1Mat.width() - 1, 0));
        subdiv.insert(new Point(gray1Mat.width() - 1, gray1Mat.height() / 2 - 1));
        subdiv.insert(new Point(gray1Mat.width() - 1, gray1Mat.height() - 1));
        subdiv.insert(new Point(gray1Mat.width() / 2 - 1, gray1Mat.height() - 1));
        subdiv.insert(new Point(0, gray1Mat.height() - 1));
        subdiv.insert(new Point(0, gray1Mat.height() / 2 - 1));

        using (MatOfFloat6 triangleList = new MatOfFloat6()) {
            subdiv.getTriangleList(triangleList);

            // flat array: 6 floats (3 vertices) per triangle, in downscale coords.
            float[] pointArray = triangleList.toArray();
            float downScaleRatio = imageOptimizationHelper.downscaleRatio;
            if (downScaleRatio < 1) {
                downScaleRatio = 1;
            }
            byte[] color = new byte[4];
            for (int i = 0; i < pointArray.Length / 6; i++) {
                // scale the triangle vertices back to full-resolution coordinates.
                Point p0 = new Point(pointArray [i * 6 + 0] * downScaleRatio, pointArray [i * 6 + 1] * downScaleRatio);
                Point p1 = new Point(pointArray [i * 6 + 2] * downScaleRatio, pointArray [i * 6 + 3] * downScaleRatio);
                Point p2 = new Point(pointArray [i * 6 + 4] * downScaleRatio, pointArray [i * 6 + 5] * downScaleRatio);

                // skip triangles with any vertex outside the frame (Subdiv2D can
                // emit far-outside virtual vertices).
                if (p0.x < 0 || p0.x > rgbaMat.width()) {
                    continue;
                }
                if (p0.y < 0 || p0.y > rgbaMat.height()) {
                    continue;
                }
                if (p1.x < 0 || p1.x > rgbaMat.width()) {
                    continue;
                }
                if (p1.y < 0 || p1.y > rgbaMat.height()) {
                    continue;
                }
                if (p2.x < 0 || p2.x > rgbaMat.width()) {
                    continue;
                }
                if (p2.y < 0 || p2.y > rgbaMat.height()) {
                    continue;
                }

                //get center of gravity
                int cx = (int)((p0.x + p1.x + p2.x) * 0.33333);
                int cy = (int)((p0.y + p1.y + p2.y) * 0.33333);
                // Debug.Log ("cx " + cx + " cy " + cy );

                //get center of gravity color
                rgbaMat.get(cy, cx, color);
                // Debug.Log ("r " + color[0] + " g " + color[1] + " b " + color[2] + " a " + color[3]);

                //fill Polygon
                Imgproc.fillConvexPoly(rgbaMat, new MatOfPoint(p0, p1, p2), new Scalar(color [0], color [1], color [2], color [3]), Imgproc.LINE_AA, 0);

                // Imgproc.line (rgbaMat, p0, p1, new Scalar (64, 255, 128, 255));
                // Imgproc.line (rgbaMat, p1, p2, new Scalar (64, 255, 128, 255));
                // Imgproc.line (rgbaMat, p2, p0, new Scalar (64, 255, 128, 255));
            }
        }

        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " DOWNSCALE W:" + downScaleRgbaMat.width () + " H:" + downScaleRgbaMat.height (), new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}
// Update is called once per frame.
// CamShift tracking example: the user taps/clicks to place up to 4 ROI corner
// points; once the ROI is confirmed its hue histogram is computed, and on
// subsequent frames CamShift tracks the region via histogram back-projection.
void Update() {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
    //Touch
    int touchCount = Input.touchCount;
    if (touchCount == 1) {
        Touch t = Input.GetTouch(0);
        // ignore touches that land on UI elements.
        if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId)) {
            storedTouchPoint = new Point(t.position.x, t.position.y);
            //Debug.Log ("touch X " + t.position.x);
            //Debug.Log ("touch Y " + t.position.y);
        }
    }
#else
    //Mouse
    if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject()) {
        storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
        //Debug.Log ("mouse X " + Input.mousePosition.x);
        //Debug.Log ("mouse Y " + Input.mousePosition.y);
    }
#endif

    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // convert to HSV (via RGB); tracking uses the hue channel only.
        Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

        if (storedTouchPoint != null) {
            // map the screen-space tap into texture/Mat coordinates, then let
            // OnTouch record it as an ROI corner (in-place update of the Point).
            ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbaMat.cols(), rgbaMat.rows());
            OnTouch(rgbaMat, storedTouchPoint);
            storedTouchPoint = null;
        }

        Point[] points = roiPointList.ToArray();
        if (shouldStartCamShift) {
            // ROI just completed: compute the hue histogram used for tracking.
            shouldStartCamShift = false;
            using (MatOfPoint roiPointMat = new MatOfPoint(points)) {
                roiRect = Imgproc.boundingRect(roiPointMat);
            }

            if (roiHistMat != null) {
                roiHistMat.Dispose();
                roiHistMat = null;
            }
            roiHistMat = new Mat();

            using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
            using (Mat maskMat = new Mat()) {
                // 16-bin histogram over hue channel 0, hue range [0, 180).
                Imgproc.calcHist(new List <Mat> (new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                //Debug.Log ("roiHist " + roiHistMat.ToString ());
            }
        } else if (points.Length == 4) {
            // tracking phase: back-project the ROI histogram and run CamShift,
            // which updates roiRect and fills points with the rotated-rect corners.
            using (Mat backProj = new Mat()) {
                Imgproc.calcBackProject(new List <Mat> (new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);

                RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                r.points(points);
            }
        }

        if (points.Length < 4) {
            // still selecting: draw the corner points placed so far.
            for (int i = 0; i < points.Length; i++) {
                Imgproc.circle(rgbaMat, points [i], 6, new Scalar(0, 0, 255, 255), 2);
            }
        } else {
            // tracking: draw the rotated tracking quad and its bounding box.
            for (int i = 0; i < 4; i++) {
                Imgproc.line(rgbaMat, points [i], points [(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
            }

            Imgproc.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
        }

        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}