/// <summary>
/// Builds a side-by-side visualization twice as wide as the input: the original
/// frame (with the detected contour outlined on it) on the left, and the
/// processed "scan" centered, unscaled, in the right half.
/// </summary>
/// <param name="original">Source image</param>
/// <param name="processed">Processed image; may be null, in which case the right half stays background-colored</param>
/// <param name="detectedContour">Contour to draw over the original image to show the detected shape</param>
/// <returns>OpenCV::Mat with both images combined</returns>
private Mat CombineMats(Mat original, Mat processed, Point[] detectedContour)
{
    int srcWidth = original.Width;
    int srcHeight = original.Height;

    // Output canvas: double width, neutral gray background.
    var matCombined = new Mat(new Size(srcWidth * 2, srcHeight), original.Type(), Scalar.FromRgb(64, 64, 64));

    // Left half: copy of the input frame...
    original.CopyTo(matCombined.SubMat(0, srcHeight, 0, srcWidth));

    // ...with the detected paper outline drawn on top (a valid shape needs at least 3 points).
    bool hasContour = detectedContour != null && detectedContour.Length > 2;
    if (hasContour)
    {
        matCombined.DrawContours(new Point[][] { detectedContour }, 0, Scalar.FromRgb(255, 255, 0), 3);
    }

    // Right half: the scanned paper, no extra scaling, centered around the
    // midpoint of the right half of the canvas.
    if (processed != null)
    {
        double halfW = processed.Width * 0.5;
        double halfH = processed.Height * 0.5;
        double centerX = srcWidth + srcWidth * 0.5;
        double centerY = srcHeight * 0.5;

        Mat roi = matCombined.SubMat(
            (int)(centerY - halfH), (int)(centerY + halfH),
            (int)(centerX - halfW), (int)(centerX + halfW));
        processed.CopyTo(roi);
    }

    return matCombined;
}
/// <summary>
/// Converts the current webcam frame to a Mat, runs the paper scanner on it,
/// and returns the frame with the detected paper contour drawn over it.
/// Falls back to hue-based grayscale when detection fails, so the next frame
/// has a better chance of succeeding.
/// </summary>
/// <param name="inputTexture">Live webcam texture to scan.</param>
/// <returns>Texture of the input frame with the detected contour highlighted.</returns>
public Texture getScanFrame(WebCamTexture inputTexture)
{
    // Decode the texture once; the original code called Unity.TextureToMat twice,
    // paying for a second full decode and allocating a second unmanaged Mat.
    Mat original = Unity.TextureToMat(inputTexture);

    // Hand the scanner its own copy so its processing cannot mutate the frame we draw on.
    scanner.Input = original.Clone();

    // On failure, switch the gray conversion mode so the following frames are
    // pre-processed differently (better odds on colored/low-contrast paper).
    if (!scanner.Success)
    {
        scanner.Settings.GrayMode = PaperScanner.ScannerSettings.ColorMode.HueGrayscale;
    }

    Point[] detectedContour = scanner.PaperShape;

    // Draw the contour over a copy of the input. (The previous gray-background
    // Mat + full-area SubMat copy was an expensive equivalent of Clone().)
    Mat matCombinedFrame = original.Clone();
    if (null != detectedContour && detectedContour.Length > 2)
    {
        matCombinedFrame.DrawContours(new Point[][] { detectedContour }, 0, Scalar.FromRgb(255, 255, 0), 3);
    }

    return Unity.MatToTexture(matCombinedFrame);
}
/// <summary>
/// Draws 2D detection overlays (bounding boxes, label, distance) for every
/// renderable object onto <paramref name="left_display"/>, then alpha-blends
/// translucent box fills on top.
/// </summary>
/// <param name="left_display">Image to draw into (modified in place).</param>
/// <param name="img_scale">Scale from detection coordinates to image pixels.</param>
/// <param name="objects">Detected objects from the SDK.</param>
/// <param name="render_mask">Currently unused; kept for caller compatibility.</param>
/// <param name="isTrackingON">Whether tracking is on (affects which objects render).</param>
public static void render_2D(ref OpenCvSharp.Mat left_display, sl.float2 img_scale, ref sl.Objects objects, bool render_mask, bool isTrackingON)
{
    // Translucent fills are painted on a clone and blended back at the end.
    OpenCvSharp.Mat overlay = left_display.Clone();
    OpenCvSharp.Rect roi_render = new OpenCvSharp.Rect(0, 0, left_display.Size().Width, left_display.Size().Height);
    // (Removed: an unused CV_8UC1 `mask` Mat was allocated here and never read or disposed.)
    int line_thickness = 2;
    for (int i = 0; i < objects.numObject; i++)
    {
        sl.ObjectData obj = objects.objectData[i];
        if (Utils.renderObject(obj, isTrackingON))
        {
            OpenCvSharp.Scalar base_color = Utils.generateColorID_u(obj.id);

            // Display image-scale bounding box 2D; all 4 corners are required.
            if (obj.boundingBox2D.Length < 4)
            {
                continue;
            }
            Point top_left_corner = Utils.cvt(obj.boundingBox2D[0], img_scale);
            Point top_right_corner = Utils.cvt(obj.boundingBox2D[1], img_scale);
            Point bottom_right_corner = Utils.cvt(obj.boundingBox2D[2], img_scale);
            Point bottom_left_corner = Utils.cvt(obj.boundingBox2D[3], img_scale);

            // The two horizontal edges...
            Cv2.Line(left_display, top_left_corner, top_right_corner, base_color, line_thickness);
            Cv2.Line(left_display, bottom_left_corner, bottom_right_corner, base_color, line_thickness);
            // ...and the two vertical edges.
            Utils.drawVerticalLine(ref left_display, bottom_left_corner, top_left_corner, base_color, line_thickness);
            Utils.drawVerticalLine(ref left_display, bottom_right_corner, top_right_corner, base_color, line_thickness);

            // Scaled ROI for the translucent fill, clamped to the frame so
            // SubMat cannot throw when the scaled box extends past an edge
            // (the original passed the raw rect and never used roi_render).
            int roiLeft = Math.Max(top_left_corner.X, 0);
            int roiTop = Math.Max(top_left_corner.Y, 0);
            int roiRight = Math.Min(top_left_corner.X + (int)top_right_corner.DistanceTo(top_left_corner), roi_render.Width);
            int roiBottom = Math.Min(top_left_corner.Y + (int)bottom_right_corner.DistanceTo(top_right_corner), roi_render.Height);
            if (roiRight > roiLeft && roiBottom > roiTop)
            {
                OpenCvSharp.Rect roi = new OpenCvSharp.Rect(roiLeft, roiTop, roiRight - roiLeft, roiBottom - roiTop);
                overlay.SubMat(roi).SetTo(base_color);
            }

            sl.float2 position_image = getImagePosition(obj.boundingBox2D, img_scale);
            Cv2.PutText(left_display, obj.label.ToString(), new Point(position_image.x - 20, position_image.y - 12), HersheyFonts.HersheyComplexSmall, 0.5f, new Scalar(255, 255, 255, 255), 1);

            // Distance readout; skipped when the SDK reports an infinite depth.
            // NOTE(review): NaN depth is not filtered here — confirm the SDK never emits it.
            if (!float.IsInfinity(obj.position.Z))
            {
                string text = Math.Abs(obj.position.Z).ToString("0.##M");
                Cv2.PutText(left_display, text, new Point(position_image.x - 20, position_image.y), HersheyFonts.HersheyComplexSmall, 0.5, new Scalar(255, 255, 255, 255), 1);
            }
        }
    }
    // Here, overlay is as the left image, but with opaque masks on each detected object;
    // blend 70% drawing / 30% fills back into left_display.
    Cv2.AddWeighted(left_display, 0.7, overlay, 0.3, 0.0, left_display);
}
/// <summary>
/// Starts the screen-capture loop on a background task. Each iteration grabs
/// the source region via GDI, XOR-diffs it against the last intra frame, and
/// raises <c>SegmentCaptured</c> only for grid tiles whose pixels changed.
/// </summary>
/// <param name="ms">Capture interval in milliseconds (default 50).</param>
private void StartCaptureLoop(double ms = 50)
{
    //Parallel.Invoke(() =>
    m_Task = Task.Run(() =>
    {
        var capCnt = 0;          // frames captured so far; drives the pacing at the bottom of the loop
        var sw = new Stopwatch();
        // NOTE(review): this Mat wraps the external pixel buffer m_Bits (no copy),
        // so the StretchBlt below writes straight into it — confirm m_Bits is the
        // DIB section selected into m_Hdc.
        var mat = new Mat(m_DstSize.Height, m_DstSize.Width, MatType.CV_8UC4, m_Bits);
        var mat_xor = new Mat();   // per-pixel XOR against the intra frame
        var mat_diff = new Mat();  // grayscale of the XOR; non-zero pixel = changed pixel
        // One tile of the CaptureDivisionNum x CaptureDivisionNum grid.
        var segRect = new Rectangle(0, 0, m_DstSize.Width / CaptureDivisionNum, m_DstSize.Height / CaptureDivisionNum);
        var cData = new CaptureData()
        {
            captureData = m_Bits,
            captureSize = m_DstSize,
            isIntraFrame = true,
        };
        // Reference frame the diff is computed against; updated tile-by-tile below.
        m_IntraFrameMat = new Mat(m_DstSize.Height, m_DstSize.Width, MatType.CV_8UC4);
        sw.Start();
        while (m_IsCapturing)
        {
            //Win32.BitBlt(hdc, 0, 0, dstSize.Height, dstSize.Width, m_ProcessDC, 0, 0, Win32.SRCCOPY);
            // Blit the source region into m_Hdc. The destination origin at
            // m_DstSize.Height combined with a negative height mirrors the image
            // vertically (bottom-up DIB -> top-down pixel order) — TODO confirm.
            Win32.StretchBlt(m_Hdc, 0, m_DstSize.Height, m_DstSize.Width, -m_DstSize.Height, m_ProcessDC, m_SrcRect.X, m_SrcRect.Y, m_SrcRect.Width, m_SrcRect.Height, Win32.SRCCOPY);
            Captured(this, cData);  // notify listeners a full raw frame is available in m_Bits
            try
            {
                // Changed pixels become non-zero in mat_diff.
                Cv2.BitwiseXor(mat, m_IntraFrameMat, mat_xor);
                Cv2.CvtColor(mat_xor, mat_diff, ColorConversionCodes.RGBA2GRAY);
            }
            catch { continue; }  // best-effort: drop this frame on any OpenCV failure and try again
            // Walk the grid; encode and publish only the tiles that changed.
            for (int y = 0; y < CaptureDivisionNum; y++)
            {
                for (int x = 0; x < CaptureDivisionNum; x++)
                {
                    var segIdx = y * CaptureDivisionNum + x;  // linear tile index, row-major
                    segRect.X = segRect.Width * x;
                    segRect.Y = segRect.Height * y;
                    var sRect = new Rect(segRect.X, segRect.Y, segRect.Width, segRect.Height);
                    var segDiff = mat_diff.SubMat(sRect);
                    var nonZero = segDiff.CountNonZero();
                    if (nonZero != 0)  // tile differs from the intra frame
                    {
                        var segCapture = mat.SubMat(sRect);
                        var img_buffer = segCapture.ImEncode(EncodeFormatExtension, m_EncodingParam);
                        var sData = new SegmentCaptureData()
                        {
                            segmentIdx = segIdx,
                            rect = segRect,  // Rectangle is a value type, so this copies the current tile
                            encodedFrameBuffer = img_buffer,
                        };
                        SegmentCaptured(this, sData);
                        // Fold the new tile into the intra frame so the next diff is relative to it.
                        var segIntra = m_IntraFrameMat.SubMat(sRect);
                        segCapture.CopyTo(segIntra);
                    }
                }
            }
            // Pace against absolute elapsed time (ms * capCnt) rather than sleeping
            // a fixed amount per iteration, so per-frame processing time does not
            // accumulate drift.
            var sleepMs = (ms * capCnt) - sw.Elapsed.TotalMilliseconds;
            if (sleepMs > 0) Thread.Sleep((int)sleepMs);
            capCnt++;
            //Debug.Log(""+capCnt);
            //GC.Collect();
            //File.WriteAllBytes("dump/"+capCnt+".jpg", mat.ImEncode(EncodeFormatExtension, m_EncodingParam));
        }
        sw.Stop();
        //Win32.SelectObject(hdc, hbmpPrev);
    });
}