/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// </summary>
public void OnFrameToMatHelperDisposed()
{
    Debug.Log("OnFrameToMatHelperDisposed");

    if (_rgbMat != null)
    {
        _rgbMat.Dispose();
        // Clear the reference so later code cannot touch (or re-dispose) a disposed Mat.
        _rgbMat = null;
    }
}
/// <summary>
/// Compares the given image file against the images in ImageStack and returns
/// the id of the best matching entry.
/// </summary>
/// <param name="fileName">Path of the image file to compare.</param>
/// <param name="fullScan">
/// When false, a hash-level match (or the first template match whose score exceeds
/// <c>thresh</c>) returns immediately; when true, every stacked image is scanned
/// and the best score wins.
/// </param>
/// <returns>The id of the matching image, or -1 when the file could not be loaded
/// or no candidate was found.</returns>
public int CompareFile(string fileName, bool fullScan = false)
{
    ImageInfo isrc = new ImageInfo();
    if (!isrc.SetData(fileName))
    {
        return -1;
    }

    try
    {
        // Fast path: return immediately on a hash-level match.
        if (!fullScan)
        {
            int itp = CompareHash(isrc);
            if (itp >= 0)
            {
                return itp;
            }
        }

        int idx = -1;
        double maxVal = 0;
        using (Mat src = Mat.FromStream(isrc.DataStream, ImreadModes.AnyColor))
        {
            foreach (ImageInfo ii in ImageStack)
            {
                // using blocks guarantee per-iteration mats are released even on the
                // early-return path (the original leaked m/roi/res when returning
                // from inside the loop, and never disposed res at all).
                using (OpenCvSharp.Mat m = OpenCvSharp.Mat.FromStream(ii.DataStream, ImreadModes.AnyColor))
                using (OpenCvSharp.Mat roi = m[0, src.Height > m.Height ? m.Height : src.Height,
                                               0, src.Width > m.Width ? m.Width : src.Width])
                using (OpenCvSharp.Mat res = new Mat())
                {
                    Cv2.MatchTemplate(src, roi, res, TemplateMatchModes.CCoeffNormed);
                    double min, max;
                    Cv2.MinMaxLoc(res, out min, out max);
                    if (maxVal < max)
                    {
                        idx = ii.id;
                        maxVal = max;
                    }
                    // Early out as soon as one candidate clears the threshold.
                    if (!fullScan && max > thresh)
                    {
                        return ii.id;
                    }
                }
            }
        }
        return idx;
    }
    finally
    {
        // Single disposal point for the source image info on every return path
        // (the original leaked it on the early threshold return).
        isrc.Dispose();
    }
}
/// <summary>
/// Stops the camera: releases the capture device and the last grabbed image
/// under the synchronization lock. Safe to call more than once.
/// </summary>
void CloseCamera()
{
    lock (m_sync)
    {
        if (m_videoCapture != null)
        {
            m_videoCapture.Dispose();
        }
        m_videoCapture = null;

        if (m_image != null)
        {
            m_image.Dispose();
            // Also clear the reference — the original nulled m_videoCapture but
            // left m_image pointing at a disposed Mat.
            m_image = null;
        }
    }
}
/// <summary>
/// Convert to cv::Mat
/// </summary>
/// <returns>A newly allocated Mat holding the evaluated expression.</returns>
public Mat ToMat()
{
    var result = new Mat();
    try
    {
        NativeMethods.HandleException(
            NativeMethods.core_MatExpr_toMat(ptr, result.CvPtr));
    }
    catch
    {
        // Do not leak the freshly created Mat when the native call fails.
        result.Dispose();
        throw;
    }
    GC.KeepAlive(this);
    return result;
}
/// <summary>
/// Reads a Mat stored in this file node.
/// </summary>
/// <param name="defaultMat">Matrix handed to the native reader as the default.</param>
/// <returns>The matrix read from the node.</returns>
public Mat ReadMat(Mat defaultMat = null)
{
    var result = new Mat();
    try
    {
        NativeMethods.core_FileNode_read_Mat(ptr, result.CvPtr, Cv2.ToPtr(defaultMat));
        return result;
    }
    catch
    {
        // Release the output Mat if the native read throws.
        result.Dispose();
        throw;
    }
}
/// <summary>
/// Locates the pupil center inside an eye bounding box and, when found, draws a
/// circle plus crosshair guide lines onto <paramref name="frame"/> in place.
/// Based on: https://pysource.com/2019/01/04/eye-motion-tracking-opencv-with-python/
/// </summary>
/// <param name="frame">Full camera frame (BGR); annotated in place.</param>
/// <param name="boundingBox">Eye region within the frame.</param>
/// <returns>Pupil center relative to the ROI, or a default Point when the ROI is empty
/// or no contour was found.</returns>
private OpenCvSharp.Point DetectCenterOfEye(OpenCvSharp.Mat frame, Rect boundingBox)
{
    // Extract the eye region by coordinates.
    using (OpenCvSharp.Mat roi = frame.Clone(boundingBox))
    {
        // Bail out on an unusable ROI. The original leaked the clone on this early
        // return, and only bailed when BOTH dimensions were zero — either dimension
        // being zero makes the region empty and would crash CvtColor.
        if (roi.Rows == 0 || roi.Cols == 0)
        {
            return new OpenCvSharp.Point();
        }

        using (OpenCvSharp.Mat grayRoi = new OpenCvSharp.Mat())
        using (OpenCvSharp.Mat blurredRoi = new OpenCvSharp.Mat())
        using (OpenCvSharp.Mat threshold = new OpenCvSharp.Mat())
        {
            // Convert to grayscale.
            Cv2.CvtColor(roi, grayRoi, ColorConversionCodes.BGR2GRAY);
            // Blur away surrounding noise to isolate the pupil.
            Cv2.GaussianBlur(grayRoi, blurredRoi, new OpenCvSharp.Size(7, 7), 0);
            // Dark pupil becomes a white blob via inverted binary threshold.
            Cv2.Threshold(blurredRoi, threshold, 5, 255, ThresholdTypes.BinaryInv);

            OpenCvSharp.Point[][] contours;
            HierarchyIndex[] hi;
            Cv2.FindContours(threshold, out contours, out hi,
                RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);

            // Keep the contour with the largest radius — the pupil of the eye.
            OpenCvSharp.Point centerOfEye = new OpenCvSharp.Point();
            int eyeRadius = 0;
            for (int i = 0; i < contours.Length; i++)
            {
                OpenCvSharp.Point candidate;
                int candidateRadius = 0;
                PupilCenter(contours, i, out candidate, out candidateRadius);
                if (i == 0 || candidateRadius > eyeRadius)
                {
                    centerOfEye = candidate;
                    eyeRadius = candidateRadius;
                }
            }

            if (eyeRadius > 0)
            {
                Scalar color = new Scalar(0, 0, 255);
                // Translate the pupil center into frame coordinates.
                int frameX = centerOfEye.X + eyeRadius + boundingBox.X;
                int frameY = centerOfEye.Y + eyeRadius + boundingBox.Y;
                OpenCvSharp.Point ctr = new OpenCvSharp.Point(frameX, frameY);
                Cv2.Circle(frame, ctr, eyeRadius, color: color, thickness: 2);
                // Vertical line through the center of the pupil.
                Cv2.Line(img: frame,
                    pt1: new OpenCvSharp.Point(frameX, boundingBox.Y),
                    pt2: new OpenCvSharp.Point(frameX, boundingBox.Y + boundingBox.Height),
                    color, 1);
                // Horizontal line through the center of the pupil.
                Cv2.Line(img: frame,
                    pt1: new OpenCvSharp.Point(boundingBox.X, frameY),
                    pt2: new OpenCvSharp.Point(boundingBox.X + boundingBox.Width, frameY),
                    color, 1);
            }

            return centerOfEye;
        }
    }
}
/// <summary>
/// DCT-domain filter: clones SRC into DST, forward-DCTs channel 0, divides the
/// spectrum by a denominator matrix (PerformDenom), inverse-DCTs, and writes the
/// result back into DST's channel 0. Only Item0 of each pixel is processed.
/// </summary>
/// <param name="DST">Output image; replaced with a clone of SRC, then filtered in place.</param>
/// <param name="SRC">Source image, read through a byte-3 indexer (channel 0 only).</param>
/// <param name="N">Unused — width/height are taken from SRC (see the "//N" remnants below).</param>
public void CvDct(ref Mat DST, Mat SRC, int N)
{
    Mat dct, idct;
    Mat dct2, dct3;
    int width = SRC.Width;//N;
    int height = SRC.Height;//N;
    DST = SRC.Clone();
    // Create the double-precision matrices used for DCT/IDCT.
    dct = new Mat(height, width, MatType.CV_64FC1);
    idct = new Mat(height, width, MatType.CV_64FC1);
    dct2 = new Mat(height, width, MatType.CV_64FC1);
    dct3 = new Mat(height, width, MatType.CV_64FC1);
    // NOTE(review): these MatOfByte3/MatOfDouble3 wrapper Mats are never disposed —
    // possible native-handle leak; confirm OpenCvSharp wrapper ownership semantics.
    var indexer_DST = new MatOfByte3(DST).GetIndexer();
    var indexer_dct = new MatOfDouble3(dct).GetIndexer();
    // Copy the image data into matrix dct, scaling bytes into [0, 1).
    //double fcos;
    for (int x = 0; x < width; x++)
        for (int y = 0; y < height; y++)
        {
            Vec3d color = indexer_dct[y, x];
            color.Item0 = indexer_DST[y, x].Item0 / 256.0;
            indexer_dct[y, x] = color;
        }
    // DCT: cosine-transform dct to produce dct2.
    Cv2.Dct(dct, dct2, DctFlags.None);
    // Divide dct2 by the denominator matrix to produce dct3.
    PerformDenom(ref dct3, dct2);
    // IDCT: inverse cosine-transform dct3.
    Cv2.Dct(dct3, idct, DctFlags.Inverse);
    var indexer_idct = new MatOfDouble3(idct).GetIndexer();
    // Copy the data back into the output image, rescaling to bytes.
    for (int x = 0; x < width; x++)
        for (int y = 0; y < height; y++)
        {
            Vec3b color = indexer_DST[y, x];
            color.Item0 = (byte)(indexer_idct[y, x].Item0 * 256.0);
            indexer_DST[y, x] = color;
        }
    // Disabled normalization pass, kept for reference:
    ////Normalization
    //double min, max;
    //min = 4000000000000;
    //max = -4000000000000;
    //double offset = 0.0;
    ////Get the minimum and maximum luminance values.
    //DST.MinMaxIdx(out min, out max);
    ////for (int x = 0; x < width; x++)
    ////    for (int y = 0; y < height; y++)
    ////    {
    ////        double data = indexer_DST[y,x].Item0;
    ////        if (data < min) min = data;
    ////        if (data > max) max = data;
    ////    }
    //for (int x = 0; x < width; x++)
    //    for (int y = 0; y < height; y++)
    //    {
    //        Vec3b color = indexer_DST[y, x];
    //        double data = indexer_DST[y, x].Item0;
    //        if (data < min + offset) data = min + offset;
    //        color.Item0 = (byte)( (((data / (max - min + offset))) * 255.0) - (((min + offset) / (max - min + offset)) * 255.0) );
    //        indexer_DST[y,x] = color;
    //    }
    ////DST = idct.Clone();
    // Release the matrix memory.
    dct.Dispose();
    dct2.Dispose();
    dct3.Dispose();
    idct.Dispose();
    indexer_dct = null;
    indexer_DST = null;
    indexer_idct = null;
}
// Uses OpenCV to extract contour coordinates from the texture, renders them onto a
// black canvas, and shows the result in the RawImage component.
void OpenCVTexture(Texture2D texture)
{
    Mat newMat = Unity.TextureToMat(texture);
    // (An attempt to detect frame changes by comparing against a global oldMat with
    // Cv2.Equals did not work.)

    // Convert image to grayscale.
    Mat imgGray = new Mat();
    Cv2.CvtColor(newMat, imgGray, ColorConversionCodes.BGR2GRAY);

    // Clean up image using Gaussian Blur.
    Mat imgGrayBlur = new Mat();
    Cv2.GaussianBlur(imgGray, imgGrayBlur, new Size(5, 5), 0);

    // Extract edges.
    Mat cannyEdges = new Mat();
    Cv2.Canny(imgGrayBlur, cannyEdges, 10.0, 70.0);

    // Extract contours. (The original also built an inverted-threshold "mask" Mat
    // here, but it was never read by anything — that dead computation is removed.)
    Point[][] contours;       // detected contour points
    HierarchyIndex[] hierarchy; // contour hierarchy
    Cv2.FindContours(cannyEdges, out contours, out hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple, null);

    // WIP: send over UDP only when the points change.
    PointChangeNumSendUDP(contours);
    //StartCoroutine(udpSendCoroutine(contours));

    // Draw the contours on a black canvas sized to this RectTransform.
    int width = (int)transform.GetComponent<RectTransform>().sizeDelta.x;
    int height = (int)transform.GetComponent<RectTransform>().sizeDelta.y;
    // Mat's constructor takes (rows, cols) = (height, width); the original passed
    // (width, height), which produced a transposed canvas.
    Mat contoursMat = new Mat(height, width, MatType.CV_8UC3, new Scalar(0, 0, 0));
    Cv2.DrawContours(contoursMat, contours, -1, new Scalar(0, 255, 0, 255), 1);

    // Convert the Mat to a Texture2D and display it.
    Texture2D changedTex = Unity.MatToTexture(contoursMat);
    GetComponent<RawImage>().texture = changedTex;

    // Dispose all Mats.
    newMat.Dispose();
    imgGray.Dispose();
    imgGrayBlur.Dispose();
    cannyEdges.Dispose();
    contoursMat.Dispose();

    // Textures must be destroyed explicitly or they leak memory.
    MonoBehaviour.Destroy(texture);
    if (changedTex != oldChangedTex)
    {
        MonoBehaviour.Destroy(oldChangedTex);
        oldChangedTex = changedTex;
    }
}
/// <summary>
/// Shows an open-file dialog and, when the user picks an image, loads it in
/// grayscale into <paramref name="src"/> and displays it in pictureBoxIpl1.
/// </summary>
/// <param name="src">Receives the newly loaded image; the previous Mat (if any)
/// is disposed only when a new file is actually chosen.</param>
void 画像取得(ref Mat src)
{
    OpenFileDialog dialog = new OpenFileDialog()
    {
        Multiselect = false, // disallow multiple selection
        Filter =             // file-type filter
            "画像ファイル|*.bmp;*.gif;*.jpg;*.png|全てのファイル|*.*",
    };

    // Show the dialog.
    DialogResult result = dialog.ShowDialog();
    if (result == DialogResult.OK)
    {
        // Put the chosen file name in the title bar.
        this.Text = dialog.SafeFileName;

        // Dispose the previous image only now that it is being replaced.
        // (The original disposed it before showing the dialog, so cancelling
        // left the caller holding a dangling disposed Mat.)
        if (src != null)
            src.Dispose();

        // Load with OpenCV.
        src = new Mat(dialog.FileName, ImreadModes.GrayScale);
        pictureBoxIpl1.ImageIpl = src;
    }
}