public FitLine()
{
    // cvFitLine demo: fit a 2D line to random points and visualize each
    // point's distance to the fitted line.
    CvSize canvasSize = new CvSize(500, 500);
    CvPoint2D32f[] samples = GetRandomPoints(20, canvasSize);
    CvLine2D fitted = Cv.FitLine2D(samples, DistanceType.L2, 0, 0.01, 0.01);

    using (IplImage canvas = new IplImage(canvasSize, BitDepth.U8, 3))
    {
        canvas.Zero();

        // Clip the infinite fitted line to the image bounds and draw it.
        CvPoint lineStart, lineEnd;
        fitted.FitSize(canvas.Width, canvas.Height, out lineStart, out lineEnd);
        canvas.Line(lineStart, lineEnd, CvColor.Green, 1, LineType.Link8);

        // Mark every sample point and label it with its distance to the line.
        using (CvFont labelFont = new CvFont(FontFace.HersheySimplex, 0.33, 0.33))
        {
            foreach (CvPoint2D32f sample in samples)
            {
                double distance = fitted.Distance(sample);
                canvas.Circle(sample, 2, CvColor.White, -1, LineType.AntiAlias);
                canvas.PutText(
                    string.Format("{0:F1}", distance),
                    new CvPoint((int)(sample.X + 3), (int)(sample.Y + 3)),
                    labelFont,
                    CvColor.Green);
            }
        }

        CvWindow.ShowImages(canvas);
    }
}
/// <summary>
/// Captures frames from the default camera, stamps each frame with its
/// index, writes it to "cap.avi" and shows it in a window until ESC is
/// pressed (or the camera stops delivering frames).
/// </summary>
public VideoWriter()
{
    // (1) Create a capture structure for the camera.
    using (CvCapture capture = CvCapture.FromCamera(0))
    {
        // (2) Query the capture size (depends on the camera in use).
        int width = capture.FrameWidth;
        int height = capture.FrameHeight;
        double fps = 15;//capture.Fps;

        // (3) Create the video writer.
        using (CvVideoWriter writer = new CvVideoWriter("cap.avi", FourCC.Prompt, fps, new CvSize(width, height)))
        using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.7, 0.7))
        using (CvWindow window = new CvWindow("Capture", WindowMode.AutoSize))
        {
            // (4) Grab frames from the camera and write them out.
            for (int frames = 0; ; frames++)
            {
                IplImage frame = capture.QueryFrame();
                if (frame == null)
                {
                    // FIX: QueryFrame returns null when the camera is
                    // disconnected or grabbing fails; the original would
                    // have thrown a NullReferenceException here.
                    break;
                }
                string str = string.Format("{0}[frame]", frames);
                frame.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                writer.WriteFrame(frame);
                window.ShowImage(frame);
                int key = CvWindow.WaitKey((int)(1000 / fps));
                if (key == '\x1b')
                {
                    // ESC ends the capture loop.
                    break;
                }
            }
        }
    }
}
/// <summary>
/// Prints tracks information.
/// </summary>
/// <param name="imgSource">Input image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="imgDest">Output image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="mode">Render mode. By default is CV_TRACK_RENDER_ID.</param>
/// <param name="font">OpenCV font for print on the image.</param>
public void Render(IplImage imgSource, IplImage imgDest, RenderTracksMode mode, CvFont font)
{
    if (imgSource == null)
    {
        throw new ArgumentNullException(nameof(imgSource));
    }
    if (imgDest == null)
    {
        throw new ArgumentNullException(nameof(imgDest));
    }
    if (imgDest.Depth != BitDepth.U8)
    {
        throw new ArgumentException("imgDest.Depth != U8");
    }
    if (imgDest.NChannels != 3)
    {
        throw new ArgumentException("imgDest.NChannels != 3");
    }

    bool renderId = (mode & RenderTracksMode.Id) == RenderTracksMode.Id;
    bool renderBox = (mode & RenderTracksMode.BoundingBox) == RenderTracksMode.BoundingBox;

    // A font is only needed for id labels; supply a default when absent.
    if (renderId && font == null)
    {
        font = new CvFont(FontFace.HersheyDuplex, 0.5, 0.5, 0, 1);
    }

    if (mode == RenderTracksMode.None)
    {
        return;
    }

    foreach (KeyValuePair<int, CvTrack> kv in this)
    {
        CvTrack track = kv.Value;

        // Label only live (non-inactive) tracks with their numeric id.
        if (renderId && track.Inactive == 0)
        {
            Cv.PutText(imgDest, kv.Key.ToString(), track.Centroid, font, CvColor.Green);
        }

        if (renderBox)
        {
            // Dark red box for inactive tracks, bright red for active ones.
            CvColor boxColor = track.Inactive > 0
                ? new CvColor(0, 0, 50)
                : new CvColor(0, 0, 255);
            Cv.Rectangle(
                imgDest,
                new CvPoint(track.MinX, track.MinY),
                new CvPoint(track.MaxX - 1, track.MaxY - 1),
                boxColor);
        }
    }
}
/// <summary>
/// Enables the on-screen FPS counter: prepares the font, restarts the
/// timing stopwatch, and chains displayFPS into the update delegate.
/// </summary>
public static void addDisplayFPS()
{
    font = new CvFont(FontFace.HersheyComplexSmall, 1.0, 1.0);
    sw.Restart(); // equivalent to Reset() followed by Start()
    m_updateDel += displayFPS;
}
public Text()
{
    // cvInitFont, cvPutText:
    // build every font face in both upright and italic form, then draw
    // one sample text line per font.
    List<FontFace> faces = new List<FontFace>((FontFace[])Enum.GetValues(typeof(FontFace)));
    faces.Remove(FontFace.Italic); // Italic is a modifier flag, not a face

    // (1) allocate and clear the canvas
    using (IplImage canvas = Cv.CreateImage(new CvSize(450, 600), BitDepth.U8, 3))
    {
        Cv.Zero(canvas);

        // (2) two fonts per face: plain and italic
        CvFont[] fonts = new CvFont[faces.Count * 2];
        for (int f = 0; f < faces.Count; f++)
        {
            fonts[f * 2] = new CvFont(faces[f], 1.0, 1.0);
            fonts[f * 2 + 1] = new CvFont(faces[f] | FontFace.Italic, 1.0, 1.0);
        }

        // (3) draw one sample line per font, each in a random color
        for (int i = 0; i < fonts.Length; i++)
        {
            CvColor color = CvColor.Random();
            Cv.PutText(canvas, "OpenCV sample code", new CvPoint(15, (i + 1) * 30), fonts[i], color);
        }

        // (4) show the image; exit on any key press
        using (CvWindow w = new CvWindow(canvas))
        {
            CvWindow.WaitKey(0);
        }
    }
}
public Contour()
{
    // cvContourArea, cvArcLength:
    // compute the area enclosed by a contour and the contour's length,
    // comparing them with the bounding rectangle's area/perimeter.
    const int SIZE = 500;

    // (1) allocate and clear the image
    using (CvMemStorage storage = new CvMemStorage())
    using (IplImage canvas = new IplImage(SIZE, SIZE, BitDepth.U8, 3))
    {
        canvas.Zero();

        // (2) generate a random star-shaped closed polygon (20 vertices)
        CvSeq<CvPoint> contour = new CvSeq<CvPoint>(SeqType.PolyLine, storage);
        CvRNG rng = new CvRNG((ulong)DateTime.Now.Ticks);

        double radiusScale = rng.RandReal() + 0.5;
        CvPoint prev = new CvPoint
        {
            X = (int)(Math.Cos(0) * SIZE / 4 * radiusScale + SIZE / 2),
            Y = (int)(Math.Sin(0) * SIZE / 4 * radiusScale + SIZE / 2)
        };
        canvas.Circle(prev, 2, CvColor.Green);
        contour.Push(prev);

        for (int i = 1; i < 20; i++)
        {
            radiusScale = rng.RandReal() + 0.5;
            CvPoint current = new CvPoint
            {
                X = (int)(Math.Cos(i * 2 * Math.PI / 20) * SIZE / 4 * radiusScale + SIZE / 2),
                Y = (int)(Math.Sin(i * 2 * Math.PI / 20) * SIZE / 4 * radiusScale + SIZE / 2)
            };
            canvas.Line(prev, current, CvColor.Green, 2);
            prev.X = current.X;
            prev.Y = current.Y;
            canvas.Circle(prev, 3, CvColor.Green, Cv.FILLED);
            contour.Push(prev);
        }
        // close the polygon back to the first vertex
        canvas.Line(prev, contour.GetSeqElem(0).Value, CvColor.Green, 2);

        // (3) bounding rectangle, area and perimeter of the contour
        CvRect bounds = contour.BoundingRect(false);
        double area = contour.ContourArea();
        double perimeter = contour.ArcLength(CvSlice.WholeSeq, 1);

        // (4) draw the results onto the image
        canvas.Rectangle(
            new CvPoint(bounds.X, bounds.Y),
            new CvPoint(bounds.X + bounds.Width, bounds.Y + bounds.Height),
            CvColor.Red, 2);
        string text_area = string.Format("Area: wrect={0}, contour={1}", bounds.Width * bounds.Height, area);
        string text_length = string.Format("Length: rect={0}, contour={1}", 2 * (bounds.Width + bounds.Height), perimeter);
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.7, 0.7, 0, 1, LineType.AntiAlias))
        {
            canvas.PutText(text_area, new CvPoint(10, canvas.Height - 30), font, CvColor.White);
            canvas.PutText(text_length, new CvPoint(10, canvas.Height - 10), font, CvColor.White);
        }

        // (5) display until a key is pressed
        using (CvWindow window = new CvWindow("BoundingRect", WindowMode.AutoSize))
        {
            window.Image = canvas;
            CvWindow.WaitKey(0);
        }
    }
}
/// <summary>
/// Writes one frame to the video, stamped with its frame number.
/// </summary>
/// <param name="frame">Frame image to annotate and record.</param>
/// <param name="frames">Frame counter drawn onto the image.</param>
public void VideoWriter(IplImage frame, int frames)
{
    // FIX: CvFont is IDisposable; the original allocated a new font on
    // every call without disposing it, leaking a native resource per frame.
    using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.5, 0.5))
    {
        string str = string.Format("{0}[Frame]", frames);
        frame.PutText(str, new CvPoint(10, 20), font, new CvColor(255, 0, 0));
        this.writer.WriteFrame(frame);
    }
}
/// <summary>
/// Computes image moments of a loaded image, derives Hu invariants, and
/// renders the values as text on the image.
/// </summary>
public Moments()
{
    // (1) Load the image. For a 3-channel image the COI (channel of
    //     interest) must be set before computing moments.
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyColor | LoadMode.AnyDepth))
    {
        if (srcImg.NChannels == 3 && srcImg.COI == 0)
        {
            srcImg.COI = 1;
        }
        // (2) Compute image moments up to the 3rd order.
        CvMoments moments = new CvMoments(srcImg, false);
        srcImg.COI = 0;
        // (3) Derive moments and Hu invariants from the CvMoments values.
        double spatialMoment = moments.GetSpatialMoment(0, 0);
        double centralMoment = moments.GetCentralMoment(0, 0);
        double normCMoment = moments.GetNormalizedCentralMoment(0, 0);
        CvHuMoments huMoments = new CvHuMoments(moments);
        // (4) Draw the computed values onto the image as text.
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
        {
            string[] text = new string[10];
            text[0] = string.Format("spatial={0:F3}", spatialMoment);
            text[1] = string.Format("central={0:F3}", centralMoment);
            // FIX: originally printed spatialMoment under the "norm=" label,
            // leaving normCMoment computed but unused.
            text[2] = string.Format("norm={0:F3}", normCMoment);
            text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
            text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
            text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
            text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
            text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
            text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
            text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);
            CvSize textSize = font.GetTextSize(text[0]);
            for (int i = 0; i < 10; i++)
            {
                srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
            }
        }
        // (5) Show the annotated image; exit on key press.
        using (CvWindow window = new CvWindow("Image", WindowMode.AutoSize))
        {
            window.ShowImage(srcImg);
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// Computes image moments of a loaded image, derives Hu invariants, and
/// renders the values as text on the image.
/// </summary>
public Moments()
{
    // A 3-channel image needs the COI (channel of interest) set before
    // moments can be computed.
    using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyColor | LoadMode.AnyDepth))
    {
        if (srcImg.NChannels == 3 && srcImg.COI == 0)
        {
            srcImg.COI = 1;
        }
        CvMoments moments = new CvMoments(srcImg, false);
        srcImg.COI = 0;

        double spatialMoment = moments.GetSpatialMoment(0, 0);
        double centralMoment = moments.GetCentralMoment(0, 0);
        double normCMoment = moments.GetNormalizedCentralMoment(0, 0);
        CvHuMoments huMoments = new CvHuMoments(moments);

        // drawing
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
        {
            string[] text = new string[10];
            text[0] = string.Format("spatial={0:F3}", spatialMoment);
            text[1] = string.Format("central={0:F3}", centralMoment);
            // FIX: originally printed spatialMoment under the "norm=" label,
            // leaving normCMoment computed but unused.
            text[2] = string.Format("norm={0:F3}", normCMoment);
            text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
            text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
            text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
            text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
            text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
            text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
            text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);
            CvSize textSize = font.GetTextSize(text[0]);
            for (int i = 0; i < 10; i++)
            {
                srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
            }
        }

        using (var window = new CvWindow("Image", WindowMode.AutoSize))
        {
            window.ShowImage(srcImg);
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// Prints tracks information.
/// </summary>
/// <param name="tracks">List of tracks.</param>
/// <param name="imgSource">Input image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="imgDest">Output image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="mode">Render mode. By default is CV_TRACK_RENDER_ID.</param>
/// <param name="font">OpenCV font for print on the image.</param>
public static void RenderTracks(CvTracks tracks, IplImage imgSource, IplImage imgDest, RenderTracksMode mode, CvFont font)
{
    if (tracks == null)
    {
        throw new ArgumentNullException(nameof(tracks));
    }
    if (imgSource == null)
    {
        throw new ArgumentNullException(nameof(imgSource));
    }
    if (imgDest == null)
    {
        throw new ArgumentNullException(nameof(imgDest));
    }

    // A null font is legal: pass a null pointer and let the native
    // side use its own default.
    IntPtr fontPtr = font?.CvPtr ?? IntPtr.Zero;
    CvBlobInvoke.cvb_cvRenderTracks(tracks.CvPtr, imgSource.CvPtr, imgDest.CvPtr, mode, fontPtr);
}
/// <summary>
/// Prints tracks information.
/// </summary>
/// <param name="tracks">List of tracks.</param>
/// <param name="imgSource">Input image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="imgDest">Output image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="mode">Render mode. By default is CV_TRACK_RENDER_ID.</param>
/// <param name="font">OpenCV font for print on the image.</param>
public static void RenderTracks(CvTracks tracks, IplImage imgSource, IplImage imgDest, RenderTracksMode mode, CvFont font)
{
    if (tracks == null)
        throw new ArgumentNullException(nameof(tracks));

    // Delegate to the instance method, which performs the remaining
    // argument validation and the actual drawing.
    tracks.Render(imgSource, imgDest, mode, font);
}
/// <summary>
/// Prints tracks information.
/// </summary>
/// <param name="tracks">List of tracks.</param>
/// <param name="imgSource">Input image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="imgDest">Output image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="mode">Render mode. By default is CV_TRACK_RENDER_ID.</param>
/// <param name="font">OpenCV font for print on the image.</param>
public static void RenderTracks(CvTracks tracks, IplImage imgSource, IplImage imgDest, RenderTracksMode mode, CvFont font)
{
    if (tracks == null)
    {
        throw new ArgumentNullException(nameof(tracks));
    }

    // Forward to the instance implementation on the tracks collection.
    tracks.Render(imgSource, imgDest, mode, font);
}
// Timer tick: grab one camera frame, run the selected processing pipeline
// (ROI overlay, skin detection, threshold/edge extraction, KNN sign
// matching) and display the annotated result in picBoxMain.
// NOTE(review): native images are released manually in each branch —
// statement order matters; do not reorder.
private void timerGrab_Tick(object sender, EventArgs e)
{
    IplImage frame = cvlib.cvQueryFrame( videoCapture);
    // A null native pointer means the grab failed; stop polling.
    if (frame.ptr == IntPtr.Zero)
    {
        timerGrab.Stop();
        MessageBox.Show("Invalid Frame");
        return;
    }
    imgMain = cvlib.cvCreateImage(cvlib.cvGetSize( frame), 8, 3);
    if (reset)
    {
        // User requested a reset: drop ROI and initialization state.
        showROI = false;
        initialized = false;
        resetting();
    }
    cvlib.cvCopy( frame, imgMain);
    cvlib.cvFlip( imgMain); // flip the image (axis per cvlib's default flip mode)
    #region ROI
    if (showROI && initialized)
    {
        // Outline the region of interest on the main image, then crop it.
        cvlib.cvRectangle( imgMain, cvlib.cvPoint(roiX, roiY), cvlib.cvPoint(roiX + roiW, roiY + roiH), cvlib.CV_RGB(255, 0, 125), 1, 8, 0);
        imgCrop = cvlib.cvCreateImage(cvlib.cvSize(roiW, roiH), 8, 3);
        #region skinHSV/RGB
        if (showSkinHSV || showSkinRGB)
        {
            imgSkin = new IplImage();
            imgSkin = cvlib.cvCreateImage(cvlib.cvGetSize( frame), 8, 3);
            if (showSkinHSV) imgSkin = skinDet.skin_hsv(imgMain);
            else if (showSkinRGB) imgSkin = skinDet.skin_rgb(imgMain);
            // Copy only the ROI portion of the skin mask into the crop.
            cvlib.cvSetImageROI( imgSkin, cvlib.cvRect(roiX, roiY, roiW, roiH));
            cvlib.cvCopy( imgSkin, imgCrop);
            cvlib.cvReleaseImage( imgSkin);
            //noise removal: dilate/erode/smooth with dialog-chosen iteration counts
            cvlib.cvDilate(imgCrop, imgCrop, dlgParam.GetP(0).i);
            cvlib.cvErode( imgCrop, imgCrop, dlgParam.GetP(1).i);
            for (int i = 0; i < dlgParam.GetP(2).i; i++) cvlib.cvSmooth( imgCrop, imgCrop);
        }
        #endregion
        #region show threshold
        if (showThres || showEdge)
        {
            // Single-channel working images for threshold/edge output.
            imgGray = cvlib.cvCreateImage(cvlib.cvGetSize( imgCrop), 8, 1);
            imgBin = cvlib.cvCreateImage(cvlib.cvGetSize( imgCrop), 8, 1);
            imgMot = cvlib.cvCreateImage(cvlib.cvGetSize( imgCrop), 8, 1);
            cvlib.cvCvtColor( imgCrop, imgGray, cvlib.CV_BGR2GRAY);
            cvlib.cvThreshold( imgGray, imgMot, 0, 255, cvlib.CV_THRESH_BINARY_INV);
            abs.Absolute(imgMot);
            if (showThres) cvlib.cvThreshold( imgGray, imgBin, 0, 255, cvlib.CV_THRESH_BINARY_INV);
            else if (showEdge)
            {
                // Canny thresholds come from the edge-parameter dialog.
                edge_dlg();
                cvlib.cvCanny( imgGray, imgBin, dlgCanny.GetP(0).i, dlgCanny.GetP(1).i);
            }
            cvlib.cvShowImage("Crop", imgBin);
            #region matching
            if (match)
            {
                // KNN classification; hasil = 19 appears to be a sentinel
                // for "nothing to classify" — TODO confirm against Signs[].
                if (adaBlackPix(imgBin)) hasil = (int)KNN.classify(ref imgBin, false);
                else hasil = 19;
                WriteLine(Signs[hasil], false, false);
                match = false;
                show_letter = true;
            }
            #endregion
            cvlib.cvReleaseImage( imgGray);
            cvlib.cvReleaseImage( imgCrop);
            cvlib.cvReleaseImage( imgBin);
            cvlib.cvReleaseImage( imgMot);
        }
        else
        {
            cvlib.cvShowImage("Crop", imgCrop);
            cvlib.cvReleaseImage( imgCrop);
        }
        #endregion
    }
    else if (!initialized && !showROI) imgMain = hc.cariHaar(imgMain); // Haar-based detection pass
    else if (!initialized) //initialize windows
        initialize();
    #endregion
    if (show_letter)
    {
        // Overlay the last recognized sign as a large green letter.
        CvFont font = new CvFont();
        cvlib.cvInitFont( font, cvlib.CV_FONT_HERSHEY_SIMPLEX, 5, 5, 0, 10, cvlib.CV_AA);
        cvlib.cvPutText( imgMain, Signs[hasil], cvlib.cvPoint(50, 200), font, cvlib.cvScalar(0, 255, 0));
    }
    picBoxMain.Image = cvlib.ToBitmap(imgMain, false);
    cvlib.cvReleaseImage( imgMain);
    fps++;
    // Track hand open/close gestures once all four landmark coords are set.
    if ((openx != 0 && openy != 0 && closex != 0 && closey != 0) && !showROI) euclidean();
}
// Timer tick: grab one camera frame, run the selected processing pipeline
// (ROI overlay, skin detection, threshold/edge extraction, KNN sign
// matching) and display the annotated result in picBoxMain.
// This variant uses the ref-style OpenCV wrapper (cxcore/cv/highgui).
// NOTE(review): native images are released manually per branch — statement
// order matters; do not reorder.
private void timerGrab_Tick(object sender, EventArgs e)
{
    frame = highgui.CvQueryFrame(ref videoCapture);
    // A null native pointer means the grab failed; stop polling.
    if (frame.ptr == IntPtr.Zero)
    {
        timerGrab.Stop();
        MessageBox.Show("Invalid Frame");
        return;
    }
    imgMain = cxcore.CvCreateImage(cxcore.CvGetSize(ref frame), 8, 3);
    if (reset)
    {
        // User requested a reset: drop ROI and initialization state.
        showROI = false;
        initialized = false;
        resetting();
    }
    cxcore.CvCopy(ref frame, ref imgMain);
    cxcore.CvFlip(ref imgMain, 0); // flip around the x-axis
    #region ROI
    if (showROI && initialized)
    {
        // Outline the region of interest on the main image, then crop it.
        cxcore.CvRectangle(ref imgMain, new CvPoint(roiX, roiY), new CvPoint(roiX + roiW, roiY + roiH), cxcore.CV_RGB(255, 0, 125), 1, 8, 0);
        imgCrop = cxcore.CvCreateImage(new CvSize(roiW, roiH), 8, 3);
        #region skinHSV/RGB
        if (showSkinHSV || showSkinRGB)
        {
            imgSkin = new IplImage();
            imgSkin = cxcore.CvCreateImage(cxcore.CvGetSize(ref frame), 8, 3);
            if (showSkinHSV)
            {
                imgSkin = skinDet.skin_hsv(imgMain);
            }
            else if (showSkinRGB)
            {
                imgSkin = skinDet.skin_rgb(imgMain);
            }
            // Copy only the ROI portion of the skin mask into the crop.
            cxcore.CvSetImageROI(ref imgSkin, new CvRect(roiX, roiY, roiW, roiH));
            cxcore.CvCopy(ref imgSkin, ref imgCrop);
            cxcore.CvReleaseImage(ref imgSkin);
            //noise removal: dilate/erode/smooth with dialog-chosen iteration counts
            cv.CvDilate(ref imgCrop, ref imgCrop, dlgParam.GetP(0).i);
            cv.CvErode(ref imgCrop, ref imgCrop, dlgParam.GetP(1).i);
            for (int i = 0; i < dlgParam.GetP(2).i; i++)
            {
                cv.CvSmooth(ref imgCrop, ref imgCrop);
            }
        }
        #endregion
        #region show threshold
        if (showThres || showEdge)
        {
            // Single-channel working images for threshold/edge output.
            imgGray = cxcore.CvCreateImage(cxcore.CvGetSize(ref imgCrop), 8, 1);
            imgBin = cxcore.CvCreateImage(cxcore.CvGetSize(ref imgCrop), 8, 1);
            imgMot = cxcore.CvCreateImage(cxcore.CvGetSize(ref imgCrop), 8, 1);
            cv.CvCvtColor(ref imgCrop, ref imgGray, cvtypes.CV_BGR2GRAY);
            cv.CvThreshold(ref imgGray, ref imgMot, 0, 255, cv.CV_THRESH_BINARY_INV);
            abs.Absolute(imgMot);
            if (showThres)
            {
                cv.CvThreshold(ref imgGray, ref imgBin, 0, 255, cv.CV_THRESH_BINARY_INV);
            }
            else if (showEdge)
            {
                // Canny thresholds come from the edge-parameter dialog.
                edge_dlg();
                cv.CvCanny(ref imgGray, ref imgBin, dlgCanny.GetP(0).i, dlgCanny.GetP(1).i);
            }
            highgui.CvShowImage("Crop", ref imgBin);
            #region matching
            if (match)
            {
                // KNN classification; hasil = 19 appears to be a sentinel
                // for "nothing to classify" — TODO confirm against Signs[].
                if (adaBlackPix(imgBin))
                {
                    hasil = (int)KNN.classify(ref imgBin, false);
                }
                else
                {
                    hasil = 19;
                }
                WriteLine(Signs[hasil], false, false);
                match = false;
                show_letter = true;
            }
            #endregion
            cxcore.CvReleaseImage(ref imgGray);
            cxcore.CvReleaseImage(ref imgCrop);
            cxcore.CvReleaseImage(ref imgBin);
            cxcore.CvReleaseImage(ref imgMot);
        }
        else
        {
            highgui.CvShowImage("Crop", ref imgCrop);
            cxcore.CvReleaseImage(ref imgCrop);
        }
        #endregion
    }
    else if (!initialized && !showROI)
    {
        // Haar-based detection pass before initialization completes.
        imgMain = hc.cariHaar(imgMain);
    }
    else if (!initialized) //initialize windows
    {
        initialize();
    }
    #endregion
    if (show_letter)
    {
        // Overlay the last recognized sign as a large green letter.
        CvFont font = new CvFont();
        cxcore.CvInitFont(ref font, cxcore.CV_FONT_HERSHEY_SIMPLEX, 5, 5, 0, 10, cxcore.CV_AA);
        cxcore.CvPutText(ref imgMain, Signs[hasil], new CvPoint(50, 200), ref font, new CvScalar(0, 255, 0));
    }
    picBoxMain.Image = highgui.ToBitmap(imgMain, false);
    cxcore.CvReleaseImage(ref imgMain);
    fps++;
    // Track hand open/close gestures once all four landmark coords are set.
    if ((openx != 0 && openy != 0 && closex != 0 && closey != 0) && !showROI)
    {
        euclidean();
    }
}
// Worker-thread processing (capture): grabs frames from a videoInput
// device, subtracts a dark frame, detects the brightest blob and tracks
// its centroid (gx, gy), reporting annotated frames back to the UI.
// NOTE(review): touches many shared fields (id, gx, gy, fifo, blobs, ...)
// without locking; assumes only this worker writes them — confirm.
private void worker_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = (BackgroundWorker)sender;
    Stopwatch sw = new Stopwatch();
    string str;
    id = 0;
    // UDP for sending PID data
    // local port number to bind
    // FSI_PID_DATA pid_data = new FSI_PID_DATA();
    int localPort = mmFsiUdpPortMT3PV;
    System.Net.Sockets.UdpClient udpc2 = null;;
    /* try
     * {
     *     udpc2 = new System.Net.Sockets.UdpClient(localPort);
     *
     * }
     * catch (Exception ex)
     * {
     *     // display via anonymous delegate
     *     this.Invoke(new dlgSetString(ShowRText), new object[] { richTextBox1, ex.ToString() });
     * }
     */
    // videoInput object
    const int DeviceID = 0; // 0; // 3 (pro), 4(piccolo) 7(DMK)
    const int CaptureFps = 30; // 30
    int interval = (int)(1000 / CaptureFps / 10); // polling sleep per loop iteration
    const int CaptureWidth = 640;
    const int CaptureHeight = 480;
    // number of images to keep saving after a trigger
    int mmFsiPostRec = 60;
    int save_counter = mmFsiPostRec;
    using (VideoInput vi = new VideoInput())
    {
        vi.SetIdealFramerate(DeviceID, CaptureFps);
        vi.SetupDevice(DeviceID, CaptureWidth, CaptureHeight);
        int width = vi.GetWidth(DeviceID);
        int height = vi.GetHeight(DeviceID);
        using (IplImage img = new IplImage(width, height, BitDepth.U8, 3))
        using (IplImage img_dark8 = Cv.LoadImage(@"C:\piccolo\MT3V_dark.bmp", LoadMode.GrayScale))
        //using (IplImage img_dark = new IplImage(width, height, BitDepth.U8, 3))
        using (IplImage img_mono = new IplImage(width, height, BitDepth.U8, 1))
        using (IplImage img2 = new IplImage(width, height, BitDepth.U8, 1))
        // using (Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format24bppRgb))
        using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.45, 0.45))
        //using (CvWindow window0 = new CvWindow("FIFO0", WindowMode.AutoSize))
        {
            //this.Size = new Size(width + 12, height + 148);
            double min_val, max_val;
            CvPoint min_loc, max_loc;
            int size = 15;
            int size2x = size / 2;
            int size2y = size / 2;
            int crop = 20;
            double sigma = 3;
            long elapsed0 = 0, elapsed1 = 0;
            double framerate0 = 0, framerate1 = 0;
            double alfa_fr = 0.99; // smoothing factor for the framerate estimate
            sw.Start();
            while (bw.CancellationPending == false)
            {
                if (vi.IsFrameNew(DeviceID))
                {
                    DateTime dn = DateTime.Now; // acquisition time
                    vi.GetPixels(DeviceID, img.ImageData, false, true);
                    // draw timestamp on screen
                    str = String.Format("Wide ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff");// +String.Format(" ({0,000:F2},{1,000:F2}) ({2,000:0},{3,000:0})({4,0:F1})", gx, gy, max_loc.X, max_loc.Y, max_val);
                    img.PutText(str, new CvPoint(10, 475), font, new CvColor(0, 100, 40));
                    Cv.CvtColor(img, img_mono, ColorConversion.BgrToGray);
                    Cv.Sub(img_mono, img_dark8, imgdata.img); // dark-frame subtraction
                    imgdata.id = ++id;
                    imgdata.t = dn;
                    imgdata.ImgSaveFlag = !(ImgSaveFlag != 0); // int -> bool conversion
                    // Keep the frame FIFO bounded at MaxFrame entries.
                    if (fifo.Count == MaxFrame - 1)
                    {
                        fifo.EraseLast();
                    }
                    fifo.InsertFirst(imgdata);
                    #region Position detection 1 // MinMaxLoc (disabled)
                    /*// position detection
                     * Cv.Smooth(imgdata.img, img2, SmoothType.Gaussian, size, 0, sigma, 0);
                     * CvRect rect;
                     * if (PvMode == MyDETECT)
                     * {
                     *     rect = new CvRect( (int)(gx+0.5) - size, (int)(gy+0.5) - size, size*2, size*2);
                     *     Cv.SetImageROI(img2, rect);
                     *     Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                     *     Cv.ResetImageROI(img2);
                     *     max_loc.X += (int)(gx + 0.5) - size; // +1 because the reference point is (1,1)
                     *     max_loc.Y += (int)(gy + 0.5) - size;
                     * }
                     * else
                     * {
                     *     rect = new CvRect(crop, crop, width - (crop + crop), height - (crop + crop));
                     *     Cv.SetImageROI(img2, rect);
                     *     Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                     *     Cv.ResetImageROI(img2);
                     *     max_loc.X += crop; // +1 because the reference point is (1,1)
                     *     max_loc.Y += crop;
                     * }
                     * window0.ShowImage(img2);
                     *
                     * double m00, m10, m01;
                     * size2x = size2y = size / 2;
                     * if (max_loc.X - size2x < 0) size2x = max_loc.X;
                     * if (max_loc.Y - size2y < 0) size2y = max_loc.Y;
                     * if (max_loc.X + size2x >= width ) size2x = width -max_loc.X -1;
                     * if (max_loc.Y + size2y >= height) size2y = height -max_loc.Y -1;
                     * rect = new CvRect(max_loc.X - size2x, max_loc.Y - size2y, size, size);
                     * CvMoments moments;
                     * Cv.SetImageROI(img2, rect);
                     * Cv.Moments(img2, out moments, false);
                     * Cv.ResetImageROI(img2);
                     * m00 = Cv.GetSpatialMoment(moments, 0, 0);
                     * m10 = Cv.GetSpatialMoment(moments, 1, 0);
                     * m01 = Cv.GetSpatialMoment(moments, 0, 1);
                     * gx = max_loc.X - size2x + m10 / m00;
                     * gy = max_loc.Y - size2y + m01 / m00;
                     */
                    #endregion
                    #region Position detection 2 // Blob
                    Cv.Threshold(imgdata.img, img2, threshold_blob, 255, ThresholdType.Binary); //2ms
                    blobs.Label(img2, imgLabel); //1.4ms
                    max_label = blobs.GreaterBlob();
                    elapsed1 = sw.ElapsedTicks; //1.3ms
                    if (blobs.Count > 1 && gx >= 0)
                    {
                        uint min_area = (uint)(threshold_min_area * blobs[max_label].Area);
                        blobs.FilterByArea(min_area, uint.MaxValue); //0.001ms
                        // Select the best blob: large area AND small distance
                        // from the previous position.
                        double x = blobs[max_label].Centroid.X;
                        double y = blobs[max_label].Centroid.Y;
                        uint area = blobs[max_label].Area;
                        //CvRect rect;
                        distance_min = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); //Math.Sqrt()
                        foreach (var item in blobs)
                        {
                            //Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, item.Value.Centroid, item.Value.Area);
                            x = item.Value.Centroid.X;
                            y = item.Value.Centroid.Y;
                            //rect = item.Value.Rect;
                            distance = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); // future: Mahalanobis distance
                            if (distance < distance_min)
                            {
                                d_val = (item.Value.Area) / max_area;
                                if (distance <= 25) // short range (5 pix)
                                {
                                    if (d_val >= 0.4) //&& d_val <= 1.2)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                                else
                                {
                                    // farther away: require an area closer to the previous blob's
                                    if (d_val >= 0.8 && d_val <= 1.5)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                            }
                            //w.WriteLine("{0} {1} {2} {3} {4}", dis, dv, i, item.Key, item.Value.Area);
                        }
                        //gx = x; gy = y;
                        max_val = area;
                    }
                    if (max_label > 0)
                    {
                        // A blob was selected: update the tracked centroid.
                        maxBlob = blobs[max_label];
                        max_centroid = maxBlob.Centroid;
                        gx = max_centroid.X;
                        gy = max_centroid.Y;
                        max_area = maxBlob.Area;
                        if (this.States == SAVE)
                        {
                            // While saving, push PID data and restart the main-save timer.
                            Pid_Data_Send();
                            timerSavePostTime.Stop();
                            timerSaveMainTime.Stop();
                            timerSaveMainTime.Start();
                        }
                    }
                    else
                    {
                        // No blob found: reset the tracked position.
                        gx = gy = 0;
                        max_area = 0;
                    }
                    #endregion
                    // on-screen display
                    str = String.Format("ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff") + String.Format(" ({0,000:F2},{1,000:F2}) ({2,000:0},{3,000:0})({4,0:F1})", gx, gy, xoa, yoa, max_area);
                    if (imgdata.ImgSaveFlag)
                    {
                        str += " True";
                    }
                    img.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                    img.Circle(new CvPoint((int)gx, (int)gy), 10, new CvColor(255, 255, 100));
                    bw.ReportProgress(0, img);
                    // processing speed: exponentially-smoothed framerate estimate
                    elapsed0 = sw.ElapsedTicks - elapsed1; // ticks for one frame
                    elapsed1 = sw.ElapsedTicks;
                    framerate0 = alfa_fr * framerate1 + (1 - alfa_fr) * (Stopwatch.Frequency / (double)elapsed0);
                    framerate1 = framerate0;
                    str = String.Format("fr time = {0}({1}){2:F1}", sw.Elapsed, id, framerate0); //," ", sw.ElapsedMilliseconds);
                    // show the current time on a label via anonymous delegate
                    this.Invoke(new dlgSetString(ShowText), new object[] { textBox1, str });
                    //img.ToBitmap(bitmap);
                    //pictureBox1.Refresh();
                }
                Application.DoEvents();
                Thread.Sleep(interval);
            }
            // Cancellation requested: restore UI state and stop the device.
            this.States = STOP;
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsStart, this.States });
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsEndButton, this.States });
            vi.StopDevice(DeviceID);
            //udpc2.Close();
        }
    }
}
/// <summary>
/// Classical Multidimensional Scaling
/// </summary>
public MDS()
{
    // distance matrix between cities
    int n = CityDistance.GetLength(0);
    CvMat dist = new CvMat(n, n, MatrixType.F64C1, CityDistance);

    // add Torgerson's additive constant, then square every element
    dist += Torgerson(dist);
    dist.Mul(dist, dist);

    // centering matrix G and inner-product matrix B = -0.5 * G * D * G'
    CvMat centering = CenteringMatrix(n);
    CvMat b = centering * dist * centering.T() * -0.5;

    // eigen-decomposition of B
    CvMat vectors = new CvMat(n, n, MatrixType.F64C1);
    CvMat values = new CvMat(n, 1, MatrixType.F64C1);
    Cv.EigenVV(b, vectors, values);

    // clamp negative eigenvalues (numerical noise) to zero
    for (int r = 0; r < values.Rows; r++)
    {
        if (values[r] < 0)
        {
            values[r] = 0;
        }
    }

    // coordinates: first two eigenvectors scaled by sqrt(eigenvalue)
    CvMat result = vectors.GetRows(0, 2);
    for (int r = 0; r < result.Rows; r++)
    {
        for (int c = 0; c < result.Cols; c++)
        {
            result[r, c] *= Math.Sqrt(values[r]);
        }
    }

    // rescale coordinates into a pixel-friendly range
    Cv.Normalize(result, result, 0, 800, NormType.MinMax);

    // plot the estimated city locations
    using (IplImage img = new IplImage(800, 600, BitDepth.U8, 3))
    using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.5f, 0.5f))
    using (CvWindow window = new CvWindow("City Location Estimation"))
    {
        img.Zero();
        for (int c = 0; c < n; c++)
        {
            double x = result[0, c];
            double y = result[1, c];
            x = x * 0.7 + img.Width * 0.1;
            y = y * 0.7 + img.Height * 0.1;
            img.Circle((int)x, (int)y, 5, CvColor.Red, -1);
            img.PutText(CityNames[c], new CvPoint((int)x + 5, (int)y + 10), font, CvColor.White);
        }
        window.Image = img;
        Cv.WaitKey();
    }
}