// Constructor: loads the test image from `fileName`, prepares a grayscale copy,
// and spins up keyboard-input and shape-detection worker threads while the
// viewer dialog blocks this thread.
public LineDetectionFromFileTesting()
{
    viewer = new ImageViewer(); //create an image viewer

    // Load and shrink the source image, keep an untouched clone plus a
    // grayscale conversion for the detection routines.
    // gray = new Image<Gray, Byte>("C:/RoboSub/RoboImagesTest2/92c.png");
    fileImage = new Image<Bgr, Byte>(fileName);
    fileImage = fileImage.Resize(300, 200, Emgu.CV.CvEnum.INTER.CV_INTER_AREA, true);
    img = fileImage.Clone();
    gray = img.Convert<Gray, Byte>();
    // img = new Image<Bgr, Byte>("C:/RoboSub/RoboImagesTest2/92c.png");

    // Viewer window is three times the (reduced) image size.
    viewer.Size = new Size(fileImage.Width * 3, fileImage.Height * 3);

    // Worker threads: one polls the keyboard, one runs shape detection.
    Thread input = new Thread(getKeyboardInput);
    input.Start();
    Thread test = new Thread(testShapeDetection);
    test.Start();
    Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
    {
        //testShapeDetection();
    });

    viewer.ShowDialog(); // blocks until the viewer window is closed

    // NOTE(review): Thread.Abort is a hard stop and is obsolete on modern .NET;
    // cooperative cancellation (a volatile flag the loops check) would be safer.
    test.Abort();
    input.Abort();
}
/// <summary>
/// Form startup: prompts for a source picture, builds the HSV source and
/// threshold images, lays out the two viewer windows, and runs the first
/// threshold pass.
/// </summary>
private void frmMain_Load(object sender, EventArgs e)
{
    OpenFileDialog dlgSourcePicture = new OpenFileDialog();
    dlgSourcePicture.Filter = "Image Files(*.BMP;*.JPG;*.GIF;*.PNG)|*.BMP;*.JPG;*.GIF;*.PNG|All files (*.*)|*.*";
    dlgSourcePicture.Multiselect = false;
    if (dlgSourcePicture.ShowDialog() != System.Windows.Forms.DialogResult.OK)
    {
        Application.Exit();
        // BUG FIX: Application.Exit() only posts a shutdown request; it does
        // NOT abort this handler. Without this return the code below ran with
        // no selected file and threw when loading dlgSourcePicture.FileName.
        return;
    }

    // Source image converted to HSV; threshold image starts blank at same size.
    m_imgSource = new Image<Bgr, Byte>((Bitmap)Image.FromFile(dlgSourcePicture.FileName)).Convert<Hsv, Byte>();
    m_imgThreshold = new Image<Gray, Byte>(m_imgSource.Size);

    m_frmSourceImage = new ImageViewer(m_imgSource, "Original Image");
    m_frmSourceImage.ShowIcon = false;
    m_frmSourceImage.MaximizeBox = false;
    m_frmSourceImage.Show();
    m_frmSourceImage.SetDesktopLocation(100, 0);
    m_frmSourceImage.SizeChanged += m_frmSourceImage_SizeChanged;

    m_frmThresholdImage = new ImageViewer(m_imgThreshold, "Threshold Image");
    m_frmThresholdImage.ShowIcon = false;
    m_frmThresholdImage.MaximizeBox = false;
    m_frmThresholdImage.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
    m_frmThresholdImage.Show();
    // Place the threshold window just to the right of the source window.
    m_frmThresholdImage.SetDesktopLocation(m_frmSourceImage.DesktopLocation.X + m_frmSourceImage.Size.Width + 100, m_frmSourceImage.DesktopLocation.Y);

    ProduceThresholdImage();
}
/// <summary>
/// Runs the heuristic background detector on the image at
/// <paramref name="filepath"/>, prints the detected RGB colour, and shows a
/// solid 600x600 swatch of it in a modal viewer.
/// </summary>
public static void DetectBackground(String filepath)
{
    Console.WriteLine("Running Heuristic Background Detector");

    var detected = Heuristics.DetectBackground(new System.Drawing.Bitmap(filepath));
    Console.WriteLine("R,G,B : " + detected.Red + "," + detected.Green + "," + detected.Blue);

    // Fill a square image with the detected colour so the user can eyeball it.
    var swatch = new Emgu.CV.Image<Bgr, Byte>(600, 600, detected);
    var display = new ImageViewer(swatch, "Heuristic Background Detection Result");
    display.ShowDialog();
}
/// <summary>
/// Opens the default camera and streams frames into a modal image viewer
/// until the viewer window is closed.
/// </summary>
public void testCam()
{
    ImageViewer display = new ImageViewer();
    Capture camera = new Capture();

    // One frame per idle tick; runs until the dialog below is dismissed.
    Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
    {
        display.Image = camera.QueryFrame();
    });

    display.ShowDialog();
}
/// <summary>
/// Creates a receiver that grabs frames from the camera with the given index.
/// </summary>
/// <param name="camNumber">Zero-based index of the web-cam to open.</param>
public ImageReceiver(int camNumber)
{
    // The original followed these constructions with a null check, but `new`
    // never returns null in C#, so that branch was unreachable dead code.
    // Construction failures surface as exceptions from the constructors
    // themselves.
    viewer = new ImageViewer();
    capture = new Capture(camNumber);
}
// Demonstrates the BG-CodeBook background-subtraction model on "tree.avi":
// the first `learningFrames` frames train the model, after which stale
// codebook entries are cleared each frame and the foreground mask is shown.
public static void TestCodeBook()
{
    int learningFrames = 40; // frames used for training before subtraction starts
    using (Capture capture = new Capture("tree.avi"))
    using (BGCodeBookModel<Ycc> bgmodel = new BGCodeBookModel<Ycc>())
    {
        #region Set color thresholds values
        // MCvBGCodeBookModel is a value struct: copy out, tweak, write back.
        MCvBGCodeBookModel param = bgmodel.MCvBGCodeBookModel;
        param.modMin[0] = param.modMin[1] = param.modMin[2] = 3;
        param.modMax[0] = param.modMax[1] = param.modMax[2] = 10;
        param.cbBounds[0] = param.cbBounds[1] = param.cbBounds[2] = 10;
        bgmodel.MCvBGCodeBookModel = param;
        #endregion

        ImageViewer viewer = new ImageViewer();
        int count = 0; // number of frames processed so far
        EventHandler processFrame = delegate(Object sender, EventArgs e)
        {
            Image<Bgr, Byte> img = capture.QueryFrame();
            if (img == null)
            {
                // End of video: nothing more to process.
                return;
            }

            // Full-frame mask (all 255) passed to the model calls below.
            Image<Gray, byte> mask = new Image<Gray, Byte>(img.Size);
            mask.SetValue(255);

            viewer.Text = String.Format("Processing {0}th image. {1}", count++, learningFrames > 0 ? "(Learning)" : String.Empty);
            using (Image<Ycc, Byte> ycc = img.Convert<Ycc, Byte>()) //using YCC color space for BGCodeBook
            {
                bgmodel.Update(ycc, ycc.ROI, mask);

                if (learningFrames == 0) //training is completed
                    bgmodel.ClearStale(bgmodel.MCvBGCodeBookModel.t / 2, ycc.ROI, mask);

                // NOTE(review): keeps decrementing below zero after training;
                // harmless since only ==0 and >0 are tested, but worth knowing.
                learningFrames--;

                Image<Gray, Byte> m = bgmodel.ForgroundMask.Clone();
                // NOTE(review): redundant second clone at frame 56 — looks like
                // a leftover debugging breakpoint target.
                if (count == 56)
                {
                    m = bgmodel.ForgroundMask.Clone();
                }
                //m._EqualizeHist();
                viewer.Image = m;
                //viewer.Image = img;
                System.Threading.Thread.Sleep(100); // throttle playback to ~10 fps
            }
            img.Dispose();
        };
        Application.Idle += processFrame;
        viewer.ShowDialog();
    }
}
/// <summary>
/// Displays the given image in a modal viewer centered on its parent.
/// Null images are ignored; non-Bitmap images are converted first.
/// </summary>
/// <param name="img">Image to show; may be null.</param>
public static void ShowImage(Image img)
{
    if (img != null)
    {
        // BUG FIX: the original did `img as Bitmap` and passed the result to
        // the Image<Bgr, byte> constructor unchecked — a non-Bitmap Image
        // yielded null and crashed. Convert instead of casting blindly.
        Bitmap map = img as Bitmap ?? new Bitmap(img);
        Image<Bgr, byte> image = new Image<Bgr, byte>(map);
        using (ImageViewer iv = new ImageViewer(image))
        {
            iv.StartPosition = FormStartPosition.CenterParent;
            iv.ShowDialog();
        }
    }
}
/// <summary>
/// Debugger-visualizer entry point: shows the visualized object (any IImage)
/// in a modal ImageViewer hosted by the debugger window service.
/// </summary>
protected override void Show(IDialogVisualizerService windowService, IVisualizerObjectProvider objectProvider)
{
    IImage target = objectProvider.GetObject() as IImage;
    if (target == null)
        return; // not an IImage — nothing to visualize

    using (ImageViewer host = new ImageViewer())
    {
        host.Image = target;
        windowService.ShowDialog(host);
    }
}
/// <summary>
/// Debugger-visualizer entry point: wraps the visualized Bitmap in an
/// Emgu Image and shows it in a modal ImageViewer.
/// </summary>
protected override void Show(IDialogVisualizerService windowService, IVisualizerObjectProvider objectProvider)
{
    Bitmap bmp = objectProvider.GetObject() as Bitmap;
    if (bmp == null)
        return; // not a Bitmap — nothing to visualize

    using (ImageViewer host = new ImageViewer())
    {
        host.Image = new Image<Bgr, Byte>(bmp);
        windowService.ShowDialog(host);
    }
}
// Base constructor: wires up the OpenCV controller and visual-data singletons,
// opens the debug image viewer, and allocates the writeable bitmaps the UI
// binds to for colour, depth, and per-hand shape output.
protected VideoProcessor()
{
    m_opencv = OpenCVController.GetSingletonInstance();
    viewer = new ImageViewer();
    vs = VisualData.GetSingleton();
    viewer.Show(); // non-modal debug window

    // Full-frame colour and depth targets (96 dpi, 24-bit BGR).
    this.ColorWriteBitmap = new WriteableBitmap(FrameWidth, FrameHeight, 96.0, 96.0, System.Windows.Media.PixelFormats.Bgr24, null);
    this.DepthWriteBitmap = new WriteableBitmap(FrameWidth, FrameHeight, 96.0, 96.0, System.Windows.Media.PixelFormats.Bgr24, null);
    // Small 8-bit grayscale targets for the segmented hand shapes.
    this.WrtBMP_RightHandFront = new WriteableBitmap(handShapeWidth, handShapeHeight, 96.0, 96.0, System.Windows.Media.PixelFormats.Gray8, null);
    this.WrtBMP_LeftHandFront = new WriteableBitmap(handShapeWidth, handShapeHeight, 96.0, 96.0, System.Windows.Media.PixelFormats.Gray8, null);
    rightHandPosition = new System.Drawing.Point();
}
/// <summary>
/// Opens a Kinect at VGA/30Hz and streams its disparity map into an image
/// viewer until the viewer is closed.
/// </summary>
public void TestKinect()
{
    using (KinectCapture capture = new KinectCapture(
        KinectCapture.DeviceType.Kinect,
        KinectCapture.ImageGeneratorOutputMode.Vga30Hz))
    {
        ImageViewer display = new ImageViewer();
        Application.Idle += delegate(Object sender, EventArgs e)
        {
            // Grab a frame, then pull out its disparity map for display.
            capture.Grab();
            Mat disparity = new Mat();
            capture.RetrieveDisparityMap(disparity);
            display.Image = disparity;
        };
        display.ShowDialog();
    }
}
/// <summary>
/// Flood-fills the image at <paramref name="filepath"/> with the given seed
/// colour to build a background mask, extracts the foreground objects, saves
/// each one as imageN.png, then shows the mask and every extracted shred.
/// </summary>
public static void GetBlobsFromImage(String filepath, Bgr color)
{
    Bitmap source = new Bitmap(filepath);

    Console.WriteLine("beginning flood fill...");
    Bitmap Mask = Preprocessing.FloodFill(source, 100, 100, 120, color);
    Console.WriteLine("flood fill complete...");

    Console.WriteLine("extracting objects...");
    List<Bitmap> extractedobj = Preprocessing.ExtractImages(source, Mask);
    Console.WriteLine("Extracted " + extractedobj.Count + " objects");

    // Persist each extracted object to the working directory.
    var result = new Image<Bgr, Byte>(source);
    int ii = 0;
    foreach (Bitmap bm in extractedobj)
    {
        bm.Save("image" + ii++ + ".png");
    }
    Console.WriteLine("wrote files to disk");

    // Show the mask, zoomed to fit roughly within an 800px square.
    Image<Bgra, Byte> image = new Image<Bgra, byte>(Mask);
    ImageViewer display = new ImageViewer(image, "Mask");
    var scale = Math.Min(800.0 / result.Height, 800.0 / result.Width);
    display.ImageBox.SetZoomScale(scale, new Point(10, 10));
    display.ShowDialog();

    // Display each shred that was extracted.
    foreach (var shred in extractedobj)
    {
        Image<Bgra, Byte> cvShred = new Image<Bgra, byte>(shred);
        ImageViewer box = new ImageViewer(cvShred, "Mask");
        var shredScale = Math.Min(800.0 / cvShred.Height, 800.0 / cvShred.Width);
        // BUG FIX: the zoom was applied to the already-closed mask viewer
        // ("display") instead of this shred's own viewer ("box").
        box.ImageBox.SetZoomScale(shredScale, new Point(10, 10));
        box.ShowDialog();
    }
}
/// <summary>
/// Flood-fills the image at <paramref name="filepath"/> to build a background
/// mask, extracts the foreground objects, orients and saves each one as a
/// JPEG, then shows the mask scaled to fit an 800px viewport.
/// </summary>
public static void GetBlobsFromImage(String filepath)
{
    Bitmap source = new Bitmap(filepath);
    Bitmap Mask = Preprocessing.FloodFill(source, 100, 100, 100);
    List<Bitmap> extractedobj = Preprocessing.ExtractImages(source, Mask);

    var result = new Image<Bgr, Byte>(source);

    // Persist each extracted object, re-oriented, as imageN.jpg.
    int index = 0;
    foreach (Bitmap piece in extractedobj)
    {
        Bitmap oriented = Preprocessing.Orient(piece);
        oriented.Save("image" + index++ + ".jpg");
    }

    // Present the mask, zoomed so the larger dimension fits ~800px.
    Emgu.CV.Image<Bgra, Byte> maskImage = new Image<Bgra, byte>(Mask);
    ImageViewer display = new ImageViewer(maskImage, "Mask");
    double zoom = Math.Min(800.0 / (double)result.Height, 800.0 / (double)result.Width);
    display.ImageBox.SetZoomScale(zoom, new Point(10, 10));
    display.ShowDialog();
    // Save to Working Dir
}
/// <summary>
/// Switches this player to the video file at <paramref name="filePath"/>,
/// priming the first frame and recording the native video dimensions.
/// </summary>
/// <param name="filePath">Path of the video file to open.</param>
public void SetFile(string filePath)
{
    // Release any previously opened source before replacing it.
    if (capture != null)
        capture.Dispose();

    capture = new Capture(filePath);
    nextFrame = capture.QueryFrame();
    if (nextFrame != null)
        isDirty = true; // a fresh frame is pending display

    this.VideoSize = new Size2(
        (int)capture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH),
        (int)capture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT));
    // Removed a permanently disabled (#if false) debug block that popped up a
    // viewer showing the first two frames.
}
/* public static void TestOnePassVideoStabilizerCamera()
{
   ImageViewer viewer = new ImageViewer();
   using (Capture capture = new Capture())
   using (GaussianMotionFilter motionFilter = new GaussianMotionFilter())
   //using (Features2D.FastDetector detector = new Features2D.FastDetector(10, true))
   using (Features2D.SURF detector = new Features2D.SURF(500, false))
   //using (Features2D.ORBDetector detector = new Features2D.ORBDetector(500))
   using (OnePassStabilizer stabilizer = new OnePassStabilizer(capture))
   {
      stabilizer.SetMotionFilter(motionFilter);
      //motionEstimator.SetDetector(detector);
      //stabilizer.SetMotionEstimator(motionEstimator);
      Application.Idle += delegate(object sender, EventArgs e)
      {
         Image<Bgr, byte> frame = stabilizer.NextFrame();
         if (frame != null)
            viewer.Image = frame;
      };
      viewer.ShowDialog();
   }
}*/

/// <summary>
/// Plays "tree.avi" through the one-pass video stabilizer, throttled to
/// roughly five frames per second, showing each stabilized frame.
/// </summary>
public static void TestOnePassVideoStabilizer()
{
    ImageViewer viewer = new ImageViewer();
    using (Capture capture = new Capture("tree.avi"))
    using (CaptureFrameSource frameSource = new CaptureFrameSource(capture))
    using (OnePassStabilizer stabilizer = new OnePassStabilizer(frameSource))
    {
        Stopwatch watch = new Stopwatch();
        //stabilizer.SetMotionEstimator(motionEstimator);
        Application.Idle += delegate(object sender, EventArgs e)
        {
            // Time each stabilized frame so playback can be throttled.
            watch.Reset();
            watch.Start();
            Mat frame = stabilizer.NextFrame();
            watch.Stop();
            // Pad out to at least 200 ms per frame (~5 fps).
            if (watch.ElapsedMilliseconds < 200)
            {
                Thread.Sleep(200 - (int)watch.ElapsedMilliseconds);
            }
            if (frame != null)
                viewer.Image = frame;
        };
        viewer.ShowDialog();
    }
}
/* public void TestGpuVibe()
{
   int warmUpFrames = 20;
   GpuVibe<Gray> vibe = null;
   Image<Gray, Byte> mask = null;
   using (ImageViewer viewer = new ImageViewer()) //create an image viewer
   using (Capture capture = new Capture()) //create a camera captue
   {
      capture.ImageGrabbed += delegate(object sender, EventArgs e)
      {
         //run this until application closed (close button click on image viewer)
         using(Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
         using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
         using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
         {
            if (warmUpFrames > 0) { warmUpFrames--; return; }
            if (vibe == null) { vibe = new GpuVibe<Gray>(1234567, gpuGray, null); return; }
            else
            {
               vibe.Apply(gpuGray, null);
               if (mask == null) mask = new Image<Gray, byte>(vibe.ForgroundMask.Size);
               vibe.ForgroundMask.Download(mask);
               viewer.Image = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>()); //draw the image obtained from camera
            }
         }
      };
      capture.Start();
      viewer.ShowDialog(); //show the image viewer
   }
}

public void TestGpuBackgroundModel()
{
   int warmUpFrames = 20;
   int totalFrames = 0;
   //CudaBackgroundSubtractorMOG2<Bgr> bgModel = null;
   //CudaBackgroundSubtractorMOG<Bgr> bgModel = null;
   CudaBackgroundSubtractorGMG<Bgr> bgModel = null;
   //CudaBackgroundSubtractorFGD<Bgr> bgModel = null;
   Image<Gray, Byte> mask = null;
   using (ImageViewer viewer = new ImageViewer()) //create an image viewer
   using (Capture capture = new Capture()) //create a camera captue
   {
      capture.ImageGrabbed += delegate(object sender, EventArgs e)
      {
         //run this until application closed (close button click on image viewer)
         totalFrames++;
         if (viewer != null && !viewer.IsDisposed)
         {
            if (viewer.InvokeRequired)
            {
               viewer.Invoke((Action)delegate { viewer.Text = String.Format("Processing {0}th frame.", totalFrames); });
            }
            else
            {
               viewer.Text = String.Format("Processing {0}th frame.", totalFrames);
            }
         }
         using (Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
         using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
         {
            if (warmUpFrames > 0) { warmUpFrames--; return; }
            if (bgModel == null)
            {
               //bgModel = new CudaBackgroundSubtractorMOG2<Bgr>(500, 16, true);
               //bgModel = new CudaBackgroundSubtractorMOG<Bgr>(200, 5, 0.7, 0);
               bgModel = new CudaBackgroundSubtractorGMG<Bgr>(120, 0.8);
               bgModel.Apply(gpuFrame, -1.0f, null);
               //bgModel = new CudaBackgroundSubtractorFGD<Bgr>(128, 15, 25, 64, 25, 40, true, 1, 0.1f, 0.005f, 0.1f, 2.0f, 0.9f, 15.0f);
               //bgModel.Apply(gpuFrame, -1.0f);
               return;
            }
            else
            {
               bgModel.Apply(gpuFrame, -1.0f, null);
               //bgModel.Apply(gpuFrame, -1.0f);
               if (mask == null) mask = new Image<Gray, byte>(bgModel.ForgroundMask.Size);
               bgModel.ForgroundMask.Download(mask);
               Image<Bgr, Byte> result = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>());
               if (viewer != null && !viewer.IsDisposed)
               {
                  if (viewer.InvokeRequired)
                  {
                     viewer.Invoke((Action)delegate { viewer.Image = result; });
                  }
                  else
                  {
                     viewer.Image = result; //draw the image obtained from camera
                  }
               }
            }
         }
      };
      capture.Start();
      viewer.ShowDialog(); //show the image viewer
   }
}*/

// Streams frames from the default camera into an image viewer. Uses the
// event-driven ImageGrabbed callback (rather than Application.Idle), so
// Retrieve runs once per grabbed frame on the capture thread.
public void CameraTest()
{
    using (ImageViewer viewer = new ImageViewer()) //create an image viewer
    using (Capture capture = new Capture()) //create a camera captue
    {
        capture.ImageGrabbed += delegate(object sender, EventArgs e)
        {
            //run this until application closed (close button click on image viewer)
            Mat m = new Mat();
            capture.Retrieve(m);
            viewer.Image = m; //draw the image obtained from camera
        };
        capture.Start();
        viewer.ShowDialog(); //show the image viewer
    }
}
/// <summary>
/// Stress-tests image round-tripping: on every idle tick, fills a 400x400
/// image with random noise, saves it as JPEG, reloads it, and displays it.
/// </summary>
public void TestImage()
{
    ImageViewer viewer = new ImageViewer();
    Application.Idle += delegate(Object sender, EventArgs e)
    {
        // Generate uniform random noise in [0, 255] per channel.
        Image<Bgr, Byte> noise = new Image<Bgr, byte>(400, 400);
        noise.SetRandUniform(new MCvScalar(), new MCvScalar(255, 255, 255));
        noise.Save("temp.jpeg");

        // Round-trip through disk and display the decoded result.
        Image<Bgr, Byte> reloaded = new Image<Bgr, byte>("temp.jpeg");
        viewer.Image = reloaded;
    };
    viewer.ShowDialog();
}
/* public void CameraTest2()
{
   using (ImageViewer viewer = new ImageViewer())
   using (Capture capture = new Capture())
   {
      capture.ImageGrabbed += delegate(object sender, EventArgs e)
      {
         Image<Bgr, Byte> img = capture.RetrieveBgrFrame(0);
         img = img.Resize(0.8, Emgu.CV.CvEnum.Inter.Linear);
         Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
         gray._EqualizeHist();
         viewer.Image = gray;
         capture.Pause();
         System.Threading.ThreadPool.QueueUserWorkItem(delegate
         {
            Thread.Sleep(1000);
            capture.Start();
         });
      };
      capture.Start();
      viewer.ShowDialog();
   }
}*/

/// <summary>
/// Streams frames from the default camera into an image viewer via the
/// Application.Idle pump until the viewer is closed.
/// </summary>
public void CameraTest3()
{
    ImageViewer viewer = new ImageViewer();
    using (Capture capture = new Capture())
    {
        Application.Idle += delegate(object sender, EventArgs e)
        {
            Mat frame = capture.QueryFrame();
            if (frame != null)
            {
                // FIX(perf/leak): the original converted Mat -> Image ->
                // Bitmap -> Image, allocating two throwaway copies and never
                // disposing the intermediate Bitmap, on every frame. A single
                // conversion displays the same pixels.
                viewer.Image = frame.ToImage<Bgr, Byte>();
            }
        };
        viewer.ShowDialog();
    }
}
/// <summary>
/// Drives the sub forward while periodically saving camera frames to disk;
/// stops all motors once the viewer dialog is closed.
/// </summary>
public void DriveAndRecordImageTest()
{
    InitializeMotors();
    DriveForward();
    ImageViewer viewer = new ImageViewer(); //create an image viewer
    Capture capture = new Capture(3);
    long frame = 0;
    DateTime lastSnapshot = DateTime.UtcNow;
    Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
    {
        // BUG FIX: the original compared DateTime.Now.Second values directly
        // ("time + 1 < Second"), which silently stops firing across every
        // minute boundary, and re-created the Capture without disposing the
        // previous one (handle leak). Use an elapsed-time check and dispose.
        if ((DateTime.UtcNow - lastSnapshot).TotalSeconds >= 2)
        {
            capture.Dispose();
            capture = new Capture(3);
            lastSnapshot = DateTime.UtcNow;
            capture.QueryFrame().Save("C:/RoboSub/Images/movingTest/forwardFrame" + frame + ".png");
            frame++;
        }
        viewer.Image = capture.QueryFrame();
    });
    viewer.ShowDialog();

    // Viewer closed: stop every motor.
    foreach (Motor m in motors)
    {
        //System.Console.WriteLine("Stopping motor " + m.info.serial + ".");
        m.KillMotors();
    }
}
/// <summary>
/// Runs BTVL super-resolution over "car.avi", throttled to roughly five
/// frames per second, reporting per-frame timing in the viewer title bar.
/// </summary>
public static void TestSuperres()
{
    ImageViewer display = new ImageViewer();
    using (Superres.FrameSource frameSource = new Superres.FrameSource("car.avi", false))
    using (Superres.SuperResolution sr = new Superres.SuperResolution(Superres.SuperResolution.OpticalFlowType.Btvl, frameSource))
    {
        Stopwatch timer = new Stopwatch();
        int frameIndex = 0;
        Application.Idle += delegate(object sender, EventArgs e)
        {
            // Time the super-resolution step for this frame.
            timer.Restart();
            Mat upscaled = new Mat();
            sr.NextFrame(upscaled);
            timer.Stop();

            // Keep playback at or below ~5 fps.
            if (timer.ElapsedMilliseconds < 200)
                Thread.Sleep(200 - (int)timer.ElapsedMilliseconds);

            if (upscaled.IsEmpty)
            {
                display.Text = String.Format("{0} frames processed", frameIndex);
            }
            else
            {
                display.Image = upscaled;
                display.Text = String.Format("Frame {0}: {1} milliseconds.", frameIndex++, timer.ElapsedMilliseconds);
            }
        };
        display.ShowDialog();
    }
}
/// <summary>
/// Wraps the default camera in a CaptureFrameSource and displays every frame
/// it produces until the viewer is closed.
/// </summary>
public static void TestCaptureFrameSource()
{
    ImageViewer display = new ImageViewer();
    using (Capture camera = new Capture())
    using (CaptureFrameSource source = new CaptureFrameSource(camera))
    {
        Application.Idle += delegate(object sender, EventArgs e)
        {
            Mat grabbed = source.NextFrame();
            if (grabbed != null)
                display.Image = grabbed;
        };
        display.ShowDialog();
    }
}
/// <summary>
/// Stabilizes "tree.avi" with the two-pass stabilizer, throttled to roughly
/// five frames per second, and shows the result.
/// </summary>
public static void TestTwoPassVideoStabilizer()
{
    ImageViewer display = new ImageViewer();
    using (Capture video = new Capture("tree.avi"))
    using (GaussianMotionFilter motionFilter = new GaussianMotionFilter(15, -1.0f))
    using (CaptureFrameSource source = new CaptureFrameSource(video))
    using (TwoPassStabilizer stabilizer = new TwoPassStabilizer(source))
    {
        Stopwatch timer = new Stopwatch();
        Application.Idle += delegate(object sender, EventArgs e)
        {
            // Measure how long stabilization took for this frame.
            timer.Restart();
            Mat stabilized = stabilizer.NextFrame();
            timer.Stop();

            // Throttle playback to at most ~5 fps.
            long elapsed = timer.ElapsedMilliseconds;
            if (elapsed < 200)
                Thread.Sleep(200 - (int)elapsed);

            if (stabilized != null)
                display.Image = stabilized;
        };
        display.ShowDialog();
    }
}
/// <summary>
/// Opens an ImageViewer constructed with a null image to verify the viewer
/// copes with having nothing to display.
/// </summary>
public void TestImageViewer()
{
    ImageViewer emptyViewer = new ImageViewer(null);
    emptyViewer.ShowDialog();
}
// Trains a simple nearest-neighbour (KNN, k=1) mark classifier from labeled
// sample images, then scans each test photo with a sliding window, classifies
// every window, suppresses overlapping detections, and draws the results.
private void Form1_Load(object sender, EventArgs e)
{
    int AllMarkCount = 6; // number of distinct mark classes
    // Source image = photo; target image = mark input image.
    // Source image dimensions.
    int orgImgWidth = 640;
    int orgImgHeight = 480;
    // Sliding-window scan step.
    int slidingWindowStepX = 3;
    int slidingWindowStepY = 3;
    // Sliding-window size (= target image size).
    int tarImgWidth = 50;
    int tarImgHeight = 50;
    Size MarkSizeByCell = new Size(5, 5); // mark size measured in sub-block (cell) units
    Size CellSize = new Size(tarImgWidth / MarkSizeByCell.Width, tarImgHeight / MarkSizeByCell.Height);
    // ------------------
    // KNN classifier training:
    // load the labeled sample image for each mark class.
    List<string> sampleImgPath = Directory.GetFiles(@"Image\").ToList();
    List<Mark> sampleMark = new List<Mark>();
    // Extract features and label each sample (supervised learning).
    for (int i = 0; i < sampleImgPath.Count; i++)
    {
        FileInfo curFile = new FileInfo(sampleImgPath[i]);
        string exName = curFile.Extension;
        if (exName == ".png")
        {
            Image<Gray, byte> imageTmp = new Image<Gray, byte>(sampleImgPath[i]);
            // Otsu binarization (threshold value chosen automatically).
            Emgu.CV.CvInvoke.cvThreshold(imageTmp.Ptr, imageTmp.Ptr, -1, 255d, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY | Emgu.CV.CvEnum.THRESH.CV_THRESH_OTSU);
            Mark markTmp = new Mark(imageTmp, MarkSizeByCell, CellSize);
            sampleMark.Add(markTmp);
            sampleMark[i].GetFeatures();
            // Derive the class label (1..AllMarkCount) from the file name
            // pattern "training0<N>".
            int length = AllMarkCount;
            for (int j = 0; j < length; j++)
            {
                if (curFile.Name.Contains("training0" + (j + 1).ToString()))
                {
                    sampleMark[i].MarkIndex = j + 1;
                    break;
                }
            }
            // Show each training sample with its label and feature stats.
            new ImageViewer(sampleMark[i].iptImg, "[" + sampleMark[i].MarkIndex.ToString() + "]FrameMean" + sampleMark[i].FrameMean.ToString() + ",StdDivSum" + sampleMark[i].StdDivSum.ToString()).Show();
        }
    }
    // KNN classifier ready.
    // ----------------
    // Load the test photos (camera images).
    List<string> orgImgPath = Directory.GetFiles(@"D:\Phisten\GoogleCloud\圖訊識別\圖訊testdata\").ToList();
    int imgCoung = orgImgPath.Count;
    //imgCoung = imgCoung > 5 ? 5 : imgCoung;
    //imgCoung = 1;
    for (int imgIdx = 0; imgIdx < imgCoung; imgIdx++)
    {
        string imgPath = orgImgPath[imgIdx];
        Image<Rgb, byte> orgImg = new Image<Rgb, byte>(imgPath);
        // Normalize: convert to grayscale.
        Image<Gray, byte> norImg;
        norImg = orgImg.Convert<Gray, byte>();
        //norImg = norImg.ConvertScale<byte>(3d, -100d);
        // Sliding window: extract every candidate input image by walking the
        // ROI across the photo in fixed steps.
        List<IImage> iptImgList = new List<IImage>();
        List<Rectangle> iptImgRectList = new List<Rectangle>();
        Rectangle tmpRect = new Rectangle(0, 0, 50, 50);
        norImg.ROI = tmpRect;
        // Clamp the scan extents to whole steps inside the image.
        int StepWidthLimit = orgImgWidth - tarImgWidth - (orgImgWidth - tarImgWidth) % slidingWindowStepX;
        int StepHeightLimit = orgImgHeight - tarImgHeight - (orgImgHeight - tarImgHeight) % slidingWindowStepY;
        for (int i = 0; i < StepWidthLimit; i += slidingWindowStepX)
        {
            tmpRect.Y = 0;
            for (int j = 0; j < StepHeightLimit; j += slidingWindowStepY)
            {
                Image<Gray, byte> curMarkImg = norImg.CopyBlank();
                // Otsu-binarize the current window into its own buffer.
                int greyThreshValue = (int)Emgu.CV.CvInvoke.cvThreshold(norImg.Ptr, curMarkImg.Ptr, -1, 255d, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY | Emgu.CV.CvEnum.THRESH.CV_THRESH_OTSU);
                iptImgList.Add(curMarkImg);
                //iptImgList.Add(norImg.Copy());
                iptImgRectList.Add(norImg.ROI);
                tmpRect.Offset(0, slidingWindowStepY);
                norImg.ROI = tmpRect;
            }
            tmpRect.Offset(slidingWindowStepX, 0);
            norImg.ROI = tmpRect;
        }
        List<Mark> markList = new List<Mark>();
        for (int imgIndex = 0; imgIndex < iptImgList.Count; imgIndex++)
        {
            // Extract features from this window.
            Mark curMark = new Mark(iptImgList[imgIndex] as Image<Gray, byte>, MarkSizeByCell, CellSize);
            curMark.GetFeatures();
            // Feature matching.
            double KNNdistanceThreshold = 256d;
            // Pre-filter: reject windows whose border mean is too high or
            // whose std-dev sum is too high.
            if (curMark.FrameFilter(96) && curMark.StdDivSumFilter(1024d))
            {
                // Nearest-neighbour classification: collect the distance to
                // every training sample within the threshold, sorted ascending.
                SortedList<double, int> distanceSList = new SortedList<double, int>();
                for (int i = 0, length = sampleMark.Count; i < length; i++)
                {
                    double curDis = sampleMark[i].Distance(curMark);
                    if (curDis < KNNdistanceThreshold)
                    {
                        distanceSList.Add(curDis, sampleMark[i].MarkIndex);
                    }
                }
                // A qualifying mark was found.
                //int markIndex = Phisten.Classifier.KNN(1, distanceSList);.
                if (distanceSList.Count > 0)// && distanceSList.Keys[0] < KNNdistanceThreshold)
                {
                    curMark.MarkIndex = distanceSList.Values[0];
                    curMark.MarkIndexDistance = distanceSList.Keys[0];
                    Rectangle markRect = iptImgRectList[imgIndex];
                    curMark.MarkRectangle = markRect;
                    bool IsNewMark = true;
                    // Overlap suppression.
                    for (int i = 0, length = markList.Count; i < length; i++)
                    {
                        if (markRect.IntersectsWith(markList[i].MarkRectangle)) // if regions overlap, do not add a new mark
                        {
                            if (distanceSList.Keys[0] < markList[i].MarkIndexDistance) // and if curMark is the closer match
                            {
                                // replace the existing mark
                                markList[i] = curMark;
                            }
                            IsNewMark = false;
                            break;
                        }
                    }
                    if (IsNewMark)
                    {
                        // otherwise record it as a new mark
                        markList.Add(curMark);
                    }
                }
            }
        }
        // Render: grayscale copy of the photo, lightly blurred, with each
        // detected mark boxed and annotated "[class]distance".
        Image<Rgb, byte> outputImg = orgImg.Convert<Gray, byte>().Convert<Rgb, byte>();
        outputImg = outputImg.SmoothGaussian(3);
        MCvFont pen1 = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_SIMPLEX, 0.5d, 0.5d);
        // Draw the classification results.
        for (int i = 0; i < markList.Count; i++)
        {
            string fileName1 = i.ToString() + ".jpg";
            //markList[i].iptImg.Save(@"opt\" + fileName1);
            outputImg.Draw(markList[i].MarkRectangle, new Rgb(255, 0, 0), 1);
            outputImg.Draw("[" + markList[i].MarkIndex + "]" + Math.Round(markList[i].MarkIndexDistance), ref pen1, markList[i].MarkRectangle.Location, new Rgb(50, 0, 200));
        }
        // Output: collapse this form and show the annotated image.
        this.Width = 0;
        this.Height = 0;
        ImageViewer imgViewer = new Emgu.CV.UI.ImageViewer(outputImg);
        imgViewer.Show();
    }
}
// Start button: opens the default camera, creates the display viewer, and
// hooks per-frame processing to the WinForms idle loop.
private void button1_Click(object sender, EventArgs e)
{
    capture = new Capture();
    v = new ImageViewer();
    // ProcessFrame runs once per idle tick until the handler is removed.
    Application.Idle += ProcessFrame;
}
/// <summary>
/// Toolbar item handler: shows the current Image in a modal viewer titled
/// "FZP", reporting any failure to the user via a message box.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Show_ItemClick(object sender, ItemClickEventArgs e)
{
    try
    {
        using (var viewerDialog = new ImageViewer(Image, "FZP"))
        {
            viewerDialog.ShowDialog();
        }
    }
    catch (Exception error)
    {
        XtraMessageBox.Show(error.Message);
    }
}
// Worker loop: while `runThread` is set, grabs each new webcam frame, builds a
// skin mask over the button region, finds the largest skin blob, and fires a
// button event once the blob has covered more than half of a button rectangle
// for STEADY_FRAMES_THRESHOLD consecutive frames.
public void Run()
{
    Rectangle biggestBlobForButtons;
    //ImageViewer viewer1 = new ImageViewer();
    //Thread aThread = new Thread(delegate() { viewer1.ShowDialog(); });
    //aThread.Start();
    // Debug viewer runs on its own thread so ShowDialog doesn't block us.
    ImageViewer viewer2 = new ImageViewer();
    Thread otherThread = new Thread(delegate() { viewer2.ShowDialog(); });
    otherThread.Start();
    while (runThread)
    {
        if (this.newImageFromWebcam)
        {
            //this.newImageFromWebcam = false;
            // Copy the shared webcam frame, then release the Bitmap copy.
            this.Image = new Bitmap(CaptureModel.Instance.Image);
            webcamImage = new Image<Bgr, byte>(this.Image);
            NewImageFromWebcam = false;
            this.Image.Dispose();
            // Crop to the button region and build the skin-colour mask.
            cutImageForButtons = webcamImage.Copy(interestRectangleForButtons);
            rangeImageForButtons = detectSkin(cutImageForButtons);
            cutImageForButtons.Dispose();
            //viewer1.Image = rangeImageForSideBar;
            viewer2.Image = rangeImageForButtons;
            // Find the largest contour (skin blob) in the mask.
            using (MemStorage storage = new MemStorage())
            {
                Contour<Point> contour = rangeImageForButtons.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST, storage);
                Contour<Point> biggest = null;
                double currentArea = 0, maxArea = 0;
                while (contour != null)
                {
                    currentArea = contour.Area;
                    if (currentArea > maxArea)
                    {
                        maxArea = currentArea;
                        biggest = contour;
                    }
                    contour = contour.HNext;
                }
                if (biggest != null)
                    biggestBlobForButtons = biggest.BoundingRectangle;
                else
                    biggestBlobForButtons = Rectangle.Empty;
            }
            if (biggestBlobForButtons.IsEmpty)
            {
                // No blob found; skip button logic (note: also skips the
                // Thread.Sleep below, so the loop re-polls immediately).
                continue;
            }
            // Check the blob against each on-screen button rectangle.
            for (int i = 0; i < rectanglesForButtons.Length; i++)
            {
                Rectangle intersection = rectanglesForButtons[i];
                intersection.Intersect(biggestBlobForButtons);
                if (!intersection.IsEmpty && getArea(intersection) > 0.5 * getArea(rectanglesForButtons[i]))
                {
                    // Blob covers >50% of this button: count steady frames and
                    // fire once the debounce threshold is reached.
                    if (!buttonPressedForButtonBar[i])
                    {
                        currentSteadyFramesForButtonBar[i]++;
                        if (currentSteadyFramesForButtonBar[i] == STEADY_FRAMES_THRESHOLD)
                        {
                            currentSteadyFramesForButtonBar[i] = 0;
                            buttonPressedForButtonBar[i] = true;
                            OnBarEvent(new PropertyChangedEventArgs("BUTTON," + i));
                            Console.WriteLine("BUTTON," + i);
                        }
                    }
                }
                else
                {
                    // Blob left the button: reset its pressed/debounce state.
                    buttonPressedForButtonBar[i] = false;
                    currentSteadyFramesForButtonBar[i] = 0;
                }
            }
        }
        else
        {
            resetButtonBar();
        }
        Thread.Sleep(300); // poll at ~3 Hz
    }
}
/// <summary>
/// Measures viewer redraw throughput by repainting a 1024x1024 image with a
/// new uniform gray level on every idle tick.
/// </summary>
public void TestImageViewerFrameRate()
{
    ImageViewer display = new ImageViewer(null);
    Image<Bgr, Byte> canvas = new Image<Bgr, Byte>(1024, 1024);
    Application.Idle += delegate(Object sender, EventArgs e)
    {
        // Cycle the intensity with the clock so successive frames differ.
        double level = DateTime.Now.Ticks % 30;
        canvas.SetValue(new Bgr(level, level, level));
        display.Image = canvas;
    };
    display.ShowDialog();
}
/// <summary>
/// Constructor of iProcess. Creates the image viewer used for display and
/// opens the default camera for capture.
/// </summary>
public iProcess()
{
    viewer = new ImageViewer(); //create an image viewer
    capture = new Capture(); //create a camera captue
}
/// <summary>
/// Plays "car.avi" in an image viewer at roughly three frames per second.
/// </summary>
public void TestPlayVideo()
{
    Capture video = new Capture("car.avi");
    ImageViewer display = new ImageViewer(null);
    Application.Idle += delegate(Object sender, EventArgs e)
    {
        Mat frame = video.QueryFrame();
        if (frame == null || frame.IsEmpty)
            return; // end of video — nothing more to show

        display.Image = frame;
        Thread.Sleep(300); // ~3 fps playback
    };
    display.ShowDialog();
}
/// <summary>
/// Flood-fills the image at <paramref name="filepath"/> from the top-left
/// corner to build a background mask, reports the elapsed time and pixel
/// count, and shows the mask scaled to fit an 800px viewport.
/// </summary>
public static void GetFloodFillMask(String filepath)
{
    // FIX: the original measured elapsed time by subtracting DateTime.Now
    // values; Stopwatch is monotonic and immune to wall-clock adjustments.
    var watch = System.Diagnostics.Stopwatch.StartNew();
    Bitmap source = new Bitmap(filepath);
    Bitmap mask = Preprocessing.FloodFill(source, 0, 0, 110);

    // Show the mask, zoomed so the larger dimension fits ~800px.
    var result = new Image<Bgr, Byte>(mask);
    ImageViewer display = new ImageViewer(result, "Mask");
    var scale = Math.Min(800.0 / (double)result.Height, 800.0 / (double)result.Width);
    display.ImageBox.SetZoomScale(scale, new Point(10, 10));

    watch.Stop();
    Console.WriteLine("Total Time :" + watch.Elapsed.ToString());
    Console.WriteLine("Total Pixels: " + source.Width * source.Height);
    display.ShowDialog();
}