// NOTE(review): method name has a typo ("Porcessing" -> "Processing"); kept because
// external callers reference this name.
/// <summary>
/// Runs chip pass/fail classification on a background task: finds pass contours in
/// the original image, fits boxes around them, tests every estimated chip position
/// against those boxes, and records each chip as pass or fail in Core.PResult.
/// Shows a wait cursor while running and pushes the annotated images to the UI when done.
/// </summary>
/// <returns>The background task performing the processing.</returns>
Task PorcessingStep1() {
    return(Task.Run(() => {
        try {
            // Show busy cursor on the UI thread while the worker runs.
            this.BeginInvoke(() => Mouse.OverrideCursor = System.Windows.Input.Cursors.Wait);
            Core.PResult = new ImgPResult();
            /* For Simple Notation */
            double cHnum = Core.PData.ChipHNum; // chip grid height (rows)
            double cWnum = Core.PData.ChipWNum; // chip grid width (cols)
            Image <Bgr, byte> targetimg = null;
            /* Create Data */
            // 3-channel per-chip color buffer used to visualize failed chips.
            byte[,,] failchipDisplayData = MatPattern((int)cHnum, (int)cWnum, 3);
            // NOTE(review): passfailPosData is never read or written below — appears unused.
            byte[,,] passfailPosData = new byte[(int)cHnum, (int)cWnum, 1];
            double[,,] estedChipP = Core.EstedChipPos(cHnum, cWnum); // [H,W,(y,x)]
            var passContours = Core.FindPassContour(Core.OriginImg);
            Core.passChipList = new List <System.Drawing.PointF>();
            Core.failChipList = new List <System.Drawing.PointF>();
            targetimg = Core.ColorOriImg.Clone();
            /* Draw Contour and Save */
            //targetimg = DrawContour( targetimg , passContours );
            //targetimg.Save( TestFileSavePath.ContourName );
            // FnSortcontours apparently returns a delegate that is invoked immediately
            // to produce the sorted contour set — TODO confirm against ImgPFunc.
            passContours = ImgPFunc.FnSortcontours(passContours)();
            var boxlist = Core.ApplyBox(passContours);
            targetimg = DrawBox(targetimg, boxlist);
            //targetimg.Save( TestFileSavePath.BoxName );
            // Draw EstedPoint on Image and Cavnas
            targetimg = DrawCenterPoint(targetimg, estedChipP);
            #region check pass fail
            // NOTE(review): boximg is cloned but never used afterwards — appears dead.
            var boximg = Core.ColorOriImg.Clone();
            for (int j = 0; j < estedChipP.GetLength(0); j++) // row / H
            {
                for (int i = 0; i < estedChipP.GetLength(1); i++) // col / W
                {
                    bool isFail = true;
                    // A chip passes if its estimated center falls inside any detected box.
                    for (int k = 0; k < boxlist.Count; k++) {
                        var inbox = ImgPFunc.FnInBox(boxlist[k], 1); // inBox(y,x) inbox(H,W)
                        if (inbox(estedChipP[j, i, 0], estedChipP[j, i, 1])) {
                            Core.PResult.OutData.Add(new ExResult(j, i, true, Core.SumBox(boxlist[k]), CvInvoke.ContourArea(passContours[k])));
                            Core.passChipList.Add(new System.Drawing.PointF(( float )estedChipP[j, i, 0], ( float )estedChipP[j, i, 1]));
                            isFail = false;
                            break;
                        }
                    }
                    if (isFail) {
                        // No box contained this chip: record intensity at its position as a fail.
                        double failboxInten = Core.SumAreaPoint((int)estedChipP[j, i, 0], (int)estedChipP[j, i, 1]);
                        Core.PResult.OutData.Add(new ExResult(j, i, false, failboxInten, 0));
                        Core.failChipList.Add(new System.Drawing.PointF(( float )estedChipP[j, i, 0], ( float )estedChipP[j, i, 1]));
                        SetFailColor(failchipDisplayData, j, i);
                    }
                }
            }
            DisplayResult(failchipDisplayData, targetimg);
            #endregion
            // Publish results and restore the cursor on the UI thread.
            this.BeginInvoke(() => {
                imgPro.ImageSource = BitmapSourceConvert.ToBitmapSource(targetimg);
                imgLT.ImageSource = BitmapSourceConvert.ToBitmapSource(Core.IndexViewImg);
                Mouse.OverrideCursor = null;
            });
        } catch (Exception er) {
            // Surface any processing failure directly to the operator.
            System.Windows.Forms.MessageBox.Show(er.ToString());
        }
    }));
}
/// <summary>
/// Timer tick handler: grabs a camera frame, isolates the configured RGB color
/// range, detects circles via Hough transform, draws/annotates every detected
/// circle, and binds the first circle's position/radius to the configured OSC values.
/// </summary>
/// <param name="sender">Timer raising the tick.</param>
/// <param name="e">Unused event args.</param>
private void ProcessFrameOnTimerTick(object sender, EventArgs e)
{
    // NOTE(review): the timer is stopped here but never restarted in this handler —
    // presumably restarted elsewhere after processing; verify, otherwise only one
    // frame is ever processed.
    _captureTimer.Stop();
    // Get Min and MaxColorRange
    var minColor = _setting.RgbColorMin;
    var maxColor = _setting.RgbColorMax;
    // Processed image: threshold to the configured BGR range, then blur to
    // stabilize the Hough detection.
    var imgOriginal = _capture.RetrieveBgrFrame();
    imgOriginal = imgOriginal.Flip(FLIP.HORIZONTAL);
    var imgProcessed = imgOriginal.InRange(new Bgr(minColor.Blue, minColor.Green, minColor.Red),
                                           new Bgr(maxColor.Blue, maxColor.Green, maxColor.Red));
    imgProcessed = imgProcessed.SmoothGaussian(_setting.GaussianBlur);
    // HoughCircles returns one circle array per channel; [0] = the single gray channel.
    var circles = imgProcessed.HoughCircles(new Gray(_setting.CannyThreashold),
                                            new Gray(_setting.AccumulatorThreashold),
                                            2, imgProcessed.Height / 4f,
                                            _setting.MinRadiusDetectedCircles,
                                            _setting.MaxRadiusDetectedCircles)[0];
    _setting.CollectDataOfObjects = 0;
    _setting.TrackedObjects.Clear();
    foreach (var circle in circles)
    {
        _setting.CollectDataOfObjects++; // start by 1 object
        var tb = new TrackedObject
        {
            Id = _setting.CollectDataOfObjects,
            X = circle.Center.X,
            Y = circle.Center.Y,
            Radius = circle.Radius
        };
        /*
         * _setting.BallPosition = "ball position : x = " + circle.Center.X.ToString().PadLeft(4) +
         *                         ", y =" + circle.Center.Y.ToString().PadLeft(4) +
         *                         ", radius = " + circle.Radius.ToString("###.000").PadLeft(7);
         * */
        // Mark the circle center with a small filled green dot.
        CvInvoke.cvCircle(imgOriginal, new Point((int)circle.Center.X, (int)circle.Center.Y),
                          3, new MCvScalar(0, 255, 0), -1, LINE_TYPE.CV_AA, 0);
        // First (primary) object is red, every further object blue.
        var color = _setting.CollectDataOfObjects > 1 ? Color.Blue : Color.Red;
        imgOriginal.Draw(circle, new Bgr(color), 3);
        _setting.TrackedObjects.Add(tb);
        // Only the first detected circle drives the OSC bindings.
        if (_setting.CollectDataOfObjects > 1) { continue; }
        foreach (var bindValue in _setting.BindValues)
        {
            // FIX(consistency): use the loop variable instead of circles[0] — the
            // guard above guarantees this only runs for the first circle, so the
            // value is identical but no longer depends on that invariant implicitly.
            switch (bindValue.OscToValue)
            {
                case BindingOscValue.Radius: bindValue.Value = circle.Radius; break;
                case BindingOscValue.X: bindValue.Value = circle.Center.X; break;
                case BindingOscValue.Y: bindValue.Value = circle.Center.Y; break;
            }
        }
    }
    // Set Image to Settings:
    // NOTE(review): imgOriginal/imgProcessed are IDisposable and recreated every
    // tick — consider disposing after conversion; verify ToBitmapSource copies data.
    _setting.OrginalImage = BitmapSourceConvert.ToBitmapSource(imgOriginal);
    _setting.ProcessedImage = BitmapSourceConvert.ToBitmapSource(imgProcessed);
}
/// <summary>
/// Background render pass: wraps the current depth buffer, segments it, slides a
/// 128x140 detection window over the segments, selects the window(s) with the most
/// overlaps, merges them into one bounding box, and pushes the annotated frame and
/// raw depth bitmap to the UI. Also logs a timing sample when a stopwatch run ends.
/// </summary>
/// <param name="sender">BackgroundWorker raising the event.</param>
/// <param name="e">Unused work args.</param>
private void Rendering_DoWork(object sender, DoWorkEventArgs e)
{
    Image <Gray, byte> image = new Image <Gray, byte>(frameDescription.Width, frameDescription.Height);
    image.Bytes = depthpixels;
    List <Image <Gray, byte> > ImageList = new List <Image <Gray, byte> >();
    Image <Bgr, byte> BGR = new Image <Bgr, byte>(512, 424);
    (ImageList, BGR) = SegmentImage(15);
    List <System.Drawing.Rectangle> Boxes = Sliding_Window(ImageList, new System.Drawing.Size(128, 140));
    // Count, for every candidate box, how many boxes it intersects.
    // NOTE: each box intersects itself, so every count is at least 1; the uniform
    // +1 does not change which boxes have the maximum.
    int[] intersectionCount = new int[Boxes.Count];
    for (int c = 0; c < Boxes.Count; c++)
    {
        for (int k = 0; k < Boxes.Count; k++)
        {
            if (Boxes[c].IntersectsWith(Boxes[k]))
            {
                intersectionCount[c]++;
                CvInvoke.Rectangle(BGR, Boxes[c], new MCvScalar(255, 255, 255));
            }
        }
    }
    // PERF: hoist Max() out of the predicate — the original re-scanned the whole
    // array for every element inside Where().
    int maxCount = intersectionCount.Length > 0 ? intersectionCount.Max() : 0;
    var maxindex = intersectionCount.Select((x, i) => new { Index = i, Value = x })
                                    .Where(x => x.Value == maxCount)
                                    .Select(x => x.Index)
                                    .ToList();
    // Collect left/right/top/bottom edges of all maximally-overlapping boxes.
    List <float> X1 = new List <float>();
    List <float> X2 = new List <float>();
    List <float> Y1 = new List <float>();
    List <float> Y2 = new List <float>();
    foreach (var box in maxindex)
    {
        X1.Add(Boxes[box].X);
        X2.Add(Boxes[box].X + Boxes[box].Width);
        Y1.Add(Boxes[box].Y);
        Y2.Add(Boxes[box].Y + Boxes[box].Height);
        // CvInvoke.Rectangle(BGR, Boxes[box], new MCvScalar(255, 255, 255));
    }
    if (X1.Count != 0)
    {
        // FIX: the union of the selected boxes spans from the minimum left/top edge
        // to the maximum right/bottom edge, so the size must be measured from
        // X1.Min()/Y1.Min() (the rectangle's origin). The original used
        // X2.Max()-X1.Max() / Y2.Max()-Y1.Max(), which under-sizes the box whenever
        // the selected boxes have different origins.
        System.Drawing.Rectangle finalrectangle = new System.Drawing.Rectangle(
            new System.Drawing.Point((int)X1.Min(), (int)Y1.Min()),
            new System.Drawing.Size((int)(X2.Max() - X1.Min()), (int)(Y2.Max() - Y1.Min())));
        BoundingBox = finalrectangle;
    }
    // Publish the annotated frame and the raw depth image on their dispatchers.
    LoadCapture.Dispatcher.Invoke(() =>
    {
        BitmapSource FrameBitmap = BitmapSourceConvert.ToBitmapSourceBgr(BGR);
        FrameBitmap.Freeze();
        LoadCapture.Source = FrameBitmap;
    });
    depthbitmap.Dispatcher.Invoke(() =>
    {
        depthbitmap.WritePixels(new Int32Rect(0, 0, depthbitmap.PixelWidth, depthbitmap.PixelHeight),
                                image.Bytes, depthbitmap.PixelWidth, 0);
    });
    // Timing instrumentation: log elapsed ms once the measured distance threshold hits.
    // NOTE(review): hard-coded user desktop path — consider making this configurable.
    if (abc.IsRunning && distance < 20)
    {
        abc.Stop();
        var time = abc.ElapsedMilliseconds;
        File.AppendAllText("C:/Users/CPT Danko/Desktop/SpeedValues.txt",
                           time + " ms " + distance + " meranie" + Environment.NewLine);
        distance++;
    }
}
/// <summary>
/// Makes sure that settings are kept when switching images: re-renders the
/// current and next image panes according to their active overlay mode
/// (plain image, facial feature points, or Delaunay triangulation).
/// </summary>
private void DisplayImagesCorrectCB() {
    // Shared scratch Mat; each branch copies its source into it before converting.
    // (Safe to reuse across both switches because ToImage materializes a copy —
    // TODO confirm against Emgu's Mat.ToImage semantics.)
    Mat tmp = new Mat();
    // Left pane: current image.
    switch (activeButtonEnumCurr) {
    case ActiveButtonEnum.None:
        currImage.Source = BitmapSourceConvert.ToBitmapSource(currImageI);
        break;
    case ActiveButtonEnum.FFP:
        // Draw facial feature points on a copy of the current image.
        currImageI.Mat.CopyTo(tmp);
        this.currFFPImg = tmp.ToImage <Bgr, byte>();
        FaceInvoke.DrawFacemarks(currFFPImg, _preprocessor.ffpCurr, new MCvScalar(255, 0, 0));
        currImage.Source = BitmapSourceConvert.ToBitmapSource(currFFPImg);
        break;
    case ActiveButtonEnum.Delaunay:
        // Draw the Delaunay triangulation as white polylines on a copy.
        currImageI.Mat.CopyTo(tmp);
        this.currDelaunayImg = tmp.ToImage <Bgr, byte>();
        foreach (Triangle2DF triangle in _preprocessor.delaunayTrianglesCurr) {
            System.Drawing.Point[] vertices = Array.ConvertAll <PointF, System.Drawing.Point>(triangle.GetVertices(), System.Drawing.Point.Round);
            using (VectorOfPoint vp = new VectorOfPoint(vertices)) {
                CvInvoke.Polylines(currDelaunayImg, vp, true, new Bgr(255, 255, 255).MCvScalar);
            }
        }
        currImage.Source = BitmapSourceConvert.ToBitmapSource(currDelaunayImg);
        break;
    default:
        break;
    }
    // Right pane: next image (mirror of the logic above).
    switch (activeButtonEnumNext) {
    case ActiveButtonEnum.None:
        nextImage.Source = BitmapSourceConvert.ToBitmapSource(nextImageI);
        break;
    case ActiveButtonEnum.FFP:
        nextImageI.Mat.CopyTo(tmp);
        this.nextFFPImg = tmp.ToImage <Bgr, byte>();
        FaceInvoke.DrawFacemarks(nextFFPImg, _preprocessor.ffpNext, new MCvScalar(255, 0, 0));
        nextImage.Source = BitmapSourceConvert.ToBitmapSource(nextFFPImg);
        break;
    case ActiveButtonEnum.Delaunay:
        nextImageI.Mat.CopyTo(tmp);
        this.nextDelaunayImg = tmp.ToImage <Bgr, byte>();
        foreach (Triangle2DF triangle in _preprocessor.delaunayTrianglesNext) {
            System.Drawing.Point[] vertices = Array.ConvertAll <PointF, System.Drawing.Point>(triangle.GetVertices(), System.Drawing.Point.Round);
            using (VectorOfPoint vp = new VectorOfPoint(vertices)) {
                CvInvoke.Polylines(nextDelaunayImg, vp, true, new Bgr(255, 255, 255).MCvScalar);
            }
        }
        nextImage.Source = BitmapSourceConvert.ToBitmapSource(nextDelaunayImg);
        break;
    default:
        break;
    }
}
/// <summary>
/// Wires up the camera pipeline: hooks the capture button to grab-and-save,
/// opens the default camera for continuous preview, applies camera settings,
/// and reveals the preview UI after a short warm-up delay.
/// </summary>
private void OnLoaded()
{
    _frame = new Mat();
    cmdCapture.Click += (s, e) =>
    {
        // This
        _grabbedFrame = new Mat();
        _capt.Grab();
        _capt.Retrieve(_grabbedFrame, 0);
        // Or this
        //_grabbedFrame = _capt.QueryFrame();
        if (!_grabbedFrame.IsEmpty)
        {
            ImageProcessing.ImageProcessingInstance.SaveDirectory = "Gallery/";
            // Next file name = numeric max of the existing names + 1.
            // FIX: the original used Convert.ToInt32(files.Last()) + 1, which relies
            // on the directory listing being numerically ordered; lexicographic order
            // ("10" < "9") makes that collide/overwrite once 10+ files exist.
            var files = ImageProcessing.ImageProcessingInstance.GetFilesInSaveDir(ImageProcessing.FileNameSetting.WithoutExtension);
            string nextName = files.Count == 0
                ? 0.ToString()
                : (files.Max(f => Convert.ToInt32(f)) + 1).ToString();
            ImageProcessing.ImageProcessingInstance.SaveImageToFile(nextName, _grabbedFrame.Bitmap, System.Drawing.Imaging.ImageFormat.Jpeg);
            OnCapture?.Invoke(this, _grabbedFrame.Bitmap);
        }
    };
    try
    {
        _capt = new VideoCapture(0);
        _capt.ImageGrabbed += (s, e) =>
        {
            // Defensive guards: the capture can be torn down while events are in flight.
            if (_capt == null) { return; }
            if (_capt.Ptr == IntPtr.Zero) { return; }
            _capt.Retrieve(_frame, 0);
            if (_frame.IsEmpty) { return; }
            if (_frame.Bitmap == null) { return; }
            //TryFindFace(_frame);
            // Downsample for the preview; failures are non-fatal.
            try { CvInvoke.PyrDown(_frame, _frame); }
            catch (Exception ex) { Debug.WriteLine(ex.Message); }
            Dispatcher.Invoke(() => { imgPreview.Source = BitmapSourceConvert.ToBitmapSource(_frame); });
        };
        _capt.FlipHorizontal = !_capt.FlipHorizontal;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Err", MessageBoxButton.OK, MessageBoxImage.Error);
    }
    //_capt.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps, 15);
    // Camera settings
    SetHighResolution();
    //_capt.Start();
    // FIX: the original used `new Task(async () => ...).Start()`, which passes an
    // async lambda as an Action — the task "completes" at the first await and any
    // exception escapes as async-void. Task.Run unwraps the async delegate properly.
    _ = Task.Run(async () =>
    {
        await Task.Delay(1100);
        Dispatcher.Invoke(() =>
        {
            // Remove the splash overlay and show the live controls.
            grdMainContainer.Children.RemoveAt(grdMainContainer.Children.Count - 1);
            imgPreview.Visibility = Visibility.Visible;
            cmdCapture.Visibility = Visibility.Visible;
        });
    });
}
/// <summary>
/// Changes active pictures: shifts the (curr, next) image pair one step left or
/// right through the Images list, refreshes both panes, and re-renders the morph
/// preview (or the placeholder image when morphing is not available).
/// </summary>
/// <param name="sender">The navigation Button; its Uid ("left"/"right") selects the direction.</param>
/// <param name="e"></param>
private void ChangeActivePicturesButton_Clicked(object sender, RoutedEventArgs e) {
    string buttonName = ((Button)sender).Uid;
    int currIdOld, currIdNew;
    switch (buttonName) {
    case "left":
        // Already at the first image — cannot step back.
        // NOTE(review): message text says "no next image" although this is the
        // previous-image boundary — confirm whether this wording is intentional.
        if (0 >= curr.Id) { MessageBox.Show("no next image"); }
        else {
            currIdOld = curr.Id;
            currIdNew = currIdOld - 1;
            // Shift the pair one step left: curr moves back, next follows curr.
            ImageDetails newCurrImageDetails = Images.ElementAt(currIdNew);
            ImageDetails newNextImageDetails = Images.ElementAt(currIdNew + 1);
            this.curr = newCurrImageDetails;
            this.next = newNextImageDetails;
            // Reload both panes from disk (Title holds the image path — TODO confirm).
            currImage.Source = BitmapSourceConvert.ToBitmapSource(new Image <Bgr, byte>(curr.Title));
            nextImage.Source = BitmapSourceConvert.ToBitmapSource(new Image <Bgr, byte>(next.Title));
            RefreshDisplayedImages();
            if (displayMorphOk) { DisplayImagesCorrectCB(); }
        }
        break;
    case "right":
        currIdOld = curr.Id;
        currIdNew = currIdOld + 1;
        // Need room for both the new curr and the new next (currIdNew + 1).
        if (Images.Count <= currIdNew + 1) { MessageBox.Show("no next image"); }
        else {
            ImageDetails newCurrImageDetails = Images.ElementAt(currIdNew);
            ImageDetails newNextImageDetails = Images.ElementAt(currIdNew + 1);
            this.curr = newCurrImageDetails;
            this.next = newNextImageDetails;
            currImage.Source = BitmapSourceConvert.ToBitmapSource(new Image <Bgr, byte>(curr.Title));
            nextImage.Source = BitmapSourceConvert.ToBitmapSource(new Image <Bgr, byte>(next.Title));
            RefreshDisplayedImages();
            //DisplayImages();
            if (displayMorphOk) { DisplayImagesCorrectCB(); }
        }
        break;
    default:
        // Button Uid other than "left"/"right" indicates a wiring error.
        throw new MissingFieldException();
    }
    if (displayMorphOk) {
        // Re-render the 50/50 morph between the (possibly new) image pair.
        MorphImage m = new MorphImage(_preprocessor.curr, _preprocessor.next, _preprocessor.ffpCurr, _preprocessor.ffpNext, 0.5f);
        morphImage.Source = m.GetMorphedImage();
        mySlider.Value = 0.5;
    } else {
        // No valid morph yet: show the bundled placeholder.
        var uriSource = new Uri(@"/FaceMorph;component/data/MyDefaultImage.png", UriKind.Relative);
        morphImage.Source = new BitmapImage(uriSource);
    }
    DisplayImages();
}
/// <summary>
/// Redraws the detected-face rectangles on one image pane: the selected face is
/// outlined green, all others red, then the pane, selection state, and
/// preprocessor face list are updated.
/// </summary>
/// <param name="imgLocation">0 is left (curr), 1 is right (next).</param>
public void RedrawFaces(int imgLocation) {
    int tmpcurrentFace = 0; // todo: check if safe to remove
    List <Rectangle> tmpfacesList;
    Image <Bgr, byte> tmpImageI;
    // Pick the side-specific state and work on a fresh copy of that side's image.
    if (imgLocation == (int)ImageEnum.Curr) {
        tmpcurrentFace = selectedFaceCurr;
        curr.SelectedFace = selectedFaceCurr;
        tmpfacesList = _preprocessor.FacesListCurr;
        Mat tmp = new Mat();
        currImageI.Mat.CopyTo(tmp);
        tmpImageI = tmp.ToImage <Bgr, byte>();
    } else {
        tmpcurrentFace = selectedFaceNext;
        tmpfacesList = _preprocessor.FacesListNext;
        Mat tmp = new Mat();
        nextImageI.Mat.CopyTo(tmp);
        tmpImageI = tmp.ToImage <Bgr, byte>();
    }
    // Selected face in green, every other face in red.
    if (tmpfacesList.Count > 0) {
        for (int i = 0; i < tmpfacesList.Count; i++) {
            if (i == tmpcurrentFace) {
                tmpImageI.Draw(tmpfacesList[i], new Bgr(0, 255, 0), RECT_WIDTH);
            } else if (i != tmpcurrentFace) {
                tmpImageI.Draw(tmpfacesList[i], new Bgr(0, 0, 255), RECT_WIDTH);
            }
        }
    }
    // Commit the redrawn pane and selection bookkeeping for the chosen side.
    // NOTE(review): FacesListCurr/Next[tmpcurrentFace] is indexed without an
    // empty-list guard (unlike the draw loop above) — verify the face list can
    // never be empty when this runs.
    if (imgLocation == (int)ImageEnum.Curr) {
        curr.FaceLocation = _preprocessor.FacesListCurr[tmpcurrentFace];
        currImage.Source = BitmapSourceConvert.ToBitmapSource(tmpImageI);
        selectedFaceCurr = tmpcurrentFace;
        curr.SelectedFace = tmpcurrentFace;
        _preprocessor.FacesListCurr = tmpfacesList;
        //currImageI = tmpImageI;
    } else {
        nextImage.Source = BitmapSourceConvert.ToBitmapSource(tmpImageI);
        next.FaceLocation = _preprocessor.FacesListNext[tmpcurrentFace];
        selectedFaceNext = tmpcurrentFace;
        next.SelectedFace = tmpcurrentFace;
        _preprocessor.FacesListNext = tmpfacesList;
    }
    _preprocessor.UpdateSelectedFace(curr.SelectedFace, next.SelectedFace);
}
//////////////////////////////////
////// Main Window Elements //////
//////////////////////////////////
/// <summary>
/// Called when each depth frame is ready.
/// Does the necessary processing to get our finger points and predict gestures:
/// converts the frame to an Emgu image for finger/palm tracking, renders the
/// thresholded depth data to a BGR bitmap, and — once enough frames are buffered —
/// runs DTW feature extraction + KNN prediction and forwards recognized gestures
/// over serial. Most of the interesting stuff happens in here.
/// </summary>
private void GestureDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e) {
    // Calculating and storing finger and palm positions
    Image <Gray, Byte> emguImg = convertToEmgu();
    CalculateAndStorePos(emguImg);
    this.emguImage.Source = BitmapSourceConvert.ToBitmapSource(emguImg);
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame()) {
        if (depthFrame != null) {
            // Copy the pixel data from the image to a temporary array
            depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);
            // Get the min and max reliable depth for the current frame
            // NOTE(review): minDepth/maxDepth are no longer used since the
            // threshold below was hard-coded to 180..threshDepth.
            int minDepth = depthFrame.MinDepth;
            int maxDepth = depthFrame.MaxDepth;
            // Convert the depth to RGB
            int colorPixelIndex = 0;
            for (int i = 0; i < this.depthPixels.Length; ++i) {
                // x/y coordinates of this pixel in the depth frame (x unused below).
                int x = i % this.sensor.DepthStream.FrameWidth;
                int y = (int)(i / this.sensor.DepthStream.FrameWidth);
                // Get the depth for this pixel
                short depth = depthPixels[i].Depth;
                // To convert to a byte, we're discarding the most-significant
                // rather than least-significant bits.
                // We're preserving detail, although the intensity will "wrap."
                // Values outside the reliable depth range are mapped to 0 (black).
                // Note: Using conditionals in this loop could degrade performance.
                // Consider using a lookup table instead when writing production code.
                // See the KinectDepthViewer class used by the KinectExplorer sample
                // for a lookup table example.
                byte intensity = (byte)(0);
                //if (depth >= minDepth && depth <= threshDepth)
                if (depth >= 180 && depth <= threshDepth) {
                    intensity = (byte)(depth);
                }
                // Write out blue byte
                this.depthcolorPixels[colorPixelIndex++] = intensity;
                // Write out green byte
                this.depthcolorPixels[colorPixelIndex++] = intensity;
                // Write out red byte
                this.depthcolorPixels[colorPixelIndex++] = intensity;
                // We're outputting BGR, the last byte in the 32 bits is unused so skip it
                // If we were outputting BGRA, we would write alpha here.
                ++colorPixelIndex;
            }
            // If read is enabled: enough frames buffered and capture active,
            // extract DTW features and classify the gesture.
            if (_video.Count > MinimumFrames && _capturing == true) {
                sample = new Matrix <float>(1, _dimension);
                string[] features = _dtw.ExtractFeatures().Split(' ');
                for (int i = 0; i < features.Length; i++) {
                    int featureIndex;
                    if (Int32.TryParse(features[i], out featureIndex)) {
                        sample[0, i] = (float)featureIndex;
                    }
                }
                Gestures recordedGesture = EmguCVKNearestNeighbors.Predict(sample);
                results.Text = "Recognised as: " + recordedGesture.ToString();
                // Armed (send state on) and a real gesture: transmit it once,
                // then disarm and clear the visual "armed" border.
                if (SerialSender.GetSendState() && recordedGesture != Gestures.ReadySignal) {
                    SerialSender.SendGesture(recordedGesture, pulse);
                    SerialSender.SetSendState(false);
                    recordedGesture = Gestures.None;
                    imageBorder.BorderThickness = new Thickness(0);
                }
                // Ready signal arms the sender and shows the thick border.
                if (recordedGesture == Gestures.ReadySignal) {
                    imageBorder.BorderThickness = new Thickness(10);
                    SerialSender.SetSendState(true);
                }
                if (recordedGesture == Gestures.None) {
                    // There was no match so reset the buffer
                    _video = new ArrayList();
                }
            }
            // Ensures that we remember only the last x frames
            if (_video.Count > BufferSize) {
                // If we are currently capturing and we reach the maximum buffer size then automatically store
                if (_capturing) {
                    DtwStoreClick(null, null);
                } else {
                    // Drop the three oldest frames from the buffer.
                    // NOTE(review): the original comment said "2 frame" but the
                    // loop removes 3 — confirm which count is intended.
                    for (int i = 0; i < 3; ++i) {
                        _video.RemoveAt(0);
                    }
                }
            }
            // Write the pixel data into our bitmap
            this.depthBitmap.WritePixels(
                new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight),
                this.depthcolorPixels,
                this.depthBitmap.PixelWidth * sizeof(int), 0);
        }
    }
}
/// <summary>
/// Converts a GDI+ <see cref="System.Drawing.Image"/> to an Emgu CV <c>Mat</c>
/// by going through the project's bitmap-image conversion helper.
/// </summary>
/// <param name="img">Source GDI+ image.</param>
/// <returns>The image content as a <c>Mat</c>.</returns>
public Mat ImageToMat(System.Drawing.Image img)
{
    var bitmapImage = ImageProcessing.ImageProcessingInstance.ConvertImageToBitmapImage(img);
    return BitmapSourceConvert.ToMat(bitmapImage);
}
/// <summary>
/// Fetches the latest live-view frame from the camera, extracts the star-focus
/// window, optionally applies brightness/edge-detection filters and zoom, and
/// publishes the processed bitmaps (StarWindow, Preview, Bitmap) to the view model.
/// Serialized against other live-view work via _locker.
/// </summary>
public override void GetLiveImage() {
    lock (_locker) {
        try {
            LiveViewData = LiveViewManager.GetLiveViewImage(CameraDevice);
        } catch (Exception) {
            // Best-effort: a failed grab simply skips this frame.
            // NOTE(review): the exception is swallowed silently — consider logging.
            return;
        }
        if (LiveViewData == null || LiveViewData.ImageData == null) {
            return;
        }
        // The JPEG payload starts at ImageDataPosition within the buffer.
        MemoryStream stream = new MemoryStream(LiveViewData.ImageData, LiveViewData.ImageDataPosition, LiveViewData.ImageData.Length - LiveViewData.ImageDataPosition);
        using (var bmp = new Bitmap(stream)) {
            // res aliases bmp until a filter replaces it with a new bitmap.
            // NOTE(review): filter-produced bitmaps are never disposed — verify.
            Bitmap res = bmp;
            var preview = BitmapFactory.ConvertToPbgra32Format(BitmapSourceConvert.ToBitmapSource(res));
            // Crop the star-analysis window centered on CentralPoint.
            var zoow = preview.Crop((int)(CentralPoint.X - (StarWindowSize / 2)), (int)(CentralPoint.Y - (StarWindowSize / 2)), StarWindowSize, StarWindowSize);
            CalculateStarSize(zoow);
            zoow.Freeze(); // freeze before handing to the UI thread
            StarWindow = zoow;
            // Full-frame preview only when the camera reports no zoom ("All").
            if (CameraDevice.LiveViewImageZoomRatio.Value == "All") {
                preview.Freeze();
                Preview = preview;
            }
            if (Brightness != 0) {
                BrightnessCorrection filter = new BrightnessCorrection(Brightness);
                res = filter.Apply(res);
            }
            if (EdgeDetection) {
                // Grayscale first, then homogeneity edge detector.
                var filter = new FiltersSequence(
                    Grayscale.CommonAlgorithms.BT709,
                    new HomogenityEdgeDetector()
                    );
                res = filter.Apply(res);
            }
            var _bitmap = BitmapFactory.ConvertToPbgra32Format(BitmapSourceConvert.ToBitmapSource(res));
            AverageImage(_bitmap);
            DrawGrid(_bitmap);
            // Digital zoom: crop a ZoomFactor-scaled window around CentralPoint.
            if (ZoomFactor > 1) {
                double d = _bitmap.PixelWidth / (double)ZoomFactor;
                double h = _bitmap.PixelHeight / (double)ZoomFactor;
                _bitmap = _bitmap.Crop((int)(CentralPoint.X - (d / 2)), (int)(CentralPoint.Y - (h / 2)), (int)d, (int)h);
            }
            _bitmap.Freeze();
            Bitmap = _bitmap;
        }
    }
}