/// <summary>
/// Prompts the user for a source picture, loads it as an HSV image, and opens two
/// viewer windows: the original image and an (initially blank) threshold image,
/// then triggers the first threshold computation. Exits the application when the
/// user cancels the file dialog.
/// </summary>
private void frmMain_Load(object sender, EventArgs e)
{
    using (OpenFileDialog dlgSourcePicture = new OpenFileDialog())
    {
        dlgSourcePicture.Filter = "Image Files(*.BMP;*.JPG;*.GIF;*.PNG)|*.BMP;*.JPG;*.GIF;*.PNG|All files (*.*)|*.*";
        dlgSourcePicture.Multiselect = false;

        if (dlgSourcePicture.ShowDialog() != System.Windows.Forms.DialogResult.OK)
        {
            // BUG FIX: Application.Exit() only requests shutdown of the message
            // loop; without the return the handler kept executing and crashed on
            // the empty FileName.
            Application.Exit();
            return;
        }

        // Dispose the intermediate GDI+ bitmap once its pixels have been copied
        // into the Emgu image (the original leaked it).
        using (Bitmap bmp = (Bitmap)Image.FromFile(dlgSourcePicture.FileName))
        {
            m_imgSource = new Image<Bgr, Byte>(bmp).Convert<Hsv, Byte>();
        }
    }

    m_imgThreshold = new Image<Gray, Byte>(m_imgSource.Size);

    // Viewer for the original (HSV-converted) image.
    m_frmSourceImage = new ImageViewer(m_imgSource, "Original Image");
    m_frmSourceImage.ShowIcon = false;
    m_frmSourceImage.MaximizeBox = false;
    m_frmSourceImage.Show();
    m_frmSourceImage.SetDesktopLocation(100, 0);
    m_frmSourceImage.SizeChanged += m_frmSourceImage_SizeChanged;

    // Viewer for the threshold result, docked to the right of the source viewer.
    m_frmThresholdImage = new ImageViewer(m_imgThreshold, "Threshold Image");
    m_frmThresholdImage.ShowIcon = false;
    m_frmThresholdImage.MaximizeBox = false;
    m_frmThresholdImage.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
    m_frmThresholdImage.Show();
    m_frmThresholdImage.SetDesktopLocation(
        m_frmSourceImage.DesktopLocation.X + m_frmSourceImage.Size.Width + 100,
        m_frmSourceImage.DesktopLocation.Y);

    ProduceThresholdImage();
}
/// <summary>
/// Wires up the OpenCV controller and the shared visual-data singleton, shows the
/// preview viewer, and allocates the writeable bitmaps used to publish the colour
/// frame, the depth frame, and the two extracted hand-shape images.
/// </summary>
protected VideoProcessor()
{
    const double dpi = 96.0;

    m_opencv = OpenCVController.GetSingletonInstance();
    vs = VisualData.GetSingleton();

    viewer = new ImageViewer();
    viewer.Show();

    // Full-frame targets, 24-bit BGR.
    this.ColorWriteBitmap = new WriteableBitmap(FrameWidth, FrameHeight, dpi, dpi, System.Windows.Media.PixelFormats.Bgr24, null);
    this.DepthWriteBitmap = new WriteableBitmap(FrameWidth, FrameHeight, dpi, dpi, System.Windows.Media.PixelFormats.Bgr24, null);

    // Hand-shape targets, 8-bit grayscale.
    this.WrtBMP_RightHandFront = new WriteableBitmap(handShapeWidth, handShapeHeight, dpi, dpi, System.Windows.Media.PixelFormats.Gray8, null);
    this.WrtBMP_LeftHandFront = new WriteableBitmap(handShapeWidth, handShapeHeight, dpi, dpi, System.Windows.Media.PixelFormats.Gray8, null);

    rightHandPosition = new System.Drawing.Point();
}
/// <summary>
/// Trains a small KNN "mark" classifier from labelled sample images, then scans a
/// folder of test photos with a sliding window, classifies every window against
/// the samples, suppresses overlapping hits, and shows each annotated result in
/// an ImageViewer.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    int AllMarkCount = 6;

    // Source image = photo; target image = mark input image.
    // Source image dimensions.
    int orgImgWidth = 640;
    int orgImgHeight = 480;

    // Sliding-window scan step.
    int slidingWindowStepX = 3;
    int slidingWindowStepY = 3;

    // Sliding-window (target image) dimensions.
    int tarImgWidth = 50;
    int tarImgHeight = 50;

    Size MarkSizeByCell = new Size(5, 5); // mark size measured in sub-cells
    Size CellSize = new Size(tarImgWidth / MarkSizeByCell.Width, tarImgHeight / MarkSizeByCell.Height);

    // ------------------ KNN training: load labelled sample images ------------------
    List<string> sampleImgPath = Directory.GetFiles(@"Image\").ToList();
    List<Mark> sampleMark = new List<Mark>();

    for (int i = 0; i < sampleImgPath.Count; i++)
    {
        FileInfo curFile = new FileInfo(sampleImgPath[i]);
        if (curFile.Extension != ".png")
            continue;

        Image<Gray, byte> imageTmp = new Image<Gray, byte>(sampleImgPath[i]);

        // Binarise in place with Otsu (the -1 threshold argument is ignored when
        // CV_THRESH_OTSU is set).
        Emgu.CV.CvInvoke.cvThreshold(imageTmp.Ptr, imageTmp.Ptr, -1, 255d,
            Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY | Emgu.CV.CvEnum.THRESH.CV_THRESH_OTSU);

        Mark markTmp = new Mark(imageTmp, MarkSizeByCell, CellSize);
        sampleMark.Add(markTmp);

        // BUG FIX: the original indexed sampleMark[i], which goes out of sync with
        // the file list as soon as a non-PNG file appears in the folder; operate on
        // the mark that was just added instead.
        markTmp.GetFeatures();

        // Supervised label from the file name: "training0<n>" => class n.
        for (int j = 0; j < AllMarkCount; j++)
        {
            if (curFile.Name.Contains("training0" + (j + 1).ToString()))
            {
                markTmp.MarkIndex = j + 1;
                break;
            }
        }

        new ImageViewer(markTmp.iptImg,
            "[" + markTmp.MarkIndex.ToString() + "]FrameMean" + markTmp.FrameMean.ToString() +
            ",StdDivSum" + markTmp.StdDivSum.ToString()).Show();
    }

    // ------------------ Load test photos ------------------
    List<string> orgImgPath = Directory.GetFiles(@"D:\Phisten\GoogleCloud\圖訊識別\圖訊testdata\").ToList();
    int imgCount = orgImgPath.Count;

    // Distance cutoff for accepting a nearest-neighbour match (loop invariant,
    // hoisted out of the per-window loop).
    double KNNdistanceThreshold = 256d;

    for (int imgIdx = 0; imgIdx < imgCount; imgIdx++)
    {
        string imgPath = orgImgPath[imgIdx];
        Image<Rgb, byte> orgImg = new Image<Rgb, byte>(imgPath);

        // Normalisation: classify on the grayscale version of the photo.
        Image<Gray, byte> norImg = orgImg.Convert<Gray, byte>();

        // Sliding-window extraction of candidate windows.
        List<IImage> iptImgList = new List<IImage>();
        List<Rectangle> iptImgRectList = new List<Rectangle>();
        Rectangle tmpRect = new Rectangle(0, 0, tarImgWidth, tarImgHeight);
        norImg.ROI = tmpRect;

        // Clamp the scan so a full window always fits inside the photo.
        int StepWidthLimit = orgImgWidth - tarImgWidth - (orgImgWidth - tarImgWidth) % slidingWindowStepX;
        int StepHeightLimit = orgImgHeight - tarImgHeight - (orgImgHeight - tarImgHeight) % slidingWindowStepY;

        for (int i = 0; i < StepWidthLimit; i += slidingWindowStepX)
        {
            tmpRect.Y = 0;
            // BUG FIX: the ROI must be refreshed after resetting Y; otherwise the
            // first window of every column was thresholded at the previous
            // column's last Y position.
            norImg.ROI = tmpRect;

            for (int j = 0; j < StepHeightLimit; j += slidingWindowStepY)
            {
                Image<Gray, byte> curMarkImg = norImg.CopyBlank();

                // Otsu-binarise the current window into its own buffer.
                Emgu.CV.CvInvoke.cvThreshold(norImg.Ptr, curMarkImg.Ptr, -1, 255d,
                    Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY | Emgu.CV.CvEnum.THRESH.CV_THRESH_OTSU);

                iptImgList.Add(curMarkImg);
                iptImgRectList.Add(norImg.ROI);

                tmpRect.Offset(0, slidingWindowStepY);
                norImg.ROI = tmpRect;
            }

            tmpRect.Offset(slidingWindowStepX, 0);
            norImg.ROI = tmpRect;
        }

        // ------------------ Classify every window ------------------
        List<Mark> markList = new List<Mark>();

        for (int imgIndex = 0; imgIndex < iptImgList.Count; imgIndex++)
        {
            Mark curMark = new Mark(iptImgList[imgIndex] as Image<Gray, byte>, MarkSizeByCell, CellSize);
            curMark.GetFeatures();

            // Reject windows whose frame mean or std-dev sum is too high.
            if (!curMark.FrameFilter(96) || !curMark.StdDivSumFilter(1024d))
                continue;

            // Nearest-neighbour search over the training samples.
            SortedList<double, int> distanceSList = new SortedList<double, int>();
            for (int i = 0, length = sampleMark.Count; i < length; i++)
            {
                double curDis = sampleMark[i].Distance(curMark);
                // BUG FIX: SortedList.Add throws ArgumentException on duplicate
                // keys; on a distance tie keep the first sample encountered.
                if (curDis < KNNdistanceThreshold && !distanceSList.ContainsKey(curDis))
                {
                    distanceSList.Add(curDis, sampleMark[i].MarkIndex);
                }
            }

            if (distanceSList.Count == 0)
                continue;

            // Accept the nearest sample as this window's class.
            curMark.MarkIndex = distanceSList.Values[0];
            curMark.MarkIndexDistance = distanceSList.Keys[0];
            Rectangle markRect = iptImgRectList[imgIndex];
            curMark.MarkRectangle = markRect;

            // Overlap suppression: keep only the nearest mark per region.
            bool IsNewMark = true;
            for (int i = 0, length = markList.Count; i < length; i++)
            {
                if (markRect.IntersectsWith(markList[i].MarkRectangle))
                {
                    if (distanceSList.Keys[0] < markList[i].MarkIndexDistance)
                    {
                        markList[i] = curMark; // closer match replaces the old one
                    }
                    IsNewMark = false;
                    break;
                }
            }
            if (IsNewMark)
            {
                markList.Add(curMark);
            }
        }

        // ------------------ Draw classification results ------------------
        Image<Rgb, byte> outputImg = orgImg.Convert<Gray, byte>().Convert<Rgb, byte>();
        outputImg = outputImg.SmoothGaussian(3);
        MCvFont pen1 = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_SIMPLEX, 0.5d, 0.5d);

        for (int i = 0; i < markList.Count; i++)
        {
            outputImg.Draw(markList[i].MarkRectangle, new Rgb(255, 0, 0), 1);
            outputImg.Draw("[" + markList[i].MarkIndex + "]" + Math.Round(markList[i].MarkIndexDistance),
                ref pen1, markList[i].MarkRectangle.Location, new Rgb(50, 0, 200));
        }

        // Collapse this form and show the annotated photo in its own viewer.
        this.Width = 0;
        this.Height = 0;
        ImageViewer imgViewer = new Emgu.CV.UI.ImageViewer(outputImg);
        imgViewer.Show();
    }
}
/// <summary>
/// Runs GPU Haar-cascade face and eye detection on the current frame, draws the
/// detected face and eye regions, and shows the annotated frame in a viewer whose
/// title is the detection time in milliseconds.
/// </summary>
private void btnDetect_Click(object sender, EventArgs e)
{
    Image<Bgr, Byte> frame = currentFrame;
    List<Rectangle> faceList = new List<Rectangle>();
    Rectangle eyeLeft, eyeRight; // out parameters — no initialisers needed
    long detectionTime;

    DetectFace.detectFaceGPU(frame, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
        faceList, out eyeLeft, out eyeRight, out detectionTime);

    // One green rectangle per detected face.
    foreach (var face in faceList)
    {
        frame.Draw(face, new Bgr(Color.Green), 1);
    }

    // BUG FIX: eyeLeft/eyeRight are single values, yet the original redrew them
    // once per face inside the loop. Draw them once, keeping the original's
    // behavior of drawing them only when at least one face was found.
    if (faceList.Count > 0)
    {
        frame.Draw(eyeLeft, new Bgr(Color.Red), 1);
        frame.Draw(eyeRight, new Bgr(Color.Yellow), 1);
    }

    ImageViewer okno2 = new ImageViewer(frame, detectionTime.ToString());
    okno2.Show();
}