Esempio n. 1
0
        public ConvertToBitmap()
        {
            Bitmap bitmap;

            // Threshold demo: smooth a grayscale image, then binarize with Otsu.
            using (IplImage src = new IplImage(FilePath.Image.Lenna, LoadMode.GrayScale))
            using (IplImage dst = new IplImage(src.Size, BitDepth.U8, 1))
            {
                src.Smooth(src, SmoothType.Gaussian, 5);
                src.Threshold(dst, 0, 255, ThresholdType.Otsu);
                // Convert the processed IplImage to a System.Drawing.Bitmap
                // (equivalent to BitmapConverter.ToBitmap(dst)).
                bitmap = dst.ToBitmap();
            }

            // Display the result in a modal WinForms window.
            Form viewerForm = new Form
            {
                Text = "from IplImage to Bitmap",
                ClientSize = bitmap.Size,
            };
            PictureBox viewer = new PictureBox
            {
                Dock = DockStyle.Fill,
                SizeMode = PictureBoxSizeMode.StretchImage,
                Image = bitmap
            };

            viewerForm.Controls.Add(viewer);
            viewerForm.ShowDialog();

            viewerForm.Dispose();
            bitmap.Dispose();
        }
Esempio n. 2
0
 /// <summary>
 /// Pumps frames from the main camera into the PictureBox until
 /// QueryFrame returns null (capture closed / end of stream).
 /// </summary>
 private void mainCameraLoop()
 {
     while ((mainCameraImage = Cv.QueryFrame(mainCameraCapture)) != null)
     {
         // PictureBox does not dispose a replaced Image, so reassigning
         // every frame leaked one GDI Bitmap per frame. Dispose the old one.
         Image oldFrame = pbxMainCamera.Image;
         pbxMainCamera.Image = mainCameraImage.ToBitmap();
         if (oldFrame != null) oldFrame.Dispose();
         Application.DoEvents();
     }
 }
        private void showImageUsingPictureBox()
        {
            // Load the image and convert it to a Bitmap; the IplImage itself
            // is released as soon as the conversion is done.
            Bitmap bitmap;
            using (var iplImage = new IplImage(@"..\..\Images\Penguin.png", LoadMode.Color))
            {
                // Equivalent to BitmapConverter.ToBitmap(iplImage).
                bitmap = iplImage.ToBitmap();
            }

            var viewer = new PictureBox();
            viewer.Image = bitmap;
            viewer.ClientSize = bitmap.Size;

            // To redraw later into the existing bitmap:
            // iplImage.ToBitmap(dst: (Bitmap)pictureBox.Image);
            flowLayoutPanel1.Controls.Add(viewer);
        }
Esempio n. 4
0
        public ConvertToBitmap()
        {
            Bitmap bitmap;

            // Image processing with OpenCV: Gaussian smoothing followed by
            // Otsu thresholding.
            using (IplImage src = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
            using (IplImage dst = new IplImage(src.Size, BitDepth.U8, 1))
            {
                src.Smooth(src, SmoothType.Gaussian, 5);
                src.Threshold(dst, 0, 255, ThresholdType.Otsu);
                // Convert the IplImage result to a System.Drawing.Bitmap
                // (equivalent to BitmapConverter.ToBitmap(dst)).
                bitmap = dst.ToBitmap();
            }

            // Show the result in a WinForms window.
            Form viewerForm = new Form
            {
                Text = "from IplImage to Bitmap",
                ClientSize = bitmap.Size,
            };
            PictureBox viewer = new PictureBox
            {
                Dock = DockStyle.Fill,
                SizeMode = PictureBoxSizeMode.StretchImage,
                Image = bitmap
            };
            // Note: instead of setting the Image property one could handle
            // viewer.Paint and draw with e.Graphics.DrawImage(bitmap, ...)
            // on every repaint.
            viewerForm.Controls.Add(viewer);
            viewerForm.ShowDialog();

            viewerForm.Dispose();
            bitmap.Dispose();
        }
        /// <summary>
        /// Image-processing loop executed on a dedicated worker thread.
        /// Repeatedly grabs camera frames and dispatches them according to the
        /// current RoutineAction until isRunning becomes false.
        /// </summary>
        private void routine()
        {
            IplImage cam = new IplImage(Camera.FrameSize, BitDepth.U8, 3);
            CvPoint[] mostLengthHole = new CvPoint[0];
            RoutineAction innerAction;
            bool innerIsRunOnce;

            while(isRunning)
            {
                // Snapshot the shared state for this iteration.
                innerAction = action;
                innerIsRunOnce = isRunOnce;

                // Decide what happens on the next cycle: a one-shot action
                // pauses itself after this iteration.
                if (innerIsRunOnce) action = RoutineAction.Pause;

                // If the thread is paused, do nothing this cycle.
                // NOTE(review): this spins without sleeping while paused —
                // confirm whether a short wait is acceptable to reduce CPU use.
                if (innerAction == RoutineAction.Pause) continue;

                if (isRunning && BeforeProcessingEvent != null) BeforeProcessingEvent();

                // Capture a frame from the camera into the reused buffer.
                Camera.GetIplImage(ref cam);

                if (innerAction == RoutineAction.FindContour || innerAction == RoutineAction.Measure)
                {
                    mostLengthHole = Finder.FindMostLengthHole(cam);
                }

                if (innerAction == RoutineAction.DrawCorners || innerAction == RoutineAction.Calibrate)
                {
                    Calibrator.SetImage(cam);
                }

                switch (innerAction)
                {
                    case RoutineAction.GetImage:
                    {
                        // Hand the raw frame to subscribers.
                        if (isRunning && GetImageEvent != null) GetImageEvent(cam.ToBitmap());
                        break;
                    }

                    case RoutineAction.DrawCorners:
                    {
                        Calibrator.FindCorners();
                        if (isRunning && DrawCornersEvent != null) DrawCornersEvent(cam.ToBitmap());
                        break;
                    }

                    case RoutineAction.Calibrate:
                    {
                        bool result = Calibrator.TryToCalibrate(out Transformer);
                        if (isRunning && CalibrateEvent != null) CalibrateEvent(result);
                        break;
                    }

                    case RoutineAction.FindContour:
                    {
                        // Outline the longest detected hole before publishing the frame.
                        if (mostLengthHole.Length > 0) cam.DrawPolyLine(new CvPoint[][] { mostLengthHole }, true, Cv.RGB(0, 255, 0), 2);
                        if (isRunning && FindContoursEvent != null) FindContoursEvent(cam.ToBitmap());
                        break;
                    }

                    case RoutineAction.Measure:
                    {
                        if (mostLengthHole.Length > 0)
                        {
                            // Fit circles to the contour in real-world and image coordinates.
                            CvPoint2D32f[] realPoints = Transformer.GetRealPoints(mostLengthHole);
                            CvCircleSegment realCircle = CircleApproximator.Approximate(realPoints);
                            CvCircleSegment imageCircle = CircleApproximator.Approximate(mostLengthHole);

                            // Apply calibration corrections; the image-space offset is
                            // scaled by the image/real radius ratio.
                            float ratio = imageCircle.Radius / realCircle.Radius;
                            realCircle.Radius = realCircle.Radius * CorrectionMultiplier + CorrectionOffset;
                            imageCircle.Radius = imageCircle.Radius * CorrectionMultiplier + CorrectionOffset * ratio;

                            int circleRadius = (int)Math.Round(imageCircle.Radius);
                            CvPoint circleCenter = new CvPoint
                            (
                                (int)Math.Round(imageCircle.Center.X),
                                (int)Math.Round(imageCircle.Center.Y)
                            );

                            // Outline the ring's inner boundary.
                            if (circleRadius > 0) cam.DrawCircle(circleCenter, circleRadius, Cv.RGB(0, 0, 255), 2);

                            // Mark the calibrated zone.
                            cam.DrawPolyLine(new CvPoint[][] { Calibrator.CalibratedZone }, true, Cv.RGB(255, 255, 0), 2);

                            if (isRunning && MeasureEvent != null) MeasureEvent(cam.ToBitmap(), realCircle.Radius);
                        }

                        break;
                    }
                }
            }

            Camera.Close();
        }
Esempio n. 6
0
        /// <summary>
        /// Matches a detected image against the trained SOM and returns the id of
        /// the best-matching 3D model, or -1 when no match clears the 50%
        /// similarity threshold.
        /// </summary>
        /// <param name="detected">Image to recognize; not modified.</param>
        /// <returns>MapTo3DModelID of the best match, or -1.</returns>
        private int RecognizeImage(IplImage detected)
        {
            int id = -1;
            ImageData bestMatch = null;
            using (System.Drawing.Bitmap detectedBitmap = detected.ToBitmap())
            {
                ImageData inputImgData = PictureSOM.SOMHelper.GetImageData(detectedBitmap);
                Neuron neuronFired = _som.Recognize(inputImgData.inputVector);
                float lowestDistance = float.MaxValue;

                // Linear scan of the fired neuron's images for the closest vector.
                foreach (ImageData imgData in neuronFired.ImageDataList)
                {
                    float dist = SOMHelper.Calculate_Distance(imgData.inputVector.weights, inputImgData.inputVector.weights);

                    if (dist < lowestDistance)
                    {
                        lowestDistance = dist;
                        bestMatch = imgData;
                    }
                }

                // Accept only matches above 50% similarity (Tanimoto metric).
                if (bestMatch != null && (100 - lowestDistance) > 50)
                {
                    id = bestMatch.MapTo3DModelID;

                    // Recharge the model display timer.
                    stopwatch_model.Restart();

                    // Show the confidence level (meaningful only with the
                    // Tanimoto similarity metric).
                    this.progressBar_confidence.Value = 100 - lowestDistance;
                    this.label_tanimoto.Content = 100 - lowestDistance;

                    // Show the image that was recognized by the SOM. The old
                    // "recognized != null" guard was dead code: a constructor
                    // call never yields null.
                    this.recognizedImage.Source = new BitmapImage(new Uri(bestMatch.m_fullName));
                }
                _somVisual.HighLightCell(neuronFired.X, neuronFired.Y);
            }

            return id;
        }
        /// <summary>
        /// Captures frames from webcam device 0 via VideoInputSharp and shows
        /// them live in a WinForms window until the window is closed.
        /// </summary>
        public CaptureByVideoInputSharp()
        {
            const int DeviceID = 0;
            const int CaptureFps = 30;
            const int CaptureWidth = 640;
            const int CaptureHeight = 480;

            using (VideoInput vi = new VideoInput())
            {
                vi.SetIdealFramerate(DeviceID, CaptureFps);
                vi.SetupDevice(DeviceID, CaptureWidth, CaptureHeight);

                // The device may not honor the requested size; query the actual one.
                int width = vi.GetWidth(DeviceID);
                int height = vi.GetHeight(DeviceID);

                using (IplImage img = new IplImage(width, height, BitDepth.U8, 3))
                using (Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format24bppRgb))
                using (Form form = new Form() { Text = "VideoInputSharp sample", ClientSize = new Size(width, height) })
                using (PictureBox pb = new PictureBox() { Dock = DockStyle.Fill, Image = bitmap })
                {
                    // Prime the buffer with the first frame if one is ready.
                    if (vi.IsFrameNew(DeviceID))
                    {
                        vi.GetPixels(DeviceID, img.ImageData, false, true);
                    }

                    form.Controls.Add(pb);
                    form.Show();

                    // Poll frames until the user closes the window. The same
                    // Bitmap instance is redrawn in place, so no per-frame
                    // allocation occurs.
                    while (form.Created)
                    {
                        if (vi.IsFrameNew(DeviceID))
                        {
                            vi.GetPixels(DeviceID, img.ImageData, false, true);
                        }
                        img.ToBitmap(bitmap);
                        pb.Refresh();
                        Application.DoEvents();
                    }

                    vi.StopDevice(DeviceID);
                }
            }
            /*
            const int DeviceID1 = 0;
            const int DeviceID2 = 1;
            const int DeviceID3 = 2;
            const int CaptureFps = 30;
            const int CaptureWidth = 640;
            const int CaptureHeight = 480;
            
            // lists all capture devices
            //ListDevices();

            using (VideoInput vi = new VideoInput())
            {
                // initializes settings
                vi.SetIdealFramerate(DeviceID1, CaptureFps);
                vi.SetupDevice(DeviceID1, CaptureWidth, CaptureHeight);
                vi.SetupDevice(DeviceID2);
                vi.SetupDevice(DeviceID3);

                using (IplImage img1 = new IplImage(vi.GetWidth(DeviceID1), vi.GetHeight(DeviceID1), BitDepth.U8, 3))
                using (IplImage img2 = new IplImage(vi.GetWidth(DeviceID2), vi.GetHeight(DeviceID2), BitDepth.U8, 3))
                using (IplImage img3 = new IplImage(vi.GetWidth(DeviceID3), vi.GetHeight(DeviceID3), BitDepth.U8, 3))
                using (CvWindow window1 = new CvWindow("Camera 1"))
                using (CvWindow window2 = new CvWindow("Camera 2"))
                using (CvWindow window3 = new CvWindow("Camera 3"))
                {
                    // to get the data from the device first check if the data is new
                    if (vi.IsFrameNew(DeviceID1))
                    {
                        vi.GetPixels(DeviceID1, img1.ImageData, false, true);
                    }
                    if (vi.IsFrameNew(DeviceID2))
                    {
                        vi.GetPixels(DeviceID2, img2.ImageData, false, true);
                    }
                    if (vi.IsFrameNew(DeviceID3))
                    {
                        vi.GetPixels(DeviceID3, img3.ImageData, false, true);
                    }

                    // captures until the window is closed
                    while (true)
                    {
                        if (vi.IsFrameNew(DeviceID1))
                        {
                            vi.GetPixels(DeviceID1, img1.ImageData, false, true);
                        }
                        if (vi.IsFrameNew(DeviceID2))
                        {
                            vi.GetPixels(DeviceID2, img2.ImageData, false, true);
                        }
                        if (vi.IsFrameNew(DeviceID3))
                        {
                            vi.GetPixels(DeviceID3, img3.ImageData, false, true);
                        }
                        window1.Image = img1;
                        window2.Image = img2;
                        window3.Image = img3;

                        if (Cv.WaitKey(1) > 0)
                            break;
                    }

                    // stops capturing
                    vi.StopDevice(DeviceID1);
                    vi.StopDevice(DeviceID2);
                    vi.StopDevice(DeviceID3);
                }
            }
            //*/
        }
Esempio n. 8
0
 /// <summary>Converts an OpenCV IplImage to a GDI+ Bitmap.</summary>
 /// <param name="img">Source image.</param>
 /// <returns>A newly allocated Bitmap copy of the image.</returns>
 public static Bitmap BitmapFromIplImage(IplImage img)
 {
     // Delegates to OpenCvSharp's BitmapConverter extension method.
     Bitmap converted = img.ToBitmap();
     return converted;
 }
Esempio n. 9
0
 /// <summary>
 /// Steps the video one frame backwards and shows it, unless already at
 /// the first frame.
 /// </summary>
 private void prev_Click(object sender, EventArgs e)
 {
     if (uCurrentFrameNo > 1)
     {
         uCurrentFrameNo--;
         frameText.Text = uCurrentFrameNo.ToString();
         cap.SetCaptureProperty(CaptureProperty.PosFrames, (Double)uCurrentFrameNo);
         img = cap.QueryFrame();
         // Dispose the Bitmap being replaced; PictureBox does not release
         // it, so stepping through frames leaked one Bitmap per click.
         Image oldImage = ekran.Image;
         ekran.Image = img.ToBitmap();
         if (oldImage != null) oldImage.Dispose();
     }
     else
     {
         MessageBox.Show("Video başı!");
     }
 }
Esempio n. 10
0
        /// <summary>
        /// Timer callback: grabs a camera frame, runs Hough circle detection on a
        /// smoothed grayscale copy, and shows the raw, grayscale and annotated
        /// images. Stops the timer when the capture runs dry.
        /// </summary>
        private void timer1_Tick(object sender, EventArgs e)
        {
            // Capture starts: grab the next frame.
            IplImage ipl1 = capture.QueryFrame();

            // Show the camera frame size in the labels.
            labelWidth.Text = capture.FrameWidth.ToString();
            labelHeight.Text = capture.FrameHeight.ToString();

            if (ipl1 != null)
            {
                // BUG FIX: the original created ipl2/ipl3 from ipl1.Size /
                // ipl1.Clone() BEFORE the null check, so the last tick threw
                // NullReferenceException instead of stopping the timer. The
                // derived images are also disposed now (they were leaked).
                using (IplImage ipl2 = new IplImage(ipl1.Size, BitDepth.U8, 1))
                using (IplImage ipl3 = ipl1.Clone())
                {
                    // Show the captured image; dispose the Bitmap being replaced
                    // (PictureBox does not release it — that leak is what the old
                    // GC.Collect() workaround tried to hide).
                    Image old1 = pictureBox1.Image;
                    pictureBox1.Image = ipl1.ToBitmap();
                    if (old1 != null) old1.Dispose();

                    // Circle detection: grayscale -> Gaussian blur -> Hough.
                    Cv.CvtColor(ipl1, ipl2, ColorConversion.BgrToGray);
                    Cv.Smooth(ipl2, ipl2, SmoothType.Gaussian, 9);
                    using (CvMemStorage storage = new CvMemStorage())
                    {
                        CvSeq<CvCircleSegment> seq = ipl2.HoughCircles(storage, HoughCirclesMethod.Gradient, 1, 100, 150, 55, 0, 0);
                        foreach (CvCircleSegment item in seq)
                        {
                            // Outline each detected circle and mark its center.
                            ipl3.Circle(item.Center, (int)item.Radius, CvColor.Red, 3);
                            ipl3.Circle(item.Center, 1, CvColor.Red, 3);
                            labelCenter.Text = item.Center.ToString();
                        }
                    }

                    // Show the smoothed grayscale and the annotated images.
                    Image old2 = pictureBox2.Image;
                    pictureBox2.Image = ipl2.ToBitmap();
                    if (old2 != null) old2.Dispose();

                    Image old3 = pictureBox3.Image;
                    pictureBox3.Image = ipl3.ToBitmap();
                    if (old3 != null) old3.Dispose();
                }
            }
            else
            {
                // No more frames: stop polling.
                timer1.Stop();
            }
        }
Esempio n. 11
0
        /// <summary>
        /// Worker loop: dequeues frame batches, feeds them to the native face
        /// tracker, and publishes filtered frames and extracted faces to the UI.
        /// Blocks on <c>go</c> when the queue is empty; never returns.
        /// </summary>
        unsafe void SearchFace()
        {
            while (true)
            {
                Frame[] frames = null;
                // Take the next batch from the shared queue under the lock.
                lock (locker)
                {
                    if (frameQueue.Count > 0)
                    {
                        frames = frameQueue.Dequeue();
                    }
                }

                if (frames != null)
                {
                    for (int i = 0; i < frames.Length; ++i)
                    {
                        Debug.WriteLine("addin frame");
                        Frame frame = frames[i];
                        NativeMethods.AddInFrame(ref frame);
                        // Wrap the native image WITHOUT taking ownership:
                        // IsEnabledDispose = false stops OpenCvSharp from
                        // freeing memory the native side still owns.
                        IplImage ipl = new IplImage(frame.image);
                        Bitmap bmp = BitmapConverter.ToBitmap(ipl);
                        ipl.IsEnabledDispose = false;

                        // NOTE(review): UI control is touched from this worker
                        // thread, and the previous Image is never disposed —
                        // confirm both are intended.
                        this.pictureFiltered.Image = bmp;

                    }

                    IntPtr target = IntPtr.Zero;

                    // Ask the native library for detected faces; it returns a
                    // pointer to a Target array that it owns until ReleaseMem().
                    int count = NativeMethods.SearchFaces(ref target);
                    Target* pTarget = (Target*)target;

                    for (int i = 0; i < count; i++)
                    {
                        Target face = pTarget[i];
                        for (int j = 0; j < face.FaceCount; ++j)
                        {
                            // FaceData points at an array of native IplImage pointers.
                            IntPtr* f = ((IntPtr*)(face.FaceData)) + j;
                            IplImage ipl = new IplImage(*f);
                            ipl.IsEnabledDispose = false;

                            Bitmap faceBmp = ipl.ToBitmap();
                            pictureFace.Image = faceBmp;

                        }
                    }

                    // Release the native buffers returned by SearchFaces...
                    NativeMethods.ReleaseMem();

                    // ...and the per-frame OpenCV images on the managed side.
                    Array.ForEach(frames, f => Cv.Release(ref f.image));

                }
                else
                {
                    Debug.WriteLine("zzzzzzzzzzzzz.....");
                    // Sleep until the producer signals that new frames arrived.
                    go.WaitOne();
                }
            }
        }
Esempio n. 12
0
        /// <summary>
        /// Test handler: loads the first .jpg from a fixed folder, round-trips it
        /// Bitmap -> IplImage -> pointer-wrapped IplImage -> Bitmap, shows and
        /// saves the result, then returns after that first file.
        /// </summary>
        private void button1_Click_2(object sender, EventArgs e)
        {
            string[] files = Directory.GetFiles(@"D:\pictures in hall");
            foreach (string file in files)
            {
                // Skip everything that is not a .jpg.
                string ext = Path.GetExtension(file);
                if (ext != ".jpg") continue;

                // NOTE(review): img1 and ipl are never disposed, and
                // Bitmap.FromFile keeps the file locked until disposal —
                // confirm this is acceptable for a throwaway test handler.
                Bitmap img1 = (Bitmap)Bitmap.FromFile(file);

                IplImage ipl = BitmapConverter.ToIplImage(img1);

                // ipl1 wraps the SAME native pointer as ipl (shared ownership;
                // disposing both would double-free the image data).
                IplImage ipl1 = new IplImage(ipl.CvPtr);

                Bitmap bmp = ipl1.ToBitmap();
                this.pictureFace.Image = bmp;

                ipl1.SaveImage(@"d:\iplimg.jpg");

                // Deliberately process only the first matching file.
                return;

                //                 byte[] data = File.ReadAllBytes(file);
                //                 Frame f = new Frame();
                //                 f.data = IntPtr.Zero;// Marshal.AllocCoTaskMem(data.Length);
                //                 //Marshal.Copy(data, 0, f.data, data.Length);
                //                 f.dataLength = 0;// data.Length;
                //                 f.image = IntPtr.Zero;
                //                 f.timeStamp = 0;
                //                 f.searchRect = IntPtr.Zero;
                //                 f.fileName = Marshal.StringToCoTaskMemAnsi(file);
                //
                //                 bool group = NativeMethods.PreProcessFrame(ref f);

            }
        }
Esempio n. 13
0
 /// <summary>
 /// Pumps frames from the right camera into pictureBox2 (optionally running
 /// face detection first) until QueryFrame returns null; ~30 ms per frame.
 /// </summary>
 private void right_camera_roop()
 {
     while ((right_camera_image = Cv.QueryFrame(right_camera_capture)) != null)
     {
         // Dispose the Bitmap being replaced; PictureBox does not release
         // it, so this loop leaked one GDI Bitmap per frame.
         Image oldFrame = pictureBox2.Image;
         if (warai_flg) pictureBox2.Image = (HmfCameraAssist.FaceDe(right_camera_image)).ToBitmap();
         else pictureBox2.Image = right_camera_image.ToBitmap();
         if (oldFrame != null) oldFrame.Dispose();
         System.Threading.Thread.Sleep(30);
         Application.DoEvents();
     }
 }
Esempio n. 14
0
        /// <summary>
        /// On Enter, jumps the video to the frame number typed into frameText.
        /// Out-of-range or non-numeric input restores the current frame number
        /// and shows an error message.
        /// </summary>
        private void frameText_KeyUp(object sender, KeyEventArgs e)
        {
            if (e.KeyCode == Keys.Enter)
            {
                UInt16 enteredFrame;
                // TryParse fixes the unhandled FormatException the original
                // Convert.ToInt16 threw on non-numeric input, and the text is
                // parsed once instead of twice.
                if (UInt16.TryParse(frameText.Text, out enteredFrame) &&
                    enteredFrame >= 1 && enteredFrame <= uLastFrame)
                {
                    uCurrentFrameNo = enteredFrame;
                    cap.SetCaptureProperty(CaptureProperty.PosFrames, (Double)uCurrentFrameNo);
                    img = cap.QueryFrame();
                    // Dispose the Bitmap being replaced; PictureBox does not
                    // release it, so each jump leaked one Bitmap.
                    Image oldImage = ekran.Image;
                    ekran.Image = img.ToBitmap();
                    if (oldImage != null) oldImage.Dispose();
                }
                else
                {
                    frameText.Text = uCurrentFrameNo.ToString();
                    MessageBox.Show("Geçersiz Frame No");
                }
            }
        }
        /// <summary>
        /// Detects faces in <paramref name="src"/> with a Haar cascade, covers
        /// each detected face with a filled black circle, and returns the result.
        /// </summary>
        /// <param name="src">Input BGR image; it is cloned and left unmodified.</param>
        /// <returns>24bpp Bitmap with detected faces blacked out.</returns>
        public System.Drawing.Bitmap FaceDetect(IplImage src)
        {
            // Detection runs on an image shrunk by 1/scale; hit coordinates are
            // scaled back up when drawing. (The original also allocated an
            // 8-element CvColor array that was never used — removed.)
            const double scale = 1.04;
            const double scaleFactor = 1.139;
            const int minNeighbors = 1;

            using (IplImage img = src.Clone())
            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1))
            {
                // Build the equalized grayscale image used for detection.
                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }

                // NOTE(review): the cascade is re-loaded from disk on every call;
                // caching it in a field would avoid the repeated file I/O.
                using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Environment.CurrentDirectory + "\\" + "haarcascade_frontalface_alt.xml"))
                using (CvMemStorage storage = new CvMemStorage())
                {
                    // (The original cleared the freshly created storage — a no-op.)
                    // Detect faces.
                    CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(20, 20));

                    // Cover each detected face with a filled black circle.
                    for (int i = 0; i < faces.Total; i++)
                    {
                        CvRect r = faces[i].Value.Rect;
                        CvPoint center = new CvPoint
                        {
                            X = Cv.Round((r.X + r.Width * 0.5) * scale),
                            Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                        };
                        int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                        img.Circle(center, radius, new CvColor(0, 0, 0), -1, LineType.Link8, 0);
                    }
                }
                // NOTE(review): any previous FindFace clone is not disposed here —
                // confirm ownership before adding a Dispose call.
                FindFace = img.Clone();

                // Convert the processed IplImage to a Bitmap and return it.
                return FindFace.ToBitmap(System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            }
        }
Esempio n. 16
0
        /// <summary>
        /// Steps the video one frame forward and shows it, unless already at
        /// the last frame.
        /// </summary>
        private void next_Click(object sender, EventArgs e)
        {
            if (uCurrentFrameNo < uLastFrame)
            {
                uCurrentFrameNo++;
                frameText.Text = uCurrentFrameNo.ToString();
                cap.SetCaptureProperty(CaptureProperty.PosFrames, (Double)uCurrentFrameNo);
                img = cap.QueryFrame();
                // Dispose the Bitmap being replaced; PictureBox does not release
                // it, so stepping through frames leaked one Bitmap per click.
                Image oldImage = ekran.Image;
                ekran.Image = img.ToBitmap();
                if (oldImage != null) oldImage.Dispose();
            }
            else
            {
                MessageBox.Show("Tebrikler! Video sonu!");
            }
        }
Esempio n. 17
0
        /// <summary>
        /// Timer callback: grabs a camera frame, finds circles via the Hough
        /// transform, shows raw/edge images, and drives the robot (br) from the
        /// detected circle's radius (Area) and horizontal position (x_pos).
        /// </summary>
        private void timer1_Tick(object sender, EventArgs e)
        {
            // Capture starts: grab the next frame.
            IplImage ipl1 = capture.QueryFrame();

            if (ipl1 != null)
            {
                fig = true;
                // Show the captured image; dispose the Bitmap being replaced
                // (PictureBox does not release it — that leak is what the old
                // GC.Collect() workaround tried to hide). The unused l/r/m
                // locals from the original are removed.
                Image old1 = pictureBox1.Image;
                pictureBox1.Image = ipl1.ToBitmap();
                if (old1 != null) old1.Dispose();

                // Circle detection on a grayscale copy. gray and binary were
                // never disposed in the original; using-blocks release them now.
                using (CvMemStorage storage = new CvMemStorage())
                using (IplImage gray = new IplImage(ipl1.Size, BitDepth.U8, 1))
                using (IplImage binary = new IplImage(ipl1.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(ipl1, gray, ColorConversion.BgrToGray);
                    // HoughCircles(input, storage, method, inverse accumulator
                    // resolution, min distance between circles, canny threshold,
                    // accumulator threshold, min radius, max radius).
                    CvSeq<CvCircleSegment> circl = Cv.HoughCircles(gray, storage, HoughCirclesMethod.Gradient, 2, 200, 100, 100, 20, 200);
                    foreach (CvCircleSegment crcl in circl)
                    {
                        ipl1.Circle(crcl.Center, (int)crcl.Radius, CvColor.Blue, 3);
                        labelCenter.Text = crcl.Center.ToString();
                        // Area actually holds the radius here (the *r*3.14 part
                        // was commented out in the original).
                        Area = crcl.Radius;
                        labelRadius.Text = Area.ToString();
                        x_pos = crcl.Center.X;
                    }
                    pictureBox1.Invalidate();

                    // Show the edge image in pictureBox3.
                    Cv.Canny(gray, binary, 100, 100);
                    Image old3 = pictureBox3.Image;
                    pictureBox3.Image = binary.ToBitmap();
                    if (old3 != null) old3.Dispose();
                }

                // Show the annotated capture image in pictureBox2.
                Image old2 = pictureBox2.Image;
                pictureBox2.Image = ipl1.ToBitmap();
                if (old2 != null) old2.Dispose();

                // Drive the robot from the detected circle's size and position.
                if (Area < 45)
                {
                    textBox1.Text = br.Forward();
                }
                else if (Area > 80)
                {
                    textBox1.Text = br.Back();
                }
                else
                {
                    if (x_pos < 100 && Area > 0)
                    {
                        textBox1.Text = br.TurnLeft();
                    }
                    else if (x_pos > 220 && Area > 0)
                    {
                        textBox1.Text = br.TurnRight();
                    }
                    else
                    {
                        textBox1.Text = br.Stop();
                    }
                }
            }
            else
            {
                // No more frames: stop polling.
                timer1.Stop();
            }
        }
Esempio n. 18
0
        /// <summary>
        /// "Open video" menu handler: picks a file, opens it with CvCapture,
        /// shows the first frame, and resets the frame counter.
        /// </summary>
        private void videoAçToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // OpenFileDialog is IDisposable; the original never disposed it.
            using (OpenFileDialog opd = new OpenFileDialog())
            {
                if (opd.ShowDialog() == DialogResult.OK && opd.FileName.Length > 3)
                {
                    cap = CvCapture.FromFile(opd.FileName);
                    img = cap.QueryFrame();
                    uLastFrame = (UInt16)cap.FrameCount;

                    // Size the display control to the video's frame size.
                    Size newSize = new Size(img.Width, img.Height);
                    ekran.Size = newSize;

                    // Dispose any previously shown Bitmap; PictureBox does not.
                    Image oldImage = ekran.Image;
                    ekran.Image = img.ToBitmap();
                    if (oldImage != null) oldImage.Dispose();
                    uCurrentFrameNo = 1;
                    frameText.Text = uCurrentFrameNo.ToString();
                    newVideoOpened = true;
                }
            }
        }
Esempio n. 19
0
        /// <summary>
        /// Convert an IplImage to a WPF BitmapSource. The result can be used in the Set Property of Image.Source
        /// </summary>
        /// <param name="image">OpenCV image to convert.</param>
        /// <returns>The equivalent BitmapSource</returns>
        public static BitmapSource ToBitmapSource(IplImage image)
        {
            // The original cloned the conversion result and leaked the
            // intermediate Bitmap returned by ToBitmap(); disposing that Bitmap
            // directly makes the Clone unnecessary.
            using (Bitmap source = image.ToBitmap())
            {
                IntPtr ptr = source.GetHbitmap(); // obtain the HBITMAP
                try
                {
                    return System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
                        ptr,
                        IntPtr.Zero,
                        System.Windows.Int32Rect.Empty,
                        System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());
                }
                finally
                {
                    // Release the HBITMAP even if the conversion throws; the
                    // original skipped DeleteObject on that path.
                    DeleteObject(ptr);
                }
            }
        }