Code example #1
        private static void ConvertToBitmapSourceTest()
        {
            BitmapSource bs;

            // Image processing with OpenCV (Threshold)
            using (var src = new IplImage("data/lenna.png", LoadMode.GrayScale))
                using (var dst = new IplImage(src.Size, BitDepth.U8, 1))
                {
                    src.Smooth(src, SmoothType.Gaussian, 5);
                    src.Threshold(dst, 0, 255, ThresholdType.Otsu);
                    bs = BitmapSourceConverter.ToBitmapSource(dst.ToBitmap());
                }

            // Display the result in a WPF window
            var image = new System.Windows.Controls.Image {
                Source = bs
            };
            var window = new System.Windows.Window
            {
                Title   = "from IplImage to BitmapSource",
                Width   = bs.PixelWidth,
                Height  = bs.PixelHeight,
                Content = image
            };

            var app = new Application();

            app.Run(window);
        }
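
The example above uses the legacy IplImage API. Below is a minimal sketch of the same blur / Otsu-threshold / convert flow against the Mat-based API; it assumes OpenCvSharp 4.x with the OpenCvSharp.WpfExtensions package (where BitmapSourceConverter and the ToBitmapSource extension live in recent versions), so treat it as a sketch rather than a drop-in replacement.

        // Sketch only: assumes OpenCvSharp 4.x + OpenCvSharp.WpfExtensions.
        // using OpenCvSharp;
        // using OpenCvSharp.WpfExtensions;
        // using System.Windows.Media.Imaging;
        private static BitmapSource LoadAndThresholdSketch(string path)
        {
            using (var src = Cv2.ImRead(path, ImreadModes.Grayscale))
            using (var dst = new Mat())
            {
                // Gaussian blur, then Otsu binarization, mirroring the IplImage version above.
                Cv2.GaussianBlur(src, dst, new OpenCvSharp.Size(5, 5), 0);
                Cv2.Threshold(dst, dst, 0, 255, ThresholdTypes.Binary | ThresholdTypes.Otsu);

                // Convert to a WPF BitmapSource and freeze it so it can later be
                // assigned to Image.Source from any thread.
                var bs = dst.ToBitmapSource();
                bs.Freeze();
                return bs;
            }
        }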
Code example #2
File: MainWindow.xaml.cs  Project: hjy1210/MvvmLight1
 private void DoOcr()
 {
     if (canvas.Children.Count >= 2 && (canvas.Children[1] as Rectangle) != null)
     {
         Rectangle rect = canvas.Children[1] as Rectangle;
         GetRoi(rect, out int left, out int top, out int right, out int bottom);
         OpenCvSharp.Mat m = new OpenCvSharp.Mat();
         if (top < bottom && left < right && left >= 0 && right < viewModel.SourceMat.Cols && top >= 0 && bottom < viewModel.SourceMat.Rows)
         {
             viewModel.SourceMat.SubMat(top, bottom, left, right).CopyTo(m);
             OpenCvSharp.Cv2.HConcat(new OpenCvSharp.Mat[] { m, m, m, m }, m);
             viewModel.BitmapRoi = BitmapSourceConverter.ToBitmapSource(m);
             using (var tesseract = OCRTesseract.Create(MainViewModel.TessData, "eng", "0123456789-", 3, 7)) // oem = 3, psmode = 7: single text line (Page Segmentation Mode)
             {
                 OpenCvSharp.Cv2.GaussianBlur(m, m, new OpenCvSharp.Size(5, 5), 0);
                 tesseract.Run(m,
                               out var outputText, out var componentRects, out var componentTexts, out var componentConfidences, ComponentLevels.TextLine);
                 string data = "(";
                 data += outputText + ")\n";
                 for (int i = 0; i < componentRects.Length; i++)
                 {
                     data += $"({componentTexts[i]}) appeared at {componentRects[i]} with confidence {componentConfidences[i]}\n";
                 }
                 tbxData.Text = data;
             }
         }
     }
 }
Code example #3
File: ComputerVision.cs  Project: gitllama/Tips
        /// <summary>
        /// AdaptiveThreshold: more accurate than Otsu's method
        /// </summary>
        /// <param name="src">input image</param>
        /// <param name="dst">binarized output image</param>
        /// <returns>true : Pass, false : Fail</returns>
        public static bool AdaptiveThreshold(BitmapSource src, out BitmapSource dst)
        {
            using (Mat mat = BitmapSourceConverter.ToMat(src))
                using (Mat matbuf = new Mat())
                {
                    //Cv2.CvtColor
                    //(
                    //    mat,
                    //    matbuf,
                    //    ColorConversionCodes.BGR2GRAY
                    //);

                    Cv2.AdaptiveThreshold
                    (
                        mat,
                        matbuf,
                        255,
                        AdaptiveThresholdTypes.GaussianC,
                        ThresholdTypes.Binary,
                        9,
                        128
                    );

                    dst = matbuf.ToBitmapSource();
                }

            return(true);
        }
Code example #4
        private Task b2()
        {
            sw.Start();
            while (true)
            {
                using (Mat r = new Mat(2160, 3840, MatType.CV_8UC1, r_data))
                    using (Mat g = new Mat(2160, 3840, MatType.CV_8UC1, g_data))
                        using (Mat b = new Mat(2160, 3840, MatType.CV_8UC1, b_data))
                            using (Mat dstmat = new Mat())
                            {
                                // Merge the three single-channel planes (B, G, R) into one 3-channel BGR Mat
                                Cv2.Merge(new Mat[] { b, g, r }, dstmat);
                                var i = BitmapSourceConverter.ToBitmapSource(dstmat);

                                i.Freeze();
                                image = i;


                                fps = (1000 / sw.Elapsed.TotalMilliseconds).ToString();
                                sw.Restart();

                                GC.Collect();
                                GC.WaitForPendingFinalizers();
                                GC.Collect();
                            }
            }
        }
Code example #5
 private void DiffExecute()
 {
     using (Mat image1 = Cv2.ImRead(InputImageName1))
         using (Mat image2 = Cv2.ImRead(InputImageName2))
             using (Mat diffImage = new Mat(new OpenCvSharp.Size(image1.Cols, image1.Rows), MatType.CV_8UC3))
             {
                 Cv2.Absdiff(image1, image2, diffImage);
                 DiffImage = BitmapSourceConverter.ToBitmapSource(diffImage);
             }
 }
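
Cv2.Absdiff requires both inputs to have the same size and type, so DiffExecute will throw if the two files differ in dimensions or channel count. A small guard before the call avoids that; this sketch assumes the same InputImageName1/InputImageName2 properties used above.

     // Sketch: same loading as above, with an explicit size/type check added.
     using (Mat image1 = Cv2.ImRead(InputImageName1))
     using (Mat image2 = Cv2.ImRead(InputImageName2))
     using (Mat diffImage = new Mat())
     {
         // Bail out if either read failed or the images are not comparable.
         if (image1.Empty() || image2.Empty() ||
             image1.Size() != image2.Size() || image1.Type() != image2.Type())
         {
             return;
         }

         Cv2.Absdiff(image1, image2, diffImage);
         DiffImage = BitmapSourceConverter.ToBitmapSource(diffImage);
     }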
Code example #6
 private void videoCaptureManager_OnNewImageComplete(object sender, VideoGrabEventArgs e)
 {
     if (SettingsViewModel.IsShowCameraPreview && _videoCaptureManager.IsCaptureInProgress)
     {
         Dispatcher.CurrentDispatcher.Invoke(() => GrabImageSource = BitmapSourceConverter.ToBitmapSource(e.Image));
     }
     else
     {
         Dispatcher.CurrentDispatcher.Invoke(() => GrabImageSource = null);
     }
 }
Code example #7
 private void gray_button_MouseDoubleClick(object sender, MouseButtonEventArgs e)
 {
     using (Mat mat = new Mat(ImageSourcePath))
         using (Mat matGray = mat.CvtColor(ColorConversionCodes.BGR2GRAY))
         {
             //Cv2.ImShow("grayscale_show", matGray);
             //imageBox01.Source = matGray;
             BitmapSource bitmapSource = BitmapSourceConverter.ToBitmapSource(matGray);
             imageBox01.Source = bitmapSource;
         }
 }
Code example #8
File: ComputerVision.cs  Project: gitllama/Tips
        public static BitmapSource AdaptiveThreshold(BitmapSource src)
        {
            using (Mat mat = BitmapSourceConverter.ToMat(src))
                using (Mat matbuf = new Mat())
                    using (Mat matbuf2 = new Mat())
                    {
                        // Convert to grayscale
                        Cv2.CvtColor
                        (
                            mat,
                            matbuf,
                            ColorConversionCodes.BayerBG2GRAY
                        );

                        // Stronger filtering
                        //Cv2.BilateralFilter
                        //(
                        //    matbuf,
                        //    mat,
                        //    7,
                        //    35,
                        //    5
                        //);

                        Cv2.FastNlMeansDenoising
                        (
                            matbuf,
                            mat
                        );

                        Cv2.AdaptiveThreshold
                        (
                            mat,
                            matbuf,
                            255,
                            AdaptiveThresholdTypes.GaussianC,
                            ThresholdTypes.Binary,
                            9,
                            5
                        );

                        //Cv2.FastNlMeansDenoising
                        //(
                        //    matbuf,
                        //    mat

                        //);

                        return(matbuf.ToBitmapSource());
                    }
        }
Code example #9
        public static BitmapSource ToCvColor(this BitmapSource bitmap, int bayertype, double RGain, double BGain)
        {
            using (var in_mat = BitmapSourceConverter.ToMat(bitmap))
            {
                var rgb_mat = new Mat();
                switch (bayertype)
                {
                case 1:
                    Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerBG2RGB);
                    break;

                case 0:
                    Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerGB2BGR);
                    break;

                case 3:
                    Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerGR2BGR);
                    break;

                case 2:
                    Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerRG2BGR);
                    break;
                }

                var split_mat = rgb_mat.Split();
                var rgb2_mat  = new Mat();

                Cv2.Merge(new Mat[] { split_mat[0] * BGain, split_mat[1], split_mat[2] * RGain }, rgb2_mat);



                //var a = new Mat(a.Rows, a.Cols, MatType.CV_8UC1);
                //var b = new Mat(a.Rows,a.Cols,MatType.CV_8UC3);

                //var b = mat[0];
                //var c = new Mat(a.Rows, a.Cols, MatType.CV_8UC1);


                //Cv2.CvtColor(b, c, ColorConvers);
                //Cv2.CvtColor(mat[0], c, ColorConversionCodes.BayerBG2RGB);

                //pictureBox1.Image = mat[0].ToBitmap();
                //pictureBox2.Image =c.ToBitmap();
                //pictureBox1.Image = b.ToBitmap();
                //pictureBox1.Image = mat[0].ToBitmap();

                return(rgb2_mat.ToBitmapSource());
            }
        }
Code example #10
        private void Button_Click(object sender, RoutedEventArgs e)
        {
            // Get the image data from the Image control as a BitmapSource.
            var bitmapSource = (BitmapSource)image.Source;
            // Convert the BitmapSource to an OpenCV Mat.
            var mat = BitmapSourceConverter.ToMat(bitmapSource);

            // Convert to grayscale with OpenCV.
            Cv2.CvtColor(mat, mat, ColorConversionCodes.RGB2GRAY);
            // Convert the OpenCV Mat back to a BitmapSource.
            var bitmapSource_gray = BitmapSourceConverter.ToBitmapSource(mat);

            // Set the BitmapSource image data on the Image control.
            image.Source = bitmapSource_gray;
        }
Code example #11
        public static BitmapSource ToCvColor(this BitmapSource bitmap, int bayertype, double RGain, double BGain, double[] Matrix, double Gamma)
        {
            using (var in_mat = BitmapSourceConverter.ToMat(bitmap))
            {
                var rgb_mat = new Mat();
                switch (bayertype)
                {
                case 1:
                    Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerBG2RGB);
                    break;

                case 0:
                    Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerGB2BGR);
                    break;

                case 3:
                    Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerGR2BGR);
                    break;

                case 2:
                    Cv2.CvtColor(in_mat, rgb_mat, ColorConversionCodes.BayerRG2BGR);
                    break;
                }

                var split_mat = rgb_mat.Split();
                var rgb2_mat  = new Mat();

                Cv2.Merge(new Mat[]
                {
                    split_mat[2] * Matrix[6] + split_mat[1] * Matrix[7] + split_mat[0] * Matrix[8] * BGain,
                    split_mat[2] * Matrix[3] + split_mat[1] * Matrix[4] + split_mat[0] * Matrix[5],
                    split_mat[2] * Matrix[0] * RGain + split_mat[1] * Matrix[1] + split_mat[0] * Matrix[2]
                }, rgb2_mat);

                var    rgb3_mat = new Mat();
                byte[] lut      = new byte[256];
                double gm       = 1.0 / Gamma;
                for (int i = 0; i < 256; i++)
                {
                    lut[i] = (byte)(Math.Pow(1.0 * i / 255, gm) * 255);
                }
                Cv2.LUT(rgb2_mat, lut, rgb3_mat);

                var buf = rgb3_mat.ToBitmapSource();
                buf.Freeze();
                return(buf);
            }
        }
Code example #12
        public AnalysisWindow()
        {
            InitializeComponent();

            this.DataContext = data;

            try
            {
                cap_Face.Connect("127.0.0.1", 8456);
                //cap_LeftEye.Connect("127.0.0.1", 8457);
                //cap_RightEye.Connect("127.0.0.1", 8458);
            }
            catch (SocketException err)
            {
                MessageBox.Show(err.Message);
            }


            cap_Face.ReceivedFrame += (object obj, EventArgs arg) =>
            {
                Dispatcher.Invoke((Action)(() =>
                {
                    img_Face.Source = BitmapSourceConverter.ToBitmapSource((arg as FrameCallbackArg).frame);
                }));
            };

            cap_LeftEye.ReceivedFrame += (object obj, EventArgs arg) =>
            {
                Dispatcher.Invoke((Action)(() =>
                {
                    img_LeftEye.Source = BitmapSourceConverter.ToBitmapSource((arg as FrameCallbackArg).frame);
                }));
            };

            cap_RightEye.ReceivedFrame += (object obj, EventArgs arg) =>
            {
                Dispatcher.Invoke((Action)(() =>
                {
                    img_RightEye.Source = BitmapSourceConverter.ToBitmapSource((arg as FrameCallbackArg).frame);
                }));
            };


            td_recvFrame = new Thread(ThreadFunc_RecvFrame);
            td_recvFrame.IsBackground = true;
            td_recvFrame.Start();
        }
Code example #13
        private async void ConvertToImage()
        {
            VisualStateManager.GoToElementState(GridMain, "ImageShown", false);

            DrawingMode mode = GetDrawingMode();

            BitmapSourceConverter converter = new BitmapSourceConverter(CreateOptions(), mode);

            try
            {
                Progress<double> progress = new Progress<double>(UpdateProgress);
                ImgAscii.Source = await converter.ConvertAsync(_source, progress);
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message, "Operation failed!");
            }
        }
Code example #14
File: MainWindow.xaml.cs  Project: EZroot/EZSecCam
        private void StartWebcamMenuItem_Click(object sender, RoutedEventArgs e)
        {
            //Start webcam thread
            ThreadHandler.Instance.ProcessWithThreadPoolMethod(new WaitCallback(delegate(object state)
            {
                Camera.Instance.StartWebcam();

                this.Dispatcher.BeginInvoke((Action)(() =>
                {
                    //Update webcam image
                    DispatcherTimer Timer = new DispatcherTimer();
                    Timer.Tick += (sender, e) =>
                    {
                        Mat nextFrame = Camera.Instance.GetNextFrame();
                        BitmapSource frame = BitmapSourceConverter.ToBitmapSource(nextFrame);

                        /*if (Client.isConnected)
                         * {
                         *  Stopwatch s = new Stopwatch();
                         *  s.Start();
                         *  Cv2.Resize(nextFrame, nextFrame, new OpenCvSharp.Size(640, 480));
                         *  Client.SendData(Client.BitmapSourceToArray(BitmapSourceConverter.ToBitmapSource(nextFrame)));
                         *  s.Stop();
                         *  Log.Debug("Resize Client data runtime: {0}ms", s.ElapsedMilliseconds);
                         *  Client.Disconnect();
                         * }*/
                        WebcamImage.Source = frame;
                    };
                    Timer.Interval = TimeSpan.FromMilliseconds(1000);
                    Timer.Start();
                }));
            }));

            EnableDetectorButtons();
            EnableFilterButtons();
            StopServerMenuItem.IsEnabled  = false;
            StartWebcamMenuItem.IsEnabled = false;
        }
Code example #15
        private Task a()
        {
            sw.Start();
            while (true)
            {
                using (Mat buf = new Mat(2160, 3840, MatType.CV_8UC1, dst))
                    using (Mat dstmat = new Mat())
                    {
                        //18fps
                        Cv2.CvtColor(buf, dstmat, ColorConversionCodes.BayerBG2BGR);
                        var i = BitmapSourceConverter.ToBitmapSource(dstmat);

                        //35fps
                        //var i = BitmapSourceConverter.ToBitmapSource(buf);

                        //Cv2.CvtColor()

                        //var grayMat = buf.CvtColor(ColorConversionCodes.BayerBG2BGR);
                        //grayMat.Dispose();

                        i.Freeze();
                        //Cv2.ImShow("dst", dstmat);

                        //image.Do
                        image = i;

                        //dstmat.Dispose();
                        //buf.Dispose();

                        fps = (1000 / sw.Elapsed.TotalMilliseconds).ToString();
                        sw.Restart();

                        GC.Collect();
                        GC.WaitForPendingFinalizers();
                        GC.Collect();
                    }
            }
        }
Code example #16
File: Toast.cs  Project: RobertCurry0216/UtilityForms
        private static void ToastBase(string title, System.Drawing.Bitmap bitmap, Brush color, string msg = null, Action func = null)
        {
            IncrementToastCount();
            var toast = new ToastView();

            var desktopWorkingArea = System.Windows.SystemParameters.WorkArea;

            var toastHeight   = (int)(toast.Height + 10);
            var desktopHeight = (int)(desktopWorkingArea.Height / toastHeight);

            var XOffset = (ToastController.Instance.Offset / desktopHeight) + 1;
            var YOffset = XOffset > 1
                ? (ToastController.Instance.Offset % desktopHeight) + 1
                : (ToastController.Instance.Offset % desktopHeight);

            toast.Left = desktopWorkingArea.Right - XOffset * toast.Width;
            toast.Top  = desktopWorkingArea.Bottom - YOffset * toastHeight;

            toast.Image.Source        = BitmapSourceConverter.ConvertFromImage(bitmap);
            toast.bgColour.Background = color;
            toast.Title.Text          = title;
            if (msg == null)
            {
                toast.Message.Visibility = System.Windows.Visibility.Collapsed;
            }
            else
            {
                toast.Message.Text = msg;
            }

            if (func != null)
            {
                toast.OnClickEvent += new ToastView.OnClick(func);
            }

            toast.Show();
        }
Code example #17
        private void Btn_Open(object sender, RoutedEventArgs e)
        {
            OpenFileDialog openDialog = new OpenFileDialog();

            if (openDialog.ShowDialog() == true)
            {
                if (File.Exists(openDialog.FileName))
                {
                    Global.SourceImage = new Bitmap(openDialog.FileName);

                    inkCanvas.Width  = CanvasBorder.Width = Global.SourceImage.Width;
                    inkCanvas.Height = CanvasBorder.Height = Global.SourceImage.Height;

                    MessageBox.Show(Global.SourceImage.Width + ":" + Global.SourceImage.Height);

                    ImageBrush imgBrush = new ImageBrush
                    {
                        ImageSource = BitmapSourceConverter.ToBitmapSource(Global.SourceImage),
                        Stretch     = Stretch.Uniform,
                    };
                    inkCanvas.Background = imgBrush;
                }
            }
        }
Code example #18
 public void Setup()
 {
     m_Converter = new BitmapSourceConverter();
 }
Code example #19
        public void VideoDrawWork_ProgressChanged(object sender, ProgressChangedEventArgs e)
        {
            capturedImg.Source = BitmapSourceConverter.ToBitmapSource(frame);


        }
Code example #20
 public Button SetSmallImage(Bitmap smallImage)
 {
     _smallImage = BitmapSourceConverter.ConvertFromImage(smallImage);
     return(this);
 }
Code example #21
        private void MouseUp(MouseEventArgs e)
        {
            IsDragging = false;

            var image = e.Source as Image;

            if (image != null)
            {
                var originalPhosImage   = _phosImageList[PhosImageListIndex - 1];
                var pixelMousePositionX = _imagePixelPos.X * originalPhosImage.PixelWidth / image.ActualWidth;
                var pixelMousePositionY = _imagePixelPos.Y * originalPhosImage.PixelHeight / image.ActualHeight;
                var pixelWidth          = RoiWidth * originalPhosImage.PixelWidth / image.ActualWidth;
                var pixelHeight         = RoiHeight * originalPhosImage.PixelHeight / image.ActualHeight;

                if (pixelHeight > 0 && pixelWidth > 0)
                {
                    Mat src = BitmapSourceConverter.ToMat(originalPhosImage);

                    int rectX = (int)Math.Round(pixelMousePositionX);
                    if (rectX < 0)
                    {
                        rectX = 0;
                    }
                    if (rectX + pixelWidth > originalPhosImage.PixelWidth)
                    {
                        rectX = (int)(originalPhosImage.PixelWidth - pixelWidth);
                    }
                    int rectY = (int)Math.Round(pixelMousePositionY);
                    if (rectY < 0)
                    {
                        rectY = 0;
                    }
                    if (rectY + pixelHeight > originalPhosImage.PixelHeight)
                    {
                        rectY = (int)(originalPhosImage.PixelHeight - pixelHeight);
                    }

                    var roi = new OpenCvSharp.Rect(rectX, rectY,
                                                   (int)pixelWidth, (int)pixelHeight);
                    _roiMat = new Mat(src, roi).Clone();

                    int rectThick = (int)Math.Round(0.01 * originalPhosImage.PixelWidth, 0);
                    if (originalPhosImage.PixelHeight < originalPhosImage.PixelWidth)
                    {
                        rectThick = (int)Math.Round(0.01 * originalPhosImage.PixelHeight, 0);
                    }

                    Cv2.Rectangle(src, new OpenCvSharp.Point(rectX, rectY),
                                  new OpenCvSharp.Point(pixelWidth + rectX, pixelHeight + rectY),
                                  new Scalar(0, 0, 255, 255), rectThick);

                    //Cv2.NamedWindow("src", WindowMode.Normal);
                    //Cv2.ImShow("src", src);
                    //Cv2.ResizeWindow("src", 400, 300);
                    //Cv2.WaitKey();
                    //Cv2.DestroyAllWindows();
                    PhosImage = BitmapSourceConverter.ToBitmapSource(src);
                }
            }

            RoiX = RoiY = RoiWidth = RoiHeight = 0;
        }
Code example #22
        private async void DetectFace()
        {
            try
            {
                if (!_videoCaptureManager.IsCaptureInProgress)
                {
                    return;
                }

                var snaphsot = _videoCaptureManager.TakeSnapshot();
                if (snaphsot == null)
                {
                    return;
                }

                DetectionInfo detectionInfo = null;
                switch (DetectionMode)
                {
                case DetectionModeEnum.OpenCV:
                    detectionInfo = DetectFacesOpenCV(snaphsot);
                    break;

                case DetectionModeEnum.Bing:
                    detectionInfo = await DetectFacesBing(snaphsot);

                    break;
                }

                if (detectionInfo != null)
                {
                    DetectionInfo = detectionInfo;
                    if (SettingsViewModel.IsShowDetectionFacePreview)
                    {
                        var detectionImage = detectionInfo.Image;
                        foreach (var faceRectangle in detectionInfo.Rectangles)
                        {
                            detectionImage.Draw(faceRectangle, new Bgr(0, double.MaxValue, 0), 3);
                        }
                        Dispatcher.CurrentDispatcher.Invoke(() => FaceDetectionImageSource = BitmapSourceConverter.ToBitmapSource(detectionImage));
                    }
                    else
                    {
                        Dispatcher.CurrentDispatcher.Invoke(() => FaceDetectionImageSource = null);
                    }

                    if (SettingsViewModel.IsSendToIoTHub)
                    {
                        DeviceNotification facesAnalysis = new DeviceNotification(DeviceId,
                                                                                  detectionInfo.Rectangles.Count,
                                                                                  detectionInfo.MaleCount,
                                                                                  detectionInfo.FemaleCount,
                                                                                  detectionInfo.SmileCount,
                                                                                  detectionInfo.SunGlassesCount,
                                                                                  detectionInfo.ReadingGlassesCount,
                                                                                  detectionInfo.AgeAverage,
                                                                                  detectionInfo.EmotionHappyCount,
                                                                                  detectionInfo.EmotionNeutralCount,
                                                                                  detectionInfo.EmotionDisgustCount,
                                                                                  detectionInfo.EmotionAngerCount,
                                                                                  detectionInfo.HappyRatio,
                                                                                  detectionInfo.HearyCount);

                        _iotHubPublisher.SendDataAsync(facesAnalysis);
                    }
                }
                else
                {
                    DetectionInfo = new DetectionInfo();
                }
            }
            catch (Exception ex)
            {
                App.Log.Error(ex.FlattenException());
            }
        }
Code example #23
 public static void Save(this BitmapSource bmp, string path) => BitmapSourceConverter.ToMat(bmp).Save(path);
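
A hypothetical call site for this Save extension (the variable name and output path are illustrative only, not from the source project):

 // Hypothetical usage of the extension above; someBitmapSource and the path are examples.
 someBitmapSource.Save("output.png");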
Code example #24
 public Button SetLargeImage(Bitmap largeImage)
 {
     _largeImage = BitmapSourceConverter.ConvertFromImage(largeImage);
     return(this);
 }
Code example #25
        public static void GetHSL(BitmapSource bsrc, out uint l, out uint s, out uint h)
        {
            Mat src = BitmapSourceConverter.ToMat(bsrc);

            GetHSL(src, out l, out s, out h);
        }