/// <summary>
/// Runs Tesseract OCR over the ROI selected on the canvas (the second canvas
/// child, a <see cref="Rectangle"/>) and shows the recognized text in tbxData.
/// </summary>
private void DoOcr()
{
    // Need at least two children and the second must be the selection rectangle.
    if (canvas.Children.Count < 2 || !(canvas.Children[1] is Rectangle rect))
    {
        return;
    }

    GetRoi(rect, out int left, out int top, out int right, out int bottom);

    // Reject empty or out-of-bounds ROIs before sub-matting the source image.
    if (top >= bottom || left >= right ||
        left < 0 || right >= viewModel.SourceMat.Cols ||
        top < 0 || bottom >= viewModel.SourceMat.Rows)
    {
        return;
    }

    // Dispose the working Mats (the original leaked both 'm' and the SubMat view).
    using (OpenCvSharp.Mat m = new OpenCvSharp.Mat())
    {
        using (var roiView = viewModel.SourceMat.SubMat(top, bottom, left, right))
        {
            roiView.CopyTo(m);
        }

        // Tile the ROI horizontally 4x; presumably gives Tesseract more context
        // on very short strings — TODO confirm why this was chosen.
        OpenCvSharp.Cv2.HConcat(new OpenCvSharp.Mat[] { m, m, m, m }, m);
        viewModel.BitmapRoi = BitmapSourceConverter.ToBitmapSource(m);

        // Whitelist digits and '-'; oem 3 = default engine,
        // psm 7 = treat the image as a single text line.
        using (var tesseract = OCRTesseract.Create(MainViewModel.TessData, "eng", "0123456789-", 3, 7))
        {
            OpenCvSharp.Cv2.GaussianBlur(m, m, new OpenCvSharp.Size(5, 5), 0);
            tesseract.Run(m, out var outputText, out var componentRects, out var componentTexts, out var componentConfidences, ComponentLevels.TextLine);
            // The original also built a per-component report string here, but it
            // was never displayed or stored — dead code, removed.
            tbxData.Text = outputText;
        }
    }
}
// Benchmark loop: repeatedly wraps raw 4K channel buffers in Mats, converts to
// a frozen BitmapSource, and records the per-iteration frame rate in 'fps'.
// NOTE(review): Cv2.Merge is commented out, so dstmat stays empty — converting
// an empty Mat likely fails; confirm this path is still exercised.
// NOTE(review): declared as returning Task but never returns (infinite loop)
// and is not async — presumably invoked via Task.Run; verify callers.
private Task b2()
{
    sw.Start();
    while (true)
    {
        // Wrap the pre-allocated channel buffers without copying pixel data.
        using (Mat r = new Mat(2160, 3840, MatType.CV_8UC1, r_data))
        using (Mat g = new Mat(2160, 3840, MatType.CV_8UC1, g_data))
        using (Mat b = new Mat(2160, 3840, MatType.CV_8UC1, b_data))
        using (Mat dstmat = new Mat())
        {
            // Cv2.Merge(new Mat[] { b, g, r }, dstmat);
            var i = BitmapSourceConverter.ToBitmapSource(dstmat);
            i.Freeze(); // Freeze so the bitmap can be handed to the UI thread.
            image = i;
            fps = (1000 / sw.Elapsed.TotalMilliseconds).ToString();
            sw.Restart();
            // Forced GC keeps native Mat memory bounded during measurement;
            // GC.Collect in a loop is an anti-pattern outside benchmark code.
            GC.Collect();
            GC.WaitForPendingFinalizers();
            GC.Collect();
        }
    }
}
/// <summary>
/// Demo: runs an OpenCV pipeline (Gaussian smooth + Otsu threshold) on a
/// grayscale image, converts the result to a WPF BitmapSource, and displays
/// it in a window sized to the image.
/// </summary>
private static void ConvertToBitmapSourceTest()
{
    BitmapSource converted;

    using (var source = new IplImage("data/lenna.png", LoadMode.GrayScale))
    using (var binarized = new IplImage(source.Size, BitDepth.U8, 1))
    {
        // Smooth in place, then binarize with an automatically chosen threshold.
        source.Smooth(source, SmoothType.Gaussian, 5);
        source.Threshold(binarized, 0, 255, ThresholdType.Otsu);
        converted = BitmapSourceConverter.ToBitmapSource(binarized.ToBitmap());
    }

    // Host the converted bitmap in a minimal WPF window.
    var imageControl = new System.Windows.Controls.Image { Source = converted };
    var window = new System.Windows.Window
    {
        Title = "from IplImage to BitmapSource",
        Width = converted.PixelWidth,
        Height = converted.PixelHeight,
        Content = imageControl,
    };
    new Application().Run(window);
}
/// <summary>
/// Computes the absolute per-pixel difference of the two input images and
/// publishes it as <c>DiffImage</c>.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when an image cannot be read or the images are incompatible.
/// </exception>
private void DiffExecute()
{
    using (Mat image1 = Cv2.ImRead(InputImageName1))
    using (Mat image2 = Cv2.ImRead(InputImageName2))
    {
        // ImRead returns an empty Mat on failure, and Absdiff requires both
        // inputs to share size and type — fail fast with a clear message
        // instead of letting native OpenCV throw an opaque error.
        if (image1.Empty() || image2.Empty())
        {
            throw new InvalidOperationException("Could not read one or both input images.");
        }
        if (image1.Size() != image2.Size() || image1.Type() != image2.Type())
        {
            throw new InvalidOperationException("Input images must have the same size and type.");
        }

        using (Mat diffImage = new Mat(new OpenCvSharp.Size(image1.Cols, image1.Rows), MatType.CV_8UC3))
        {
            Cv2.Absdiff(image1, image2, diffImage);
            // ToBitmapSource copies pixel data, so disposing diffImage is safe.
            DiffImage = BitmapSourceConverter.ToBitmapSource(diffImage);
        }
    }
}
/// <summary>
/// New-frame callback: shows the live preview while enabled and a capture is
/// running; otherwise clears the preview image.
/// </summary>
// NOTE(review): Dispatcher.CurrentDispatcher is the *calling* thread's
// dispatcher; if this event fires on a worker thread this does not marshal
// to the UI thread — confirm intent.
private void videoCaptureManager_OnNewImageComplete(object sender, VideoGrabEventArgs e)
{
    bool showPreview = SettingsViewModel.IsShowCameraPreview && _videoCaptureManager.IsCaptureInProgress;
    Dispatcher.CurrentDispatcher.Invoke(() =>
        GrabImageSource = showPreview ? BitmapSourceConverter.ToBitmapSource(e.Image) : null);
}
/// <summary>
/// Loads the source image from <c>ImageSourcePath</c>, converts it to
/// grayscale, and displays the result in imageBox01.
/// </summary>
private void gray_button_MouseDoubleClick(object sender, MouseButtonEventArgs e)
{
    using (Mat source = new Mat(ImageSourcePath))
    using (Mat grayscale = source.CvtColor(ColorConversionCodes.BGR2GRAY))
    {
        // ToBitmapSource copies the pixels, so disposing the Mats afterwards is safe.
        imageBox01.Source = BitmapSourceConverter.ToBitmapSource(grayscale);
    }
}
/// <summary>
/// Grayscales the image currently shown in the Image control, round-tripping
/// through an OpenCV Mat.
/// </summary>
private void Button_Click(object sender, RoutedEventArgs e)
{
    // Grab the current image from the Image control.
    var bitmapSource = (BitmapSource)image.Source;

    // Convert to a Mat and make sure it is disposed (the original leaked it).
    using (var mat = BitmapSourceConverter.ToMat(bitmapSource))
    {
        // Grayscale in place.
        // NOTE(review): RGB2GRAY assumes RGB channel order, but WPF bitmaps are
        // usually BGR(A) — BGR2GRAY may be intended; confirm against ToMat's
        // output pixel format.
        Cv2.CvtColor(mat, mat, ColorConversionCodes.RGB2GRAY);

        // Convert back to a BitmapSource and display it.
        image.Source = BitmapSourceConverter.ToBitmapSource(mat);
    }
}
// Window showing three live camera feeds (face, left eye, right eye) received
// over local TCP connections; frames arrive via ReceivedFrame events and a
// background pump thread.
public AnalysisWindow()
{
    InitializeComponent();
    this.DataContext = data;
    try
    {
        // Face feed on port 8456; the eye feeds are currently disabled.
        cap_Face.Connect("127.0.0.1", 8456);
        //cap_LeftEye.Connect("127.0.0.1", 8457);
        //cap_RightEye.Connect("127.0.0.1", 8458);
    }
    catch (SocketException err)
    {
        MessageBox.Show(err.Message);
    }
    // Marshal each received frame to the UI thread and show it in its Image.
    // NOTE(review): '(arg as FrameCallbackArg).frame' dereferences without a
    // null check — throws NullReferenceException if another EventArgs arrives.
    cap_Face.ReceivedFrame += (object obj, EventArgs arg) =>
    {
        Dispatcher.Invoke((Action)(() =>
        {
            img_Face.Source = BitmapSourceConverter.ToBitmapSource((arg as FrameCallbackArg).frame);
        }));
    };
    cap_LeftEye.ReceivedFrame += (object obj, EventArgs arg) =>
    {
        Dispatcher.Invoke((Action)(() =>
        {
            img_LeftEye.Source = BitmapSourceConverter.ToBitmapSource((arg as FrameCallbackArg).frame);
        }));
    };
    cap_RightEye.ReceivedFrame += (object obj, EventArgs arg) =>
    {
        Dispatcher.Invoke((Action)(() =>
        {
            img_RightEye.Source = BitmapSourceConverter.ToBitmapSource((arg as FrameCallbackArg).frame);
        }));
    };
    // Background thread that pumps incoming frames; IsBackground so it does
    // not keep the process alive after the window closes.
    td_recvFrame = new Thread(ThreadFunc_RecvFrame);
    td_recvFrame.IsBackground = true;
    td_recvFrame.Start();
}
/// <summary>
/// Starts the webcam on a pool thread, then installs a 1-second UI timer that
/// pulls frames and shows them in the WebcamImage control; finally updates the
/// menu/button enabled states.
/// </summary>
private void StartWebcamMenuItem_Click(object sender, RoutedEventArgs e)
{
    //Start webcam thread
    ThreadHandler.Instance.ProcessWithThreadPoolMethod(new WaitCallback(delegate(object state)
    {
        Camera.Instance.StartWebcam();
        this.Dispatcher.BeginInvoke((Action)(() =>
        {
            //Update webcam image
            DispatcherTimer timer = new DispatcherTimer();
            // Lambda parameters renamed (s, args): reusing the enclosing
            // method's 'sender'/'e' names inside the lambda is a CS0136
            // scope conflict.
            timer.Tick += (s, args) =>
            {
                Mat nextFrame = Camera.Instance.GetNextFrame();
                BitmapSource frame = BitmapSourceConverter.ToBitmapSource(nextFrame);
                // TODO(review): nextFrame is never disposed — one native Mat
                // leaked per tick; confirm GetNextFrame's ownership contract
                // and dispose here if the caller owns it.
                WebcamImage.Source = frame;
            };
            timer.Interval = TimeSpan.FromMilliseconds(1000);
            timer.Start();
        }));
    }));
    EnableDetectorButtons();
    EnableFilterButtons();
    StopServerMenuItem.IsEnabled = false;
    StartWebcamMenuItem.IsEnabled = false;
}
// Benchmark loop: demosaics a raw Bayer 4K buffer ('dst') to BGR, converts to
// a frozen BitmapSource each iteration, and records the achieved frame rate.
// NOTE(review): declared as returning Task but never returns (infinite loop)
// and is not async — presumably invoked via Task.Run; verify callers.
private Task a()
{
    sw.Start();
    while (true)
    {
        // Wrap the pre-allocated raw buffer without copying pixel data.
        using (Mat buf = new Mat(2160, 3840, MatType.CV_8UC1, dst))
        using (Mat dstmat = new Mat())
        {
            //18fps
            Cv2.CvtColor(buf, dstmat, ColorConversionCodes.BayerBG2BGR);
            var i = BitmapSourceConverter.ToBitmapSource(dstmat);
            //35fps
            //var i = BitmapSourceConverter.ToBitmapSource(buf);
            //Cv2.CvtColor()
            //var grayMat = buf.CvtColor(ColorConversionCodes.BayerBG2BGR);
            //grayMat.Dispose();
            i.Freeze(); // Freeze so the bitmap can be handed to the UI thread.
            //Cv2.ImShow("dst", dstmat);
            //image.Do
            image = i;
            //dstmat.Dispose();
            //buf.Dispose();
            fps = (1000 / sw.Elapsed.TotalMilliseconds).ToString();
            sw.Restart();
            // Forced GC keeps native Mat memory bounded during measurement;
            // GC.Collect in a loop is an anti-pattern outside benchmark code.
            GC.Collect();
            GC.WaitForPendingFinalizers();
            GC.Collect();
        }
    }
}
/// <summary>
/// Lets the user pick an image file, loads it as the global source image,
/// resizes the ink canvas to match, and paints the image as its background.
/// </summary>
private void Btn_Open(object sender, RoutedEventArgs e)
{
    var dialog = new OpenFileDialog();
    if (dialog.ShowDialog() != true || !File.Exists(dialog.FileName))
    {
        return;
    }

    Global.SourceImage = new Bitmap(dialog.FileName);
    inkCanvas.Width = CanvasBorder.Width = Global.SourceImage.Width;
    inkCanvas.Height = CanvasBorder.Height = Global.SourceImage.Height;
    // NOTE(review): looks like a leftover debug popup — confirm it is still wanted.
    MessageBox.Show(Global.SourceImage.Width + ":" + Global.SourceImage.Height);
    inkCanvas.Background = new ImageBrush
    {
        ImageSource = BitmapSourceConverter.ToBitmapSource(Global.SourceImage),
        Stretch = Stretch.Uniform,
    };
}
// Progress callback of the video-draw BackgroundWorker: pushes the current
// 'frame' field to the Image control (ProgressChanged runs on the UI thread).
// NOTE(review): reads the shared 'frame' field without synchronization —
// confirm the worker does not mutate it concurrently with this handler.
public void VideoDrawWork_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
    capturedImg.Source = BitmapSourceConverter.ToBitmapSource(frame);
}
// Finishes a drag-selection over the displayed image: maps the drag rectangle
// from control coordinates to pixel coordinates, extracts the ROI into
// _roiMat, draws the selection rectangle on a copy of the source, and shows
// the annotated image. Always resets the drag state at the end.
private void MouseUp(MouseEventArgs e)
{
    IsDragging = false;
    var image = e.Source as Image;
    if (image != null)
    {
        var originalPhosImage = _phosImageList[PhosImageListIndex - 1];
        // Scale from control space (ActualWidth/Height) to pixel space.
        var pixelMousePositionX = _imagePixelPos.X * originalPhosImage.PixelWidth / image.ActualWidth;
        var pixelMousePositionY = _imagePixelPos.Y * originalPhosImage.PixelHeight / image.ActualHeight;
        var pixelWidth = RoiWidth * originalPhosImage.PixelWidth / image.ActualWidth;
        var pixelHeight = RoiHeight * originalPhosImage.PixelHeight / image.ActualHeight;
        if (pixelHeight > 0 && pixelWidth > 0)
        {
            Mat src = BitmapSourceConverter.ToMat(originalPhosImage);
            // Clamp the ROI origin so the rectangle stays inside the image.
            // NOTE(review): if pixelWidth/pixelHeight ever exceed the image
            // size, the clamped origin goes negative and the Rect below is
            // invalid — confirm the drag logic bounds RoiWidth/RoiHeight.
            int rectX = (int)Math.Round(pixelMousePositionX);
            if (rectX < 0)
            {
                rectX = 0;
            }
            if (rectX + pixelWidth > originalPhosImage.PixelWidth)
            {
                rectX = (int)(originalPhosImage.PixelWidth - pixelWidth);
            }
            int rectY = (int)Math.Round(pixelMousePositionY);
            if (rectY < 0)
            {
                rectY = 0;
            }
            if (rectY + pixelHeight > originalPhosImage.PixelHeight)
            {
                rectY = (int)(originalPhosImage.PixelHeight - pixelHeight);
            }
            // Deep-copy the ROI so it survives 'src' being drawn over below.
            var roi = new OpenCvSharp.Rect(rectX, rectY, (int)pixelWidth, (int)pixelHeight);
            _roiMat = new Mat(src, roi).Clone();
            // Rectangle thickness: 1% of the smaller image dimension.
            // NOTE(review): rounds to 0 for images under ~50 px, and
            // Cv2.Rectangle rejects thickness 0 — confirm inputs are larger.
            int rectThick = (int)Math.Round(0.01 * originalPhosImage.PixelWidth, 0);
            if (originalPhosImage.PixelHeight < originalPhosImage.PixelWidth)
            {
                rectThick = (int)Math.Round(0.01 * originalPhosImage.PixelHeight, 0);
            }
            // Draw the selection in red (BGR 0,0,255) on the working copy.
            Cv2.Rectangle(src, new OpenCvSharp.Point(rectX, rectY), new OpenCvSharp.Point(pixelWidth + rectX, pixelHeight + rectY), new Scalar(0, 0, 255, 255), rectThick);
            //Cv2.NamedWindow("src", WindowMode.Normal);
            //Cv2.ImShow("src", src);
            //Cv2.ResizeWindow("src", 400, 300);
            //Cv2.WaitKey();
            //Cv2.DestroyAllWindows();
            PhosImage = BitmapSourceConverter.ToBitmapSource(src);
        }
    }
    // Reset the drag-selection state for the next gesture.
    RoiX = RoiY = RoiWidth = RoiHeight = 0;
}
// Takes a webcam snapshot, runs face detection (local OpenCV or the Bing API,
// per DetectionMode), optionally shows an annotated preview, and optionally
// publishes aggregate results to IoT Hub. All failures are logged rather than
// thrown — required, since async void exceptions cannot be observed by callers.
private async void DetectFace()
{
    try
    {
        if (!_videoCaptureManager.IsCaptureInProgress)
        {
            return;
        }
        var snaphsot = _videoCaptureManager.TakeSnapshot();
        if (snaphsot == null)
        {
            return;
        }
        DetectionInfo detectionInfo = null;
        switch (DetectionMode)
        {
            case DetectionModeEnum.OpenCV:
                detectionInfo = DetectFacesOpenCV(snaphsot);
                break;
            case DetectionModeEnum.Bing:
                detectionInfo = await DetectFacesBing(snaphsot);
                break;
        }
        if (detectionInfo != null)
        {
            DetectionInfo = detectionInfo;
            if (SettingsViewModel.IsShowDetectionFacePreview)
            {
                // Draw a green box around each detected face, then publish the
                // annotated image to the preview binding via the dispatcher.
                var detectionImage = detectionInfo.Image;
                foreach (var faceRectangle in detectionInfo.Rectangles)
                {
                    detectionImage.Draw(faceRectangle, new Bgr(0, double.MaxValue, 0), 3);
                }
                Dispatcher.CurrentDispatcher.Invoke(() => FaceDetectionImageSource = BitmapSourceConverter.ToBitmapSource(detectionImage));
            }
            else
            {
                Dispatcher.CurrentDispatcher.Invoke(() => FaceDetectionImageSource = null);
            }
            if (SettingsViewModel.IsSendToIoTHub)
            {
                // Bundle the aggregate detection counters for telemetry.
                // NOTE(review): SendDataAsync is not awaited (fire-and-forget) —
                // confirm its failures are observed somewhere.
                DeviceNotification facesAnalysis = new DeviceNotification(DeviceId, detectionInfo.Rectangles.Count, detectionInfo.MaleCount, detectionInfo.FemaleCount, detectionInfo.SmileCount, detectionInfo.SunGlassesCount, detectionInfo.ReadingGlassesCount, detectionInfo.AgeAverage, detectionInfo.EmotionHappyCount, detectionInfo.EmotionNeutralCount, detectionInfo.EmotionDisgustCount, detectionInfo.EmotionAngerCount, detectionInfo.HappyRatio, detectionInfo.HearyCount);
                _iotHubPublisher.SendDataAsync(facesAnalysis);
            }
        }
        else
        {
            DetectionInfo = new DetectionInfo();
        }
    }
    catch (Exception ex)
    {
        App.Log.Error(ex.FlattenException());
    }
}