/// <summary>
/// Converts a single-channel float pixel container to a grayscale Bgr24 WriteableBitmap
/// by replicating each converted byte into all three channels.
/// </summary>
/// <param name="src">Source pixels; ConvertToByte() is applied to each element.</param>
/// <param name="buf">Optional reusable BGR byte buffer; reallocated if null or too small.</param>
/// <param name="dst">Optional reusable destination bitmap; created if null.</param>
/// <returns>The populated WriteableBitmap (either <paramref name="dst"/> or a new one).</returns>
public static WriteableBitmap ToMono(this Pixel <float> src, byte[] buf = null, WriteableBitmap dst = null)
{
    int required = src.Width * src.Height * 3;
    // Reallocate when the caller-supplied buffer is missing or too small; the Mat
    // below wraps buf directly, so an undersized buffer would be read out of bounds.
    if (buf == null || buf.Length < required)
    {
        buf = new byte[required];
    }
    if (dst == null)
    {
        dst = new WriteableBitmap(src.Width, src.Height, 96, 96, PixelFormats.Bgr24, null);
    }
    // The Mat shares buf's memory, so filling buf also fills the Mat.
    using (Mat matsrc = new Mat(src.Height, src.Width, MatType.CV_8UC3, buf))
    {
        int c = 0;
        for (int y = 0; y < src.Height; y++)
        {
            for (int x = 0; x < src.Width; x++)
            {
                var gray = src[x, y].ConvertToByte();
                buf[c++] = gray; // B
                buf[c++] = gray; // G
                buf[c++] = gray; // R
            }
        }
        WriteableBitmapConverter.ToWriteableBitmap(matsrc, dst);
    }
    return dst;
}
// Verifies that a Gray8 WriteableBitmap converts to a CV_8UC1 Mat with identical pixels.
public void ToMatGray8()
{
    const int width = 3;
    const int height = 4;
    var buffer = new byte[height, width]
    {
        { 1, 2, 3 },
        { 4, 5, 6 },
        { 7, 8, 9 },
        { 10, 11, 12 },
    };

    var bitmap = new WriteableBitmap(width, height, 92, 92, PixelFormats.Gray8, null);
    bitmap.WritePixels(new Int32Rect(0, 0, width, height), buffer, width, 0);

    using var converted = WriteableBitmapConverter.ToMat(bitmap);
    Assert.Equal(MatType.CV_8UC1, converted.Type());
    Assert.Equal(width, converted.Cols);
    Assert.Equal(height, converted.Rows);

    var pixels = converted.GetUnsafeGenericIndexer <byte>();
    for (int row = 0; row < height; row++)
    {
        for (int col = 0; col < width; col++)
        {
            Assert.True(
                buffer[row, col] == pixels[row, col],
                $"wb[{row},{col}] = {buffer[row, col]}, mat[{row},{col}] = {pixels[row, col]}");
        }
    }
}
// Renders the given Mat into the ShowImage control as a Bgr24 bitmap.
private void DisplayImage(Mat image)
{
    var bitmap = new WriteableBitmap(image.Width, image.Height, 96, 96, PixelFormats.Bgr24, null);
    WriteableBitmapConverter.ToWriteableBitmap(image, bitmap);
    ShowImage.Source = bitmap;
}
/// <summary>
/// Converts an int pixel container to a colorized Bgr24 WriteableBitmap: values are
/// clamped to [0,255], expanded to three channels via <paramref name="cc"/>, then the
/// channels are scaled by a fixed 3x3 gain matrix (2, 1, 1.8 on the diagonal).
/// </summary>
/// <param name="src">Source pixels, read from src.pixel.</param>
/// <param name="cc">Color conversion applied to the single-channel image.</param>
/// <param name="buf">Optional reusable 3-channel byte buffer; created if null.</param>
/// <param name="dst">Optional reusable destination bitmap; created if null.</param>
/// <returns>The populated WriteableBitmap.</returns>
public static WriteableBitmap ToColor(this Pixel <int> src, ColorConversionCodes cc, byte[] buf = null, WriteableBitmap dst = null)
{
    if (buf == null)
    {
        buf = new byte[src.Width * src.Height * 3];
    }
    // Allocate the single-channel staging buffer directly; the original declared it
    // null and immediately null-checked it, which was dead code.
    byte[] bufraw = new byte[src.Width * src.Height];
    if (dst == null)
    {
        dst = new WriteableBitmap(src.Width, src.Height, 96, 96, PixelFormats.Bgr24, null);
    }

    // Per-channel gain applied after the color conversion.
    var matrix = new float[] { 2, 0, 0, 0, 1, 0, 0, 0, 1.8F };

    // Clamp each source value into the byte range.
    for (int i = 0; i < src.pixel.Length; i++)
    {
        int v = src.pixel[i];
        bufraw[i] = (byte)(v > 255 ? 255 : v < 0 ? 0 : v);
    }

    using (Mat matmatrix = new Mat(3, 3, MatType.CV_32FC1, matrix))
    using (Mat matraw = new Mat(src.Height, src.Width, MatType.CV_8UC1, bufraw))
    using (Mat mat = new Mat(src.Height, src.Width, MatType.CV_8UC3, buf))
    {
        Cv2.CvtColor(matraw, mat, cc);
        Cv2.Transform(mat, mat, matmatrix);
        WriteableBitmapConverter.ToWriteableBitmap(mat, dst);
    }
    return dst;
}
/// <summary>
/// Loads an image file into an ExtraImageInfo at the system DPI. Grayscale images are
/// kept as-is; any non-Bgra32 color image is converted to Bgra32.
/// </summary>
/// <param name="fileName">Path of the image file to load.</param>
/// <returns>The loaded image info with Image normalized as described above.</returns>
public static ExtraImageInfo CreateImageFromFile(string fileName)
{
    double dpiX = OS.SystemDpiX;
    double dpiY = OS.SystemDpiY;
    ExtraImageInfo extraImageInfo = WriteableBitmapCreator.CreateWriteableBitmapFromFile(fileName, dpiX, dpiY);

    bool isImageFormatGrayScale = WriteableBitmapWrapper.IsImageFormatGrayScale(extraImageInfo.Image);
    // Grayscale images stay untouched; everything else is normalized to Bgra32 unless
    // it already is. (The original's resultImage local was dead and has been removed.)
    if (!isImageFormatGrayScale && extraImageInfo.Image.Format != PixelFormats.Bgra32)
    {
        extraImageInfo.Image = WriteableBitmapConverter.ConvertWriteableBitmap(extraImageInfo.Image, PixelFormats.Bgra32);
    }
    return extraImageInfo;
}
/// <summary>
/// Handles the Done button: resolves each item's chosen signal, resizes every image to
/// ImgWidth x ImgHeight, writes it as a JPEG under instTest\s1, then runs the MATLAB
/// "learning" function on the exported set and stores the resulting parameters.
/// </summary>
private void Done(object sender, RoutedEventArgs e)
{
    string imgPath = AppDomain.CurrentDomain.BaseDirectory + "instTest";
    int count = 1;
    foreach (ImagePreperationItem ipi in IpiList)
    {
        // Identify the chosen signal and record its real name on the item.
        int index = ipi.SignalIndex;
        string signalSt = SignalList[index];
        ipi.SignalName = SignalToNameDictionary[signalSt];

        // Resize the image and write it where the MATLAB script expects it.
        // using blocks release the native images and the stream even when
        // encoding throws (the original leaked all three on an exception).
        using (IplImage img = new IplImage(ipi.ImageName))
        using (IplImage resized = new IplImage(new CvSize(ImgWidth, ImgHeight), img.Depth, img.NChannels))
        {
            Cv.Resize(img, resized);
            WriteableBitmap rawImage = WriteableBitmapConverter.ToWriteableBitmap(resized);
            using (var fs = new System.IO.FileStream(imgPath + @"\s1\" + count + ".jpg", System.IO.FileMode.Create))
            {
                JpegBitmapEncoder pbe = new JpegBitmapEncoder();
                pbe.Frames.Add(BitmapFrame.Create(rawImage));
                pbe.Save(fs);
            }
        }
        count++;
    }

    // MATLAB learns from the exported images.
    MLApp.MLApp matlab = new MLApp.MLApp();
    Console.WriteLine(@"cd '" + imgPath + "'");
    matlab.Execute(@"cd '" + imgPath + "'");

    result = null;
    int imgCount = count - 1;
    // Call the MATLAB function "learning" requesting 5 output arguments.
    matlab.Feval("learning", 5, out result, imgCount);

    parameters = result as object[];
    ImageParameters[0] = imgCount;
    ImageParameters[1] = ImgWidth;
    ImageParameters[2] = ImgHeight;

    // Persist parameters and the IPI list.
    WriteoutIpiList();
    WriteoutMatlabPrams();
}
// Display the image: pushes the converted frame onto the UI thread asynchronously.
void SetInputImage(Mat inputImage)
{
    Action render = () =>
    {
        this.InputImage.Source = WriteableBitmapConverter.ToWriteableBitmap(inputImage);
        //this.DrawLine(this.GetEllipsePosition(InOutPutMode.input), InOutPutMode.input);
    };
    this.Dispatcher.BeginInvoke(render);
}
// Re-renders the preview as the vertical strip of the master image lying between
// the left and right cover-side offsets (inclusive).
private void UpdatePreviewBitmap()
{
    using (Mat master = new Mat(_page.Image.AbsoluteMasterPath, ImreadModes.Unchanged))
    {
        int left = CoverLeftSide.Offset;
        int stripWidth = CoverRightSide.Offset - left + 1;
        using (Mat strip = CoverSegmenting.ExtractRect(master, new OpenCvSharp.Rect(left, 0, stripWidth, master.Height)))
        {
            PreviewBitmap = WriteableBitmapConverter.ToWriteableBitmap(strip);
        }
    }
}
// Loads the page's master image and returns the strip between the two
// cover-side offsets (inclusive) as a WriteableBitmap.
private static unsafe WriteableBitmap CopyBitmap(PageViewModel _page, CoverSideCandidate CoverLeftSide, CoverSideCandidate CoverRightSide)
{
    using (Mat master = new Mat(_page.Image.AbsoluteMasterPath, ImreadModes.Unchanged))
    {
        int left = CoverLeftSide.Offset;
        int stripWidth = CoverRightSide.Offset - left + 1;
        using (Mat strip = ExtractRect(master, new OpenCvSharp.Rect(left, 0, stripWidth, master.Height)))
        {
            return WriteableBitmapConverter.ToWriteableBitmap(strip);
        }
    }
}
/// <summary>
/// Plays cheerleading_0001.mp4 from the application directory, pacing frames to the
/// video's FPS and rendering each one on the UI thread. Frames that fall behind
/// schedule are dropped.
/// </summary>
public void player()
{
    string path = AppDomain.CurrentDomain.BaseDirectory;
    string fullpath = System.IO.Path.Combine(path, "cheerleading_0001.mp4");
    // using releases the capture when playback ends (the original leaked it).
    using (VideoCapture video = VideoCapture.FromFile(fullpath))
    {
        var asfe = video.Get(CaptureProperty.FrameCount);
        if (!video.IsOpened())
        {
            MessageBox.Show("not open");
            return;
        }
        video.Set(CaptureProperty.FrameWidth, video.FrameWidth);
        video.Set(CaptureProperty.FrameHeight, video.FrameHeight);
        double fps = video.Get(CaptureProperty.Fps);

        // Frame loop.
        int count = 0;
        DateTime start = DateTime.Now;
        while (true)
        {
            using (Mat frame = new Mat())
            {
                // Stop on a failed read — the original looped forever here, and also
                // allocated a new undisposed Mat on every iteration.
                if (!video.Read(frame))
                {
                    break;
                }
                if (frame.Width == 0 && frame.Height == 0)
                {
                    break;
                }
                count++;
                TimeSpan playTime = DateTime.Now - start;
                TimeSpan targetTime = TimeSpan.FromSeconds(count / fps);
                // Drop the frame when playback is behind schedule.
                if (targetTime < playTime)
                {
                    continue;
                }
                Dispatcher.Invoke(new Action(delegate()
                {
                    var a = WriteableBitmapConverter.ToWriteableBitmap(frame, 96, 96, PixelFormats.Bgr24, null);
                    //img_player.Source = a;
                }));
                playTime = DateTime.Now - start;
                // Sleep off any remaining time until this frame is due.
                if (targetTime > playTime)
                {
                    Thread.Sleep(targetTime - playTime);
                }
            }
        }
    }
}
/// <summary>
/// Opens camera selection: switches the stream source between video-file, USB-camera,
/// and IP-camera modes, and previews one frame from the chosen camera.
/// </summary>
private void CameraChanger_Click(object sender, RoutedEventArgs e)
{
    // Stop any running capture first (Start being disabled means capture is active).
    if (Start.IsEnabled == false)
    {
        Stop_Click(new object(), new RoutedEventArgs());
    }
    if (cameraMode == StreamSrc.Video)
    {
        var f = new CameraSelectWindow().GetCam();
        // An empty string means the user cancelled the dialog.
        if (f == "")
        {
            return;
        }
        try
        {
            VideoCapture v;
            if (f == "USB")
            {
                cameraMode = StreamSrc.USB_cam;
                v = VideoCapture.FromCamera(CaptureDevice.Any);
                filePath = "0";
                camM = true;
            }
            else
            {
                // Anything other than "USB" is treated as an IP-camera address.
                cameraMode = StreamSrc.IP_cam;
                filePath = f;
                v = VideoCapture.FromFile(filePath);
                camM = true;
            }
            // Grab a single frame to preview the selected source.
            var r = new Mat();
            v.Read(r);
            myImage.Source = WriteableBitmapConverter.ToWriteableBitmap(r);
            r.Dispose();
            v.Dispose();
            CameraChanger.Content = "Режим камеры активирован";
            CameraChanger.Background = Brushes.Green;
            labelCurState.Content = "Получение потока с камеры";
        }
        catch
        {
            // NOTE(review): v and r leak if the preview grab throws — consider using blocks.
            MessageBox.Show("Камера недоступна");
            cameraMode = StreamSrc.Video;
        }
    }
    else
    {
        // Already in a camera mode: toggle back to video-file mode.
        labelCurState.Content = "Чтение видео-файла";
        CameraChanger.Content = "Выбор камеры";
        CameraChanger.Background = Brushes.LightGray;
        cameraMode = StreamSrc.Video;
    }
}
/// <summary>
/// Wires the ViewModel's script-execution and camera-drawing events to the UI once the
/// DataContext is assigned, then hooks Python console creation.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    // Delegate used by the ViewModel to run scripts (on the UI thread).
    DataContextChanged += (o, e) =>
    {
        ViewModel vm = DataContext as ViewModel;
        if (vm != null)
        {
            vm._ExecuteScript += (sender, arg) =>
            {
                Dispatcher.Invoke(new Action(() =>
                {
                    pythonConsole.Pad.Console.RunStatements(arg.cmd);
                }));
            };
            vm._DrawCameraBitmap += (sender, arg) =>
            {
                Dispatcher.BeginInvoke(new Action(() =>
                {
                    IplImage img = vm.VisionControl.GetCameraImage();
                    DrawCameraViewEventArgs a = arg as DrawCameraViewEventArgs;
                    if (a._draw == 1)
                    {
                        // Draw mode 1: rectangle from the event's coordinates.
                        CvRect rect = new CvRect(a._x1, a._y1, a._x2, a._y2);
                        img.DrawRect(rect, new CvScalar(255, 0, 0), 2);
                    }
                    else if (a._draw == 2)
                    {
                        // Draw mode 2: cross centered at (_x1,_y1) with arm spans _x2/_y2.
                        int x1 = a._x1 - a._x2 / 2;
                        int x2 = a._x1 + a._x2 / 2;
                        int y1 = a._y1 - a._y2 / 2;
                        int y2 = a._y1 + a._y2 / 2;
                        img.DrawLine(x1, a._y1, x2, a._y1, new CvScalar(255, 0, 0), 2);
                        img.DrawLine(a._x1, y1, a._x1, y2, new CvScalar(255, 0, 0), 2);
                    }
                    if (VM.CenterLine == true)
                    {
                        // Fixed center cross at 320 for a 640-pixel view.
                        img.DrawLine(0, 320, 640, 320, new CvScalar(255, 0, 0, 0), 2);
                        img.DrawLine(320, 0, 320, 640, new CvScalar(255, 0, 0, 0), 2);
                    }
                    WriteableBitmapConverter.ToWriteableBitmap(img, _col_wb);
                    cameraImage.Source = _col_wb;
                    img.Dispose();
                    //cameraImage.Source = vm.VisionControl.GetCameraBitmap();
                }));
            };
        }
    };
    pythonConsole.Pad.Host.ConsoleCreated += new PythonConsoleControl.ConsoleCreatedEventHandler(Host_ConsoleCreated);
}
/// <summary>
/// Loads an image file and returns it as a BGRA WriteableBitmap.
/// </summary>
/// <param name="filename">Path of the image file.</param>
/// <returns>The loaded image converted to BGRA.</returns>
public static WriteableBitmap LoadBitmap(string filename)
{
    using (Mat mat = new Mat(filename, ImreadModes.Unchanged))
    using (Mat bgra = new Mat())
    {
        // ImreadModes.Unchanged can yield 1-, 3-, or 4-channel images; pick the matching
        // conversion instead of assuming BGR, which fails on grayscale or alpha input.
        switch (mat.Channels())
        {
            case 1:
                Cv2.CvtColor(mat, bgra, ColorConversionCodes.GRAY2BGRA);
                break;
            case 4:
                mat.CopyTo(bgra); // already BGRA
                break;
            default:
                Cv2.CvtColor(mat, bgra, ColorConversionCodes.BGR2BGRA);
                break;
        }
        return WriteableBitmapConverter.ToWriteableBitmap(bgra);
    }
}
/// <summary>
/// Loads the first frame of the video at filePath into the image control.
/// </summary>
private void GetFirstFrame()
{
    // using guarantees release even if reading or conversion throws
    // (the original leaked both objects on an exception).
    using (var v = VideoCapture.FromFile(filePath))
    using (var r = new Mat())
    {
        v.Read(r);
        myImage.Source = WriteableBitmapConverter.ToWriteableBitmap(r);
    }
}
/// <summary>
/// Converts the Mat to a WriteableBitmap and discards the result.
/// NOTE(review): the conversion has no observable effect beyond exercising the
/// converter — callers that need the bitmap should call
/// WriteableBitmapConverter.ToWriteableBitmap directly. (Dead commented-out
/// WritePixels/Bitmap experiments were removed.)
/// </summary>
public static void ToWriteableBitmap(this Mat mat)
{
    var b = WriteableBitmapConverter.ToWriteableBitmap(mat);
}
/// <summary>
/// Builds the image-preparation window: collects all CAN signals, asks the user for an
/// image folder, loads every image resized to 427x240, and creates the IPI list bound
/// to the ListView.
/// </summary>
/// <param name="candb">CAN database supplying the selectable signals.</param>
public ImagePreperation(CANdb candb)
{
    ImgWidth = 1280;
    ImgHeight = 720;
    IpiList = new List <ImagePreperationItem>();
    ImageParameters = new int[3];

    // Signals from the candb instance; this list is bound to the dropdown in the ListView.
    List <Signal> signals = candb.GetAllSignal();
    SignalList = new List <string>();
    // The dictionary converts the dropdown label back to the actual signal name.
    foreach (Signal s in signals)
    {
        SignalList.Add(s.ToString());
        SignalToNameDictionary.Add(s.ToString(), s.Name);
    }

    ImageDictionary = new Dictionary <string, WriteableBitmap>();
    SignalDictionary = new Dictionary <string, string>();

    // Ask the user to specify the image folder, starting at the app directory.
    string initPath = AppDomain.CurrentDomain.BaseDirectory;
    FolderBrowserDialog fbd = new FolderBrowserDialog();
    fbd.SelectedPath = initPath;
    System.Windows.Forms.DialogResult result = fbd.ShowDialog();
    string dirPath = fbd.SelectedPath;
    string[] paths = Directory.GetFiles(dirPath);
    NameList = new List <string>(paths);

    // Create the IPI list with image and signal name. It is bound to the ListView,
    // so any change in the form directly changes values in the IPI list.
    foreach (string name in NameList)
    {
        // Resize each image so it fits the window; dispose the native images once the
        // WPF-compatible bitmap exists (the original leaked them on every iteration).
        using (IplImage img = new IplImage(name))
        using (IplImage resized = new IplImage(new CvSize(427, 240), img.Depth, img.NChannels))
        {
            Cv.Resize(img, resized);
            WriteableBitmap wb = WriteableBitmapConverter.ToWriteableBitmap(resized);
            ImageDictionary.Add(name, wb);
            IpiList.Add(new ImagePreperationItem(wb, name));
        }
    }
    InitializeComponent();
}
// Receives a captured frame via e.UserState and shows it in the Monitor control.
// The IplImage must be converted to a WriteableBitmap before assignment to Source
// (a System.Drawing.Bitmap would fail the type check); the converter lives in
// OpenCvSharp.Extensions (OpenCvSharp.UserInterface.dll).
private void worker_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
    var frame = (IplImage)e.UserState;
    Monitor.Source = WriteableBitmapConverter.ToWriteableBitmap(frame);
}
/// <summary>
/// Returns the captured image as an RGB bitmap.
/// </summary>
/// <returns>The shared color bitmap, refreshed from the current capture.</returns>
public WriteableBitmap GetCameraBitmap()
{
    // Expand the grayscale capture to RGB, then refresh the shared bitmap from it.
    Cv2.CvtColor(_mat, _col_mat, ColorConversion.GrayToRgb);
    IplImage ipl = _col_mat.ToIplImage();
    WriteableBitmapConverter.ToWriteableBitmap(ipl, _col_wb);
    ipl.Dispose();
    return _col_wb;
}
/// <summary>
/// Reads the given video file frame by frame, runs YOLO object detection on each frame,
/// overlays the vehicle count, and displays the annotated frame.
/// </summary>
/// <param name="filePathVideo">Path of the video file (boxed string; thread entry point).</param>
void CaptureVideo(object filePathVideo)
{
    // YOLO input size follows the display control.
    int yoloWidth = pictureBoxIpl1.Width, yoloHeight = pictureBoxIpl1.Height;
    Mat image = new Mat();
    WriteableBitmap wb = new WriteableBitmap(yoloWidth, yoloHeight, 96, 96, PixelFormats.Bgr24, null);
    byte[] imageInBytes = new byte[(int)(yoloWidth * yoloHeight * image.Channels())];
    using (var yoloWrapper = new YoloWrapper(config, weights, label))
    using (var videocapture = new VideoCapture((string)filePathVideo))
    {
        // Per-frame delay so playback roughly matches the source frame rate.
        int sleepTime = (int)Math.Round(1000 / videocapture.Fps);
        using (Mat imageOriginal = new Mat())
        {
            while (true)
            {
                // Read a single frame (the original called Read twice, silently
                // dropping every other frame) and convert it into a byte array.
                videocapture.Read(imageOriginal);
                image = imageOriginal.Resize(new OpenCvSharp.Size(yoloWidth, yoloHeight));
                if (image.Empty())
                {
                    break;
                }
                imageInBytes = image.ToBytes();

                // Conduct object detection and collect the result.
                var items = yoloWrapper.Detect(imageInBytes);
                count = 0;
                GetValueInImage(items, image);
                var countVehicle = $"Vehicle count: {count}";
                image.PutText(countVehicle, new OpenCvSharp.Point(0, 50), HersheyFonts.HersheyTriplex, 1.5, colorCount);

                // Display the detection result.
                WriteableBitmapConverter.ToWriteableBitmap(image, wb);
                Bitmap bitmap = BitmapFromSource(wb);
                SetImage(bitmap);
                // Pace playback with the computed delay; the original passed 0, which
                // blocks indefinitely waiting for a key press on every frame.
                Cv2.WaitKey(sleepTime);
            }
        }
    }
}
// Shows the Mat as the canvas background (unstretched) and resizes the
// window to fit the image plus a 40-pixel margin on each axis.
public void fn_SetImage(Mat mat, string strTitle = "Image_")
{
    this.Title = strTitle;
    var brush = new ImageBrush(WriteableBitmapConverter.ToWriteableBitmap(mat))
    {
        Stretch = Stretch.None
    };
    canvas.Width = brush.ImageSource.Width;
    canvas.Height = brush.ImageSource.Height;
    canvas.Background = brush;
    this.Width = canvas.Width + 40;
    this.Height = canvas.Height + 40;
}
// Returns the canvas background image as a Mat, or null when no
// WriteableBitmap-backed ImageBrush is set.
public Mat fn_GetImage()
{
    if (canvas.Background is ImageBrush brush && brush.ImageSource is WriteableBitmap bitmap)
    {
        return WriteableBitmapConverter.ToMat(bitmap);
    }
    return null;
}
/// <summary>
/// Opens an image file chosen by the user, overwrites its pixels in place with a
/// generated test pattern via direct pointer access, and displays the result.
/// </summary>
private void button1_Click(object sender, RoutedEventArgs e)
{
    // a/b/c are reused below as row/column/channel loop indices, so they cannot be removed.
    int a, b, c, d;
    a = 5; b = 5; c = 0;
    d = addValue(a, b, ref c);
    OpenFileDialog fDlg = new OpenFileDialog();
    string strFileName = "";
    fDlg.DefaultExt = "BMP";
    fDlg.Filter = "그림파일(*.bmp, *.jpg, *.png)|*.bmp;*.jpg;*.png;|모든파일 *.*|*.*";
    if (fDlg.ShowDialog() == true)
    {
        strFileName = fDlg.FileName;
        Mat imgSrc = new Mat(strFileName, ImreadModes.Color);
        WriteableBitmap bitImg = new WriteableBitmap(imgSrc.Width, imgSrc.Height, 96, 96, PixelFormats.Bgr24, null);
        unsafe
        {
            // Write directly into the Mat's native pixel buffer.
            Byte *pBuffer = (Byte *)imgSrc.Data;
            for (a = 0; a < imgSrc.Height; a++)
            {
                for (b = 0; b < imgSrc.Width; b++)
                {
                    for (c = 0; c < imgSrc.Channels(); c++)
                    {
                        pBuffer[a * imgSrc.Width * imgSrc.Channels() + b * imgSrc.Channels() + c] = (Byte)((a * b + c) % 255); // monochrome ripple pattern
                        //pBuffer[a * imgSrc.Width * imgSrc.Channels() + b * imgSrc.Channels() + c] = (Byte)((a + b + c) % 255); // monochrome diagonal stripes
                        //pBuffer[a * imgSrc.Width * imgSrc.Channels() + b * imgSrc.Channels() + c] = (Byte)((a + b * c) % 255); // color stripes
                    }
                    // color grid pattern
                    //pBuffer[a * imgSrc.Width * imgSrc.Channels() + b * imgSrc.Channels() + 0] = (Byte)((a) % 255);
                    //pBuffer[a * imgSrc.Width * imgSrc.Channels() + b * imgSrc.Channels() + 1] = (Byte)((b) % 255);
                    //pBuffer[a * imgSrc.Width * imgSrc.Channels() + b * imgSrc.Channels() + 2] = (Byte)((a + b) % 255);
                }
            }
        }
        WriteableBitmapConverter.ToWriteableBitmap(imgSrc, bitImg);
        image.Source = bitImg;
    }
}
// Smoke test: converts a Mat whose width is not a multiple of 4 and shows it in a window.
public void ToBitmap()
{
    using var source = new Mat(FilePath.Image.Lenna511, ImreadModes.Color); // width % 4 != 0
    var bitmap = WriteableBitmapConverter.ToWriteableBitmap(source);

    var view = new System.Windows.Controls.Image { Source = bitmap };
    var window = new System.Windows.Window { Content = view };
    new Application().Run(window);
}
// Verifies that a CV_8UC1 Mat converts into a WriteableBitmap with identical bytes.
public void ToWriteableBitmap()
{
    var expected = new byte[] { 1, 2, 3, 4, 5, 6 };
    using (var source = new Mat(3, 2, MatType.CV_8UC1, expected))
    {
        var bitmap = WriteableBitmapConverter.ToWriteableBitmap(source);

        var actual = new byte[6];
        bitmap.CopyPixels(Int32Rect.Empty, actual, source.Cols, 0);

        for (int i = 0; i < expected.Length; i++)
        {
            Assert.True(expected[i] == actual[i], $"values[{i}] = {expected[i]}, pixels[{i}] = {actual[i]}");
        }
    }
    GC.KeepAlive(expected);
}
/// <summary>
/// Starts camera display: one thread collects frames (CollectFrames), a second thread
/// dequeues them at the target fps, updates the image control on the UI thread, and
/// toggles an indicator every 100 displayed frames.
/// </summary>
public void showCamera()
{
    new Thread(CollectFrames).Start();
    new Thread(() =>
    {
        int k = 0; // count of frames actually displayed
        Thread.Sleep(100); // give CollectFrames a head start
        while (!stop)
        {
            IplImage img;
            bool suc = showFrames.TryDequeue(out img);
            if (suc)
            {
                wpfImage.Dispatcher.BeginInvoke(new Action(() =>
                {
                    k++;
                    try
                    {
                        UpdateImage(wpfImage, WriteableBitmapConverter.ToWriteableBitmap(img, PixelFormats.Bgr24));
                        // Blink the indicator: yellow every 200th frame, magenta 100 frames later.
                        if (k % 200 == 0)
                        {
                            deng.Fill = new SolidColorBrush(Color.FromRgb(255, 255, 0));
                            Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd-HH-mm-ss-fffffff"));
                        }
                        if (k % 200 == 100)
                        {
                            deng.Fill = new SolidColorBrush(Color.FromRgb(255, 0, 255));
                            Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd-HH-mm-ss-fffffff"));
                        }
                    }
                    catch (Exception e)
                    {
                        // NOTE(review): swallows all UI-update errors silently — consider logging.
                    }
                }));
            }
            //stop = true;
            Thread.Sleep(1000 / fps); // pace the display loop to the target frame rate
        }
        cap.Dispose();
        Console.WriteLine(k + "==");
    }).Start();
}
/// <summary>
/// Starts the capture loop on a background thread: grabs frames, shows them on the GUI,
/// runs ProcessImage, and reports the processing rate (FPS) on the GUI.
/// </summary>
private void Start(object sender, RoutedEventArgs e)
{
    testing = true;
    captureImg = delegate()
    {
        IplImage img;
        while (testing)
        {
            // Stopwatch is a handy and accurate way to measure execution time.
            Stopwatch sw = new Stopwatch();
            img = Cv.QueryFrame(capture);
            // Interact with the GUI from this worker thread via the dispatcher.
            Dispatcher.Invoke(DispatcherPriority.Render, new Action(() =>
            {
                imageFrame.Source = WriteableBitmapConverter.ToWriteableBitmap(img);
            }));
            sw.Start();
            ProcessImage(img);
            sw.Stop();
            // Calculate FPS and show it on the GUI.
            Dispatcher.Invoke(DispatcherPriority.Render, new Action(() =>
            {
                // ElapsedMilliseconds is the total elapsed time; the original read
                // Elapsed.Milliseconds, the 0-999ms component, which wraps every
                // second and misreports FPS for any frame slower than 1s.
                long ms = sw.ElapsedMilliseconds;
                if (ms != 0)
                {
                    FPS.Text = Convert.ToString((Convert.ToInt32((1000.0 / ms) * 10000.0)) / 10000.0);
                }
            }));
        }
    };
    thread = new Thread(captureImg);
    thread.Start();
}
/// <summary>
/// Reads frames from the capture until cancellation or end of stream, converting each
/// frame to a frozen WriteableBitmap and reporting it through <paramref name="progress"/>.
/// </summary>
/// <param name="progress">Sink receiving each processed frame.</param>
/// <param name="token">Cancellation token that stops the read loop.</param>
public async Task ReadFrame(IProgress <ProcessedFrame> progress, CancellationToken token)
{
    await Task.Run(async() =>
    {
        while (!(token.IsCancellationRequested || ended))
        {
            using (frameMat = videoCapture.RetrieveMat())
            {
                ended = frameMat.Empty();
                // Skip the empty end-of-stream frame; the original still converted it.
                if (!ended)
                {
                    var result = WriteableBitmapConverter.ToWriteableBitmap(frameMat);
                    // Freeze so the bitmap can cross to the UI thread.
                    result.Freeze();
                    progress.Report(new ProcessedFrame { Bitmap = result });
                }
            }
            // Yield between frames; the original awaited only once, after the loop exited.
            await Task.Delay(5);
        }
        Console.WriteLine("done");
    });
}
// Smoke test: loads an image whose width is not a multiple of 4, converts it,
// and displays it in a WPF window.
public void Run()
{
    using var source = new Mat(FilePath.Image.Fruits, ImreadModes.Color); // width % 4 != 0
    var bitmap = WriteableBitmapConverter.ToWriteableBitmap(source);

    var view = new System.Windows.Controls.Image { Source = bitmap };
    var shell = new System.Windows.Window { Content = view };
    new Application().Run(shell);
}
// Runs each time the timer interval elapses.
void dispatcherTimer_Tick(object sender, EventArgs e)
{
    // Grab an image from the camera.
    IplImage frame = Cv.QueryFrame(camera);
    // Crop a 320x240 region out of the 640x480 image.
    // NOTE(review): the capture size is (re)set AFTER the frame was already grabbed,
    // so it only affects subsequent ticks — confirm this ordering is intended.
    // NOTE(review): frame is never disposed here; verify ownership with QueryFrame.
    double w = 640, h = 480;
    Cv.SetCaptureProperty(camera, CaptureProperty.FrameWidth, w);
    Cv.SetCaptureProperty(camera, CaptureProperty.FrameHeight, h);
    face = trimming(frame, (int)w / 4, (int)h / 4, (int)image.Width, (int)image.Height);
    // Paste the frame image onto the control.
    image.Source = WriteableBitmapConverter.ToWriteableBitmap(face);
    // Overlay the emotion-recognition result.
    IplImage stamp = stamps[emotionL];
    image1.Source = WriteableBitmapConverter.ToWriteableBitmap(stamp);
}
/// <summary>
/// Loads INPUT_FILE, downsamples it, computes a morphological gradient and an
/// Otsu-binarized version, and displays the original, gradient, and binary images.
/// </summary>
private void DrawImages()
{
    // using releases native Mat memory (the original leaked every Mat, and also
    // created an unused 'connected' Mat). ToWriteableBitmap copies the pixels,
    // so the displayed bitmaps remain valid after disposal.
    using (Mat large = new Mat(INPUT_FILE))
    using (Mat rgb = new Mat())
    using (Mat small = new Mat())
    using (Mat grad = new Mat())
    using (Mat bw = new Mat())
    {
        // Downsample and convert to grayscale for processing.
        Cv2.PyrDown(large, rgb);
        Cv2.CvtColor(rgb, small, ColorConversionCodes.BGR2GRAY);
        // Morphological gradient.
        var morphKernel = Cv2.GetStructuringElement(MorphShapes.Ellipse, new OpenCvSharp.Size(3, 3));
        Cv2.MorphologyEx(small, grad, MorphTypes.Gradient, morphKernel);
        // Binarize with Otsu's threshold.
        Cv2.Threshold(grad, bw, 0, 255, ThresholdTypes.Binary | ThresholdTypes.Otsu);

        imgReceipt0.Source = WriteableBitmapConverter.ToWriteableBitmap(large);
        imgReceipt1.Source = WriteableBitmapConverter.ToWriteableBitmap(grad);
        imgReceipt2.Source = WriteableBitmapConverter.ToWriteableBitmap(bw);
    }
}