/// <summary>
/// Decodes an image <see cref="Stream"/> into a <see cref="SoftwareBitmap"/> with
/// premultiplied alpha. The stream is buffered into memory before decoding.
/// </summary>
/// <param name="stream">Readable, seekable image stream. Ownership is taken: the stream
/// is disposed before this method returns (matching the original behavior).</param>
/// <returns>A copy of the decoded bitmap that does not depend on any of the
/// intermediate streams.</returns>
public async Task<SoftwareBitmap> ConvertStreamToBitmap(Stream stream)
{
    using (stream)
    using (Stream newStream = new MemoryStream())
    {
        stream.Seek(0, SeekOrigin.Begin);
        // Copy asynchronously instead of blocking the caller (the original used the
        // synchronous CopyTo inside an async method).
        await stream.CopyToAsync(newStream);
        newStream.Seek(0, SeekOrigin.Begin);
        using (var randStream = newStream.AsRandomAccessStream())
        {
            BitmapDecoder decoder = await BitmapDecoder.CreateAsync(randStream);
            // Keep the decoder's native pixel format; normalize alpha for XAML display.
            // The decoded bitmap is disposed here (the original leaked it), and the
            // explicit Dispose() calls the original made inside each using block
            // (redundant double-dispose) are gone — `using` alone is sufficient.
            using (SoftwareBitmap result = await decoder.GetSoftwareBitmapAsync(decoder.BitmapPixelFormat, BitmapAlphaMode.Premultiplied))
            {
                // Return a copy so the caller's bitmap outlives the backing stream.
                return SoftwareBitmap.Copy(result);
            }
        }
    }
}
/// <summary>
/// Converts an incoming media frame to a displayable bitmap, hands it to the UI through
/// a swap back-buffer drained on the dispatcher, and returns an independent copy.
/// </summary>
/// <param name="frame">Latest frame from a MediaFrameReader; may be null.</param>
/// <returns>A private copy of the displayable bitmap, or null when conversion fails.</returns>
public SoftwareBitmap ProcessFrame(MediaFrameReference frame)
{
    var softwareBitmap = FrameRenderer.ConvertToDisplayableImage(frame?.VideoMediaFrame);
    // Take the caller's copy before the original is handed off to the back buffer.
    var softwareBitmap2 = softwareBitmap != null ? SoftwareBitmap.Copy(softwareBitmap) : null;
    if (softwareBitmap != null)
    {
        // Swap the new bitmap in; Exchange returns the previous, not-yet-displayed frame.
        softwareBitmap = Interlocked.Exchange(ref _backBuffer, softwareBitmap);
        // Dispose the superseded frame. The original leaked it; the reference
        // MediaFrameReader sample disposes the swapped-out bitmap at this point.
        softwareBitmap?.Dispose();
        var task = _imageElement.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
        {
            // _taskRunning gate: only one drain loop at a time on the UI thread.
            if (_taskRunning)
            {
                return;
            }
            _taskRunning = true;
            SoftwareBitmap latestBitmap;
            // Keep presenting until no newer frame has been swapped in.
            while ((latestBitmap = Interlocked.Exchange(ref _backBuffer, null)) != null)
            {
                var imageSource = (SoftwareBitmapSource)_imageElement.Source;
                await imageSource.SetBitmapAsync(latestBitmap);
                latestBitmap.Dispose();
            }
            _taskRunning = false;
        });
    }
    return (softwareBitmap2);
}
/// <summary>
/// Page entry point: unpacks the navigation tuple, loads the selected photo plus a backup
/// copy, binds the preview to the cropper, and populates the effects pane.
/// </summary>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    try
    {
        if (e.Parameter is Tuple<Frame, object> navArgs)
        {
            FileControlNav = navArgs.Item1;
            PhotoDisplaySupport photoItem = navArgs.Item2 as PhotoDisplaySupport;

            OriginFile = (await photoItem.PhotoFile.GetStorageItem().ConfigureAwait(true)) as StorageFile;
            OriginImage = await photoItem.GenerateImageWithRotation().ConfigureAwait(true);
            // Keep an untouched copy so edits can be reverted later.
            OriginBackupImage = SoftwareBitmap.Copy(OriginImage);

            WriteableBitmap previewBitmap = new WriteableBitmap(OriginImage.PixelWidth, OriginImage.PixelHeight);
            OriginImage.CopyToBuffer(previewBitmap.PixelBuffer);
            Cropper.Source = previewBitmap;
            UnchangeRegion = Cropper.CroppedRegion;

            await AddEffectsToPane().ConfigureAwait(false);
        }
    }
    catch (Exception ex)
    {
        ExceptionTracer.RequestBlueScreen(ex);
    }
}
/// <summary>
/// Stores a copy of <paramref name="input"/> as the pending sample for face detection,
/// resolving a supported pixel format on first use.
/// </summary>
/// <param name="input">Frame to sample; the caller retains ownership (a copy is stored).</param>
internal void SetSample(SoftwareBitmap input)
{
    if (!Activated)
    {
        return;
    }
    if (Sample == null)
    {
        lock (SampleLock)
        {
            // Re-check under the lock: two racing callers could both pass the unlocked
            // check above, and the original then overwrote (and leaked) the first
            // caller's copy. Classic double-checked pattern.
            if (Sample != null)
            {
                return;
            }
            if (!SupportedBitmapPixelFormat.HasValue)
            {
                // Prefer the incoming frame's own format when the detector accepts it,
                // otherwise fall back to the detector's first supported format.
                if (FaceDetector.IsBitmapPixelFormatSupported(input.BitmapPixelFormat))
                {
                    SupportedBitmapPixelFormat = input.BitmapPixelFormat;
                }
                else
                {
                    SupportedBitmapPixelFormat = FaceDetector.GetSupportedBitmapPixelFormats().First();
                }
            }
            Sample = SoftwareBitmap.Copy(input);
        }
    }
}
/// <summary>
/// Crops a BGRA bitmap to the bounding box of its non-transparent pixels. Falls back to
/// returning an unmodified copy when the input has no alpha channel or when OpenCV
/// processing fails.
/// </summary>
/// <param name="Input">Source bitmap; not modified or disposed.</param>
/// <returns>A new bitmap containing only the used region, or a copy of the input.</returns>
public static SoftwareBitmap ResizeToActual(SoftwareBitmap Input)
{
    using (Mat inputMat = Input.SoftwareBitmapToMat())
    {
        Mat[] Channels = Array.Empty<Mat>();
        try
        {
            Channels = Cv2.Split(inputMat);
            if (Channels.Length == 4)
            {
                // Bounding box of all pixels whose alpha channel is non-zero.
                Mat Contour = Channels.Last().FindNonZero();
                Rect Area = Cv2.BoundingRect(Contour);
                return inputMat[Area].Clone().MatToSoftwareBitmap();
            }
            // The original threw an ArgumentException here ("Input must have be BGRA
            // image") which was immediately swallowed by the catch below and turned
            // into a copy anyway; return the fallback copy directly instead of the
            // dead (and misspelled) throw.
            return SoftwareBitmap.Copy(Input);
        }
        catch
        {
            // Best-effort: any OpenCV failure yields an unmodified copy.
            return SoftwareBitmap.Copy(Input);
        }
        finally
        {
            // Split allocates one Mat per channel; release them all.
            Array.ForEach(Channels, (Channel) => Channel.Dispose());
        }
    }
}
/// <summary>
/// The callback that is triggered when new video preview frame arrives. In this function,
/// video frame is saved for Unity UI if videoPreview is enabled, tracking task is triggered
/// in this function call, and video FPS is recorded. [internal use]
/// </summary>
/// <param name="sender">MediaFrameReader object</param>
/// <param name="args">arguments not used here</param>
private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    ARUWPUtils.VideoTick(); // record video FPS
    using (var frame = sender.TryAcquireLatestFrame())
    {
        if (frame != null)
        {
            float[] cameraToWorldMatrixAsFloat;
            // Skip the frame entirely when the camera pose cannot be resolved.
            if (TryGetCameraToWorldMatrix(frame, out cameraToWorldMatrixAsFloat) == false)
            {
                return;
            }
            // Publish the pose atomically for consumers on other threads.
            Interlocked.Exchange(ref _cameraToWorldMatrix, cameraToWorldMatrixAsFloat);
            var originalSoftwareBitmap = frame.VideoMediaFrame.SoftwareBitmap;
            // Tracking expects RGBA8; alpha content is irrelevant here.
            var softwareBitmap = SoftwareBitmap.Convert(originalSoftwareBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);
            originalSoftwareBitmap?.Dispose();
            if (videoPreview)
            {
                // NOTE(review): the previous _bitmap returned by Exchange is not disposed
                // here — presumably the preview consumer disposes it; confirm.
                Interlocked.Exchange(ref _bitmap, softwareBitmap);
                // Tracker gets its own copy since the preview path keeps softwareBitmap.
                controller.ProcessFrameAsync(SoftwareBitmap.Copy(softwareBitmap));
            }
            else
            {
                controller.ProcessFrameAsync(softwareBitmap);
            }
            signalTrackingUpdated = true;
        }
    }
}
//<SnippetOpenCVFrameArrived>
/// <summary>
/// Frame-arrived handler: normalizes the latest frame to premultiplied BGRA8, blurs it
/// with the OpenCV helper, and presents the result.
/// </summary>
private void ColorFrameReader_FrameArrived_OpenCV(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // Dispose the frame reference when done (the original never disposed it, keeping
    // the reader's buffer alive longer than necessary).
    using (var mediaFrameReference = sender.TryAcquireLatestFrame())
    {
        var inputBitmap = mediaFrameReference?.VideoMediaFrame?.SoftwareBitmap;
        if (inputBitmap == null)
        {
            return;
        }
        //The XAML Image control can only display images in BRGA8 format with premultiplied or no alpha
        SoftwareBitmap openCVInputBitmap;
        if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Bgra8 &&
            inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
        {
            openCVInputBitmap = SoftwareBitmap.Copy(inputBitmap);
        }
        else
        {
            openCVInputBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
        }
        // The input copy is ours and must be released (the original leaked it); the
        // output is handed to the renderer, which presents it.
        using (openCVInputBitmap)
        {
            SoftwareBitmap openCVOutputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, openCVInputBitmap.PixelWidth, openCVInputBitmap.PixelHeight, BitmapAlphaMode.Premultiplied);
            // operate on the image and render it
            openCVHelper.Blur(openCVInputBitmap, openCVOutputBitmap);
            _frameRenderer.PresentSoftwareBitmap(openCVOutputBitmap);
        }
    }
}
/// <summary>
/// Frame-arrived handler: normalizes the frame to premultiplied BGRA8, runs the image
/// processing helper, kicks off OCR on a copy, and publishes preview/processed copies
/// before raising <c>OnFrameProcessed</c>.
/// </summary>
private void HandleFrameArrive(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // Dispose the frame reference when done (the original leaked it).
    using (var frame = sender.TryAcquireLatestFrame())
    {
        var inputBitmap = frame?.VideoMediaFrame?.SoftwareBitmap;
        if (inputBitmap == null)
        {
            return;
        }
        // Both intermediates are released at the end of this block (the original leaked
        // them); only independent copies escape this method.
        using (SoftwareBitmap originalBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied))
        using (SoftwareBitmap outputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, originalBitmap.PixelWidth, originalBitmap.PixelHeight, BitmapAlphaMode.Premultiplied))
        {
            _helper.Process(originalBitmap, outputBitmap);
            // GetOCRAsync receives its own private copy (fire-and-forget).
            var localBitmap = SoftwareBitmap.Copy(outputBitmap);
            GetOCRAsync(localBitmap);
            // NOTE(review): previous Latest* bitmaps are overwritten without disposal —
            // confirm consumers take ownership of them.
            LatestPreviewFrame = SoftwareBitmap.Copy(originalBitmap);
            LatestProcessedFrame = SoftwareBitmap.Copy(outputBitmap);
            OnFrameProcessed(new FrameHandlerEventArgs());
        }
    }
}
/// <summary>
/// Runs Windows OCR on a private copy of the bitmap and stores any recognized text in
/// <c>WinOCRResult</c>. Re-entrant calls while a pass is running are dropped.
/// </summary>
/// <param name="inputBitmap">Bitmap to recognize; the caller retains ownership.</param>
private async void GetOCRAsync(SoftwareBitmap inputBitmap)
{
    // Single-flight gate: drop the frame if a recognition pass is already running.
    if (_ocrRunning)
    {
        return;
    }
    _ocrRunning = true;
    try
    {
        // Work on our own copy so the caller may dispose/reuse its bitmap immediately;
        // the copy is released when done (the original leaked it).
        using (SoftwareBitmap localBitmap = SoftwareBitmap.Copy(inputBitmap))
        {
            var ocrEngine = OcrEngine.TryCreateFromUserProfileLanguages();
            // TryCreate returns null when no profile language supports OCR; the original
            // would have thrown a NullReferenceException here.
            if (ocrEngine == null)
            {
                return;
            }
            var recognizeAsync = await ocrEngine.RecognizeAsync(localBitmap);
            var str = new StringBuilder();
            foreach (var ocrLine in recognizeAsync.Lines)
            {
                str.AppendLine(ocrLine.Text);
            }
            var readText = str.ToString();
            if (readText != "")
            {
                WinOCRResult = readText;
            }
        }
    }
    finally
    {
        // Reset the gate even when OCR throws — the original left _ocrRunning stuck
        // at true after any exception, permanently disabling OCR.
        _ocrRunning = false;
    }
}
/// <summary>
/// Snapshot of a video frame: illumination flag (infrared sources only), source kind,
/// timestamp, and a private copy of the frame's bitmap.
/// </summary>
/// <param name="frame">Frame to snapshot.
/// NOTE(review): assumes frame.SoftwareBitmap is non-null — Copy would throw otherwise;
/// confirm the frame reader is configured to deliver CPU bitmaps.</param>
internal ExampleMediaFrame(VideoMediaFrame frame)
{
    // Null for non-infrared sources.
    IsIlluminated = frame.InfraredMediaFrame?.IsIlluminated;
    SourceKind = frame.FrameReference.SourceKind;
    SystemRelativeTime = frame.FrameReference.SystemRelativeTime;
    // Copy so this snapshot outlives the frame's own lifetime.
    SoftwareBitmap = SoftwareBitmap.Copy(frame.SoftwareBitmap);
}
/// <summary>
/// Loads the picked file into SelectedBitmap (normalized to premultiplied BGRA8), copies
/// it to OutputBitmap, shows both in the UI, and enables the editing controls.
/// </summary>
/// <param name="file">Image file to load; also remembered as EditingImageFile.</param>
private async Task SetImageAsync(StorageFile file)
{
    ShowProgressBar("FlyoutLoading");
    using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read))
    {
        BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);
        SelectedBitmap = await decoder.GetSoftwareBitmapAsync();
    }
    // XAML can only display Bgra8 with premultiplied (or no) alpha.
    if (SelectedBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8 || SelectedBitmap.BitmapAlphaMode != BitmapAlphaMode.Premultiplied)
    {
        SelectedBitmap = SoftwareBitmap.Convert(SelectedBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
    }
    // Output is a copy of Selected and the source is the same for the two views
    var bitmapSource = new SoftwareBitmapSource();
    await bitmapSource.SetBitmapAsync(SelectedBitmap);
    OutputBitmap = SoftwareBitmap.Copy(SelectedBitmap);
    EditingImageFile = file;
    SelectedImage.Source = bitmapSource;
    // NOTE(review): this second SetBitmapAsync overwrites the first on the SAME source
    // object, so both Image controls display OutputBitmap. The bitmaps are identical
    // copies at this point, but confirm the sharing is intended.
    await bitmapSource.SetBitmapAsync(OutputBitmap);
    ResultImage.Source = bitmapSource;
    SaveButton.IsEnabled = true;
    ResetButton.IsEnabled = false;
    EffectActionsGrid.Visibility = Visibility.Visible;
    HideProgressBar();
}
/// <summary>
/// Applies the automatic color enhancement filter to the original image, refreshes the
/// cropper preview and histogram, and resets the alpha/beta sliders without firing
/// their change handlers.
/// </summary>
private void AutoOptimizeButton_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
{
    // Drop any previously applied filter result before computing a new one.
    if (FilterImage != null)
    {
        FilterImage.Dispose();
        FilterImage = null;
        FilterBackupImage.Dispose();
        FilterBackupImage = null;
    }

    // Reset the sliders silently: detach handlers, restore defaults, reattach.
    AlphaSlider.ValueChanged -= AlphaSlider_ValueChanged;
    BetaSlider.ValueChanged -= BetaSlider_ValueChanged;
    AlphaSlider.Value = 1;
    BetaSlider.Value = 0;
    AlphaSlider.ValueChanged += AlphaSlider_ValueChanged;
    BetaSlider.ValueChanged += BetaSlider_ValueChanged;

    FilterImage = ComputerVisionProvider.AutoColorEnhancement(OriginImage);
    FilterBackupImage = SoftwareBitmap.Copy(FilterImage);

    WriteableBitmap filterPreview = new WriteableBitmap(OriginImage.PixelWidth, OriginImage.PixelHeight);
    FilterImage.CopyToBuffer(filterPreview.PixelBuffer);
    Cropper.Source = filterPreview;

    // Recompute and display the histogram of the enhanced image.
    using (SoftwareBitmap histogram = ComputerVisionProvider.CalculateHistogram(FilterImage))
    {
        WriteableBitmap histogramPreview = new WriteableBitmap(histogram.PixelWidth, histogram.PixelHeight);
        histogram.CopyToBuffer(histogramPreview.PixelBuffer);
        HistogramImage.Source = histogramPreview;
    }

    ResetButton.IsEnabled = true;
}
/// <summary>
/// Crops a BGRA bitmap to the bounding box of its non-transparent pixels, padded by five
/// pixels on each side and clamped to the image bounds. Falls back to an unmodified copy
/// when the input has no alpha channel or OpenCV processing fails.
/// </summary>
/// <param name="Input">Source bitmap; not modified or disposed.</param>
/// <returns>A new cropped bitmap, or a copy of the input on failure.</returns>
public static SoftwareBitmap ResizeToActual(SoftwareBitmap Input)
{
    using (Mat inputMat = Input.SoftwareBitmapToMat())
    {
        Mat[] Channels = Array.Empty<Mat>();
        try
        {
            Channels = Cv2.Split(inputMat);
            if (Channels.Length == 4)
            {
                // Bounding box of all pixels whose alpha channel is non-zero.
                Mat Contour = Channels.Last().FindNonZero();
                Rect ActualArea = Cv2.BoundingRect(Contour);
                // Pad the box by 5px per side and clamp to the image. The original
                // clamped width/height against ActualArea.X/Y instead of the padded
                // origin, so the right/bottom edge could overrun the image by up to
                // 5px, making the ROI indexer throw and silently degrading to the
                // plain-copy fallback.
                int X = Math.Max(ActualArea.X - 5, 0);
                int Y = Math.Max(ActualArea.Y - 5, 0);
                int Width = Math.Min(ActualArea.X + ActualArea.Width + 5, inputMat.Width) - X;
                int Height = Math.Min(ActualArea.Y + ActualArea.Height + 5, inputMat.Height) - Y;
                Rect ExtraArea = new Rect(X, Y, Width, Height);
                return inputMat[ExtraArea].Clone().MatToSoftwareBitmap();
            }
            // The original threw an ArgumentException here, but it was immediately
            // swallowed by the catch below; return the fallback copy directly.
            return SoftwareBitmap.Copy(Input);
        }
        catch
        {
            // Best-effort: any OpenCV failure yields an unmodified copy.
            return SoftwareBitmap.Copy(Input);
        }
        finally
        {
            Array.ForEach(Channels, (Channel) => Channel.Dispose());
        }
    }
}
/// <summary>
/// Commits the current edit: displays OutputBitmap in the selected-image view and
/// replaces SelectedBitmap with a copy of it.
/// </summary>
private async void SaveChangeButtonClick(object sender, RoutedEventArgs e)
{
    var committedSource = new SoftwareBitmapSource();
    await committedSource.SetBitmapAsync(OutputBitmap);
    // The selection becomes an independent copy of the edited output.
    SelectedBitmap = SoftwareBitmap.Copy(OutputBitmap);
    SelectedImage.Source = committedSource;
}
//################################################################################
/// <summary>
/// Returns a copy of the frame previously stored by <see cref="CaptureFrame"/>.
/// </summary>
/// <returns>A defensive copy of the cached frame, or null when no frame was captured.</returns>
public SoftwareBitmap GetFrame()
{
    // Hand out a copy so callers cannot dispose or mutate the cached frame.
    return _frame == null ? null : SoftwareBitmap.Copy(_frame);
}
//#############################################################################################
/// <summary>
/// Returns the last available frame as a SoftwareBitmap that can be manipulated freely.
/// </summary>
/// <returns>A copy of the latest frame, or null when no frame could be read.</returns>
public SoftwareBitmap GetFrame()
{
    // ReadFrame refreshes the internal buffer; return a copy so callers do not
    // contend with internal I/O on the shared _frame instance.
    return ReadFrame() ? SoftwareBitmap.Copy(_frame) : null;
}
/// <summary>
/// Converts a frame to a SoftwareBitmap of a valid format to display in an Image control.
/// </summary>
/// <param name="inputFrame">Frame to convert; may be null.</param>
/// <returns>A displayable bitmap, or null when the frame is empty or unconvertible.</returns>
public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
{
    SoftwareBitmap result = null;
    using (var inputBitmap = inputFrame?.SoftwareBitmap)
    {
        if (inputBitmap == null)
        {
            return null;
        }
        switch (inputBitmap.BitmapPixelFormat)
        {
            case BitmapPixelFormat.Bgra8 when inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied:
                // Already displayable in an Image control; just hand back a copy.
                result = SoftwareBitmap.Copy(inputBitmap);
                break;
            case BitmapPixelFormat.Gray16 when inputFrame.FrameReference.SourceKind == MediaFrameSourceKind.Depth:
                // Depth frames get a scale-aware pseudo-color rendering.
                var depthScale = (float)inputFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters;
                result = TransformBitmap(inputBitmap, (w, i, o) => PseudoColorHelper.PseudoColorForDepth(w, i, o, depthScale));
                break;
            case BitmapPixelFormat.Gray16:
                // 16-bit infrared: pseudo-color rendering.
                result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor16BitInfrared);
                break;
            case BitmapPixelFormat.Gray8:
                // 8-bit infrared: pseudo-color rendering.
                result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor8BitInfrared);
                break;
            default:
                // Includes Bgra8 without premultiplied alpha: let the platform convert.
                try
                {
                    result = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                }
                catch (ArgumentException exception)
                {
                    // Conversion of software bitmap format is not supported. Drop this frame.
                    System.Diagnostics.Debug.WriteLine(exception.Message);
                }
                break;
        }
    }
    return result;
}
/// <summary>
/// Converts a frame to a SoftwareBitmap of a valid format to display in an Image control.
/// </summary>
/// <param name="inputFrame">Frame to convert.</param>
/// <returns>A displayable bitmap, or null when the frame is empty or conversion fails.</returns>
public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
{
    SoftwareBitmap result = null;
    using (var inputBitmap = inputFrame?.SoftwareBitmap)
    {
        if (inputBitmap != null)
        {
            if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Bgra8 &&
                inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
            {
                // SoftwareBitmap is already in the correct format for an Image control, so just return a copy.
                result = SoftwareBitmap.Copy(inputBitmap);
            }
            else if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray16)
            {
                // Distinguish depth ("D16") from 16-bit infrared by the media subtype.
                string subtype = inputFrame.VideoFormat.MediaFrameFormat.Subtype;
                if (string.Equals(subtype, "D16", StringComparison.OrdinalIgnoreCase))
                {
                    // Use a special pseudo color to render 16 bits depth frame.
                    result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorForDepth);
                }
                else
                {
                    // Use pseudo color to render 16 bits frames.
                    result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor16BitInfrared);
                }
            }
            else if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray8)
            {
                // Use pseudo color to render 8 bits frames.
                result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor8BitInfrared);
            }
            else
            {
                try
                {
                    // Convert to Bgra8 Premultiplied SoftwareBitmap, so xaml can display in UI.
                    result = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                }
                catch (ArgumentException exception)
                {
                    // Conversion of software bitmap format is not supported. Drop this frame.
                    System.Diagnostics.Debug.WriteLine(exception.Message);
                }
            }
        }
    }
    return (result);
}
/// <summary>
/// Prepares a color frame for XAML display: requires Bgra8 input and returns a bitmap
/// with premultiplied alpha (copying when already premultiplied, converting otherwise).
/// </summary>
/// <param name="inputBitmap">Candidate color frame.</param>
/// <returns>A displayable bitmap, or null when the input is not Bgra8.</returns>
static SoftwareBitmap ConvertColorFrame(SoftwareBitmap inputBitmap)
{
    // XAML requires Bgra8 with premultiplied alpha.
    if (inputBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8)
    {
        return null;
    }
    return inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied
        ? SoftwareBitmap.Copy(inputBitmap)
        : SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
}
// https://github.com/MarekKowalski/HoloFace/blob/master/HoloFace/Assets/HololensCameraUWP.cs
/// <summary>
/// Frame-arrived handler: computes the webcam-to-world and projection matrices for the
/// latest frame and raises <c>FrameReady</c> with a copy of the frame's bitmap.
/// </summary>
private void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (var frame = sender.TryAcquireLatestFrame())
    {
        // https://docs.microsoft.com/en-us/windows/mixed-reality/locatable-camera
        var coordinateSystem = frame?.CoordinateSystem;
        var cameraIntrinsics = frame?.VideoMediaFrame?.CameraIntrinsics;
        // Guard: the original dereferenced these unconditionally right after the
        // null-conditional reads, crashing when the frame carried no coordinate
        // system or intrinsics.
        if (coordinateSystem == null || cameraIntrinsics == null)
        {
            return;
        }
        var ht = coordinateSystem.TryGetTransformTo(originalFrameOfReference.CoordinateSystem);
        // Fall back to identity components when the transform is unavailable; the
        // third row is negated to flip handedness.
        Matrix4 webcamToWorldMatrix = new Matrix4(
            ht?.M11 ?? 1, ht?.M21 ?? 0, ht?.M31 ?? 0, ht?.Translation.X ?? 0,
            ht?.M12 ?? 0, ht?.M22 ?? 1, ht?.M32 ?? 0, ht?.Translation.Y ?? 0,
            -ht?.M13 ?? 0, -ht?.M23 ?? 0, -ht?.M33 ?? 1, -ht?.Translation.Z ?? 0,
            0, 0, 0, 1);
        using (var bitmap = frame.VideoMediaFrame?.SoftwareBitmap)
        {
            if (bitmap == null)
            {
                return;
            }
            Width = bitmap.PixelWidth;
            Height = bitmap.PixelHeight;
            // Build a projection matrix from the camera intrinsics.
            var projectionMatrix = new Matrix4();
            projectionMatrix.M11 = 2 * cameraIntrinsics.FocalLength.X / Width;
            projectionMatrix.M22 = 2 * cameraIntrinsics.FocalLength.Y / Height;
            projectionMatrix.M13 = -2 * (cameraIntrinsics.PrincipalPoint.X - Width / 2) / Width;
            projectionMatrix.M23 = 2 * (cameraIntrinsics.PrincipalPoint.Y - Height / 2) / Height;
            projectionMatrix.M33 = -1;
            projectionMatrix.M44 = -1;
            ProjectionMatrix = projectionMatrix;
            // Copy so the bitmap survives the frame's disposal; subscribers take ownership.
            var copy = SoftwareBitmap.Copy(bitmap);
            FrameReady?.Invoke(new FrameData() { bitmap = copy, webcamToWorldMatrix = webcamToWorldMatrix, projectionMatrix = projectionMatrix });
        }
    }
}
/// <summary>
/// The callback that is triggered when new video preview frame arrives. In this function,
/// video frame is saved for Unity UI if videoPreview is enabled, tracking task is triggered
/// in this function call, and video FPS is recorded. [internal use]
/// </summary>
/// <param name="sender">MediaFrameReader object</param>
/// <param name="args">arguments not used here</param>
private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    ARUWPUtils.VideoTick(); // record video FPS
    using (var frame = sender.TryAcquireLatestFrame())
    {
        if (frame != null)
        {
            // Tracking expects RGBA8; alpha content is irrelevant here.
            var softwareBitmap = SoftwareBitmap.Convert(frame.VideoMediaFrame.SoftwareBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);
            if (videoPreview)
            {
                // NOTE(review): the previous _bitmap returned by Exchange is not disposed
                // here — presumably the preview consumer disposes it; confirm.
                Interlocked.Exchange(ref _bitmap, softwareBitmap);
                // Tracker gets its own copy since the preview path keeps softwareBitmap.
                controller.ProcessFrameAsync(SoftwareBitmap.Copy(softwareBitmap));
            }
            else
            {
                controller.ProcessFrameAsync(softwareBitmap);
            }
            signalTrackingUpdated = true;
        }
    }
}
/// <summary>
/// Grabs the latest frame and runs OCR over it, updating <paramref name="ocrDeviceResult"/>
/// in place: on a regex match the matched text is recorded and any cached bitmap is
/// released; otherwise the frame with the most text seen so far is cached for a later
/// cloud call.
/// </summary>
/// <param name="reader">Source of camera frames.</param>
/// <param name="ocrEngine">Engine used to recognize text.</param>
/// <param name="ocrDeviceResult">Accumulated best-result state, mutated by this call.</param>
async Task OcrProcessFrameAsync(
    MediaFrameReader reader,
    OcrEngine ocrEngine,
    DeviceOcrResult ocrDeviceResult)
{
    using (var frame = reader.TryAcquireLatestFrame())
    {
        if (frame?.VideoMediaFrame != null)
        {
            // NOTE(review): this takes ownership of the frame's bitmap and disposes it;
            // assumes nothing else reads frame.VideoMediaFrame.SoftwareBitmap afterwards.
            using (var bitmap = frame.VideoMediaFrame.SoftwareBitmap)
            {
                var result = await ocrEngine.RecognizeAsync(bitmap);
                if (result?.Text != null)
                {
                    var matchingResults = this.matchExpression.Matches(result.Text);
                    var matched = matchingResults?.Count > 0;
                    if (matched)
                    {
                        // We take the first one, we don't do multiple (yet).
                        ocrDeviceResult.MatchedText = matchingResults[0].Value;
                        ocrDeviceResult.ResultType = OcrMatchResult.Succeeded;
                        // A definitive match makes the cached "best so far" bitmap unnecessary.
                        ocrDeviceResult.BestOcrSoftwareBitmap?.Dispose();
                        ocrDeviceResult.BestOcrSoftwareBitmap = null;
                    }
                    else if (result.Text.Length > ocrDeviceResult.BestOcrTextLengthFound)
                    {
                        // No match yet: keep a copy of the richest frame seen so far
                        // for a potential cloud retry, replacing the previous best.
                        ocrDeviceResult.BestOcrTextLengthFound = result.Text.Length;
                        ocrDeviceResult.BestOcrSoftwareBitmap?.Dispose();
                        ocrDeviceResult.BestOcrSoftwareBitmap = SoftwareBitmap.Copy(bitmap);
                        ocrDeviceResult.ResultType = OcrMatchResult.TimedOutCloudCallAvailable;
                    }
                }
            }
        }
    }
}
/// <summary>
/// Crops a bitmap to the bounding box of its bright content: converts to grayscale,
/// thresholds at 100, and takes the bounding rectangle of the first external contour.
/// Returns an unmodified copy when no contour is found or OpenCV fails.
/// </summary>
/// <param name="Input">Source bitmap; not modified or disposed.</param>
/// <returns>A cropped bitmap, or a copy of the input.</returns>
public static SoftwareBitmap ResizeToActual(SoftwareBitmap Input)
{
    using (Mat inputMat = Input.SoftwareBitmapToMat())
    using (Mat tempMat = new Mat(inputMat.Rows, inputMat.Cols, MatType.CV_8UC4))
    {
        Cv2.CvtColor(inputMat, tempMat, ColorConversionCodes.BGRA2GRAY);
        // Binarize: pixels brighter than 100 become content.
        Cv2.Threshold(tempMat, tempMat, 100, 255, ThresholdTypes.Binary);
        using (Mat Hie = new Mat(tempMat.Size(), tempMat.Type()))
        {
            Cv2.FindContours(tempMat, out Mat[] Result, Hie, RetrievalModes.External, ContourApproximationModes.ApproxNone);
            try
            {
                // NOTE(review): only the FIRST external contour is considered; if the
                // content splits into several bright regions this may crop too tightly
                // — confirm that is intended.
                if (Result.FirstOrDefault() is Mat Contour)
                {
                    Rect Area = Cv2.BoundingRect(Contour);
                    return (inputMat[Area].Clone().MatToSoftwareBitmap());
                }
                else
                {
                    return (SoftwareBitmap.Copy(Input));
                }
            }
            catch
            {
                // Best-effort fallback on any OpenCV failure.
                return (SoftwareBitmap.Copy(Input));
            }
            finally
            {
                // FindContours allocates one Mat per contour; release them all.
                foreach (Mat Item in Result)
                {
                    Item.Dispose();
                }
            }
        }
    }
}
/// <summary>
/// Pipeline for one captured image: rotate 90 degrees, crop to the user-selected panel
/// region, measure brightness, mask/annotate the panel area, run the panel parser, and
/// render every intermediate stage while updating the diagnostic text on the UI thread.
/// </summary>
/// <param name="SourceBitmap">Camera frame to process.</param>
private async void ProcessImage(SoftwareBitmap SourceBitmap)
{
    // Rotated target swaps width and height.
    SoftwareBitmap RotatedSourceBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, SourceBitmap.PixelHeight, SourceBitmap.PixelWidth, BitmapAlphaMode.Premultiplied);
    _cvhelper.RotateImage(SourceBitmap, RotatedSourceBitmap, 90);
    // NOTE(review): PanelWidth/PanelHeight are computed but never used below — dead code?
    int PanelWidth = (int)Math.Abs(EndPoint.X - StartPoint.X);
    int PanelHeight = (int)Math.Abs(EndPoint.Y - StartPoint.Y);
    SoftwareBitmap CroppedBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, (int)PanelImageSize.Width, (int)PanelImageSize.Height, BitmapAlphaMode.Premultiplied);
    _cvhelper.CropImage(StartPoint, EndPoint, RotatedSourceBitmap, CroppedBitmap);
    BrightnessCalc.CalcBrightness(CroppedBitmap);
    SoftwareBitmap MaskedBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, (int)PanelImageSize.Width, (int)PanelImageSize.Height, BitmapAlphaMode.Premultiplied);
    _cvhelper.MaskAndDrawPanelArea(CroppedBitmap, MaskedBitmap);
    using (var ImgResult = PanelParser.ParsePanelImage(MaskedBitmap))
    {
        var FilterImg = ImgResult?.SelectResult(FilterSelectedBeadType);
        if (FilterImg != null)
        {
            // Renderer gets its own copy because ImgResult is disposed at block end.
            _ColorFilterRenderer.RenderFrame(SoftwareBitmap.Copy(FilterImg));
        }
    }
    _SourceRenderer.RenderFrame(RotatedSourceBitmap);
    _ClippedRenderer.RenderFrame(CroppedBitmap);
    // Marshal the diagnostic text updates onto the UI thread.
    await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.High, () =>
    {
        LightThresTextbox.Text = "Thres:" + (BrightnessCalc.Threshold?.ToString("0.00") ?? String.Empty);
        LightValueTextbox.Text = "Value:" + (BrightnessCalc.Latest?.ToString("0.00") ?? String.Empty);
        AnalyzedTextBlock.Text = PanelParser.GetParsedPanel().ToString() ?? String.Empty;
    });
}
/// <summary>
/// Restores the editor to its initial state: original image back in the cropper, histogram
/// recomputed, filter results released, sliders and filter grid reset without firing their
/// change handlers, and the crop region cleared.
/// </summary>
private void ResetButton_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
{
    AspList.SelectedIndex = 0;
    ResetButton.IsEnabled = false;

    // Swap the working image back to the untouched backup copy.
    OriginImage.Dispose();
    OriginImage = SoftwareBitmap.Copy(OriginBackupImage);

    WriteableBitmap restoredPreview = new WriteableBitmap(OriginImage.PixelWidth, OriginImage.PixelHeight);
    OriginImage.CopyToBuffer(restoredPreview.PixelBuffer);
    Cropper.Source = restoredPreview;

    // Recompute the histogram for the restored image.
    using (SoftwareBitmap histogram = ComputerVisionProvider.CalculateHistogram(OriginImage))
    {
        WriteableBitmap histogramPreview = new WriteableBitmap(histogram.PixelWidth, histogram.PixelHeight);
        histogram.CopyToBuffer(histogramPreview.PixelBuffer);
        HistogramImage.Source = histogramPreview;
    }

    // Release any filter results.
    FilterImage?.Dispose();
    FilterImage = null;
    FilterBackupImage?.Dispose();
    FilterBackupImage = null;

    // Reset the controls silently: detach handlers, restore defaults, reattach.
    AlphaSlider.ValueChanged -= AlphaSlider_ValueChanged;
    BetaSlider.ValueChanged -= BetaSlider_ValueChanged;
    FilterGrid.SelectionChanged -= FilterGrid_SelectionChanged;
    FilterGrid.SelectedIndex = 0;
    FilterGrid.SelectionChanged += FilterGrid_SelectionChanged;
    AlphaSlider.Value = 1;
    BetaSlider.Value = 0;
    AlphaSlider.ValueChanged += AlphaSlider_ValueChanged;
    BetaSlider.ValueChanged += BetaSlider_ValueChanged;

    Cropper.Reset();
}
/// <summary>
/// Page entry point: loads the navigated photo, keeps a backup copy for reset, binds the
/// preview to the cropper, and populates the effects pane.
/// </summary>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    try
    {
        if (e?.Parameter is PhotoDisplaySupport Item)
        {
            OriginFile = (await Item.PhotoFile.GetStorageItem().ConfigureAwait(true)) as StorageFile;
            OriginImage = await Item.GenerateImageWithRotation().ConfigureAwait(true);
            // Untouched copy used by the reset button to restore the unedited image.
            OriginBackupImage = SoftwareBitmap.Copy(OriginImage);
            WriteableBitmap WBitmap = new WriteableBitmap(OriginImage.PixelWidth, OriginImage.PixelHeight);
            OriginImage.CopyToBuffer(WBitmap.PixelBuffer);
            Cropper.Source = WBitmap;
            UnchangeRegion = Cropper.CroppedRegion;
            await AddEffectsToPane().ConfigureAwait(false);
        }
    }
    catch (Exception ex)
    {
        LogTracer.Log(ex, "An error was threw when initializing CropperPage");
    }
}
/// <summary>
/// Converts a frame to a SoftwareBitmap of a valid format to display in an Image control.
/// </summary>
/// <param name="inputFrame">Frame to convert.</param>
/// <returns>A displayable bitmap, or null when the frame is empty or in an unexpected format.</returns>
public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
{
    SoftwareBitmap result = null;
    using (var inputBitmap = inputFrame?.SoftwareBitmap)
    {
        if (inputBitmap != null)
        {
            // Dispatch on the source kind first, then on the pixel format that was
            // requested from the MediaFrameReader for that kind.
            switch (inputFrame.FrameReference.SourceKind)
            {
                case MediaFrameSourceKind.Color:
                    // XAML requires Bgra8 with premultiplied alpha.
                    // We requested Bgra8 from the MediaFrameReader, so all that's
                    // left is fixing the alpha channel if necessary.
                    if (inputBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8)
                    {
                        Debug.WriteLine("Color frame in unexpected format.");
                    }
                    else if (inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
                    {
                        // Already in the correct format.
                        result = SoftwareBitmap.Copy(inputBitmap);
                    }
                    else
                    {
                        // Convert to premultiplied alpha.
                        result = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                    }
                    break;
                case MediaFrameSourceKind.Depth:
                    // We requested D16 from the MediaFrameReader, so the frame should
                    // be in Gray16 format.
                    if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray16)
                    {
                        // Use a special pseudo color to render 16 bits depth frame.
                        var depthScale = (float)inputFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters;
                        var minReliableDepth = inputFrame.DepthMediaFrame.MinReliableDepth;
                        var maxReliableDepth = inputFrame.DepthMediaFrame.MaxReliableDepth;
                        result = TransformBitmap(inputBitmap, (w, i, o) => PseudoColorHelper.PseudoColorForDepth(w, i, o, depthScale, minReliableDepth, maxReliableDepth));
                    }
                    else
                    {
                        Debug.WriteLine("Depth frame in unexpected format.");
                    }
                    break;
                case MediaFrameSourceKind.Infrared:
                    // We requested L8 or L16 from the MediaFrameReader, so the frame should
                    // be in Gray8 or Gray16 format.
                    switch (inputBitmap.BitmapPixelFormat)
                    {
                        case BitmapPixelFormat.Gray16:
                            // Use pseudo color to render 16 bits frames.
                            result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor16BitInfrared);
                            break;
                        case BitmapPixelFormat.Gray8:
                            // Use pseudo color to render 8 bits frames.
                            result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor8BitInfrared);
                            break;
                        default:
                            Debug.WriteLine("Infrared frame in unexpected format.");
                            break;
                    }
                    break;
            }
        }
    }
    return (result);
}
/// <summary>
/// Filter-selection handler: disposes any previous filter result, silently resets the
/// alpha/beta sliders, applies the chosen effect to the original image, and refreshes
/// the cropper preview and histogram. The original repeated an identical five-line
/// pattern in eight switch cases; that pattern is factored into the two helpers below.
/// </summary>
private void FilterGrid_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    if (FilterGrid.SelectedItem is FilterItem Item)
    {
        // Drop the previous filter result.
        if (FilterImage != null)
        {
            FilterImage.Dispose();
            FilterImage = null;
            FilterBackupImage.Dispose();
            FilterBackupImage = null;
        }

        // Reset the sliders without firing their handlers.
        AlphaSlider.ValueChanged -= AlphaSlider_ValueChanged;
        BetaSlider.ValueChanged -= BetaSlider_ValueChanged;
        AlphaSlider.Value = 1;
        BetaSlider.Value = 0;
        AlphaSlider.ValueChanged += AlphaSlider_ValueChanged;
        BetaSlider.ValueChanged += BetaSlider_ValueChanged;

        if (Item.Type == FilterType.Origin)
        {
            // No filter: preview the original image directly.
            WriteableBitmap WBitmap = new WriteableBitmap(OriginImage.PixelWidth, OriginImage.PixelHeight);
            OriginImage.CopyToBuffer(WBitmap.PixelBuffer);
            Cropper.Source = WBitmap;
        }
        else
        {
            using (SoftwareBitmap EffectImage = GenerateFilterEffect(Item.Type))
            {
                ApplyFilterEffect(EffectImage);
            }
        }

        using (SoftwareBitmap Histogram = ComputerVisionProvider.CalculateHistogram(Item.Type == FilterType.Origin ? OriginImage : FilterImage))
        {
            WriteableBitmap HBitmap = new WriteableBitmap(Histogram.PixelWidth, Histogram.PixelHeight);
            Histogram.CopyToBuffer(HBitmap.PixelBuffer);
            HistogramImage.Source = HBitmap;
        }

        ResetButton.IsEnabled = true;
    }
}

/// <summary>
/// Runs the ComputerVisionProvider effect matching <paramref name="Type"/> on OriginImage.
/// Caller owns (and should dispose) the returned bitmap.
/// </summary>
private SoftwareBitmap GenerateFilterEffect(FilterType Type)
{
    switch (Type)
    {
        case FilterType.Invert: return ComputerVisionProvider.InvertEffect(OriginImage);
        case FilterType.Gray: return ComputerVisionProvider.GrayEffect(OriginImage);
        case FilterType.Threshold: return ComputerVisionProvider.ThresholdEffect(OriginImage);
        case FilterType.Sketch: return ComputerVisionProvider.SketchEffect(OriginImage);
        case FilterType.GaussianBlur: return ComputerVisionProvider.GaussianBlurEffect(OriginImage);
        case FilterType.Sepia: return ComputerVisionProvider.SepiaEffect(OriginImage);
        case FilterType.OilPainting: return ComputerVisionProvider.OilPaintingEffect(OriginImage);
        case FilterType.Mosaic: return ComputerVisionProvider.MosaicEffect(OriginImage);
        default: throw new NotSupportedException($"Unknown filter type: {Type}");
    }
}

/// <summary>
/// Stores the effect result (plus a backup copy for slider edits) and previews it in
/// the cropper. Does not take ownership of <paramref name="EffectImage"/>.
/// </summary>
private void ApplyFilterEffect(SoftwareBitmap EffectImage)
{
    FilterImage = SoftwareBitmap.Copy(EffectImage);
    FilterBackupImage = SoftwareBitmap.Copy(FilterImage);
    WriteableBitmap WBitmap = new WriteableBitmap(EffectImage.PixelWidth, EffectImage.PixelHeight);
    EffectImage.CopyToBuffer(WBitmap.PixelBuffer);
    Cropper.Source = WBitmap;
}
/// <summary>
/// Frame-arrived handler: normalizes the frame to premultiplied BGRA8, and on every 21st
/// frame (frameSkip gate) PNG-encodes it, posts it to a Custom Vision prediction endpoint,
/// forwards the prediction JSON to a web service, and shows it in the UI. The normalized
/// bitmap is always presented by the frame renderer.
/// NOTE(review): the frame reference is never disposed, a new HttpClient is created per
/// upload (prefer a shared instance), openCVOutputBitmap is created but unused, and the
/// prediction API key is hardcoded in source — rotate it and move it to configuration.
/// </summary>
private async void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    var mediaFrameReference = sender.TryAcquireLatestFrame();
    if (mediaFrameReference != null)
    {
        SoftwareBitmap openCVInputBitmap = null;
        var inputBitmap = mediaFrameReference.VideoMediaFrame?.SoftwareBitmap;
        if (inputBitmap != null)
        {
            //The XAML Image control can only display images in BRGA8 format with premultiplied or no alpha
            if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Bgra8 && inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
            {
                openCVInputBitmap = SoftwareBitmap.Copy(inputBitmap);
            }
            else
            {
                openCVInputBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
            }
            // NOTE(review): allocated but never used (the Blur call below is commented out).
            SoftwareBitmap openCVOutputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, openCVInputBitmap.PixelWidth, openCVInputBitmap.PixelHeight, BitmapAlphaMode.Premultiplied);
            // Upload only when the skip counter wraps to 0 (roughly 1 in 21 frames).
            if (frameSkip == 0)
            {
                frameSkip++;
                using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
                {
                    // PNG-encode the raw input frame into an in-memory stream.
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, stream);
                    // Set the software bitmap
                    encoder.SetSoftwareBitmap(inputBitmap);
                    await encoder.FlushAsync();
                    var bytes = new byte[stream.Size];
                    await stream.ReadAsync(bytes.AsBuffer(), (uint)stream.Size, InputStreamOptions.None);
                    var client = new HttpClient();
                    var queryString = HttpUtility.ParseQueryString(string.Empty);
                    // Request headers
                    // NOTE(review): the first header is empty and likely redundant; the
                    // second embeds a live API key — move to secure configuration.
                    client.DefaultRequestHeaders.Add("Prediction-Key", "");
                    client.DefaultRequestHeaders.Add("Prediction-key", "3ac99d01a5ed4ea7a155b8fdc688a5fa");
                    var uri = "https://southcentralus.api.cognitive.microsoft.com/customvision/v2.0/Prediction/0c1a773e-bb3d-4c84-9749-99bee73cbe1e/image?" + queryString;
                    HttpResponseMessage response;
                    using (ByteArrayContent streamContent = new ByteArrayContent(bytes))
                    {
                        try
                        {
                            response = await client.PostAsync(uri, streamContent);
                            if (response.IsSuccessStatusCode)
                            {
                                try
                                {
                                    // Forward the prediction JSON to the items service,
                                    // then mirror it into the UI text element.
                                    var content = await response.Content.ReadAsStringAsync();
                                    HttpClient client2 = new HttpClient();
                                    Item item = new Item() { id = 1, json = content };
                                    HttpRequestMessage request = new HttpRequestMessage(HttpMethod.Post, "https://luckycharms.azurewebsites.net/items");
                                    request.Content = new StringContent(JsonConvert.SerializeObject(item), Encoding.UTF8, "application/json");
                                    var response2 = await client2.SendAsync(request);
                                    if (!response2.IsSuccessStatusCode)
                                    {
                                        Debug.WriteLine(response2.ReasonPhrase);
                                    }
                                    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                    {
                                        textElement.Text = content;
                                    });
                                }
                                // NOTE(review): empty catch silently swallows forwarding
                                // failures — at minimum log ex like the outer handler does.
                                catch (Exception ex) { }
                            }
                        }
                        catch (Exception ex)
                        {
                            Debug.WriteLine(ex.ToString());
                        }
                    }
                }
                //using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
                //{
                //    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, stream);
                //    // Set the software bitmap
                //    encoder.SetSoftwareBitmap(openCVOutputBitmap);
                //    await encoder.FlushAsync();
                //    var predictionEndpoint = new PredictionEndpoint() { ApiKey = "3ac99d01a5ed4ea7a155b8fdc688a5fa" };
                //    var result = await predictionEndpoint.PredictImageWithHttpMessagesAsync(new Guid("0c1a773e-bb3d-4c84-9749-99bee73cbe1e"), stream.AsStream());
                //    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                //    {
                //        textElement.Text = JsonConvert.SerializeObject(result.Body);
                //    });
                //}
            }
            else
            {
                // Advance the skip counter; wrap back to 0 after 20 skipped frames.
                frameSkip++;
                if (frameSkip > 20)
                {
                    frameSkip = 0;
                }
            }
            // operate on the image and render it
            //openCVHelper.Blur(openCVInputBitmap, openCVOutputBitmap);
            _frameRenderer.PresentSoftwareBitmap(openCVInputBitmap);
        }
    }
}
/// <summary>
/// Background loop driving the region-of-interest control: each pass consumes the pending
/// Sample (converted to the supported pixel format if needed) and either sets the region,
/// clears it, or idles, until Activated goes false. Any exception stops the loop and is
/// stored in <c>Exception</c>.
/// </summary>
async Task Worker()
{
    Exception = null;
    InitialClearDone = false;
    try
    {
        while (Activated)
        {
            var action = WorkerAction.Set;
            SoftwareBitmap input = null;
            // Take the pending sample (if any) under the lock and clear the slot.
            lock (SampleLock)
            {
                if (Sample != null)
                {
                    if (SupportedBitmapPixelFormat == Sample.BitmapPixelFormat)
                    {
                        input = SoftwareBitmap.Copy(Sample);
                    }
                    else
                    {
                        input = SoftwareBitmap.Convert(Sample, SupportedBitmapPixelFormat.Value);
                    }
                }
                // NOTE(review): Sample is nulled without being disposed, and `input` is
                // never disposed here when the chosen action is not Set — confirm who
                // owns these bitmaps (SetAsync may take ownership).
                Sample = null;
            }
            if (Paused)
            {
                // Paused: do nothing this pass and back off for one timeout period.
                action = WorkerAction.None;
                await Task.Delay(Timeout);
            }
            else
            {
                // Clear a region that has been set longer than the timeout; otherwise
                // idle when there is nothing to set or setting is disallowed.
                if (IsRegionSet && Stopwatch.Elapsed - SetEpoch > Timeout)
                {
                    action = WorkerAction.Clear;
                }
                else if (input == null || !SetAllowed)
                {
                    action = WorkerAction.None;
                }
                // The first available frame forces a Clear to start from a known state.
                if (!InitialClearDone && input != null)
                {
                    InitialClearDone = true;
                    action = WorkerAction.Clear;
                }
            }
            switch (action)
            {
                case WorkerAction.Set:
                    await SetAsync(input);
                    break;
                case WorkerAction.Clear:
                    try
                    {
                        await RegionsOfInterestControl.ClearRegionsAsync();
                        IsRegionSet = false;
                    }
                    catch (COMException e) when (e.HResult.Equals(unchecked((int)0x80070016))) // ERROR_BAD_COMMAND
                    {
                        // TODO RegionOfInterest.Clear.Failed
                    }
                    break;
                case WorkerAction.None:
                    break;
                default:
                    throw new NotImplementedException();
            }
            // await Task.Delay(Period);
            // Yield so this loop cannot starve the scheduler between passes.
            await Task.Yield();
        }
    }
    catch (Exception e)
    {
        // Surface the failure to the owner instead of crashing the background task.
        Exception = e;
    }
    finally
    {
        // RegionOfInterest.Stop
    }
}