/// <summary>
/// Handles color frame arrival: acquires the latest frame on a worker thread,
/// updates the coordinate mapper, optionally downsizes the bitmap, and raises
/// the FrameArrived event with the resulting color data.
/// </summary>
private void ColorReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    var subscribers = FrameArrived;
    if (subscribers == null)
    {
        return; // No listeners; skip frame acquisition entirely.
    }

    Task.Run(() =>
    {
        var frame = sender.TryAcquireLatestFrame();
        if (frame == null)
        {
            return;
        }

        try
        {
            Sensor.GetCoordinateMapper().UpdateFromColorFrame(frame.CoordinateSystem);

            var bitmap = frame.VideoMediaFrame.SoftwareBitmap;
            if (ReaderConfiguration.HasFlag(ReaderConfig.HalfResolution))
            {
                // Replace the full-resolution bitmap with a downsized copy and release the original.
                var original = bitmap;
                bitmap = bitmap.Downsize();
                original.Dispose();
            }

            var colorArgs = new ColorFrameArrivedEventArgs(
                this,
                bitmap,
                new CameraIntrinsics(frame.VideoMediaFrame.CameraIntrinsics));
            subscribers(this, colorArgs);
        }
        finally
        {
            // BUG FIX: the original leaked the frame whenever a subscriber threw.
            frame.Dispose();
        }
    });
}
//<SnippetOpenCVFrameArrived>
/// <summary>
/// Acquires the latest color frame, normalizes it to BGRA8 premultiplied if needed,
/// blurs it with the OpenCV helper, and presents the result in the frame renderer.
/// </summary>
private void ColorFrameReader_FrameArrived_OpenCV(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // BUG FIX: the frame reference and the OpenCV input bitmap were never disposed,
    // leaking one of each per event.
    using (var mediaFrameReference = sender.TryAcquireLatestFrame())
    {
        var inputBitmap = mediaFrameReference?.VideoMediaFrame?.SoftwareBitmap;
        if (inputBitmap == null)
        {
            return;
        }

        // The XAML Image control can only display images in BGRA8 format with premultiplied or no alpha.
        SoftwareBitmap openCVInputBitmap;
        if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Bgra8 &&
            inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
        {
            openCVInputBitmap = SoftwareBitmap.Copy(inputBitmap);
        }
        else
        {
            openCVInputBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
        }

        using (openCVInputBitmap)
        {
            SoftwareBitmap openCVOutputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, openCVInputBitmap.PixelWidth, openCVInputBitmap.PixelHeight, BitmapAlphaMode.Premultiplied);

            // Operate on the image and render it. NOTE(review): the renderer appears to
            // take ownership of the output bitmap, so it is not disposed here — confirm.
            openCVHelper.Blur(openCVInputBitmap, openCVOutputBitmap);
            _frameRenderer.PresentSoftwareBitmap(openCVOutputBitmap);
        }
    }
}
/// <summary>
/// Handles a frame arrived event and renders the frame to the screen.
/// Optionally runs chessboard-corner detection on the frame before rendering.
/// </summary>
private void ColorFrameReader_FrameArrivedAsync(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // BUG FIX: the acquired frame was never disposed, leaking one reference per event.
    using (var frame = sender.TryAcquireLatestFrame())
    {
        if (frame == null)
        {
            return;
        }

        var inputBitmap = frame.VideoMediaFrame?.SoftwareBitmap;
        if (inputBitmap != null)
        {
            // The XAML Image control can only display images in BGRA8 format with premultiplied or no alpha.
            // The frame reader as configured in this sample gives BGRA8 with straight alpha, so need to convert it.
            SoftwareBitmap originalBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

            SoftwareBitmap outputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, originalBitmap.PixelWidth, originalBitmap.PixelHeight, BitmapAlphaMode.Premultiplied);

            // Operate on the image in the manner chosen by the user.
            if (currentOperation == OperationType.FindChessboardCorners)
            {
                if (_currentChessParameters.isValid)
                {
                    _helper.DrawChessboard(originalBitmap, outputBitmap, _currentChessParameters.chessX, _currentChessParameters.chessY, _currentChessParameters.squareSizeMeters, SavingDetectedCorners);
                }
            }

            // Display both the original bitmap and the processed bitmap.
            // NOTE(review): the renderers appear to take ownership of these bitmaps;
            // they are intentionally not disposed here — confirm against the renderers.
            _previewRenderer.RenderFrame(originalBitmap);
            _outputRenderer.RenderFrame(outputBitmap);
        }

        // Count every acquired frame, matching the original behavior.
        Interlocked.Increment(ref _frameCount);
    }
}
/// <summary>
/// Handles new frames: resets the stream-fault watchdog, converts the frame
/// (Direct3D surface or software bitmap) to an SKImage for handling, and
/// releases all frame resources.
/// </summary>
private void NewFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // Stopwatch.Restart() also starts a stopped stopwatch, so the original's
    // separate IsRunning/Start() check was redundant and has been removed.
    StreamFaultTimer.Restart();

    MediaFrameReference latestFrame = sender.TryAcquireLatestFrame();
    if (latestFrame == null)
    {
        return; // Nothing to process (original had an empty else branch here).
    }

    VideoMediaFrame latestVideoFrame = latestFrame.VideoMediaFrame;

    // Frames may arrive either as a GPU Direct3D surface or as a software bitmap.
    if (latestVideoFrame.SoftwareBitmap == null)
    {
        HandleFrame(Convert.Direct3dToSKImage(latestVideoFrame.Direct3DSurface));
    }
    else
    {
        HandleFrame(Convert.SoftwareBitmapToSKImage(latestVideoFrame.SoftwareBitmap));
    }

    // Release native frame resources promptly.
    latestVideoFrame.Direct3DSurface?.Dispose();
    latestVideoFrame.SoftwareBitmap?.Dispose();
    latestFrame.Dispose();
}
/// <summary>
/// Evaluates each arriving frame with the ONNX model, processing at most one
/// frame at a time (frames arriving during evaluation are dropped).
/// </summary>
async void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // Atomically claim the processing slot; bail out if a frame is already in flight.
    if (Interlocked.CompareExchange(ref this.processingFlag, 1, 0) != 0)
    {
        return;
    }

    try
    {
        // BUG FIX: TryAcquireLatestFrame can return null; the original dereferenced
        // frame.VideoMediaFrame unconditionally and could throw NullReferenceException.
        using (var frame = sender.TryAcquireLatestFrame())
        using (var videoFrame = frame?.VideoMediaFrame?.GetVideoFrame())
        {
            if (videoFrame != null)
            {
                // If there is a frame, set it as input to the model.
                ONNXModelInput input = new ONNXModelInput();
                input.data = videoFrame;

                // Evaluate the input data.
                var evalOutput = await model.EvaluateAsync(input);

                // Do something with the model output.
                await this.ProcessOutputAsync(evalOutput);
            }
        }
    }
    finally
    {
        // Always release the slot, even if evaluation throws.
        Interlocked.Exchange(ref this.processingFlag, 0);
    }
}
/// <summary>
/// Queues the software bitmap of each arriving frame for downstream processing.
/// Stops the reader if frame handling throws.
/// </summary>
protected async void FrameReaderOnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    try
    {
        using (var frame = sender.TryAcquireLatestFrame())
        {
            if (frame != null)
            {
                // BUG FIX: the bitmap is owned by the frame and is invalidated when the
                // using block disposes it; queue an independent copy instead. A null
                // entry is still queued when the frame has no software bitmap, matching
                // the original contract.
                var bitmap = frame.VideoMediaFrame?.SoftwareBitmap;
                frameQueue.Add(bitmap == null ? null : SoftwareBitmap.Copy(bitmap));
            }
        }
    }
    catch (Exception e)
    {
        // Record the failure before shutting the reader down (the exception was
        // previously ignored entirely).
        System.Diagnostics.Debug.WriteLine(e);
        await frameReader.StopAsync();
    }
}
/// <summary>
/// Processes each arriving frame: converts it to BGRA8 premultiplied, runs the
/// processing helper, kicks off OCR on a copy, publishes preview/processed copies,
/// and raises OnFrameProcessed.
/// </summary>
private void HandleFrameArrive(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // BUG FIX: the frame and both intermediate bitmaps were never disposed.
    using (var frame = sender.TryAcquireLatestFrame())
    {
        var inputBitmap = frame?.VideoMediaFrame?.SoftwareBitmap;
        if (inputBitmap == null)
        {
            return;
        }

        using (var originalBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied))
        using (var outputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, originalBitmap.PixelWidth, originalBitmap.PixelHeight, BitmapAlphaMode.Premultiplied))
        {
            _helper.Process(originalBitmap, outputBitmap);

            // Fire-and-forget OCR on its own copy; ownership passes to GetOCRAsync.
            var localBitmap = SoftwareBitmap.Copy(outputBitmap);
            GetOCRAsync(localBitmap);

            // Publish independent copies; the intermediates are disposed by the usings.
            // NOTE(review): previously stored LatestPreviewFrame/LatestProcessedFrame
            // values are still not disposed here — confirm who owns them.
            LatestPreviewFrame = SoftwareBitmap.Copy(originalBitmap);
            LatestProcessedFrame = SoftwareBitmap.Copy(outputBitmap);

            OnFrameProcessed(new FrameHandlerEventArgs());
        }
    }
}
/// <summary>
/// Stores the most recently acquired frame in m_latestFrame for a consumer to pick up.
/// </summary>
void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    MediaFrameReference frame = sender.TryAcquireLatestFrame();
    if (frame != null)
    {
        // BUG FIX: the frame displaced by the exchange was leaked; it can never be
        // observed by the consumer again, so dispose it here.
        MediaFrameReference previous = Interlocked.Exchange(ref m_latestFrame, frame);
        previous?.Dispose();
    }
}
/// <summary>
/// Forwards frames that carry a coordinate system to SetFrame (which takes
/// ownership of the frame).
/// </summary>
void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    MediaFrameReference frame = sender.TryAcquireLatestFrame();
    if (frame == null)
    {
        return;
    }

    if (frame.CoordinateSystem != null)
    {
        SetFrame(frame);
    }
    else
    {
        // BUG FIX: frames without a coordinate system were acquired but never disposed.
        frame.Dispose();
    }
}
/// <summary>
/// Placeholder frame handler — may be implemented later.
/// (Original comment: "あとで実装するかも。" / "Might implement this later.")
/// Still acquires and immediately releases the latest frame so the reader keeps draining.
/// </summary>
private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (var frame = sender.TryAcquireLatestFrame())
    {
        if (frame == null)
        {
            return;
        }
        // Intentionally empty: no processing implemented yet.
    }
}
/// <summary>
/// Video Capture: Get camera frame and feed as model input.
/// Implementation is from the UWP official tutorial.
/// https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader
/// </summary>
private void ColorFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // May be null when no new frame is available or the reader has stopped.
    var mediaFrameReference = sender.TryAcquireLatestFrame();
    var videoMediaFrame = mediaFrameReference?.VideoMediaFrame;

    if (videoMediaFrame != null)
    {
        // Record the camera focal length when intrinsics are available.
        // NOTE(review): presumably consumed by the face-detection code — confirm.
        if (videoMediaFrame.CameraIntrinsics != null)
        {
            cameraFocalLength = videoMediaFrame.CameraIntrinsics.FocalLength;
            System.Diagnostics.Debug.WriteLine("FocalLength: " + cameraFocalLength.X + " " + cameraFocalLength.Y);
        }
    }

    var softwareBitmap = videoMediaFrame?.SoftwareBitmap;

    if (softwareBitmap != null)
    {
        // The XAML Image control requires BGRA8 with premultiplied (or no) alpha.
        if (softwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8 || softwareBitmap.BitmapAlphaMode != BitmapAlphaMode.Premultiplied)
        {
            softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
        }

        // Swap the processed frame to _backBuffer and dispose of the unused image.
        softwareBitmap = Interlocked.Exchange(ref backBuffer, softwareBitmap);
        softwareBitmap?.Dispose();

        // Changes to XAML ImageElement must happen on UI thread through Dispatcher
        var task = inputImage.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() =>
        {
            // Don't let two copies of this task run at the same time.
            // (taskRunning is only touched on the UI thread, so no interlock is needed here.)
            if (taskRunning)
            {
                return;
            }
            taskRunning = true;

            // Keep draining frames from the backbuffer until the backbuffer is empty.
            SoftwareBitmap latestBitmap;
            while ((latestBitmap = Interlocked.Exchange(ref backBuffer, null)) != null)
            {
                var img = new SoftwareBitmapSource();
                await img.SetBitmapAsync(latestBitmap);
                inputImage.Source = img;

                // Detect face and facial landmarks
                UpdateImageInputData(latestBitmap);
                DetectFaces();

                latestBitmap.Dispose();
            }
            taskRunning = false;
        });
    }

    mediaFrameReference?.Dispose();
}
//<SnippetProcessAudioFrame>
/// <summary>
/// Forwards the most recent audio frame to ProcessAudioFrame, releasing the
/// frame reference as soon as processing returns.
/// </summary>
private void MediaFrameReader_AudioFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (MediaFrameReference reference = sender.TryAcquireLatestFrame())
    {
        if (reference == null)
        {
            return; // No unacquired frame available.
        }

        ProcessAudioFrame(reference.AudioMediaFrame);
    }
}
// Handles a new media frame: caches the camera coordinate system and the view and
// projection matrices under TransformLock, then (when the pose is considered stable,
// or unstable frames are allowed) copies the frame's Direct3D surface into a shared
// texture for rendering.
private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    var reference = sender.TryAcquireLatestFrame();
    // NOTE(review): `reference` may be null here and is never disposed in this
    // handler — confirm upstream guarantees / intended lifetime.
    lock (TransformLock)
    {
        // The spatial camera coordinate system is attached to the sample's properties.
        if (reference.Properties.TryGetValue(InteropStatics.MFSampleExtensionSpatialCameraCoordinateSystem, out object coordinateSystem))
        {
            CoordinateSystem = coordinateSystem as SpatialCoordinateSystem;
        }
        else
        {
            return; // No spatial data on this sample; skip it entirely.
        }

        // View and projection transforms arrive as raw byte blobs; reinterpret as Matrix4x4.
        var newViewMatrix = (reference.Properties[InteropStatics.MFSampleExtensionSpatialCameraViewTransform] as byte[]).To <Matrix4x4>();

        ProjectionMatrix = (reference.Properties[InteropStatics.MFSampleExtensionSpatialCameraProjectionTransform] as byte[]).To <Matrix4x4>();
        // Overwrite the depth-related entries using the configured near/far planes.
        ProjectionMatrix.M33 = FarPlane / (NearPlane - FarPlane);
        ProjectionMatrix.M43 = NearPlane * FarPlane / (NearPlane - FarPlane);

        UpdateStability(newViewMatrix);
        LastViewMatrix = newViewMatrix;
    }

    if (AllowUnstableFrames || Stable)
    {
        ViewMatrix = LastViewMatrix;

        // Extract the native ID3D11Resource from the WinRT Direct3D surface via COM interop.
        var surface = reference.VideoMediaFrame.Direct3DSurface;
        var surfaceInterfaceAccess = surface as InteropStatics.IDirect3DDxgiInterfaceAccess;
        IntPtr resourcePointer = surfaceInterfaceAccess.GetInterface(InteropStatics.ID3D11Resource);
        Resource resource = SharpDX.CppObject.FromPointer <Resource>(resourcePointer);
        Marshal.Release(resourcePointer);
        Texture2D frameTexture = resource.QueryInterface <Texture2D>();

        if (deviceTexture == null)
        {
            // First frame: create a keyed-mutex shared texture matching the camera
            // frame, then open it on both the frame's device (cameraTexture) and our
            // rendering device (deviceTexture).
            Texture2D texture = new Texture2D(frameTexture.Device, new Texture2DDescription()
            {
                Width = frameTexture.Description.Width,
                Height = frameTexture.Description.Height,
                MipLevels = 1,
                ArraySize = 1,
                Format = frameTexture.Description.Format,
                SampleDescription = frameTexture.Description.SampleDescription,
                Usage = frameTexture.Description.Usage,
                BindFlags = BindFlags.ShaderResource,
                CpuAccessFlags = CpuAccessFlags.None,
                OptionFlags = ResourceOptionFlags.SharedKeyedmutex
            });
            cameraTexture = frameTexture.Device.OpenSharedResource <Texture2D>(texture.QueryInterface <SharpDX.DXGI.Resource>().SharedHandle);
            deviceTexture = device.OpenSharedResource <Texture2D>(texture.QueryInterface <SharpDX.DXGI.Resource>().SharedHandle);
            Ready = true;
        }

        // Copy the camera frame into the shared texture under its keyed mutex.
        LockTexture(cameraTexture);
        frameTexture.Device.ImmediateContext.CopyResource(frameTexture, cameraTexture);
        UnlockTexture(cameraTexture);

        FrameUpdated();
    }
}
/// <summary>
/// Stores each arriving frame by source kind and, when a color frame is present,
/// converts it to BGRA8, strips the alpha channel to BGR, and publishes the raw
/// bytes to the "/{namespace}/stream/image/hd" topic.
/// </summary>
private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // Drop this frame if the previous one is still being processed.
    if (!frameProcessingSemaphore.Wait(0))
    {
        return;
    }

    try
    {
        var frame = sender.TryAcquireLatestFrame();
        if (frame != null)
        {
            // BUG FIX: dispose a previously stored, never-processed frame before
            // overwriting it (frames of non-color kinds were leaked on overwrite).
            this.frames[frame.SourceKind]?.Dispose();
            this.frames[frame.SourceKind] = frame;
        }

        if (this.frames[MediaFrameSourceKind.Color] != null)
        {
            var colorSourceBitmap = this.frames[MediaFrameSourceKind.Color].VideoMediaFrame.SoftwareBitmap;

            // BUG FIX: the BitmapBuffer returned by LockBuffer was never disposed,
            // leaving the bitmap locked. Hold the lock only to read the plane layout.
            BitmapPlaneDescription colorDesc;
            using (var lockedBuffer = colorSourceBitmap.LockBuffer(BitmapBufferAccessMode.Read))
            {
                colorDesc = lockedBuffer.GetPlaneDescription(0);
            }

            // Get color information as raw BGRA8 bytes.
            var bitmap = SoftwareBitmap.Convert(colorSourceBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore);
            byte[] colorBytes = new byte[bitmap.PixelWidth * bitmap.PixelHeight * 4];
            bitmap.CopyToBuffer(colorBytes.AsBuffer());

            // Strip the alpha channel (BGRA -> BGR), parallelized over row ranges.
            byte[] bgrColorBytes = new byte[colorDesc.Width * colorDesc.Height * 3];
            Parallel.ForEach(System.Collections.Concurrent.Partitioner.Create(0, colorDesc.Height), (range) =>
            {
                for (int i = range.Item1; i < range.Item2; ++i)
                {
                    int srcIdx = i * colorDesc.Width * 4;
                    int destIdx = i * colorDesc.Width * 3;
                    for (int x = 0; x < colorDesc.Width; ++x)
                    {
                        Buffer.BlockCopy(colorBytes, srcIdx + x * 4, bgrColorBytes, destIdx + x * 3, 3);
                    }
                }
            });

            this.client.Publish("/" + this.nameSpace + "/stream/image/hd", bgrColorBytes);
#if PRINT_STATUS_MESSAGE
            ++this.kinectFrameCount;
#endif
            bitmap.Dispose();
            this.frames[MediaFrameSourceKind.Color].Dispose();
            this.frames[MediaFrameSourceKind.Color] = null;
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: the exception was silently swallowed; at least record it.
        System.Diagnostics.Debug.WriteLine(ex);
    }
    finally
    {
        frameProcessingSemaphore.Release();
    }
}
//#############################################################################################
/// <summary>
/// Callback invoked when a new frame is available. By default, tries to capture the
/// frame and display it in _imageElement if one is set. The parameters are unused.
/// </summary>
/// <param name="sender"> the object itself </param>
/// <param name="args"> class MediaFrameArrivedEventArgs </param>
private void FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // No display target configured — nothing to do.
    if (_imageElement == null)
    {
        return;
    }

    // Notify listeners only when a frame was actually captured.
    if (ReadFrame())
    {
        AfterFrameArrived?.Invoke();
    }
}
/// <summary>
/// Acquires the latest frame and hands it to SetFrame on a thread-pool thread,
/// unless a frame is already being analyzed or queued.
/// </summary>
void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // Skip acquisition while a frame is being analyzed or one is already pending.
    if (IsAnalyzingFrame || IsNewFrameAvailable)
    {
        return;
    }

    MediaFrameReference frame = sender.TryAcquireLatestFrame();
    if (frame != null)
    {
        // BUG FIX: `new Task(...).Start()` created a cold task for no benefit;
        // Task.Run schedules on the thread pool directly.
        Task.Run(() => SetFrame(frame));
    }
}
/// <summary>
/// Handles a frame arrived event and renders the frame to the screen.
/// Only depth frames are forwarded to the renderer.
/// </summary>
private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (var frame = sender.TryAcquireLatestFrame())
    {
        if (frame == null)
        {
            return; // No unacquired frame, or the reader has been stopped.
        }

        if (frame.SourceKind == MediaFrameSourceKind.Depth)
        {
            frameRenderer.ProcessFrame(frame);
        }
    }
}
/// <summary>
/// A new frame from the camera is available. Wraps the frame in a VideoFrame
/// (Direct3D surface or software bitmap), evaluates it with the model, and
/// throttles capture with a fixed delay. At most one frame is processed at a time.
/// </summary>
/// <param name="sender"></param>
/// <param name="args"></param>
private void _modelInputFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    Debug.WriteLine("_modelInputFrameReader_FrameArrived");

    // Do not attempt processing of more than 1 frame at a time.
    if (_isProcessingFrames)
    {
        return;
    }

    _frameAquisitionLock.Wait();
    try
    {
        _isProcessingFrames = true;
        _CaptureFPS += 1;

        MediaFrameReference frame = null;
        try
        {
            frame = sender.TryAcquireLatestFrame();
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex.Message);
            NotifyUser(ex.Message, NotifyType.ErrorMessage);
            frame = null;
        }

        try
        {
            if ((frame != null) && (frame.VideoMediaFrame != null))
            {
                // Receive frames from the camera and transfer to system memory.
                _perfStopwatch.Restart();

                VideoFrame vf;
                SoftwareBitmap softwareBitmap = frame.VideoMediaFrame.SoftwareBitmap;
                if (softwareBitmap == null) // frames are coming as Direct3DSurface
                {
                    Debug.Assert(frame.VideoMediaFrame.Direct3DSurface != null);
                    vf = VideoFrame.CreateWithDirect3D11Surface(frame.VideoMediaFrame.Direct3DSurface);
                }
                else
                {
                    vf = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
                }

                // NOTE(review): blocking on the async evaluation preserves the original
                // serial behavior; consider making this handler fully async instead.
                EvaluateVideoFrameAsync(vf).ConfigureAwait(false).GetAwaiter().GetResult();
            }

            // Deliberate throttle carried over from the original implementation.
            Thread.Sleep(500);
        }
        finally
        {
            // BUG FIX: the acquired frame was never disposed.
            frame?.Dispose();
        }
    }
    finally
    {
        // BUG FIX: the flag and the lock were not released if processing threw.
        _isProcessingFrames = false;
        _frameAquisitionLock.Release();
    }
}
/// <summary>
/// Forwards each infrared video frame (with its duration) to ProcessVideoFrame.
/// </summary>
private void IrReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (var frame = sender.TryAcquireLatestFrame())
    {
        // BUG FIX: TryAcquireLatestFrame can return null; guard before dereferencing.
        if (frame == null)
        {
            return;
        }

        // BUG FIX: the original wrapped this in catch (Exception ex) { throw ex; },
        // which only destroyed the stack trace; letting exceptions propagate
        // naturally preserves it.
        var videoFrame = frame.VideoMediaFrame;
        ProcessVideoFrame(videoFrame, videoFrame.FrameReference.Duration);
    }
}
/// <summary>
/// Handles a frame arrived event and renders the frame to the screen.
/// </summary>
private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // TryAcquireLatestFrame returns the latest frame not yet acquired, or null when
    // no such frame exists or the reader is no longer in the "Started" state (e.g. a
    // FrameArrived event that was in flight when the reader was stopped).
    using (var frame = sender.TryAcquireLatestFrame())
    {
        if (frame == null)
        {
            return;
        }

        // Dispatch to the renderer registered for this frame's source kind.
        _frameRenderers[frame.SourceKind].ProcessFrame(frame);
    }
}
/// <summary>
/// Raises FrameSampleAcquired with a VideoCaptureSample wrapping the latest frame.
/// Does nothing when there are no subscribers.
/// </summary>
void HandleFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // Skip the acquisition work entirely when nobody is listening.
    if (FrameSampleAcquired == null)
    {
        return;
    }

    //frameReference is a MediaFrameReference
    using (var frameReference = _frameReader.TryAcquireLatestFrame())
    {
        if (frameReference == null)
        {
            return;
        }

        var sample = new VideoCaptureSample(frameReference, worldOrigin);
        FrameSampleAcquired?.Invoke(sample);
    }
}
/// <summary>
/// Renders each arriving color frame using the back-buffer swap pattern from the
/// UWP MediaFrameReader documentation: the newest converted bitmap replaces any
/// pending one, and a single dispatcher task drains the buffer onto the Image control.
/// </summary>
private void ColorFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // BUG FIX: the method was declared `async` but contained no await (CS1998);
    // the modifier has been removed without changing the handler's signature shape.
    var mediaFrameReference = sender.TryAcquireLatestFrame();
    var videoMediaFrame = mediaFrameReference?.VideoMediaFrame;
    var softwareBitmap = videoMediaFrame?.SoftwareBitmap;

    if (softwareBitmap != null)
    {
        // The XAML Image control requires BGRA8 with premultiplied (or no) alpha.
        if (softwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8 ||
            softwareBitmap.BitmapAlphaMode != BitmapAlphaMode.Premultiplied)
        {
            softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
        }

        // Swap the processed frame to _backBuffer and dispose of the unused image.
        softwareBitmap = Interlocked.Exchange(ref backBuffer, softwareBitmap);
        softwareBitmap?.Dispose();

        // Changes to XAML ImageElement must happen on UI thread through Dispatcher.
        var task = imageElement.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
        {
            // Don't let two copies of this task run at the same time.
            if (taskRunning)
            {
                return;
            }
            taskRunning = true;

            // Keep draining frames from the backbuffer until the backbuffer is empty.
            SoftwareBitmap latestBitmap;
            while ((latestBitmap = Interlocked.Exchange(ref backBuffer, null)) != null)
            {
                var imageSource = (SoftwareBitmapSource)imageElement.Source;
                await imageSource.SetBitmapAsync(latestBitmap);
                latestBitmap.Dispose();
            }
            taskRunning = false;
        });
    }

    // BUG FIX: TryAcquireLatestFrame can return null; the original called
    // Dispose() unconditionally and could throw NullReferenceException.
    mediaFrameReference?.Dispose();
}
/// <summary>
/// Renders each arriving frame for preview and, every ClassificationFrequencyInFrames
/// frames, runs the analyzer on the locked pixel buffer; approved classification IDs
/// are stored in ClassifiedImage and re-rendered via ProcessFrame.
/// </summary>
private async void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // NOTE(review): this local is never used after initialization.
    string classification = string.Empty;

    // TryAcquireLatestFrame will return the latest frame that has not yet been acquired.
    // This can return null if there is no such frame, or if the reader is not in the
    // "Started" state. The latter can occur if a FrameArrived event was in flight
    // when the reader was stopped.
    using (var frame = sender.TryAcquireLatestFrame())
    {
        if (frame != null)
        {
            var renderer = _frameRenderers[frame.SourceKind];
            using (var inputBitmap = frame?.VideoMediaFrame.SoftwareBitmap)
            {
                if (inputBitmap != null)
                {
                    _currentClassificationFrame++;
                    var sourceKind = frame?.VideoMediaFrame.FrameReference.SourceKind;
                    // NOTE(review): DepthMediaFrame is dereferenced unconditionally and
                    // would throw for non-depth sources — confirm this reader only
                    // delivers depth frames.
                    var depthScale = (float)frame?.VideoMediaFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters;
                    using (var buffer = inputBitmap.LockBuffer(Windows.Graphics.Imaging.BitmapBufferAccessMode.Read))
                    {
                        renderer.ProcessFrameForPreview(inputBitmap, sourceKind, depthScale);

                        // Throttle classification to once every ClassificationFrequencyInFrames frames.
                        if (_currentClassificationFrame >= ClassificationFrequencyInFrames)
                        {
                            _currentClassificationFrame = 0;
                            // NOTE(review): awaiting here keeps the bitmap buffer locked
                            // across the await — confirm the analyzer copies what it needs.
                            int classificationId = await _analyzer.BeginProcessing(buffer, buffer.GetPlaneDescription(0).Stride, inputBitmap.PixelWidth, inputBitmap.PixelHeight);
                            if (classificationId != -1)
                            {
                                // Only surface classifications from the approved list.
                                if (approvedClassifiedImages.Contains <int>(classificationId))
                                {
                                    ClassifiedImage = classificationId;
                                    renderer.ProcessFrame(inputBitmap, sourceKind, depthScale);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
// Throttled frame handler: when capture is enabled and the cooldown (CaptureCD) has
// elapsed, forwards the latest color bitmap to OnReceiveFrame subscribers and then
// resets the cooldown.
private void ColorFrameReader_FrameArrivedAsync(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    if (Enable && CaptureCD.isTimeout())
    {
        var frame = sender.TryAcquireLatestFrame();
        // NOTE(review): the frame is never disposed here, and the bitmap handed to
        // subscribers is owned by it — confirm the intended ownership/lifetime with
        // the OnReceiveFrame consumers before adding disposal.
        if (frame != null)
        {
            var inputBitmap = frame.VideoMediaFrame?.SoftwareBitmap;
            if (inputBitmap != null)
            {
                OnReceiveFrame?.Invoke(inputBitmap);
            }
            //inputBitmap.Dispose();
        }
        CaptureCD.Reset();
    }
}
/// <summary>
/// Parses each arriving body frame and raises FrameArrived with the parsed data.
/// The frame is disposed before subscribers are notified, matching the original order.
/// </summary>
private void BodyFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    var subscribers = FrameArrived;
    if (subscribers == null)
    {
        return; // No listeners; skip acquisition entirely.
    }

    var frame = sender.TryAcquireLatestFrame();
    if (frame == null)
    {
        return;
    }

    if (frame.BufferMediaFrame?.Buffer != null)
    {
        // Parse the body data before disposing the frame it came from.
        // (Removed the unused `coordinateMapper` local present in the original.)
        var bodyArgs = new BodyFrameArrivedEventArgs(this, BodyFrame.Parse(frame));
        frame.Dispose();
        subscribers(this, bodyArgs);
    }
    else
    {
        // BUG FIX: frames without a body buffer were acquired but never disposed.
        frame.Dispose();
    }
}
/// <summary>
/// Function to handle the frame when it arrives from FrameReader
/// and send it back to registered new frame handler if it is valid.
/// </summary>
/// <param name="FrameReader"></param>
/// <param name="args"></param>
private void FrameArrivedHandler(MediaFrameReader FrameReader, MediaFrameArrivedEventArgs args)
{
    using (var frame = FrameReader.TryAcquireLatestFrame())
    {
        if (frame == null)
        {
            return;
        }

        var videoFrame = frame.VideoMediaFrame.GetVideoFrame();
        if (videoFrame != null)
        {
            m_frameHandler(videoFrame);
        }

        // BUG FIX: removed the redundant frame.Dispose() — the using block
        // already disposes the frame exactly once.
    }
}
/// <summary>
/// Handles the event of frame arrived from Frame Reader and evaluates the frame.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private static async void FrameArrivedHandler(MediaFrameReader sender, MediaFrameArrivedEventArgs e)
{
    using (var frame = sender.TryAcquireLatestFrame())
    {
        if (frame == null)
        {
            return;
        }

        // BUG FIX: the original used Task.Run(...).Wait(), blocking a thread-pool
        // thread on async work (sync-over-async); await the evaluation directly.
        // The using block also guarantees the VideoFrame is disposed even when
        // evaluation throws (the original's Dispose was skipped on failure).
        using (var videoFrame = frame.VideoMediaFrame.GetVideoFrame())
        {
            await EvaluateFrameAsync(videoFrame);
        }
    }
}
/// <summary>
/// Converts each arriving frame to a displayable BGRA8 premultiplied bitmap and
/// presents it in ImageView on the UI thread.
/// </summary>
private void FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    var latestFrame = sender.TryAcquireLatestFrame();
    if (latestFrame == null)
    {
        return;
    }

    // Convert to a displayable format; Convert creates an independent bitmap,
    // so the frame itself can be released before the dispatcher task runs.
    var displayBitmap = SoftwareBitmap.Convert(
        latestFrame.VideoMediaFrame.SoftwareBitmap,
        BitmapPixelFormat.Bgra8,
        BitmapAlphaMode.Premultiplied);

    // XAML updates must happen on the UI thread.
    var task = ImageView.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
    {
        SoftwareBitmapSource softwareBitmapSource = new SoftwareBitmapSource();
        await softwareBitmapSource.SetBitmapAsync(displayBitmap);
        ImageView.Source = softwareBitmapSource;
    });

    latestFrame.Dispose();
}
/// <summary>
/// Handler invoked when a new frame arrives. Caches the frame's bitmap and
/// notifies subscribers of its size in bytes.
/// </summary>
/// <param name="sender"></param>
/// <param name="args"></param>
private void HandleFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // Do nothing unless an event handler has been registered from outside the plugin.
    if (OnFrameArrived == null)
    {
        return;
    }

    // Acquire the latest frame.
    using (var frame = _frameReader.TryAcquireLatestFrame())
    {
        if (frame != null)
        {
            // BUG FIX: the original stored frame.VideoMediaFrame.SoftwareBitmap
            // directly, but that bitmap is owned by the frame and becomes invalid
            // once the using block disposes it. Keep an independent copy instead.
            // NOTE(review): the previous copy is disposed here — confirm no consumer
            // reads _bitmap concurrently with this handler.
            _bitmap?.Dispose();
            _bitmap = SoftwareBitmap.Copy(frame.VideoMediaFrame.SoftwareBitmap);

            // Notify subscribers with the bitmap size (4 bytes per pixel, as in the
            // original computation).
            OnFrameArrived?.Invoke(4 * _bitmap.PixelHeight * _bitmap.PixelWidth);
        }
    }
}
// https://github.com/MarekKowalski/HoloFace/blob/master/HoloFace/Assets/HololensCameraUWP.cs
/// <summary>
/// Handles an arriving camera frame: computes the webcam-to-world and projection
/// matrices from the frame's spatial data and intrinsics, then hands a copy of
/// the bitmap to FrameReady subscribers.
/// </summary>
private void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    using (var frame = sender.TryAcquireLatestFrame())
    {
        // https://docs.microsoft.com/en-us/windows/mixed-reality/locatable-camera
        var coordinateSystem = frame?.CoordinateSystem;
        var cameraIntrinsics = frame?.VideoMediaFrame?.CameraIntrinsics;

        // BUG FIX: the original dereferenced coordinateSystem and cameraIntrinsics
        // without null checks and would throw when the frame carried no spatial data.
        if (coordinateSystem == null || cameraIntrinsics == null)
        {
            return;
        }

        var ht = coordinateSystem.TryGetTransformTo(originalFrameOfReference.CoordinateSystem);

        // Webcam-to-world transform; identity is used when the transform is
        // unavailable. The third-row entries are negated (presumably a coordinate
        // handedness conversion — matches the referenced HoloFace implementation).
        Matrix4 webcamToWorldMatrix = new Matrix4(
            ht?.M11 ?? 1, ht?.M21 ?? 0, ht?.M31 ?? 0, ht?.Translation.X ?? 0,
            ht?.M12 ?? 0, ht?.M22 ?? 1, ht?.M32 ?? 0, ht?.Translation.Y ?? 0,
            -ht?.M13 ?? 0, -ht?.M23 ?? 0, -ht?.M33 ?? 1, -ht?.Translation.Z ?? 0,
            0, 0, 0, 1);

        using (var bitmap = frame.VideoMediaFrame?.SoftwareBitmap)
        {
            if (bitmap == null)
            {
                return;
            }

            Width = bitmap.PixelWidth;
            Height = bitmap.PixelHeight;

            // Build a projection matrix from the camera intrinsics (focal length
            // and principal point), normalized to the image dimensions.
            var projectionMatrix = new Matrix4();
            projectionMatrix.M11 = 2 * cameraIntrinsics.FocalLength.X / Width;
            projectionMatrix.M22 = 2 * cameraIntrinsics.FocalLength.Y / Height;
            projectionMatrix.M13 = -2 * (cameraIntrinsics.PrincipalPoint.X - Width / 2) / Width;
            projectionMatrix.M23 = 2 * (cameraIntrinsics.PrincipalPoint.Y - Height / 2) / Height;
            projectionMatrix.M33 = -1;
            projectionMatrix.M44 = -1;
            ProjectionMatrix = projectionMatrix;

            // Hand subscribers an independent copy so the frame can be disposed safely.
            var copy = SoftwareBitmap.Copy(bitmap);
            FrameReady?.Invoke(new FrameData()
            {
                bitmap = copy,
                webcamToWorldMatrix = webcamToWorldMatrix,
                projectionMatrix = projectionMatrix
            });
        }
    }
}
/// <summary>
/// Handles the frame arrived event by converting the frame to a displayable
/// format and rendering it to the screen.
/// </summary>
private void Reader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // TryAcquireLatestFrame may return null (no new frame, or the reader has been
    // stopped with a FrameArrived event still in flight); the renderer is handed
    // the result either way, exactly as in the original.
    using (var latestFrame = sender.TryAcquireLatestFrame())
    {
        _frameRenderer.ProcessFrame(latestFrame);
    }
}