//--------------------------------------------------------Set-, Get- Methods:---------------------------------------------------------\\ #region --Set-, Get- Methods--
/// <summary>
/// Fetches the newest frame from <c>frameReader</c> and writes its bitmap into
/// <paramref name="softwareBitmap"/>. Sets it to null when no frame is
/// available or the frame carries no software bitmap.
/// </summary>
/// <param name="softwareBitmap">Receives the latest frame's bitmap, or null.</param>
public void GetSoftwareBitmap(ref SoftwareBitmap softwareBitmap)
{
    MediaFrameReference frameRef = frameReader.TryAcquireLatestFrame();
    VideoMediaFrame frame = frameRef?.VideoMediaFrame;
    // NOTE(review): frameRef is never disposed here, and the returned bitmap's
    // lifetime is tied to it — confirm the caller's ownership expectations.
    softwareBitmap = frame?.SoftwareBitmap;
}
/// <summary>
/// Extracts a Bgra8 SoftwareBitmap from the given frame. If the frame is
/// backed by a Direct3D surface, the pixels are first copied into a software
/// bitmap; the intermediate bitmap and surface are disposed before returning.
/// Returns null when <paramref name="input"/> is null or carries no pixels.
/// </summary>
/// <param name="input">Frame to convert; may be null.</param>
/// <returns>A new Bgra8 bitmap owned by the caller, or null.</returns>
public static unsafe SoftwareBitmap ConvertToImageAsync(VideoMediaFrame input)
{
    if (input != null)
    {
        var inputBitmap = input.SoftwareBitmap;
        var surface = input.Direct3DSurface;
        try
        {
            if (surface != null)
            {
                // NOTE(review): despite the Async suffix this method blocks on the
                // copy (.GetAwaiter().GetResult()); calling it on a UI thread risks
                // deadlock — confirm call sites or convert to await.
                inputBitmap = SoftwareBitmap.CreateCopyFromSurfaceAsync(surface, BitmapAlphaMode.Ignore).AsTask().GetAwaiter().GetResult();
            }
            if (inputBitmap != null)
            {
                // Convert returns a fresh bitmap, so disposing inputBitmap in
                // the finally block below does not invalidate the result.
                return(SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore));
            }
        }
        finally
        {
            inputBitmap?.Dispose();
            surface?.Dispose();
        }
    }
    return(null);
}
/// <summary>
/// Snapshots a video frame: copies the metadata and pixel data out of the
/// short-lived <see cref="VideoMediaFrame"/> so this object remains valid
/// after the source frame is disposed.
/// </summary>
/// <param name="frame">Source frame; must not be null.</param>
internal ExampleMediaFrame(VideoMediaFrame frame)
{
    // InfraredMediaFrame is null for non-IR sources, hence the null-conditional.
    IsIlluminated = frame.InfraredMediaFrame?.IsIlluminated;
    SourceKind = frame.FrameReference.SourceKind;
    SystemRelativeTime = frame.FrameReference.SystemRelativeTime;
    // BUG FIX: SoftwareBitmap is null for frames backed only by a Direct3D
    // surface; the original called SoftwareBitmap.Copy unconditionally and
    // threw NullReferenceException for such frames.
    SoftwareBitmap = frame.SoftwareBitmap != null ? SoftwareBitmap.Copy(frame.SoftwareBitmap) : null;
}
/// <summary>
/// Handles a muxed (color + depth) frame arrival: grabs the latest frame pair,
/// signals <c>_frameReceived</c>, saves the color bitmap to LocalFolder\image.png,
/// and releases the bitmaps. Captures at most one frame per <c>capturing</c> request.
/// async void is acceptable here because this is an event handler.
/// </summary>
private async void FrameReader_FrameArrived(MultiSourceMediaFrameReader sender, MultiSourceMediaFrameArrivedEventArgs args)
{
    if (capturing)
    {
        capturing = false;
        using (MultiSourceMediaFrameReference muxedFrameRef = sender.TryAcquireLatestFrame())
        {
            // BUG FIX: TryAcquireLatestFrame can return null; the original
            // dereferenced it unconditionally.
            if (muxedFrameRef == null)
            {
                return;
            }
            using (MediaFrameReference colorFrameRef = muxedFrameRef.TryGetFrameReferenceBySourceId(_colorSourceId))
            using (MediaFrameReference depthFrameRef = muxedFrameRef.TryGetFrameReferenceBySourceId(_depthSourceId))
            {
                _frameReceived.Set();
                // do something with the frames
                VideoMediaFrame colorFrame = colorFrameRef?.VideoMediaFrame;
                VideoMediaFrame depthFrame = depthFrameRef?.VideoMediaFrame;
                SoftwareBitmap colorBitmap = colorFrame?.SoftwareBitmap;
                SoftwareBitmap depthBitmap = depthFrame?.SoftwareBitmap;
                if (colorBitmap != null)
                {
                    StorageFolder storageFolder = ApplicationData.Current.LocalFolder;
                    StorageFile outputFile = await storageFolder.CreateFileAsync("image.png", CreationCollisionOption.ReplaceExisting);
                    // NOTE(review): if SaveSoftwareBitmapToFile is asynchronous it must be
                    // awaited before the bitmap is disposed below — confirm its signature.
                    SaveSoftwareBitmapToFile(colorBitmap, outputFile);
                }
                // BUG FIX: the bitmaps were acquired with null-conditionals but
                // disposed unconditionally, so a missing bitmap crashed here.
                colorBitmap?.Dispose();
                depthBitmap?.Dispose();
            }
        }
    }
}
/// <summary>
/// Per-frame callback: resets the stream-fault watchdog, converts the newest
/// frame to an SKImage (from either its Direct3D surface or software bitmap),
/// hands it to <c>HandleFrame</c>, and releases all frame resources.
/// </summary>
private void NewFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    // Restart() starts a stopped stopwatch and resets a running one, so the
    // original IsRunning/Start() guard before Restart() was redundant.
    StreamFaultTimer.Restart();
    using (MediaFrameReference latestFrame = sender.TryAcquireLatestFrame())
    {
        // BUG FIX: VideoMediaFrame can be null (non-video frame); the original
        // dereferenced it without a check.
        VideoMediaFrame videoFrame = latestFrame?.VideoMediaFrame;
        if (videoFrame == null)
        {
            return;
        }
        if (videoFrame.SoftwareBitmap == null)
        {
            HandleFrame(Convert.Direct3dToSKImage(videoFrame.Direct3DSurface));
        }
        else
        {
            HandleFrame(Convert.SoftwareBitmapToSKImage(videoFrame.SoftwareBitmap));
        }
        // Release whichever backing resources this frame carried.
        videoFrame.Direct3DSurface?.Dispose();
        videoFrame.SoftwareBitmap?.Dispose();
    }
}
/// <summary>
/// Converts the frame's bitmap to Bgra8 with premultiplied alpha, the format
/// XAML Image controls require. Returns null when the source frame has
/// already been disposed by the time it is read.
/// </summary>
/// <param name="frame">Frame whose software bitmap is converted.</param>
/// <returns>A new Bgra8/premultiplied bitmap, or null on a disposed frame.</returns>
public override SoftwareBitmap ConvertFrame(VideoMediaFrame frame)
{
    SoftwareBitmap converted = null;
    try
    {
        converted = SoftwareBitmap.Convert(frame.SoftwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
    }
    catch (ObjectDisposedException)
    {
        // The frame was released while we were converting; drop it.
    }
    return converted;
}
/// <summary>
/// Converts the frame's bitmap to Bgra8/premultiplied alpha and runs it
/// through <c>ApplyFilter</c>. Returns null when the source frame has already
/// been disposed.
/// </summary>
/// <param name="frame">Frame whose software bitmap is converted and filtered.</param>
public override SoftwareBitmap ConvertFrame(VideoMediaFrame frame)
{
    try
    {
        var bgra = SoftwareBitmap.Convert(frame.SoftwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
        return ApplyFilter(bgra);
    }
    catch (ObjectDisposedException)
    {
        // The frame was released mid-conversion; drop it.
        return null;
    }
}
/// <summary>
/// Pulls the latest video frame, runs the FaceTracker over it, and updates the
/// shared <c>latestFaces</c> list under <c>@lock</c>. Face loss is smoothed:
/// the list is only cleared after 30 consecutive face-less frames.
/// </summary>
public void ProcessFrame()
{
    MediaFrameReference frame = videoFrameProcessor.GetLatestFrame();
    VideoMediaFrame videoMediaFrame = frame?.VideoMediaFrame;
    if (videoMediaFrame == null)
    {
        return;
    }
    // Validate that the incoming frame format is compatible with the FaceTracker
    bool isBitmapPixelFormatSupported = videoMediaFrame.SoftwareBitmap != null && FaceTracker.IsBitmapPixelFormatSupported(videoMediaFrame.SoftwareBitmap.BitmapPixelFormat);
    if (!isBitmapPixelFormatSupported)
    {
        return;
    }
    // Ask the FaceTracker to process this frame asynchronously
    IAsyncOperation <IList <DetectedFace> > processFrameTask = faceTracker.ProcessNextFrameAsync(videoMediaFrame.GetVideoFrame());
    try
    {
        // NOTE(review): GetResults() is called without waiting for the operation
        // to complete; IAsyncOperation.GetResults throws unless the operation has
        // finished (possibly synchronously). Confirm this is intentional — the
        // catch below would otherwise swallow the failure every frame.
        IList <DetectedFace> faces = processFrameTask.GetResults();
        lock (@lock)
        {
            if (faces.Count == 0)
            {
                ++numFramesWithoutFaces;
                // The FaceTracker might lose track of faces for a few frames, for example,
                // if the person momentarily turns their head away from the videoFrameProcessor. To smooth out
                // the tracking, we allow 30 video frames (~1 second) without faces before
                // we say that we're no longer tracking any faces.
                if (numFramesWithoutFaces > 30 && latestFaces.Any())
                {
                    latestFaces.Clear();
                }
            }
            else
            {
                numFramesWithoutFaces = 0;
                latestFaces.Clear();
                foreach (var face in faces)
                {
                    latestFaces.Add(face.FaceBox);
                }
            }
        }
    }
    catch (Exception e)
    {
        // The task might be cancelled if the FaceAnalysis failed.
        Debug.LogException(e);
    }
}
/// <summary>
/// Converts the frame's bitmap to Bgra8/premultiplied alpha, applies the
/// green-boost effect in place, and returns the processed bitmap. Returns
/// null when the source frame has already been disposed.
/// </summary>
/// <param name="frame">Frame whose software bitmap is converted.</param>
public override SoftwareBitmap ConvertFrame(VideoMediaFrame frame)
{
    try
    {
        SoftwareBitmap converted = SoftwareBitmap.Convert(frame.SoftwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
        // BoostGreen mutates the converted copy in place.
        BoostGreen(converted);
        return converted;
    }
    catch (ObjectDisposedException)
    {
        // The frame was released mid-conversion; drop it.
        return null;
    }
}
/// <summary>
/// Converts a frame to a SoftwareBitmap in a format a XAML Image control can
/// display. Bgra8/premultiplied frames are copied as-is; Gray16 frames get a
/// depth or infrared pseudo-color rendering; Gray8 frames get the 8-bit
/// infrared pseudo-color; anything else is converted to Bgra8/premultiplied.
/// </summary>
/// <param name="inputFrame">Frame to convert; may be null.</param>
/// <returns>A displayable bitmap owned by the caller, or null when the frame
/// has no bitmap or its format cannot be converted.</returns>
public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
{
    using (var inputBitmap = inputFrame?.SoftwareBitmap)
    {
        if (inputBitmap == null)
        {
            return null;
        }
        if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Bgra8 && inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
        {
            // Already displayable; hand back an independent copy.
            return SoftwareBitmap.Copy(inputBitmap);
        }
        if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray16)
        {
            if (inputFrame.FrameReference.SourceKind == MediaFrameSourceKind.Depth)
            {
                // 16-bit depth: render with the depth pseudo-color map.
                var depthScale = (float)inputFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters;
                return TransformBitmap(inputBitmap, (w, i, o) => PseudoColorHelper.PseudoColorForDepth(w, i, o, depthScale));
            }
            // 16-bit non-depth (infrared): generic 16-bit pseudo-color.
            return TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor16BitInfrared);
        }
        if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray8)
        {
            return TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor8BitInfrared);
        }
        try
        {
            // Fall back to a straight conversion for any other format.
            return SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
        }
        catch (ArgumentException exception)
        {
            // Conversion of this format is not supported; drop the frame.
            System.Diagnostics.Debug.WriteLine(exception.Message);
            return null;
        }
    }
}
/// <summary>
/// Converts a frame to a SoftwareBitmap in a format a XAML Image control can
/// display. Bgra8/premultiplied frames are copied as-is; Gray16 frames are
/// pseudo-colored as depth when the media subtype is "D16", otherwise as
/// 16-bit infrared; Gray8 frames get the 8-bit infrared pseudo-color;
/// anything else is converted to Bgra8/premultiplied.
/// </summary>
/// <param name="inputFrame">Frame to convert; may be null.</param>
/// <returns>A displayable bitmap owned by the caller, or null.</returns>
public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
{
    using (var inputBitmap = inputFrame?.SoftwareBitmap)
    {
        if (inputBitmap == null)
        {
            return null;
        }
        if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Bgra8 && inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
        {
            // Already displayable; hand back an independent copy.
            return SoftwareBitmap.Copy(inputBitmap);
        }
        if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray16)
        {
            // The "D16" media subtype marks a depth stream.
            string subtype = inputFrame.VideoFormat.MediaFrameFormat.Subtype;
            bool isDepth = string.Equals(subtype, "D16", StringComparison.OrdinalIgnoreCase);
            return isDepth
                ? TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorForDepth)
                : TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor16BitInfrared);
        }
        if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray8)
        {
            return TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor8BitInfrared);
        }
        try
        {
            // Fall back to a straight conversion for any other format.
            return SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
        }
        catch (ArgumentException exception)
        {
            // Conversion of this format is not supported; drop the frame.
            System.Diagnostics.Debug.WriteLine(exception.Message);
            return null;
        }
    }
}
/// <summary>
/// Encodes the frame's bitmap as JPEG and returns the encoded bytes.
/// Returns null when the frame, its video data, or its bitmap is missing.
/// </summary>
/// <param name="frame">Frame reference to encode; may be null.</param>
/// <returns>JPEG bytes, or null when no bitmap was available.</returns>
async Task <byte[]> GetFrameData(MediaFrameReference frame)
{
    byte[] bytes = null;
    if (frame == null)
    {
        return(bytes);
    }
    VideoMediaFrame videoMediaFrame = frame.VideoMediaFrame;
    if (videoMediaFrame == null)
    {
        return(bytes);
    }
    VideoFrame videoFrame = videoMediaFrame.GetVideoFrame();
    SoftwareBitmap softwareBitmap = videoFrame.SoftwareBitmap;
    if (softwareBitmap == null)
    {
        return(bytes);
    }
    // BUG FIX: the converted bitmap was never disposed (leak per frame).
    using (SoftwareBitmap bitmapBGRA8 = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore))
    using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
    {
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
        // Set the software bitmap
        encoder.SetSoftwareBitmap(bitmapBGRA8);
        encoder.IsThumbnailGenerated = false;
        try
        {
            await encoder.FlushAsync();
            // BUG FIX: after FlushAsync the stream position sits at the end, so
            // the original ReadAsync read nothing and returned a zero-filled
            // buffer. Rewind before reading, and loop until the buffer is full
            // since Stream.ReadAsync may return fewer bytes than requested.
            stream.Seek(0);
            bytes = new byte[stream.Size];
            var ioStream = stream.AsStream();
            int offset = 0;
            while (offset < bytes.Length)
            {
                int read = await ioStream.ReadAsync(bytes, offset, bytes.Length - offset);
                if (read == 0)
                {
                    break;
                }
                offset += read;
            }
        }
        catch (Exception e)
        {
            // BUG FIX: corrected "expceiton" typo in the log message.
            Debug.WriteLine($"Error while trying to encode frame into a byte array, exception {e.Message}");
        }
    }
    return(bytes);
}
/// <summary>
/// Renders a D16 depth frame with the depth pseudo-color map, scaled by the
/// frame's depth-to-meters factor and clamped to its reliable depth range.
/// Returns null when the bitmap is not in the expected Gray16 format.
/// </summary>
/// <param name="inputFrame">Frame supplying the depth metadata.</param>
/// <param name="inputBitmap">Raw Gray16 bitmap to transform.</param>
static unsafe SoftwareBitmap ConvertDepthFrame(VideoMediaFrame inputFrame, SoftwareBitmap inputBitmap)
{
    // We requested D16 from the MediaFrameReader, so anything but Gray16 is unexpected.
    if (inputBitmap.BitmapPixelFormat != BitmapPixelFormat.Gray16)
    {
        return null;
    }
    var depth = inputFrame.DepthMediaFrame;
    var scaleInMeters = (float)depth.DepthFormat.DepthScaleInMeters;
    var minDepth = depth.MinReliableDepth;
    var maxDepth = depth.MaxReliableDepth;
    return TransformBitmap(inputBitmap, (width, input, output) => PseudoColorHelper.PseudoColorForDepth(width, input, output, scaleInMeters, minDepth, maxDepth));
}
/// <summary>
/// Copies the incoming frame into an <c>ImageFrame</c> (illumination info,
/// timestamp, and an independent pixel copy) and enqueues it, dropping and
/// disposing the oldest entry when the queue exceeds <c>MaxQueueCount</c>.
/// Signals <c>_ImageAutoEvent</c> once the frame is queued.
/// </summary>
/// <param name="videoMediaframe">Source frame; its bitmap is copied, not retained.</param>
/// <param name="timeStamp">Frame timestamp; stored as ticks.</param>
private void ProcessVideoFrame(VideoMediaFrame videoMediaframe, TimeSpan timeStamp)
{
    try
    {
        ImageFrame imageFrame = new ImageFrame();
        imageFrame.FrameIllumination = GetLuminanceInfomation(videoMediaframe);
        imageFrame.Timestamp = (UInt64)timeStamp.Ticks;
        // Copy the pixels so the queued frame outlives the source frame.
        imageFrame.OriginalBitmap = new SoftwareBitmap(videoMediaframe.SoftwareBitmap.BitmapPixelFormat, (int)videoMediaframe.VideoFormat.Width, (int)videoMediaframe.VideoFormat.Height);
        videoMediaframe.SoftwareBitmap.CopyTo(imageFrame.OriginalBitmap);
        _frameQueue.Enqueue(imageFrame);
        // Bound the queue: drop (and dispose) the oldest frame when full.
        if (_frameQueue.Count > MaxQueueCount)
        {
            _frameQueue.Dequeue().Dispose();
        }
        _ImageAutoEvent.Set();
    }
    catch (Exception)
    {
        // BUG FIX: 'throw ex;' rethrew with a reset stack trace; 'throw;'
        // preserves the original trace for diagnosis.
        throw;
    }
}
/// <summary>
/// Maps the frame's infrared illumination state to a LuminanceInfo value:
/// Light when the IR emitter was on, Dark when off, and
/// AmbientSubtractionEnabled for frames that carry no infrared data.
/// </summary>
/// <param name="videoFrame">Frame to inspect; InfraredMediaFrame may be null.</param>
private LuminanceInfo GetLuminanceInfomation(VideoMediaFrame videoFrame)
{
    // BUG FIX: the original dereferenced InfraredMediaFrame unconditionally and
    // rethrew the resulting NullReferenceException with 'throw ex;' (losing the
    // stack trace), making the AmbientSubtractionEnabled default unreachable.
    // Non-infrared frames now fall back to that default, as the initializer
    // clearly intended.
    bool? isIlluminated = videoFrame.InfraredMediaFrame?.IsIlluminated;
    if (isIlluminated == null)
    {
        return LuminanceInfo.AmbientSubtractionEnabled;
    }
    return isIlluminated.Value ? LuminanceInfo.Light : LuminanceInfo.Dark;
}
/// <summary>
/// Serializes a video frame to <c>videoStream</c> as a 16-byte header
/// (focal lengths as floats, then image width/height as uints) followed by
/// the raw NV12 pixel payload (width * height * 3/2 bytes). Converts the
/// bitmap to NV12 first if necessary.
/// </summary>
/// <param name="videoMediaFrame">Frame to serialize; must carry camera intrinsics and a bitmap.</param>
unsafe private void ProcessVideoFrame(VideoMediaFrame videoMediaFrame)
{
    float focalX = videoMediaFrame.CameraIntrinsics.FocalLength.X;
    float focalY = videoMediaFrame.CameraIntrinsics.FocalLength.Y;
    uint imageWidth = videoMediaFrame.CameraIntrinsics.ImageWidth;
    uint imageHeight = videoMediaFrame.CameraIntrinsics.ImageHeight;
    SoftwareBitmap softwareBitmap = videoMediaFrame.SoftwareBitmap;
    SoftwareBitmap converted = null;
    byte[] buf = new byte[4 * 4 + imageWidth * imageHeight * 3 / 2];
    try
    {
        if (softwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Nv12)
        {
            converted = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Nv12);
            softwareBitmap = converted;
        }
        // BUG FIX: the BitmapBuffer and its memory-buffer reference were never
        // released, keeping the bitmap's backing memory pinned.
        using (BitmapBuffer bitmapBuffer = softwareBitmap.LockBuffer(BitmapBufferAccessMode.Read))
        using (IMemoryBufferReference reference = bitmapBuffer.CreateReference())
        {
            fixed(byte *pBufByte = buf)
            {
                float *pBufFloat = (float *)pBufByte;
                uint * pBufUint = (uint *)pBufByte;
                byte * dataInBytes;
                uint capacityInBytes;
                ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);
                // 16-byte header: focal lengths then image dimensions.
                pBufFloat[0] = focalX;
                pBufFloat[1] = focalY;
                pBufUint[2] = imageWidth;
                pBufUint[3] = imageHeight;
                // NV12 payload: width*height luma plus width*height/2 chroma.
                for (uint i = 0; i < imageWidth * imageHeight * 3 / 2; i++)
                {
                    pBufByte[4 * 4 + i] = dataInBytes[i];
                }
            }
        }
    }
    finally
    {
        // BUG FIX: dispose the intermediate NV12 conversion; the original
        // bitmap still belongs to the frame and is not ours to dispose.
        converted?.Dispose();
    }
    // NOTE(review): WriteAsync is fire-and-forget here — confirm that write
    // ordering and back-pressure are acceptable for this stream.
    videoStream.WriteAsync(buf.AsBuffer());
}
/// <summary>
/// Acquires the latest frame from the reader and packages it as a pooled
/// CameraFrameInternal with timing, exposure, gain, extrinsics, and
/// intrinsics filled in. Returns null when no frame or bitmap is available.
/// </summary>
/// <param name="frameReader">Reader to poll; may be null.</param>
private CameraFrameInternal GetFrameFromMediaFrameReader(MediaFrameReader frameReader)
{
    // get the latest frame
    MediaFrameReference frameReference = frameReader?.TryAcquireLatestFrame();
    VideoMediaFrame videoFrame = frameReference?.VideoMediaFrame;
    SoftwareBitmap frameBmp = videoFrame?.SoftwareBitmap;
    CameraFrameInternal frame = null;
    if (frameBmp != null)
    {
        // get a camera frame and populate with the correct data for this frame - acquire copies the bitmap to the frame
        frame = framePool.AcquireFrame(frameBmp, desiredPixelFormat);
        frame.PixelFormat = desiredPixelFormat;
        frame.Resolution = Resolution;
        frame.FrameTime = frameReference.SystemRelativeTime.HasValue ? frameReference.SystemRelativeTime.Value.TotalSeconds : 0.0;
        frame.Exposure = frameReference.Duration.TotalSeconds;
        frame.Gain = Gain;
        if (KeepSoftwareBitmap)
        {
            // BUG FIX: frameBmp is owned by frameReference, which is disposed
            // below; storing it directly handed callers an already-invalidated
            // bitmap. Keep an independent copy instead.
            frame.SoftwareBitmap = SoftwareBitmap.Copy(frameBmp);
        }
        frameBmp.Dispose();
        // extrinsics and intrinsics
        frame.Extrinsics = GetExtrinsics(frameReference.CoordinateSystem);
        frame.Intrinsics = ConvertIntrinsics(frameReference.VideoMediaFrame.CameraIntrinsics);
    }
    frameReference?.Dispose();
    return(frame);
}
/// <summary>
/// Converts a frame to a SoftwareBitmap of a valid format to display in an
/// Image control. The conversion runs on a thread-pool thread; the frame's
/// source kind selects the color, depth, or infrared conversion path. The
/// input bitmap is disposed once the conversion finishes.
/// </summary>
/// <param name="inputFrame">Frame to convert; must be non-null with a bitmap.</param>
public static async Task <SoftwareBitmap> ConvertToDisplayableImageAsync(VideoMediaFrame inputFrame)
{
    Debug.Assert(inputFrame != null);
    var inputBitmap = inputFrame.SoftwareBitmap;
    Debug.Assert(inputBitmap != null);
    SoftwareBitmap result = null;
    // Local function keeps the off-thread work named and readable.
    void ConvertOffThread()
    {
        try
        {
            var kind = inputFrame.FrameReference.SourceKind;
            if (kind == MediaFrameSourceKind.Color)
            {
                result = ConvertColorFrame(inputBitmap);
            }
            else if (kind == MediaFrameSourceKind.Depth)
            {
                result = ConvertDepthFrame(inputFrame, inputBitmap);
            }
            else if (kind == MediaFrameSourceKind.Infrared)
            {
                result = ConvertInfraredFrame(inputBitmap);
            }
        }
        catch (ObjectDisposedException)
        {
            // Frame released mid-conversion; result stays null.
        }
        inputBitmap.Dispose();
    }
    await Task.Run(ConvertOffThread);
    Debug.Assert(result != null);
    return result;
}
/// <summary>
/// Converts a frame to a SoftwareBitmap of a valid format to display in an Image control.
/// Dispatches on the frame's source kind: color frames are copied or alpha-fixed,
/// depth frames are pseudo-colored using the frame's depth scale and reliable range,
/// and infrared frames are pseudo-colored by bit depth. Returns null (and logs)
/// for unexpected formats or a missing bitmap.
/// </summary>
/// <param name="inputFrame">Frame to convert.</param>
public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
{
    SoftwareBitmap result = null;
    using (var inputBitmap = inputFrame?.SoftwareBitmap)
    {
        if (inputBitmap != null)
        {
            switch (inputFrame.FrameReference.SourceKind)
            {
                case MediaFrameSourceKind.Color:
                    // XAML requires Bgra8 with premultiplied alpha.
                    // We requested Bgra8 from the MediaFrameReader, so all that's
                    // left is fixing the alpha channel if necessary.
                    if (inputBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8)
                    {
                        Debug.WriteLine("Color frame in unexpected format.");
                    }
                    else if (inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
                    {
                        // Already in the correct format.
                        result = SoftwareBitmap.Copy(inputBitmap);
                    }
                    else
                    {
                        // Convert to premultiplied alpha.
                        result = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                    }
                    break;
                case MediaFrameSourceKind.Depth:
                    // We requested D16 from the MediaFrameReader, so the frame should
                    // be in Gray16 format.
                    if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray16)
                    {
                        // Use a special pseudo color to render 16 bits depth frame.
                        var depthScale = (float)inputFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters;
                        var minReliableDepth = inputFrame.DepthMediaFrame.MinReliableDepth;
                        var maxReliableDepth = inputFrame.DepthMediaFrame.MaxReliableDepth;
                        result = TransformBitmap(inputBitmap, (w, i, o) => PseudoColorHelper.PseudoColorForDepth(w, i, o, depthScale, minReliableDepth, maxReliableDepth));
                    }
                    else
                    {
                        Debug.WriteLine("Depth frame in unexpected format.");
                    }
                    break;
                case MediaFrameSourceKind.Infrared:
                    // We requested L8 or L16 from the MediaFrameReader, so the frame should
                    // be in Gray8 or Gray16 format.
                    switch (inputBitmap.BitmapPixelFormat)
                    {
                        case BitmapPixelFormat.Gray16:
                            // Use pseudo color to render 16 bits frames.
                            result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor16BitInfrared);
                            break;
                        case BitmapPixelFormat.Gray8:
                            // Use pseudo color to render 8 bits frames.
                            result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor8BitInfrared);
                            break;
                        default:
                            Debug.WriteLine("Infrared frame in unexpected format.");
                            break;
                    }
                    break;
            }
        }
    }
    return(result);
}
/// <summary>
/// Converts a frame to a SoftwareBitmap in a format a XAML Image control can
/// display. Bgra8/premultiplied frames are copied; Gray16 frames are
/// pseudo-colored as depth (when the source is a depth stream) or as 16-bit
/// infrared; Gray8 frames get the 8-bit infrared pseudo-color; anything else
/// is converted to Bgra8/premultiplied.
/// </summary>
/// <param name="inputFrame">Frame to convert; may be null.</param>
/// <returns>A displayable bitmap owned by the caller, or null.</returns>
public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
{
    SoftwareBitmap result = null;
    using (var inputBitmap = inputFrame?.SoftwareBitmap)
    {
        if (inputBitmap != null)
        {
            switch (inputBitmap.BitmapPixelFormat)
            {
                case BitmapPixelFormat.Bgra8 when inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied:
                    // Already displayable; hand back an independent copy.
                    result = SoftwareBitmap.Copy(inputBitmap);
                    break;
                case BitmapPixelFormat.Gray16 when inputFrame.FrameReference.SourceKind == MediaFrameSourceKind.Depth:
                    // 16-bit depth: render with the depth pseudo-color map.
                    var depthScale = (float)inputFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters;
                    result = TransformBitmap(inputBitmap, (w, i, o) => PseudoColorHelper.PseudoColorForDepth(w, i, o, depthScale));
                    break;
                case BitmapPixelFormat.Gray16:
                    // 16-bit non-depth: generic infrared pseudo-color.
                    result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor16BitInfrared);
                    break;
                case BitmapPixelFormat.Gray8:
                    result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor8BitInfrared);
                    break;
                default:
                    try
                    {
                        // Fall back to a straight conversion for any other format
                        // (including Bgra8 without premultiplied alpha).
                        result = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                    }
                    catch (ArgumentException exception)
                    {
                        // Conversion of this format is not supported; drop the frame.
                        System.Diagnostics.Debug.WriteLine(exception.Message);
                    }
                    break;
            }
        }
    }
    return result;
}
/// <summary>
/// Runs the ONNX object detector over a video frame, converts each detection
/// with probability above 0.5 from normalized image coordinates into world
/// space, and forwards the result to the Unity layer. World positions are
/// estimated by unprojecting the box center at unit depth and scaling by a
/// depth guessed from the detection's apparent width.
/// </summary>
/// <param name="frame">Frame fed to the model; skipped when null.</param>
/// <param name="VideoFrame">Same frame's media wrapper, supplying camera intrinsics and depth data.</param>
/// <param name="worldCoordinateSystem">World coordinate system to project results into.</param>
/// <param name="cameraCoordinateSystem">Camera coordinate system the frame was captured in.</param>
public async Task EvaluateVideoFrameAsync(VideoFrame frame, VideoMediaFrame VideoFrame, SpatialCoordinateSystem worldCoordinateSystem, SpatialCoordinateSystem cameraCoordinateSystem) // <-- 2
{
    if (frame != null)
    {
        try
        {
            TimeRecorder.Restart();
            // A matrix to transform camera coordinate system to world coordinate system
            // NOTE(review): TryGetTransformTo returns null when no transform exists;
            // the cast would then throw — confirm this is handled by the catch below intentionally.
            Matrix4x4 cameraToWorld = (Matrix4x4)cameraCoordinateSystem.TryGetTransformTo(worldCoordinateSystem);
            // Internal orientation of camera
            CameraIntrinsics cameraIntrinsics = VideoFrame.CameraIntrinsics;
            // The frame of depth camera
            DepthMediaFrame depthFrame = VideoFrame.DepthMediaFrame;
            // not working, cause error
            // DepthCorrelatedCoordinateMapper depthFrameMapper = depthFrame.TryCreateCoordinateMapper(cameraIntrinsics, cameraCoordinateSystem);
            ONNXModelInput inputData = new ONNXModelInput();
            inputData.Data = frame;
            var output = await Model.EvaluateAsync(inputData).ConfigureAwait(false); // <-- 3
            TimeRecorder.Stop();
            string timeStamp = $"({DateTime.Now})"; // $" Evaluation took {TimeRecorder.ElapsedMilliseconds}ms\n";
            // NOTE(review): 'count' is never used below — candidate for removal.
            int count = 0;
            foreach (var prediction in output)
            {
                var product = prediction.TagName; // <-- 4
                var loss = prediction.Probability; // <-- 5
                // Only keep confident detections (probability > 0.5).
                if (loss > 0.5f)
                {
                    // Bounding box in normalized [0,1] image coordinates.
                    float left = prediction.BoundingBox.Left;
                    float top = prediction.BoundingBox.Top;
                    float right = prediction.BoundingBox.Left + prediction.BoundingBox.Width;
                    float bottom = prediction.BoundingBox.Top + prediction.BoundingBox.Height;
                    float x = prediction.BoundingBox.Left + prediction.BoundingBox.Width / 2;
                    float y = prediction.BoundingBox.Top + prediction.BoundingBox.Height / 2;
                    Direct3DSurfaceDescription pixelData = frame.Direct3DSurface.Description;
                    int height = pixelData.Height;
                    int width = pixelData.Width;
                    // Maps a normalized image point to a world-space position using
                    // the unit-depth unprojection scaled by an estimated distance.
                    Vector3 ImageToWorld(float X, float Y)
                    {
                        // remove image distortion
                        // Point objectCenterPoint = cameraIntrinsics.UndistortPoint(new Point(x, y));
                        // screen space -> camera space
                        // unproject pixel coordinate of object center towards a plane that is one meter from the camera
                        Vector2 objectCenter = cameraIntrinsics.UnprojectAtUnitDepth(new Point(X * width, Y * height));
                        // construct a ray towards object
                        Vector3 vectorTowardsObject = Vector3.Normalize(new Vector3(objectCenter.X, objectCenter.Y, -1.0f));
                        // estimate the vending machine distance by its width
                        // less accurate than use depth frame
                        // magic number 940 pixels in width for an average vending machine at 2m
                        // float estimatedVendingMachineDepth = (0.94f / prediction.BoundingBox.Width) * 2;
                        // NOTE(review): 0.3f/width * 1 appears tuned empirically — confirm against the target object size.
                        float estimatedVendingMachineDepth = (0.3f / prediction.BoundingBox.Width) * 1;
                        // times the vector towards object by the distance to get object's vector in camera coordinate system
                        Vector3 vectorToObject = vectorTowardsObject * estimatedVendingMachineDepth;
                        // camera space -> world space
                        // tranform the object postion from camera coordinate system to world coordinate system
                        Vector3 targetPositionInWorldSpace = Vector3.Transform(vectorToObject, cameraToWorld);
                        return(targetPositionInWorldSpace);
                    }
                    Vector3 objectCenterInWorld = ImageToWorld(x, y);
                    Vector3 objectTopLeft = ImageToWorld(left, top);
                    Vector3 objectTopRight = ImageToWorld(right, top);
                    Vector3 objectBotLeft = ImageToWorld(left, bottom);
                    // Physical width from the two top corners; height scaled from
                    // the pixel aspect ratio of the detection box.
                    float widthInWorld = Vector3.Distance(objectTopLeft, objectTopRight);
                    float heightInWorld = widthInWorld / (width * prediction.BoundingBox.Width) * (height * prediction.BoundingBox.Height);
                    var lossStr = (loss * 100.0f).ToString("#0.00") + "%";
                    // lossStr = $"{prediction.BoundingBox.Width*width}X{prediction.BoundingBox.Height*height}";
                    UnityApp.StoreNetworkResult(timeStamp, product, lossStr, objectCenterInWorld.X, objectCenterInWorld.Y, objectCenterInWorld.Z, widthInWorld, heightInWorld);
                }
            }
        }
        catch (Exception ex)
        {
            var err_message = $"{ex.Message}";
            ModifyText(err_message);
        }
    }
}
/// <summary>
/// Converts a frame to a SoftwareBitmap in a format a XAML Image control can
/// display, dispatching on the frame's source kind: color frames are copied
/// or alpha-fixed to Bgra8/premultiplied, depth frames are pseudo-colored
/// with the frame's depth scale and reliable range, and infrared frames are
/// pseudo-colored by bit depth. Unexpected formats are logged and dropped.
/// </summary>
/// <param name="inputFrame">Frame to convert; may be null.</param>
/// <returns>A displayable bitmap owned by the caller, or null.</returns>
public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
{
    SoftwareBitmap result = null;
    using (var inputBitmap = inputFrame?.SoftwareBitmap)
    {
        if (inputBitmap != null)
        {
            var sourceKind = inputFrame.FrameReference.SourceKind;
            if (sourceKind == MediaFrameSourceKind.Color)
            {
                if (inputBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8)
                {
                    Debug.WriteLine("Color frame in unexpected format.");
                }
                else if (inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
                {
                    // Already displayable; hand back an independent copy.
                    result = SoftwareBitmap.Copy(inputBitmap);
                }
                else
                {
                    // Fix the alpha mode for XAML.
                    result = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                }
            }
            else if (sourceKind == MediaFrameSourceKind.Depth)
            {
                if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray16)
                {
                    // Depth pseudo-color, clamped to the reliable depth range.
                    var depthScale = (float)inputFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters;
                    var minReliableDepth = inputFrame.DepthMediaFrame.MinReliableDepth;
                    var maxReliableDepth = inputFrame.DepthMediaFrame.MaxReliableDepth;
                    result = TransformBitmap(inputBitmap, (w, i, o) => PseudoColorHelper.PseudoColorForDepth(w, i, o, depthScale, minReliableDepth, maxReliableDepth));
                }
                else
                {
                    Debug.WriteLine("Depth frame in unexpected format.");
                }
            }
            else if (sourceKind == MediaFrameSourceKind.Infrared)
            {
                if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray16)
                {
                    result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor16BitInfrared);
                }
                else if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray8)
                {
                    result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor8BitInfrared);
                }
                else
                {
                    Debug.WriteLine("Infrared frame in unexpected format.");
                }
            }
        }
    }
    return(result);
}
/// <summary>
/// Converts a frame to a SoftwareBitmap of a valid format to display in an
/// Image control. This variant always renders with the depth pseudo-color
/// map, scaled by the frame's depth-to-meters factor and clamped to its
/// reliable depth range.
/// </summary>
/// <param name="inputFrame">Frame to convert; may be null.</param>
/// <returns>A pseudo-colored bitmap owned by the caller, or null when the
/// frame has no bitmap or carries no depth metadata.</returns>
public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
{
    SoftwareBitmap result = null;
    using (var inputBitmap = inputFrame?.SoftwareBitmap)
    {
        // BUG FIX: DepthMediaFrame is null for non-depth sources; the original
        // dereferenced it unconditionally and threw NullReferenceException.
        // Such frames are now dropped. (Also removed a large block of
        // commented-out legacy switch code that handled color/infrared kinds.)
        if (inputBitmap != null && inputFrame.DepthMediaFrame != null)
        {
            var depthScale = (float)inputFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters;
            var minReliableDepth = inputFrame.DepthMediaFrame.MinReliableDepth;
            var maxReliableDepth = inputFrame.DepthMediaFrame.MaxReliableDepth;
            result = TransformBitmap(inputBitmap, (w, i, o) => PseudoColorHelper.PseudoColorForDepth(w, i, o, depthScale, minReliableDepth, maxReliableDepth));
        }
    }
    return(result);
}
/// <summary>
/// Converts a <see cref="VideoMediaFrame"/> into a <see cref="SoftwareBitmap"/>;
/// implementations choose the output format and any per-frame processing.
/// </summary>
/// <param name="videoMediaFrame">Frame to convert.</param>
/// <returns>The converted bitmap, or null when conversion is not possible.</returns>
public abstract SoftwareBitmap ConvertFrame(VideoMediaFrame videoMediaFrame);