Example 1
        /// <summary>
        /// Invokes the callback with a new VideoFrameSample as soon as the next frame is available.
        /// This method is preferable to listening to the FrameSampleAcquired event
        /// in circumstances where most or all frames are not needed. For instance, if
        /// you plan to send frames to a remote image recognition service twice per second,
        /// consider using this method rather than ignoring most of the event dispatches from FrameSampleAcquired.
        /// This avoids the overhead of acquiring and disposing of unused frames.
        ///
        /// If, for whatever reason, a frame reference cannot be obtained, the callback may be invoked with a null sample.
        /// </summary>
        /// <param name="onFrameSampleAcquired">Callback invoked with the next available sample, or null if a frame reference could not be obtained.</param>
        public void RequestNextFrameSample(FrameSampleAcquiredCallback onFrameSampleAcquired)
        {
            if (onFrameSampleAcquired == null)
            {
                throw new ArgumentNullException(nameof(onFrameSampleAcquired));
            }

            if (IsStreaming == false)
            {
                throw new InvalidOperationException("You cannot request a frame sample until the video mode is started.");
            }

            TypedEventHandler<MediaFrameReader, MediaFrameArrivedEventArgs> handler = null;

            handler = (MediaFrameReader sender, MediaFrameArrivedEventArgs args) =>
            {
                using (var frameReference = _frameReader.TryAcquireLatestFrame()) //frame: MediaFrameReference
                {
                    if (frameReference != null)
                    {
                        onFrameSampleAcquired.Invoke(new VideoCaptureSample(frameReference, worldOrigin));
                    }
                    else
                    {
                        onFrameSampleAcquired.Invoke(null);
                    }
                }
                _frameReader.FrameArrived -= handler;
            };
            _frameReader.FrameArrived += handler;
        }
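
A minimal usage sketch for the method above (illustrative only: the _videoCapture and _sampleTimer fields are hypothetical, and the 500 ms period mirrors the twice-per-second scenario from the summary):

        private System.Threading.Timer _sampleTimer;
        private VideoCapture _videoCapture; // hypothetical field holding an instance of the class above

        public void StartSampling()
        {
            // Request a single frame on each tick instead of subscribing to
            // FrameSampleAcquired and discarding most dispatches.
            _sampleTimer = new System.Threading.Timer(_ =>
            {
                _videoCapture.RequestNextFrameSample(sample =>
                {
                    if (sample == null)
                    {
                        return; // no frame reference could be obtained
                    }
                    // Consume the sample here, e.g. copy its bytes and send them
                    // to a remote image recognition service.
                });
            }, null, 0, 500);
        }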
        /// <summary>
        /// Acquires the latest depth frame and, on success, invokes the supplied action
        /// with the raw 16-bit depth pixels.
        /// </summary>
        /// <param name="photoAction">Invoked with two placeholder (zero) matrices, the raw pixel bytes, and the frame height and width.</param>
        /// <returns>True if a frame was captured; false otherwise.</returns>
        public bool TakePhoto(Action<Matrix4x4, Matrix4x4, List<byte>, int, int> photoAction)
        {
#if WINDOWS_UWP
            using (MediaFrameReference frame = _depthReader.TryAcquireLatestFrame())
            {
                if (frame == null)
                {
                    return false;
                }

                var videoFrame = frame.VideoMediaFrame;
                using (var bitmap = videoFrame.SoftwareBitmap)
                {
                    if (bitmap == null)
                    {
                        return false;
                    }

                    int width  = bitmap.PixelWidth;
                    int height = bitmap.PixelHeight;

                    var bytes = new byte[width * height * 2]; // two bytes per pixel (16-bit depth)

                    bitmap.CopyToBuffer(bytes.AsBuffer());
                    photoAction?.Invoke(Matrix4x4.zero, Matrix4x4.zero, bytes.ToList(), height, width);
                    return true;
                }
            }
#else
            return false;
#endif
        }
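
An illustrative call site for TakePhoto (the _capture field is hypothetical; note that the implementation above passes height before width):

        public void CaptureDepthOnce()
        {
            bool ok = _capture.TakePhoto((camToWorld, projection, pixels, height, width) =>
            {
                // camToWorld and projection are always Matrix4x4.zero in the
                // implementation above; pixels holds width * height two-byte samples.
                System.Diagnostics.Debug.WriteLine($"Depth frame {width}x{height}: {pixels.Count} bytes");
            });
        }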
Example 3
        private void HandleFrameArrive(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            // Dispose the frame promptly so the reader can recycle its buffers.
            using (var frame = sender.TryAcquireLatestFrame())
            {
                var inputBitmap = frame?.VideoMediaFrame?.SoftwareBitmap;
                if (inputBitmap != null)
                {
                    SoftwareBitmap originalBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

                    SoftwareBitmap outputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, originalBitmap.PixelWidth, originalBitmap.PixelHeight, BitmapAlphaMode.Premultiplied);

                    _helper.Process(originalBitmap, outputBitmap);

                    var localBitmap = SoftwareBitmap.Copy(outputBitmap);
                    GetOCRAsync(localBitmap);

                    LatestPreviewFrame   = SoftwareBitmap.Copy(originalBitmap);
                    LatestProcessedFrame = SoftwareBitmap.Copy(outputBitmap);
                    OnFrameProcessed(new FrameHandlerEventArgs());
                }
            }
        }
    private unsafe void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        // TryAcquireLatestFrame will return the latest frame that has not yet been acquired.
        // This can return null if there is no such frame, or if the reader is not in the
        // "Started" state. The latter can occur if a FrameArrived event was in flight
        // when the reader was stopped.
        if (onFrameArrivedProcessing)
        {
            Debug.Log(TAG + " " + id + " OnFrameArrived() is still processing");
            return;
        }
        onFrameArrivedProcessing = true;
        try
        {
            using (var frame = sender.TryAcquireLatestFrame())
            {
                var inputBitmap = frame?.VideoMediaFrame?.SoftwareBitmap;
                if (inputBitmap != null)
                {
                    var softwareBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);

                    //using (var input = softwareBitmap.LockBuffer(BitmapBufferAccessMode.Read))
                    //using (var inputReference = input.CreateReference()) {
                    //    byte* inputBytes;
                    //    uint inputCapacity;
                    //    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputBytes, out inputCapacity);
                    //    downPtr = (IntPtr)inputBytes;
                    //    Interlocked.Exchange(ref upPtr, downPtr);
                    //}
                    //Debug.Log(TAG + " OnFrameArrived() thread ID is " + Thread.CurrentThread.ManagedThreadId);

                    Interlocked.Exchange(ref upBitmap, softwareBitmap);
                }
            }
        }
        finally
        {
            // Reset the flag even if frame processing throws.
            onFrameArrivedProcessing = false;
        }
    }
        /// <summary>
        /// Starts the camera and evaluates video frames every <see cref="PredictionFrequency"/>.
        /// </summary>
        public void StartPullCameraFrames()
        {
            Cts = new CancellationTokenSource();
            Task.Run(async () => {
                while (!Cts.Token.IsCancellationRequested)
                {
                    FramesCaptured++;
                    await Task.Delay(PredictionFrequency);
                    using (var frameReference = CameraFrameReader.TryAcquireLatestFrame())
                        using (var videoFrame = frameReference?.VideoMediaFrame?.GetVideoFrame()) {
                            if (videoFrame == null)
                            {
                                continue; //ignoring frame
                            }

                            if (videoFrame.Direct3DSurface == null)
                            {
                                videoFrame.Dispose();
                                continue; //ignoring frame
                            }

                            try {
                                await ModelProcessor.EvaluateVideoFrameAsync(videoFrame).ConfigureAwait(false);
                            } catch (Exception ex) {
                                System.Diagnostics.Debug.WriteLine(ex.Message);
                            }
                        }
                }
            });
        }
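
A natural counterpart is a stop method that cancels the token driving the loop above (illustrative sketch reusing the same Cts field):

        public void StopPullCameraFrames()
        {
            // Cancelling the token ends the while loop in StartPullCameraFrames
            // on its next iteration.
            Cts?.Cancel();
        }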
Example 6
    public void StartPullFrames(MediaFrameReader sender)
    {
        Task.Run(async () =>
        {
            while (true)
            {
                if (bCaptureStart)
                {
                    bCaptureStart = false;

                    MediaFrameReference frameReference = sender.TryAcquireLatestFrame();
                    VideoFrame videoFrame = frameReference?.VideoMediaFrame?.GetVideoFrame();

                    #region Dominant color extraction
                    //SoftwareBitmap bitmap = videoFrame.SoftwareBitmap;
                    //byte[] bytes;
                    //WriteableBitmap newBitmap = new WriteableBitmap(bitmap.PixelWidth, bitmap.PixelHeight);
                    //bitmap.CopyToBuffer(newBitmap.PixelBuffer);
                    //using (Stream stream = newBitmap.PixelBuffer.AsStream())
                    //using (MemoryStream memoryStream = new MemoryStream())
                    //{
                    //    stream.CopyTo(memoryStream);
                    //    bytes = memoryStream.ToArray();
                    //}
                    //ImageCapture.Instance.ByteToTexture(bytes, bitmap.PixelWidth, bitmap.PixelHeight);
                    #endregion

                    if (videoFrame == null)
                    {
                        ImageCapture.Instance.iState = -3;
                        //Debug.Log("No VideoFrame");

                        bFinalizeStart = true;
                        continue;                         //ignoring frame
                    }

                    if (videoFrame.Direct3DSurface == null)
                    {
                        ImageCapture.Instance.iState = -3;
                        //Debug.Log("No D3DSurface");

                        bFinalizeStart = true;
                        continue;                         //ignoring frame
                    }

                    try
                    {
                        await LoadAndEvaluateModelAsync(videoFrame);
                    }
                    catch (Exception e)
                    {
                        //Debug.Log(e.Message);
                        bFinalizeStart = true;
                    }
                }
                await Task.Delay(predictEvery);
            }
        });
    }
        static async Task CameraProcessingAsync(Model model, MediaFrameReader reader, EventWaitHandle evtframe, AzureConnection azure)
        {
            var    fps_t0     = DateTime.Now;
            string prev_label = null;

            double fps = 0.0;

            for (UInt64 total_frame = 0, current_frame = 0; ; ++total_frame)
            {
                if (Model.Full)
                {
                    evtframe.WaitOne();
                    evtframe.Reset();
                }
                //auto input_feature{ImageFeatureValue::CreateFromVideoFrame(vf.get())};
                var frame = reader.TryAcquireLatestFrame();
                if (frame == null)
                {
                    // assume 60 fps, wait about half a frame for more input.
                    // in the unlikely event that eval is faster than capture this should be done differently
                    Thread.Sleep(10);
                    continue;
                }
                ++current_frame;
                //int oldFrame, double oldFps, DateTime oldFpsT0, string oldLabel)
                (current_frame, fps, fps_t0, prev_label) = await FrameProcessingAsync(model, frame, total_frame, current_frame, fps, fps_t0, prev_label, azure);
            }
        }
Example 8
        protected async void FrameReaderOnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            try
            {
                //MediaFrameReference mediaFrameReference = sender.TryAcquireLatestFrame( );

                //if (mediaFrameReference != null)
                //{
                //    var imageDataFrame = mediaFrameReference.BufferMediaFrame;
                //    var imageDataBuffer = imageDataFrame.Buffer;
                //    var imageData = imageDataBuffer.ToArray( );

                //    mediaFrameReference.Dispose( );
                //}

                using (var frame = sender.TryAcquireLatestFrame())
                {
                    var bitmap = frame?.VideoMediaFrame?.SoftwareBitmap;
                    if (bitmap != null)
                    {
                        // Copy the bitmap: the original is owned by the frame and
                        // becomes invalid once the frame is disposed.
                        frameQueue.Add(SoftwareBitmap.Copy(bitmap));
                    }
                }


                //frameReader?.Dispose( );
            }
            catch (Exception)
            {
                await frameReader.StopAsync();
            }
        }
Example 9
        private void ColorReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            var subscribers = FrameArrived;

            if (subscribers != null)
            {
                Task.Run(() =>
                {
                    var frame = sender.TryAcquireLatestFrame();
                    if (frame != null)
                    {
                        Sensor.GetCoordinateMapper().UpdateFromColorFrame(frame.CoordinateSystem);
                        var bitmap = frame.VideoMediaFrame.SoftwareBitmap;
                        if (ReaderConfiguration.HasFlag(ReaderConfig.HalfResolution))
                        {
                            var original = bitmap;
                            bitmap       = bitmap.Downsize();
                            original.Dispose();
                        }

                        var colorArgs =
                            new ColorFrameArrivedEventArgs(
                                this,
                                bitmap,
                                new CameraIntrinsics(frame.VideoMediaFrame.CameraIntrinsics));

                        subscribers(this, colorArgs);
                        frame.Dispose();
                    }
                });
            }
        }
Example 10
        /// <summary>
        /// Handles a frame arrived event and renders the frame to the screen.
        /// </summary>
        private void ColorFrameReader_FrameArrivedAsync(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            var frame = sender.TryAcquireLatestFrame();

            if (frame != null)
            {
                SoftwareBitmap originalBitmap = null;
                var            inputBitmap    = frame.VideoMediaFrame?.SoftwareBitmap;
                if (inputBitmap != null)
                {
                    // The XAML Image control can only display images in BGRA8 format with premultiplied or no alpha.
                    // The frame reader as configured in this sample gives BGRA8 with straight alpha, so we need to convert it.
                    originalBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

                    SoftwareBitmap outputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, originalBitmap.PixelWidth, originalBitmap.PixelHeight, BitmapAlphaMode.Premultiplied);

                    // Operate on the image in the manner chosen by the user.
                    if (currentOperation == OperationType.FindChessboardCorners)
                    {
                        if (_currentChessParameters.isValid)
                        {
                            _helper.DrawChessboard(originalBitmap, outputBitmap, _currentChessParameters.chessX, _currentChessParameters.chessY, _currentChessParameters.squareSizeMeters, SavingDetectedCorners);
                        }
                    }

                    // Display both the original bitmap and the processed bitmap.
                    _previewRenderer.RenderFrame(originalBitmap);
                    _outputRenderer.RenderFrame(outputBitmap);
                }

                Interlocked.Increment(ref _frameCount);
            }
        }
Example 11
    /// <summary>
    /// The callback triggered when a new video preview frame arrives. Here the video
    /// frame is saved for the Unity UI if videoPreview is enabled, the tracking task
    /// is triggered, and the video FPS is recorded. [internal use]
    /// </summary>
    /// <param name="sender">MediaFrameReader object</param>
    /// <param name="args">arguments not used here</param>
    private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        ARUWPUtils.VideoTick();
        using (var frame = sender.TryAcquireLatestFrame()) {
            if (frame != null)
            {
                float[] cameraToWorldMatrixAsFloat;
                if (TryGetCameraToWorldMatrix(frame, out cameraToWorldMatrixAsFloat) == false)
                {
                    return;
                }

                Interlocked.Exchange(ref _cameraToWorldMatrix, cameraToWorldMatrixAsFloat);

                var originalSoftwareBitmap = frame.VideoMediaFrame.SoftwareBitmap;
                var softwareBitmap         = SoftwareBitmap.Convert(originalSoftwareBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);
                originalSoftwareBitmap?.Dispose();
                if (videoPreview)
                {
                    Interlocked.Exchange(ref _bitmap, softwareBitmap);
                    controller.ProcessFrameAsync(SoftwareBitmap.Copy(softwareBitmap));
                }
                else
                {
                    controller.ProcessFrameAsync(softwareBitmap);
                }
                signalTrackingUpdated = true;
            }
        }
    }
Example 12
        private void NewFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            // Restart() resets and starts the stopwatch whether or not it was already running.
            StreamFaultTimer.Restart();
            MediaFrameReference LatestFrame = sender.TryAcquireLatestFrame();

            if (LatestFrame != null)
            {
                VideoMediaFrame LatestVideoFrame = LatestFrame.VideoMediaFrame;
                if (LatestVideoFrame.SoftwareBitmap == null)
                {
                    HandleFrame(Convert.Direct3dToSKImage(LatestVideoFrame.Direct3DSurface));
                }
                else
                {
                    HandleFrame(Convert.SoftwareBitmapToSKImage(LatestVideoFrame.SoftwareBitmap));
                }
                if (LatestVideoFrame.Direct3DSurface != null)
                {
                    LatestVideoFrame.Direct3DSurface.Dispose();
                }
                if (LatestVideoFrame.SoftwareBitmap != null)
                {
                    LatestVideoFrame.SoftwareBitmap.Dispose();
                }
                LatestFrame.Dispose();
            }
        }
Example 13
        async void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            if (Interlocked.CompareExchange(ref this.processingFlag, 1, 0) == 0)
            {
                try
                {
                    using (var frame = sender.TryAcquireLatestFrame())
                        using (var videoFrame = frame?.VideoMediaFrame?.GetVideoFrame())
                        {
                            if (videoFrame != null)
                            {
                                // If there is a frame, set it as input to the model
                                ONNXModelInput input = new ONNXModelInput();
                                input.data = videoFrame;
                                // Evaluate the input data
                                var evalOutput = await model.EvaluateAsync(input);

                                // Do something with the model output
                                await this.ProcessOutputAsync(evalOutput);
                            }
                        }
                }
                finally
                {
                    Interlocked.Exchange(ref this.processingFlag, 0);
                }
            }
        }
        void OnFrameArrived(MediaFrameReader sender)
        {
            var frame = sender.TryAcquireLatestFrame();

            if (frame != null)
            {
                switch (frame.SourceKind)
                {
                case MediaFrameSourceKind.Custom:
                    this.ProcessCustomFrame(frame);
                    break;

                case MediaFrameSourceKind.Color:
                    this.ProcessColorFrame(frame);
                    break;

                case MediaFrameSourceKind.Infrared:
                    break;

                case MediaFrameSourceKind.Depth:
                    this.ProcessDepthFrame(frame);
                    break;

                default:
                    break;
                }
                frame.Dispose();
            }
        }
Example 15
    private void ColorFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        var mediaFrameReference = sender.TryAcquireLatestFrame();
        var videoMediaFrame     = mediaFrameReference?.VideoMediaFrame;
        var softwareBitmap      = videoMediaFrame?.SoftwareBitmap;

        if (softwareBitmap != null)
        {
            if (softwareBitmap.BitmapPixelFormat != Windows.Graphics.Imaging.BitmapPixelFormat.Bgra8 ||
                softwareBitmap.BitmapAlphaMode != Windows.Graphics.Imaging.BitmapAlphaMode.Premultiplied)
            {
                softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
            }
            softwareBitmap.CopyToBuffer(buffer);
            buffer2 = buffer.ToArray();

            result = DecodeBufferToQRCode(buffer2, softwareBitmap.PixelWidth, softwareBitmap.PixelHeight, BitmapFormat.BGRA32);
            if (result != null)
            {
                if (ScanSucessfull != null)
                {
                    ScanSucessfull(result.Text);
                }
                print("Call send to zxing, result : " + result.Text);
            }
        }
        mediaFrameReference?.Dispose();
    }
        //--------------------------------------------------------Set-, Get- Methods:---------------------------------------------------------\\
        #region --Set-, Get- Methods--
        public void GetSoftwareBitmap(ref SoftwareBitmap softwareBitmap)
        {
            MediaFrameReference frameRef = frameReader.TryAcquireLatestFrame();
            VideoMediaFrame     frame    = frameRef?.VideoMediaFrame;

            // Note: the bitmap stays owned by the frame reference; copy it with
            // SoftwareBitmap.Copy if it must outlive frameRef.
            softwareBitmap = frame?.SoftwareBitmap;
        }
        //<SnippetOpenCVFrameArrived>
        private void ColorFrameReader_FrameArrived_OpenCV(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            var mediaFrameReference = sender.TryAcquireLatestFrame();

            if (mediaFrameReference != null)
            {
                SoftwareBitmap openCVInputBitmap = null;
                var            inputBitmap       = mediaFrameReference.VideoMediaFrame?.SoftwareBitmap;
                if (inputBitmap != null)
                {
                    // The XAML Image control can only display images in BGRA8 format with premultiplied or no alpha.
                    if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Bgra8 &&
                        inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
                    {
                        openCVInputBitmap = SoftwareBitmap.Copy(inputBitmap);
                    }
                    else
                    {
                        openCVInputBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                    }

                    SoftwareBitmap openCVOutputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, openCVInputBitmap.PixelWidth, openCVInputBitmap.PixelHeight, BitmapAlphaMode.Premultiplied);

                    // operate on the image and render it
                    openCVHelper.Blur(openCVInputBitmap, openCVOutputBitmap);
                    _frameRenderer.PresentSoftwareBitmap(openCVOutputBitmap);
                }
            }
        }
        /// <summary>
        /// A new frame from the camera is available
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        private async void _modelInputFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            Debug.WriteLine("_modelInputFrameReader_FrameArrived");
            MediaFrameReference frame = null;

            // Do not attempt processing of more than 1 frame at a time
            _frameAquisitionLock.Wait();
            {
                _CaptureFPS += 1;

                if (_isProcessingFrames)
                {
                    _frameAquisitionLock.Release();
                    return;
                }
                try
                {
                    frame = sender.TryAcquireLatestFrame();
                    _isProcessingFrames = true;
                }
                catch (Exception ex)
                {
                    Debug.WriteLine(ex.Message);
                    NotifyUser(ex.Message, NotifyType.ErrorMessage);
                    frame = null;
                    _isProcessingFrames = false;
                }
            }
            _frameAquisitionLock.Release();

            if ((frame != null) && (frame.VideoMediaFrame != null))
            {
                VideoFrame vf = null;

                // Receive frames from the camera and transfer to system memory
                _perfStopwatch.Restart();
                SoftwareBitmap softwareBitmap = frame.VideoMediaFrame.SoftwareBitmap;

                if (softwareBitmap == null) // frames are coming as Direct3DSurface
                {
                    Debug.Assert(frame.VideoMediaFrame.Direct3DSurface != null);
                    vf = VideoFrame.CreateWithDirect3D11Surface(frame.VideoMediaFrame.Direct3DSurface);
                }
                else
                {
                    vf = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
                }

                await Task.Run(() =>
                {
                    EvaluateVideoFrameAsync(vf).ConfigureAwait(false).GetAwaiter().GetResult();
                    _frameAquisitionLock.Wait();
                    {
                        _isProcessingFrames = false;
                    }
                    _frameAquisitionLock.Release();
                });
            }
        }
Example 19
    public VideoFrame GetLatestFrame()
    {
        // The overloads of CreateFrameReaderAsync with the format arguments will actually return a copy so we don't have to copy again
        var frame      = _frameReader.TryAcquireLatestFrame();
        var videoFrame = frame?.VideoMediaFrame?.GetVideoFrame();

        return videoFrame;
    }
Example 20
        void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            MediaFrameReference frame = sender.TryAcquireLatestFrame();

            if (frame != null && frame.CoordinateSystem != null)
            {
                SetFrame(frame);
            }
        }
Example 21
        private void ProcessFrames()
        {
            _isStopped = false;

            while (!_isStopping)
            {
                try
                {
                    GarbageCollectorCanWorkHere();

                    var frame = _mediaFrameReader.TryAcquireLatestFrame();

                    if (frame == null ||
                        frame.VideoMediaFrame == null ||
                        frame.VideoMediaFrame.SoftwareBitmap == null)
                    {
                        frame?.Dispose();
                        continue;
                    }

                    using (var stream = new InMemoryRandomAccessStream())
                    {
                        using (var bitmap = SoftwareBitmap.Convert(frame.VideoMediaFrame.SoftwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore))
                        {
                            var imageTask = BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream, _imageQuality).AsTask();
                            imageTask.Wait();
                            var encoder = imageTask.Result;
                            encoder.SetSoftwareBitmap(bitmap);

                            //Rotate image 180 degrees
                            var transform = encoder.BitmapTransform;
                            transform.Rotation = BitmapRotation.Clockwise180Degrees;

                            var flushTask = encoder.FlushAsync().AsTask();
                            flushTask.Wait();

                            using (var asStream = stream.AsStream())
                            {
                                asStream.Position = 0;

                                var image = new byte[asStream.Length];
                                asStream.Read(image, 0, image.Length);

                                Frame = image;

                                encoder = null;
                            }
                        }
                    }

                    // Dispose the frame reference so the reader can recycle its buffer.
                    frame.Dispose();
                }
                catch (Exception exception)
                {
                    Logger.Write(nameof(Camera), exception).Wait();
                }
            }

            _isStopped = true;
        }
Example 22
 /// <summary>
 /// Might be implemented later.
 /// </summary>
 private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
 {
     using (var frame = sender.TryAcquireLatestFrame())
     {
         if (frame != null)
         {
         }
     }
 }
Example 23
    /// <summary>
    /// Retrieve the latest video frame from the media frame reader
    /// </summary>
    /// <returns>VideoFrame object with current frame's software bitmap</returns>
    public VideoFrame GetLatestFrame()
    {
        // The overloads of CreateFrameReaderAsync with the format arguments will actually return a copy so we don't have to copy again
        var mediaFrameReference = _mediaFrameReader.TryAcquireLatestFrame();
        var videoFrame          = mediaFrameReference?.VideoMediaFrame?.GetVideoFrame();

        Debug.Log("GetLatestFrame: Successfully retrieved video frame.");
        return(videoFrame);
    }
Example 24
        /// <summary>
        /// Video Capture: Get camera frame and feed as model input.
        /// Implementation is from the UWP official tutorial.
        /// https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader
        /// </summary>
        private void ColorFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            var mediaFrameReference = sender.TryAcquireLatestFrame();
            var videoMediaFrame     = mediaFrameReference?.VideoMediaFrame;

            if (videoMediaFrame != null)
            {
                if (videoMediaFrame.CameraIntrinsics != null)
                {
                    cameraFocalLength = videoMediaFrame.CameraIntrinsics.FocalLength;
                    System.Diagnostics.Debug.WriteLine("FocalLength: " + cameraFocalLength.X + " " + cameraFocalLength.Y);
                }
            }
            var softwareBitmap = videoMediaFrame?.SoftwareBitmap;

            if (softwareBitmap != null)
            {
                if (softwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8 ||
                    softwareBitmap.BitmapAlphaMode != BitmapAlphaMode.Premultiplied)
                {
                    softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                }

                // Swap the processed frame to _backBuffer and dispose of the unused image.
                softwareBitmap = Interlocked.Exchange(ref backBuffer, softwareBitmap);
                softwareBitmap?.Dispose();

                // Changes to XAML ImageElement must happen on UI thread through Dispatcher
                var task = inputImage.Dispatcher.RunAsync(CoreDispatcherPriority.Normal,
                                                          async () =>
                {
                    // Don't let two copies of this task run at the same time.
                    if (taskRunning)
                    {
                        return;
                    }
                    taskRunning = true;

                    // Keep draining frames from the backbuffer until the backbuffer is empty.
                    SoftwareBitmap latestBitmap;
                    while ((latestBitmap = Interlocked.Exchange(ref backBuffer, null)) != null)
                    {
                        var img = new SoftwareBitmapSource();
                        await img.SetBitmapAsync(latestBitmap);
                        inputImage.Source = img;
                        // Detect face and facial landmarks
                        UpdateImageInputData(latestBitmap);
                        DetectFaces();
                        latestBitmap.Dispose();
                    }

                    taskRunning = false;
                });
            }

            mediaFrameReference?.Dispose();
        }
Example 25
        void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            MediaFrameReference frame = sender.TryAcquireLatestFrame();

            if (frame != null)
            {
                Interlocked.Exchange(ref m_latestFrame, frame);
            }
        }
Example 26
 //<SnippetProcessAudioFrame>
 private void MediaFrameReader_AudioFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
 {
     using (MediaFrameReference reference = sender.TryAcquireLatestFrame())
     {
         if (reference != null)
         {
             ProcessAudioFrame(reference.AudioMediaFrame);
         }
     }
 }
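
The ProcessAudioFrame implementation is not part of this snippet; below is a minimal sketch of what such a method can read from the frame, using the standard UWP audio-frame types (an assumed implementation, not the sample's own):

 private void ProcessAudioFrame(AudioMediaFrame audioMediaFrame)
 {
     using (AudioFrame audioFrame = audioMediaFrame.GetAudioFrame())
     using (AudioBuffer buffer = audioFrame.LockBuffer(AudioBufferAccessMode.Read))
     using (IMemoryBufferReference reference = buffer.CreateReference())
     {
         // Format information travels with the frame.
         AudioEncodingProperties props = audioMediaFrame.AudioEncodingProperties;
         uint sampleRate = props.SampleRate;
         uint channels   = props.ChannelCount;

         // The raw PCM bytes are reachable by casting "reference" to the
         // IMemoryBufferByteAccess COM interface (requires unsafe code).
     }
 }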
 private void VideoFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
 {
     using (MediaFrameReference reference = sender.TryAcquireLatestFrame())
     {
         if (reference != null && videoStream != null)
         {
             ProcessVideoFrame(reference.VideoMediaFrame);
         }
     }
 }
        /// <summary>
        /// Handles a frame arrived event and renders the frame to the screen.
        /// </summary>
        private void ColorFrameReader_FrameArrivedAsync(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            var frame = sender.TryAcquireLatestFrame();

            if (frame != null)
            {
                SoftwareBitmap originalBitmap = null;
                var            inputBitmap    = frame.VideoMediaFrame?.SoftwareBitmap;

                if (inputBitmap != null)
                {
                    // The XAML Image control can only display images in BGRA8 format with premultiplied or no alpha.
                    // The frame reader as configured in this sample gives BGRA8 with straight alpha, so we need to convert it.
                    originalBitmap = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

                    SoftwareBitmap outputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, originalBitmap.PixelWidth, originalBitmap.PixelHeight, BitmapAlphaMode.Premultiplied);

                    // Operate on the image in the manner chosen by the user.
                    if (currentOperation == OperationType.Blur)
                    {
                        operation.Blur(originalBitmap, outputBitmap, storeditem);
                    }
                    else if (currentOperation == OperationType.HoughLines)
                    {
                        operation.HoughLines(originalBitmap, outputBitmap, storeditem);
                    }
                    else if (currentOperation == OperationType.Contours)
                    {
                        operation.Contours(originalBitmap, outputBitmap, storeditem);
                    }
                    else if (currentOperation == OperationType.Canny)
                    {
                        operation.Canny(originalBitmap, outputBitmap, storeditem);
                    }
                    else if (currentOperation == OperationType.MotionDetector)
                    {
                        operation.MotionDetector(originalBitmap, outputBitmap, storeditem);
                    }
                    else if (currentOperation == OperationType.Histogram)
                    {
#if USEOCVHERLPER
                        App.CvHelper.Histogram(originalBitmap, outputBitmap);
#else
                        // MP! Todo: Implement C# version in OcvOp.
#endif
                    }

                    // Display both the original bitmap and the processed bitmap.
                    previewRenderer.RenderFrame(originalBitmap);
                    outputRenderer.RenderFrame(outputBitmap);
                }

                Interlocked.Increment(ref frameCount);
            }
        }
    private void FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        var mediaframereference = sender.TryAcquireLatestFrame();

        if (mediaframereference != null)
        {
            test = 2;
            var videomediaframe     = mediaframereference?.VideoMediaFrame;
            var softwarebitmap      = videomediaframe?.SoftwareBitmap;
            CameraIntrinsics camerI = videomediaframe?.CameraIntrinsics;
            if (camerI != null)
            {
                var imageheight = videomediaframe.CameraIntrinsics.ImageHeight;
                var imagewidth  = videomediaframe.CameraIntrinsics.ImageWidth;

                Globals.maxdepth = imageheight;
                Globals.mindepth = imagewidth;
            }
            Globals.mindepth = 9.8f;
            if (softwarebitmap != null)
            {
                softwarebitmap = SDKTemplate.FrameRenderer.ConvertToDisplayableImage(videomediaframe);

                //softwarebitmap = SoftwareBitmap.Convert(softwarebitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Premultiplied);
                int w = softwarebitmap.PixelWidth;
                int h = softwarebitmap.PixelHeight;

                if (bytes == null)
                {
                    bytes = new byte[w * h * 4];
                }
                softwarebitmap.CopyToBuffer(bytes.AsBuffer());
                softwarebitmap.Dispose();
                UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                    if (tex == null)
                    {
                        tex = new Texture2D(w, h, TextureFormat.RGBA32, false);
                        GetComponent <Renderer>().material.mainTexture = tex;
                    }
                    // Swap BGRA to RGBA in place for the RGBA32 texture.
                    for (int i = 0; i < bytes.Length / 4; ++i)
                    {
                        byte b           = bytes[i * 4];
                        bytes[i * 4 + 0] = bytes[i * 4 + 2]; // R takes the red byte
                        // green (offset 1) stays in place
                        bytes[i * 4 + 2] = b;                // B takes the saved blue byte
                        bytes[i * 4 + 3] = 255;              // force opaque alpha
                    }

                    tex.LoadRawTextureData(bytes);
                    tex.Apply();
                }, true);
            }
            mediaframereference.Dispose();
        }
    }
Example 30
        private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            var reference = sender.TryAcquireLatestFrame();
            if (reference == null)
            {
                return;
            }

            lock (TransformLock)
            {
                if (reference.Properties.TryGetValue(InteropStatics.MFSampleExtensionSpatialCameraCoordinateSystem, out object coordinateSystem))
                {
                    CoordinateSystem = coordinateSystem as SpatialCoordinateSystem;
                }
                else
                {
                    return;
                }
                var newViewMatrix = (reference.Properties[InteropStatics.MFSampleExtensionSpatialCameraViewTransform] as byte[]).To <Matrix4x4>();
                ProjectionMatrix     = (reference.Properties[InteropStatics.MFSampleExtensionSpatialCameraProjectionTransform] as byte[]).To <Matrix4x4>();
                ProjectionMatrix.M33 = FarPlane / (NearPlane - FarPlane);
                ProjectionMatrix.M43 = NearPlane * FarPlane / (NearPlane - FarPlane);
                UpdateStability(newViewMatrix);
                LastViewMatrix = newViewMatrix;
            }
            if (AllowUnstableFrames || Stable)
            {
                ViewMatrix = LastViewMatrix;
                var      surface = reference.VideoMediaFrame.Direct3DSurface;
                var      surfaceInterfaceAccess = surface as InteropStatics.IDirect3DDxgiInterfaceAccess;
                IntPtr   resourcePointer        = surfaceInterfaceAccess.GetInterface(InteropStatics.ID3D11Resource);
                Resource resource = SharpDX.CppObject.FromPointer <Resource>(resourcePointer);
                Marshal.Release(resourcePointer);
                Texture2D frameTexture = resource.QueryInterface <Texture2D>();
                if (deviceTexture == null)
                {
                    Texture2D texture = new Texture2D(frameTexture.Device, new Texture2DDescription()
                    {
                        Width             = frameTexture.Description.Width,
                        Height            = frameTexture.Description.Height,
                        MipLevels         = 1,
                        ArraySize         = 1,
                        Format            = frameTexture.Description.Format,
                        SampleDescription = frameTexture.Description.SampleDescription,
                        Usage             = frameTexture.Description.Usage,
                        BindFlags         = BindFlags.ShaderResource,
                        CpuAccessFlags    = CpuAccessFlags.None,
                        OptionFlags       = ResourceOptionFlags.SharedKeyedmutex
                    });
                    cameraTexture = frameTexture.Device.OpenSharedResource <Texture2D>(texture.QueryInterface <SharpDX.DXGI.Resource>().SharedHandle);
                    deviceTexture = device.OpenSharedResource <Texture2D>(texture.QueryInterface <SharpDX.DXGI.Resource>().SharedHandle);
                    Ready         = true;
                }
                LockTexture(cameraTexture);
                frameTexture.Device.ImmediateContext.CopyResource(frameTexture, cameraTexture);
                UnlockTexture(cameraTexture);
                FrameUpdated();
            }
        }
 /// <summary>
 /// Handles the frame arrived event by converting the frame to a displayable
 /// format and rendering it to the screen.
 /// </summary>
 private void Reader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
 {
     // TryAcquireLatestFrame will return the latest frame that has not yet been acquired.
     // This can return null if there is no such frame, or if the reader is not in the
     // "Started" state. The latter can occur if a FrameArrived event was in flight
     // when the reader was stopped.
     using (var frame = sender.TryAcquireLatestFrame())
     {
         _frameRenderer.ProcessFrame(frame);
     }
 }
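
Every handler in these examples assumes a MediaFrameReader that was created and started elsewhere. Below is a minimal setup sketch using the standard UWP API (field and method names are illustrative; requires using System.Linq):

 private MediaCapture _mediaCapture;
 private MediaFrameReader _reader;

 private async Task StartReaderAsync()
 {
     // Pick the first source group that offers a color camera.
     var groups = await MediaFrameSourceGroup.FindAllAsync();
     var group  = groups.First(g => g.SourceInfos.Any(i => i.SourceKind == MediaFrameSourceKind.Color));

     _mediaCapture = new MediaCapture();
     await _mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings
     {
         SourceGroup          = group,
         SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
         MemoryPreference     = MediaCaptureMemoryPreference.Cpu, // deliver frames as SoftwareBitmaps
         StreamingCaptureMode = StreamingCaptureMode.Video
     });

     var source = _mediaCapture.FrameSources.Values
                  .First(s => s.Info.SourceKind == MediaFrameSourceKind.Color);

     // Requesting Bgra8 output here is what lets a handler rely on that pixel format.
     _reader = await _mediaCapture.CreateFrameReaderAsync(source, MediaEncodingSubtypes.Bgra8);
     _reader.FrameArrived += Reader_FrameArrived;
     await _reader.StartAsync();
 }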