Example No. 1
        public void ProcessFrame()
        {
            MediaFrameReference frame           = videoFrameProcessor.GetLatestFrame();
            VideoMediaFrame     videoMediaFrame = frame?.VideoMediaFrame;

            if (videoMediaFrame == null)
            {
                return;
            }
            // Validate that the incoming frame format is compatible with the FaceTracker
            bool isBitmapPixelFormatSupported = videoMediaFrame.SoftwareBitmap != null && FaceTracker.IsBitmapPixelFormatSupported(videoMediaFrame.SoftwareBitmap.BitmapPixelFormat);

            if (!isBitmapPixelFormatSupported)
            {
                return;
            }
            // Ask the FaceTracker to process this frame asynchronously
            IAsyncOperation <IList <DetectedFace> > processFrameTask = faceTracker.ProcessNextFrameAsync(videoMediaFrame.GetVideoFrame());

            try
            {
                IList <DetectedFace> faces = processFrameTask.GetResults();

                lock (@lock)
                {
                    if (faces.Count == 0)
                    {
                        ++numFramesWithoutFaces;

                        // The FaceTracker might lose track of faces for a few frames, for example,
                        // if the person momentarily turns their head away from the camera. To smooth out
                        // the tracking, we allow 30 video frames (~1 second) without faces before
                        // we say that we're no longer tracking any faces.
                        if (numFramesWithoutFaces > 30 && latestFaces.Any())
                        {
                            latestFaces.Clear();
                        }
                    }
                    else
                    {
                        numFramesWithoutFaces = 0;
                        latestFaces.Clear();
                        foreach (var face in faces)
                        {
                            latestFaces.Add(face.FaceBox);
                        }
                    }
                }
            }
            catch (Exception e)
            {
                // The task might be canceled if face analysis failed.
                Debug.LogException(e);
            }
        }
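Note that GetResults() on the IAsyncOperation above only succeeds if the operation has already completed; otherwise it throws. A minimal sketch of the awaited form of the same call, assuming the fields of Example No. 1 and an async-friendly caller (the frame-smoothing logic is omitted):

        public async Task ProcessFrameAsync()
        {
            MediaFrameReference frame           = videoFrameProcessor.GetLatestFrame();
            VideoMediaFrame     videoMediaFrame = frame?.VideoMediaFrame;

            if (videoMediaFrame?.SoftwareBitmap == null ||
                !FaceTracker.IsBitmapPixelFormatSupported(videoMediaFrame.SoftwareBitmap.BitmapPixelFormat))
            {
                return;
            }

            // Awaiting the operation guarantees the face list is available.
            IList <DetectedFace> faces = await faceTracker.ProcessNextFrameAsync(videoMediaFrame.GetVideoFrame());

            lock (@lock)
            {
                latestFaces.Clear();
                foreach (var face in faces)
                {
                    latestFaces.Add(face.FaceBox);
                }
            }
        }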
Example No. 2
    /// <summary>
    /// Modified from https://github.com/VulcanTechnologies/HoloLensCameraStream/blob/master/HoloLensCameraStream/Plugin%20Project/VideoCaptureSample.cs
    /// This returns the transform matrix at the time the photo was captured, if location data is available.
    /// If it's not, that is probably an indication that the HoloLens is not tracking and its location is not known.
    /// It could also mean the VideoCapture stream is not running.
    /// If location data is unavailable, the camera-to-world matrix will be set to the identity matrix.
    /// </summary>
    /// <param name="matrix">The transform matrix used to convert between coordinate spaces.
    /// The matrix will have to be converted to a Unity matrix before it can be used by methods in the UnityEngine namespace.
    /// See https://forum.unity3d.com/threads/locatable-camera-in-unity.398803/ for details.</param>
    public bool TryGetCameraToWorldMatrix(MediaFrameReference frameReference, out float[] outMatrix)
    {
        if (frameReference.Properties.ContainsKey(viewTransformGuid) == false)
        {
            outMatrix = GetIdentityMatrixFloatArray();
            return(false);
        }

        if (worldOrigin == null)
        {
            outMatrix = GetIdentityMatrixFloatArray();
            return(false);
        }

        // Matrix4x4 is a value type, so comparing the converted matrix against null is
        // always false; guard the raw property bytes instead.
        byte[] rawViewTransform = frameReference.Properties[viewTransformGuid] as byte[];
        if (rawViewTransform == null)
        {
            outMatrix = GetIdentityMatrixFloatArray();
            return(false);
        }

        System.Numerics.Matrix4x4 cameraViewTransform = ConvertByteArrayToMatrix4x4(rawViewTransform);

        SpatialCoordinateSystem cameraCoordinateSystem = frameReference.Properties[cameraCoordinateSystemGuid] as SpatialCoordinateSystem;

        if (cameraCoordinateSystem == null)
        {
            outMatrix = GetIdentityMatrixFloatArray();
            return(false);
        }

        System.Numerics.Matrix4x4? cameraCoordsToUnityCoordsMatrix = cameraCoordinateSystem.TryGetTransformTo(worldOrigin);
        if (cameraCoordsToUnityCoordsMatrix == null)
        {
            outMatrix = GetIdentityMatrixFloatArray();
            return(false);
        }

        // Transpose the matrices to obtain a proper transform matrix
        cameraViewTransform = System.Numerics.Matrix4x4.Transpose(cameraViewTransform);

        System.Numerics.Matrix4x4 cameraCoordsToUnityCoords = System.Numerics.Matrix4x4.Transpose(cameraCoordsToUnityCoordsMatrix.Value);

        System.Numerics.Matrix4x4 viewToWorldInCameraCoordsMatrix;
        System.Numerics.Matrix4x4.Invert(cameraViewTransform, out viewToWorldInCameraCoordsMatrix);
        System.Numerics.Matrix4x4 viewToWorldInUnityCoordsMatrix = System.Numerics.Matrix4x4.Multiply(cameraCoordsToUnityCoords, viewToWorldInCameraCoordsMatrix);

        // Change from right handed coordinate system to left handed UnityEngine
        viewToWorldInUnityCoordsMatrix.M31 *= -1f;
        viewToWorldInUnityCoordsMatrix.M32 *= -1f;
        viewToWorldInUnityCoordsMatrix.M33 *= -1f;
        viewToWorldInUnityCoordsMatrix.M34 *= -1f;

        outMatrix = ConvertMatrixToFloatArray(viewToWorldInUnityCoordsMatrix);

        return(true);
    }
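The returned float array is row-major (the matrices are transposed above before flattening); turning it into a UnityEngine.Matrix4x4 is a straightforward element copy. A minimal sketch, assuming a 16-element row-major array as produced by ConvertMatrixToFloatArray:

    // Hypothetical helper, not part of the original class: maps the row-major
    // float[16] returned by TryGetCameraToWorldMatrix onto a Unity matrix.
    static UnityEngine.Matrix4x4 FloatArrayToUnityMatrix(float[] m)
    {
        var matrix = new UnityEngine.Matrix4x4();
        for (int row = 0; row < 4; row++)
        {
            for (int col = 0; col < 4; col++)
            {
                matrix[row, col] = m[row * 4 + col];
            }
        }
        return matrix;
    }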
Example No. 3
        private async Task EncoderImage(MediaFrameReference image, IRandomAccessStream imageStream)
        {
            using (SoftwareBitmap bitmap = SoftwareBitmap.Convert(image.VideoMediaFrame.SoftwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore))
            {
                BitmapEncoder encodeur = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, imageStream, _qualiteEncodageImage);

                encodeur.SetSoftwareBitmap(bitmap);

                await encodeur.FlushAsync();
            }
        }
Example No. 4
        void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            if (!IsAnalyzingFrame && !IsNewFrameAvailable)
            {
                MediaFrameReference frame = sender.TryAcquireLatestFrame();

                if (frame != null)
                {
                    new Task(() => SetFrame(frame)).Start();
                }
            }
        }
Example No. 5
        internal VideoCaptureSample(MediaFrameReference frameReference, SpatialCoordinateSystem worldOrigin)
        {
            if (frameReference == null)
            {
                throw new ArgumentNullException(nameof(frameReference));
            }

            this.frameReference = frameReference;
            this.worldOrigin    = worldOrigin;

            bitmap = frameReference.VideoMediaFrame.SoftwareBitmap;
        }
Example No. 6
        private async Task EnregistrerImage(MediaFrameReference image)
        {
            using (InMemoryRandomAccessStream imageStream = new InMemoryRandomAccessStream())
            {
                await EncoderImage(image, imageStream);

                StorageFile fichierPhoto = await KnownFolders.PicturesLibrary.CreateFileAsync("GpsCam.jpg", CreationCollisionOption.GenerateUniqueName);

                using (IRandomAccessStream photoFileStream = await fichierPhoto.OpenAsync(FileAccessMode.ReadWrite))
                    await RandomAccessStream.CopyAndCloseAsync(imageStream.GetInputStreamAt(0), photoFileStream.GetOutputStreamAt(0));
            }
        }
Example No. 7
    /// <summary>
    /// This is just one big lump of code right now which should be factored out into some kind of
    /// 'frame reader' class which can then be subclassed for depth frame and video frame but
    /// it was handy to have it like this while I experimented with it - the intention was
    /// to tidy it up if I could get it doing more or less what I wanted :-)
    /// </summary>
    async Task ProcessingLoopAsync()
    {
        var depthMediaCapture = await this.GetMediaCaptureForDescriptionAsync(
            MediaFrameSourceKind.Depth, 448, 450, 15);

        var depthFrameReader = await depthMediaCapture.Item1.CreateFrameReaderAsync(depthMediaCapture.Item2);

        depthFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;

        MediaFrameReference lastDepthFrame = null;

        long depthFrameCount = 0;

        List <float> DepthData = new List <float>();

        // Expecting this to run at 1 fps, although the API seems to report that it runs at 15 fps
        TypedEventHandler <MediaFrameReader, MediaFrameArrivedEventArgs> depthFrameHandler =
            (sender, args) =>
        {
            using (var depthFrame = sender.TryAcquireLatestFrame())
            {
                if ((depthFrame != null) && (depthFrame != lastDepthFrame))
                {
                    lastDepthFrame = depthFrame;

                    Interlocked.Increment(ref depthFrameCount);

                    //write depthdata into list 'DepthData'
                    DepthData = GetDepthDataFromBuffer(depthFrame, (float)depthFrame.VideoMediaFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters);
                    OnDepthProvided(DepthData);
                }
            }
        };

        depthFrameReader.FrameArrived += depthFrameHandler;

        await depthFrameReader.StartAsync();

        // Wait forever then dispose...just doing this to keep track of what needs disposing.
        await Task.Delay(-1);

        depthFrameReader.FrameArrived -= depthFrameHandler;

        depthFrameReader.Dispose();

        depthMediaCapture.Item1.Dispose();
    }
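GetDepthDataFromBuffer is not shown in this example. One plausible implementation, sketched under the assumption that the depth frames arrive as 16-bit grayscale SoftwareBitmaps, reads the raw ushort pixels and scales them to meters:

    // Sketch (hypothetical helper): copy the raw 16-bit depth pixels out of the
    // frame and convert each one to meters using the sensor's depth scale.
    List <float> GetDepthDataFromBuffer(MediaFrameReference depthFrame, float depthScaleInMeters)
    {
        SoftwareBitmap bitmap = depthFrame.VideoMediaFrame.SoftwareBitmap;

        byte[] raw = new byte[bitmap.PixelWidth * bitmap.PixelHeight * sizeof(ushort)];
        bitmap.CopyToBuffer(raw.AsBuffer());

        var depthData = new List <float>(raw.Length / 2);
        for (int i = 0; i < raw.Length; i += 2)
        {
            depthData.Add(BitConverter.ToUInt16(raw, i) * depthScaleInMeters);
        }
        return depthData;
    }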
Example No. 8
        /// <summary>
        /// A new frame from the camera is available
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        private void _modelInputFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            Debug.WriteLine("_modelInputFrameReader_FrameArrived");
            MediaFrameReference frame = null;

            if (_isProcessingFrames)
            {
                return;
            }
            // Do not attempt processing of more than 1 frame at a time
            _frameAquisitionLock.Wait();
            {
                _isProcessingFrames = true;
                _CaptureFPS        += 1;

                try
                {
                    frame = sender.TryAcquireLatestFrame();
                }
                catch (Exception ex)
                {
                    Debug.WriteLine(ex.Message);
                    NotifyUser(ex.Message, NotifyType.ErrorMessage);
                    frame = null;
                }

                if ((frame != null) && (frame.VideoMediaFrame != null))
                {
                    VideoFrame vf = null;

                    // Receive frames from the camera and transfer to system memory
                    _perfStopwatch.Restart();
                    SoftwareBitmap softwareBitmap = frame.VideoMediaFrame.SoftwareBitmap;

                    if (softwareBitmap == null) // frames are coming as Direct3DSurface
                    {
                        Debug.Assert(frame.VideoMediaFrame.Direct3DSurface != null);
                        vf = VideoFrame.CreateWithDirect3D11Surface(frame.VideoMediaFrame.Direct3DSurface);
                    }
                    else
                    {
                        vf = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
                    }
                    EvaluateVideoFrameAsync(vf).ConfigureAwait(false).GetAwaiter().GetResult();
                }
                Thread.Sleep(500);
                _isProcessingFrames = false;
            }
            _frameAquisitionLock.Release();
        }
Example No. 9
        internal VideoCaptureSample(MediaFrameReference frameReference)
        {
            if (frameReference == null)
            {
                throw new ArgumentNullException(nameof(frameReference));
            }

            this.frameReference = frameReference;
            bitmap = frameReference.VideoMediaFrame.SoftwareBitmap;

            //TODO: Get location data
            //var transformMatrix = frameReference.Properties[TransformMatrixGuid] as SomeMatrixOrArray;
            //var projectionMatrix = frameReference.Properties[ProjectionMatrixGuid] as SomeMatrixOrArray;
        }
Example No. 10
 // <SnippetMultiFrameArrived>
 private void MultiFrameReader_FrameArrived(MultiSourceMediaFrameReader sender, MultiSourceMediaFrameArrivedEventArgs args)
 {
     using (MultiSourceMediaFrameReference muxedFrame =
                sender.TryAcquireLatestFrame())
         using (MediaFrameReference colorFrame =
                    muxedFrame.TryGetFrameReferenceBySourceId(_colorSourceId))
             using (MediaFrameReference depthFrame =
                        muxedFrame.TryGetFrameReferenceBySourceId(_depthSourceId))
             {
                 // Notify the listener thread that the frame has been received.
                 _frameReceived.Set();
                 _frameRenderer.ProcessFrame(depthFrame);
             }
 }
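For context, the reader and the two source ids used above are typically wired up along these lines. This is a sketch (null checks omitted) assuming a frame source group that exposes both a color and a depth source; _colorSourceId, _depthSourceId, and the handler name come from the snippet above:

 private async Task InitMultiFrameReaderAsync()
 {
     // Find a source group that offers both color and depth.
     var groups = await MediaFrameSourceGroup.FindAllAsync();
     var group  = groups.FirstOrDefault(g =>
                                        g.SourceInfos.Any(i => i.SourceKind == MediaFrameSourceKind.Color) &&
                                        g.SourceInfos.Any(i => i.SourceKind == MediaFrameSourceKind.Depth));

     var mediaCapture = new MediaCapture();
     await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings
     {
         SourceGroup          = group,
         SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
         MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
         StreamingCaptureMode = StreamingCaptureMode.Video
     });

     MediaFrameSource colorSource = mediaCapture.FrameSources.Values.First(s => s.Info.SourceKind == MediaFrameSourceKind.Color);
     MediaFrameSource depthSource = mediaCapture.FrameSources.Values.First(s => s.Info.SourceKind == MediaFrameSourceKind.Depth);

     _colorSourceId = colorSource.Info.Id;
     _depthSourceId = depthSource.Info.Id;

     // A multi-source reader raises FrameArrived only when a correlated frame
     // from every source is available.
     var multiFrameReader = await mediaCapture.CreateMultiSourceFrameReaderAsync(new[] { colorSource, depthSource });
     multiFrameReader.FrameArrived += MultiFrameReader_FrameArrived;
     await multiFrameReader.StartAsync();
 }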
Example No. 11
        public void ProcessFrame(MediaFrameReference frame)
        {
            var softwareBitmap = FrameRenderer.ConvertToDisplayableImage(frame?.VideoMediaFrame);

            if (softwareBitmap != null)
            {
                // Swap the processed frame to _backBuffer and trigger UI thread to render it
                softwareBitmap = Interlocked.Exchange(ref _backBuffer, softwareBitmap);

                // The UI thread always resets _backBuffer before using it. Unused bitmaps should be disposed.
                softwareBitmap?.Dispose();

                // Changes to xaml ImageElement must happen in UI thread through Dispatcher
                var task = _imageElement.Dispatcher.RunAsync(CoreDispatcherPriority.Normal,
                                                             async() =>
                {
                    // Don't let two copies of this task run at the same time.
                    if (_taskRunning)
                    {
                        return;
                    }
                    _taskRunning = true;

                    // Keep draining frames from the backbuffer until the backbuffer is empty.
                    SoftwareBitmap latestBitmap;
                    while ((latestBitmap = Interlocked.Exchange(ref _backBuffer, null)) != null)
                    {
                        var imageSource = (SoftwareBitmapSource)_imageElement.Source;
                        await imageSource.SetBitmapAsync(latestBitmap);

                        IBarcodeReader reader = new BarcodeReader();
                        WriteableBitmap img   = new WriteableBitmap(latestBitmap.PixelWidth, latestBitmap.PixelHeight);
                        latestBitmap.CopyToBuffer(img.PixelBuffer);
                        var result = reader.Decode(img);

                        if (result != null)
                        {
                            _Page.NextStepAsync(result.Text);
                        }
                        //_TextBox.Text = result.Text;



                        latestBitmap.Dispose();
                    }

                    _taskRunning = false;
                });
            }
        }
Example No. 12
        async Task <byte[]> GetFrameData(MediaFrameReference frame)
        {
            byte[] bytes = null;

            if (frame == null)
            {
                return(bytes);
            }

            VideoMediaFrame videoMediaFrame = frame.VideoMediaFrame;

            if (videoMediaFrame == null)
            {
                return(bytes);
            }

            VideoFrame     videoFrame     = videoMediaFrame.GetVideoFrame();
            SoftwareBitmap softwareBitmap = videoFrame.SoftwareBitmap;

            if (softwareBitmap == null)
            {
                return(bytes);
            }

            SoftwareBitmap bitmapBGRA8 = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore);

            using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
            {
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);

                // Set the software bitmap
                encoder.SetSoftwareBitmap(bitmapBGRA8);
                encoder.IsThumbnailGenerated = false;

                try
                {
                    await encoder.FlushAsync();

                    // Rewind before reading the encoded bytes back; after FlushAsync
                    // the stream position is at the end of the data.
                    stream.Seek(0);

                    bytes = new byte[stream.Size];
                    await stream.AsStream().ReadAsync(bytes, 0, bytes.Length);
                }
                catch (Exception e)
                {
                    Debug.WriteLine($"Error while trying to encode frame into a byte array, exception {e.Message}");
                }
            }

            return(bytes);
        }
Example No. 13
    private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
    {
        lock (_bodyFrameLock)
        {
            _bodyFrame = e.BodyFrame;

            if (_saveLatestFrames)
            {
                _latestBodyFrame = e.Frame;
            }

            _bodyFrameTime  = DateTime.Now.Ticks; // _bodyFrame.SystemRelativeTime.Value.Ticks;
            _bodyFrameReady = true;
        }
    }
Example No. 14
        void SetFrame(MediaFrameReference frame)
        {
            var spatialCoordinateSystem = frame.CoordinateSystem;
            var cameraIntrinsics        = frame.VideoMediaFrame.CameraIntrinsics;

            LastFrame = new Frame
            {
                mediaFrameReference     = frame,
                spatialCoordinateSystem = spatialCoordinateSystem,
                cameraIntrinsics        = cameraIntrinsics,
                timestamp = Utils.GetCurrentUnixTimestampMillis()
            };

            _lastFrameCapturedTimestamp = DateTime.Now;
        }
Example No. 15
    private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
    {
        _depthCameraIntrinsics = e.CameraIntrinsics;

        if (_depthDataBuf == null || sensorData.depthImageWidth != e.Bitmap.PixelWidth || sensorData.depthImageHeight != e.Bitmap.PixelHeight)
        {
            sensorData.depthImageWidth  = e.Bitmap.PixelWidth;
            sensorData.depthImageHeight = e.Bitmap.PixelHeight;

            int imageLen = e.Bitmap.PixelWidth * e.Bitmap.PixelHeight * sizeof(ushort);

            lock (_depthDataLock)
            {
                //_depthDataBuf = new byte[imageLen];
                //sensorData.depthImage = new ushort[e.Bitmap.PixelWidth * e.Bitmap.PixelHeight];
                Array.Resize <byte>(ref _depthDataBuf, imageLen);
                Array.Resize <ushort>(ref sensorData.depthImage, e.Bitmap.PixelWidth * e.Bitmap.PixelHeight);
            }

            int biImageLen = e.Bitmap.PixelWidth * e.Bitmap.PixelHeight;

            lock (_bodyIndexDataLock)
            {
                //_bodyIndexDataBuf = new byte[biImageLen];
                //sensorData.bodyIndexImage = new byte[biImageLen];
                Array.Resize <byte>(ref _bodyIndexDataBuf, biImageLen);
                Array.Resize <byte>(ref sensorData.bodyIndexImage, biImageLen);
            }
        }

        if (_depthDataBuf != null)
        {
            lock (_depthDataLock)
            {
                e.Bitmap.CopyToBuffer(_depthDataBuf.AsBuffer());

                if (_saveLatestFrames)
                {
                    _latestDepthFrame = e.Frame;
                }

                _depthDataTime  = DateTime.Now.Ticks; // depthFrame.RelativeTime.Ticks;
                _depthDataReady = true;
            }
        }
    }
Example No. 16
    // Update is called once per frame
    void Update()
    {
#if ENABLE_WINMD_SUPPORT
        if (!_isReadyToRender)
        {
            return;
        }

        // The HolographicFrame has information that the app needs in order
        // to update and render the current frame. The app begins each new
        // frame by calling CreateNextFrame.
        //HolographicFrame ^ holographicFrame = m_holographicSpace->CreateNextFrame();

        // Get a prediction of where holographic cameras will be when this frame
        // is presented.
        //HolographicFramePrediction prediction = holographicFrame->CurrentPrediction;

        IntPtr spatialCoordinateSystemPtr               = WorldManager.GetNativeISpatialCoordinateSystemPtr();
        SpatialCoordinateSystem unityWorldOrigin        = Marshal.GetObjectForIUnknown(spatialCoordinateSystemPtr) as SpatialCoordinateSystem;
        SpatialCoordinateSystem currentCoordinateSystem = unityWorldOrigin;

        _isTrackingFaces = _faceTrackerProcessor.IsTrackingFaces();

        if (_isTrackingFaces)
        {
            MediaFrameReference frame = _videoFrameProcessor.GetLatestFrame();
            if (frame == null)
            {
                return;
            }
            var faces = _faceTrackerProcessor.GetLatestFaces();
            ProcessFaces(faces, frame, currentCoordinateSystem);


            TimeSpan currentTimeStamp = frame.SystemRelativeTime.Value.Duration();
            if (currentTimeStamp > _previousFrameTimestamp)
            {
                // TODO: copy to texture
                _previousFrameTimestamp = frame.SystemRelativeTime.Value.Duration();
            }
        }

        SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
#endif
    }
Example No. 17
    private void BodyIndexReader_FrameArrived(object sender, BodyIndexFrameArrivedEventArgs e)
    {
        if (_bodyIndexDataBuf != null)
        {
            lock (_bodyIndexDataLock)
            {
                e.Bitmap.CopyToBuffer(_bodyIndexDataBuf.AsBuffer());

                if (_saveLatestFrames)
                {
                    _latestBodyIndexFrame = e.Frame;
                }

                _bodyIndexDataTime  = DateTime.Now.Ticks; // bodyIndexFrame.RelativeTime.Ticks;
                _bodyIndexDataReady = true;
            }
        }
    }
Example No. 18
        /// <summary>
        /// Analyze a frame and notify the delegate of the results
        /// </summary>
        /// <param name="frame"></param>
        /// <param name="callback"></param>
        public async void AnalyzeFrame(MediaFrameReference frame, Action <int, List <DetectedPerson> > callback)
        {
            IsAnalyzingFrame = true;

            var detectedPersonsInFrame = new List <DetectedPerson>();

            // get the raw data of the frame
            var data = await GetFrameData(frame);

            if (data == null || data.Length == 0)
            {
                Debug.WriteLine("ERROR :: AnalyzeFrame failed - data is null or empty");
                IsAnalyzingFrame = false;
                callback(FAILED_UNKNOWN, detectedPersonsInFrame);
                return;
            }

            // call the remote API to detect faces
            await DetectFaces(data, detectedPersonsInFrame);

            // if faces were detected, then try to identify them
            if (detectedPersonsInFrame.Count > 0)
            {
                await IdentifyFaces(detectedPersonsInFrame);
            }

            // try to match each identified person with our loaded repository
            foreach (var p in detectedPersonsInFrame)
            {
                if (p.personId != null)
                {
                    var match = groupPersons.Where(m => m.personId.Equals(p.personId.ToString(), StringComparison.OrdinalIgnoreCase)).FirstOrDefault();
                    if (match != null)
                    {
                        p.name = match.name;
                    }
                }
            }

            lastFrameAnalysisTimestamp = Utils.GetCurrentUnixTimestampMillis();
            IsAnalyzingFrame           = false;

            callback(SUCCESS, detectedPersonsInFrame);
        }
Example No. 19
        // Extract metadata from the captured frame and update the LastFrame property.
        void SetFrame(MediaFrameReference frame)
        {
            var spatialCoordinateSystem = frame.CoordinateSystem;
            var cameraIntrinsics        = frame.VideoMediaFrame.CameraIntrinsics;

            // The mediaFrameReference includes the camera's location (CoordinateSystem) and
            // perspective projection (CameraIntrinsics), which can be used to infer the
            // camera's position in the real world and augment it with digital content.
            LastFrame = new Frame
            {
                mediaFrameReference     = frame,
                spatialCoordinateSystem = spatialCoordinateSystem,
                cameraIntrinsics        = cameraIntrinsics,
                timestamp = Utils.GetCurrentUnixTimestampMillis()
            };

            _lastFrameCapturedTimeStamp = DateTime.Now;
        }
Example No. 20
        public async Task <ModelResult> EvaluateAsync(MediaFrameReference input, string correlationId)
        {
            var r = new ModelResult(_session, correlationId);

            lock (_results) {
                _results.Add(correlationId, r);
                ++depth;
            }
            var v = ImageFeatureValue.CreateFromVideoFrame(input.VideoMediaFrame.GetVideoFrame());

            // NOTE: following bind strings are specific to azure custom vision coreml output.
            r._binding.Bind("data", v);
            r._binding.Bind("classLabel", r._output.classLabelTensor);
            r._binding.Bind("loss", r._output.loss);

            r._result = await AsyncHelper.SyncFromAsync(_session.EvaluateAsync(r._binding, correlationId), "r._result");

            return(r);
        }
Example No. 21
        public async Task <MediaFrameReference> GetFrameAsync()
        {
            MediaFrameReference result = null;

            do
            {
                evtFrame.WaitOne();
                evtFrame.Reset();

                result = mediaFrameReader.TryAcquireLatestFrame();

                if (null == result)
                {
                    await Task.Delay(10);
                }
            }while (null == result);

            return(result);
        }
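The evtFrame handle is presumably a reset event signaled whenever the reader reports a new frame; the missing wiring, sketched under that assumption:

        // Assumed companion wiring (not shown in the original example).
        private readonly ManualResetEvent evtFrame = new ManualResetEvent(false);

        private void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            // Wake up any GetFrameAsync call waiting on the event.
            evtFrame.Set();
        }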
Example No. 22
        public async void ProcessFrame(MediaFrameReference frame)
        {
            var softwareBitmap = FrameRenderer.ConvertToDisplayableImage(frame?.VideoMediaFrame);

            _bytes = await Convert(softwareBitmap);

            if (softwareBitmap != null)
            {
                // Swap the processed frame to _backBuffer and trigger UI thread to render it
                softwareBitmap = Interlocked.Exchange(ref _backBuffer, softwareBitmap);

                // The UI thread always resets _backBuffer before using it. Unused bitmaps should be disposed.
                softwareBitmap?.Dispose();

                // Changes to xaml ImageElement must happen in UI thread through Dispatcher
                var task = _imageElement.Dispatcher.RunAsync(CoreDispatcherPriority.Normal,
                                                             async() =>
                {
                    // Don't let two copies of this task run at the same time.
                    if (_taskRunning)
                    {
                        return;
                    }
                    _taskRunning = true;

                    // Keep draining frames from the backbuffer until the backbuffer is empty.
                    SoftwareBitmap latestBitmap;
                    while ((latestBitmap = Interlocked.Exchange(ref _backBuffer, null)) != null)
                    {
                        _bytes          = await Convert(latestBitmap);
                        var imageSource = (SoftwareBitmapSource)_imageElement.Source;
                        await imageSource.SetBitmapAsync(latestBitmap);

                        latestBitmap.Dispose();
                    }


                    _taskRunning = false;
                });
            }
        }
Example No. 23
        /// <summary>
        /// Invoked on each received video frame. Extracts the image according to the <see cref="ColorFormat"/> and invokes the <see cref="FrameArrived"/> event containing a <see cref="CameraFrame"/>.
        /// </summary>
        private unsafe void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            if (sender == null)
            {
                throw new ArgumentNullException(nameof(sender));
            }
            if (args == null)
            {
                throw new ArgumentNullException(nameof(args));
            }
            using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
            {
                if (frame == null)
                {
                    return;
                }
                SoftwareBitmap originalSoftwareBitmap = frame.VideoMediaFrame?.SoftwareBitmap;
                if (originalSoftwareBitmap == null)
                {
                    _logger.LogWarning("Received frame without image.");
                    return;
                }

                CameraExtrinsic extrinsic = new CameraExtrinsic(frame.CoordinateSystem, WorldOrigin);
                CameraIntrinsic intrinsic = new CameraIntrinsic(frame.VideoMediaFrame.CameraIntrinsics);

                using (var input = originalSoftwareBitmap.LockBuffer(BitmapBufferAccessMode.Read))
                    using (var inputReference = input.CreateReference())
                    {
                        byte *inputBytes;
                        uint  inputCapacity;
                        ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputBytes, out inputCapacity);
                        MatUtils.copyToMat((IntPtr)inputBytes, _bitmap);
                        int thisFrameCount = Interlocked.Increment(ref FrameCount);

                        // TODO: Check out of using block
                        CameraFrame           cameraFrame = new CameraFrame(_bitmap, intrinsic, extrinsic, FrameWidth, FrameHeight, (uint)thisFrameCount, _format);
                        FrameArrivedEventArgs eventArgs   = new FrameArrivedEventArgs(cameraFrame);
                        FrameArrived?.Invoke(this, eventArgs);
                    }
                originalSoftwareBitmap?.Dispose();
            }
        }
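The cast to IMemoryBufferByteAccess above relies on the standard COM interop declaration for reading raw pixels out of a locked bitmap buffer; if it is not already defined elsewhere in the project (using System.Runtime.InteropServices), it looks like this:

        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            // Returns a pointer to the buffer's bytes and its size.
            void GetBuffer(out byte *buffer, out uint capacity);
        }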
Example No. 24
 protected override bool ProcessFrame(MediaFrameReference frameReference, CameraCapture.ImageProcess processMethod)
 {
     this.Result = null;
     _result     = null;
     // doc here https://msdn.microsoft.com/en-us/library/windows/apps/xaml/windows.media.capture.frames.videomediaframe.aspx
     // says to dispose this softwarebitmap if you access it.
     using (bitmap = frameReference.VideoMediaFrame.SoftwareBitmap)
     {
         try
         {
             if (this.buffer == null)
             {
                 this.buffer = new byte[4 * bitmap.PixelHeight * bitmap.PixelWidth];
             }
             if (processMethod == null)
             {
                 Result = new object[1] {
                     1
                 };
                 var task = SaveSoftwareBitmapToFile();
                 task.Wait();
             }
             else
             {
                 bmpBuffer = bitmap.LockBuffer(BitmapBufferAccessMode.ReadWrite);
                 reference = bmpBuffer.CreateReference();
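                 // NOTE: 'buffer' is wrapped by the Mat below but is never filled from
                 // 'reference' in this example; the copy from the locked bitmap into
                 // 'buffer' (e.g. via IMemoryBufferByteAccess) appears to be missing.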
                 //ComPtr<IMemoryBufferByteAccess> pBufferByteAccess;
                 sourceImage = new Mat(bitmap.PixelHeight, bitmap.PixelWidth, MatType.CV_8UC4, buffer);
                 //Cv2.CvtColor(sourceImage, sourceImage, ColorConversionCodes.BGRA2BGR); //<Remove>
                 processMethod(sourceImage, out _result);
                 if (_result != null)
                 {
                     Result = _result;
                 }
             }
         }
      catch
      {
          // Swallow per-frame processing errors; 'Result' simply stays null.
      }
     }
     return(this.Result != null);
 }
Example No. 25
        // Handles each new Frame Event
        protected void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            /*if (semToFrame.WaitOne() == false)
             * {
             *  Debug.WriteLine("\n\t\t\t frame blocked by semaphore");
             * };*/
            Debug.WriteLine("\t --> Frame Arrived !");

            /*if (!mutexLock.Wait(0))
             * {
             *  Debug.WriteLine("\t\t-->mutex already took !!!");
             *  return;
             * }*/
            /* lock (propertiesLock)
             * {*/
            MediaFrameReference frame = sender.TryAcquireLatestFrame();

            if (frame != null)
            {
                // Deal with mutex then update new lastframe
                this.latestFrame = frame;

                if (frame.Equals(latestFrame))
                {
                    Debug.WriteLine("\t --> New frame setted up !");
                }
                else
                {
                    Debug.WriteLine("\t --> Semaphore issue ?!");
                }
                //FaceTracking.Proc
            }
            else
            {
                Debug.WriteLine("\t --> Frame arrived, but couldn't be acquired (null)");
            }
            //}

            //semToFrame.Release(1);
            //mutexLock.Release();
        }
Example No. 26
        private void RawStats(MediaFrameReference frame)
        {
            if (frame == null)
            {
                return;
            }
            if (frame.SystemRelativeTime == null)
            {
                return;
            }

            if (frame.SourceKind == MediaFrameSourceKind.Color)
            {
                ColorFrameEpochs.Enqueue(frame.SystemRelativeTime.Value);
                if (ColorFrameEpochs.Count > 10)
                {
                    ColorFrameEpochs.TryDequeue(out _);
                }
            }
            else if (frame.SourceKind == MediaFrameSourceKind.Infrared &&
                     frame.VideoMediaFrame != null &&
                     frame.VideoMediaFrame.InfraredMediaFrame != null)
            {
                if (frame.VideoMediaFrame.InfraredMediaFrame.IsIlluminated == true)
                {
                    IlluminatedInfraredFrameEpochs.Enqueue(frame.SystemRelativeTime.Value);
                    if (IlluminatedInfraredFrameEpochs.Count > 10)
                    {
                        IlluminatedInfraredFrameEpochs.TryDequeue(out _);
                    }
                }
                else
                {
                    NonIlluminatedInfraredFrameEpochs.Enqueue(frame.SystemRelativeTime.Value);
                    if (NonIlluminatedInfraredFrameEpochs.Count > 10)
                    {
                        NonIlluminatedInfraredFrameEpochs.TryDequeue(out _);
                    }
                }
            }
        }
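Given the ten buffered timestamps per category, a frame-rate estimate falls out of the span between the oldest and newest entries. A sketch, assuming the epoch queues are ConcurrentQueue<TimeSpan> instances:

        // Hypothetical helper: approximate FPS over the buffered window.
        private static double EstimateFps(ConcurrentQueue <TimeSpan> epochs)
        {
            TimeSpan[] snapshot = epochs.ToArray();
            if (snapshot.Length < 2)
            {
                return 0.0;
            }
            double seconds = (snapshot[snapshot.Length - 1] - snapshot[0]).TotalSeconds;
            return seconds > 0 ? (snapshot.Length - 1) / seconds : 0.0;
        }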
Example No. 27
        internal VideoCaptureSample(MediaFrameReference frameReference, SpatialCoordinateSystem worldOrigin)
        {
            if (frameReference == null)
            {
                throw new ArgumentNullException(nameof(frameReference));
            }

            this.frameReference = frameReference;
            this.worldOrigin    = worldOrigin;

            // When Windows.Media.Devices.Core.CameraIntrinsics is out of prerelease, use this instead
            //cameraIntrinsics = new CameraIntrinsics(frameReference.VideoMediaFrame.CameraIntrinsics);

            byte[]  rawIntrinsics  = frameReference.Properties[cameraIntrinsicsGuid] as byte[];
            float[] intrinsicArray = ConvertByteArrayToFloatArray(rawIntrinsics);
            cameraIntrinsics = new CameraIntrinsics(intrinsicArray);

            bitmap      = frameReference.VideoMediaFrame.SoftwareBitmap;
            FrameWidth  = bitmap.PixelWidth;
            FrameHeight = bitmap.PixelHeight;
        }
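ConvertByteArrayToFloatArray here, and the ConvertByteArrayToMatrix4x4 used in Example No. 2, are simple reinterpretations of the raw property bytes as packed 32-bit floats. Plausible implementations, sketched assuming row-major packing (which matches the transpose handling in Example No. 2):

        // Sketch: blit the raw bytes into a float array.
        static float[] ConvertByteArrayToFloatArray(byte[] bytes)
        {
            float[] floats = new float[bytes.Length / sizeof(float)];
            Buffer.BlockCopy(bytes, 0, floats, 0, bytes.Length);
            return floats;
        }

        // Sketch: build a Matrix4x4 from 16 packed floats in row-major order.
        static System.Numerics.Matrix4x4 ConvertByteArrayToMatrix4x4(byte[] bytes)
        {
            float[] m = ConvertByteArrayToFloatArray(bytes);
            return new System.Numerics.Matrix4x4(
                m[0], m[1], m[2], m[3],
                m[4], m[5], m[6], m[7],
                m[8], m[9], m[10], m[11],
                m[12], m[13], m[14], m[15]);
        }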
Example No. 28
        private void VideoFrameArrivedEvent(MediaFrameReference frame)
        {
            var buffer = frame.BufferMediaFrame;

            if (buffer == null)
            {
                return;
            }
            var format = frame.VideoMediaFrame?.VideoFormat;

            if (format == null)
            {
                return;
            }
            m_consumer.ConsumeRawVideoFrame(
                buffer.Buffer.ToArray(),
                VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_NV12,
                format.Width, format.Height,
                0,
                // new DateTimeOffset() would be the year-1 epoch; use the current time instead.
                (ulong)DateTimeOffset.UtcNow.ToUnixTimeMilliseconds());
        }
Example No. 29
        protected override async Task <bool> ProcessFrameAsync(MediaFrameReference frameReference)
        {
            bool done = false;

            // doc here https://msdn.microsoft.com/en-us/library/windows/apps/xaml/windows.media.capture.frames.videomediaframe.aspx
            // says to dispose this softwarebitmap if you access it.
            using (var bitmap = frameReference.VideoMediaFrame.SoftwareBitmap)
            {
                try
                {
                    if (this.ocrEngine == null)
                    {
                        this.ocrEngine = OcrEngine.TryCreateFromUserProfileLanguages();
                        this.regex     = new Regex(IP_ADDRESS_PATTERN);
                    }
                    var results = await this.ocrEngine.RecognizeAsync(bitmap);

                    if (results != null)
                    {
                        var matchingResults = this.regex.Matches(results.Text);

                        for (int i = 0; !done && (i < matchingResults.Count); i++)
                        {
                            IPAddress parsedAddress;

                            done = IPAddress.TryParse(matchingResults[i].Value, out parsedAddress);

                            if (done)
                            {
                                this.Result = parsedAddress;
                            }
                        }
                    }
                }
                catch
                {
                    // OCR failures are ignored; 'done' stays false for this frame.
                }
            }
            return(done);
        }
Example No. 30
    private void ColorReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
    {
        _colorCameraIntrinsics = e.CameraIntrinsics;

        if (_colorDataBuf == null || sensorData.colorImageWidth != e.Bitmap.PixelWidth || sensorData.colorImageHeight != e.Bitmap.PixelHeight)
        {
            sensorData.colorImageWidth  = e.Bitmap.PixelWidth;
            sensorData.colorImageHeight = e.Bitmap.PixelHeight;

            int imageLen = e.Bitmap.PixelWidth * e.Bitmap.PixelHeight * 4;

            lock (_colorDataLock)
            {
                //_colorDataBuf = new byte[imageLen];
                //sensorData.colorImage = new byte[imageLen];
                Array.Resize <byte>(ref _colorDataBuf, imageLen);
                Array.Resize <byte>(ref sensorData.colorImage, imageLen);
            }
        }

        if (_colorDataBuf != null)
        {
            // convert the bitmap
            SoftwareBitmap convertedBitmap = SoftwareBitmap.Convert(e.Bitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Straight);

            lock (_colorDataLock)
            {
                convertedBitmap?.CopyToBuffer(_colorDataBuf.AsBuffer());
                convertedBitmap?.Dispose();

                if (_saveLatestFrames)
                {
                    _latestColorFrame = e.Frame;
                }

                _colorDataTime  = DateTime.Now.Ticks; // colorFrame.RelativeTime.Ticks;
                _colorDataReady = true;
            }
        }
    }
        public void ProcessFrame(MediaFrameReference frame)
        {
            var softwareBitmap = FrameRenderer.ConvertToDisplayableImage(frame?.VideoMediaFrame);

            if (softwareBitmap != null)
            {
                // Swap the processed frame to _backBuffer and trigger UI thread to render it
                softwareBitmap = Interlocked.Exchange(ref _backBuffer, softwareBitmap);

                        // The UI thread always resets _backBuffer before using it. Unused bitmaps should be disposed.
                softwareBitmap?.Dispose();

                // Changes to xaml ImageElement must happen in UI thread through Dispatcher
                var task = _imageElement.Dispatcher.RunAsync(CoreDispatcherPriority.Normal,
                    async () =>
                    {
                        // Don't let two copies of this task run at the same time.
                        if (_taskRunning)
                        {
                            return;
                        }
                        _taskRunning = true;

                        // Keep draining frames from the backbuffer until the backbuffer is empty.
                        SoftwareBitmap latestBitmap;
                        while ((latestBitmap = Interlocked.Exchange(ref _backBuffer, null)) != null)
                        {
                            var imageSource = (SoftwareBitmapSource)_imageElement.Source;
                            await imageSource.SetBitmapAsync(latestBitmap);
                            latestBitmap.Dispose();
                        }

                        _taskRunning = false;
                    });
            }
        }