Example #1
        /// <summary>
        /// Starts the source and immediately raises the FrameArrived event with the current VideoFrame
        /// </summary>
        public Task StartAsync()
        {
            FrameArrived?.Invoke(this, m_videoFrame);

            // Async not needed, return success
            return Task.FromResult(true);
        }
        /// <summary>
        /// MediaPlayer.VideoFrameAvailable callback. Copies the frame to the Direct3D surface, stamps the playback position, and forwards the event
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        private async void MediaPlayer_VideoFrameAvailable(MediaPlayer sender, object args)
        {
            m_mediaPlayer.CopyFrameToVideoSurface(m_videoFrame.Direct3DSurface);

            if (m_desiredImageDescriptor != null)
            {
                await m_videoFrame.CopyToAsync(m_stagingVideoFrame);

                m_stagingVideoFrame.SystemRelativeTime = m_mediaPlayer.PlaybackSession.Position;
                FrameArrived?.Invoke(this, m_stagingVideoFrame);
            }
            else
            {
                m_videoFrame.SystemRelativeTime = m_mediaPlayer.PlaybackSession.Position;
                FrameArrived?.Invoke(this, m_videoFrame);
            }
        }
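For orientation, a minimal consumer of this pattern might look as follows. The IFrameSource shape below is an assumption inferred from the handlers in these examples, not part of the original code:

// Hypothetical frame-source shape inferred from the handlers above.
public interface IFrameSource
{
    event EventHandler<VideoFrame> FrameArrived; // signature is an assumption
    Task StartAsync();
}

public static async Task ConsumeAsync(IFrameSource source)
{
    source.FrameArrived += (sender, frame) =>
    {
        // frame.SystemRelativeTime carries the timestamp set by the source.
        System.Diagnostics.Debug.WriteLine($"Frame at {frame.SystemRelativeTime}");
    };
    await source.StartAsync();
}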
Example #3
 /// <summary>
 /// Depending on color format, the frame is passed on or buffered for conversion.
 /// </summary>
 private void OnFrameArrived(object sender, FrameArrivedEventArgs e)
 {
     if (Format == ColorFormat.Grayscale)
     {
         _frame = e.Frame;
         FPSUtils.VideoTick();
         FrameArrived?.Invoke(this, e);
     }
     else
     {
         if (IsProcessingFrame)
         {
             return;
         }
         IsProcessingFrame  = true;
         CurrentCameraFrame = e.Frame;
     }
 }
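The else branch implements drop-newest backpressure: while one frame is being converted, later frames are discarded. A standalone sketch of the same guard using an atomic flag (all names here are hypothetical):

// Requires System.Threading for Interlocked.
private int _busy; // 0 = idle, 1 = a conversion is in flight

private void OnFrame(CameraFrame frame)
{
    if (Interlocked.CompareExchange(ref _busy, 1, 0) != 0)
    {
        return; // converter busy: drop this frame
    }
    StartConversion(frame); // its completion callback must reset _busy to 0
}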
Example #4
        /// <summary>
        /// Processes the received frame, converts the image to grayscale if requested, and invokes the next photo request.
        /// </summary>
        private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            if (_stopped?.Task != null)
            {
                return;
            }
            if (result.resultType == PhotoCapture.CaptureResultType.UnknownError)
            {
                return;
            }
            if (photoCaptureFrame == null)
            {
                return;
            }
            Size size = new Size(FrameWidth, (double)FrameHeight * 3 / 2); // NV12 stores full-resolution luminance (Y) followed by interleaved chrominance at half vertical resolution, so the buffer height is 3/2 times the image height. <see href="https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#nv12"/>.

            _image = new Mat(size, CvType.CV_8UC1);
            List<byte> imageBuffer = new List<byte>();

            photoCaptureFrame?.CopyRawImageDataIntoBuffer(imageBuffer);
            MatUtils.copyToMat(imageBuffer.ToArray(), _image);

            if (_format == ColorFormat.Grayscale)
            {
                Imgproc.cvtColor(_image, _image, Imgproc.COLOR_YUV2GRAY_NV12);
            }

            Matrix4x4 cameraToWorldMatrix = Matrix4x4.identity;

            photoCaptureFrame?.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            CameraExtrinsic extrinsic = new CameraExtrinsic(cameraToWorldMatrix);

            Matrix4x4 projectionMatrix = Matrix4x4.identity;

            photoCaptureFrame?.TryGetProjectionMatrix(out projectionMatrix);
            CameraIntrinsic intrinsic = new CameraIntrinsic(projectionMatrix);

            CameraFrame           cameraFrame = new CameraFrame(_image, intrinsic, extrinsic, FrameWidth, FrameHeight, FrameCount++, _format);
            FrameArrivedEventArgs args        = new FrameArrivedEventArgs(cameraFrame);

            FrameArrived?.Invoke(this, args);

            _photoCaptureObject?.TakePhotoAsync(OnCapturedPhotoToMemory);
        }
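The 3/2 height factor follows directly from the NV12 layout; a worked size computation with illustrative values:

// NV12: width*height bytes of Y, followed by width*height/2 bytes of interleaved UV.
int width = 1920, height = 1080;
int lumaBytes   = width * height;          // 2,073,600
int chromaBytes = width * height / 2;      // 1,036,800 (UV sampled at half resolution)
int totalBytes  = lumaBytes + chromaBytes; // 3,110,400 == width * height * 3 / 2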
Example #5
        /// <summary>
        /// Invoked if the NV12 to RGB conversion is complete and the data is ready to be read to the CPU.
        /// </summary>
        private void OnCompleteReadback(AsyncGPUReadbackRequest request)
        {
            if (request.hasError)
            {
                Debug.LogError("GPU readback error");
                return;
            }

            MatUtils.copyToMat(request.GetData<uint>(), _rgb);
            Core.flip(_rgb, _rgb, 0); // image is flipped on x-axis
            CameraFrame           newFrame = new CameraFrame(_rgb, CurrentCameraFrame.Intrinsic, CurrentCameraFrame.Extrinsic, CurrentCameraFrame.Width, CurrentCameraFrame.Height, CurrentCameraFrame.FrameCount, Format);
            FrameArrivedEventArgs args     = new FrameArrivedEventArgs(newFrame);

            _frame = newFrame;
            FrameArrived?.Invoke(this, args);
            FPSUtils.VideoTick();
            NewFrameAvailable = true;
            IsProcessingFrame = false;
        }
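For context, the readback that ends in this callback is typically issued with Unity's AsyncGPUReadback API. In this sketch the _rgbRenderTexture field (a RenderTexture holding the NV12-to-RGB result) is an assumption:

// Requires UnityEngine and UnityEngine.Rendering.
private void RequestReadback()
{
    // Unity invokes OnCompleteReadback on the main thread once the GPU-to-CPU copy finishes.
    AsyncGPUReadback.Request(_rgbRenderTexture, 0, TextureFormat.RGBA32, OnCompleteReadback);
}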
Example #6
        /// <summary>
        /// Invoked on each received video frame. Extracts the image according to the <see cref="ColorFormat"/> and invokes the <see cref="FrameArrived"/> event containing a <see cref="CameraFrame"/>.
        /// </summary>
        private unsafe void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            if (sender == null)
            {
                throw new ArgumentNullException(nameof(sender));
            }
            if (args == null)
            {
                throw new ArgumentNullException(nameof(args));
            }
            using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
            {
                if (frame == null)
                {
                    return;
                }
                SoftwareBitmap originalSoftwareBitmap = frame.VideoMediaFrame?.SoftwareBitmap;
                if (originalSoftwareBitmap == null)
                {
                    _logger.LogWarning("Received frame without image.");
                    return;
                }

                CameraExtrinsic extrinsic = new CameraExtrinsic(frame.CoordinateSystem, WorldOrigin);
                CameraIntrinsic intrinsic = new CameraIntrinsic(frame.VideoMediaFrame.CameraIntrinsics);

                using (var input = originalSoftwareBitmap.LockBuffer(BitmapBufferAccessMode.Read))
                    using (var inputReference = input.CreateReference())
                    {
                        byte *inputBytes;
                        uint  inputCapacity;
                        ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputBytes, out inputCapacity);
                        MatUtils.copyToMat((IntPtr)inputBytes, _bitmap);
                        int thisFrameCount = Interlocked.Increment(ref FrameCount);

                        // TODO: check whether the event can be raised outside the using block
                        CameraFrame           cameraFrame = new CameraFrame(_bitmap, intrinsic, extrinsic, FrameWidth, FrameHeight, (uint)thisFrameCount, _format);
                        FrameArrivedEventArgs eventArgs   = new FrameArrivedEventArgs(cameraFrame);
                        FrameArrived?.Invoke(this, eventArgs);
                    }
                originalSoftwareBitmap?.Dispose();
            }
        }
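The cast to IMemoryBufferByteAccess relies on a COM interop declaration the snippet omits; it is conventionally declared like this:

using System;
using System.Runtime.InteropServices;

[ComImport]
[Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
unsafe interface IMemoryBufferByteAccess
{
    // Returns a raw pointer to the underlying bitmap buffer and its size in bytes.
    void GetBuffer(out byte* buffer, out uint capacity);
}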
Example #7
 private async void ReceiveLoop()
 {
     while (IsActive)
     {
         if (InternalFrameProvider!.Grab())
         {
             Mat matFrame = InternalFrameProvider.RetrieveMat();
             if (matFrame.Empty())
             {
                 StopActivity();
                 break;
             }
             MatVideoFrame frame = new MatVideoFrame(matFrame, EncodingProperties!)
             {
                 RelativeTime = DateTime.Now - StartTime, // elapsed time since the capture started
             };
             FrameArrived?.Invoke(this, frame);
         }
         await Task.Delay(10);
     }
 }
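InternalFrameProvider mirrors OpenCV's grab/retrieve split, which separates cheap frame acquisition from decoding. A minimal equivalent with OpenCvSharp (the library choice is an assumption; the example's provider type is not shown):

using OpenCvSharp;

using var capture = new VideoCapture(0);   // device index 0
while (capture.Grab())                     // cheap: advance to the next frame
{
    using Mat mat = capture.RetrieveMat(); // decode the grabbed frame
    if (mat.Empty())
    {
        break;
    }
    // ... hand mat to the FrameArrived pipeline ...
}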
Example #8
        /// <summary>
        /// MediaFrameReader.FrameArrived callback. Extracts VideoFrame and timestamp and forwards event
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            MediaFrameReference frame = null;

            lock (m_lock)
            {
                try
                {
                    frame = sender.TryAcquireLatestFrame();
                }
                catch (System.ObjectDisposedException)
                {
                    frame = null;
                }
            }
            if (frame != null)
            {
                VideoFrame videoFrame = frame.VideoMediaFrame.GetVideoFrame();
                videoFrame.SystemRelativeTime = frame.SystemRelativeTime;
                FrameArrived?.Invoke(sender, videoFrame);
            }
        }
        public void Update()
        {
            if (!_available)
            {
                return;
            }
            if (!_cameraTexture.didUpdateThisFrame)
            {
                return;
            }
            Color32[] pixels32 = _cameraTexture.GetPixels32();
            Utils.setDebugMode(true);
            Mat argbMat = new Mat(_targetVideoHeight, _targetVideoWidth, CvType.CV_8UC4);

            MatUtils.copyToMat(pixels32, argbMat);
            if (argbMat.empty())
            {
                return;
            }
            // Workaround for the OBS virtual camera: drop the frame if it is the uniform grey placeholder.
            double[] values = argbMat.get(0, 0);
            if (values[0] == 128 && values[1] == 129 && values[2] == 127 && values[3] == 255)
            {
                return;
            }
            Mat yuvMat = new Mat(_targetVideoHeight * 3 / 2, _targetVideoWidth, CvType.CV_8UC1); // I420 buffer is 3/2 times the image height

            Imgproc.cvtColor(argbMat, yuvMat, Imgproc.COLOR_BGRA2YUV_I420);
            Mat submat = yuvMat.submat(0, _targetVideoHeight, 0, _targetVideoWidth);

            Core.flip(submat, submat, 0);
            Utils.setDebugMode(false);
            CameraIntrinsic       intrinsic = new CameraIntrinsic();
            CameraExtrinsic       extrinsic = new CameraExtrinsic(Matrix4x4.identity);
            CameraFrame           frame     = new CameraFrame(submat, intrinsic, extrinsic, _targetVideoWidth, _targetVideoHeight, frameCount++, ColorFormat.Unknown);
            FrameArrivedEventArgs args      = new FrameArrivedEventArgs(frame);

            FrameArrived?.Invoke(this, args);
        }
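The submat call keeps only the Y plane of the I420 buffer; the plane offsets follow from the layout. A worked computation with illustrative values:

// I420: Y plane (w*h), then U plane (w/2 x h/2), then V plane (w/2 x h/2).
int w = 1280, h = 720;
int yOffset = 0;                   // rows [0, h) of the w-wide buffer
int uOffset = w * h;               // 921,600
int vOffset = uOffset + w * h / 4; // 1,152,000
int total   = w * h * 3 / 2;       // 1,382,400 bytes; the Mat is (h * 3 / 2) x w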
Example #10
        private void OnFrameArrived(Direct3D11CaptureFramePool sender, object args)
        {
            var newSize = false;

            using (var frame = sender.TryGetNextFrame())
            {
                if (frame.ContentSize.Width != _lastSize.Width ||
                    frame.ContentSize.Height != _lastSize.Height)
                {
                    newSize   = true;
                    _lastSize = frame.ContentSize;
                }
                using (var bitmap = Direct3D11Helper.CreateSharpDXTexture2D(frame.Surface))
                {
                    FrameArrived?.Invoke(new CapturedBitmap(bitmap));
                }
            }
            if (newSize)
            {
                _framePool.Recreate(_device, DirectXPixelFormat.B8G8R8A8UIntNormalized, 2, _lastSize);
            }
        }
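The pool and its FrameArrived subscription are created elsewhere; typical setup with the Windows.Graphics.Capture API looks like the following sketch, where _item (a GraphicsCaptureItem) and _session are assumptions:

_lastSize  = _item.Size;
_framePool = Direct3D11CaptureFramePool.Create(
    _device,                                    // IDirect3DDevice
    DirectXPixelFormat.B8G8R8A8UIntNormalized,  // must match the Recreate call above
    2,                                          // number of buffered frames
    _lastSize);
_framePool.FrameArrived += OnFrameArrived;
_session = _framePool.CreateCaptureSession(_item);
_session.StartCapture();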
Example #11
 public static extern IntPtr CreateCamera(string name, RegionOfInterest roi,
                                          FrameArrived frameArrived, CameraConnected connected, CameraDisconnected disconnected);
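The snippet omits the DllImport attribute and the callback delegates the native library expects; a hedged reconstruction (the DLL name and delegate signatures are assumptions, only the general P/Invoke shape is standard):

using System;
using System.Runtime.InteropServices;

// Hypothetical callback shapes; the native header defines the real ones.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void FrameArrived(IntPtr frameData, int length);
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void CameraConnected(IntPtr camera);
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void CameraDisconnected(IntPtr camera);

[DllImport("NativeCamera.dll")] // DLL name is an assumption
public static extern IntPtr CreateCamera(string name, RegionOfInterest roi,
                                         FrameArrived frameArrived, CameraConnected connected, CameraDisconnected disconnected);

Note that the delegate instances passed to CreateCamera must be kept alive (e.g., stored in fields) so the garbage collector does not reclaim them while native code still holds the function pointers.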
Example #12
 /// <summary>
 /// MediaPlayer.VideoFrameAvailable callback. Copies the frame to the Direct3D surface, stamps the playback position, and forwards the event
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="args"></param>
 private void mediaPlayer_VideoFrameAvailable(MediaPlayer sender, object args)
 {
     m_mediaPlayer.CopyFrameToVideoSurface(m_videoFrame.Direct3DSurface);
     m_videoFrame.SystemRelativeTime = m_mediaPlayer.PlaybackSession.Position;
     FrameArrived?.Invoke(this, m_videoFrame);
 }
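This handler only fires when the MediaPlayer runs in frame-server mode; a minimal setup sketch (the source URI is illustrative):

m_mediaPlayer = new MediaPlayer
{
    IsVideoFrameServerEnabled = true, // route decoded frames to VideoFrameAvailable
    Source = MediaSource.CreateFromUri(new Uri("ms-appx:///Assets/video.mp4"))
};
m_mediaPlayer.VideoFrameAvailable += mediaPlayer_VideoFrameAvailable;
m_mediaPlayer.Play();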
Example #13
        public Screen(Slave slave, int virtualWid, int virtualHei)
        {
            // initialize
            this.port = slave.ScreenPort;
            _dev      = slave;
            // Get the screen size
            var adb = new AdbClient(AdbServer.Instance.EndPoint);

            adb.ExecuteRemoteCommand("dumpsys window", slave.Device, this);
            // Wait for the command to finish
            while (!flushed)
            {
                Thread.Sleep(100);
            }
            // Run
            new Thread(() =>
            {
                var realSiz    = $"{Width}x{Height}";
                var virtualSiz = $"{virtualWid}x{virtualHei}";
                // Start minicap
                new AdbClient(AdbServer.Instance.EndPoint)
                .ExecuteRemoteCommand($"LD_LIBRARY_PATH=/data/local/tmp/ /data/local/tmp/minicap -S -P {realSiz}@{virtualSiz}/0", slave.Device, this);
            })
            {
                IsBackground = true
            }.Start();
            // Wait for the minicap service to start
            Thread.Sleep(1000);
            // Set up ADB port forwarding
            AdbClient.Instance.CreateForward(slave.Device,
                                             new ForwardSpec()
            {
                Port = port, Protocol = ForwardProtocol.Tcp
            },
                                             new ForwardSpec()
            {
                Protocol = ForwardProtocol.LocalAbstract, SocketName = "minicap"
            },
                                             true);

            client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);

            new Thread(() => {
#if DEBUG
                var secAccu   = 0.0;
                var frameAccu = 0;
#endif
                while (true)
                {
                    if (!started)
                    {
                        Thread.Sleep(100);
                        continue;
                    }

                    try
                    {
                        var before = DateTime.Now;
                        // The first 4 bytes of each frame hold its length
                        var lenBuf = new byte[4];
                        client.Receive(lenBuf, 4, SocketFlags.None);
                        var length = BitConverter.ToInt32(lenBuf, 0);
                        if (length <= 0)
                        {
                            // wrong frame
                            continue;
                        }
                        if (length / 1024 > 1000)
                        {
                            continue;
                        }
                        var frameBuf = new byte[length];
                        int len      = 0;
                        var ms       = new MemoryStream();
                        while (ms.Length < length)
                        {
                            len = client.Receive(frameBuf, length - (int)ms.Length, SocketFlags.None);
                            ms.Write(frameBuf, 0, len);
                        }
                        var imgBuffer = ms.ToArray();
                        // jpeg header
                        if ((imgBuffer[0] != 0xff || imgBuffer[1] != 0xD8))
                        {
                            continue;
                        }
                        ms.Position = 0; // rewind: Image.FromStream reads from the current position
                        var img = Image.FromStream(ms, true, true);
                        FrameArrived?.Invoke(slave.Device, this, img);
                        GC.Collect();

                        var subTime   = DateTime.Now.Subtract(before);
                        var frameTime = 1000 / frameLimiter;
                        //// Not yet consumed the time budget for one frame
                        //if (subTime.TotalMilliseconds < frameTime)
                        //{
                        //    Thread.Sleep(frameTime - (int)subTime.TotalMilliseconds);
                        //}
#if DEBUG
                        secAccu += subTime.TotalMilliseconds < frameTime ? frameTime : subTime.TotalMilliseconds;
                        if (secAccu >= 1000)
                        {
                            //Debug.WriteLine($"***************** frame rate: {frameAccu}");
                            frameAccu = 0;
                            secAccu   = 0;
                        }
                        //Debug.WriteLine($"frame size: {imgBuffer.Length / 1024} kB.");
                        //Debug.WriteLine($"proc time: {subTime.TotalMilliseconds} ms.");
                        ++frameAccu;
#endif
                    }
                    catch (Exception ex)
                    {
                        Program.Logs.WriteLog("Error reading frame bytes!", ex.Message, Core.LogLevel.Exception);
                    }
                }
            })
            {
                IsBackground = true
            }.Start();
        }
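The inner read loop works because each chunk is copied into the MemoryStream before the buffer is reused; a common alternative is an exact-read helper like this sketch (not part of the original):

// Read exactly count bytes from the socket, or throw if the peer closes.
private static byte[] ReceiveExact(Socket socket, int count)
{
    var buffer = new byte[count];
    int read = 0;
    while (read < count)
    {
        int n = socket.Receive(buffer, read, count - read, SocketFlags.None);
        if (n == 0)
        {
            throw new SocketException((int)SocketError.ConnectionReset);
        }
        read += n;
    }
    return buffer;
}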
Example #14
        private void MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            if (m_frameBool)
            {
                //pull multisource frame reference out
                MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
                //Return on null
                if (multiSourceFrame is null)
                {
                    return;
                }
                //Calibration and full get sampled number of frames
                if ((m_currentFrameType.Equals(FrameType.Calibration) || m_currentFrameType.Equals(FrameType.Full)))
                {
                    using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                    {
                        //Store one frame
                        m_tempDepthData[m_sampleIndex] = new ushort[m_depthFrameDescription.Width * m_depthFrameDescription.Height];
                        depthFrame.CopyFrameDataToArray(m_tempDepthData[m_sampleIndex]);
                        m_minDepth = depthFrame.DepthMinReliableDistance;
                        m_maxDepth = depthFrame.DepthMaxReliableDistance;
                    }
                    //...until all samples are acquired
                    if (m_sampleIndex == m_samplingRate - 1)
                    {
                        //Then clean the points
                        CleanDepth();
                    }
                    else
                    {
                        //Not done, get next sample
                        m_sampleIndex++;
                        return;
                    }
                }
                //Instantiate images
                m_depthImage    = new WriteableBitmap(m_depthFrameDescription.Width, m_depthFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
                m_colorImage    = new WriteableBitmap(m_colorFrameDescription.Width, m_colorFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
                m_infraredImage = new WriteableBitmap(m_infraredFrameDescription.Width, m_infraredFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
                switch (m_currentFrameType)
                {
                case FrameType.Alignment:
                    using (DepthFrame depthframe = multiSourceFrame.DepthFrameReference.AcquireFrame())
                    {
                        depthframe.CopyFrameDataToArray(m_depthData);
                        m_maxDepth = depthframe.DepthMaxReliableDistance;
                        m_minDepth = depthframe.DepthMinReliableDistance;
                        ProcessDepth();
                        KinectFrameArgs args = new KinectFrameArgs(FrameType.Alignment, m_depthImage);
                        args.pointCloudV3 = m_cameraSpacePoints.Where(x => x.X != float.NegativeInfinity).Select(x => new Vector3(x.X, x.Y, x.Z)).ToArray();
                        FrameArrived.Invoke(HelperContext, args);
                    }
                    break;

                case FrameType.Calibration:
                    ProcessDepth();
                    FrameArrived.Invoke(HelperContext, new KinectFrameArgs(FrameType.Calibration, m_depthImage));
                    break;

                case FrameType.Full:
                    using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                        using (InfraredFrame infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
                        {
                            ProcessDepth();
                            ProcessColor(colorFrame);
                            ProcessInfrared(infraredFrame);
                            KinectFrameArgs args = new KinectFrameArgs(FrameType.Full, m_depthImage, m_colorImage, m_infraredImage);
                            args.pointCloudCSP = m_cameraSpacePoints;
                            args.pointCloudV3  = m_cameraSpacePoints.Where(x => x.X != float.NegativeInfinity).Select(x => new Vector3(x.X, x.Y, x.Z)).ToArray();
                            FrameArrived.Invoke(HelperContext, args);
                        }
                    break;
                }
                m_frameBool = false;
            }
            else
            {
                return;
            }
        }
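The handler assumes a MultiSourceFrameReader opened for the depth, color, and infrared streams; typical Kinect SDK 2.0 setup looks like this sketch (field names mirror the example, the rest is an assumption):

m_kinectSensor = KinectSensor.GetDefault();
m_multiSourceFrameReader = m_kinectSensor.OpenMultiSourceFrameReader(
    FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.Infrared);
m_multiSourceFrameReader.MultiSourceFrameArrived += MultiSourceFrameArrived;

m_depthFrameDescription    = m_kinectSensor.DepthFrameSource.FrameDescription;
m_colorFrameDescription    = m_kinectSensor.ColorFrameSource.FrameDescription;
m_infraredFrameDescription = m_kinectSensor.InfraredFrameSource.FrameDescription;
m_kinectSensor.Open();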