void Update()
    {
        // Poll the Kinect body reader once per Unity frame; no-op until the reader exists.
        if (i_reader == null)
        {
            return;
        }

        // AcquireLatestFrame returns null when no new body frame is available yet.
        var frame = i_reader.AcquireLatestFrame();
        if (frame == null)
        {
            return;
        }

        // BodyFrame is IDisposable; 'using' guarantees release even if CalculateTilt
        // or GetAndRefreshBodyData throws (the original leaked the frame on exception).
        using (frame)
        {
            // Lazily allocate the body buffer once, sized to the sensor's capacity.
            if (i_bodies == null)
            {
                i_bodies = new Body[i_sensor.BodyFrameSource.BodyCount];
            }

            CalculateTilt(frame);
            frame.GetAndRefreshBodyData(i_bodies);
        }

        // Notify listeners after the frame is released so handlers cannot prolong its lifetime.
        frameArrived.Invoke(i_bodies);
    }
        /// <summary>
        /// Polls the Unity WebCamTexture once per frame, converts the new pixels to an
        /// I420 YUV Mat, and publishes it to FrameArrived subscribers. Frames are dropped
        /// when the camera is unavailable, the Mat is empty, or the OBS virtual camera
        /// emits its uniform grey placeholder frame.
        /// </summary>
        public void Update()
        {
            // Skip all work until the camera is available and produced a new frame.
            if (!_available)
            {
                return;
            }
            if (!_cameraTexture.didUpdateThisFrame)
            {
                return;
            }

            Color32[] pixels32 = _cameraTexture.GetPixels32();

            // Enable OpenCV debug mode only around the native calls. The original left
            // it permanently enabled whenever one of the early returns below fired.
            Utils.setDebugMode(true);
            Mat argbMat = new Mat(_targetVideoHeight, _targetVideoWidth, CvType.CV_8UC4);
            Mat submat = null;
            try
            {
                MatUtils.copyToMat(pixels32, argbMat);
                if (argbMat.empty())
                {
                    return;
                }
                // Workaround for OBS virtual cam: drop the frame if the first pixel is
                // the characteristic grey placeholder value.
                double[] values = argbMat.get(0, 0);
                if (values[0] == 128 && values[1] == 129 && values[2] == 127 && values[3] == 255)
                {
                    return;
                }
                // cvtColor reallocates yuvMat to the I420 output size; submat stays a
                // view into its buffer, so yuvMat must remain undisposed while the
                // CameraFrame built from submat is in flight.
                Mat yuvMat = new Mat(_targetVideoHeight * 2 / 3, _targetVideoWidth, CvType.CV_8UC1);
                Imgproc.cvtColor(argbMat, yuvMat, Imgproc.COLOR_BGRA2YUV_I420);
                submat = yuvMat.submat(0, _targetVideoHeight, 0, _targetVideoWidth);
                Core.flip(submat, submat, 0);
            }
            finally
            {
                Utils.setDebugMode(false);
                // The ARGB staging mat is native memory; release it deterministically
                // (the original relied on the finalizer and leaked on dropped frames).
                argbMat.Dispose();
            }
            // submat is still null when the frame was dropped inside the try block.
            if (submat == null)
            {
                return;
            }

            CameraIntrinsic       intrinsic = new CameraIntrinsic();
            CameraExtrinsic       extrinsic = new CameraExtrinsic(Matrix4x4.identity);
            CameraFrame           frame     = new CameraFrame(submat, intrinsic, extrinsic, _targetVideoWidth, _targetVideoHeight, frameCount++, ColorFormat.Unknown);
            FrameArrivedEventArgs args      = new FrameArrivedEventArgs(frame);

            FrameArrived?.Invoke(this, args);
        }
        /// <summary>
        /// MediaFrameReader.FrameArrived callback. Extracts the VideoFrame and timestamp
        /// from the latest frame reference and forwards it via the FrameArrived event.
        /// Subscribers must consume the VideoFrame synchronously within the handler.
        /// </summary>
        /// <param name="sender">The frame reader that produced the frame.</param>
        /// <param name="args">Event args (unused; the latest frame is pulled from the reader).</param>
        private void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            MediaFrameReference frame = null;

            // Serialize acquisition against teardown of the reader on another thread.
            lock (m_lock)
            {
                try
                {
                    frame = sender.TryAcquireLatestFrame();
                }
                catch (System.ObjectDisposedException)
                {
                    // Reader was disposed concurrently; treat as "no frame available".
                    frame = null;
                }
            }
            if (frame == null)
            {
                return;
            }
            // MediaFrameReference is IDisposable; dispose it once the handler chain has
            // run so the reader can recycle the underlying buffer. The original never
            // disposed it, which starves the reader's frame pool over time.
            using (frame)
            {
                VideoFrame videoFrame = frame.VideoMediaFrame.GetVideoFrame();
                videoFrame.SystemRelativeTime = frame.SystemRelativeTime;
                FrameArrived?.Invoke(this, videoFrame);
            }
        }
예제 #4
0
        /// <summary>
        /// Direct3D11CaptureFramePool callback: wraps the captured surface in a
        /// SharpDX texture, publishes it as a CapturedBitmap, and recreates the
        /// frame pool when the capture content size has changed.
        /// </summary>
        /// <param name="sender">The frame pool that produced the frame.</param>
        /// <param name="args">Unused.</param>
        private void OnFrameArrived(Direct3D11CaptureFramePool sender, object args)
        {
            var sizeChanged = false;

            using (var capturedFrame = sender.TryGetNextFrame())
            {
                // Detect a content-size change so the pool can be rebuilt after
                // this frame has been released.
                var contentSize = capturedFrame.ContentSize;
                if (contentSize.Width != _lastSize.Width ||
                    contentSize.Height != _lastSize.Height)
                {
                    sizeChanged = true;
                    _lastSize   = contentSize;
                }

                using (var texture = Direct3D11Helper.CreateSharpDXTexture2D(capturedFrame.Surface))
                {
                    FrameArrived?.Invoke(new CapturedBitmap(texture));
                }
            }

            // Recreate the pool only once the frame is disposed, matching the new size.
            if (sizeChanged)
            {
                _framePool.Recreate(_device, DirectXPixelFormat.B8G8R8A8UIntNormalized, 2, _lastSize);
            }
        }
예제 #5
0
 /// <summary>
 /// MediaPlayer.VideoFrameAvailable callback. Copies the current frame into the
 /// preallocated video surface, stamps it with the playback position, and forwards
 /// it to FrameArrived subscribers.
 /// </summary>
 /// <param name="sender">The media player raising the event.</param>
 /// <param name="args">Unused.</param>
 private void mediaPlayer_VideoFrameAvailable(MediaPlayer sender, object args)
 {
     // Reuse the cached m_videoFrame surface instead of allocating per frame.
     var targetFrame = m_videoFrame;
     m_mediaPlayer.CopyFrameToVideoSurface(targetFrame.Direct3DSurface);
     targetFrame.SystemRelativeTime = m_mediaPlayer.PlaybackSession.Position;
     FrameArrived?.Invoke(this, targetFrame);
 }
예제 #6
0
        /// <summary>
        /// Builds a screen-streaming session for an Android device: queries the display
        /// size via "dumpsys window", launches minicap on the device, forwards its
        /// local-abstract socket to a local TCP port, then runs a background thread
        /// that reads length-prefixed JPEG frames and raises FrameArrived per image.
        /// </summary>
        /// <param name="slave">Device wrapper providing the adb DeviceData and the TCP port to use.</param>
        /// <param name="virtualWid">Virtual (downscaled) width requested from minicap.</param>
        /// <param name="virtualHei">Virtual (downscaled) height requested from minicap.</param>
        public Screen(Slave slave, int virtualWid, int virtualHei)
        {
            // initialize
            this.port = slave.ScreenPort;
            _dev      = slave;
            // Query the physical screen size; this object receives the command output
            // (it presumably parses Width/Height and sets 'flushed' — see receiver impl).
            var adb = new AdbClient(AdbServer.Instance.EndPoint);

            adb.ExecuteRemoteCommand("dumpsys window", slave.Device, this);
            // Busy-wait until the dumpsys output has been parsed.
            while (!flushed)
            {
                Thread.Sleep(100);
            }
            // Run minicap on a background thread: ExecuteRemoteCommand blocks for the
            // lifetime of the minicap process.
            new Thread(() =>
            {
                var realSiz    = $"{Width}x{Height}";
                var virtualSiz = $"{virtualWid}x{virtualHei}";
                // Start minicap with real@virtual projection, 0° rotation.
                new AdbClient(AdbServer.Instance.EndPoint)
                .ExecuteRemoteCommand($"LD_LIBRARY_PATH=/data/local/tmp/ /data/local/tmp/minicap -S -P {realSiz}@{virtualSiz}/0", slave.Device, this);
            })
            {
                IsBackground = true
            }.Start();
            // Give the minicap service a fixed grace period to come up.
            // NOTE(review): a hard-coded 1 s sleep is a startup race — confirm minicap
            // readiness (e.g. by probing the socket) instead of sleeping.
            Thread.Sleep(1000);
            // Forward the device's "minicap" local-abstract socket to the local TCP port.
            AdbClient.Instance.CreateForward(slave.Device,
                                             new ForwardSpec()
            {
                Port = port, Protocol = ForwardProtocol.Tcp
            },
                                             new ForwardSpec()
            {
                Protocol = ForwardProtocol.LocalAbstract, SocketName = "minicap"
            },
                                             true);

            client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);

            // Frame-pump thread: reads minicap's length-prefixed JPEG stream forever.
            new Thread(() => {
#if DEBUG
                var secAccu   = 0.0;
                var frameAccu = 0;
#endif
                while (true)
                {
                    // Idle until streaming has been started elsewhere (sets 'started').
                    if (!started)
                    {
                        Thread.Sleep(100);
                        continue;
                    }

                    try
                    {
                        var before = DateTime.Now;
                        // The first 4 bytes of each frame carry the payload length.
                        // NOTE(review): Receive may return fewer than 4 bytes; confirm the
                        // header cannot arrive split across reads on this transport.
                        var lenBuf = new byte[4];
                        client.Receive(lenBuf, 4, SocketFlags.None);
                        var length = BitConverter.ToInt32(lenBuf, 0);
                        if (length <= 0)
                        {
                            // wrong frame
                            continue;
                        }
                        // Drop implausibly large frames (over ~1000 kB).
                        if (length / 1024 > 1000)
                        {
                            continue;
                        }
                        // Read exactly 'length' bytes, accumulating partial receives.
                        var frameBuf = new byte[length];
                        int len      = 0;
                        var ms       = new MemoryStream();
                        while (ms.Length < length)
                        {
                            len = client.Receive(frameBuf, length - (int)ms.Length, SocketFlags.None);
                            ms.Write(frameBuf, 0, len);
                        }
                        var imgBuffer = ms.ToArray();
                        // JPEG SOI marker check (0xFF 0xD8): skip non-JPEG payloads.
                        if ((imgBuffer[0] != 0xff || imgBuffer[1] != 0xD8))
                        {
                            continue;
                        }
                        // NOTE(review): ms.Position is at end-of-stream here and
                        // Image.FromStream reads from the current position — verify this
                        // decodes correctly; callers typically rewind (ms.Position = 0) first.
                        var img = Image.FromStream(ms, true, true);
                        FrameArrived?.Invoke(slave.Device, this, img);
                        // NOTE(review): forcing a GC per frame is expensive; presumably a
                        // workaround for GDI+ buffer retention — confirm it is still needed.
                        GC.Collect();

                        var subTime   = DateTime.Now.Subtract(before);
                        var frameTime = 1000 / frameLimiter;
                        //// Not yet consumed the time budget of one frame.
                        //if (subTime.TotalMilliseconds < frameTime)
                        //{
                        //    Thread.Sleep(frameTime - (int)subTime.TotalMilliseconds);
                        //}
#if DEBUG
                        // Accumulate elapsed time and count frames to derive a frame rate.
                        secAccu += subTime.TotalMilliseconds < frameTime ? frameTime : subTime.TotalMilliseconds;
                        if (secAccu >= 1000)
                        {
                            //Debug.WriteLine($"***************** frame rate: {frameAccu}");
                            frameAccu = 0;
                            secAccu   = 0;
                        }
                        //Debug.WriteLine($"frame size: {imgBuffer.Length / 1024} kB.");
                        //Debug.WriteLine($"proc time: {subTime.TotalMilliseconds} ms.");
                        ++frameAccu;
#endif
                    }
                    catch (Exception ex)
                    {
                        // Log and keep the pump alive; a single bad frame must not kill the loop.
                        Program.Logs.WriteLog("读取帧字节出错!", ex.Message, Core.LogLevel.Exception);
                    }
                }
            })
            {
                IsBackground = true
            }.Start();
        }
예제 #7
0
        /// <summary>
        /// MultiSourceFrameReader callback. When a frame has been requested
        /// (m_frameBool), acquires depth/color/infrared data according to the current
        /// frame type, renders it into WriteableBitmaps, and raises FrameArrived.
        /// Calibration and Full first accumulate m_samplingRate depth samples.
        /// </summary>
        /// <param name="sender">The frame reader (unused).</param>
        /// <param name="e">Event args carrying the multi-source frame reference.</param>
        private void MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Gate: only process when a frame has been explicitly requested.
            if (!m_frameBool)
            {
                return;
            }
            // Pull the multi-source frame reference out; it can expire before we
            // acquire it, in which case we simply wait for the next event.
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
            if (multiSourceFrame is null)
            {
                return;
            }
            // Calibration and Full accumulate a sampled number of depth frames first.
            if ((m_currentFrameType.Equals(FrameType.Calibration) || m_currentFrameType.Equals(FrameType.Full)))
            {
                using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                {
                    // A sub-frame can be null even when the multi-source frame is not;
                    // the original dereferenced it unchecked and could throw NRE.
                    if (depthFrame is null)
                    {
                        return; // m_frameBool stays set, so the next event retries
                    }
                    // Store one depth sample.
                    m_tempDepthData[m_sampleIndex] = new ushort[m_depthFrameDescription.Width * m_depthFrameDescription.Height];
                    depthFrame.CopyFrameDataToArray(m_tempDepthData[m_sampleIndex]);
                    m_minDepth = depthFrame.DepthMinReliableDistance;
                    m_maxDepth = depthFrame.DepthMaxReliableDistance;
                }
                if (m_sampleIndex == m_samplingRate - 1)
                {
                    // All samples acquired: clean the accumulated depth points.
                    // NOTE(review): m_sampleIndex is presumably reset inside CleanDepth — confirm.
                    CleanDepth();
                }
                else
                {
                    // Not done; wait for the next sample.
                    m_sampleIndex++;
                    return;
                }
            }
            // Instantiate fresh bitmaps for the frame about to be published.
            m_depthImage    = new WriteableBitmap(m_depthFrameDescription.Width, m_depthFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
            m_colorImage    = new WriteableBitmap(m_colorFrameDescription.Width, m_colorFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
            m_infraredImage = new WriteableBitmap(m_infraredFrameDescription.Width, m_infraredFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
            switch (m_currentFrameType)
            {
            case FrameType.Alignment:
                using (DepthFrame depthframe = multiSourceFrame.DepthFrameReference.AcquireFrame())
                {
                    // Null sub-frame: retry on the next event (m_frameBool stays set).
                    if (depthframe is null)
                    {
                        return;
                    }
                    depthframe.CopyFrameDataToArray(m_depthData);
                    m_maxDepth = depthframe.DepthMaxReliableDistance;
                    m_minDepth = depthframe.DepthMinReliableDistance;
                    ProcessDepth();
                    KinectFrameArgs args = new KinectFrameArgs(FrameType.Alignment, m_depthImage);
                    // Keep only points with valid coordinates (invalid ones are -Infinity).
                    args.pointCloudV3 = m_cameraSpacePoints.Where(x => x.X != float.NegativeInfinity).Select(x => new Vector3(x.X, x.Y, x.Z)).ToArray();
                    FrameArrived.Invoke(HelperContext, args);
                }
                break;

            case FrameType.Calibration:
                ProcessDepth();
                FrameArrived.Invoke(HelperContext, new KinectFrameArgs(FrameType.Calibration, m_depthImage));
                break;

            case FrameType.Full:
                using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                    using (InfraredFrame infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
                    {
                        // Null sub-frames: retry on the next event (m_frameBool stays set).
                        if (colorFrame is null || infraredFrame is null)
                        {
                            return;
                        }
                        ProcessDepth();
                        ProcessColor(colorFrame);
                        ProcessInfrared(infraredFrame);
                        KinectFrameArgs args = new KinectFrameArgs(FrameType.Full, m_depthImage, m_colorImage, m_infraredImage);
                        args.pointCloudCSP = m_cameraSpacePoints;
                        args.pointCloudV3  = m_cameraSpacePoints.Where(x => x.X != float.NegativeInfinity).Select(x => new Vector3(x.X, x.Y, x.Z)).ToArray();
                        FrameArrived.Invoke(HelperContext, args);
                    }
                break;
            }
            // Mark the pending request as fulfilled.
            m_frameBool = false;
        }