public void StartReceive()
        {
            IsStarted = true;

            TcpClient client = null;

            _networkService.OnConnectionsChanged += (s, e) =>
            {
                client = e.Connections.FirstOrDefault();
            };

            if (_networkService.ConnectToServer())
            {
                // client is assigned asynchronously by the handler above, so it
                // can still be null here if OnConnectionsChanged has not fired yet.
                if (client != null)
                {
                    IFormatter formatter = new BinaryFormatter();

                    Task.Factory.StartNew(() =>
                    {
                        while (IsStarted)
                        {
                            OnFrame?.Invoke(this, new ScreenShareEventArgs()
                            {
                                Capture = formatter.Deserialize(client.GetStream()) as byte[]
                            });
                        }
                    });
                }
            }
        }
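Two caveats in the snippet above: the client null-check races the OnConnectionsChanged handler, and BinaryFormatter is insecure (and obsolete as of .NET 5). A minimal length-prefixed alternative sketch, assuming the same IsStarted, OnFrame, and ScreenShareEventArgs members; the 4-byte little-endian length prefix is an assumed wire format, not the original project's:

        // Reads frames as [4-byte length][payload] records from the stream.
        void ReceiveLoop(NetworkStream stream)
        {
            var lengthBuffer = new byte[4];
            while (IsStarted)
            {
                ReadExactly(stream, lengthBuffer, 4);
                int length = BitConverter.ToInt32(lengthBuffer, 0);

                var payload = new byte[length];
                ReadExactly(stream, payload, length);

                OnFrame?.Invoke(this, new ScreenShareEventArgs { Capture = payload });
            }
        }

        // Stream.Read may return fewer bytes than requested; loop until done.
        static void ReadExactly(System.IO.Stream stream, byte[] buffer, int count)
        {
            int offset = 0;
            while (offset < count)
            {
                int read = stream.Read(buffer, offset, count - offset);
                if (read == 0)
                {
                    throw new System.IO.EndOfStreamException();
                }
                offset += read;
            }
        }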
Example #2
        public bool Decode(OnPacket onPacket, OnFrame onFrame)
        {
            int ret = ffmpeg.av_read_frame(formatContext, packet);

            if (ret < 0)
            {
                return(false);
            }

            onPacket?.Invoke(packet);

            ret = ffmpeg.avcodec_send_packet(context, packet);
            if (ret < 0)
            {
                return(true);
            }

            while (ret >= 0)
            {
                ret = ffmpeg.avcodec_receive_frame(context, frame);
                if ((ret == ffmpeg.AVERROR(ffmpeg.EAGAIN)) || (ret == ffmpeg.AVERROR_EOF))
                {
                    return(true);
                }
                else if (ret < 0)
                {
                    throw new Exception("error during encoding");
                }

                Console.WriteLine($"id: {context->frame_number}");
                onFrame?.Invoke(frame);
            }

            return(true);
        }
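A note on the control flow above: avcodec_send_packet feeds one packet, then avcodec_receive_frame is drained until it returns AVERROR(EAGAIN) (the decoder needs more input) or AVERROR_EOF, which is why both codes map to a normal true return. A hedged driver sketch, assuming the surrounding Decoder type and an unsafe context (FFmpeg.AutoGen works with raw AVPacket*/AVFrame* pointers):

        // Pump the decoder until av_read_frame reports end of stream,
        // at which point Decode returns false and the loop exits.
        while (decoder.Decode(
                   packet => Console.WriteLine($"packet size: {packet->size}"),
                   frame => Console.WriteLine($"frame: {frame->width}x{frame->height}")))
        {
        }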
Example #3
 void stream__NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
 {
     if (OnFrame != null)
     {
         OnFrame.Invoke(this, new FrameEventArgs {
             Image = eventArgs.Frame
         });
     }
 }
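A variant worth knowing (assumption: the same FrameEventArgs type): AForge reuses and disposes the Bitmap behind eventArgs.Frame after the handler returns, so subscribers that hold on to the image should receive a copy.

 void stream__NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
 {
     OnFrame?.Invoke(this, new FrameEventArgs
     {
         // Clone before publishing; the original Bitmap is recycled by AForge.
         Image = (System.Drawing.Bitmap)eventArgs.Frame.Clone()
     });
 }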
Example #4
 private void PushPixelToFrameBuffer(float sample)
 {
     framePtr[framePtrIndex] = sample;
     framePtrIndex++;
     if (framePtrIndex >= width)
     {
         OnFrame?.Invoke(framePtr, width);
         framePtrIndex = 0;
     }
 }
Example #5
        private void InvokeEvents()
        {
            if (Device == null)
            {
                throw new InvalidOperationException("Renderer is null");
            }

            FrameStarting?.Invoke(this, Device);
            OnFrame?.Invoke(this, Device);
            FrameEnding?.Invoke(this, Device);
        }
Example #6
        /// <summary>Handles the body frame data arriving from the sensor</summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame BFrame = e.FrameReference.AcquireFrame()) {
                if (BFrame != null)
                {
                    if (bodies == null)
                    {
                        bodies = new Body[BFrame.BodyCount];
                    }
                    Frm.TmSpan    = e.FrameReference.RelativeTime;
                    Frm.FloorClip = BFrame.FloorClipPlane;
                    Frm.FrameNum  = FrameCnt;

                    // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                    // As long as those body objects are not disposed and not set to null in the array, those body objects will be reused.
                    BFrame.GetAndRefreshBodyData(bodies);
                    bool FrameHasTrackedBodies = false;
                    for (int BNdx = 0; BNdx < bodies.Length; BNdx++)
                    {
                        if (bodies[BNdx].IsTracked == true)
                        {
                            FrameHasTrackedBodies = true;
                            Frm.BodyNdx           = BNdx;
                            BJoints = bodies[BNdx].Joints;
                            foreach (JointType JType in BJoints.Keys)
                            {
                                Frm.CamPos[(int)JType]      = BJoints[JType].Position;
                                Frm.SmoothedCSP[(int)JType] = BJoints[JType].Position; // overwritten if smoothing is called later
                                Frm.TrackState[(int)JType]  = BJoints[JType].TrackingState;
                            }
                            OnFrame?.Invoke(Frm);
                        }
                    }
                    if (FrameHasTrackedBodies == true)
                    {
                        FrameCnt++;
                    }
                }
            }
        }
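Note that Frm appears to be a single reusable buffer: OnFrame fires once per tracked body, each time passing the same Frm instance mutated in place, so subscribers that retain the argument should copy what they need. FrameCnt, by contrast, advances only once per sensor frame that contained at least one tracked body.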
Example #7
        private void InvokeEvents()
        {
            if (Device == null)
            {
                throw new InvalidOperationException("Renderer is null");
            }

            OnFrameStarting?.Invoke(this, Device);

            if (!Device.IsDrawing)
            {
                Device.BeginScene();
            }

            OnFrame?.Invoke(this, Device);

            if (Device.IsDrawing)
            {
                Device.EndScene();
            }

            OnFrameEnding?.Invoke(this, Device);
        }
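Compared with Example #5, this variant also manages the scene lifetime around the callback: BeginScene is issued only when the device is not already drawing, and EndScene only when it is, so OnFrame handlers can submit draw calls without pairing BeginScene/EndScene themselves.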
Example #8
        public KeyFrameScheduler(
            TimeSpan? beginTime,
            TimeSpan? duration,
            TValue initialValue,
            IEnumerable<IKeyFrame<TValue>>? frames,
            OnFrame onFrame,
            Action<EndReason> onCompleted)
        {
            CurrentValue = initialValue;

            frames ??= Enumerable.Empty<IKeyFrame<TValue>>();
            frames = duration.HasValue
                                ? frames.Where(k => k != null && k.KeyTime.TimeSpan <= duration.Value)
                                : frames.Trim();

            _frames = frames.ToList();
            _frames.Sort(KeyFrameComparer<TValue>.Instance);

            _beginTime   = beginTime;
            _duration    = duration;
            _onFrame     = onFrame;
            _onCompleted = onCompleted;
        }
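A hypothetical usage sketch (the double specialization, the keyFrames variable, and the exact OnFrame delegate shape are assumptions; the real definitions live elsewhere in the repo):

            // Hypothetical: assumes OnFrame receives the interpolated value per tick.
            var scheduler = new KeyFrameScheduler<double>(
                beginTime: TimeSpan.Zero,
                duration: TimeSpan.FromSeconds(2),
                initialValue: 0d,
                frames: keyFrames, // IEnumerable<IKeyFrame<double>>
                onFrame: value => Console.WriteLine(value),
                onCompleted: reason => Console.WriteLine($"ended: {reason}"));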
Example #9
        private void FaceTrackingPipeline()
        {
            IsDispose = false;
            OnStart?.Invoke(this, null);

            #region Manager Init
            realSenseManager = RealSenseObjects.Session.CreateSenseManager();

            if (realSenseManager == null)
            {
                MessageBox.Show(
                    "PXCMSenseManager初始化失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }

            PXCMCaptureManager captureManager = realSenseManager.captureManager;
            if (captureManager == null)
            {
                MessageBox.Show(
                    "PXCMCaptureManager初始化失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }
            #endregion

            #region Basic settings
            // Select the capture device
            captureManager.FilterByDeviceInfo(Form.SelectedDevice);

            // Select the stream profile type
            captureManager.FilterByStreamProfiles(Form.SelectedDeviceStreamProfile);


            // Enable the face tracking module
            realSenseManager.EnableFace();
            PXCMFaceModule faceModule = realSenseManager.QueryFace();
            if (faceModule == null)
            {
                MessageBox.Show(
                    "取得PXCMFaceModule失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }

            // Create the face tracking module configuration
            moduleConfiguration = faceModule.CreateActiveConfiguration();
            if (moduleConfiguration == null)
            {
                MessageBox.Show(
                    "建立PXCMFaceConfiguration失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }
            // Configure the tracking mode
            moduleConfiguration.SetTrackingMode(Form.ModeType);

            moduleConfiguration.strategy                  = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
            moduleConfiguration.detection.isEnabled       = true;
            moduleConfiguration.detection.maxTrackedFaces = 4; // track up to 4 faces
            moduleConfiguration.landmarks.isEnabled       = false;
            moduleConfiguration.pose.isEnabled            = false;

            recognitionConfig =
                moduleConfiguration.QueryRecognition();

            if (recognitionConfig == null)
            {
                MessageBox.Show(
                    "建立RecognitionConfiguration失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }

            recognitionConfig.Enable();
            #endregion

            #region Load database data
            if (Form.FaceData != null)
            {
                recognitionConfig.SetDatabase(Form.FaceData);
                moduleConfiguration.ApplyChanges();
            }
            #endregion

            #region Prepare to start
            moduleConfiguration.EnableAllAlerts();
            //moduleConfiguration.SubscribeAlert(FaceAlertHandler);

            pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();
            Form.SetStatus("RealSenseManager初始化中");
            if (applyChangesStatus.IsError() || realSenseManager.Init().IsError())
            {
                MessageBox.Show(
                    "RealSenseManager初始化失敗,請檢查設定正確。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }
            #endregion

            using (moduleOutput = faceModule.CreateOutput()) {
                PXCMCapture.Device.StreamProfileSet profiles;
                PXCMCapture.Device device = captureManager.QueryDevice();

                if (device == null)
                {
                    MessageBox.Show(
                        "取得設備失敗。",
                        "初始化失敗",
                        MessageBoxButtons.OK,
                        MessageBoxIcon.Error);
                    OnStop?.Invoke(this, null);
                    return;
                }

                device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);

                #region Loop
                while (!_Stop)
                {
                    while (_Paush)
                    {
                        Application.DoEvents();
                    }
                    if (realSenseManager.AcquireFrame(true).IsError())
                    {
                        break;
                    }
                    var isConnected = realSenseManager.IsConnected();
                    if (isConnected)
                    {
                        var sample = realSenseManager.QueryFaceSample();
                        if (sample == null)
                        {
                            realSenseManager.ReleaseFrame();
                            continue;
                        }
                        #region Grab the frame image
                        PXCMImage image = null;
                        if (Form.ModeType == PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR)
                        {
                            image = sample.ir;
                        }
                        else
                        {
                            image = sample.color;
                        }
                        #endregion

                        moduleOutput.Update(); // update recognition results
                        PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition();
                        if (recognition == null)
                        {
                            realSenseManager.ReleaseFrame();
                            continue;
                        }


                        #region Drawing and events
                        OnFrame?.Invoke(this, new FaceRecognitionEventArgs()
                        {
                            Image = ToBitmap(image)
                        });
                        FindFace(moduleOutput);
                        #endregion
                    }
                    // release the frame
                    realSenseManager.ReleaseFrame();
                }
                #endregion

                // Update the database buffer
                //Buffer = moduleOutput.QueryRecognitionModule().GetDatabaseBuffer();
            }

            #region Release resources
            moduleConfiguration.Dispose();
            realSenseManager.Close();
            realSenseManager.Dispose();
            #endregion

            IsDispose = true;
            OnStop?.Invoke(this, null);
        }
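One caveat in the main loop: while (_Paush) Application.DoEvents(); keeps the UI pumping while paused, but it spins a CPU core at full speed; a short Thread.Sleep inside the pause loop is the usual mitigation.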
Example #10
 public void DoFrame(long frameTimeNanos)
 {
     Choreographer.Instance.PostFrameCallback(this);
     OnFrame?.Invoke(frameTimeNanos);
 }
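PostFrameCallback(this) re-registers the callback before raising OnFrame, so DoFrame runs once per vsync until the callback is removed. Hypothetical start/stop helpers, assuming this class implements Choreographer.IFrameCallback (Xamarin.Android):

 // Begin receiving per-vsync callbacks; DoFrame re-posts itself thereafter.
 public void Start() => Choreographer.Instance.PostFrameCallback(this);

 // Stop the cycle by removing the pending callback.
 public void Stop() => Choreographer.Instance.RemoveFrameCallback(this);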
Example #11
 private void Update()
 {
     OnFrame?.Invoke();
 }
Example #12
        /// <summary>
        /// Server-side loop that processes messages from one client.
        /// </summary>
        protected async void ClientLoop(TcpClient tc)
        {
            try
            {
                using (NetworkStream stream = tc.GetStream())
                    using (Timer timer = new Timer((object obj) => { stream.Close(); }, null, Timeout.Infinite, Timeout.Infinite))
                    {
                        await WriteFrame(stream, new Common.Frames.Information.RequestFrame()).ConfigureAwait(false);

                        timer.Change(Common.Configs.HelloTimeout, Timeout.Infinite);
                        var informationFrame = await ReadFrame(stream).ConfigureAwait(false) as Common.Frames.Information.AnswerFrame;

                        timer.Change(Timeout.Infinite, Timeout.Infinite);

                        if (informationFrame == null)
                        {
                            throw new Exception("Invalid Hello.");
                        }

                        ClientData client = new ClientData(this, tc, informationFrame);

                        try
                        {
                            OnConnect?.Invoke(client);
                        }
                        catch { }

                        try
                        {
                            Heartbeat(stream);
                            while (true)
                            {
                                timer.Change(Common.Configs.HeartbeatInterval * 3, Timeout.Infinite);
                                FrameBase frame = await ReadFrame(stream).ConfigureAwait(false);

                                timer.Change(Timeout.Infinite, Timeout.Infinite);

                                if (Common.Configs.Debug)
                                {
                                    Console.WriteLine("R >" + frame.GetType().ToString());
                                }
                                OnFrame?.Invoke(client, frame);
                            }
                        }
                        catch { }

                        try
                        {
                            OnDisconnect?.Invoke(client);
                        }
                        catch { }
                    }
            }
            catch (Exception ex)
            {
                if (Common.Configs.Debug)
                {
                    Console.WriteLine(ex.Message);
                }
            }
        }
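The Timer here acts as a watchdog: each timer.Change call arms a deadline, and if it expires the callback closes the stream, which makes the pending ReadFrame throw and unwinds the loop into the disconnect path. That is why every successful read immediately disarms the timer with Change(Timeout.Infinite, Timeout.Infinite).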
Example #13
        static unsafe void Main(string[] args)
        {
            Decoder decoder = new Decoder();

            decoder.Initialize();
            // decoder.Start("1.h264");
            decoder.Start("rtmp://localhost:1935/live/test");

            OnPacket onPacket = (packet) => {
                Console.WriteLine($"length: {packet->size}");
                byte[] data = new byte[packet->size];
                Marshal.Copy((IntPtr)packet->data, data, 0, packet->size);
                // Console.WriteLine("data: " + string.Concat(data.Select(b => string.Format("0x{0},", b.ToString("X2"))).ToArray()));

                byte[] NALHeader  = new byte[] { 0xAA, 0xBB, 0xCC, 0xDD };
                int    startIndex = 0;
                int    pos;
                // data.IndexOf(byte[], int) is evidently a custom subsequence-search
                // extension from the sample project, not a BCL method.
                while ((pos = data.IndexOf(NALHeader, startIndex)) >= 0)
                {
                    byte[] content = new byte[4];
                    Array.Copy(data, pos + 4, content, 0, 4);

                    int length = BitConverter.ToInt32(content, 0);
                    content = new byte[length];
                    Buffer.BlockCopy(data, pos + 4 + 4, content, 0, length);
                    string userData = System.Text.Encoding.UTF8.GetString(Decompress(content));
                    Console.WriteLine($"SEI length: {length}, {userData}");

                    startIndex = pos + 4 + 4 + length;
                }
            };

            OnFrame onFrame = (frame) => {
                Mat mat = new Mat(frame->height, frame->width, MatType.CV_8UC3, new Scalar(0, 0, 0));
                mat = mat.CvtColor(ColorConversionCodes.BGR2YUV_I420);
                IntPtr ptr = mat.Data;

                int    length1 = frame->linesize[0] * frame->height;
                byte[] data    = new byte[length1];
                Marshal.Copy((IntPtr)frame->data[0], data, 0, length1);
                for (int i = 0; i < frame->height; ++i, ptr += frame->width)
                {
                    Marshal.Copy(data, frame->linesize[0] * i, ptr, frame->width);
                }

                int length2 = frame->linesize[1] * frame->height / 2;
                data = new byte[length2];
                Marshal.Copy((IntPtr)frame->data[1], data, 0, length2);
                for (int i = 0; i < frame->height / 2; ++i, ptr += frame->width / 2)
                {
                    Marshal.Copy(data, frame->linesize[1] * i, ptr, frame->width / 2);
                }

                int length3 = frame->linesize[2] * frame->height / 2;
                data = new byte[length3];
                Marshal.Copy((IntPtr)frame->data[2], data, 0, length3);
                for (int i = 0; i < frame->height / 2; ++i, ptr += frame->width / 2)
                {
                    Marshal.Copy(data, frame->linesize[2] * i, ptr, frame->width / 2);
                }

                mat = mat.CvtColor(ColorConversionCodes.YUV2BGR_I420);
                Cv2.ImShow($"mat", mat);
                Cv2.WaitKey(0);
            };

            // onFrame is defined above but passed as null here, so the OpenCV
            // rendering callback never runs; pass onFrame instead to display frames.
            while (decoder.Decode(onPacket, null))
            {
            }

            decoder.Stop();
            decoder.Dispose();
        }
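Two details in this example are easy to misread: the 0xAA 0xBB 0xCC 0xDD marker scanned by onPacket is a custom in-band framing for embedded user data, not the standard Annex-B NAL start code (0x00 0x00 0x00 0x01); and onFrame copies each of the three planar YUV buffers row by row because FFmpeg's linesize stride is typically wider than the visible width due to alignment padding.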
Example #14
 public void SetState(States state, OnFrame onTrigger)
 {
     onTriggerSingle += onTrigger;
     SetState(state);
 }
Example #15
        protected void Update()
        {
            switch (source)
            {
            case SOURCE.SolAR:
                if (camera.getNextImage(inputImage) != FrameworkReturnCode._SUCCESS)
                {
                    return;
                }
                break;

            case SOURCE.Unity:
                if (!webcam.didUpdateThisFrame)
                {
                    return;
                }
                var ptr = webcam.GetNativeTexturePtr();
                if (inputTex == null)
                {
                    int w = webcam.width;
                    int h = webcam.height;
                    inputTex = Texture2D.CreateExternalTexture(w, h, TextureFormat.RGB24, false, false, ptr);
                }
                else
                {
                    inputTex.UpdateExternalTexture(ptr);
                }
                break;
            }
            count++;

            var isTracking = pipeline.Proceed(inputImage, pose, camera) == FrameworkReturnCode._SUCCESS;

            //if(mode == PIPELINE.SLAM)
            //{

            //}



            foreach (GameObject g in GameObject.FindGameObjectsWithTag("SolARObject"))
            {
                g.transform.GetComponent<Renderer>().enabled = isTracking;
            }

            if (isTracking)
            {
                if (mogrify)
                {
                    overlay3D.draw(pose, inputImage);
                }
            }
            OnStatus?.Invoke(isTracking);

            switch (display)
            {
            case DISPLAY.SolAR:
                enabled = (imageViewer.display(inputImage) == FrameworkReturnCode._SUCCESS);
                break;

            case DISPLAY.Unity:
                inputImage.ToUnity(ref inputTex);
                OnFrame?.Invoke(inputTex);
                break;
            }
        }
Example #16
        public void Frame()
        {
            _surface.Clear(Color.Purple);

            OnFrame.Raise(this, new FrameEventArgs(_surface));
        }
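OnFrame.Raise here is presumably an extension method rather than a built-in delegate API. A plausible definition, assuming the usual null-check-and-invoke helper for EventHandler&lt;T&gt;:

        public static class EventHandlerExtensions
        {
            // handler is a snapshot of the delegate, so the null check
            // cannot race a subscriber being removed concurrently.
            public static void Raise<TArgs>(this EventHandler<TArgs> handler,
                                            object sender, TArgs args)
            {
                handler?.Invoke(sender, args);
            }
        }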