Exemplo n.º 1
0
        /// <summary>
        /// Callback invoked by the native display pipeline when a decoded frame is
        /// ready in <c>_bufferPtr</c>. Copies the raw pixels into a managed
        /// <see cref="Bitmap"/>, raises <c>OnFrameReceived</c> with a clone, then
        /// frees the native buffer and releases the frame mutex.
        /// </summary>
        /// <param name="opaque">Opaque user pointer supplied by the native side (unused).</param>
        /// <param name="picture">Native picture handle (unused; pixels are read from _bufferPtr).</param>
        void mDisplayEventHandler(void *opaque, void *picture)
        {
            if (_bufferPtr == IntPtr.Zero)
            {
                return;
            }

            // framePixelBits holds BYTES per pixel here (3 = RGB24, 4 = ARGB32);
            // anything else falls back to 24bpp, matching the original defaulting.
            PixelFormat pixelFormat = framePixelBits == 4
                ? PixelFormat.Format32bppArgb
                : PixelFormat.Format24bppRgb;

            try
            {
                tmpCopyBitmap = new Bitmap(frameWidth, frameHeight, pixelFormat);
                BitmapData bitmapData = tmpCopyBitmap.LockBits(
                    new Rectangle(0, 0, frameWidth, frameHeight),
                    System.Drawing.Imaging.ImageLockMode.ReadWrite,
                    pixelFormat);
                try
                {
                    // NOTE(review): this assumes bitmapData.Stride == frameWidth * framePixelBits,
                    // i.e. rows are tightly packed with no alignment padding — confirm for odd widths.
                    memcpy((byte *)bitmapData.Scan0, (byte *)_bufferPtr.ToPointer(), frameWidth * frameHeight * framePixelBits);
                }
                finally
                {
                    tmpCopyBitmap.UnlockBits(bitmapData);
                }

                // Hand subscribers their own copy so they may keep it past this call.
                OnFrameReceived?.Invoke((Bitmap)tmpCopyBitmap.Clone());
            }
            finally
            {
                // Always free the native buffer, dispose the scratch bitmap and release
                // the mutex — even if the copy or an event handler throws. Otherwise the
                // HGlobal buffer leaks and the producer deadlocks waiting on the mutex.
                // (The per-frame GC.Collect() the old code did here is removed: forcing
                // a full collection every frame stalls the pipeline for no benefit.)
                Marshal.FreeHGlobal(_bufferPtr);
                _bufferPtr = IntPtr.Zero;
                tmpCopyBitmap?.Dispose();
                mutex.ReleaseMutex();
            }
        }
        /// <summary>
        /// Handles a new Kinect frame: refreshes the tracked-person slot for every
        /// tracked face, starts a background database update per face, and then
        /// re-raises the frame through <c>OnFrameReceived</c>.
        /// </summary>
        public void KinectFrameReceivedEvent(object sender, EventArgs e)
        {
            var eventArgs = (KinectFrameEventArgs)e;

            Parallel.For(0, eventArgs.Faces.Count, index =>
            {
                if (!eventArgs.Faces[index].IsTracked)
                {
                    return;
                }

                // Start a fresh Person whenever the slot is empty or the face's
                // tracking id no longer matches the person stored there.
                var current = this.people[index];
                if (current == null || current.TrackingId != eventArgs.Faces[index].TrackingId)
                {
                    this.people[index] = new Person(eventArgs.Faces[index].TrackingId);
                }

                // Fire-and-forget persistence; the lambda re-reads the slot and the
                // face at task-run time, just like the original implementation did.
                Task.Factory.StartNew(() => this.UpdateDatabase(this.people[index], eventArgs.Faces[index], Sensor.CoordinateMapper, eventArgs.FramePixels));
            });

            OnFrameReceived?.Invoke(this, eventArgs);
        }
Exemplo n.º 3
0
        /// <summary>
        /// Continuously reads a frame stream from <c>_uri</c>, extracting one frame
        /// per inner iteration and raising <c>OnFrameReceived</c> for each. When a
        /// frame cannot be read, the URI is updated, or any exception occurs, the
        /// connection is dropped and reopened.
        /// </summary>
        private async Task DoWork()
        {
            while (true)
            {
                try
                {
                    using (var stream = await _client.GetStreamAsync(_uri).ConfigureAwait(false))
                    {
                        while (true)
                        {
                            // A zero content length marks the end of the usable stream.
                            int contentLength = GetContentLength(stream);

                            if (contentLength == 0)
                            {
                                break;
                            }

                            var content = GetContent(stream, contentLength);

                            if (content == null)
                            {
                                break;
                            }

                            // The URI changed while streaming: drop this connection and
                            // reconnect with the new URI on the next outer iteration.
                            if (_updateUri)
                            {
                                _updateUri = false;
                                break;
                            }

                            lock (_locker)
                            {
                                _lastFrame = content;
                            }

                            OnFrameReceived?.Invoke(this, new FrameReceivedEventArgs()
                            {
                                Frame = content
                            });
                        }
                    }
                }
                catch (Exception e)
                {
                    Debug.WriteLine(e.Message);
                    Debug.WriteLine(e.StackTrace);

                    // Back off briefly so a persistent failure (unreachable host,
                    // bad URI) does not turn this loop into a busy reconnect spin.
                    await Task.Delay(TimeSpan.FromSeconds(1)).ConfigureAwait(false);
                }
            }
        }
Exemplo n.º 4
0
        /// <summary>
        /// Called by the underlaying device when it receives a new capture bitmap,
        /// it broadcasts an <see cref="VideoCaptureFrameArgs"/> at <see cref="OnFrameReceived"/>.
        /// </summary>
        /// <param name="bmp">The freshly captured bitmap.</param>
        protected void RaiseFrameReceived(PointerBitmap bmp)
        {
            if (_CheckDisposed())
            {
                return;
            }

            var captureTime = DateTime.UtcNow;

            _FPS.AddFrame();

            var frame = _CurrentFrame.Value;

            // The shared frame object must belong to this device before we mutate it.
            if (frame.CaptureDevice != this)
            {
                throw new InvalidOperationException("Owner mismatch");
            }

            frame._CaptureTime    = captureTime;
            frame._CapturedBitmap = bmp;

            OnFrameReceived?.Invoke(this, frame);
        }
Exemplo n.º 5
0
        /// <summary>
        /// Advances this stream's state machine for an incoming frame, credits the
        /// flow-control window on WINDOW_UPDATE frames, and notifies
        /// <c>OnFrameReceived</c> subscribers.
        /// </summary>
        /// <param name="frame">The frame received for this stream.</param>
        public void ProcessReceivedFrames(IFrame frame)
        {
            if (State == StreamState.Idle)
            {
                if (frame.Type == FrameType.Headers)
                {
                    State = StreamState.Open;
                }
                else if (frame.Type == FrameType.PushPromise)
                {
                    State = StreamState.ReservedRemote;
                }
                // Any other frame type leaves an idle stream idle.
            }
            else if (State == StreamState.ReservedLocal || State == StreamState.HalfClosedRemote)
            {
                // Both states react identically: only RST_STREAM moves them to Closed.
                if (frame.Type == FrameType.RstStream)
                {
                    State = StreamState.Closed;
                }
            }
            else if (State == StreamState.Open)
            {
                // END_STREAM half-closes; a RST_STREAM (checked independently,
                // not as else-if) still closes the stream completely.
                if (frame.IsEndStream)
                {
                    State = StreamState.HalfClosedRemote;
                }
                if (frame.Type == FrameType.RstStream)
                {
                    State = StreamState.Closed;
                }
            }
            else if (State == StreamState.ReservedRemote)
            {
                if (frame.Type == FrameType.Headers)
                {
                    State = StreamState.HalfClosedLocal;
                }
                else if (frame.Type == FrameType.RstStream)
                {
                    State = StreamState.Closed;
                }
            }
            else if (State == StreamState.HalfClosedLocal)
            {
                if (frame.IsEndStream || frame.Type == FrameType.RstStream)
                {
                    State = StreamState.Closed;
                }
            }

            // Server has cleared up more window space; add it to the available window.
            if (frame.Type == FrameType.WindowUpdate)
            {
                var windowUpdateFrame = (WindowUpdateFrame)frame;
                flowControlStateManager.IncreaseWindowSize(StreamIdentifer, windowUpdateFrame.WindowSizeIncrement);
            }

            // Raise the event.
            OnFrameReceived?.Invoke(frame);
        }
Exemplo n.º 6
0
        /// <summary>
        /// Records the incoming frame in the stream's history, advances the stream
        /// state machine, credits the flow-control window on WINDOW_UPDATE frames,
        /// and raises <c>OnFrameReceived</c>.
        /// </summary>
        /// <param name="frame">The frame received for this stream.</param>
        public void ProcessReceivedFrames(IFrame frame)
        {
            // Keep a full history of everything this stream has received.
            ReceivedFrames.Add(frame);

            switch (State)
            {
            case StreamState.Idle:
                if (frame.Type == FrameType.Headers)
                {
                    State = StreamState.Open;
                }
                else if (frame.Type == FrameType.PushPromise)
                {
                    State = StreamState.ReservedRemote;
                }
                // Priority frames are ignored while idle; any other type
                // should eventually be treated as a PROTOCOL_ERROR (TODO).
                break;

            case StreamState.ReservedLocal:
            case StreamState.HalfClosedRemote:
                // Both states react identically: only RST_STREAM closes them.
                if (frame.Type == FrameType.RstStream)
                {
                    State = StreamState.Closed;
                }
                break;

            case StreamState.Open:
                // END_STREAM half-closes; a RST_STREAM in the same frame
                // (checked independently, not as else-if) still closes fully.
                if (frame.IsEndStream)
                {
                    State = StreamState.HalfClosedRemote;
                }
                if (frame.Type == FrameType.RstStream)
                {
                    State = StreamState.Closed;
                }
                break;

            case StreamState.ReservedRemote:
                if (frame.Type == FrameType.Headers)
                {
                    State = StreamState.HalfClosedLocal;
                }
                else if (frame.Type == FrameType.RstStream)
                {
                    State = StreamState.Closed;
                }
                break;

            case StreamState.HalfClosedLocal:
                if (frame.IsEndStream || frame.Type == FrameType.RstStream)
                {
                    State = StreamState.Closed;
                }
                break;
            }

            // Server has cleared up more window space; add it to the available window.
            if (frame.Type == FrameType.WindowUpdate)
            {
                var windowUpdateFrame = (WindowUpdateFrame)frame;
                flowControlStateManager.IncreaseWindowSize(StreamIdentifer, windowUpdateFrame.WindowSizeIncrement);
            }

            // Raise the event.
            OnFrameReceived?.Invoke(frame);
        }