Example #1
        /// <summary>
        /// Respond to capture event. Frame and timings are passed to each buffer.
        /// </summary>
        public void HandleCapturedFrame(object sender, VideoDataEventArgs e)
        {
            // Run plug-ins, convert to YUV, then send YUV buffers to individual buffers for scaling.
            if (e.Frame != null)
            {
                try
                {
                    if (ProcessorQueue != null)                       // process the source image
                    {
                        foreach (var item in ProcessorQueue)
                        {
                            item.ProcessFrame(e.Frame, e.CaptureTime);
                        }
                    }
                }
                catch
                {
                    // Swallow plug-in failures so a faulty processor cannot stop the capture pipeline.
                }

                var ti = new TimedImage(e.CaptureTime, e.Frame.Width, e.Frame.Height);
                ResampleBuffer(e.Frame, ti);

                WaitingCaptures.Enqueue(ti);

                foreach (var buffer in this)
                {
                    if (buffer.WaitingCaptures != WaitingCaptures)
                    {
                        buffer.WaitingCaptures = WaitingCaptures;
                    }
                }
            }
        }
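
The handlers in these examples come from different projects, so each assumes its own VideoDataEventArgs shape (a raw frame plus timestamp here, a byte payload in Examples #2-#6, a Bitmap in Examples #8-#9). As a reference point only, here is a minimal sketch of the payload Example #1 relies on; both property types are assumptions and the real class may differ.

using System;
using System.Drawing;

// Minimal sketch of the event payload assumed by Example #1; property types
// are guesses chosen to match how the handler uses them.
public class VideoDataEventArgs : EventArgs
{
    public Bitmap Frame { get; }        // captured frame (the handler reads Width/Height)
    public double CaptureTime { get; }  // capture timestamp (real type may differ)

    public VideoDataEventArgs(Bitmap frame, double captureTime)
    {
        Frame = frame;
        CaptureTime = captureTime;
    }
}
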
Example #2
        public void ConsumeVideoData(object sender, VideoDataEventArgs args)
        {
            var frame = _videoAssembler.AssembleVideoFrame(args.VideoData);

            if (frame == null)
            {
                return;
            }

            _video.FeedVideoData(frame);
        }
Example #3
        private void ProcessVideoData(Process process, CancellationToken cancellationToken)
        {
            // Read the child process's stdout in fixed-size chunks and raise an event for each one.
            using (BinaryReader br = new BinaryReader(process.StandardOutput.BaseStream))
            {
                byte[] buffer;

                while ((buffer = br.ReadBytes(4096)).Length > 0)
                {
                    VideoDataEventArgs args = new VideoDataEventArgs(buffer);
                    OnVideoDataReceived?.Invoke(this, args);

                    cancellationToken.ThrowIfCancellationRequested();
                }
            }
        }
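
Example #3 raises an event for each 4 KB chunk it reads but says nothing about who declares that event or starts the loop. The sketch below shows one way to wire it up; apart from VideoDataEventArgs and ProcessVideoData, every name here is hypothetical.

using System;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;

public partial class VideoPipeReader   // hypothetical host class for ProcessVideoData
{
    // Raised once per chunk read from the child process's stdout.
    public event EventHandler<VideoDataEventArgs> OnVideoDataReceived;

    // Run the blocking read loop on a worker thread so the caller is not stalled
    // while the child process (e.g. an encoder writing to stdout) produces data.
    public Task StartAsync(Process process, CancellationToken cancellationToken)
    {
        return Task.Run(() => ProcessVideoData(process, cancellationToken), cancellationToken);
    }
}
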
Example #4
        public void ConsumeVideoData(object sender, VideoDataEventArgs args)
        {
            // TODO: Sorting
            var frame = _videoAssembler.AssembleVideoFrame(args.VideoData);

            if (frame == null)
            {
                return;
            }

            // Enqueue encoded video data in the decoder once its context has been
            // initialized; until then, wait for the first sequence parameter set (SPS)
            // frame and use it to configure the codec.
            if (_videoContextInitialized)
            {
                _videoHandler.PushData(frame);
            }
            else if (frame.PrimaryType == NalUnitType.SEQUENCE_PARAMETER_SET)
            {
                _videoHandler.UpdateCodecParameters(frame.GetCodecSpecificDataAvcc());
                _videoContextInitialized = true;
            }
        }
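
Example #4 gates decoding on the SPS because an H.264 decoder context cannot be configured until the sequence parameter set has been seen. As a standalone reference, independent of the project above, the NAL unit type of an Annex B frame can be read directly from the byte after the start code (7 = SPS, 8 = PPS, 5 = IDR slice):

// Standalone helper (not part of the project above): returns nal_unit_type
// for the first NAL unit in an Annex B H.264 buffer.
private static int GetNalUnitType(byte[] annexB)
{
    // Start codes are 00 00 01 or 00 00 00 01; the NAL header byte follows,
    // and its low 5 bits hold the unit type.
    int offset = (annexB[2] == 0x01) ? 3 : 4;
    return annexB[offset] & 0x1F;
}
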
Example #5
        void IVideoConsumer.ConsumeVideoData(object sender, VideoDataEventArgs args)
        {
            H264Frame frame = _videoAssembler.AssembleVideoFrame(args.VideoData);

            if (frame == null)
            {
                return;
            }

            if (_dumpSingleFrames)
            {
                string frameFilename = $"{_fileName}.video.{frame.FrameId}.{frame.TimeStamp}.raw";
                using (FileStream fs = new FileStream(frameFilename, FileMode.CreateNew))
                {
                    fs.Write(frame.RawData, 0, frame.RawData.Length);
                }
            }
            else
            {
                _videoFile.Write(frame.RawData, 0, frame.RawData.Length);
            }
        }
Example #6
 public void ConsumeVideoData(object sender, VideoDataEventArgs args)
 {
     _video.ConsumeVideoData(args.VideoData);
 }
Example #7
 public void HandleCapturedFrame(object sender, VideoDataEventArgs e)
 {
     throw new NotSupportedException("Audio buffer does not support video frames");
 }
Example #8
 private void Controller_VideoDataReady(object sender, VideoDataEventArgs e)
 {
     ShowVideo(e.Bitmap);
 }
Example #9
 private void service_VideoDataReady(object sender, VideoDataEventArgs e)
 {
     OnVideoDataReady(e.Bitmap);
 }
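
Examples #8 and #9 show only the subscriber side. The following is a minimal sketch, with every name hypothetical except VideoDataEventArgs, of a publisher they could attach to; here the event payload is assumed to carry a Bitmap, as those two handlers imply.

using System;
using System.Drawing;

// Assumed Bitmap-carrying payload for Examples #8 and #9 (their projects define their own).
public class VideoDataEventArgs : EventArgs
{
    public Bitmap Bitmap { get; }
    public VideoDataEventArgs(Bitmap bitmap) { Bitmap = bitmap; }
}

// Hypothetical publisher: the capture/decode loop calls OnFrameReady whenever a
// new bitmap is available, and subscribers such as service_VideoDataReady
// forward the image to the UI.
public class VideoSource
{
    public event EventHandler<VideoDataEventArgs> VideoDataReady;

    protected virtual void OnFrameReady(Bitmap bitmap)
    {
        VideoDataReady?.Invoke(this, new VideoDataEventArgs(bitmap));
    }
}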