// ---- Code example #1 ----
        /// <summary>
        /// Encodes an externally supplied raw video sample with the FFmpeg encoder and
        /// raises the encoded sample event.
        /// </summary>
        /// <param name="durationMilliseconds">Duration of the sample in milliseconds; used to derive the frame rate.</param>
        /// <param name="width">Width of the raw frame in pixels.</param>
        /// <param name="height">Height of the raw frame in pixels.</param>
        /// <param name="sample">The raw pixel data for the frame.</param>
        /// <param name="pixelFormat">The pixel format of the raw sample.</param>
        public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            if (_isClosed || OnVideoSourceEncodedSample == null)
            {
                return;
            }

            uint framesPerSecond = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : DEFAULT_FRAMES_PER_SECOND;
            if (framesPerSecond == 0)
            {
                // A sample longer than one second would otherwise produce a zero rate.
                framesPerSecond = 1;
            }

            // Bgra has 4 bytes per pixel; all other supported formats here have 3.
            int bytesPerRow = (pixelFormat == VideoPixelFormatsEnum.Bgra) ? 4 * width : 3 * width;
            var i420Frame   = PixelConverter.ToI420(width, height, bytesPerRow, sample, pixelFormat);

            byte[]? encoded = _ffmpegEncoder.Encode(FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec), i420Frame, width, height, (int)framesPerSecond, _forceKeyFrame);

            if (encoded != null)
            {
                uint rtpDuration = VIDEO_SAMPLING_RATE / framesPerSecond;

                // The event handler can be removed while the encoding is in progress,
                // hence the null-conditional invoke.
                OnVideoSourceEncodedSample?.Invoke(rtpDuration, encoded);
            }

            if (_forceKeyFrame)
            {
                // The key frame request has now been serviced.
                _forceKeyFrame = false;
            }
        }
// ---- Code example #2 ----
        /// <summary>
        /// Handles a decoded video frame from the file source decoder, encodes it and
        /// raises the encoded sample event.
        /// </summary>
        /// <param name="frame">The decoded FFmpeg frame (passed by reference to avoid a copy).</param>
        private void FileSourceDecoder_OnVideoFrame(ref AVFrame frame)
        {
            if (OnVideoSourceEncodedSample == null)
            {
                return;
            }

            int fps = (int)_fileSourceDecoder.VideoAverageFrameRate;
            if (fps <= 0)
            {
                // Fall back to the default when the decoder cannot report an average rate.
                fps = DEFAULT_FRAME_RATE;
            }

            uint rtpDuration = (uint)(VIDEO_SAMPLING_RATE / fps);

            var encoded = _videoEncoder.Encode(FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec), frame, fps, _forceKeyFrame);

            if (encoded != null)
            {
                // The event handler can be removed while the encoding is in progress,
                // hence the null-conditional invoke.
                OnVideoSourceEncodedSample?.Invoke(rtpDuration, encoded);

                if (_forceKeyFrame)
                {
                    // The key frame request has now been serviced.
                    _forceKeyFrame = false;
                }
            }
        }
// ---- Code example #3 ----
        /// <summary>
        /// Timer callback that stamps the static test pattern and pushes raw and/or
        /// encoded samples to subscribers.
        /// </summary>
        /// <param name="state">Timer state object (unused).</param>
        private void GenerateTestPattern(object state)
        {
            // The timer object doubles as the lock to prevent overlapping callbacks.
            lock (_sendTestPatternTimer)
            {
                if (!_isClosed && (OnVideoSourceRawSample != null || OnVideoSourceEncodedSample != null))
                {
                    _frameCount++;

                    StampI420Buffer(_testI420Buffer, TEST_PATTERN_WIDTH, TEST_PATTERN_HEIGHT, _frameCount);

                    if (OnVideoSourceRawSample != null)
                    {
                        GenerateRawSample(TEST_PATTERN_WIDTH, TEST_PATTERN_HEIGHT, _testI420Buffer);
                    }

                    if (_videoEncoder != null && OnVideoSourceEncodedSample != null && !_formatManager.SelectedFormat.IsEmpty())
                    {
                        var encodedBuffer = _videoEncoder.EncodeVideo(TEST_PATTERN_WIDTH, TEST_PATTERN_HEIGHT, _testI420Buffer, VideoPixelFormatsEnum.I420, _formatManager.SelectedFormat.Codec);

                        if (encodedBuffer != null)
                        {
                            uint fps = (_frameSpacing > 0) ? 1000 / (uint)_frameSpacing : DEFAULT_FRAMES_PER_SECOND;
                            if (fps == 0)
                            {
                                // BUG FIX: a frame spacing above 1000ms yields fps == 0, which
                                // previously caused a DivideByZeroException on the duration
                                // calculation below (same guard as the other encode paths).
                                fps = 1;
                            }

                            uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                            OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                        }
                    }

                    if (_frameCount == int.MaxValue)
                    {
                        // Wrap the frame counter to avoid overflow.
                        _frameCount = 0;
                    }
                }
            }
        }
// ---- Code example #4 ----
        /// <summary>
        /// Encodes an externally supplied raw video sample as VP8 and raises the encoded
        /// sample event. The encoder and conversion bitmap are created lazily on the
        /// first sample, once the frame dimensions are known.
        /// </summary>
        /// <param name="durationMilliseconds">Duration of the sample in milliseconds; used to derive the frame rate.</param>
        /// <param name="width">Width of the raw frame in pixels.</param>
        /// <param name="height">Height of the raw frame in pixels.</param>
        /// <param name="sample">The raw pixel data for the frame.</param>
        /// <param name="pixelFormat">The pixel format of the raw sample.</param>
        public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            if (!_isClosed)
            {
                if (_vp8Encoder == null)
                {
                    _vp8Encoder = new Vp8Codec();
                    _vp8Encoder.InitialiseEncoder((uint)width, (uint)height);
                }

                if (_encodeBmp == null)
                {
                    _encodeBmp = new SoftwareBitmap(BitmapPixelFormat.Rgba8, width, height);
                }

                if (OnVideoSourceEncodedSample != null)
                {
                    SetBitmapData(sample, _encodeBmp, pixelFormat);

                    byte[] nv12Buffer = null;

                    // BUG FIX: the converted bitmap was never disposed, leaking a native
                    // SoftwareBitmap on every frame. Wrap it in using blocks (matching the
                    // capture-device handler which disposes its NV12 bitmap).
                    using (var nv12bmp = SoftwareBitmap.Convert(_encodeBmp, BitmapPixelFormat.Nv12))
                    using (BitmapBuffer buffer = nv12bmp.LockBuffer(BitmapBufferAccessMode.Read))
                    using (var reference = buffer.CreateReference())
                    {
                        unsafe
                        {
                            byte* dataInBytes;
                            uint  capacity;
                            ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacity);

                            // Copy the locked native buffer into a managed array for the encoder.
                            nv12Buffer = new byte[capacity];
                            Marshal.Copy((IntPtr)dataInBytes, nv12Buffer, 0, (int)capacity);
                        }
                    }

                    byte[] encodedBuffer = _vp8Encoder.Encode(nv12Buffer, _forceKeyFrame);

                    if (encodedBuffer != null)
                    {
                        uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : DEFAULT_FRAMES_PER_SECOND;
                        if (fps == 0)
                        {
                            // BUG FIX: a sample longer than one second yields fps == 0, which
                            // previously caused a DivideByZeroException on the duration
                            // calculation below.
                            fps = 1;
                        }

                        uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                        OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                    }

                    if (_forceKeyFrame)
                    {
                        // The key frame request has now been serviced.
                        _forceKeyFrame = false;
                    }
                }
            }
        }
// ---- Code example #5 ----
        /// <summary>
        /// Encodes an externally supplied raw video sample as VP8 and raises the encoded
        /// sample event.
        /// </summary>
        /// <param name="durationMilliseconds">Duration of the sample in milliseconds; used to derive the frame rate.</param>
        /// <param name="width">Width of the raw frame in pixels.</param>
        /// <param name="height">Height of the raw frame in pixels.</param>
        /// <param name="sample">The raw pixel data for the frame.</param>
        /// <param name="pixelFormat">The pixel format of the raw sample.</param>
        public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            if (!_isClosed)
            {
                if (OnVideoSourceEncodedSample != null)
                {
                    var encodedBuffer = _vp8Codec.EncodeVideo(width, height, sample, pixelFormat, VideoCodecsEnum.VP8);

                    if (encodedBuffer != null)
                    {
                        uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : DEFAULT_FRAMES_PER_SECOND;
                        if (fps == 0)
                        {
                            // BUG FIX: a sample longer than one second yields fps == 0, which
                            // previously caused a DivideByZeroException on the duration
                            // calculation below (same guard as the other encode paths).
                            fps = 1;
                        }

                        uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                        OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                    }
                }
            }
        }
// ---- Code example #6 ----
        /// <summary>
        /// Handles a raw RGB24 sample from the external video source, VP8-encodes it and
        /// raises the encoded sample event. Only VP8 is supported as the selected format.
        /// </summary>
        /// <param name="durationMilliseconds">Duration of the sample in milliseconds (unused here).</param>
        /// <param name="width">Width of the raw frame in pixels.</param>
        /// <param name="height">Height of the raw frame in pixels.</param>
        /// <param name="rgb24Sample">The raw RGB24 pixel data.</param>
        private void ExternalSource_OnVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] rgb24Sample)
        {
            if (_vp8Encoder == null)
            {
                // Lazily initialise the encoder on the first sample, once the dimensions are known.
                _vp8Encoder = new Vp8Codec();
                _vp8Encoder.InitialiseEncoder((uint)width, (uint)height);
            }

            if (OnVideoSourceEncodedSample == null)
            {
                return;
            }

            byte[] encoded;

            if (_selectedSourceFormat == VideoCodecsEnum.VP8)
            {
                var i420 = PixelConverter.RGBtoI420(rgb24Sample, width, height);
                encoded = _vp8Encoder.Encode(i420, _forceKeyFrame);
            }
            else
            {
                throw new ApplicationException($"Video codec is not supported.");
            }

            if (encoded != null)
            {
                // The event handler can be removed while the encoding is in progress.
                OnVideoSourceEncodedSample.Invoke(_selectedSourceFormat, VIDEO_TIMESTAMP_SPACING, encoded);
            }

            if (_forceKeyFrame)
            {
                // The key frame request has now been serviced.
                _forceKeyFrame = false;
            }
        }
// ---- Code example #7 ----
        /// <summary>
        /// Timer callback that stamps the static test pattern, raises a raw I420 sample
        /// and, when an encoder is available, a VP8-encoded sample.
        /// </summary>
        /// <param name="state">Timer state object (unused).</param>
        private void GenerateTestPattern(object state)
        {
            // The timer object doubles as the lock to prevent overlapping callbacks.
            lock (_sendTestPatternTimer)
            {
                if (!_isClosed && (OnVideoSourceRawSample != null || OnVideoSourceEncodedSample != null))
                {
                    _frameCount++;

                    StampI420Buffer(_testI420Buffer, TEST_PATTERN_WIDTH, TEST_PATTERN_HEIGHT, _frameCount);

                    // The event handler could be removed while the buffer is being stamped.
                    OnVideoSourceRawSample?.Invoke((uint)_frameSpacing, TEST_PATTERN_WIDTH, TEST_PATTERN_HEIGHT,
                                                   _testI420Buffer, VideoPixelFormatsEnum.I420);

                    if (_videoEncoder != null && OnVideoSourceEncodedSample != null)
                    {
                        var encodedBuffer = _videoEncoder.EncodeVideo(TEST_PATTERN_WIDTH, TEST_PATTERN_HEIGHT,
                                                                      _testI420Buffer, VideoPixelFormatsEnum.I420, VideoCodecsEnum.VP8);

                        if (encodedBuffer != null)
                        {
                            uint fps = (_frameSpacing > 0) ? 1000 / (uint)_frameSpacing : DEFAULT_FRAMES_PER_SECOND;
                            if (fps == 0)
                            {
                                // BUG FIX: a frame spacing above 1000ms yields fps == 0, which
                                // previously caused a DivideByZeroException on the duration
                                // calculation below.
                                fps = 1;
                            }

                            uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                            OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                        }
                    }

                    if (_frameCount == int.MaxValue)
                    {
                        // Wrap the frame counter to avoid overflow.
                        _frameCount = 0;
                    }
                }
            }
        }
// ---- Code example #8 ----
        /// <summary>
        /// Event handler for video frames for the local video capture device.
        /// Copies the latest frame off its Direct3D surface, converts it to NV12 if
        /// required, VP8-encodes it and raises the encoded sample event.
        /// </summary>
        /// <param name="sender">The frame reader that has a frame available.</param>
        /// <param name="e">Event arguments (unused).</param>
        private async void FrameArrivedHandler(MediaFrameReader sender, MediaFrameArrivedEventArgs e)
        {
            if (!_isClosed)
            {
                using (var frame = sender.TryAcquireLatestFrame())
                {
                    // The source may have been closed, or no frame available, since the event fired.
                    if (_isClosed || frame == null)
                    {
                        return;
                    }

                    var vmf        = frame.VideoMediaFrame;
                    var videoFrame = vmf.GetVideoFrame();

                    // Copy the frame from the Direct3D surface into a CPU-accessible bitmap.
                    var sbmp = await SoftwareBitmap.CreateCopyFromSurfaceAsync(videoFrame.Direct3DSurface);

                    if (sbmp == null)
                    {
                        // NOTE(review): videoFrame is not disposed on this path — possible resource leak.
                        logger.LogWarning("Failed to get bitmap from video frame reader.");
                    }
                    else
                    {
                        if (!_isClosed && OnVideoSourceEncodedSample != null)
                        {
                            // Serialise access to the encoder; frame events can arrive concurrently.
                            // NOTE(review): locks on the encoder instance itself — a dedicated
                            // private lock object would be preferable.
                            lock (_vp8Encoder)
                            {
                                SoftwareBitmap nv12bmp = null;

                                // If the bitmap is not in the required pixel format for the encoder convert it.
                                if (_mediaFrameSource.CurrentFormat.Subtype != VIDEO_DESIRED_PIXEL_FORMAT)
                                {
                                    nv12bmp = SoftwareBitmap.Convert(sbmp, BitmapPixelFormat.Nv12);
                                }

                                byte[]         nv12Buffer = null;
                                // Use the converted bitmap when one was created, otherwise the original.
                                SoftwareBitmap inputBmp   = nv12bmp ?? sbmp;

                                // Copy the raw bytes out of the bitmap's locked native buffer
                                // into a managed array for the encoder.
                                using (BitmapBuffer buffer = inputBmp.LockBuffer(BitmapBufferAccessMode.Read))
                                {
                                    using (var reference = buffer.CreateReference())
                                    {
                                        unsafe
                                        {
                                            byte *dataInBytes;
                                            uint  capacity;
                                            ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacity);

                                            nv12Buffer = new byte[capacity];
                                            Marshal.Copy((IntPtr)dataInBytes, nv12Buffer, 0, (int)capacity);
                                        }
                                    }
                                }

                                byte[] encodedBuffer = null;

                                encodedBuffer = _vp8Encoder.Encode(nv12Buffer, _forceKeyFrame);

                                if (encodedBuffer != null)
                                {
                                    // Derive the frame rate from the capture format, falling back to the default.
                                    uint fps           = (_fpsDenominator > 0 && _fpsNumerator > 0) ? _fpsNumerator / _fpsDenominator : DEFAULT_FRAMES_PER_SECOND;
                                    uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                                    OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                                }

                                if (_forceKeyFrame)
                                {
                                    // The key frame request has now been serviced.
                                    _forceKeyFrame = false;
                                }

                                // Dispose the converted bitmap (if any); sbmp is disposed below.
                                nv12bmp?.Dispose();
                            }
                        }

                        sbmp.Dispose();
                        videoFrame.Dispose();
                    }
                }
            }
        }
// ---- Code example #9 ----
        /// <summary>
        /// Event handler for video frames for the local video capture device.
        /// Obtains the latest frame as an NV12 bitmap (converting or copying from the
        /// Direct3D surface when needed), then raises the encoded and/or raw sample events.
        /// </summary>
        /// <param name="sender">The frame reader that has a frame available.</param>
        /// <param name="e">Event arguments (unused).</param>
        private async void FrameArrivedHandler(MediaFrameReader sender, MediaFrameArrivedEventArgs e)
        {
            if (!_isClosed)
            {
                if (!_videoFormatManager.SelectedFormat.IsEmpty() && (OnVideoSourceEncodedSample != null || OnVideoSourceRawSample != null))
                {
                    using (var mediaFrameReference = sender.TryAcquireLatestFrame())
                    {
                        var videoMediaFrame = mediaFrameReference?.VideoMediaFrame;
                        var softwareBitmap  = videoMediaFrame?.SoftwareBitmap;

                        // Some capture sources only expose a Direct3D surface; copy it into a CPU bitmap.
                        if (softwareBitmap == null && videoMediaFrame != null)
                        {
                            var videoFrame = videoMediaFrame.GetVideoFrame();
                            softwareBitmap = await SoftwareBitmap.CreateCopyFromSurfaceAsync(videoFrame.Direct3DSurface);
                        }

                        if (softwareBitmap != null)
                        {
                            int width  = softwareBitmap.PixelWidth;
                            int height = softwareBitmap.PixelHeight;

                            // The encoder input is NV12; convert when the capture format differs.
                            // NOTE(review): the pre-conversion bitmap reference is overwritten here
                            // without being disposed — presumably owned by the frame reference, but
                            // worth confirming for the surface-copy path above.
                            if (softwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Nv12)
                            {
                                softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Nv12, BitmapAlphaMode.Ignore);
                            }

                            // Swap the processed frame to _backBuffer and dispose of the unused image.
                            softwareBitmap = Interlocked.Exchange(ref _backBuffer, softwareBitmap);

                            // Copy the raw NV12 bytes out of the back buffer's locked native buffer.
                            using (BitmapBuffer buffer = _backBuffer.LockBuffer(BitmapBufferAccessMode.Read))
                            {
                                using (var reference = buffer.CreateReference())
                                {
                                    unsafe
                                    {
                                        byte *dataInBytes;
                                        uint  capacity;
                                        reference.As <IMemoryBufferByteAccess>().GetBuffer(out dataInBytes, out capacity);
                                        byte[] nv12Buffer = new byte[capacity];
                                        Marshal.Copy((IntPtr)dataInBytes, nv12Buffer, 0, (int)capacity);

                                        if (OnVideoSourceEncodedSample != null)
                                        {
                                            // Serialise access to the encoder; frame events can arrive concurrently.
                                            lock (_videoEncoder)
                                            {
                                                var encodedBuffer = _videoEncoder.EncodeVideo(width, height, nv12Buffer, EncoderInputFormat, _videoFormatManager.SelectedFormat.Codec);

                                                if (encodedBuffer != null)
                                                {
                                                    // Derive the frame rate from the capture format, falling back to the default.
                                                    uint fps           = (_fpsDenominator > 0 && _fpsNumerator > 0) ? _fpsNumerator / _fpsDenominator : DEFAULT_FRAMES_PER_SECOND;
                                                    uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                                                    OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                                                }

                                                if (_forceKeyFrame)
                                                {
                                                    // The key frame request has now been serviced.
                                                    _forceKeyFrame = false;
                                                }
                                            }
                                        }

                                        if (OnVideoSourceRawSample != null)
                                        {
                                            // Measure the elapsed time since the previous frame for the raw sample duration.
                                            uint frameSpacing = 0;
                                            if (_lastFrameAt != DateTime.MinValue)
                                            {
                                                frameSpacing = Convert.ToUInt32(DateTime.Now.Subtract(_lastFrameAt).TotalMilliseconds);
                                            }

                                            // NOTE(review): the last argument looks like a BGR24 stride
                                            // (3 bytes per pixel) — confirm against PixelConverter.NV12toBGR.
                                            var bgrBuffer = PixelConverter.NV12toBGR(nv12Buffer, width, height, width * 3);

                                            OnVideoSourceRawSample(frameSpacing, width, height, bgrBuffer, VideoPixelFormatsEnum.Bgr);
                                        }
                                    }
                                }
                            }

                            // Dispose the previous back buffer (returned by the exchange) and the swapped-out bitmap.
                            _backBuffer?.Dispose();
                            softwareBitmap?.Dispose();
                        }

                        _lastFrameAt = DateTime.Now;
                    }
                }
            }
        }
// ---- Code example #10 ----
        /// <summary>
        /// Handles a decoded video frame: raises a fast raw BGR24 sample and/or converts
        /// the frame to YUV420P, encodes it and raises the encoded sample event.
        /// </summary>
        /// <param name="frame">The decoded FFmpeg frame (passed by reference to avoid a copy).</param>
        private unsafe void VideoDecoder_OnVideoFrame(ref AVFrame frame)
        {
            if ((OnVideoSourceEncodedSample != null) || (OnVideoSourceRawSampleFaster != null))
            {
                int frameRate = (int)_videoDecoder.VideoAverageFrameRate;
                // Fall back to the default when the decoder cannot report an average rate.
                frameRate = (frameRate <= 0) ? Helper.DEFAULT_VIDEO_FRAME_RATE : frameRate;
                uint timestampDuration = (uint)(VideoFormat.DEFAULT_CLOCK_RATE / frameRate);

                var width  = frame.width;
                var height = frame.height;

                // Manage Raw Sample
                if (OnVideoSourceRawSampleFaster != null)
                {
                    // (Re)create the converter when the source dimensions change.
                    if (_videoFrameBGR24Converter == null ||
                        _videoFrameBGR24Converter.SourceWidth != width ||
                        _videoFrameBGR24Converter.SourceHeight != height)
                    {
                        _videoFrameBGR24Converter = new VideoFrameConverter(
                            width, height,
                            (AVPixelFormat)frame.format,
                            width, height,
                            AVPixelFormat.AV_PIX_FMT_BGR24);
                        logger.LogDebug($"Frame format: [{frame.format}]");
                    }

                    var frameBGR24 = _videoFrameBGR24Converter.Convert(frame);
                    if ((frameBGR24.width != 0) && (frameBGR24.height != 0))
                    {
                        RawImage imageRawSample = new RawImage
                        {
                            Width       = width,
                            Height      = height,
                            Stride      = frameBGR24.linesize[0],
                            Sample      = (IntPtr)frameBGR24.data[0],
                            // BUG FIX: the converter outputs AV_PIX_FMT_BGR24, so the sample
                            // must be labelled Bgr (was Rgb, causing swapped red/blue channels
                            // for consumers that honour the declared pixel format).
                            PixelFormat = VideoPixelFormatsEnum.Bgr
                        };
                        OnVideoSourceRawSampleFaster?.Invoke(timestampDuration, imageRawSample);
                    }
                }

                // Manage Encoded Sample
                if (OnVideoSourceEncodedSample != null)
                {
                    // (Re)create the converter when the source dimensions change.
                    if (_videoFrameYUV420PConverter == null ||
                        _videoFrameYUV420PConverter.SourceWidth != width ||
                        _videoFrameYUV420PConverter.SourceHeight != height)
                    {
                        _videoFrameYUV420PConverter = new VideoFrameConverter(
                            width, height,
                            (AVPixelFormat)frame.format,
                            width, height,
                            AVPixelFormat.AV_PIX_FMT_YUV420P);
                        logger.LogDebug($"Frame format: [{frame.format}]");
                    }

                    var frameYUV420P = _videoFrameYUV420PConverter.Convert(frame);
                    if ((frameYUV420P.width != 0) && (frameYUV420P.height != 0))
                    {
                        AVCodecID aVCodecId = FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec);

                        byte[]? encodedSample = _videoEncoder.Encode(aVCodecId, frameYUV420P, frameRate);

                        if (encodedSample != null)
                        {
                            // Note the event handler can be removed while the encoding is in progress.
                            OnVideoSourceEncodedSample?.Invoke(timestampDuration, encodedSample);
                        }
                        _forceKeyFrame = false;
                    }
                    else
                    {
                        // Conversion failed; request a key frame so the next successful
                        // encode produces a decodable starting point.
                        _forceKeyFrame = true;
                    }
                }
            }
        }
// ---- Code example #11 ----
 /// <summary>
 /// Relays an encoded video sample from the underlying FFmpeg video source to this
 /// source's subscribers.
 /// </summary>
 /// <param name="durationRtpUnits">Duration of the sample in RTP timestamp units.</param>
 /// <param name="sample">The encoded video sample.</param>
 private void _FFmpegVideoSource_OnVideoSourceEncodedSample(uint durationRtpUnits, byte[] sample)
 {
     // Snapshot the delegate so a concurrent unsubscribe cannot cause a null dereference.
     var handler = OnVideoSourceEncodedSample;
     if (handler != null)
     {
         handler.Invoke(durationRtpUnits, sample);
     }
 }