/// <summary>
/// Accepts a raw video sample from an external source, converts it to I420,
/// encodes it with the currently selected codec and raises the encoded-sample event.
/// </summary>
/// <param name="durationMilliseconds">Duration of the sample, used to derive the frame rate.</param>
/// <param name="width">Width of the raw frame in pixels.</param>
/// <param name="height">Height of the raw frame in pixels.</param>
/// <param name="sample">The raw pixel buffer.</param>
/// <param name="pixelFormat">Pixel format of <paramref name="sample"/>.</param>
public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
{
    if (_isClosed)
    {
        return;
    }

    if (OnVideoSourceEncodedSample != null)
    {
        // Derive a frame rate from the sample duration; clamp to at least 1 fps
        // since integer division yields 0 for durations over one second.
        uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : DEFAULT_FRAMES_PER_SECOND;
        if (fps == 0)
        {
            fps = 1;
        }

        // BGRA carries 4 bytes per pixel; the other supported formats here carry 3.
        int stride = (pixelFormat == VideoPixelFormatsEnum.Bgra) ? 4 * width : 3 * width;
        var i420Buffer = PixelConverter.ToI420(width, height, stride, sample, pixelFormat);

        byte[]? encodedBuffer = _ffmpegEncoder.Encode(
            FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec),
            i420Buffer,
            width,
            height,
            (int)fps,
            _forceKeyFrame);

        if (encodedBuffer != null)
        {
            uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;

            // Note the event handler can be removed while the encoding is in progress.
            OnVideoSourceEncodedSample?.Invoke(durationRtpTS, encodedBuffer);
        }

        if (_forceKeyFrame)
        {
            _forceKeyFrame = false;
        }
    }
}
/// <summary>
/// Handles a decoded video frame from the file source decoder: encodes it with the
/// currently selected codec and raises the encoded-sample event.
/// </summary>
/// <param name="frame">The decoded FFmpeg frame to encode.</param>
private void FileSourceDecoder_OnVideoFrame(ref AVFrame frame)
{
    if (OnVideoSourceEncodedSample == null)
    {
        return;
    }

    // Use the decoder's average frame rate, falling back to the default when
    // it is unavailable or reported as zero/negative.
    int frameRate = (int)_fileSourceDecoder.VideoAverageFrameRate;
    if (frameRate <= 0)
    {
        frameRate = DEFAULT_FRAME_RATE;
    }

    uint timestampDuration = (uint)(VIDEO_SAMPLING_RATE / frameRate);

    var encodedSample = _videoEncoder.Encode(
        FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec),
        frame,
        frameRate,
        _forceKeyFrame);

    if (encodedSample != null)
    {
        // Note the event handler can be removed while the encoding is in progress.
        OnVideoSourceEncodedSample?.Invoke(timestampDuration, encodedSample);

        if (_forceKeyFrame)
        {
            _forceKeyFrame = false;
        }
    }
}
/// <summary>
/// Decodes a received video frame (fast path) and raises the decoded-sample-faster
/// event for each resulting raw image.
/// </summary>
/// <param name="remoteEndPoint">The remote end point the frame was received from.</param>
/// <param name="timestamp">The RTP timestamp of the frame.</param>
/// <param name="payload">The encoded video payload.</param>
/// <param name="format">The negotiated video format of the payload.</param>
public void GotVideoFrame(IPEndPoint remoteEndPoint, uint timestamp, byte[] payload, VideoFormat format)
{
    if ((!_isClosed) && (payload != null))
    {
        AVCodecID codecID = FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec);

        var imageRawSamples = _ffmpegEncoder.DecodeFaster(codecID, payload, out var width, out var height);

        if (imageRawSamples == null || width == 0 || height == 0)
        {
            // CA2254: use a constant message template with named placeholders rather
            // than string interpolation so structured logging captures the values.
            logger.LogWarning("Decode of video sample failed, width {Width}, height {Height}.", width, height);
        }
        else
        {
            foreach (var imageRawSample in imageRawSamples)
            {
                OnVideoSinkDecodedSampleFaster?.Invoke(imageRawSample);
            }
        }
    }
}
/// <summary>
/// Decodes a received video frame to RGB and raises the decoded-sample event for
/// each resulting frame buffer.
/// </summary>
/// <param name="remoteEndPoint">The remote end point the frame was received from.</param>
/// <param name="timestamp">The RTP timestamp of the frame.</param>
/// <param name="payload">The encoded video payload.</param>
/// <param name="format">The negotiated video format of the payload.</param>
public void GotVideoFrame(IPEndPoint remoteEndPoint, uint timestamp, byte[] payload, VideoFormat format)
{
    if (!_isClosed)
    {
        AVCodecID codecID = FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec);

        var rgbFrames = _ffmpegEncoder.Decode(codecID, payload, out var width, out var height);

        if (rgbFrames == null || width == 0 || height == 0)
        {
            // CA2254: use a constant message template with named placeholders rather
            // than string interpolation so structured logging captures the values.
            logger.LogWarning("Decode of video sample failed, width {Width}, height {Height}.", width, height);
        }
        else
        {
            foreach (var rgbFrame in rgbFrames)
            {
                // RGB24 has 3 bytes per pixel, hence the width * 3 stride.
                OnVideoSinkDecodedSample?.Invoke(rgbFrame, (uint)width, (uint)height, (int)(width * 3), VideoPixelFormatsEnum.Rgb);
            }
        }
    }
}
/// <summary>
/// Handles a decoded frame from the video decoder. Delivers a BGR24 copy to raw
/// sample subscribers and a YUV420P-encoded sample to encoded sample subscribers.
/// </summary>
/// <param name="frame">The decoded FFmpeg frame.</param>
private unsafe void VideoDecoder_OnVideoFrame(ref AVFrame frame)
{
    if ((OnVideoSourceEncodedSample != null) || (OnVideoSourceRawSampleFaster != null))
    {
        // Use the decoder's average frame rate, falling back to the default when
        // it is unavailable or reported as zero/negative.
        int frameRate = (int)_videoDecoder.VideoAverageFrameRate;
        frameRate = (frameRate <= 0) ? Helper.DEFAULT_VIDEO_FRAME_RATE : frameRate;
        uint timestampDuration = (uint)(VideoFormat.DEFAULT_CLOCK_RATE / frameRate);

        var width = frame.width;
        var height = frame.height;

        // Manage Raw Sample.
        if (OnVideoSourceRawSampleFaster != null)
        {
            // Lazily (re)create the converter when the source dimensions change.
            if (_videoFrameBGR24Converter == null ||
                _videoFrameBGR24Converter.SourceWidth != width ||
                _videoFrameBGR24Converter.SourceHeight != height)
            {
                _videoFrameBGR24Converter = new VideoFrameConverter(
                    width, height,
                    (AVPixelFormat)frame.format,
                    width, height,
                    AVPixelFormat.AV_PIX_FMT_BGR24);
                // CA2254: constant message template instead of interpolation.
                logger.LogDebug("Frame format: [{FrameFormat}]", frame.format);
            }

            var frameBGR24 = _videoFrameBGR24Converter.Convert(frame);
            if ((frameBGR24.width != 0) && (frameBGR24.height != 0))
            {
                RawImage imageRawSample = new RawImage
                {
                    Width = width,
                    Height = height,
                    Stride = frameBGR24.linesize[0],
                    Sample = (IntPtr)frameBGR24.data[0],
                    // BUG FIX: the converter output is AV_PIX_FMT_BGR24 (blue first),
                    // so the sample must be labelled Bgr, not Rgb, or consumers will
                    // swap the red and blue channels.
                    PixelFormat = VideoPixelFormatsEnum.Bgr
                };
                OnVideoSourceRawSampleFaster?.Invoke(timestampDuration, imageRawSample);
            }
        }

        // Manage Encoded Sample.
        if (OnVideoSourceEncodedSample != null)
        {
            // Lazily (re)create the converter when the source dimensions change.
            if (_videoFrameYUV420PConverter == null ||
                _videoFrameYUV420PConverter.SourceWidth != width ||
                _videoFrameYUV420PConverter.SourceHeight != height)
            {
                _videoFrameYUV420PConverter = new VideoFrameConverter(
                    width, height,
                    (AVPixelFormat)frame.format,
                    width, height,
                    AVPixelFormat.AV_PIX_FMT_YUV420P);
                // CA2254: constant message template instead of interpolation.
                logger.LogDebug("Frame format: [{FrameFormat}]", frame.format);
            }

            var frameYUV420P = _videoFrameYUV420PConverter.Convert(frame);
            if ((frameYUV420P.width != 0) && (frameYUV420P.height != 0))
            {
                AVCodecID aVCodecId = FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec);

                byte[]? encodedSample = _videoEncoder.Encode(aVCodecId, frameYUV420P, frameRate);

                if (encodedSample != null)
                {
                    // Note the event handler can be removed while the encoding is in progress.
                    OnVideoSourceEncodedSample?.Invoke(timestampDuration, encodedSample);
                }

                _forceKeyFrame = false;
            }
            else
            {
                // Conversion failed: request a key frame so the stream can recover
                // once frames convert successfully again.
                _forceKeyFrame = true;
            }
        }
    }
}