/// <summary>
/// Reads the next packet from the input and feeds it to the decoder,
/// updating <see cref="State"/> according to the outcome of the read.
/// </summary>
/// <param name="packet">Reusable packet buffer; its data is kept across calls when the decoder could not accept it.</param>
/// <param name="receiveFrame">Frame used to receive decoded output from the codec.</param>
private void decodeNextFrame(AVPacket *packet, AVFrame *receiveFrame)
{
    // read data from input into AVPacket.
    // only read if the packet is empty, otherwise we would overwrite what's already there which can lead to visual glitches.
    int readFrameResult = 0;

    if (packet->buf == null)
    {
        readFrameResult = ffmpeg.av_read_frame(formatContext, packet);
    }

    if (readFrameResult >= 0)
    {
        State = DecoderState.Running;

        bool unrefPacket = true;

        // Only packets belonging to the decoded stream are forwarded to the codec.
        if (packet->stream_index == stream->index)
        {
            int sendPacketResult = sendPacket(receiveFrame, packet);

            // keep the packet data for next frame if we didn't send it successfully
            // (EAGAIN means the decoder's input queue is full and we must retry later).
            if (sendPacketResult == -AGffmpeg.EAGAIN)
            {
                unrefPacket = false;
            }
        }

        if (unrefPacket)
        {
            ffmpeg.av_packet_unref(packet);
        }
    }
    else if (readFrameResult == AGffmpeg.AVERROR_EOF)
    {
        // Flush decoder (a null packet signals end-of-stream to sendPacket).
        sendPacket(receiveFrame, null);

        if (Looping)
        {
            Seek(0);
        }
        else
        {
            // This marks the video stream as no longer relevant (until a future potential Seek operation).
            State = DecoderState.EndOfStream;
        }
    }
    else if (readFrameResult == -AGffmpeg.EAGAIN)
    {
        // No data available right now; yield briefly before the next attempt.
        State = DecoderState.Ready;
        Thread.Sleep(1);
    }
    else
    {
        // Unexpected read failure; log it and back off briefly before retrying.
        Logger.Log($"Failed to read data into avcodec packet: {getErrorMessage(readFrameResult)}");
        Thread.Sleep(1);
    }
}
/// <summary>
/// Opens <paramref name="url"/>, sets up a video decoder (optionally hardware
/// accelerated) and an audio decoder for the best matching streams.
/// </summary>
/// <param name="url">Media URL or file path to open.</param>
/// <param name="HWDeviceType">Optional hardware device type used for video decoding.</param>
public AudioStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
{
    _pFormatContext = ffmpeg.avformat_alloc_context();
    _receivedFrame = ffmpeg.av_frame_alloc();

    var pFormatContext = _pFormatContext;
    ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
    ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

    // ---- video stream -----------------------------------------------------
    AVCodec *videoCodec = null;
    _streamVideoIndex = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &videoCodec, 0).ThrowExceptionIfError();
    _pVideoCodecContext = ffmpeg.avcodec_alloc_context3(videoCodec);

    if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
    {
        ffmpeg.av_hwdevice_ctx_create(&_pVideoCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
    }

    ffmpeg.avcodec_parameters_to_context(_pVideoCodecContext, _pFormatContext->streams[_streamVideoIndex]->codecpar).ThrowExceptionIfError();

    // Derive FPS from the stream's average frame rate; fall back to 25 when unknown.
    if (_pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.den != 0)
    {
        Fps = _pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.num / _pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.den;
        Console.WriteLine("计算得到FPS");
    }
    else
    {
        Console.WriteLine("默认FPS");
        Fps = 25;
    }

    ffmpeg.avcodec_open2(_pVideoCodecContext, videoCodec, null).ThrowExceptionIfError();

    CodecName = ffmpeg.avcodec_get_name(videoCodec->id);
    FrameSize = new Size(_pVideoCodecContext->width, _pVideoCodecContext->height);
    PixelFormat = _pVideoCodecContext->pix_fmt;

    _pPacket = ffmpeg.av_packet_alloc();
    _pFrame = ffmpeg.av_frame_alloc();

    // ---- audio stream -----------------------------------------------------
    AVCodec *audioCodec = null;
    _streamAudioIndex = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &audioCodec, 0).ThrowExceptionIfError();
    _pAudioCodecContext = ffmpeg.avcodec_alloc_context3(audioCodec);
    ffmpeg.avcodec_parameters_to_context(_pAudioCodecContext, _pFormatContext->streams[_streamAudioIndex]->codecpar).ThrowExceptionIfError();
    ffmpeg.avcodec_open2(_pAudioCodecContext, audioCodec, null).ThrowExceptionIfError();

    // BUGFIX: stream index 0 is a valid audio stream index. The previous check
    // (_streamAudioIndex > 0) silently skipped audio initialisation whenever
    // the audio stream happened to be the first stream in the container.
    if (_streamAudioIndex >= 0)
    {
        AVStream *avs = _pFormatContext->streams[_streamAudioIndex];
        Console.WriteLine($"codec_id:{avs->codecpar->codec_id}");
        Console.WriteLine($"format:{avs->codecpar->format}");
        Console.WriteLine($"sample_rate:{avs->codecpar->sample_rate}");
        Console.WriteLine($"channels:{avs->codecpar->channels}");
        Console.WriteLine($"frame_size:{avs->codecpar->frame_size}");

        in_sample_fmt = _pAudioCodecContext->sample_fmt;
        in_sample_rate = _pAudioCodecContext->sample_rate;  // input sample rate
        in_ch_layout = _pAudioCodecContext->channel_layout; // input channel layout
        in_channels = _pAudioCodecContext->channels;
        in_start_time = avs->start_time;
    }
}
/// <summary>
/// Determines whether the specified packet is a Null Packet (data = null, size = 0).
/// Such null packets are used to read multiple frames from a single packet.
/// </summary>
/// <param name="packet">The packet to inspect; may be null.</param>
/// <returns><c>true</c> when the packet exists but carries no data; otherwise <c>false</c>.</returns>
protected static bool IsEmptyPacket(AVPacket *packet)
{
    // A null pointer is not considered an "empty packet" — only a real packet
    // with no payload qualifies.
    return packet != null
        && packet->data == null
        && packet->size == 0;
}
/// <summary>
/// Allocates the FFmpeg format context, packet and frame for this group.
/// The codec context and stream index stay unset until a stream is selected.
/// </summary>
public unsafe FfccVariableGroup()
{
    StreamIndex = -1;
    CodecContext = null;

    Format = ffmpeg.avformat_alloc_context();
    Packet = ffmpeg.av_packet_alloc();
    Frame = ffmpeg.av_frame_alloc();
}
/// <summary>
/// Wraps a native packet together with its owning stream and precomputes
/// timing and keyframe information from the packet fields.
/// </summary>
/// <param name="packetPtrIn">Native packet being wrapped.</param>
/// <param name="streamIn">Stream the packet belongs to (supplies the time base).</param>
internal MediaPacket(AVPacket *packetPtrIn, MediaStream streamIn)
{
    Stream = streamIn;
    PacketPtr = packetPtrIn;

    // Convert the packet's presentation timestamps to wall-clock spans using
    // the stream's time base.
    var pts = packetPtrIn->pts;
    StartTime = Stream.TimeBase.ToTimeSpan(pts);
    EndTime = Stream.TimeBase.ToTimeSpan(pts + packetPtrIn->duration);

    KeyFrame = (packetPtrIn->flags & ffmpeg.AV_PKT_FLAG_KEY) == ffmpeg.AV_PKT_FLAG_KEY;
}
/// <summary>
/// Pushes a packet into the decoding Packet Queue so it can later be
/// processed into one or more frames. Null packets are ignored.
/// </summary>
/// <param name="packet">The packet to enqueue; may be null.</param>
internal void SendPacket(AVPacket *packet)
{
    if (packet != null)
    {
        Packets.Push(packet);
    }
}
/// <summary>
/// Captures the fully initialised FFmpeg decoding state: the demuxer context,
/// the selected stream, the decoder context and the reusable packet/frame buffers.
/// </summary>
public Initialised(AVFormatContext *inputContext, int streamIndex, AVCodecContext *decoderContext, AVPacket *packet, AVFrame *spareFrame)
{
    this.spareFrame = spareFrame;
    this.packet = packet;
    this.decoderContext = decoderContext;
    this.streamIndex = streamIndex;
    this.inputContext = inputContext;
}
/// <summary>
/// Allocates the reusable decode frame and packet. The codec context itself is
/// created later, so both state flags start out false.
/// </summary>
public FFmpegBase()
{
    pDecodedFrame = ffmpeg.av_frame_alloc();
    pPacket = ffmpeg.av_packet_alloc();

    Initialized = false;
    ContextCreated = false;
}
/// <summary>
/// Creates a decoder that reads compressed media from a managed <see cref="Stream"/>
/// via a custom AVIO bridge and prepares the audio stream for decoding.
/// </summary>
/// <param name="stream">Managed stream supplying the encoded input data.</param>
/// <exception cref="InvalidDataException">Thrown when the input cannot be opened or probed.</exception>
/// <exception cref="NotSupportedException">Thrown when the input contains a stream type other than audio/video.</exception>
public unsafe FFMPEGDecoder(Stream stream)
{
    _state = PlayState.Stopped;

    // allocate a new format context
    _fmtCtx = ffmpeg.avformat_alloc_context();

    // create a custom avstream to read from C#'s Stream
    _avStream = new AvStream(stream);
    _avStream.Attach(_fmtCtx);

    fixed (AVFormatContext **ctxPtr = &_fmtCtx)
    {
        if (ffmpeg.avformat_open_input(ctxPtr, "", null, null) != 0)
        {
            throw new InvalidDataException("Cannot open input");
        }
    }

    if (ffmpeg.avformat_find_stream_info(_fmtCtx, null) < 0)
    {
        throw new InvalidDataException("Cannot find stream info");
    }

    // FIX: renamed misleading local "ms" — duration / AV_TIME_BASE yields seconds.
    double durationSeconds = _fmtCtx->duration / (double)ffmpeg.AV_TIME_BASE;
    _duration = TimeSpan.FromSeconds(durationSeconds);
    _bitRate = _fmtCtx->bit_rate;
    _targetStreamIndex = 0;

    _audioFormat.SampleRate = _DESIRED_SAMPLE_RATE;
    _audioFormat.Channels = _DESIRED_CHANNELS;
    _audioFormat.BitsPerSample = 16;
    _numSamples = (int)(_duration.TotalSeconds * _DESIRED_SAMPLE_RATE * _DESIRED_CHANNELS);

    // Iterate over all streams to get the overall number
    for (int i = 0; i < _fmtCtx->nb_streams; i++)
    {
        var avStream = _fmtCtx->streams[i];

        switch (avStream->codec->codec_type)
        {
            case AVMediaType.AVMEDIA_TYPE_VIDEO:
                break;

            case AVMediaType.AVMEDIA_TYPE_AUDIO:
                _targetStreamIndex = i;
                _stream = new AudioContext(avStream, this, _audioFormat);
                break;

            default:
                // BUGFIX: corrected misspelled exception message ("suppurted" -> "supported").
                throw new NotSupportedException("Invalid stream type, which is not supported!");
        }
    }

    _avPacket = ffmpeg.av_packet_alloc();
}
/// <summary>
/// Opens the given URL, locates the first video stream, opens its decoder and
/// fills the decoder configuration (codec, resolution, pixel format).
/// The decoding thread is created but not started here.
/// </summary>
/// <param name="url">Media URL or file path to decode.</param>
/// <param name="configuration">Configuration object updated with the detected stream properties.</param>
/// <exception cref="InvalidOperationException">Thrown when no video stream or no suitable decoder is found.</exception>
public StreamDecoder(string url, DecoderConfiguration configuration)
{
    FFmpegBinariesHelper.RegisterFFmpegBinaries(); //Should not be here
    this.decoderConfiguration = configuration;

    _pFormatContext = ffmpeg.avformat_alloc_context();

    var pFormatContext = _pFormatContext;
    ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
    ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

    // find the first video stream
    AVStream *pStream = null;

    for (var i = 0; i < _pFormatContext->nb_streams; i++)
    {
        if (_pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
        {
            pStream = _pFormatContext->streams[i];
            break;
        }
    }

    if (pStream == null)
    {
        // BUGFIX: corrected error message grammar ("Could not found" -> "Could not find").
        throw new InvalidOperationException("Could not find video stream.");
    }

    _streamIndex = pStream->index;
    _pCodecContext = pStream->codec;

    var codecId = _pCodecContext->codec_id;
    var pCodec = ffmpeg.avcodec_find_decoder(codecId);

    if (pCodec == null)
    {
        throw new InvalidOperationException("Unsupported codec.");
    }

    ffmpeg.avcodec_open2(_pCodecContext, pCodec, null).ThrowExceptionIfError();

    decoderConfiguration.codec = FormatHelper.FFmpegToOT2(codecId);
    decoderConfiguration.inputResolution = new Resolution(_pCodecContext->height, _pCodecContext->width);
    decoderConfiguration.inputPixelFormat = FormatHelper.FFmpegToOT2(_pCodecContext->pix_fmt);

    _pPacket = ffmpeg.av_packet_alloc();
    _pFrame = ffmpeg.av_frame_alloc();

    // Decode continuously until DecodeFrame reports a non-zero (error/EOF) result.
    decodingThread = new Thread(() =>
    {
        while (DecodeFrame() == 0)
        {
        }
    });
}
/// <summary>
/// Opens a video input (DirectShow capture device or RTP/RTSP URL), selects the
/// best video stream and opens its decoder, optionally hardware accelerated.
/// </summary>
/// <param name="url">Device name or network URL, depending on <paramref name="inputType"/>.</param>
/// <param name="inputType">Whether the input is a capture device or an RTP/RTSP source.</param>
/// <param name="HWDeviceType">Optional hardware device type used for decoding.</param>
public VideoStreamDecoder(string url, VideoInputType inputType, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
{
    try
    {
        ffmpeg.avdevice_register_all();

        iFormatContext = ffmpeg.avformat_alloc_context();
        receivedFrame = ffmpeg.av_frame_alloc();
        var _iFormatContext = iFormatContext;

        // BUGFIX: the dictionary pointer must start out null — av_dict_set
        // dereferences/reallocates through it, and the original code passed the
        // address of an uninitialised local.
        AVDictionary *avDict = null;
        ffmpeg.av_dict_set(&avDict, "reorder_queue_size", "1", 0);

        switch (inputType)
        {
            case VideoInputType.CAM_DEVICE:
                AVInputFormat *iformat = ffmpeg.av_find_input_format("dshow");
                ffmpeg.avformat_open_input(&_iFormatContext, url, iformat, null).ThrowExceptionIfError();
                break;

            case VideoInputType.RTP_RTSP:
                ffmpeg.avformat_open_input(&_iFormatContext, url, null, &avDict).ThrowExceptionIfError();
                break;

            default:
                break;
        }

        // BUGFIX: release the options dictionary. avformat_open_input leaves any
        // unconsumed entries in it, which previously leaked.
        ffmpeg.av_dict_free(&avDict);

        ffmpeg.avformat_find_stream_info(iFormatContext, null).ThrowExceptionIfError();

        AVCodec *codec;
        dec_stream_index = ffmpeg.av_find_best_stream(iFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0).ThrowExceptionIfError();

        iCodecContext = ffmpeg.avcodec_alloc_context3(codec);

        if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            ffmpeg.av_hwdevice_ctx_create(&iCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
        }

        ffmpeg.avcodec_parameters_to_context(iCodecContext, iFormatContext->streams[dec_stream_index]->codecpar).ThrowExceptionIfError();
        ffmpeg.avcodec_open2(iCodecContext, codec, null).ThrowExceptionIfError();

        CodecName = ffmpeg.avcodec_get_name(codec->id);
        FrameSize = new Size(iCodecContext->width, iCodecContext->height);
        PixelFormat = iCodecContext->pix_fmt;

        rawPacket = ffmpeg.av_packet_alloc();
        decodedFrame = ffmpeg.av_frame_alloc();
    }
    catch (AccessViolationException ex)
    {
        throw new AccessViolationException("Access Violation Exception", ex);
    }
}
/// <summary>
/// Wraps an existing native packet pointer.
/// </summary>
/// <param name="pointer">Native packet; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="pointer"/> is null.</exception>
private Packet(AVPacket *pointer)
{
    if (pointer == null)
    {
        throw new ArgumentNullException(nameof(pointer));
    }

    this.pointer = pointer;
}
/// <summary>
/// Enqueues the packet, updating the queue's running byte-length and
/// duration totals under the queue lock.
/// </summary>
/// <param name="packet">The packet to enqueue.</param>
public void Push(AVPacket *packet)
{
    lock (SyncRoot)
    {
        PacketPointers.Add((IntPtr)packet);
        Duration += packet->duration;
        BufferLength += packet->size;
    }
}
/// <summary>
/// Unreferences the wrapped packet's data (if any) and clears the pointer,
/// making Dispose safe to call more than once.
/// </summary>
public void Dispose()
{
    if (Pointer == null)
    {
        return;
    }

    ffmpeg.av_packet_unref(Pointer);
    Pointer = null;
}
/// <summary>
/// Writes an encoded packet to the output using interleaved muxing.
/// </summary>
/// <param name="packet">The encoded packet to write.</param>
/// <exception cref="FFmpegException">Thrown when av_interleaved_write_frame reports an error.</exception>
public void WriteInterleavedFramePacket(AVPacket *packet)
{
    int result = ffmpeg.av_interleaved_write_frame(fmtContext, packet);

    if (result < 0)
    {
        throw new FFmpegException(result, "Failed to write encoded packet to output.");
    }
}
/// <summary>
/// Creates a decoding context for H.264 and allocates the reusable packet.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when the decoder is missing or cannot be opened.</exception>
public FFmpegContext()
{
    _codec = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_H264);

    // BUGFIX: previously neither the decoder lookup nor avcodec_open2 was checked,
    // leaving a half-initialised context that crashes on first use.
    if (_codec == null)
    {
        throw new InvalidOperationException("H.264 decoder not found in the FFmpeg installation.");
    }

    _context = ffmpeg.avcodec_alloc_context3(_codec);

    if (_context == null)
    {
        throw new InvalidOperationException("Failed to allocate the H.264 codec context.");
    }

    if (ffmpeg.avcodec_open2(_context, _codec, null) != 0)
    {
        throw new InvalidOperationException("Failed to open the H.264 codec.");
    }

    _packet = ffmpeg.av_packet_alloc();
}
/// <summary>
/// Pushes a packet into the decoding Packet Queue and bumps the received-packet
/// counter. Null packets are ignored.
/// </summary>
/// <param name="packet">The packet to enqueue; may be null.</param>
internal void SendPacket(AVPacket *packet)
{
    if (packet != null)
    {
        Packets.Push(packet);
        ReceivedPacketCount++;
    }
}
/// <summary>
/// Stamps the packet with a caller-supplied PTS/DTS, records it as the last
/// frame timestamp, and forwards the packet to the stream unchanged.
/// </summary>
/// <param name="avp">Packet to stamp and push.</param>
/// <param name="customPtsValue">Timestamp applied to both pts and dts.</param>
public unsafe void AddPacket(AVPacket *avp, long customPtsValue)
{
    lastFramePts = customPtsValue;
    avp->pts = customPtsValue;
    avp->dts = customPtsValue;

    stream.PushAsIs(avp);
}
/// <summary>
/// Opens a DirectShow capture device (e.g. a webcam) and prepares its first
/// video stream for decoding.
/// </summary>
/// <param name="device">DirectShow device identifier (e.g. "video=My Webcam").</param>
/// <exception cref="InvalidOperationException">Thrown when no video stream or no suitable decoder is found.</exception>
public VideoStreamDecoder(string device)
{
    _pFormatContext = ffmpeg.avformat_alloc_context();
    var pFormatContext = _pFormatContext;

    //ffmpeg.av_register_all();
    ffmpeg.avdevice_register_all();

    // webcam input via the DirectShow demuxer
    AVInputFormat *iformat = ffmpeg.av_find_input_format("dshow");
    ffmpeg.avformat_open_input(&pFormatContext, device, iformat, null).ThrowExceptionIfError();

    // Opening a media file would instead take a URL or file name:
    //ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();

    // Probe stream info; this is a blocking call and may stall on network protocols.
    ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

    // find the first video stream
    AVStream *pStream = null;

    for (var i = 0; i < _pFormatContext->nb_streams; i++)
    {
        if (_pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
        {
            pStream = _pFormatContext->streams[i];
            break;
        }
    }

    if (pStream == null)
    {
        // BUGFIX: corrected error message grammar ("Could not found" -> "Could not find").
        throw new InvalidOperationException("Could not find video stream.");
    }

    _streamIndex = pStream->index;
    _pCodecContext = pStream->codec;

    var codecId = _pCodecContext->codec_id;
    var pCodec = ffmpeg.avcodec_find_decoder(codecId); //H264

    if (pCodec == null)
    {
        throw new InvalidOperationException("Unsupported codec.");
    }

    //open codec
    ffmpeg.avcodec_open2(_pCodecContext, pCodec, null).ThrowExceptionIfError();

    CodecName = ffmpeg.avcodec_get_name(codecId);
    FrameSize = new System.Windows.Size(_pCodecContext->width, _pCodecContext->height); // e.g. 640x480
    PixelFormat = _pCodecContext->pix_fmt;

    _pPacket = ffmpeg.av_packet_alloc();
    _pFrame = ffmpeg.av_frame_alloc();
}
/// <summary>
/// Prepares a packet for muxing (stream index + timebase rescale) and writes
/// it to the output using interleaved muxing.
/// </summary>
/// <param name="packet">The encoded packet to mux.</param>
private void WritePacket(AVPacket *packet)
{
    // Prepare packet for muxing: route it to the output stream and rescale
    // timestamps from the codec timebase to the stream timebase.
    packet->stream_index = _stream->index;
    ffmpeg.av_packet_rescale_ts(packet, _pCodecContext->time_base, _stream->time_base);

    ffmpeg.av_interleaved_write_frame(_outputContext, packet).ThrowExceptionIfError();
}
/// <summary>
/// Sets up a decoder from an explicit configuration (the codec is chosen by the
/// caller rather than probed from an input) and prepares the decoding thread.
/// </summary>
/// <param name="configuration">Decoder configuration carrying the codec to open.</param>
/// <exception cref="InvalidOperationException">Thrown when no decoder exists for the configured codec.</exception>
public VideoDecoder(DecoderConfiguration configuration)
{
    FFmpegBinariesHelper.RegisterFFmpegBinaries();

    Console.WriteLine("Current directory: " + Environment.CurrentDirectory);
    // BUGFIX: corrected misspelled log message ("Runnung" -> "Running").
    Console.WriteLine("Running in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");
    Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

    //FFMPEG initialization
    _pFormatContext = ffmpeg.avformat_alloc_context();
    var pFormatContext = _pFormatContext;
    //ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
    ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

    // GET DECODER FOR CONFIGURED CODEC
    var codecId = FormatHelper.OT2ToFFmpeg(configuration.codec);
    var pCodec = ffmpeg.avcodec_find_decoder(codecId);

    // BUGFIX: validate the decoder BEFORE allocating a context from it; the
    // original called avcodec_alloc_context3 with a potentially null codec and
    // only checked for null afterwards.
    if (pCodec == null)
    {
        throw new InvalidOperationException("Unsupported codec.");
    }

    _pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec);

    ffmpeg.avcodec_open2(_pCodecContext, pCodec, null).ThrowExceptionIfError();

    var codecName = ffmpeg.avcodec_get_name(codecId);
    var pixelFormat = _pCodecContext->pix_fmt;

    // ALLOC FRAME AND PACKET
    packet = ffmpeg.av_packet_alloc();
    frame = ffmpeg.av_frame_alloc();

    decodingThread = new Thread(DecodeFrames);
}
/// <summary>
/// Releases the native packet and frame owned by this instance.
/// </summary>
public void Dispose()
{
    // The free functions take the address of a pointer so they can null it out;
    // copy the fields to locals to obtain addressable storage.
    AVPacket *ownedPacket = this.packet;
    av_packet_free(&ownedPacket);

    AVFrame *ownedFrame = this.frame;
    av_frame_free(&ownedFrame);
}
/// <summary>
/// Wraps a native packet handle together with its owning stream.
/// </summary>
/// <param name="avPacket">Handle to the native AVPacket; must not be zero.</param>
/// <param name="stream">Stream the packet belongs to; must not be null.</param>
internal Packet (IntPtr avPacket, MediaStream stream)
{
    // Validate stream first, then the handle — preserves which exception is
    // raised when both arguments are invalid.
    if (stream == null)
    {
        throw new ArgumentNullException ("stream");
    }

    if (avPacket == IntPtr.Zero)
    {
        throw new ArgumentException ("Null pointer", "avPacket");
    }

    this.stream = stream;
    this.avPacket = avPacket;
    packet = (AVPacket*)avPacket.ToPointer();
}
/// <summary>
/// Creates a decoding context for the given codec, allocates the reusable
/// packet and resolves the codec's private decode entry point.
/// </summary>
/// <param name="codecId">Codec to initialise.</param>
/// <exception cref="InvalidOperationException">Thrown when the decoder is missing or cannot be opened.</exception>
public FFmpegContext(AVCodecID codecId)
{
    _codec = ffmpeg.avcodec_find_decoder(codecId);

    // BUGFIX: the original performed no error checking and later dereferenced
    // _codec->decode.Pointer, crashing when the decoder was missing or failed to open.
    if (_codec == null)
    {
        throw new InvalidOperationException($"Decoder for {codecId} was not found in the FFmpeg installation.");
    }

    _context = ffmpeg.avcodec_alloc_context3(_codec);

    if (_context == null)
    {
        throw new InvalidOperationException("Failed to allocate the codec context.");
    }

    if (ffmpeg.avcodec_open2(_context, _codec, null) != 0)
    {
        throw new InvalidOperationException($"Failed to open codec {codecId}.");
    }

    _packet = ffmpeg.av_packet_alloc();

    // Resolve the codec's internal decode callback (private libavcodec API).
    _decodeFrame = Marshal.GetDelegateForFunctionPointer <AVCodec_decode>(_codec->decode.Pointer);
}
/// <summary>
/// Looks up and opens the decoder for <paramref name="codecId"/>, allocates the
/// reusable packet, and resolves the codec's private decode entry point, whose
/// in-memory layout depends on the installed libavcodec version.
/// On any failure the error is logged and construction stops early, leaving the
/// context partially initialised.
/// </summary>
/// <param name="codecId">Codec to initialise.</param>
public FFmpegContext(AVCodecID codecId)
{
    _codec = FFmpegApi.avcodec_find_decoder(codecId);
    if (_codec == null)
    {
        Logger.Error?.PrintMsg(LogClass.FFmpeg, $"Codec wasn't found. Make sure you have the {codecId} codec present in your FFmpeg installation.");

        return;
    }

    _context = FFmpegApi.avcodec_alloc_context3(_codec);
    if (_context == null)
    {
        Logger.Error?.PrintMsg(LogClass.FFmpeg, "Codec context couldn't be allocated.");

        return;
    }

    if (FFmpegApi.avcodec_open2(_context, _codec, null) != 0)
    {
        Logger.Error?.PrintMsg(LogClass.FFmpeg, "Codec couldn't be opened.");

        return;
    }

    _packet = FFmpegApi.av_packet_alloc();
    if (_packet == null)
    {
        Logger.Error?.PrintMsg(LogClass.FFmpeg, "Packet couldn't be allocated.");

        return;
    }

    // avcodec_version() packs the version as (major << 16 | minor << 8 | micro).
    int avCodecRawVersion = FFmpegApi.avcodec_version();
    int avCodecMajorVersion = avCodecRawVersion >> 16;
    int avCodecMinorVersion = (avCodecRawVersion >> 8) & 0xFF;

    // libavcodec 59.24 changed AvCodec to move its private API and also move the codec function to an union.
    if (avCodecMajorVersion > 59 || (avCodecMajorVersion == 59 && avCodecMinorVersion > 24))
    {
        _decodeFrame = Marshal.GetDelegateForFunctionPointer <FFCodec.AVCodec_decode>(((FFCodec *)_codec)->CodecCallback);
    }
    // libavcodec 59.x changed AvCodec private API layout.
    else if (avCodecMajorVersion == 59)
    {
        _decodeFrame = Marshal.GetDelegateForFunctionPointer <FFCodec.AVCodec_decode>(((FFCodecLegacy <AVCodec> *)_codec)->Decode);
    }
    // libavcodec 58.x and lower
    else
    {
        _decodeFrame = Marshal.GetDelegateForFunctionPointer <FFCodec.AVCodec_decode>(((FFCodecLegacy <AVCodecLegacy> *)_codec)->Decode);
    }
}
/// <summary>
/// Pushes a packet into the decoding Packet Queue and bumps the received-packet
/// counter. Null packets are ignored.
/// </summary>
/// <param name="packet">The packet to enqueue; may be null.</param>
internal void SendPacket(AVPacket *packet)
{
    // TODO: check if packet is in play range
    // ffplay.c reference: pkt_in_play_range
    if (packet != null)
    {
        Packets.Push(packet);
        ReceivedPacketCount++;
    }
}
/// <summary>
/// Pushes a packet into the decoding Packet Queue and accumulates its size
/// into the total-bytes-read counter. Null packets are ignored.
/// </summary>
/// <param name="packet">The packet to enqueue; may be null.</param>
public void SendPacket(AVPacket *packet)
{
    if (packet == null)
    {
        return;
    }

    Packets.Push(packet);

    // Only positive sizes contribute to the byte counter.
    var size = packet->size;
    if (size > 0)
    {
        TotalBytesRead += (ulong)size;
    }
}
/// <summary>
/// Opens the given URL and prepares the first video stream that reports a
/// non-zero frame size for decoding, recording frame count and frame rate.
/// </summary>
/// <param name="url">Media URL or file path to decode.</param>
/// <exception cref="InvalidOperationException">Thrown when no usable video stream or decoder is found.</exception>
public VideoStreamDecoder(string url)
{
    _pFormatContext = ffmpeg.avformat_alloc_context();

    var pFormatContext = _pFormatContext;
    ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
    ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

    // find the first video stream
    AVStream *pStream = null;

    for (var i = 0; i < _pFormatContext->nb_streams; i++)
    {
        if (_pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
        {
            // Skip video streams with no reported dimensions.
            if ((_pFormatContext->streams[i]->codec->width != 0) && (_pFormatContext->streams[i]->codec->height != 0))
            {
                pStream = _pFormatContext->streams[i];
                break;
            }
        }
    }

    if (pStream == null)
    {
        // BUGFIX: corrected error message grammar ("Could not found" -> "Could not find").
        throw new InvalidOperationException("Could not find video stream.");
    }

    _streamIndex = pStream->index;
    _pCodecContext = pStream->codec;

    var codecId = _pCodecContext->codec_id;
    var pCodec = ffmpeg.avcodec_find_decoder(codecId);

    if (pCodec == null)
    {
        throw new InvalidOperationException("Unsupported codec.");
    }

    Frame_Number = pStream->nb_frames;

    // BUGFIX: guard against a zero denominator, which previously produced an
    // Infinity/NaN frame rate via float division by zero.
    Frame_fps = pStream->r_frame_rate.den != 0
        ? (float)pStream->r_frame_rate.num / pStream->r_frame_rate.den
        : 0f;

    ffmpeg.avcodec_open2(_pCodecContext, pCodec, null).ThrowExceptionIfError();

    CodecName = ffmpeg.avcodec_get_name(codecId);
    FrameSize = new Size(_pCodecContext->width, _pCodecContext->height);
    PixelFormat = _pCodecContext->pix_fmt;

    _pPacket = ffmpeg.av_packet_alloc();
    _pFrame = ffmpeg.av_frame_alloc();
}
/// <summary>
/// Encodes a frame with the legacy encode API, dispatching to the video or
/// audio encoder according to the stream's media type.
/// </summary>
/// <param name="stream_index">Index of the stream being encoded (used to look up its media type).</param>
/// <param name="ctx">Encoder context.</param>
/// <param name="pct">Destination packet for the encoded data.</param>
/// <param name="format">Frame to encode.</param>
/// <param name="frame">Out flag set by FFmpeg when a packet was produced.</param>
/// <returns>The FFmpeg return code from the underlying encode call.</returns>
public static int enc_func(int stream_index, AVCodecContext *ctx, AVPacket *pct, AVFrame *format, int *frame)
{
    bool isVideo = ifmt_ctx->streams[stream_index]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO;

    return isVideo
        ? ffmpeg.avcodec_encode_video2(ctx, pct, format, frame)
        : ffmpeg.avcodec_encode_audio2(ctx, pct, format, frame);
}
/// <summary>
/// Opens the given URL with a few pre-set demuxer hints (no audio codec,
/// H.264 video, 5 second duration) and prepares the first video stream
/// for decoding.
/// </summary>
/// <param name="url">Media URL or file path to decode.</param>
public VideoStreamDecoder(string url)
{
    this.pFormatContext = ffmpeg.avformat_alloc_context();

    var pFormatContext = this.pFormatContext;
    pFormatContext->audio_codec = null;
    pFormatContext->video_codec_id = AVCodecID.AV_CODEC_ID_H264;
    pFormatContext->duration = 5 * ffmpeg.AV_TIME_BASE; // 5 sec

    ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
    ffmpeg.avformat_find_stream_info(this.pFormatContext, null).ThrowExceptionIfError();

    // Locate the first video stream in the container.
    AVStream *videoStream = null;

    for (var streamNo = 0; streamNo < this.pFormatContext->nb_streams; streamNo++)
    {
        var candidate = this.pFormatContext->streams[streamNo];

        if (candidate->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
        {
            videoStream = candidate;
            break;
        }
    }

    if (videoStream == null)
    {
        throw new InvalidOperationException("Could not find video stream.");
    }

    streamIndex = videoStream->index;
    pCodecContext = videoStream->codec;

    var codecId = pCodecContext->codec_id;
    var decoder = ffmpeg.avcodec_find_decoder(codecId);

    if (decoder == null)
    {
        throw new InvalidOperationException("Unsupported codec.");
    }

    ffmpeg.avcodec_open2(pCodecContext, decoder, null).ThrowExceptionIfError();

    CodecName = ffmpeg.avcodec_get_name(codecId);
    FrameSize = new Size(pCodecContext->width, pCodecContext->height);
    PixelFormat = pCodecContext->pix_fmt;

    pPacket = ffmpeg.av_packet_alloc();
    pFrame = ffmpeg.av_frame_alloc();
}
/// <summary>
/// Pushes a packet into the decoding Packet Queue, first accumulating its size
/// into the lifetime byte counter. Null packets are ignored.
/// </summary>
/// <param name="packet">The packet to enqueue; may be null.</param>
public void SendPacket(AVPacket *packet)
{
    if (packet == null)
    {
        return;
    }

    // Account for the payload size BEFORE handing the packet to the queue.
    var size = packet->size;
    if (size > 0)
    {
        LifetimeBytesRead += (ulong)size;
    }

    Packets.Push(packet);
}