/// <summary>
/// Decodes the next video frame and writes it, converted to <paramref name="desiredFormat"/>,
/// into the caller-supplied buffer <paramref name="target"/>.
/// </summary>
/// <param name="target">Pointer to a caller-allocated pixel buffer large enough for width*height in the desired format.</param>
/// <param name="desiredFormat">Pixel format to convert the decoded frame into.</param>
/// <param name="time">
/// On input: the playback time to decode for. If the next frame's presentation time is later than
/// this, no frame is decoded and this is set to the remaining wait time. On a decoded frame it is set to 0.
/// </param>
/// <returns>
/// false if there is no video stream or the packet source is exhausted; true otherwise
/// (including the "frame not yet due" case — check <paramref name="time"/> to distinguish).
/// </returns>
public bool NextVideoFrame(IntPtr target, FFmpeg.PixelFormat desiredFormat, ref double time)
{
    if (!hasVideo)
    {
        return false;
    }

    int got_picture = 0;
    long pts = -1;

    // Allocate the native frame the decoder writes into.
    vFrame = FFmpeg.avcodec_alloc_frame();

    // Feed packets to the decoder until it reports a complete picture.
    while (got_picture == 0)
    {
        // If the current packet is fully consumed, fetch the next one.
        if (vPacket.size <= 0)
        {
            if (vPacket.data != IntPtr.Zero)
            {
                FFmpeg.av_free_packet(vPacket.priv);
            }

            lock (locker)
            {
                // Refill the queue from the stream if it is empty.
                while (videoPacketQueue.Count < 1)
                {
                    if (!ReadPacket())
                    {
                        // BUGFIX: free the allocated frame before bailing out,
                        // otherwise the native allocation leaks on end-of-stream.
                        FFmpeg.av_free(vFrame);
                        return false;
                    }
                }
                vPacket = videoPacketQueue.Dequeue();
            }
        }

        // On the first packet, check whether the frame is due yet.
        if (pts == -1)
        {
            pts = vPacket.pts;
            // NOTE(review): videoTimebase is taken from the codec context while
            // packet pts is in stream time base units — confirm these agree for
            // the containers this is used with.
            if (pts * videoTimebase > time)
            {
                // Frame is in the future: report the remaining delay instead of decoding.
                time = pts * videoTimebase - time;
                // BUGFIX: free the allocated frame on this early return as well.
                FFmpeg.av_free(vFrame);
                return true;
            }
            time = 0;
        }

        // Decode as much of the packet as the decoder consumes in one call.
        int length = FFmpeg.avcodec_decode_video(videoStream.codec, vFrame, ref got_picture, vPacket.data, vPacket.size);

        // Decoder error: discard the rest of this packet and try the next one.
        if (length < 0)
        {
            vPacket.size = 0;
            continue;
        }

        // Advance past the consumed bytes within the packet.
        vPacket.data = new IntPtr(vPacket.data.ToInt64() + length);
        vPacket.size -= length;
    }

    // Wrap the caller's buffer in an AVPicture so img_convert can write into it.
    IntPtr rgbFrame = FFmpeg.avcodec_alloc_frame();
    FFmpeg.avpicture_fill(rgbFrame, target, (int)desiredFormat, width, height);

    // Convert the decoded frame into the requested pixel format.
    FFmpeg.img_convert(rgbFrame, (int)desiredFormat, vFrame, (int)originalVideoFormat, width, height);

    // Release native frame structures (pixel data in 'target' belongs to the caller).
    FFmpeg.av_free(rgbFrame);
    FFmpeg.av_free(vFrame);
    return true;
}
/// <summary>
/// Opens a media stream with FFmpeg, locating at most one audio and one video stream,
/// recording their properties (channels, sample rate, dimensions, pixel format, time bases)
/// and opening the matching decoders.
/// </summary>
/// <param name="inStream">Stream containing the media data; registered under a synthetic
/// "stream://" name so FFmpeg's custom protocol handler can read from it.</param>
/// <exception cref="Exception">
/// Thrown when the stream cannot be opened, stream info cannot be read, a codec fails to open,
/// or no audio/video stream is found.
/// </exception>
public MediaFile(Stream inStream)
{
    // Create a unique pseudo-filename keyed by the stream counter.
    string filename = "stream://" + counter;

    // Register the managed stream so FFmpeg's protocol callback can locate it.
    streams.Add(filename, inStream);

    // Open the "file" with FFmpeg.
    if (FFmpeg.av_open_input_file(out pFormatContext, filename, IntPtr.Zero, 0, IntPtr.Zero) < 0)
    {
        throw new Exception("Unable to open stream");
    }

    // Marshal the format context into a managed copy.
    FFmpeg.AVFormatContext formatContext = PtrToStructure<FFmpeg.AVFormatContext>(pFormatContext);

    // Probe the container for stream information.
    if (FFmpeg.av_find_stream_info(pFormatContext) < 0)
    {
        throw new Exception("Unable to find stream info");
    }

    // Examine every stream in the container.
    for (int i = 0; i < formatContext.nb_streams; ++i)
    {
        FFmpeg.AVStream stream = PtrToStructure<FFmpeg.AVStream>(formatContext.streams[i]);
        FFmpeg.AVCodecContext codecContext = PtrToStructure<FFmpeg.AVCodecContext>(stream.codec);

        // Find a decoder for this stream's codec.
        IntPtr pCodec = FFmpeg.avcodec_find_decoder(codecContext.codec_id);

        // BUGFIX: check for a null codec pointer BEFORE marshalling from it;
        // the original called PtrToStructure on pCodec first, dereferencing
        // a null pointer whenever no decoder was available.
        if (pCodec == IntPtr.Zero)
        {
            continue;
        }
        FFmpeg.AVCodec codec = PtrToStructure<FFmpeg.AVCodec>(pCodec);

        // Pick up the first audio and first video stream only.
        bool open = false;
        switch (codecContext.codec_type)
        {
            case FFmpeg.CodecType.CODEC_TYPE_AUDIO:
                // We only need 1 audio stream.
                if (hasAudio)
                {
                    break;
                }

                // Record audio stream properties.
                hasAudio = true;
                numChannels = codecContext.channels;
                audioDepth = (codecContext.sample_fmt == FFmpeg.SampleFormat.SAMPLE_FMT_U8) ? 8 : 16;
                frequency = codecContext.sample_rate;
                audioStream = stream;
                audioTimebase = (double)stream.time_base.num / (double)stream.time_base.den;
                open = true;
                break;

            case FFmpeg.CodecType.CODEC_TYPE_VIDEO:
                // We only need 1 video stream.
                if (hasVideo)
                {
                    break;
                }

                // Allow the decoder to accept truncated frames if it supports them.
                if ((codec.capabilities & FFmpeg.CODEC_CAP_TRUNCATED) != 0)
                {
                    codecContext.flags = codecContext.flags | FFmpeg.CODEC_FLAG_TRUNCATED;
                }

                // Record video stream properties.
                hasVideo = true;
                width = codecContext.width;
                height = codecContext.height;
                videoStream = stream;
                originalVideoFormat = codecContext.pix_fmt;
                // NOTE(review): audio uses the stream time base while video uses the
                // codec time base — looks intentional for this FFmpeg vintage, but confirm.
                videoTimebase = (double)codecContext.time_base.num / (double)codecContext.time_base.den;
                open = true;
                break;
        }

        // Write the (possibly modified) codec context back to native memory.
        Marshal.StructureToPtr(codecContext, stream.codec, false);

        // Open the decoder for streams we selected.
        if (open)
        {
            if (FFmpeg.avcodec_open(stream.codec, pCodec) < 0)
            {
                throw new Exception("Unable to open codec");
            }
        }
    }

    // Fail if the container yielded neither audio nor video.
    if (!hasAudio && !hasVideo)
    {
        throw new Exception("No codecs or streams found");
    }
}