Code Example #1
        bool ReadPacket()
        {
            // Allocate memory for packet
            IntPtr pPacket = Allocate<FFmpeg.AVPacket>();

            // Read next frame into packet
            if (FFmpeg.av_read_frame(pFormatContext, pPacket) < 0)
            {
                return(false);
            }

            // Get packet from pointer
            FFmpeg.AVPacket packet = PtrToStructure<FFmpeg.AVPacket>(pPacket);
            // Remember the unmanaged pointer so av_free_packet can release it later
            packet.priv = pPacket;

            // If packet belongs to our video or audio stream, enqueue it
            if (hasVideo && packet.stream_index == videoStream.index)
            {
                videoPacketQueue.Enqueue(packet);
            }
            if (hasAudio && packet.stream_index == audioStream.index)
            {
                audioPacketQueue.Enqueue(packet);
            }

            return(true);
        }
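
The Allocate<FFmpeg.AVPacket>() and PtrToStructure<FFmpeg.AVPacket>() calls above are helpers defined elsewhere in the same project. A minimal sketch of what they might look like, assuming they are thin wrappers over System.Runtime.InteropServices.Marshal (the signatures and constraints here are guesses, not the project's actual code):

        // Hypothetical helpers assumed by the example above
        static IntPtr Allocate<T>() where T : struct
        {
            // Reserve unmanaged memory large enough to hold the marshalled struct
            return Marshal.AllocHGlobal(Marshal.SizeOf(typeof(T)));
        }

        static T PtrToStructure<T>(IntPtr ptr) where T : struct
        {
            // Copy the unmanaged struct back into a managed value
            return (T)Marshal.PtrToStructure(ptr, typeof(T));
        }
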
Code Example #2
File: FFmpegDecoder.cs  Project: zurgeg/riivolution
        public int Read(int count)
        {
            // Serve whatever is still cached from the previous call first
            int offset = ReadCache(count, 0);

            while (count - offset > 0)
            {
                if (FFmpeg.av_read_frame(FormatPointer, PacketPointer) < 0)
                {
                    break;
                }
                FFmpeg.AVPacket packet = (FFmpeg.AVPacket)Marshal.PtrToStructure(PacketPointer, typeof(FFmpeg.AVPacket));

                // Reset the spill-over cache before decoding this packet
                Cache.Position = 0;
                CacheOffset    = 0;
                CacheLength    = 0;

                while (packet.size > 0)
                {
                    int datasize = FFmpegBufferSize;
                    int used     = FFmpeg.avcodec_decode_audio2(AVStream.codec, FFmpegBuffer, ref datasize, packet.data, packet.size);
                    // Advance within the packet by the number of bytes the decoder consumed
                    packet.size -= used;
                    packet.data  = new IntPtr(packet.data.ToInt64() + used);

                    if (datasize <= 0)
                    {
                        break;
                    }

                    int read = Math.Max(Math.Min(datasize, (count - offset) * 2 * Channels), 0);
                    int left = datasize - read;
                    if (read > 0)
                    {
                        int     samples   = read / 2 / Channels;
                        short[] bitstream = new short[read / 2];
                        Marshal.Copy(FFmpegBuffer, bitstream, 0, read / 2);
                        AudioBuffer.DeinterlaceFrom(bitstream, samples, offset);
                        offset += samples;
                    }

                    // Stash any decoded bytes that did not fit into this request
                    if (left > 0)
                    {
                        Marshal.Copy(new IntPtr(FFmpegBuffer.ToInt64() + read), CacheBuffer, 0, left);
                        Cache.Write(CacheBuffer, 0, left);
                        CacheLength += left;
                    }
                }
            }

            return(offset);
        }
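
AudioBuffer.DeinterlaceFrom is specific to this project; from the way it is called it presumably splits the interleaved 16-bit samples into one buffer per channel. A rough, hypothetical illustration of that idea (not the project's implementation):

        // Hypothetical deinterleaving: interleaved 16-bit PCM (L R L R ...) is
        // split into one array per channel, starting at the given sample offset
        static void Deinterleave(short[] interleaved, short[][] channels, int samples, int offset)
        {
            int channelCount = channels.Length;
            for (int s = 0; s < samples; s++)
            {
                for (int c = 0; c < channelCount; c++)
                {
                    channels[c][offset + s] = interleaved[s * channelCount + c];
                }
            }
        }
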
Code Example #3
File: MediaFile.cs  Project: rvs76/Maranate
        private PendingFrame ReadVideoFrame()
        {
            int ret;

            while (true)
            {
                ret = FFmpeg.av_read_frame(_pFormatContext, _pPacket);
                if (ret < 0)
                {
                    // End of stream: build an empty packet with extrapolated timestamps and
                    // feed it to the decoder to flush any delayed frames it is still holding
                    var packet = new FFmpeg.AVPacket();
                    packet.dts      = _lastPacket.dts + PTSPerField * _videoCodecContext.ticks_per_frame;
                    packet.pts      = _lastPacket.pts + PTSPerField * _videoCodecContext.ticks_per_frame;
                    packet.duration = _lastPacket.duration;
                    int sizeOfPacket = Marshal.SizeOf(packet);
                    var pPacket      = Marshal.AllocHGlobal(sizeOfPacket);
                    RtlZeroMemory(pPacket, sizeOfPacket);
                    Marshal.StructureToPtr(packet, pPacket, true);
                    int frameFinished = 0;
                    ret = FFmpeg.avcodec_decode_video2(_pVideoCodecContext, _pFrameOrig, ref frameFinished, pPacket);
                    Marshal.FreeHGlobal(pPacket);
                    if (frameFinished != 0)
                    {
                        return(ProcessFrame());
                    }
                    break;
                }

                _formatContext = (FFmpeg.AVFormatContext)Marshal.PtrToStructure(_pFormatContext, typeof(FFmpeg.AVFormatContext));

                _lastPacket = (FFmpeg.AVPacket)Marshal.PtrToStructure(_pPacket, typeof(FFmpeg.AVPacket));

                if (_lastPacket.stream_index == _videoStreamIndex)
                {
                    // Decode the video frame
                    int frameFinished = 0;
                    ret = FFmpeg.avcodec_decode_video2(_pVideoCodecContext, _pFrameOrig, ref frameFinished, _pPacket);

                    FFmpeg.av_free_packet(_pPacket);

                    if (frameFinished != 0)
                    {
                        return(ProcessFrame());
                    }
                }
            }

            return(null);
        }
Code Example #4
        public void Rewind()
        {
            lock (locker)
            {
                if (hasVideo)
                {
                    FFmpeg.av_seek_frame(pFormatContext, videoStream.index, 0, 0);
                    FFmpeg.avcodec_flush_buffers(videoStream.codec);
                }

                if (hasAudio)
                {
                    FFmpeg.av_seek_frame(pFormatContext, audioStream.index, 0, 0);
                    FFmpeg.avcodec_flush_buffers(audioStream.codec);
                }

                videoPacketQueue.Clear();
                audioPacketQueue.Clear();

                vPacket = new FFmpeg.AVPacket();
                aPacket = new FFmpeg.AVPacket();
            }
        }
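
This example always seeks back to timestamp 0 with the flags argument set to 0. A hedged sketch of seeking to an arbitrary position with the same wrapper, assuming the marshalled AVStream exposes the stream's time_base (the Seek method and that assumption are not part of the original code):

        // Hypothetical variant of Rewind that seeks to a time given in seconds
        public void Seek(double seconds)
        {
            const int AVSEEK_FLAG_BACKWARD = 1;   // flag value from FFmpeg's avformat.h

            lock (locker)
            {
                if (hasVideo)
                {
                    // Convert seconds into the video stream's timestamp units
                    long ts = (long)(seconds * videoStream.time_base.den / videoStream.time_base.num);
                    FFmpeg.av_seek_frame(pFormatContext, videoStream.index, ts, AVSEEK_FLAG_BACKWARD);
                    FFmpeg.avcodec_flush_buffers(videoStream.codec);
                }
                // (the audio stream would be handled the same way)

                videoPacketQueue.Clear();
                vPacket = new FFmpeg.AVPacket();
            }
        }
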
Code Example #5
        public bool Stream()
        {
            int result;

            //  FFmpeg.AVPacket packet = new FFmpeg.AVPacket();
            // Allocate unmanaged memory for the packet structure
            IntPtr pPacket = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(FFmpeg.AVPacket)));

            //Marshal.StructureToPtr(packet, pPacket, false);
            //  Marshal.PtrToStructure(

            result = FFmpeg.av_read_frame(pFormatContext, pPacket);
            if (result < 0)
            {
                return(false);
            }
            count++;

            int    frameSize = 0;
            IntPtr pSamples  = IntPtr.Zero;

            FFmpeg.AVPacket packet = (FFmpeg.AVPacket)
                                     Marshal.PtrToStructure(pPacket, typeof(FFmpeg.AVPacket));
            Marshal.FreeHGlobal(pPacket);

            if (LivtUpdateEvent != null)
            {
                int    cur   = (int)(packet.dts * timebase.num / timebase.den);
                int    total = (int)(formatContext.duration / TIMESTAMP_BASE);
                string time  = String.Format("{0} out of {1} seconds", cur, total);
                LivtUpdateEvent(time);
            }

            if (packet.stream_index != this.audioStartIndex)
            {
                this.isAudioStream = false;
                return(true);
            }
            this.isAudioStream = true;

            try
            {
                pSamples = Marshal.AllocHGlobal(AUDIO_FRAME_SIZE);
                //int size = FFmpeg.avcodec_decode_audio(pAudioCodecContext, pSamples,
                //        ref frameSize, packet.data, packet.size);

                //FFmpeg.av_free_packet(pPacket);

                this.sampleSize = frameSize;
                Marshal.Copy(pSamples, samples, 0, AUDIO_FRAME_SIZE);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
                return(false);
            }
            finally
            {
                Marshal.FreeHGlobal(pSamples);
            }

            return(true);
        }
Code Example #6
        public bool NextVideoFrame(IntPtr target, FFmpeg.PixelFormat desiredFormat, ref double time)
        {
            if (!hasVideo)
            {
                return(false);
            }

            int  got_picture = 0;
            long pts         = -1;

            // Allocate video frame
            vFrame = FFmpeg.avcodec_alloc_frame();

            // Decode packets until we've got a full frame
            while (got_picture == 0)
            {
                // If we need a new packet, get it
                if (vPacket.size <= 0)
                {
                    if (vPacket.data != IntPtr.Zero)
                    {
                        FFmpeg.av_free_packet(vPacket.priv);
                    }

                    lock (locker)
                    {
                        // If there are no more packets in the queue, read them from stream
                        while (videoPacketQueue.Count < 1)
                        {
                            if (!ReadPacket())
                            {
                                return(false);
                            }
                        }

                        vPacket = videoPacketQueue.Dequeue();
                    }
                }

                // Do nothing if timing is too early
                if (pts == -1)
                {
                    pts = vPacket.pts;
                    if (pts * videoTimebase > time)
                    {
                        time = pts * videoTimebase - time;
                        return(true);
                    }
                    time = 0;
                }

                // Decode packet
                int length = FFmpeg.avcodec_decode_video(videoStream.codec, vFrame, ref got_picture, vPacket.data, vPacket.size);

                // Error, skip packet
                if (length < 0)
                {
                    vPacket.size = 0;
                    continue;
                }

                // Move forward in packet
                vPacket.data  = new IntPtr(vPacket.data.ToInt64() + length);
                vPacket.size -= length;
            }

            // Create RGB frame
            IntPtr rgbFrame = FFmpeg.avcodec_alloc_frame();

            FFmpeg.avpicture_fill(rgbFrame, target, (int)desiredFormat, width, height);

            // Convert video frame to RGB
            FFmpeg.img_convert(rgbFrame, (int)desiredFormat, vFrame, (int)originalVideoFormat, width, height);

            // Free memory
            FFmpeg.av_free(rgbFrame);
            FFmpeg.av_free(vFrame);

            return(true);
        }
Code Example #7
        public bool NextAudioFrame(IntPtr target, ref int targetsize, int minbuffer)
        {
            if (!hasAudio)
            {
                return(false);
            }

            int byteswritten = 0;

            // Decode packets until we're satisfied
            while (true)
            {
                // If we need a new packet, get it
                if (aPacket.size == 0)
                {
                    if (aPacket.data != IntPtr.Zero)
                    {
                        FFmpeg.av_free_packet(aPacket.priv);
                    }

                    lock (locker)
                    {
                        // If there are no more packets in the queue, read them from stream
                        while (audioPacketQueue.Count < 1)
                        {
                            if (!ReadPacket())
                            {
                                targetsize = byteswritten;
                                return(false);
                            }
                        }
                        aPacket = audioPacketQueue.Dequeue();
                    }
                }

                // Decode packet
                int datasize = targetsize - byteswritten;
                int length   =
                    FFmpeg.avcodec_decode_audio(audioStream.codec, target, ref datasize, aPacket.data, aPacket.size);

                if (length < 0)
                {
                    // Error, skip packet
                    aPacket.size = 0;
                    continue;
                }

                // Move forward in packet
                aPacket.size -= length;
                aPacket.data  = new IntPtr(aPacket.data.ToInt64() + length);

                // Frame not finished yet
                if (datasize <= 0)
                {
                    continue;
                }

                // Move forward in target buffer
                target        = new IntPtr(target.ToInt64() + datasize);
                byteswritten += datasize;

                // Load next frame when minimum buffer size is not reached
                if (byteswritten < minbuffer)
                {
                    continue;
                }

                break;
            }

            // Output buffer size
            targetsize = byteswritten;

            return(true);
        }
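
For context, a rough sketch of how a caller might drive NextVideoFrame and NextAudioFrame together; the pixel-format member name and the buffer handling below are assumptions, not part of the original project:

        // Hypothetical driver for the two methods above
        public void PumpOnce(IntPtr rgbBuffer, IntPtr pcmBuffer, int pcmCapacity, ref double videoClock)
        {
            // Ask for the next video frame; NextVideoFrame returns early
            // (leaving the remaining wait time in videoClock) if the frame is not due yet
            NextVideoFrame(rgbBuffer, FFmpeg.PixelFormat.PIX_FMT_RGB24, ref videoClock);

            // Fill the audio buffer; on return pcmSize holds the number of bytes written
            int pcmSize = pcmCapacity;
            NextAudioFrame(pcmBuffer, ref pcmSize, pcmCapacity / 2);
        }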