Represents a video or audio frame.
Inheritance: IDisposable
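The examples below are drawn from an FFmpeg-based WPF media player. As orientation, here is a minimal sketch of the FFmpegMediaFrame surface those examples rely on. The member names are taken from the code on this page, but the types shown (and everything else about the class) are assumptions, not the library's actual definition.

        // A minimal sketch, assuming only the members the examples below touch.
        // Types such as decimal for the time fields are guesses, not confirmed API.
        public unsafe class FFmpegMediaFrame : IDisposable
        {
            public MediaFrameType Type;                 // Audio or Video
            public decimal StartTime;                   // presentation time, in seconds
            public decimal Duration;                    // frame duration, in seconds
            public long Timestamp;                      // raw best-effort timestamp
            public int StreamIndex;                     // source stream index
            public FFmpegMediaFrameFlags Flags;
            public FFmpegPictureType PictureType;
            public int CodedPictureNumber;              // -1 for audio frames

            public byte[] AudioBuffer;                  // audio frames: managed PCM bytes

            public AVPicture* Picture;                  // video frames: native picture...
            public sbyte* PictureBuffer;                // ...and its pixel buffer,
            public IntPtr PictureBufferPtr;             // exposed as an IntPtr
            public uint PictureBufferLength;            // buffer length in bytes

            public void Dispose() { /* frees the native memory; sketched near the end */ }
        }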
        /// <summary>
        /// Adds the specified frame in the correct position,
        /// ensuring the collection stays ordered by frame start time.
        /// </summary>
        /// <param name="frame">The frame to add.</param>
        /// <exception cref="System.ArgumentNullException">frame is null.</exception>
        /// <exception cref="System.IndexOutOfRangeException">Buffer is already at capacity.</exception>
        public void Add(FFmpegMediaFrame frame)
        {
            if (frame == null)
            {
                throw new ArgumentNullException(nameof(frame));
            }

            lock (SyncLock)
            {
                if (Frames.Count >= Capacity)
                {
                    throw new IndexOutOfRangeException("Buffer is already at capacity.");
                }

                if (frame.Type != Type)
                {
                    this.ThrowInvalidFrameTypeException(frame.Type);
                }

                try
                {
                    Frames.Add(frame);

                    if (frame.StartTime < LastFrameTime)
                    {
                        Frames.Sort(FrameStartTimeComparer);
                    }
                }
                finally
                {
                    RecomputeProperties();
                }
            }
        }
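A hedged usage sketch follows. The cache construction below is hypothetical (the page does not show the collection's constructor), but it illustrates the contract Add documents: a type check, a capacity limit, and a re-sort when frames arrive out of order.

        // Hypothetical usage; the constructor arguments are illustrative only.
        var videoCache = new FFmpegMediaFrameCache(capacity: 24, type: MediaFrameType.Video);

        foreach (var frame in decodedVideoFrames)   // may be out of order after a seek
        {
            try
            {
                // Re-sorts internally when frame.StartTime precedes the last frame
                videoCache.Add(frame);
            }
            catch (IndexOutOfRangeException)
            {
                // The buffer is at capacity: stop and let the renderer drain it first
                break;
            }
        }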
        /// <summary>
        /// Renders the video image. This method is called by a DispatcherTimer.
        /// It is responsible for continuously rendering the decoded video image,
        /// and it avoids re-rendering an image that has already been presented.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
        private void RenderVideoImage(object sender, EventArgs e)
        {
            MediaFramesExtractedDone.Wait(Constants.FrameExtractorWaitMs);
            var renderTime = RealtimeClock.PositionSeconds;

            try
            {
                var videoFrame = VideoFramesCache.GetFrame(renderTime, false);
                if (videoFrame == null || videoFrame == LastRenderedVideoFrame)
                {
                    return;
                }
                if (videoFrame.PictureBufferPtr != IntPtr.Zero)
                {
                    // Copy the BGR24 bytes directly into the bitmap's back buffer, then
                    // invalidate the full rectangle so WPF repaints it.
                    VideoRenderer.Lock();
                    Helper.NativeMethods.RtlMoveMemory(VideoRenderer.BackBuffer, videoFrame.PictureBufferPtr, videoFrame.PictureBufferLength);
                    VideoRenderer.AddDirtyRect(new Int32Rect(0, 0, VideoRenderer.PixelWidth, VideoRenderer.PixelHeight));
                    VideoRenderer.Unlock();
                    LastRenderedVideoFrame = videoFrame;
                }
            }
            finally
            {
                this.Position = renderTime;
            }
        }
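For context, here is a hedged sketch of the plumbing this handler assumes: VideoRenderer as a WriteableBitmap sized to the video, a DispatcherTimer driving the handler, and RtlMoveMemory as the standard kernel32 bulk-copy P/Invoke. Apart from the RtlMoveMemory signature, the names and values here are assumptions.

        internal static class NativeMethods
        {
            // kernel32's bulk memory copy; this P/Invoke signature is standard.
            [System.Runtime.InteropServices.DllImport("kernel32.dll", EntryPoint = "RtlMoveMemory")]
            public static extern void RtlMoveMemory(IntPtr destination, IntPtr source, uint length);
        }

        // Hypothetical wiring for the renderer and its timer.
        private void InitializeVideoRenderer()
        {
            VideoRenderer = new WriteableBitmap(
                VideoFrameWidth, VideoFrameHeight, 96, 96, PixelFormats.Bgr24, null);

            var renderTimer = new DispatcherTimer(DispatcherPriority.Render)
            {
                Interval = TimeSpan.FromMilliseconds(1000d / 60d)   // ~60 Hz repaint
            };
            renderTimer.Tick += RenderVideoImage;
            renderTimer.Start();
        }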
        /// <summary>
        /// Gets the index of the frame. Returns -1 for not found.
        /// </summary>
        /// <param name="frame">The frame.</param>
        /// <returns>The zero-based index of the frame, or -1 if it is not in the collection.</returns>
        public int IndexOf(FFmpegMediaFrame frame)
        {
            lock (SyncLock)
            {
                if (frame == null)
                {
                    return -1;
                }

                return Frames.IndexOf(frame);
            }
        }
        /// <summary>
        /// Resamples the most recently decoded audio samples into the output format
        /// and wraps the resulting PCM bytes in a managed audio frame.
        /// </summary>
        private FFmpegMediaFrame CreateMediaFrameFromDecodedWaveHolder()
        {
            // Resample the decoded samples into the output format via a temporary native buffer
            IntPtr bufferPtr = IntPtr.Zero;

            byte[] audioBuffer;

            try
            {
                var inputSampleCount  = DecodedWaveHolder->nb_samples;

                // Worst-case output count: the resampler's buffered delay plus the new
                // input samples, rescaled to the output sample rate and rounded up.
                var outputDelay       = ffmpeg.swr_get_delay(AudioResampler, AudioSampleRate);
                var outputSampleCount = (int)ffmpeg.av_rescale_rnd(outputDelay + inputSampleCount, AudioOutputSampleRate, AudioSampleRate, AVRounding.AV_ROUND_UP);

                var outputLineSize  = outputSampleCount * (this.AudioOutputBitsPerSample / 8);
                var maxBufferLength = outputLineSize * Constants.AudioOutputChannelCount;

                bufferPtr = System.Runtime.InteropServices.Marshal.AllocHGlobal(maxBufferLength);
                var bufferNativePtr = (sbyte *)bufferPtr.ToPointer();

                var convertSampleCount = ffmpeg.swr_convert(AudioResampler, &bufferNativePtr, outputSampleCount, DecodedWaveHolder->extended_data, inputSampleCount);
                var outputBufferLength = ffmpeg.av_samples_get_buffer_size(&outputLineSize, Constants.AudioOutputChannelCount, convertSampleCount, Constants.AudioOutputSampleFormat, 1);

                if (outputBufferLength < 0)
                {
                    return null;
                }

                audioBuffer = new byte[outputBufferLength];
                System.Runtime.InteropServices.Marshal.Copy(bufferPtr, audioBuffer, 0, audioBuffer.Length);
            }
            finally
            {
                if (bufferPtr != IntPtr.Zero)
                {
                    System.Runtime.InteropServices.Marshal.FreeHGlobal(bufferPtr);
                }
            }

            // Create the managed audio frame
            var mediaFrame = new FFmpegMediaFrame()
            {
                AudioBuffer        = audioBuffer,
                Duration           = Helper.TimestampToSeconds(DecodedWaveHolder->pkt_duration, InputAudioStream->time_base),
                CodedPictureNumber = -1,
                Flags       = FFmpegMediaFrameFlags.None,
                PictureType = FFmpegPictureType.None,
                StartTime   = Helper.TimestampToSeconds(DecodedWaveHolder->best_effort_timestamp, InputAudioStream->time_base),
                StreamIndex = InputAudioStream->index,
                Timestamp   = DecodedWaveHolder->best_effort_timestamp,
                Type        = MediaFrameType.Audio
            };

            return mediaFrame;
        }
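To make the sizing arithmetic above concrete, here is a worked example with hypothetical numbers (48 kHz input, 44.1 kHz output, a 1152-sample frame, a 16-sample resampler delay, 16-bit stereo); the formulas are the ones the method uses.

        //   outputSampleCount = ceil((delay + inputSamples) * outRate / inRate)
        //                     = ceil((16 + 1152) * 44100 / 48000) = ceil(1073.1) = 1074
        //   outputLineSize    = 1074 * (16 / 8)   = 2148 bytes per channel
        //   maxBufferLength   = 2148 * 2 channels = 4296 bytes allocated
        // swr_convert may produce fewer samples than this worst case, so
        // av_samples_get_buffer_size reports the byte count actually written.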
        /// <summary>
        /// Pulls the next available frame. This does not queue the frame in either the video or audio queue.
        /// Please keep in mind that you will need to manually call the Release() method on the returned object
        /// when you are done with it. If working with media caches, the cache will release the frame automatically.
        /// </summary>
        /// <returns>The next decoded media frame, or null when the end of the stream is reached.</returns>
        /// <exception cref="System.Exception">Error while decoding frame.</exception>
        private FFmpegMediaFrame PullMediaFrame()
        {
            // Set up the holding packet
            var readingPacket = new AVPacket();

            ffmpeg.av_init_packet(&readingPacket);
            var readFrameResult = Constants.SuccessCode;
            FFmpegMediaFrame mediaFrameToReturn = null;
            var emptyPacket     = false;
            var receivedFrame   = false;
            var attemptDecoding = false;
            var isVideoPacket   = false;
            var isAudioPacket   = false;

            while (readFrameResult == Constants.SuccessCode || readFrameResult == Constants.EndOfFileErrorCode)
            {
                readFrameResult = ffmpeg.av_read_frame(InputFormatContext, &readingPacket);

                // At end of file av_read_frame returns no data; the resulting empty packet
                // is still fed to the decoder below so it can flush any buffered frames.
                emptyPacket     = readFrameResult == Constants.EndOfFileErrorCode;
                attemptDecoding = (readFrameResult >= Constants.SuccessCode || readFrameResult == Constants.EndOfFileErrorCode);
                isVideoPacket   = HasVideo && readingPacket.stream_index == InputVideoStream->index;
                isAudioPacket   = HasAudio && readingPacket.stream_index == InputAudioStream->index;

                if (attemptDecoding)
                {
                    if (isVideoPacket)
                    {
                        receivedFrame = this.FillDecodedPictureHolderFrame(&readingPacket, emptyPacket);
                        if (receivedFrame)
                        {
                            mediaFrameToReturn = CreateMediaFrameFromDecodedPictureHolder();
                            break;
                        }
                    }
                    else if (isAudioPacket)
                    {
                        receivedFrame = this.FillDecodedWaveHolderFrame(&readingPacket, emptyPacket);
                        if (receivedFrame)
                        {
                            mediaFrameToReturn = CreateMediaFrameFromDecodedWaveHolder();
                            break;
                        }
                    }
                }

                if (receivedFrame == false && readFrameResult == Constants.EndOfFileErrorCode)
                {
                    mediaFrameToReturn = null;
                    break;
                }
            }

            IsAtEndOfStream = readFrameResult == Constants.EndOfFileErrorCode && mediaFrameToReturn == null;
            return mediaFrameToReturn;
        }
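A hedged consumption sketch follows. The loop itself and the audio cache name are placeholders, but the shape (pull until null, dispatch on frame type, signal waiters) follows from this method's contract and from the members the other examples use.

        // Illustrative only: ExtractFramesLoop and AudioFramesCache are hypothetical.
        private void ExtractFramesLoop()
        {
            while (IsCancellationPending == false)
            {
                var frame = PullMediaFrame();
                if (frame == null)
                    break;   // null is only returned once the end of stream is reached

                if (frame.Type == MediaFrameType.Video)
                    VideoFramesCache.Add(frame);    // caches release frames automatically
                else
                    AudioFramesCache.Add(frame);

                MediaFramesExtractedDone.Set();     // wake waiters such as RenderVideoImage
            }
        }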
        /// <summary>
        /// Waits for the frame extractor to be ready for playback.
        /// Returns true if successful, false if it timed out.
        /// </summary>
        private bool WaitForPlaybackReadyState()
        {
            RealtimeClock.Pause();

            var renderTime = RealtimeClock.PositionSeconds;
            var startTime  = DateTime.UtcNow;
            var cycleCount = -1;
            FFmpegMediaFrame playbackFrame = null;

            while (IsCancellationPending == false)
            {
                if (DateTime.UtcNow.Subtract(startTime) > Constants.WaitForPlaybackReadyStateTimeout)
                {
                    ErrorOccurredCallback(this, new MediaPlaybackException(MediaPlaybackErrorSources.WaitForPlaybackReadyState,
                                                                           MediaPlaybackErrorCode.WaitForPlaybackTimedOut,
                                                                           string.Format("Waiting for playback ready state @ {0:0.000} timed Out in {1} cycles", renderTime, cycleCount)));
                    return(false);
                }

                cycleCount++;

                // Wait for a decoding cycle.
                MediaFramesExtractedDone.Wait(Constants.FrameExtractorWaitMs);
                renderTime    = RealtimeClock.PositionSeconds;
                playbackFrame = PrimaryFramesCache.GetFrame(renderTime, CheckFrameBounds);

                if (playbackFrame == null && PrimaryFramesCache.Count > 0)
                {
                    playbackFrame = PrimaryFramesCache.FirstFrame;
                    RealtimeClock.PositionSeconds = playbackFrame.StartTime;
                }

                if (playbackFrame != null)
                {
                    break;
                }
            }

            // Do some additional logging
            System.Diagnostics.Debug.WriteLineIf(
                cycleCount >= 0,
                string.Format("WaitForPlaybackReadyState @ {0:0.000} = {1} cycles. Leading Frames: {2}, Frame Index: {3}, Frame Start: {4}",
                              renderTime,
                              cycleCount,
                              PrimaryFramesCache.Count,
                              PrimaryFramesCache.IndexOf(playbackFrame),
                              (playbackFrame != null ?
                               playbackFrame.StartTime.ToString("0.000") : "NULL")));

            return true;
        }
        /// <summary>
        /// Converts the most recently decoded picture to the BGR24 output format
        /// and wraps the native buffer in a managed video frame.
        /// </summary>
        private FFmpegMediaFrame CreateMediaFrameFromDecodedPictureHolder()
        {
            // Create the output picture. Once the DecodedPictureHolder has the frame in YUV,
            // the SWS API is used to convert it to BGR24 for on-screen display.
            var outputPicture       = (AVPicture *)ffmpeg.av_frame_alloc();
            var outputPictureBuffer = (sbyte *)ffmpeg.av_malloc((uint)OutputPictureBufferLength);

            ffmpeg.avpicture_fill(outputPicture, outputPictureBuffer, Constants.VideoOutputPixelFormat, VideoFrameWidth, VideoFrameHeight);

            // convert the colorspace from (typically) YUV to BGR24
            sbyte **sourceScan0 = &DecodedPictureHolder->data0;
            sbyte **targetScan0 = &outputPicture->data0;

            ffmpeg.sws_scale(
                VideoResampler, sourceScan0, DecodedPictureHolder->linesize, 0,
                VideoFrameHeight, targetScan0, outputPicture->linesize);

            // Compute data size and data pointer (stride and scan0, respectively)
            var imageStride   = outputPicture->linesize[0];
            var imageDataSize = Convert.ToUInt32(VideoFrameHeight * imageStride);
            var imageDataPtr  = new IntPtr(outputPicture->data0);

            // Create a MediaFrame object with the info we have -- we will return this
            var mediaFrame = new FFmpegMediaFrame()
            {
                Picture             = outputPicture,
                PictureBuffer       = outputPictureBuffer,
                PictureBufferPtr    = imageDataPtr,
                PictureBufferLength = imageDataSize,
                StartTime           = Helper.TimestampToSeconds(DecodedPictureHolder->best_effort_timestamp, InputVideoStream->time_base),
                Flags              = (FFmpegMediaFrameFlags)DecodedPictureHolder->flags,
                PictureType        = (FFmpegPictureType)DecodedPictureHolder->pict_type,
                CodedPictureNumber = DecodedPictureHolder->coded_picture_number,
                Duration           = Helper.TimestampToSeconds(DecodedPictureHolder->pkt_duration, InputVideoStream->time_base),
                Timestamp          = DecodedPictureHolder->best_effort_timestamp,
                Type        = MediaFrameType.Video,
                StreamIndex = InputVideoStream->index
            };

            return mediaFrame;
        }
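Since the class implements IDisposable (see the top of this page) and a video frame owns two native allocations made above (the AVFrame behind the AVPicture pointer and the av_malloc'd pixel buffer), Dispose must return both to FFmpeg. A minimal sketch, assuming the member names above; the library's actual cleanup may differ.

        // A sketch of the implied cleanup, not the library's verbatim Dispose.
        public unsafe void Dispose()
        {
            if (PictureBuffer != null)
            {
                ffmpeg.av_free(PictureBuffer);      // buffer came from ffmpeg.av_malloc
                PictureBuffer = null;
            }

            if (Picture != null)
            {
                var frame = (AVFrame*)Picture;      // it was allocated as an AVFrame
                ffmpeg.av_frame_free(&frame);
                Picture = null;
            }

            PictureBufferPtr = IntPtr.Zero;
        }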