/// <summary>
/// Replaces the internally-held frames with the specified new frames.
/// All frames currently held are released; the new frames are then adopted.
/// </summary>
/// <param name="newFrames">The cache whose frames replace the current contents.</param>
/// <exception cref="System.IndexOutOfRangeException">Buffer does not support the capacity of new elements</exception>
public void Replace(FFmpegMediaFrameCache newFrames)
{
    if (newFrames.Count > Capacity)
    {
        throw new IndexOutOfRangeException("Buffer does not support the capacity of new elements");
    }

    // Validate the incoming frames BEFORE releasing the current ones.
    // Previously a type mismatch was detected mid-copy, after the old frames
    // had already been released and cleared, leaving the cache half-populated
    // and skipping RecomputeProperties().
    foreach (var frame in newFrames.Frames)
    {
        if (frame.Type != this.Type)
        {
            this.ThrowInvalidFrameTypeException(frame.Type);
        }
    }

    try
    {
        // Release the currently-held frames back to their pool/queue.
        foreach (var frame in Frames)
        {
            frame.EnqueueRelease();
        }

        Frames.Clear();

        // Adopt the validated new frames.
        foreach (var frame in newFrames.Frames)
        {
            Frames.Add(frame);
        }
    }
    finally
    {
        // Keep derived properties (count, first/last frame times) consistent
        // even if a frame release throws.
        RecomputeProperties();
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="FFmpegMediaFrameCache"/> class
/// by copying the configuration (frame type and capacity) of an existing cache.
/// The frames themselves are intentionally NOT copied.
/// </summary>
/// <param name="otherCache">The cache whose configuration is copied.</param>
public FFmpegMediaFrameCache(FFmpegMediaFrameCache otherCache)
{
    // Snapshot the capacity first, then mirror the frame type.
    var copiedCapacity = otherCache.Capacity;
    Type = otherCache.Type;
    Capacity = copiedCapacity;

    // Derive the dependent properties for the (empty) cache.
    RecomputeProperties();
}
/// <summary>
/// Initializes a new instance of the <see cref="FFmpegMedia"/> class:
/// validates arguments, loads the FFmpeg input/codec contexts, sets up the
/// video/audio frame caches and renderers, starts the background frame
/// extractor thread, waits for the first frames, then starts the UI render timer.
/// </summary>
/// <param name="filePath">The file path.</param>
/// <param name="errorCallback">The error callback.</param>
/// <exception cref="System.ArgumentException">
/// errorCallback cannot be null
/// or
/// filePath cannot be null or empty
/// </exception>
/// <exception cref="System.Exception">Thrown when frames cannot be extracted within the configured timeout.</exception>
public FFmpegMedia(string filePath, MediaErrorOccurredCallback errorCallback)
{
    // Argument validation
    if (errorCallback == null) { throw new ArgumentException("errorCallback cannot be null"); }
    if (string.IsNullOrWhiteSpace(filePath)) { throw new ArgumentException("filePath cannot be null or empty"); }

    // Error callback, invoked on playback errors from worker code
    this.ErrorOccurredCallback = errorCallback;

    // Register the property state change handler: clock changes surface as play-state changes
    this.RealtimeClock.PropertyChanged += (s, e) => { NotifyPlayStateChanged(); };

    // Make sure we registered the FFmpeg library (presumably idempotent -- confirm)
    Helper.RegisterFFmpeg();

    // Create the audio provider and audio renderer
    this.PcmAudioProvider = new AudioBufferProvider(this);
    this.AudioRenderer = new AudioRenderer();

    // load input, codec and output contexts
    this.InitializeMedia(filePath);

    // Setup the frames Cache.
    // NOTE(review): video capacity = frames per second; audio capacity =
    // samples per millisecond -- looks like roughly 1 second of media each; confirm.
    this.VideoFramesCache = new FFmpegMediaFrameCache(this.VideoFrameRate, MediaFrameType.Video);
    this.AudioFramesCache = new FFmpegMediaFrameCache(this.AudioSampleRate / 1000M, MediaFrameType.Audio);

    // Setup the Leading and Lagging frames cache. The leading stream is the one
    // with the lower stream index (video wins ties); it drives seeking and StartDts.
    if (HasVideo && (HasAudio == false || InputAudioStream->index > InputVideoStream->index))
    {
        this.LeadingFramesCache = VideoFramesCache;
        this.LaggingFramesCache = AudioFramesCache;
        this.StartDts = InputVideoStream->start_time;
        LeadingStreamType = MediaFrameType.Video;
        LaggingStreamType = HasAudio ? MediaFrameType.Audio : MediaFrameType.Unknown;
    }
    else
    {
        this.LeadingFramesCache = AudioFramesCache;
        this.LaggingFramesCache = VideoFramesCache;
        this.StartDts = InputAudioStream->start_time;
        LeadingStreamType = MediaFrameType.Audio;
        LaggingStreamType = HasVideo ? MediaFrameType.Video : MediaFrameType.Unknown;
    }

    // Normalize FFmpeg's AV_NOPTS sentinel to a concrete zero start DTS
    if (Helper.IsNoPtsValue(StartDts)) { StartDts = 0; }

    // Setup Video Renderer and Video Frames Cache.
    // A 1x1 bitmap is used as a placeholder when there is no video stream.
    if (HasVideo)
    {
        this.VideoRenderer = new WriteableBitmap(this.VideoFrameWidth, this.VideoFrameHeight, 96, 96, System.Windows.Media.PixelFormats.Bgr24, null);
    }
    else
    {
        this.VideoRenderer = new WriteableBitmap(1, 1, 96, 96, System.Windows.Media.PixelFormats.Bgr24, null);
    }

    // Setup Audio Renderer and Audio Frames Cache
    if (HasAudio)
    {
        this.StartAudioRenderer();
    }

    // Start the continuous Decoder thread that fills up our queue.
    MediaFrameExtractorThread = new Thread(ExtractMediaFramesContinuously)
    {
        IsBackground = true,
        Priority = ThreadPriority.AboveNormal
    };

    // Begin the media extractor and block until the first frames are buffered
    // (or fail fast if the stream cannot be read in time).
    MediaFrameExtractorThread.Start();
    MediaFramesExtractedDone.Reset();
    if (MediaFramesExtractedDone.Wait(Constants.WaitForPlaybackReadyStateTimeout) == false)
    {
        throw new Exception(string.Format("Could not load sream frames in a timely manner. Timed out in {0}", Constants.WaitForPlaybackReadyStateTimeout));
    }

    // Initialize the Speed Ratio to 1.0 (Default)
    this.SpeedRatio = Constants.DefaultSpeedRatio;

    // Start the render timer on the UI thread.
    this.VideoRenderTimer.Tick += RenderVideoImage;
    this.VideoRenderTimer.Interval = TimeSpan.FromMilliseconds(Constants.VideoRenderTimerIntervalMillis);
    this.VideoRenderTimer.IsEnabled = true;
    this.VideoRenderTimer.Start();
}
/// <summary>
/// Seeks the input to the given render time. For live streams this only snaps the
/// clock to the first buffered frame. Otherwise it repeatedly calls av_seek_frame
/// (backing off by SeekOffsetSeconds on failure) and re-buffers leading/lagging
/// frames into temporary caches, which atomically replace the live caches on success.
/// </summary>
/// <param name="renderTime">The render time, in seconds, to seek to.</param>
private void InternalSeekInput(decimal renderTime)
{
    // Live streams cannot be seeked; just re-anchor the clock to what we have.
    if (IsLiveStream)
    {
        if (LeadingFramesCache.IsEmpty == false)
        {
            RealtimeClock.Seek(LeadingFramesCache.FirstFrameTime);
        }
        return;
    }

#if DEBUG
    var seekStopwatch = new System.Diagnostics.Stopwatch();
    seekStopwatch.Start();
#endif

    // Clamp the requested time and move the clock there immediately.
    if (renderTime < StartTime)
    {
        renderTime = StartTime;
    }
    RealtimeClock.Seek(renderTime);

    // Seek slightly before the target (seekOffsetLength) and discard frames older
    // than maxSeekStartTime while re-buffering.
    var allowedThreshold = Constants.SeekThresholdSeconds;
    var seekOffsetLength = Constants.SeekOffsetSeconds;
    var seekTime = renderTime - seekOffsetLength;
    var maxSeekStartTime = seekTime - allowedThreshold;

    // Scratch caches configured like the live ones; swapped in only on success.
    var bufferedLeadingFrames = new FFmpegMediaFrameCache(LeadingFramesCache);
    var bufferedLaggingFrames = new FFmpegMediaFrameCache(LaggingFramesCache);

    var outerLoopCount = 0;
    var innerLoopCount = 0;
    var frameReleaseCount = 0;
    var doSeekInStream = true;
    var doSeekByPullingFrames = true;
    var seekFlag = 0;
    var seekFrameResult = 0;
    var startTime = DateTime.UtcNow; // NOTE(review): unused -- candidate for removal
    var lastFailedTimestamp = long.MinValue;
    var seekToLastFrame = false;

    // Seek against the leading stream's time base and index.
    var seekTimeBase = LeadingFramesCache.Type == MediaFrameType.Video ? InputVideoStream->time_base : InputAudioStream->time_base;
    var seekStreamIndex = LeadingFramesCache.Type == MediaFrameType.Video ? InputVideoStream->index : InputAudioStream->index;
    var leadingFrameIndex = -1;

    try
    {
        // Outer loop: one av_seek_frame attempt per iteration, backing off earlier
        // in the stream each time the buffered result does not cover the target.
        while (doSeekInStream)
        {
            outerLoopCount++;
            if (seekTime < StartTime)
            {
                seekTime = StartTime;
            }

            // Already failed at the very first DTS: nothing earlier to try. Bail out.
            if (lastFailedTimestamp == StartDts)
            {
                if (LeadingFramesCache.IsEmpty == false)
                {
                    RealtimeClock.Seek(LeadingFramesCache.FirstFrameTime);
                }

                ErrorOccurredCallback(this, new MediaPlaybackException(MediaPlaybackErrorSources.InternalSeekInput, MediaPlaybackErrorCode.SeekFailedCritical,
                    string.Format("Target Postion @ {0:0.000}s has already failed to seek. 
First DTS {1} also failed and will not retry.", seekTime, StartDts)));
                return;
            }

            var targetTimestamp = Helper.SecondsToTimestamp(seekTime, seekTimeBase);

            // This timestamp failed before: retry once more from the stream start DTS.
            if (lastFailedTimestamp == targetTimestamp)
            {
                ErrorOccurredCallback(this, new MediaPlaybackException(MediaPlaybackErrorSources.InternalSeekInput, MediaPlaybackErrorCode.SeekFailedWillRetry,
                    string.Format("Target Postion @ {0:0.000}s has already failed to seek. Target timestamp will now be First DTS {1}.", seekTime, StartDts)));
                targetTimestamp = StartDts;
            }

            // Seek backward when moving to an earlier position (or to the start).
            seekFlag = (seekTime < renderTime || seekTime <= StartTime ? (int)ffmpeg.AVSEEK_FLAG_BACKWARD : 0) | 0; // FFmpegInvoke.AVSEEK_FLAG_ANY;
            //seekFlag = FFmpegInvoke.AVSEEK_FLAG_BACKWARD; // | FFmpegInvoke.AVSEEK_FLAG_ANY;
            seekFrameResult = ffmpeg.av_seek_frame(InputFormatContext, seekStreamIndex, targetTimestamp, seekFlag); // significantly faster than seek_file
            //seekFrameResult = FFmpegInvoke.avformat_seek_file(InputFormatContext, streamIndex, long.MinValue, targetTimestamp, long.MaxValue, seekFlag);

            if (seekFrameResult < Constants.SuccessCode)
            {
                // FFmpeg refused the seek: re-anchor the clock and report the error.
                if (LeadingFramesCache.IsEmpty == false)
                {
                    RealtimeClock.Seek(LeadingFramesCache.FirstFrameTime);
                }

                var errorMessage = Helper.GetFFmpegErrorMessage(seekFrameResult);
                ErrorOccurredCallback(this, new MediaPlaybackException(MediaPlaybackErrorSources.InternalSeekInput, MediaPlaybackErrorCode.SeekFailedFFmpeg,
                    string.Format("FFmpeg av_seek_frame @ {1:0.000}: Failed with error code {0}. 
{2}", seekFrameResult, seekTime, errorMessage)));
                return;
            }
            else
            {
                // Seek succeeded: flush stale decoder state before pulling frames.
                if (VideoCodecContext != null)
                {
                    ffmpeg.avcodec_flush_buffers(VideoCodecContext);
                }
                if (AudioCodecContext != null)
                {
                    ffmpeg.avcodec_flush_buffers(AudioCodecContext);
                }
            }

            // Reset per-attempt buffering state.
            leadingFrameIndex = -1;
            bufferedLeadingFrames.Clear();
            bufferedLaggingFrames.Clear();
            doSeekInStream = false;
            doSeekByPullingFrames = true;

            // Inner loop: pull decoded frames until the target is covered or a
            // re-seek becomes necessary.
            while (doSeekByPullingFrames)
            {
                innerLoopCount++;
                var frame = this.PullMediaFrame();
                if (frame != null)
                {
                    // Frames well before the window are wasted decode work; release them.
                    if (frame.StartTime < maxSeekStartTime)
                    {
                        frame.EnqueueRelease();
                        frameReleaseCount++;
                        continue;
                    }

                    if (frame.Type == bufferedLeadingFrames.Type)
                    {
                        leadingFrameIndex++;

                        // The very first leading frame landed AFTER the window start:
                        // the seek overshot; back off and try an earlier timestamp.
                        if (leadingFrameIndex == 0 && frame.Type == bufferedLeadingFrames.Type && frame.StartTime - frame.Duration > maxSeekStartTime && maxSeekStartTime > 0M)
                        {
                            seekTime -= seekOffsetLength;
                            frame.EnqueueRelease();
                            doSeekInStream = true;
                            lastFailedTimestamp = targetTimestamp;
                            break;
                        }

                        // We are Full minus 1 at this point. We'll stop buffering
                        if (bufferedLeadingFrames.Count >= bufferedLeadingFrames.Capacity - 1)
                        {
                            doSeekByPullingFrames = false;
                        }

                        bufferedLeadingFrames.Add(frame);
                    }
                    else if (frame.Type == bufferedLaggingFrames.Type)
                    {
                        // add the lagging frame no matter what (drop oldest when full)
                        if (bufferedLaggingFrames.IsFull)
                        {
                            bufferedLaggingFrames.RemoveFirst();
                        }
                        bufferedLaggingFrames.Add(frame);
                    }

                    // Find out if we have the frame covering the requested render time
                    var seekFrameIndex = bufferedLeadingFrames.IndexOf(renderTime, true);
                    var minimumFrameCount = (seekFrameIndex - 1) * 2;

                    // if we have more than enough frames in the buffer or we have reached a full or end condition, stop buffering frames
                    if (seekFrameIndex > 0)
                    {
                        if (bufferedLeadingFrames.Count >= minimumFrameCount || bufferedLeadingFrames.IsFull || IsAtEndOfStream)
                        {
                            doSeekByPullingFrames = false;
                        }
                    }
                }

                // We're already past the end of the stream; the natural duration was wrong
                // for the leading frames cache. Back off and snap to the last real frame.
                if (IsAtEndOfStream && bufferedLeadingFrames.Count <= 0)
                {
                    doSeekInStream = true;
                    seekTime -= seekOffsetLength;
                    maxSeekStartTime = seekTime - allowedThreshold;
                    seekToLastFrame = true;
                }

                if (doSeekInStream)
                {
                    break;
                }

                // Success: swap the scratch buffers into the live caches atomically.
                if (doSeekByPullingFrames == false || IsAtEndOfStream)
                {
                    LeadingFramesCache.Replace(bufferedLeadingFrames);
                    LaggingFramesCache.Replace(bufferedLaggingFrames);

                    if (seekToLastFrame && LeadingFramesCache.Count > 0)
                    {
                        RealtimeClock.Seek(LeadingFramesCache.LastFrameTime);
                    }

                    return;
                }
            }
        }
    }
    finally
    {
#if DEBUG
        seekStopwatch.Stop();
        SeekTimes.Add(seekStopwatch.ElapsedMilliseconds);
        InnerLoopCounts.Add(innerLoopCount);
        System.Diagnostics.Debug.WriteLine("Seek @ {6:0.000} = Long: {0:00}\t Short: {1:000}\t Short (AVG): {2:0.000}\t Waste Count: {3:000}\t Elapsed: {4}\tElapsed (AVG): {5:0.000}", outerLoopCount, innerLoopCount, InnerLoopCounts.Average(), frameReleaseCount, seekStopwatch.ElapsedMilliseconds, SeekTimes.Average(), renderTime);
#endif
    }
}
/// <summary>
/// Replaces the internally-held frames with the specified new frames.
/// Thread-safe: the swap happens under <c>SyncLock</c>.
/// </summary>
/// <param name="newFrames">The cache whose frames replace the current contents.</param>
/// <exception cref="System.IndexOutOfRangeException">Buffer does not support the capacity of new elements</exception>
public void Replace(FFmpegMediaFrameCache newFrames)
{
    lock (SyncLock)
    {
        if (newFrames.Count > Capacity)
            throw new IndexOutOfRangeException("Buffer does not support the capacity of new elements");

        // Validate the incoming frames BEFORE releasing the current ones.
        // Previously a type mismatch was detected mid-copy, after the old
        // frames had already been released and cleared, leaving the cache
        // half-populated.
        foreach (var frame in newFrames.Frames)
        {
            if (frame.Type != this.Type)
                this.ThrowInvalidFrameTypeException(frame.Type);
        }

        try
        {
            // Release the currently-held frames back to their pool/queue.
            foreach (var frame in Frames)
                frame.EnqueueRelease();

            Frames.Clear();

            // Adopt the validated new frames.
            foreach (var frame in newFrames.Frames)
                Frames.Add(frame);
        }
        finally
        {
            // Keep derived properties consistent even if a release throws.
            RecomputeProperties();
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="FFmpegMediaFrameCache"/> class
/// configured like an existing cache: same frame type and same capacity.
/// No frames are copied -- the new cache starts empty.
/// </summary>
/// <param name="otherCache">The template cache to mirror.</param>
public FFmpegMediaFrameCache(FFmpegMediaFrameCache otherCache)
{
    // Read the template's capacity up front, then mirror its type.
    var templateCapacity = otherCache.Capacity;
    this.Type = otherCache.Type;
    this.Capacity = templateCapacity;

    // Refresh derived properties for the freshly-created (empty) cache.
    RecomputeProperties();
}
/// <summary>
/// Initializes a new instance of the <see cref="FFmpegMedia" /> class:
/// validates arguments, loads the FFmpeg input/codec contexts (optionally with
/// an HTTP referer and user agent), sets up the frame caches and renderers,
/// starts the background frame extractor, waits for the first frames, then
/// starts the UI render timer.
/// </summary>
/// <param name="filePath">The file path.</param>
/// <param name="errorCallback">The error callback.</param>
/// <param name="referer">The referer. Leave null or emtpy to skip setting it.</param>
/// <param name="userAgent">The user agent. Leave null or empty in order to skip setting a User Agent</param>
/// <exception cref="System.ArgumentException">errorCallback cannot be null
/// or
/// filePath cannot be null or empty</exception>
/// <exception cref="System.Exception">Thrown when frames cannot be extracted within the configured timeout.</exception>
public FFmpegMedia(string filePath, MediaErrorOccurredCallback errorCallback, string referer, string userAgent)
{
    // Argument validation
    if (errorCallback == null)
        throw new ArgumentException("errorCallback cannot be null");
    if (string.IsNullOrWhiteSpace(filePath))
        throw new ArgumentException("filePath cannot be null or empty");

    // Error callback, invoked on playback errors from worker code
    this.ErrorOccurredCallback = errorCallback;

    // Register the property state change handler: clock changes surface as play-state changes
    this.RealtimeClock.PropertyChanged += (s, e) => { NotifyPlayStateChanged(); };

    // Make sure we registered the FFmpeg library (presumably idempotent -- confirm)
    Helper.RegisterFFmpeg();

    // Create the audio provider and audio renderer
    this.PcmAudioProvider = new AudioBufferProvider(this);
    this.AudioRenderer = new AudioRenderer();

    // load input, codec and output contexts; referer/userAgent are passed through
    // for network inputs
    this.InitializeMedia(filePath, null, referer, userAgent);

    // Setup the frames Cache, sized by FramesCacheLength.
    // NOTE(review): audio capacity divides sample rate by TotalMilliseconds while
    // video multiplies frame rate by TotalSeconds -- verify the audio formula is intended.
    this.VideoFramesCache = new FFmpegMediaFrameCache(this.VideoFrameRate * (int)FramesCacheLength.TotalSeconds, MediaFrameType.Video);
    this.AudioFramesCache = new FFmpegMediaFrameCache(this.AudioSampleRate / (int)FramesCacheLength.TotalMilliseconds, MediaFrameType.Audio);

    // Setup the Leading and Lagging frames cache. The primary (leading) stream is
    // the one with the lower stream index (video wins ties); it drives StartDts.
    if (HasVideo && (HasAudio == false || InputAudioStream->index > InputVideoStream->index))
    {
        this.PrimaryFramesCache = VideoFramesCache;
        this.SecondaryFramesCache = AudioFramesCache;
        this.StartDts = InputVideoStream->start_time;
        LeadingStreamType = MediaFrameType.Video;
        LaggingStreamType = HasAudio ? MediaFrameType.Audio : MediaFrameType.Unknown;
    }
    else
    {
        this.PrimaryFramesCache = AudioFramesCache;
        this.SecondaryFramesCache = VideoFramesCache;
        this.StartDts = InputAudioStream->start_time;
        LeadingStreamType = MediaFrameType.Audio;
        LaggingStreamType = HasVideo ? MediaFrameType.Video : MediaFrameType.Unknown;
    }

    // Normalize FFmpeg's AV_NOPTS sentinel to a concrete zero start DTS
    if (Helper.IsNoPtsValue(StartDts))
        StartDts = 0;

    // Setup Video Renderer and Video Frames Cache.
    // A 1x1 bitmap is used as a placeholder when there is no video stream.
    if (HasVideo)
        this.VideoRenderer = new WriteableBitmap(this.VideoFrameWidth, this.VideoFrameHeight, 96, 96, System.Windows.Media.PixelFormats.Bgr24, null);
    else
        this.VideoRenderer = new WriteableBitmap(1, 1, 96, 96, System.Windows.Media.PixelFormats.Bgr24, null);

    // Setup Audio Renderer and Audio Frames Cache
    if (HasAudio)
    {
        this.StartAudioRenderer();
    }

    // Start the continuous Decoder thread that fills up our queue.
    MediaFrameExtractorThread = new Thread(ExtractMediaFramesContinuously)
    {
        IsBackground = true,
        Priority = ThreadPriority.AboveNormal
    };

    // Begin the media extractor and block until the first frames are buffered
    // (or fail fast if the stream cannot be read in time).
    MediaFrameExtractorThread.Start();
    MediaFramesExtractedDone.Reset();
    if (MediaFramesExtractedDone.Wait(Constants.WaitForPlaybackReadyStateTimeout) == false)
    {
        throw new Exception(string.Format("Could not load sream frames in a timely manner. Timed out in {0}", Constants.WaitForPlaybackReadyStateTimeout));
    }

    // Initialize the Speed Ratio to 1.0 (Default)
    this.SpeedRatio = Constants.DefaultSpeedRatio;

    // Start the render timer on the UI thread.
    this.VideoRenderTimer.Tick += RenderVideoImage;
    this.VideoRenderTimer.Interval = TimeSpan.FromMilliseconds(Constants.VideoRenderTimerIntervalMillis);
    this.VideoRenderTimer.IsEnabled = true;
    this.VideoRenderTimer.Start();
}
/// <summary>
/// Seeks the input to the given render time. For live streams this only snaps the
/// clock to the first buffered frame. Otherwise it repeatedly calls av_seek_frame
/// (backing off by SeekOffsetSeconds on failure) and re-buffers primary/secondary
/// frames into scratch caches, which atomically replace the live caches on success.
/// </summary>
/// <param name="renderTime">The render time, in seconds, to seek to.</param>
private void InternalSeekInput(decimal renderTime)
{
    // Live streams cannot be seeked; just re-anchor the clock to what we have.
    if (IsLiveStream)
    {
        if (PrimaryFramesCache.IsEmpty == false)
            RealtimeClock.Seek(PrimaryFramesCache.FirstFrameTime);

        return;
    }

#if DEBUG
    var seekStopwatch = new System.Diagnostics.Stopwatch();
    seekStopwatch.Start();
#endif

    // Clamp the requested time and move the clock there immediately.
    if (renderTime < StartTime)
        renderTime = StartTime;

    RealtimeClock.Seek(renderTime);

    // Seek slightly before the target (seekOffsetLength) and discard frames older
    // than maxSeekStartTime while re-buffering.
    var allowedThreshold = Constants.SeekThresholdSeconds;
    var seekOffsetLength = Constants.SeekOffsetSeconds;
    var seekTime = renderTime - seekOffsetLength;
    var maxSeekStartTime = seekTime - allowedThreshold;

    // Scratch caches configured like the live ones; swapped in only on success.
    var bufferedLeadingFrames = new FFmpegMediaFrameCache(PrimaryFramesCache);
    var bufferedLaggingFrames = new FFmpegMediaFrameCache(SecondaryFramesCache);

    var outerLoopCount = 0;
    var innerLoopCount = 0;
    var frameReleaseCount = 0;
    var doSeekInStream = true;
    var doSeekByPullingFrames = true;
    var seekFlag = 0;
    var seekFrameResult = 0;
    var startTime = DateTime.UtcNow; // NOTE(review): unused -- candidate for removal
    var lastFailedTimestamp = long.MinValue;
    var seekToLastFrame = false;

    // Seek against the primary stream's time base and index.
    var seekTimeBase = PrimaryFramesCache.Type == MediaFrameType.Video ? InputVideoStream->time_base : InputAudioStream->time_base;
    var seekStreamIndex = PrimaryFramesCache.Type == MediaFrameType.Video ? InputVideoStream->index : InputAudioStream->index;
    var leadingFrameIndex = -1;

    try
    {
        // Outer loop: one av_seek_frame attempt per iteration, backing off earlier
        // in the stream each time the buffered result does not cover the target.
        while (doSeekInStream)
        {
            outerLoopCount++;
            if (seekTime < StartTime)
                seekTime = StartTime;

            // Already failed at the very first DTS: nothing earlier to try. Bail out.
            if (lastFailedTimestamp == StartDts)
            {
                if (PrimaryFramesCache.IsEmpty == false)
                    RealtimeClock.Seek(PrimaryFramesCache.FirstFrameTime);

                ErrorOccurredCallback(this, new MediaPlaybackException(MediaPlaybackErrorSources.InternalSeekInput, MediaPlaybackErrorCode.SeekFailedCritical,
                    string.Format("Target Postion @ {0:0.000}s has already failed to seek. 
First DTS {1} also failed and will not retry.", seekTime, StartDts)));
                return;
            }

            var targetTimestamp = Helper.SecondsToTimestamp(seekTime, seekTimeBase);

            // This timestamp failed before: retry once more from the stream start DTS.
            if (lastFailedTimestamp == targetTimestamp)
            {
                ErrorOccurredCallback(this, new MediaPlaybackException(MediaPlaybackErrorSources.InternalSeekInput, MediaPlaybackErrorCode.SeekFailedWillRetry,
                    string.Format("Target Postion @ {0:0.000}s has already failed to seek. Target timestamp will now be First DTS {1}.", seekTime, StartDts)));
                targetTimestamp = StartDts;
            }

            // Seek backward when moving to an earlier position (or to the start).
            seekFlag = (seekTime < renderTime || seekTime <= StartTime ? (int)ffmpeg.AVSEEK_FLAG_BACKWARD : 0) | 0; // FFmpegInvoke.AVSEEK_FLAG_ANY;
            //seekFlag = ffmpeg.AVSEEK_FLAG_BACKWARD; // | ffmpeg.AVSEEK_FLAG_ANY;
            seekFrameResult = ffmpeg.av_seek_frame(InputFormatContext, seekStreamIndex, targetTimestamp, seekFlag); // significantly faster than seek_file
            //seekFrameResult = ffmpeg.avformat_seek_file(InputFormatContext, seekStreamIndex, targetTimestamp - 2, targetTimestamp, targetTimestamp + 2, seekFlag);

            if (seekFrameResult < Constants.SuccessCode)
            {
                // FFmpeg refused the seek: re-anchor the clock and report the error.
                if (PrimaryFramesCache.IsEmpty == false)
                    RealtimeClock.Seek(PrimaryFramesCache.FirstFrameTime);

                var errorMessage = Helper.GetFFmpegErrorMessage(seekFrameResult);
                ErrorOccurredCallback(this, new MediaPlaybackException(MediaPlaybackErrorSources.InternalSeekInput, MediaPlaybackErrorCode.SeekFailedFFmpeg,
                    string.Format("FFmpeg av_seek_frame @ {1:0.000}: Failed with error code {0}. 
{2}", seekFrameResult, seekTime, errorMessage)));
                return;
            }
            else
            {
                // Seek succeeded: flush stale decoder state before pulling frames.
                if (VideoCodecContext != null)
                    ffmpeg.avcodec_flush_buffers(VideoCodecContext);

                if (AudioCodecContext != null)
                    ffmpeg.avcodec_flush_buffers(AudioCodecContext);
            }

            // Reset per-attempt buffering state.
            leadingFrameIndex = -1;
            bufferedLeadingFrames.Clear();
            bufferedLaggingFrames.Clear();
            doSeekInStream = false;
            doSeekByPullingFrames = true;

            // Inner loop: pull decoded frames until the target is covered or a
            // re-seek becomes necessary.
            while (doSeekByPullingFrames)
            {
                innerLoopCount++;
                var frame = this.PullMediaFrame();
                if (frame != null)
                {
                    // Frames well before the window are wasted decode work; release them.
                    if (frame.StartTime < maxSeekStartTime)
                    {
                        frame.EnqueueRelease();
                        frameReleaseCount++;
                        continue;
                    }

                    if (frame.Type == bufferedLeadingFrames.Type)
                    {
                        leadingFrameIndex++;

                        // The very first leading frame landed AFTER the window start:
                        // the seek overshot; back off and try an earlier timestamp.
                        if (leadingFrameIndex == 0 && frame.Type == bufferedLeadingFrames.Type && frame.StartTime - frame.Duration > maxSeekStartTime && maxSeekStartTime > 0M)
                        {
                            seekTime -= seekOffsetLength;
                            frame.EnqueueRelease();
                            doSeekInStream = true;
                            lastFailedTimestamp = targetTimestamp;
                            break;
                        }

                        // We are Full minus 1 at this point. We'll stop buffering
                        if (bufferedLeadingFrames.Count >= bufferedLeadingFrames.Capacity - 1)
                            doSeekByPullingFrames = false;

                        bufferedLeadingFrames.Add(frame);
                    }
                    else if (frame.Type == bufferedLaggingFrames.Type)
                    {
                        // add the lagging frame no matter what (drop oldest when full)
                        if (bufferedLaggingFrames.IsFull)
                            bufferedLaggingFrames.RemoveFirst();

                        bufferedLaggingFrames.Add(frame);
                    }

                    // Find out if we have the frame covering the requested render time
                    var seekFrameIndex = bufferedLeadingFrames.IndexOf(renderTime, true);
                    var minimumFrameCount = (seekFrameIndex - 1) * 2;

                    // if we have more than enough frames in the buffer or we have reached a full or end condition, stop buffering frames
                    if (seekFrameIndex > 0)
                        if (bufferedLeadingFrames.Count >= minimumFrameCount || bufferedLeadingFrames.IsFull || IsAtEndOfStream)
                            doSeekByPullingFrames = false;
                }

                // We're already past the end of the stream; the natural duration was wrong
                // for the leading frames cache. Back off and snap to the last real frame.
                if (IsAtEndOfStream && bufferedLeadingFrames.Count <= 0)
                {
                    doSeekInStream = true;
                    seekTime -= seekOffsetLength;
                    maxSeekStartTime = seekTime - allowedThreshold;
                    seekToLastFrame = true;
                }

                if (doSeekInStream)
                    break;

                // Success: swap the scratch buffers into the live caches atomically.
                if (doSeekByPullingFrames == false || IsAtEndOfStream)
                {
                    PrimaryFramesCache.Replace(bufferedLeadingFrames);
                    SecondaryFramesCache.Replace(bufferedLaggingFrames);

                    if (seekToLastFrame && PrimaryFramesCache.Count > 0)
                        RealtimeClock.Seek(PrimaryFramesCache.LastFrameTime);

                    return;
                }
            }
        }
    }
    finally
    {
#if DEBUG
        seekStopwatch.Stop();
        SeekTimes.Add(seekStopwatch.ElapsedMilliseconds);
        InnerLoopCounts.Add(innerLoopCount);
        System.Diagnostics.Debug.WriteLine("Seek @ {6:0.000} = Long: {0:00}\t Short: {1:000}\t Short (AVG): {2:0.000}\t Waste Count: {3:000}\t Elapsed: {4}\tElapsed (AVG): {5:0.000}", outerLoopCount, innerLoopCount, InnerLoopCounts.Average(), frameReleaseCount, seekStopwatch.ElapsedMilliseconds, SeekTimes.Average(), renderTime);
#endif
    }
}