public void play()
{
    // Starting playback is meaningless when already playing, and
    // impossible when no media is open.
    if (VideoState == VideoState.PLAYING || VideoState == VideoState.CLOSED)
    {
        return;
    }

    // Reset both sync clocks to "now" so A/V timing starts from this instant.
    audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();

    // Unblock the queues: packets, video frames and audio frames may flow again.
    videoDecoder.FrameQueue.setState(
        FrameQueue.FrameQueueState.PLAY,
        FrameQueue.FrameQueueState.PLAY,
        FrameQueue.FrameQueueState.PLAY);

    if (VideoState == VideoState.OPEN)
    {
        // First play after opening: spin up the demuxer and the refresh
        // timers for whichever streams the media actually has.
        startDemuxing();

        if (videoDecoder.HasVideo)
        {
            videoRefreshTimer.start();
        }

        if (videoDecoder.HasAudio)
        {
            audioRefreshTimer.start();
        }
    }

    VideoState = VideoState.PLAYING;
}
// Seeks to positionSeconds using the given keyframe mode, then restores the
// frame queue to a state matching the current playback state.
void seekFunc(double positionSeconds, VideoLib.VideoPlayer.SeekKeyframeMode mode)
{
    if (VideoState == VideoPlayerControl.VideoState.CLOSED)
    {
        return;
    }

    FrameQueue frameQueue = videoDecoder.FrameQueue;

    // Block video and audio decoding first, so no packets are in limbo
    // before any ffmpeg internal or external queues are flushed.
    frameQueue.setState(
        FrameQueue.FrameQueueState.BLOCK,
        FrameQueue.FrameQueueState.BLOCK,
        FrameQueue.FrameQueueState.BLOCK);

    bool seekSucceeded = videoDecoder.seek(positionSeconds, mode);

    if (seekSucceeded)
    {
        // Discard everything decoded before the seek point and
        // restart both sync clocks from "now".
        frameQueue.flush();
        audioPlayer.flush();

        audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();
    }

    switch (VideoState)
    {
        case VideoPlayerControl.VideoState.PLAYING:
            // Resume normal decoding.
            frameQueue.setState(
                FrameQueue.FrameQueueState.PLAY,
                FrameQueue.FrameQueueState.PLAY,
                FrameQueue.FrameQueueState.PLAY);
            break;

        case VideoPlayerControl.VideoState.PAUSED:
            // Stay paused but show the first frame at the new position.
            frameQueue.startSingleFrame();
            break;
    }
}
// Displays the next decoded video frame (or a paused/blank screen), computes
// the delay until the following frame, and re-arms videoRefreshTimer.
// Frames are dropped (via the restartvideo label) when the computed delay is
// too small to be worth scheduling.
void processVideoFrame()
{
    bool skipVideoFrame = false;

restartvideo:

    // Fallback delay (seconds) used when not actively playing a frame.
    double actualDelay = 0.04;

    // Scale the video into the render canvas and center it.
    Rectangle scaledVideoRec = ImageUtils.stretchRectangle(
        new Rectangle(0, 0, videoDecoder.Width, videoDecoder.Height),
        videoRender.Canvas);
    Rectangle canvas = ImageUtils.centerRectangle(videoRender.Canvas, scaledVideoRec);

    // Grab a decoded frame; returns null when the queue is paused or stopped
    // (the original comment claimed "returns false" — it returns a reference).
    VideoFrame videoFrame = videoDecoder.FrameQueue.getDecodedVideoFrame();

    if (VideoState == VideoState.CLOSED && videoFrame == null)
    {
        return;
    }
    else if (VideoState == VideoState.PLAYING && videoFrame != null)
    {
        // BUGFIX: added the "videoFrame != null" guard. The queue can return
        // null while the state is still PLAYING (queue paused/stopped between
        // the state check and the fetch), which previously dereferenced a
        // null frame here.
        videoPts = videoFrame.Pts;
        videoPtsDrift = videoFrame.Pts + HRTimer.getTimestamp();

        if (skipVideoFrame == false)
        {
            videoRender.display(videoFrame, canvas, Color.Black, VideoRender.RenderMode.NORMAL);
            videoDebug.VideoFrames = videoDebug.VideoFrames + 1;
        }

        actualDelay = synchronizeVideo(videoPts);
    }
    else if (VideoState == VideoState.PAUSED)
    {
        videoRender.display(null, canvas, Color.Black, VideoRender.RenderMode.PAUSED);
    }

    // do not update ui elements on main thread inside videoStateLock
    // or we can get a deadlock
    videoDebug.update();
    updateUI();

    if (actualDelay < 0.010)
    {
        // Delay too small: drop the next frame to catch up with the clock.
        skipVideoFrame = true;
        videoDebug.NrVideoFramesDropped = videoDebug.NrVideoFramesDropped + 1;

        goto restartvideo;
    }

    // Re-arm the timer with the delay (ms, rounded) for the next frame.
    videoRefreshTimer.Interval = (int)(actualDelay * 1000 + 0.5);
    videoRefreshTimer.start();
}
// Returns the current video clock position in seconds.
double getVideoClock()
{
    // While paused the clock is frozen at the last presented pts; otherwise
    // the running position is derived from the stored drift value.
    return VideoState == VideoState.PAUSED
        ? videoPts
        : videoPtsDrift - HRTimer.getTimestamp();
}
// Returns the current video clock position in seconds.
double getVideoClock()
{
    // The clock is frozen at the last presented pts while the video packet
    // queue is not open, or while the queue is (re)buffering.
    bool clockFrozen =
        videoDecoder.FrameQueue.VideoPacketQueueState != PacketQueue.PacketQueueState.OPEN ||
        videoDecoder.FrameQueue.IsBuffering;

    if (clockFrozen)
    {
        return videoPts;
    }

    // Otherwise derive the running position from the stored drift value.
    return videoPtsDrift - HRTimer.getTimestamp();
}
// Advances the audio clock by the duration of a frame of frameLength bytes
// and returns the wall-clock delay (seconds, possibly negative) to wait
// before playing the next frame.
double synchronizeAudio(int frameLength)
{
    // Bytes consumed per second of playback for the current audio format.
    int bytesPerSecond =
        audioPlayer.SamplesPerSecond * videoDecoder.BytesPerSample * videoDecoder.NrChannels;

    // Ideal duration of this frame in seconds.
    double frameDuration = frameLength / (double)bytesPerSecond;

    // Advance the audio clock, then compare it with the actual current time.
    audioFrameTimer += frameDuration;

    return audioFrameTimer - HRTimer.getTimestamp();
}
// Computes the wall-clock delay (seconds) before the next video frame should
// be shown, optionally nudging the delay to keep video locked to the audio
// clock (ffplay-style). Also publishes diagnostic values to videoDebug.
double synchronizeVideo(double videoPts)
{
    // Assume the delay to the next frame equals the spacing of the
    // previous two frames.
    double delay = videoPts - previousVideoPts;

    if (delay <= 0 || delay >= 1.0)
    {
        // Nonsensical spacing (e.g. after a seek or discontinuity):
        // fall back to the last good delay.
        delay = previousVideoDelay;
    }

    previousVideoPts = videoPts;
    previousVideoDelay = delay;

    if (videoDecoder.HasAudio && syncMode == SyncMode.VIDEO_SYNCS_TO_AUDIO)
    {
        // Synchronize video to audio: drop the wait (delay = 0) when video
        // lags, double it when video runs ahead.
        double diff = getVideoClock() - audioPlayer.getAudioClock();

        // Skip or repeat the frame. Take delay into account.
        // FFPlay still doesn't "know if this is the best guess."
        double sync_threshold = Math.Max(delay, AV_SYNC_THRESHOLD);

        // Beyond AV_NOSYNC_THRESHOLD the drift is hopeless; leave delay alone.
        if (Math.Abs(diff) < AV_NOSYNC_THRESHOLD)
        {
            if (diff <= -sync_threshold)
            {
                delay = 0;
            }
            else if (diff >= sync_threshold)
            {
                delay = 2 * delay;
            }
        }
    }

    // Advance the video clock and compare against the actual current time.
    videoFrameTimer += delay;
    double actualDelay = videoFrameTimer - HRTimer.getTimestamp();

    // Publish diagnostics.
    videoDebug.VideoDelay = delay;
    videoDebug.ActualVideoDelay = actualDelay;
    videoDebug.VideoSync = getVideoClock();
    videoDebug.AudioSync = audioPlayer.getAudioClock();
    videoDebug.VideoQueueSize = videoDecoder.FrameQueue.VideoPacketsInQueue;
    videoDebug.AudioQueueSize = videoDecoder.FrameQueue.AudioPacketsInQueue;

    return actualDelay;
}
// Timer callback: fetches the next decoded video frame, hands it to the
// display callback, computes the delay to the next frame and re-arms
// videoRefreshTimer. Frames are dropped (via restartvideo) when the delay
// is too small to schedule.
void videoRefreshTimer_Tick(Object sender, EventArgs e)
{
    bool skipVideoFrame = false;

restartvideo:

    // Fallback delay (seconds) used when not actively playing a frame.
    double actualDelay = 0.04;

    // Grab a decoded frame; returns null when the queue is paused or stopped
    // (the original comment claimed "returns false" — it returns a reference).
    VideoLib.VideoFrame videoFrame = videoDecoder.FrameQueue.getDecodedVideoFrame();

    if (VideoState == VideoState.CLOSED && videoFrame == null)
    {
        return;
    }
    else if (VideoState == VideoState.PLAYING && videoFrame != null)
    {
        // BUGFIX: added the "videoFrame != null" guard. The queue can return
        // null while the state is still PLAYING (queue paused/stopped between
        // the state check and the fetch), which previously dereferenced a
        // null frame here.
        videoPts = videoFrame.Pts;
        videoPtsDrift = videoFrame.Pts + HRTimer.getTimestamp();

        if (skipVideoFrame == false && displayVideoFrameCallback != null)
        {
            displayVideoFrameCallback(videoFrame);
        }

        actualDelay = synchronizeVideo(videoPts);
    }
    else if (VideoState == VideoState.PAUSED)
    {
        //videoRender.display(null, canvas, Color.Black, VideoRender.RenderMode.PAUSED);
    }

    updateObservableVariables();

    if (actualDelay < 0.010)
    {
        // Delay too small: drop the next frame to catch up with the clock.
        skipVideoFrame = true;
        //videoDebug.NrVideoFramesDropped = videoDebug.NrVideoFramesDropped + 1;

        goto restartvideo;
    }

    // Re-arm the timer with the delay (ms, rounded) for the next frame.
    videoRefreshTimer.Interval = (int)(actualDelay * 1000 + 0.5);
    videoRefreshTimer.start();
}
// Demux loop, run on a worker thread: reads packets one at a time and
// services seek requests in between, until demuxing fails (presumably
// EOF or error — confirm in VideoDecoder.demuxPacket) or cancellation
// is requested via token.
void demuxPackets(CancellationToken token)
{
    // Reset both sync clocks to "now" before demuxing starts.
    audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();

    bool success = true;

    // decode frames one by one, or handle seek requests
    do
    {
        if (seekRequest == true)
        {
            // wait for video and audio decoding to pause/block
            // To make sure no packets are in limbo
            // before flushing any ffmpeg internal or external queues.
            videoDecoder.FrameQueue.pause();

            if (videoDecoder.seek(seekPosition) == true)
            {
                // flush the framequeue and audioplayer buffer
                videoDecoder.FrameQueue.flush();
                audioPlayer.flush();

                // refill/buffer the framequeue from the new position
                fillFrameQueue();

                // Restart both sync clocks from the new position.
                audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();
            }

            // Clear the request before resuming, so the next iteration
            // goes back to demuxing.
            seekRequest = false;

            // allow video and audio decoding to continue
            videoDecoder.FrameQueue.start();
        }
        else
        {
            success = videoDecoder.demuxPacket();
        }

    } while (success == true && !token.IsCancellationRequested);
}
// Timer callback driving audio playback: fetches the next decoded audio
// frame, plays it, computes the delay until the next frame and re-arms
// audioRefreshTimer. Lagging frames are dropped (via restartaudio) when
// audio is the sync master.
void audioRefreshTimer_Tick(Object sender, EventArgs e)
{
restartaudio:

    // Fallback delay (seconds) used when no frame is played this tick.
    double actualDelay = 0.04;

    if (!videoDecoder.HasVideo)
    {
        // Audio-only media: the video tick never runs, so the observable
        // UI state is refreshed from here instead.
        updateObservableVariables();
    }

    // returns null when framequeue is paused or closed
    VideoLib.AudioFrame audioFrame = videoDecoder.FrameQueue.getDecodedAudioFrame();

    if (audioFrame == null)
    {
        // stop audio if playing
        audioPlayer.stop();

        if (VideoState == VideoState.CLOSED)
        {
            audioPlayer.flush();
            return;
        }

        // when paused spin idle (fall through and re-arm with the fallback delay)
    }
    else
    {
        if (audioPlayer.Status == SharpDX.DirectSound.BufferStatus.None)
        {
            // reset audio frame timer before (re)starting playing
            audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();
        }

        audioPts = audioFrame.Pts;
        audioDts = audioFrame.Dts;

        // if the audio is lagging behind too much, skip the buffer completely
        // NOTE(review): the 0.2s/3s window presumably distinguishes "catchable
        // lag" from a discontinuity (e.g. seek) — confirm the intent.
        double diff = getVideoClock() - audioFrame.Pts;

        if (diff > 0.2 && diff < 3 && syncMode == SyncMode.AUDIO_SYNCS_TO_VIDEO)
        {
            //log.Warn("dropping audio buffer, lagging behind: " + (getVideoClock() - audioFrame.Pts).ToString() + " seconds");
            goto restartaudio;
        }

        //adjustAudioSamplesPerSecond(audioFrame);
        adjustAudioLength(audioFrame);

        audioPlayer.play(audioFrame);

        int frameLength = audioFrame.Length;

        actualDelay = synchronizeAudio(frameLength);

        if (actualDelay < 0)
        {
            // delay too small, play next frame as quickly as possible
            goto restartaudio;
        }
    }

    // start timer with delay for next frame (ms, rounded)
    audioRefreshTimer.Interval = (int)(actualDelay * 1000 + 0.5);
    audioRefreshTimer.start();
}
// Timer callback driving video rendering: fetches the next decoded frame,
// renders it (or a paused/cleared screen), computes the delay to the next
// frame and re-arms videoRefreshTimer. Frames are dropped (via restartvideo)
// when the computed delay is too small to schedule.
void videoRefreshTimer_Tick(Object sender, EventArgs e)
{
    bool skipVideoFrame = false;

restartvideo:

    // Fallback delay (seconds) used when no frame is rendered this tick.
    double actualDelay = 0.04;

    // grab a decoded frame, returns null if the queue is paused or closed
    VideoFrame videoFrame = videoDecoder.FrameQueue.getDecodedVideoFrame();

    if (videoFrame == null)
    {
        if (VideoState == VideoState.CLOSED)
        {
            // Media closed: blank the screen, free render resources, and
            // stop re-arming the timer.
            videoRender.display(null, Color.Black, RenderMode.CLEAR_SCREEN);
            videoRender.releaseResources();
            return;
        }

        // Queue paused: keep showing the paused screen and idle-spin
        // with the fallback delay.
        videoRender.display(null, Color.Black, RenderMode.PAUSED);
    }
    else
    {
        if (videoRender.RenderMode == RenderMode.PAUSED)
        {
            // reset videoFrameTimer before (re)starting rendering
            audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();
        }

        videoPts = videoFrame.Pts;
        videoDts = videoFrame.Dts;

        // Drift lets getVideoClock() derive the running position later.
        videoPtsDrift = videoFrame.Pts + HRTimer.getTimestamp();

        if (skipVideoFrame == false)
        {
            videoRender.display(videoFrame, Color.Black, RenderMode.NORMAL);
        }

        actualDelay = synchronizeVideo(videoPts);

        // Expose per-frame stats for observers.
        NrFramesRendered++;
        framePts = videoFrame.FramePts;
        //frameDts = videoFrame.FrameDts;
        isKeyFrame = videoFrame.IsKey;
    }

    updateObservableVariables();

    if (actualDelay < 0.010)
    {
        // delay is too small skip next frame
        skipVideoFrame = true;
        NrFramesDropped++;

        goto restartvideo;
    }

    // start timer with delay for next frame (ms, rounded)
    videoRefreshTimer.Interval = (int)(actualDelay * 1000 + 0.5);
    videoRefreshTimer.start();
}