/// <summary>Reposition the MP4 sample reader at the given stream position, and reset the decoder state.</summary>
void iTrackReader.seekToSample( StreamPosition index )
{
	Mp4StreamPosition mp4Position = (Mp4StreamPosition)index;
	sampleReader.seekToSample( mp4Position.sample );
	state = default;
}
/// <summary>Locate the position in the video stream — and in the audio stream, when an audio reader is present — corresponding to the specified time.</summary>
MediaSeekPosition iDecoderThread.findStreamsPosition( TimeSpan where )
{
	StreamPosition videoPosition = reader.findStreamPosition( where );
	// Audio is optional; leave the audio position null when there's no audio reader.
	StreamPosition audioPosition = ( null == audioReader ) ? null : audioReader.findStreamPosition( where );
	return new MediaSeekPosition( videoPosition, audioPosition );
}
/// <summary>Called from the main loop of this thread in response to seekEventHandle event being signalled.</summary>
/// <remarks>
/// Statement order matters here: the pending request is copied out under the lock, already-decoded output
/// is discarded, audio playback is drained before either reader is repositioned, and the presentation
/// clock is only signalled once the target video frame has actually been decoded.
/// </remarks>
bool handleSeek()
{
	// Consume the signal first, so a subsequent seek request can re-raise the event.
	seekEventHandle.reset();
	MediaSeekPosition seekPosition;
	lock ( syncRoot )
	{
		// A signalled event without a stored request is a programming error in the requesting code.
		if (!this.seekPosition.HasValue) { throw new ApplicationException(); }
		// Copy the request to a local so the lock is held only briefly.
		seekPosition = this.seekPosition.Value;
	}
	Logger.logVerbose("Processing seek request: {0}", seekPosition);
	// Resolve the seek targets to key frames first; decoding can only restart from a key frame.
	StreamPosition audioSample = null;
	if (null != seekPosition.audio && null != audioReader)
	{
		audioSample = audioReader.findKeyFrame(seekPosition.audio);
	}
	StreamPosition videoSample = reader.findKeyFrame(seekPosition.video);
	// Drop frames decoded for the old position, and wait for queued audio to finish playing
	// before repositioning the readers.
	eventsSink.discardDecoded();
	presentationClock.waitForAudioDrain();
	if (null != audioReader) { audioReader.seekToSample(audioSample); }
	reader.seekToSample(videoSample);
	// Decode forward from the key frame until the requested video frame is produced.
	Stopwatch sw = Stopwatch.StartNew();
	if (!waitForVideoFrame(seekPosition.video.time)) { return(false); }
	TimeSpan elapsed = sw.Elapsed;	// only consumed by the disabled diagnostic logging below
	/* int frames = seekPosition.video.Value.sample - videoSample + 1;
	 * Logger.logDebug( "DecoderThread.handleSeek decoded the target frame of the video. It took {0:G3} seconds and {1}",
	 *	elapsed.TotalSeconds, frames.pluralString( "frame" ) );
	 */
	presentationClock.signalVideoReady();
	return(true);
}
/// <summary>Find the key frame for the specified seek position, by delegating to the MP4 sample reader.</summary>
StreamPosition iTrackReader.findKeyFrame( StreamPosition seekFrame )
{
	Mp4StreamPosition mp4Frame = (Mp4StreamPosition)seekFrame;
	return sampleReader.findKeyFrame( mp4Frame );
}