private void btnJump_Click(object sender, EventArgs e)
{
    // Jump to the specified frame number
    if (_reader != null)
    {
        double time = ((double)_reader.FrameCount / _reader.FrameRate) * 2 / 3;
        // Seek to a position near the specified frame: true seeks to the nearest key frame, false seeks to an arbitrary frame
        _reader.Seek(time, false);
    }
    ShowNextFrame();
}
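The button handler above relies on a ShowNextFrame helper (not shown) to decode and display the frame at the new position. The following is a minimal sketch of such a helper, assuming the reader exposes an AForge/Accord-style ReadVideoFrame() that returns a Bitmap and that the form has a PictureBox named picBox; both of those names are assumptions, not part of the original example.

// Hypothetical sketch only: assumes _reader exposes ReadVideoFrame() returning a Bitmap
// (as the AForge/Accord-style VideoFileReader does) and that the form has a PictureBox named picBox.
private void ShowNextFrame()
{
    if (_reader == null)
    {
        return;
    }

    Bitmap frame = _reader.ReadVideoFrame(); // decode the next frame after the seek
    if (frame == null)
    {
        return; // end of stream
    }

    var old = picBox.Image;
    picBox.Image = frame; // display the decoded frame
    if (old != null)
    {
        old.Dispose();    // release the previously displayed bitmap
    }
}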
public void Seek(float percentage)
{
    int t = Convert.ToInt32((Duration / 1000d) * percentage);
    if (Seekable)
    {
        _sw.Stop();
        _initialSeek = t;
        _vfr.Seek(t);
        _sw.Reset();
        _sw.Start();
    }
}
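Because Seek takes a fractional position rather than an absolute time, a UI scrubber only needs to normalise its value before calling it. A minimal usage sketch, assuming the argument is the fractional position in the 0-1 range and that a WinForms TrackBar named trackBarPosition drives a player field _stream exposing the Seek method above (both names are hypothetical):

// Hypothetical usage sketch: trackBarPosition and _stream are assumed names, not from the original code.
private void trackBarPosition_Scroll(object sender, EventArgs e)
{
    // Normalise the trackbar value to the fractional position expected by Seek
    float percentage = (float)trackBarPosition.Value / trackBarPosition.Maximum;
    _stream.Seek(percentage);
}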
private void FfmpegListener() { _reasonToStop = ReasonToFinishPlaying.StoppedByUser; _vfr = null; bool open = false; string errmsg = ""; _realtime = !IsFileSource; try { Program.FFMPEGMutex.WaitOne(); _vfr = new VideoFileReader(); //ensure http/https is lower case for string compare in ffmpeg library int i = _source.IndexOf("://", StringComparison.Ordinal); if (i > -1) { _source = _source.Substring(0, i).ToLower() + _source.Substring(i); } _vfr.Timeout = Timeout; _vfr.AnalyzeDuration = AnalyzeDuration; _vfr.Cookies = Cookies; _vfr.UserAgent = UserAgent; _vfr.Headers = Headers; _vfr.Flags = -1; _vfr.NoBuffer = _realtime; _vfr.RTSPMode = RTSPMode; _vfr.Open(_source); open = true; } catch (Exception ex) { MainForm.LogErrorToFile(ex.Message + ": " + _source); } finally { try { Program.FFMPEGMutex.ReleaseMutex(); } catch (ObjectDisposedException) { //can happen on shutdown } } if (_vfr == null || !_vfr.IsOpen || !open) { ShutDown("Could not open stream" + ": " + _source); return; } if (_stopEvent.WaitOne(0)) { ShutDown(""); return; } bool hasaudio = false; if (_vfr.Channels > 0) { hasaudio = true; RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels); _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) }; SampleChannel = new SampleChannel(_waveProvider); SampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter; if (HasAudioStream != null) { HasAudioStream(this, EventArgs.Empty); } } HasAudioStream = null; Duration = _vfr.Duration; if (!_realtime) { _tOutput = new Thread(FrameEmitter) { Name = "ffmpeg frame emitter" }; _tOutput.Start(); } else { _tOutput = null; } _videoframes = new List <DelayedFrame>(); _audioframes = new List <DelayedAudio>(); double maxdrift = 0, firstmaxdrift = 0; const int analyseInterval = 10; DateTime dtAnalyse = DateTime.MinValue; //LastFrame = Helper.Now; if (_initialSeek > -1) { _vfr.Seek(_initialSeek); } try { while (!_stopEvent.WaitOne(5) && !MainForm.Reallyclose && NewFrame != null) { _bufferFull = !_realtime && (_videoframes.Count > MAXBuffer || _audioframes.Count > MAXBuffer); if (!_paused && !_bufferFull) { if (DecodeFrame(analyseInterval, hasaudio, ref firstmaxdrift, ref maxdrift, ref dtAnalyse)) { break; } if (_realtime && !_stopEvent.WaitOne(0)) { if (_videoframes.Count > 0) { DelayedFrame q = _videoframes[0]; if (q.B != null) { if (NewFrame != null) { NewFrame(this, new NewFrameEventArgs(q.B)); } q.B.Dispose(); } _videoframes.RemoveAt(0); } if (_audioframes.Count > 0) { DelayedAudio q = _audioframes[0]; if (q.A != null) { ProcessAudio(q.A); } _audioframes.RemoveAt(0); } } } } } catch (Exception e) { MainForm.LogExceptionToFile(e); errmsg = e.Message; } if (SampleChannel != null) { SampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter; SampleChannel = null; } if (_waveProvider != null) { if (_waveProvider.BufferedBytes > 0) { _waveProvider.ClearBuffer(); } } if (_tOutput != null) { try { if (!_tOutput.Join(TimeSpan.Zero)) { _tOutput.Join(); } } catch {} } ShutDown(errmsg); }
private void FfmpegListener() { _reasonToStop = ReasonToFinishPlaying.StoppedByUser; _vfr = null; bool open = false; string errmsg = ""; _realtime = !IsFileSource; try { Program.FFMPEGMutex.WaitOne(); _vfr = new VideoFileReader(); //ensure http/https is lower case for string compare in ffmpeg library int i = _source.IndexOf("://", StringComparison.Ordinal); if (i > -1) { _source = _source.Substring(0, i).ToLower() + _source.Substring(i); } _vfr.Timeout = Timeout; _vfr.AnalyzeDuration = AnalyzeDuration; _vfr.Cookies = Cookies; _vfr.UserAgent = UserAgent; _vfr.Headers = Headers; _vfr.Flags = -1; _vfr.NoBuffer = _realtime; _vfr.RTSPMode = RTSPMode; _vfr.Open(_source); open = true; } catch (Exception ex) { MainForm.LogErrorToFile(ex.Message+": "+_source); } finally { try { Program.FFMPEGMutex.ReleaseMutex(); } catch (ObjectDisposedException) { //can happen on shutdown } } if (_vfr == null || !_vfr.IsOpen || !open) { ShutDown("Could not open stream" + ": " + _source); return; } if (_stopEvent.WaitOne(0)) { ShutDown(""); return; } bool hasaudio = false; if (_vfr.Channels > 0) { hasaudio = true; RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels); _waveProvider = new BufferedWaveProvider(RecordingFormat) {DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500)}; SampleChannel = new SampleChannel(_waveProvider); SampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter; if (HasAudioStream != null) { HasAudioStream(this, EventArgs.Empty); } } HasAudioStream = null; Duration = _vfr.Duration; if (!_realtime) { _tOutput = new Thread(FrameEmitter) {Name="ffmpeg frame emitter"}; _tOutput.Start(); } else { _tOutput = null; } _videoframes = new List<DelayedFrame>(); _audioframes = new List<DelayedAudio>(); double maxdrift = 0, firstmaxdrift = 0; const int analyseInterval = 10; DateTime dtAnalyse = DateTime.MinValue; //LastFrame = Helper.Now; if (_initialSeek>-1) _vfr.Seek(_initialSeek); try { while (!_stopEvent.WaitOne(5) && !MainForm.Reallyclose && NewFrame!=null) { _bufferFull = !_realtime && (_videoframes.Count > MAXBuffer || _audioframes.Count > MAXBuffer); if (!_paused && !_bufferFull) { if (DecodeFrame(analyseInterval, hasaudio, ref firstmaxdrift, ref maxdrift, ref dtAnalyse)) break; if (_realtime && !_stopEvent.WaitOne(0)) { if (_videoframes.Count > 0) { DelayedFrame q = _videoframes[0]; if (q.B != null) { if (NewFrame != null) { NewFrame(this, new NewFrameEventArgs(q.B)); } q.B.Dispose(); } _videoframes.RemoveAt(0); } if (_audioframes.Count > 0) { DelayedAudio q = _audioframes[0]; if (q.A != null) { ProcessAudio(q.A); } _audioframes.RemoveAt(0); } } } } } catch (Exception e) { MainForm.LogExceptionToFile(e); errmsg = e.Message; } if (SampleChannel != null) { SampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter; SampleChannel = null; } if (_waveProvider != null) { if (_waveProvider.BufferedBytes > 0) _waveProvider.ClearBuffer(); } if (_tOutput != null) { try { if (!_tOutput.Join(TimeSpan.Zero)) _tOutput.Join(); } catch {} } ShutDown(errmsg); }
private void FfmpegListener() { _reasonToStop = ReasonToFinishPlaying.StoppedByUser; _vfr = null; bool open = false; string errmsg = ""; _eventing = null; _stopping = false; try { Program.FFMPEGMutex.WaitOne(); _vfr = new VideoFileReader(); //ensure http/https is lower case for string compare in ffmpeg library int i = _source.IndexOf("://", StringComparison.Ordinal); if (i > -1) { _source = _source.Substring(0, i).ToLower() + _source.Substring(i); } _vfr.Timeout = Timeout; _vfr.AnalyzeDuration = AnalyzeDuration; _vfr.Cookies = Cookies; _vfr.UserAgent = UserAgent; _vfr.Headers = Headers; _vfr.Flags = -1; _vfr.NoBuffer = true; _vfr.RTSPMode = RTSPMode; _vfr.Open(_source); open = true; } catch (Exception ex) { MainForm.LogExceptionToFile(ex, "FFMPEG"); } finally { try { Program.FFMPEGMutex.ReleaseMutex(); } catch (ObjectDisposedException) { //can happen on shutdown } } if (_vfr == null || !_vfr.IsOpen || !open) { ShutDown("Could not open stream" + ": " + _source); return; } if (_stopEvent.WaitOne(0)) { ShutDown(""); return; } bool hasaudio = false; if (_vfr.Channels > 0) { hasaudio = true; RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels); _waveProvider = new BufferedWaveProvider(RecordingFormat); _sampleChannel = new SampleChannel(_waveProvider); _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter; } Duration = _vfr.Duration; _videoQueue = new ConcurrentQueue<Bitmap>(); _audioQueue = new ConcurrentQueue<byte[]>(); _eventing = new Thread(EventManager) { Name = "ffmpeg eventing", IsBackground = true }; _eventing.Start(); if (_initialSeek > -1) _vfr.Seek(_initialSeek); try { while (!_stopEvent.WaitOne(5) && !MainForm.ShuttingDown) { var nf = NewFrame; if (nf == null) break; if (!_paused) { object frame = _vfr.ReadFrame(); switch (_vfr.LastFrameType) { case 0: //null packet if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout) throw new TimeoutException("Timeout reading from video stream"); break; case 1: LastFrame = DateTime.UtcNow; if (hasaudio) { var data = frame as byte[]; if (data != null) { if (data.Length > 0) { ProcessAudio(data); } } } break; case 2: LastFrame = DateTime.UtcNow; if (frame != null) { var bmp = frame as Bitmap; if (bmp != null) { if (_videoQueue.Count < 20) _videoQueue.Enqueue(bmp); } } break; } } } } catch (Exception e) { MainForm.LogExceptionToFile(e, "FFMPEG"); errmsg = e.Message; } _eventing.Join(); if (_sampleChannel != null) { _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter; _sampleChannel = null; } if (_waveProvider != null) { if (_waveProvider.BufferedBytes > 0) _waveProvider.ClearBuffer(); } ShutDown(errmsg); }
private void FfmpegListener() { _reasonToStop = ReasonToFinishPlaying.StoppedByUser; _vfr = null; bool open = false; string errmsg = ""; _eventing = null; _stopping = false; try { Program.FFMPEGMutex.WaitOne(); _vfr = new VideoFileReader(); //ensure http/https is lower case for string compare in ffmpeg library int i = _source.IndexOf("://", StringComparison.Ordinal); if (i > -1) { _source = _source.Substring(0, i).ToLower() + _source.Substring(i); } _vfr.Timeout = Timeout; _vfr.AnalyzeDuration = AnalyzeDuration; _vfr.Cookies = Cookies; _vfr.UserAgent = UserAgent; _vfr.Headers = Headers; _vfr.Flags = -1; _vfr.NoBuffer = true; _vfr.RTSPMode = RTSPMode; _vfr.Open(_source); open = true; } catch (Exception ex) { MainForm.LogExceptionToFile(ex, "FFMPEG"); } finally { try { Program.FFMPEGMutex.ReleaseMutex(); } catch (ObjectDisposedException) { //can happen on shutdown } } if (_vfr == null || !_vfr.IsOpen || !open) { ShutDown("Could not open stream" + ": " + _source); return; } if (_stopEvent.WaitOne(0)) { ShutDown(""); return; } bool hasaudio = false; if (_vfr.Channels > 0) { hasaudio = true; RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels); _waveProvider = new BufferedWaveProvider(RecordingFormat); _sampleChannel = new SampleChannel(_waveProvider); _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter; } Duration = _vfr.Duration; _videoQueue = new ConcurrentQueue <Bitmap>(); _audioQueue = new ConcurrentQueue <byte[]>(); _eventing = new Thread(EventManager) { Name = "ffmpeg eventing", IsBackground = true }; _eventing.Start(); if (_initialSeek > -1) { _vfr.Seek(_initialSeek); } try { while (!_stopEvent.WaitOne(5) && !MainForm.ShuttingDown) { var nf = NewFrame; if (nf == null) { break; } if (!_paused) { object frame = _vfr.ReadFrame(); switch (_vfr.LastFrameType) { case 0: //null packet if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout) { throw new TimeoutException("Timeout reading from video stream"); } break; case 1: LastFrame = DateTime.UtcNow; if (hasaudio) { var data = frame as byte[]; if (data != null) { if (data.Length > 0) { ProcessAudio(data); } } } break; case 2: LastFrame = DateTime.UtcNow; if (frame != null) { var bmp = frame as Bitmap; if (bmp != null) { if (_videoQueue.Count < 20) { _videoQueue.Enqueue(bmp); } } } break; } } } } catch (Exception e) { MainForm.LogExceptionToFile(e, "FFMPEG"); errmsg = e.Message; } _eventing.Join(); if (_sampleChannel != null) { _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter; _sampleChannel = null; } if (_waveProvider != null) { if (_waveProvider.BufferedBytes > 0) { _waveProvider.ClearBuffer(); } } ShutDown(errmsg); }