/// <summary>
/// Worker loop: opens the ffmpeg-backed stream (<c>_vfr</c>), spins up the
/// eventing thread, then reads packets until stopped — audio goes to
/// <c>ProcessAudio</c>, video bitmaps are queued for <c>EventManager</c>.
/// Always finishes by calling <c>ShutDown</c> with any error message.
/// </summary>
private void FfmpegListener()
{
    _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    _vfr = null;
    bool open = false;
    string errmsg = "";
    _eventing = null;
    _stopping = false;

    try
    {
        // ffmpeg open/close is serialized process-wide through this mutex.
        Program.FfmpegMutex.WaitOne();
        _vfr = new VideoFileReader();

        //ensure http/https is lower case for string compare in ffmpeg library
        // FIX: use ToLowerInvariant — ToLower() is culture-sensitive and can
        // mangle the scheme (e.g. Turkish dotless-i) so the compare fails.
        int i = _source.IndexOf("://", StringComparison.Ordinal);
        if (i > -1)
        {
            _source = _source.Substring(0, i).ToLowerInvariant() + _source.Substring(i);
        }
        _vfr.Timeout = Timeout;
        _vfr.AnalyzeDuration = AnalyzeDuration;
        _vfr.Cookies = Cookies;
        _vfr.UserAgent = UserAgent;
        _vfr.Headers = Headers;
        _vfr.Flags = -1;
        _vfr.NoBuffer = true;
        _vfr.RTSPMode = RTSPMode;
        _vfr.Open(_source);
        open = true;
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "FFMPEG");
    }
    finally
    {
        try
        {
            Program.FfmpegMutex.ReleaseMutex();
        }
        catch (ObjectDisposedException)
        {
            //can happen on shutdown
        }
    }

    if (_vfr == null || !_vfr.IsOpen || !open)
    {
        ShutDown("Could not open stream" + ": " + _source);
        return;
    }

    // Wire up audio only when the stream actually carries channels.
    bool hasaudio = false;
    if (_vfr.Channels > 0)
    {
        hasaudio = true;
        RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels);
        _waveProvider = new BufferedWaveProvider(RecordingFormat)
        {
            DiscardOnBufferOverflow = true,
            BufferDuration = TimeSpan.FromMilliseconds(500)
        };
        SampleChannel = new SampleChannel(_waveProvider);
        SampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    }

    Duration = _vfr.Duration;
    _videoQueue = new ConcurrentQueue<Bitmap>();
    _audioQueue = new ConcurrentQueue<byte[]>();
    _eventing = new Thread(EventManager) { Name = "ffmpeg eventing", IsBackground = true };
    _eventing.Start();

    try
    {
        while (!_stopEvent.WaitOne(5) && !MainForm.ShuttingDown)
        {
            // No NewFrame subscribers means nobody consumes frames — stop reading.
            var nf = NewFrame;
            if (nf == null)
            {
                break;
            }

            object frame = _vfr.ReadFrame();
            switch (_vfr.LastFrameType)
            {
                case 0: //null packet
                    if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout)
                    {
                        throw new TimeoutException("Timeout reading from video stream");
                    }
                    break;

                case 1: // audio packet
                    LastFrame = DateTime.UtcNow;
                    if (hasaudio)
                    {
                        var data = frame as byte[];
                        if (data?.Length > 0)
                        {
                            ProcessAudio(data);
                        }
                    }
                    break;

                case 2: // video packet
                    LastFrame = DateTime.UtcNow;
                    var bmp = frame as Bitmap;
                    if (bmp != null)
                    {
                        if (_videoQueue.Count < 20)
                        {
                            _videoQueue.Enqueue(bmp);
                        }
                        else
                        {
                            // FIX: previously a frame dropped because the queue was
                            // full was simply abandoned, leaking its GDI bitmap
                            // handle until finalization. Dispose it explicitly.
                            bmp.Dispose();
                        }
                    }
                    break;
            }
        }
    }
    catch (Exception e)
    {
        Logger.LogExceptionToFile(e, "FFMPEG");
        errmsg = e.Message;
    }

    // Tear down: stop the eventing thread, detach audio, flush buffers.
    _stopEvent.Set();
    _eventing.Join();

    if (SampleChannel != null)
    {
        SampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        SampleChannel = null;
    }

    if (_waveProvider?.BufferedBytes > 0)
    {
        _waveProvider?.ClearBuffer();
    }

    ShutDown(errmsg);
}
/// <summary>
/// Worker loop: opens the ffmpeg-backed stream (<c>_vfr</c>), starts the
/// eventing thread, then pumps packets until stopped — audio is forwarded to
/// <c>ProcessAudio</c>, video bitmaps are queued for <c>EventManager</c>.
/// Always ends by calling <c>ShutDown</c> with any captured error message.
/// </summary>
private void FfmpegListener()
{
    _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    _vfr = null;
    bool open = false;
    string errmsg = "";
    _eventing = null;
    _stopping = false;

    try
    {
        // ffmpeg open/close is serialized process-wide through this mutex.
        Program.FfmpegMutex.WaitOne();
        _vfr = new VideoFileReader();

        //ensure http/https is lower case for string compare in ffmpeg library
        // FIX: ToLowerInvariant — ToLower() is culture-sensitive and can
        // mangle the scheme (e.g. Turkish dotless-i) so the compare fails.
        int i = _source.IndexOf("://", StringComparison.Ordinal);
        if (i > -1)
        {
            _source = _source.Substring(0, i).ToLowerInvariant() + _source.Substring(i);
        }
        _vfr.Timeout = Timeout;
        _vfr.AnalyzeDuration = AnalyzeDuration;
        _vfr.Cookies = Cookies;
        _vfr.UserAgent = UserAgent;
        _vfr.Headers = Headers;
        _vfr.Flags = -1;
        _vfr.NoBuffer = true;
        _vfr.RTSPMode = RTSPMode;
        _vfr.Open(_source);
        open = true;
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "FFMPEG");
    }
    finally
    {
        try
        {
            Program.FfmpegMutex.ReleaseMutex();
        }
        catch (ObjectDisposedException)
        {
            //can happen on shutdown
        }
    }

    if (_vfr == null || !_vfr.IsOpen || !open)
    {
        ShutDown("Could not open stream" + ": " + _source);
        return;
    }

    // Wire up audio only when the stream actually carries channels.
    bool hasaudio = false;
    if (_vfr.Channels > 0)
    {
        hasaudio = true;
        RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels);
        _waveProvider = new BufferedWaveProvider(RecordingFormat)
        {
            DiscardOnBufferOverflow = true,
            BufferDuration = TimeSpan.FromMilliseconds(500)
        };
        SampleChannel = new SampleChannel(_waveProvider);
        SampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    }

    Duration = _vfr.Duration;
    _videoQueue = new ConcurrentQueue<Bitmap>();
    _audioQueue = new ConcurrentQueue<byte[]>();
    _eventing = new Thread(EventManager) { Name = "ffmpeg eventing", IsBackground = true };
    _eventing.Start();

    try
    {
        while (!_stopEvent.WaitOne(5) && !MainForm.ShuttingDown)
        {
            // No NewFrame subscribers means nobody consumes frames — stop reading.
            var nf = NewFrame;
            if (nf == null)
            {
                break;
            }

            object frame = _vfr.ReadFrame();
            switch (_vfr.LastFrameType)
            {
                case 0: //null packet
                    if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout)
                    {
                        throw new TimeoutException("Timeout reading from video stream");
                    }
                    break;

                case 1: // audio packet
                    LastFrame = DateTime.UtcNow;
                    if (hasaudio)
                    {
                        var data = frame as byte[];
                        if (data?.Length > 0)
                        {
                            ProcessAudio(data);
                        }
                    }
                    break;

                case 2: // video packet
                    LastFrame = DateTime.UtcNow;
                    var bmp = frame as Bitmap;
                    if (bmp != null)
                    {
                        if (_videoQueue.Count < 20)
                        {
                            _videoQueue.Enqueue(bmp);
                        }
                        else
                        {
                            // FIX: a frame dropped because the queue was full was
                            // previously abandoned, leaking its GDI bitmap handle
                            // until finalization. Dispose it explicitly.
                            bmp.Dispose();
                        }
                    }
                    break;
            }
        }
    }
    catch (Exception e)
    {
        Logger.LogExceptionToFile(e, "FFMPEG");
        errmsg = e.Message;
    }

    // Tear down: stop the eventing thread, detach audio, flush buffers.
    _stopEvent.Set();
    _eventing.Join();

    if (SampleChannel != null)
    {
        SampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        SampleChannel = null;
    }

    if (_waveProvider?.BufferedBytes > 0)
    {
        _waveProvider?.ClearBuffer();
    }

    ShutDown(errmsg);
}
/// <summary>
/// "Test" button handler: attempts to open the configured source with a
/// temporary <c>VideoFileReader</c>, reads a single frame, and shows either
/// "OK" or the failure message in a message box. The button is disabled for
/// the duration of the (blocking) test.
/// </summary>
/// <param name="sender">Standard event source (the button).</param>
/// <param name="e">Standard event args; unused.</param>
private void btnTest_Click(object sender, EventArgs e)
{
    btnTest.Enabled = false;
    string res = "OK";
    try
    {
        // ffmpeg open/close is serialized process-wide through this mutex.
        Program.FfmpegMutex.WaitOne();
        string source = cmbFile.Text;
        using (var vfr = new VideoFileReader())
        {
            //ensure http/https is lower case for string compare in ffmpeg library
            int i = source.IndexOf("://", StringComparison.Ordinal);
            if (i > -1)
            {
                source = source.Substring(0, i).ToLower() + source.Substring(i);
            }
            vfr.Timeout = CameraControl.Camobject.settings.timeout;
            vfr.AnalyzeDuration = (int) numAnalyseDuration.Value;
            vfr.Cookies = CameraControl.Camobject.settings.cookies;
            vfr.UserAgent = CameraControl.Camobject.settings.useragent;
            vfr.Headers = CameraControl.Camobject.settings.headers;
            vfr.RTSPMode = ddlRTSP.SelectedIndex;
            vfr.Flags = -1;
            vfr.NoBuffer = true;
            vfr.Open(source);

            var f = vfr.ReadFrame();
            if (f == null)
            {
                // FIX: throw a specific exception type rather than bare Exception
                // (caught locally below, so callers are unaffected).
                throw new InvalidOperationException("Could not read from url");
            }
            // FIX: the test frame (typically a Bitmap) was previously leaked —
            // release it once we know the read succeeded.
            (f as IDisposable)?.Dispose();
        }
    }
    catch (Exception ex)
    {
        // Any failure (open, read, settings) is reported to the user verbatim.
        res = ex.Message;
    }
    finally
    {
        try
        {
            Program.FfmpegMutex.ReleaseMutex();
        }
        catch (ObjectDisposedException)
        {
            //can happen on shutdown
        }
    }
    MessageBox.Show(res);
    btnTest.Enabled = true;
}
/// <summary>
/// Reads one frame from the ffmpeg reader and routes it by packet type:
/// null packet (0) -> end-of-file drain or timeout check; audio (1) ->
/// queued as <c>DelayedAudio</c>; video (2) -> queued as <c>DelayedFrame</c>,
/// with periodic drift analysis in realtime mode that adjusts <c>_delay</c>.
/// </summary>
/// <param name="analyseInterval">Length, in seconds, of each drift-analysis window.</param>
/// <param name="hasaudio">Whether audio packets should be processed at all.</param>
/// <param name="firstmaxdrift">Baseline drift recorded at the end of the first analysis window.</param>
/// <param name="maxdrift">Largest-magnitude drift observed in the current window.</param>
/// <param name="dtAnalyse">End time of the current analysis window; MinValue means "not started".</param>
/// <returns>true when playback should finish (end of file reached and buffers drained); false to keep looping.</returns>
/// <exception cref="TimeoutException">Realtime stream produced no data for longer than <c>Timeout</c> ms.</exception>
private bool DecodeFrame(int analyseInterval, bool hasaudio, ref double firstmaxdrift, ref double maxdrift, ref DateTime dtAnalyse)
{
    object frame = _vfr.ReadFrame();
    // Bail out immediately if a stop was requested while reading.
    if (_stopEvent.WaitOne(0))
    {
        return(false);
    }
    switch (_vfr.LastFrameType)
    {
        case 0: //null packet
            if (!_realtime)
            {
                //end of file
                //wait for all frames to be emitted
                while (!_stopEvent.WaitOne(2))
                {
                    if (_videoframes.Count == 0 && _audioframes.Count == 0)
                    {
                        break;
                    }
                }
                return(true);
            }
            // Realtime stream: a long gap with no packets means the feed stalled.
            if ((Helper.Now - LastFrame).TotalMilliseconds > Timeout)
            {
                throw new TimeoutException("Timeout reading from video stream");
            }
            break;
        case 1: // audio packet
            LastFrame = Helper.Now;
            if (hasaudio)
            {
                var data = frame as byte[];
                if (data != null)
                {
                    if (data.Length > 0)
                    {
                        // Schedule playback at the stream's audio timestamp,
                        // offset by the current A/V compensation delay.
                        double t = _vfr.AudioTime;
                        _audioframes.Add(new DelayedAudio(data, t, _delay));
                    }
                }
            }
            break;
        case 2: // video packet
            LastFrame = Helper.Now;
            if (frame != null)
            {
                var bmp = frame as Bitmap;
                if (dtAnalyse == DateTime.MinValue)
                {
                    // First video frame: open the initial drift-analysis window.
                    dtAnalyse = Helper.Now.AddSeconds(analyseInterval);
                }
                double t = _vfr.VideoTime;
                if (_realtime)
                {
                    // drift = stream clock (ms) minus wall-clock playback time (ms):
                    // positive when the stream runs ahead of real time.
                    double drift = _vfr.VideoTime - _sw.ElapsedMilliseconds;
                    if (dtAnalyse > Helper.Now)
                    {
                        // Inside the window: remember the largest-magnitude drift seen.
                        if (Math.Abs(drift) > Math.Abs(maxdrift))
                        {
                            maxdrift = drift;
                        }
                    }
                    else
                    {
                        // Window elapsed. First time through (firstmaxdrift still 0,
                        // assuming a positive baseline — NOTE(review): a genuinely
                        // negative first-window drift never sets the baseline; confirm
                        // intended) record the baseline; afterwards adjust _delay to
                        // cancel drift accumulated relative to that baseline.
                        if (firstmaxdrift > 0)
                        {
                            _delay = 0 - (maxdrift - firstmaxdrift);
                        }
                        else
                        {
                            firstmaxdrift = maxdrift;
                        }
                        maxdrift = 0;
                        dtAnalyse = Helper.Now.AddSeconds(analyseInterval);
                    }
                }
                // Queue the frame for emission at its stream timestamp plus delay.
                _videoframes.Add(new DelayedFrame(bmp, t, _delay));
            }
            break;
    }
    return(false);
}