Example #1
        private void ShutDown(string errmsg)
        {
            bool err = !String.IsNullOrEmpty(errmsg);

            if (err)
            {
                _reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }

            if (IsFileSource && !err)
            {
                _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            }

            if (_vfr != null && _vfr.IsOpen)
            {
                try
                {
                    _vfr.Dispose(); //calls close
                }
                catch (Exception ex)
                {
                    MainForm.LogExceptionToFile(ex);
                }
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, _reasonToStop);
            }
            if (AudioFinished != null)
            {
                AudioFinished(this, _reasonToStop);
            }

            ClearBuffer();
        }
Example #2
        private void FfmpegListener()
        {
            _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            _vfr          = null;
            bool   open   = false;
            string errmsg = "";

            _realtime = !IsFileSource;
            try
            {
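                // serialize reader creation/open across threads via the shared ffmpeg mutex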
                Program.FFMPEGMutex.WaitOne();
                _vfr = new VideoFileReader();

                //ensure http/https is lower case for string compare in ffmpeg library
                int i = _source.IndexOf("://", StringComparison.Ordinal);
                if (i > -1)
                {
                    _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
                }
                _vfr.Timeout         = Timeout;
                _vfr.AnalyzeDuration = AnalyzeDuration;
                _vfr.Cookies         = Cookies;
                _vfr.UserAgent       = UserAgent;
                _vfr.Headers         = Headers;
                _vfr.Flags           = -1;
                _vfr.NoBuffer        = _realtime;
                _vfr.RTSPMode        = RTSPMode;
                _vfr.Open(_source);
                open = true;
            }
            catch (Exception ex)
            {
                MainForm.LogErrorToFile(ex.Message + ": " + _source);
            }
            finally
            {
                try
                {
                    Program.FFMPEGMutex.ReleaseMutex();
                }
                catch (ObjectDisposedException)
                {
                    //can happen on shutdown
                }
            }

            if (_vfr == null || !_vfr.IsOpen || !open)
            {
                ShutDown("Could not open stream" + ": " + _source);
                return;
            }
            if (_stopEvent.WaitOne(0))
            {
                ShutDown("");
                return;
            }

            bool hasaudio = false;


            if (_vfr.Channels > 0)
            {
                hasaudio        = true;
                RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels);
                _waveProvider   = new BufferedWaveProvider(RecordingFormat)
                {
                    DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500)
                };

                SampleChannel = new SampleChannel(_waveProvider);
                SampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;

                if (HasAudioStream != null)
                {
                    HasAudioStream(this, EventArgs.Empty);
                }
            }
            HasAudioStream = null;

            Duration = _vfr.Duration;

            if (!_realtime)
            {
                _tOutput = new Thread(FrameEmitter)
                {
                    Name = "ffmpeg frame emitter"
                };
                _tOutput.Start();
            }
            else
            {
                _tOutput = null;
            }

            _videoframes = new List<DelayedFrame>();
            _audioframes = new List<DelayedAudio>();

            double    maxdrift = 0, firstmaxdrift = 0;
            const int analyseInterval = 10;
            DateTime  dtAnalyse       = DateTime.MinValue;

            //LastFrame = Helper.Now;

            if (_initialSeek > -1)
            {
                _vfr.Seek(_initialSeek);
            }
            try
            {
                while (!_stopEvent.WaitOne(5) && !MainForm.Reallyclose && NewFrame != null)
                {
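                    // for file playback, stop decoding ahead once the delayed frame/audio buffers are full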
                    _bufferFull = !_realtime && (_videoframes.Count > MAXBuffer || _audioframes.Count > MAXBuffer);
                    if (!_paused && !_bufferFull)
                    {
                        if (DecodeFrame(analyseInterval, hasaudio, ref firstmaxdrift, ref maxdrift, ref dtAnalyse))
                        {
                            break;
                        }
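                        // realtime sources emit decoded frames immediately instead of using the FrameEmitter thread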
                        if (_realtime && !_stopEvent.WaitOne(0))
                        {
                            if (_videoframes.Count > 0)
                            {
                                DelayedFrame q = _videoframes[0];
                                if (q.B != null)
                                {
                                    if (NewFrame != null)
                                    {
                                        NewFrame(this, new NewFrameEventArgs(q.B));
                                    }
                                    q.B.Dispose();
                                }
                                _videoframes.RemoveAt(0);
                            }
                            if (_audioframes.Count > 0)
                            {
                                DelayedAudio q = _audioframes[0];

                                if (q.A != null)
                                {
                                    ProcessAudio(q.A);
                                }
                                _audioframes.RemoveAt(0);
                            }
                        }
                    }
                }
            }
            catch (Exception e)
            {
                MainForm.LogExceptionToFile(e);
                errmsg = e.Message;
            }

            if (SampleChannel != null)
            {
                SampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                SampleChannel = null;
            }

            if (_waveProvider != null)
            {
                if (_waveProvider.BufferedBytes > 0)
                {
                    _waveProvider.ClearBuffer();
                }
            }

            if (_tOutput != null)
            {
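                // block until the frame emitter thread has exited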
                try
                {
                    if (!_tOutput.Join(TimeSpan.Zero))
                    {
                        _tOutput.Join();
                    }
                }
                catch {}
            }

            ShutDown(errmsg);
        }
Example #3
        private void FfmpegListener()
        {
            _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            _vfr = null;
            bool open = false;
            string errmsg = "";
            _realtime = !IsFileSource;
            try
            {
                Program.FFMPEGMutex.WaitOne();
                _vfr = new VideoFileReader();

                //ensure http/https is lower case for string compare in ffmpeg library
                int i = _source.IndexOf("://", StringComparison.Ordinal);
                if (i > -1)
                {
                    _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
                }
                _vfr.Timeout = Timeout;
                _vfr.AnalyzeDuration = AnalyzeDuration;
                _vfr.Cookies = Cookies;
                _vfr.UserAgent = UserAgent;
                _vfr.Headers = Headers;
                _vfr.Flags = -1;
                _vfr.NoBuffer = _realtime;
                _vfr.RTSPMode = RTSPMode;
                _vfr.Open(_source);
                open = true;
            }
            catch (Exception ex)
            {
                MainForm.LogErrorToFile(ex.Message+": "+_source);
            }
            finally
            {
                try
                {
                    Program.FFMPEGMutex.ReleaseMutex();
                }
                catch (ObjectDisposedException)
                {
                    //can happen on shutdown
                }
            }

            if (_vfr == null || !_vfr.IsOpen || !open)
            {
                ShutDown("Could not open stream" + ": " + _source);
                return;
            }
            if (_stopEvent.WaitOne(0))
            {
                ShutDown("");
                return;
            }

            bool hasaudio = false;

            if (_vfr.Channels > 0)
            {
                hasaudio = true;
                RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels);
                _waveProvider = new BufferedWaveProvider(RecordingFormat) {DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500)};

                SampleChannel = new SampleChannel(_waveProvider);
                SampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;

                if (HasAudioStream != null)
                {
                    HasAudioStream(this, EventArgs.Empty);
                }
            }
            HasAudioStream = null;

            Duration = _vfr.Duration;

            if (!_realtime)
            {
                _tOutput = new Thread(FrameEmitter) {Name="ffmpeg frame emitter"};
                _tOutput.Start();
            }
            else
            {
                _tOutput = null;
            }

            _videoframes = new List<DelayedFrame>();
            _audioframes = new List<DelayedAudio>();

            double maxdrift = 0, firstmaxdrift = 0;
            const int analyseInterval = 10;
            DateTime dtAnalyse = DateTime.MinValue;
            //LastFrame = Helper.Now;

            if (_initialSeek>-1)
                _vfr.Seek(_initialSeek);
            try
            {
                while (!_stopEvent.WaitOne(5) && !MainForm.Reallyclose && NewFrame!=null)
                {
                    _bufferFull = !_realtime && (_videoframes.Count > MAXBuffer || _audioframes.Count > MAXBuffer);
                    if (!_paused && !_bufferFull)
                    {
                        if (DecodeFrame(analyseInterval, hasaudio, ref firstmaxdrift, ref maxdrift, ref dtAnalyse)) break;
                        if (_realtime && !_stopEvent.WaitOne(0))
                        {
                            if (_videoframes.Count > 0)
                            {
                                DelayedFrame q = _videoframes[0];
                                if (q.B != null)
                                {
                                    if (NewFrame != null)
                                    {
                                        NewFrame(this, new NewFrameEventArgs(q.B));
                                    }
                                    q.B.Dispose();
                                }
                                _videoframes.RemoveAt(0);
                            }
                            if (_audioframes.Count > 0)
                            {
                                DelayedAudio q = _audioframes[0];

                                if (q.A != null)
                                {
                                    ProcessAudio(q.A);
                                }
                                _audioframes.RemoveAt(0);
                            }
                        }
                    }
                }

            }
            catch (Exception e)
            {
                MainForm.LogExceptionToFile(e);
                errmsg = e.Message;
            }

            if (SampleChannel != null)
            {
                SampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                SampleChannel = null;
            }

            if (_waveProvider != null)
            {
                if (_waveProvider.BufferedBytes > 0)
                    _waveProvider.ClearBuffer();
            }

            if (_tOutput != null)
            {
                try
                {
                    if (!_tOutput.Join(TimeSpan.Zero))
                        _tOutput.Join();
                }
                catch {}
            }

            ShutDown(errmsg);
        }
Example #4
        private void ShutDown(string errmsg)
        {
            bool err = !String.IsNullOrEmpty(errmsg);
            if (err)
            {
                _reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }

            if (IsFileSource && !err)
                _reasonToStop = ReasonToFinishPlaying.StoppedByUser;

            if (_vfr != null && _vfr.IsOpen)
            {
                try
                {
                    _vfr.Dispose(); //calls close
                }
                catch (Exception ex)
                {
                    MainForm.LogExceptionToFile(ex);
                }
            }

            if (PlayingFinished != null)
                PlayingFinished(this, _reasonToStop);
            if (AudioFinished != null)
                AudioFinished(this, _reasonToStop);

            ClearBuffer();
        }
Example #5
        private void FfmpegListener()
        {
            ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

            VideoFileReader vfr = null;

            Program.WriterMutex.WaitOne();
            try
            {
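                // create and open the reader while the shared writer mutex is held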
                vfr = new VideoFileReader();
                vfr.Open(_source);
            }
            catch (Exception ex)
            {
                Log.Error("", ex);//MainForm.LogExceptionToFile(ex);
            }
            Program.WriterMutex.ReleaseMutex();
            if (vfr == null || !vfr.IsOpen)
            {
                if (PlayingFinished != null)
                {
                    PlayingFinished(this, ReasonToFinishPlaying.VideoSourceError);
                }
                return;
            }
            bool hasaudio = false;

            if (vfr.Channels > 0)
            {
                hasaudio        = true;
                RecordingFormat = new WaveFormat(vfr.SampleRate, 16, vfr.Channels);

                WaveOutProvider = new BufferedWaveProvider(RecordingFormat)
                {
                    DiscardOnBufferOverflow = true
                };
                _waveProvider = new BufferedWaveProvider(RecordingFormat)
                {
                    DiscardOnBufferOverflow = true
                };


                _sampleChannel    = new SampleChannel(_waveProvider);
                _meteringProvider = new MeteringSampleProvider(_sampleChannel);
                _meteringProvider.StreamVolume += MeteringProviderStreamVolume;

                if (HasAudioStream != null)
                {
                    HasAudioStream(this, EventArgs.Empty);
                }
            }

            int interval = 1000 / ((vfr.FrameRate == 0) ? 25 : vfr.FrameRate);

            byte[] data;
            Bitmap frame;

            try
            {
                while (!_stopEvent.WaitOne(0, false))
                {
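                    // time each iteration so the loop can sleep out the remainder of the frame interval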
                    DateTime start = DateTime.Now;
                    frame = vfr.ReadVideoFrame();
                    if (frame == null)
                    {
                        reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                        break;
                    }

                    if (NewFrame != null)
                    {
                        NewFrame(this, new NewFrameEventArgs(frame));
                    }
                    frame.Dispose();

                    if (hasaudio)
                    {
                        data = vfr.ReadAudioFrame();
                        if (DataAvailable != null)
                        {
                            _waveProvider.AddSamples(data, 0, data.Length);

                            if (Listening)
                            {
                                WaveOutProvider.AddSamples(data, 0, data.Length);
                            }

                            _mFramesReceived++;

                            //forces processing of volume level without piping it out
                            var sampleBuffer = new float[data.Length];

                            _meteringProvider.Read(sampleBuffer, 0, data.Length);
                            DataAvailable(this, new DataAvailableEventArgs((byte[])data.Clone()));
                        }
                    }

                    if (interval > 0)
                    {
                        // get frame extract duration
                        TimeSpan span = DateTime.Now.Subtract(start);

                        // milliseconds to sleep
                        int msec = interval - (int)span.TotalMilliseconds;

                        if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                        {
                            break;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(e.Message));
                }
                Log.Error("", e);//MainForm.LogExceptionToFile(e);
                reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }
            if (PlayingFinished != null)
            {
                PlayingFinished(this, reasonToStop);
            }
        }