Example no. 1
 public void Restart()
 {
     if (!IsRunning)
     {
         return;
     }
     _res = ReasonToFinishPlaying.Restart;
     _stopReadingFrames = true;
 }
Example no. 2
 public void Stop()
 {
     if (!IsRunning)
     {
         return;
     }
     _res = ReasonToFinishPlaying.StoppedByUser;
     _stopReadingFrames = true;
 }
Example no. 3
 public void Stop()
 {
     if (!IsRunning)
     {
         return;
     }
     _res   = ReasonToFinishPlaying.StoppedByUser;
     _abort = true;
 }
Example no. 4
        public void InvokePlayingFinished(ReasonToFinishPlaying reason)
        {
            PlayingFinishedEventHandler handler = PlayingFinished;

            if (handler != null)
            {
                handler(this, reason);
            }
        }
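Note: the snippets in this listing share a reason enumeration and a finished-playing delegate. The following is a minimal sketch of those declarations, reconstructed from how the examples use them (member order, and the Restart value used by the Restart() snippets, are assumptions):

 // Assumed declarations, inferred from usage in the examples above and below.
 public enum ReasonToFinishPlaying
 {
     EndOfStreamReached,  // playback reached the end of a file or stream
     StoppedByUser,       // Stop() was called
     DeviceLost,          // device unplugged or connection/timeout lost
     VideoSourceError,    // an exception occurred inside the source
     Restart              // the source is being restarted
 }

 // Older-style handler signature used by InvokePlayingFinished above.
 public delegate void PlayingFinishedEventHandler(object sender, ReasonToFinishPlaying reason);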
Example no. 5
        // Worker thread
        private void WorkerThread( )
        {
            try
            {
                _camera.StartAcquisition( );

                // while there is no request for stop
                while (!_abort && !MainForm.ShuttingDown)
                {
                    // start time
                    DateTime start = DateTime.Now;

                    // get next frame
                    Bitmap bitmap = _camera.GetImage(15000, false);

                    _framesReceived++;
                    _bytesReceived += bitmap.Width * bitmap.Height * (Image.GetPixelFormatSize(bitmap.PixelFormat) >> 3);

                    NewFrame?.Invoke(this, new NewFrameEventArgs(bitmap));

                    // free image
                    bitmap.Dispose( );

                    // wait for a while ?
                    if (_frameInterval > 0)
                    {
                        // get frame duration
                        TimeSpan span = DateTime.Now.Subtract(start);

                        // milliseconds to sleep
                        int msec = _frameInterval - (int)span.TotalMilliseconds;

                        // sleep out the rest of the frame interval, then honour a stop request
                        if (msec > 0)
                        {
                            Thread.Sleep(msec);
                        }

                        if (_abort)
                        {
                            break;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Logger.LogException(ex, "XIMEA");
                _res = ReasonToFinishPlaying.VideoSourceError;
            }
            finally
            {
                try
                {
                    _camera?.StopAcquisition( );
                }
                catch
                {
                }
            }

            PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        }
Example no. 6
 void WaveInRecordingStopped(object sender, StoppedEventArgs e)
 {
     _started = false;
     if (e.Exception != null && e.Exception.Message.IndexOf("NoDriver", StringComparison.Ordinal) != -1)
     {
         _res = ReasonToFinishPlaying.DeviceLost;
     }
     AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
 }
Example no. 7
 public void Restart()
 {
     if (!IsRunning)
     {
         return;
     }
     _res = ReasonToFinishPlaying.Restart;
     _abort?.Set();
 }
Example no. 8
 public void Start()
 {
     if (_starting || IsRunning)
     {
         return;
     }
     _stopReadingFrames = false;
     _res      = ReasonToFinishPlaying.DeviceLost;
     _starting = true;
     Task.Factory.StartNew(DoStart);
 }
Example no. 9
 public void CheckTimestamp()
 {
      // Some feeds keep returning frames even when the connection is lost.
      // This detects that by comparing the timestamp recorded by the time-changed event
      // and signals an error if the last frame is older than the configured timeout.
     if (LastFrame > DateTime.MinValue && (Helper.Now - LastFrame).TotalMilliseconds > TimeOut)
     {
         _res = ReasonToFinishPlaying.DeviceLost;
         _abort?.Set();
     }
 }
Example no. 10
 /// <summary>
 /// Stop audio source.
 /// </summary>
 ///
 /// <remarks><para>Stops audio source.</para>
 /// </remarks>
 ///
 public void Stop()
 {
     _res = ReasonToFinishPlaying.StoppedByUser;
     if (_waveIn == null)
     {
         AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
     }
     else
     {
         StopSource();
     }
 }
Example no. 11
        // Worker thread
        private void WorkerThread( )
        {
            try
            {
                _camera.StartAcquisition( );

                // while there is no request for stop
                while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
                {
                    // start time
                    DateTime start = DateTime.UtcNow;

                    // get next frame
                    if (EmitFrame)
                    {
                        using (var bitmap = _camera.GetImage(15000, false))
                        {
                            NewFrame?.Invoke(this, new NewFrameEventArgs(bitmap));
                        }
                    }

                    // wait for a while ?
                    if (FrameInterval > 0)
                    {
                        // get download duration
                        var span = DateTime.UtcNow.Subtract(start);
                        // milliseconds to sleep
                        var msec = FrameInterval - (int)span.TotalMilliseconds;
                        if (msec > 0)
                        {
                            _abort.WaitOne(msec);
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Logger.LogException(ex, "XIMEA");
                _res = ReasonToFinishPlaying.VideoSourceError;
            }
            finally
            {
                try
                {
                    _camera?.StopAcquisition( );
                }
                catch
                {
                }
            }

            PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        }
Example no. 12
        /// <summary>
        /// Start audio source.
        /// </summary>
        ///
        /// <remarks>Starts the audio source and returns execution to the caller. The audio source
        /// object creates a background thread and notifies about new audio data with the
        /// help of the <see cref="DataAvailable"/> event.</remarks>
        ///
        /// <exception cref="ArgumentException">Audio source is not specified.</exception>
        ///
        public void Start()
        {
            if (string.IsNullOrEmpty(_source))
            {
                throw new ArgumentException("Audio source is not specified.");
            }


            if (_started)
            {
                return;
            }

            // check source
            lock (_lock)
            {
                if (_started)
                {
                    return;
                }

                int i = 0, selind = -1;
                for (var n = 0; n < WaveIn.DeviceCount; n++)
                {
                    if (WaveIn.GetCapabilities(n).ProductName == _source)
                    {
                        selind = i;
                    }
                    i++;
                }
                if (selind == -1)
                {
                    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
                    return;
                }
                _started = true;
                _res     = ReasonToFinishPlaying.DeviceLost;
                _waveIn  = new WaveInEvent
                {
                    BufferMilliseconds = 200,
                    DeviceNumber       = selind,
                    WaveFormat         = RecordingFormat
                };
                _waveIn.DataAvailable    += WaveInDataAvailable;
                _waveIn.RecordingStopped += WaveInRecordingStopped;

                _waveProvider  = new WaveInProvider(_waveIn);
                _sampleChannel = new SampleChannel(_waveProvider);
                _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                _waveIn.StartRecording();
            }
        }
Example no. 13
 /// <summary>
 ///     Stop video source.
 /// </summary>
 /// <remarks>
 ///     <para>Stops the video source by aborting its thread.</para>
 ///     <para>
 ///         <note>
 ///             Since this method aborts the background thread, its use is strongly discouraged
 ///             and should be done only if there is no other option. The correct way to stop the camera
 ///             is to <see cref="SignalToStop">signal it to stop</see> and then
 ///             <see cref="WaitForStop">wait</see> for the background thread to complete.
 ///         </note>
 ///     </para>
 /// </remarks>
 public void Stop()
 {
     if (IsRunning)
     {
         _res = ReasonToFinishPlaying.StoppedByUser;
         _abort.Set();
     }
     else
     {
         _res = ReasonToFinishPlaying.StoppedByUser;
         PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
     }
 }
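The remark above recommends SignalToStop/WaitForStop over Stop. Those methods are not part of this listing; the following is a minimal sketch of what such a pair could look like with the same field names (the _thread field is an assumption):

 // Hypothetical companion methods for the recommended stop sequence.
 public void SignalToStop()
 {
     if (IsRunning)
     {
         _res = ReasonToFinishPlaying.StoppedByUser;
         _abort.Set(); // ask the worker thread to leave its loop
     }
 }

 public void WaitForStop()
 {
     // Block until the worker thread raises PlayingFinished and exits.
     _thread?.Join();
 }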
Example no. 14
 public int InterruptCb(void *ctx)
 {
     if ((DateTime.UtcNow - _lastPacket).TotalMilliseconds > _timeout || _abort)
     {
         if (!_abort)
         {
             _res = ReasonToFinishPlaying.DeviceLost;
         }
         _abort = true;
         return(1);
     }
     return(0);
 }
Example no. 15
        private void Camera_PlayingFinished(object sender, ReasonToFinishPlaying reason)
        {
            Invoke(new Action(() =>
            {
                pictureBox.Image      = null;
                motionLevelLabel.Text = string.Empty;
            }));

            if (videoWriter.IsOpen)
            {
                videoWriter.Close();
            }
        }
Example no. 16
 public int InterruptCb(void *ctx)
 {
     //don't check abort here as breaks teardown of rtsp streams
     if ((DateTime.UtcNow - _lastPacket).TotalMilliseconds * 1000 > _timeoutMicroSeconds)
     {
         if (!_abort)
         {
             _res = ReasonToFinishPlaying.DeviceLost;
         }
         _abort = true;
         return(1);
     }
     return(0);
 }
Example no. 17
 public void Tick()
 {
     if (IsRunning && !_quit)
     {
         var ms = _connecting ? _connectMilliSeconds : _timeoutMilliSeconds;
         if ((DateTime.UtcNow - _lastFrame).TotalMilliseconds > ms)
         {
             Debug.WriteLine("TIMEOUT");
             _lastFrame = DateTime.MaxValue;
             Stop();
             _res = ReasonToFinishPlaying.DeviceLost;
         }
     }
 }
Example no. 18
 private void videoSource_PlayingFinished(object sender, ReasonToFinishPlaying reason)
 {
     if (PlayingFinished != null)
     {
         if (SynchronizingObject != null &&
             SynchronizingObject.InvokeRequired)
         {
             SynchronizingObject.BeginInvoke(
                 PlayingFinished, new object[] { this, reason });
         }
         else
         {
             PlayingFinished(this, reason);
         }
     }
 }
Example no. 19
 private void VideoPlayer_PlayingFinished(object sender, ReasonToFinishPlaying reason)
 {
     playing = false;
     if (!IsDisposed)
     {
         try
         {
             Invoke((Proc) delegate
             {
                 btnStartStop.Text       = "Start";
                 videoPlayer.VideoSource = null;
             });
         }
         catch (Exception) { }
     }
 }
Example no. 20
        private void ShutDown(string errmsg)
        {
            bool err = !String.IsNullOrEmpty(errmsg);

            if (err)
            {
                _reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }

            if (IsFileSource && !err)
            {
                _reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
            }

            try
            {
                _afr.Dispose();//calls close!
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex);
            }

            // release events
            if (_stopEvent != null)
            {
                _stopEvent.Close();
                _stopEvent.Dispose();
                _stopEvent = null;
            }

            if (AudioFinished != null)
            {
                AudioFinished(this, _reasonToStop);
            }

            if (_waveProvider != null)
            {
                if (_waveProvider.BufferedBytes > 0)
                {
                    _waveProvider.ClearBuffer();
                }
            }

            _isrunning = false;
        }
Example no. 21
        // Video source has finished playing video
        private void videoSource_PlayingFinished(object sender, ReasonToFinishPlaying reason)
        {
            switch (reason)
            {
            case ReasonToFinishPlaying.EndOfStreamReached:
                lastMessage = "Video has finished";
                break;

            case ReasonToFinishPlaying.StoppedByUser:
                lastMessage = "Video was stopped";
                break;

            default:
                lastMessage = "Video has finished for unknown reason";
                break;
            }
            Invalidate( );
        }
Example no. 22
        public void Start()
        {
            if (IsRunning)
            {
                return;
            }

            // create events
            _res = ReasonToFinishPlaying.DeviceLost;

            // create and start new thread
            _thread = new Thread(WorkerThread)
            {
                Name = "desktop" + _screenindex, IsBackground = true
            };

            _thread.Start();
        }
Example no. 23
        /// <summary>
        /// Start audio source.
        /// </summary>
        ///
        /// <remarks>Starts the audio source and returns execution to the caller. The audio source
        /// object creates a background thread and notifies about new audio data with the
        /// help of the <see cref="DataAvailable"/> event.</remarks>
        ///
        /// <exception cref="ArgumentException">Audio source is not specified.</exception>
        ///
        public void Start()
        {
            if (!IsRunning)
            {
                // check source
                if (string.IsNullOrEmpty(_source))
                {
                    throw new ArgumentException("Audio source is not specified.");
                }

                _res    = ReasonToFinishPlaying.DeviceLost;
                _thread = new Thread(StreamMP3)
                {
                    Name = "MP3 Audio Receiver (" + _source + ")"
                };
                _thread.Start();
            }
        }
Example no. 24
 public void Restart()
 {
     Debug.WriteLine("RESTART");
     _res = ReasonToFinishPlaying.Restart;
     if (_mediaPlayer != null)
     {
         if (!_mediaPlayer.IsPlaying)
         {
             _res = ReasonToFinishPlaying.DeviceLost;
             Start();
             return;
         }
         _commands.Enqueue("stop");
     }
     else
     {
         Start();
     }
 }
Example no. 25
 private void DoStart()
 {
     if (_stopping)
     {
         return;
     }
     try
     {
         _connecting = true;
         Init();
     }
     catch (Exception ex)
     {
         Logger.LogException(ex, "VLCStream");
         ErrorHandler?.Invoke("Invalid Source (" + Source + ")");
         _res = ReasonToFinishPlaying.VideoSourceError;
         //Cleanup();
     }
 }
Example no. 26
        /// <summary>
        ///     Start video source.
        /// </summary>
        /// <remarks>
        ///     Starts the video source and returns execution to the caller. The video source
        ///     object creates a background thread and notifies about new frames with the
        ///     help of the <see cref="NewFrame" /> event.
        /// </remarks>
        /// <exception cref="ArgumentException">Video source is not specified.</exception>
        public void Start()
        {
            if (!IsRunning)
            {
                // check source
                if (string.IsNullOrEmpty(_source.settings.videosourcestring))
                {
                    throw new ArgumentException("Video source is not specified.");
                }

                _res = ReasonToFinishPlaying.DeviceLost;

                // create and start new thread
                _thread = new Thread(WorkerThread)
                {
                    Name = _source.settings.videosourcestring, IsBackground = true
                };
                _thread.Start();
            }
        }
Example no. 27
        private void ShutDown(string errmsg)
        {
            bool err = !String.IsNullOrEmpty(errmsg);

            if (err)
            {
                _reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }

            if (IsFileSource && !err)
            {
                _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            }

            if (_vfr != null && _vfr.IsOpen)
            {
                try
                {
                    _vfr.Dispose(); //calls close
                }
                catch (Exception ex)
                {
                    MainForm.LogExceptionToFile(ex, "FFMPEG");
                }
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, _reasonToStop);
            }
            if (AudioFinished != null)
            {
                AudioFinished(this, _reasonToStop);
            }


            _stopEvent.Close();
            _stopEvent.Dispose();
            _stopEvent = null;
            _stopping  = false;
        }
Example no. 28
        /// <summary>
        /// Start audio source.
        /// </summary>
        ///
        /// <remarks>Starts the audio source and returns execution to the caller. The audio source
        /// object creates a background thread and notifies about new audio data with the
        /// help of the <see cref="DataAvailable"/> event.</remarks>
        ///
        /// <exception cref="ArgumentException">Audio source is not specified.</exception>
        ///
        public void Start()
        {
            if (!IsRunning)
            {
                // check source
                if (_socket == null)
                {
                    throw new ArgumentException("Audio source is not specified.");
                }

                _waveProvider  = new BufferedWaveProvider(RecordingFormat);
                _sampleChannel = new SampleChannel(_waveProvider);
                _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                _res    = ReasonToFinishPlaying.DeviceLost;
                _thread = new Thread(WebStreamListener)
                {
                    Name = "WebStream Audio Receiver"
                };
                _thread.Start();
            }
        }
Example no. 29
        // Worker thread
        private void WorkerThread( )
        {
            try
            {
                _camera.StartAcquisition( );

                // while there is no request for stop
                while (!_abort.WaitOne(10) && !MainForm.ShuttingDown)
                {
                    // start time
                    DateTime start = DateTime.Now;

                    // get next frame
                    if (ShouldEmitFrame)
                    {
                        using (var bitmap = _camera.GetImage(15000, false))
                        {
                            NewFrame?.Invoke(this, new NewFrameEventArgs(bitmap));
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Logger.LogException(ex, "XIMEA");
                _res = ReasonToFinishPlaying.VideoSourceError;
            }
            finally
            {
                try
                {
                    _camera?.StopAcquisition( );
                }
                catch
                {
                }
            }

            PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        }
Example no. 30
        /// <summary>
        /// Start video source.
        /// </summary>
        ///
        /// <remarks>Starts the video source and returns execution to the caller. The video source
        /// object creates a background thread and notifies about new frames with the
        /// help of the <see cref="NewFrame"/> event.</remarks>
        ///
        /// <exception cref="ArgumentException">Video source is not specified.</exception>
        ///
        public void Start()
        {
            if (!VlcHelper.VlcInstalled)
            {
                return;
            }

            if (IsRunning)
            {
                return;
            }

            _res = ReasonToFinishPlaying.DeviceLost;

            // create and start new thread

            _thread = new Thread(WorkerThread)
            {
                Name = Source, IsBackground = true
            };
            _thread.SetApartmentState(ApartmentState.MTA);
            _thread.Start();
        }
Example no. 31
 // On finishing video playing
 private void videoSourcePlayer_PlayingFinished( object sender, ReasonToFinishPlaying reason )
 {
     if ( arForm != null )
     {
         arForm.UpdateScene( null, new List<VirtualModel>( ) );
     }
 }
Example no. 32
        private void ShutDown(string errmsg)
        {
            bool err = !String.IsNullOrEmpty(errmsg);
            if (err)
            {

                _reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }

            if (IsFileSource && !err)
                _reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;

            try
            {
                _afr.Dispose();//calls close!
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex);
            }

            // release events
            if (_stopEvent != null)
            {
                _stopEvent.Close();
                _stopEvent.Dispose();
                _stopEvent = null;
            }

            if (AudioFinished != null)
                AudioFinished(this, _reasonToStop);

            if (_waveProvider != null)
            {
                if (_waveProvider.BufferedBytes > 0)
                    _waveProvider.ClearBuffer();
            }

            _isrunning = false;
        }
Example no. 33
        private void FfmpegListener()
        {
            _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            _vfr = null;
            bool open = false;
            string errmsg = "";
            _eventing = null;
            _stopping = false;
            try
            {
                Program.FfmpegMutex.WaitOne();
                _vfr = new VideoFileReader();

                //ensure http/https is lower case for string compare in ffmpeg library
                int i = _source.IndexOf("://", StringComparison.Ordinal);
                if (i > -1)
                {
                    _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
                }
                _vfr.Timeout = Timeout;
                _vfr.AnalyzeDuration = AnalyzeDuration;
                _vfr.Cookies = Cookies;
                _vfr.UserAgent = UserAgent;
                _vfr.Headers = Headers;
                _vfr.Flags = -1;
                _vfr.NoBuffer = true;
                _vfr.RTSPMode = RTSPMode;
                _vfr.Open(_source);
                open = true;
            }
            catch (Exception ex)
            {
                Logger.LogExceptionToFile(ex, "FFMPEG");
            }
            finally
            {
                try
                {
                    Program.FfmpegMutex.ReleaseMutex();
                }
                catch (ObjectDisposedException)
                {
                    //can happen on shutdown
                }
            }

            if (_vfr == null || !_vfr.IsOpen || !open)
            {
                ShutDown("Could not open stream" + ": " + _source);
                return;
            }

            bool hasaudio = false;

            if (_vfr.Channels > 0)
            {
                hasaudio = true;
                RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels);
                _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
                SampleChannel = new SampleChannel(_waveProvider);
                SampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
            }

            Duration = _vfr.Duration;

            _videoQueue = new ConcurrentQueue<Bitmap>();
            _audioQueue = new ConcurrentQueue<byte[]>();
            _eventing = new Thread(EventManager) { Name = "ffmpeg eventing", IsBackground = true };
            _eventing.Start();

            try
            {
                while (!_stopEvent.WaitOne(5) && !MainForm.ShuttingDown)
                {
                    var nf = NewFrame;
                    if (nf == null)
                        break;

                    object frame = _vfr.ReadFrame();
                    switch (_vfr.LastFrameType)
                    {
                        case 0:
                            //null packet
                            if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout)
                                throw new TimeoutException("Timeout reading from video stream");
                            break;
                        case 1:
                            LastFrame = DateTime.UtcNow;
                            if (hasaudio)
                            {
                                var data = frame as byte[];
                                if (data?.Length > 0)
                                {
                                    ProcessAudio(data);
                                }
                            }
                            break;
                        case 2:
                            LastFrame = DateTime.UtcNow;

                            var bmp = frame as Bitmap;
                            if (bmp != null)
                            {
                                if (_videoQueue.Count<20)
                                    _videoQueue.Enqueue(bmp);
                            }
                            break;
                    }
                }

            }
            catch (Exception e)
            {
                Logger.LogExceptionToFile(e, "FFMPEG");
                errmsg = e.Message;
            }

            _stopEvent.Set();
            _eventing.Join();

            if (SampleChannel != null)
            {
                SampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                SampleChannel = null;
            }

            if (_waveProvider?.BufferedBytes > 0)
                _waveProvider?.ClearBuffer();

            ShutDown(errmsg);
        }
Example no. 34
 // On finishing video playing
 private void videoSourcePlayer_PlayingFinished( object sender, ReasonToFinishPlaying reason )
 {
 }
Example no. 35
        /// <summary>
        /// If the video stops for any reason, the file should keep whatever little was recorded up to that point.
        /// The exception handler is there because if the record button was never pressed, the recording resource was never created.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void finVideo(object sender, ReasonToFinishPlaying e)
        {
            try
            {
                this.grabacion.Close();
            }
            catch (NullReferenceException error) { }

            /* If the device is disconnected, show the error */
            if (e == ReasonToFinishPlaying.DeviceLost)
            {
                MessageBox.Show("Se ha perdido la comunicación con el dispositivo", "Dispositivo desconectado", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Example no. 36
 private void _talkSource_AudioFinished(object sender, ReasonToFinishPlaying reason)
 {
     //LogMessageToFile("Talk Finished: " + reason);
 }
Example no. 37
        private void VideoDeviceVideoFinished(object sender, ReasonToFinishPlaying reason)
        {
            if (IsReconnect)
                return;

            switch (reason)
            {
                case ReasonToFinishPlaying.DeviceLost:
                    SetErrorState("Device Lost");
                    break;
                case ReasonToFinishPlaying.EndOfStreamReached:
                    SetErrorState("End of Stream");
                    break;
                case ReasonToFinishPlaying.VideoSourceError:
                    SetErrorState("Source Error");
                    break;
                case ReasonToFinishPlaying.StoppedByUser:
                    Disable(false);
                    break;
            }

            //LastFrame = null;
        }
Example no. 38
        // Video source has finished playing video
        private void videoSource_PlayingFinished( object sender, ReasonToFinishPlaying reason )
        {
            switch ( reason )
            {
                case ReasonToFinishPlaying.EndOfStreamReached:
                    lastMessage = "Video has finished";
                    break;

                case ReasonToFinishPlaying.StoppedByUser:
                    lastMessage = "Video was stopped";
                    break;

                case ReasonToFinishPlaying.DeviceLost:
                    lastMessage = "Video device was unplugged";
                    break;

                case ReasonToFinishPlaying.VideoSourceError:
                    lastMessage = "Video has finished because of error in video source";
                    break;

                default:
                    lastMessage = "Video has finished for unknown reason";
                    break;
            }
            Invalidate( );

            // notify users
            if ( PlayingFinished != null )
            {
                PlayingFinished( this, reason );
            }
        }
Example no. 39
 void videoSource_PlayingFinished(object sender, ReasonToFinishPlaying reason)
 {
   log.Info("videoSource_PlayingFinished: Event occurred");
 }
Example no. 40
        // Video source has finished playing video
        private void videoSource_PlayingFinished( object sender, ReasonToFinishPlaying reason )
        {
            switch ( reason )
            {
                case ReasonToFinishPlaying.EndOfStreamReached:
                    lastMessage = "Video has finished";
                    break;

                case ReasonToFinishPlaying.StoppedByUser:
                    lastMessage = "Video was stopped";
                    break;

                default:
                    lastMessage = "Video has finished for unknown reason";
                    break;
            }
            Invalidate( );
        }
Example no. 41
 private void VideoSource_PlayingFinished(object sender, ReasonToFinishPlaying reason)
 {
     _videoSourceTime.Stop();
     _videoSourceTime.Dispose();
     _videoSourceTime = new Timer();
 }
Example no. 42
        private void ShutDown(string errmsg)
        {
            bool err = !string.IsNullOrEmpty(errmsg);
            if (err)
            {

                _reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }

            if (IsFileSource && !err)
                _reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;

            if (_afr != null && _afr.IsOpen)
            {
                try
                {
                    _afr.Dispose(); //calls close
                }
                catch (Exception ex)
                {
                    Logger.LogExceptionToFile(ex, "FFMPEG");
                }
            }

            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_reasonToStop));
        }
Example no. 43
 /// <summary>
 /// Initializes a new instance of the <see cref="PlayingFinishedEventArgs"/> class.
 /// </summary>
 /// 
 /// <param name="reason">Reason</param>
 /// 
 public PlayingFinishedEventArgs(ReasonToFinishPlaying reason)
 {
     ReasonToFinishPlaying = reason;
 }
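Several snippets raise the event as PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res)). The following is a minimal sketch of the event-args class and the matching handler signature inferred from that usage (the property comes from the constructor above; the delegate shape is an assumption):

 public class PlayingFinishedEventArgs : EventArgs
 {
     public PlayingFinishedEventArgs(ReasonToFinishPlaying reason)
     {
         ReasonToFinishPlaying = reason;
     }

     // Why the source stopped playing.
     public ReasonToFinishPlaying ReasonToFinishPlaying { get; }
 }

 // Newer-style handler that carries the reason inside event args.
 public delegate void PlayingFinishedEventHandler(object sender, PlayingFinishedEventArgs e);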
Example no. 44
        private void FfmpegListener()
        {
            _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            _afr = null;
            bool open = false;
            string errmsg = "";

            try
            {
                Program.FfmpegMutex.WaitOne();
                _afr = new AudioFileReader();
                int i = _source.IndexOf("://", StringComparison.Ordinal);
                if (i>-1)
                {
                    _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
                }
                _afr.Timeout = Timeout;
                _afr.AnalyzeDuration = AnalyseDuration;
                _afr.Headers = Headers;
                _afr.Cookies = Cookies;
                _afr.UserAgent = UserAgent;
                _afr.Open(_source);
                open = true;
            }
            catch (Exception ex)
            {
                Logger.LogExceptionToFile(ex,"FFMPEG");
            }
            finally
            {
                try
                {
                    Program.FfmpegMutex.ReleaseMutex();
                }
                catch (ObjectDisposedException)
                {
                    //can happen on shutdown
                }
            }

            if (_afr == null || !_afr.IsOpen || !open)
            {
                ShutDown("Could not open audio stream" + ": " + _source);
                return;
            }

            RecordingFormat = new WaveFormat(_afr.SampleRate, 16, _afr.Channels);
            _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };

            _sampleChannel = new SampleChannel(_waveProvider);
            _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;

            LastFrame = DateTime.UtcNow;

            try
            {
                while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
                {
                    byte[] data = _afr.ReadAudioFrame();
                    if (data!=null && data.Length > 0)
                    {
                        LastFrame = DateTime.UtcNow;
                        var da = DataAvailable;
                        if (da != null)
                        {
                            //forces processing of volume level without piping it out
                            _waveProvider.AddSamples(data, 0, data.Length);

                            var sampleBuffer = new float[data.Length];
                            int read = _sampleChannel.Read(sampleBuffer, 0, data.Length);

                            da(this, new DataAvailableEventArgs((byte[])data.Clone(),read));

                            if (Listening)
                            {
                                WaveOutProvider?.AddSamples(data, 0, read);
                            }

                        }

                        if (_stopEvent.WaitOne(30, false))
                            break;

                    }
                    else
                    {
                        if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout)
                        {
                            throw new Exception("Audio source timeout");
                        }
                        if (_stopEvent.WaitOne(30, false))
                            break;
                    }

                }

            }
            catch (Exception e)
            {
                Logger.LogExceptionToFile(e,"FFMPEG");
                errmsg = e.Message;
            }

            if (_sampleChannel != null)
            {
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                _sampleChannel = null;
            }

            if (_waveProvider?.BufferedBytes > 0)
                _waveProvider.ClearBuffer();

            if (WaveOutProvider?.BufferedBytes > 0) WaveOutProvider?.ClearBuffer();

            ShutDown(errmsg);
        }
Example no. 45
        private void ShutDown(string errmsg)
        {
            bool err = !String.IsNullOrEmpty(errmsg);
            if (err)
            {

                _reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }

            if (IsFileSource && !err)
                _reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;

            if (_afr != null && _afr.IsOpen)
            {
                try
                {
                    _afr.Dispose(); //calls close
                }
                catch (Exception ex)
                {
                    MainForm.LogExceptionToFile(ex, "FFMPEG");
                }
            }

            if (AudioFinished != null)
                AudioFinished(this, _reasonToStop);
        }
Example no. 46
        private void ShutDown(string errmsg)
        {
            bool err = !String.IsNullOrEmpty(errmsg);
            if (err)
            {
                _reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }

            if (IsFileSource && !err)
                _reasonToStop = ReasonToFinishPlaying.StoppedByUser;

            if (_vfr != null && _vfr.IsOpen)
            {
                try
                {
                    _vfr.Dispose(); //calls close
                }
                catch (Exception ex)
                {
                    MainForm.LogExceptionToFile(ex, "FFMPEG");
                }
            }

            if (PlayingFinished != null)
                PlayingFinished(this, _reasonToStop);
            if (AudioFinished != null)
                AudioFinished(this, _reasonToStop);

            _stopEvent.Close();
            _stopEvent.Dispose();
            _stopEvent = null;
            _stopping = false;
        }
Example no. 47
 private void videoSource_PlayingFinished(object sender, ReasonToFinishPlaying reason)
 {
     if (PlayingFinished != null)
     {
         if (SynchronizingObject != null &&
             SynchronizingObject.InvokeRequired)
         {
             SynchronizingObject.BeginInvoke(
                 PlayingFinished, new object[] { this, reason });
         }
         else
         {
             PlayingFinished(this, reason);
         }
     }
 }
Example no. 48
        public void AudioDeviceAudioFinished(object sender, ReasonToFinishPlaying reason)
        {
            if (IsReconnect)
                return;

            if (IsClone)
            {
                SetErrorState("Mic source offline");
                Levels = null;

                if (!ShuttingDown)
                    _requestRefresh = true;

                return;
            }

            switch (reason)
            {
                case ReasonToFinishPlaying.DeviceLost:
                    SetErrorState("Device Lost");
                    break;
                case ReasonToFinishPlaying.EndOfStreamReached:
                    SetErrorState("End of Stream");
                    break;
                case ReasonToFinishPlaying.VideoSourceError:
                    SetErrorState("Source Error");
                    break;
                case ReasonToFinishPlaying.StoppedByUser:
                    Disable(false);
                    break;
            }

            Levels = null;
        }
Example no. 49
 private void SourcePlayingFinished(object sender, ReasonToFinishPlaying reason)
 {
     Camobject.settings.active = false;
     Invalidate();
 }
Example no. 50
        private void SourcePlayingFinished(object sender, ReasonToFinishPlaying reason)
        {
            if (IsReconnect)
                return;
            switch (reason)
            {
                case ReasonToFinishPlaying.DeviceLost:
                case ReasonToFinishPlaying.EndOfStreamReached:
                case ReasonToFinishPlaying.VideoSourceError:
                    if (!VideoSourceErrorState)
                    {
                        VideoSourceErrorState = true;
                        Log.Error("Camera " + Camobject.id, new Exception("VideoSourceFinished: " + reason));
                        _reconnectTime = DateTime.Now;
                        if (_errorTime == DateTime.MinValue)
                            _errorTime = DateTime.Now;
                        _camera.LastFrameNull = true;

                        if (VolumeControl != null && VolumeControl.AudioSource != null)
                        {
                            VolumeControl.AudioSource.Stop();
                            VolumeControl.AudioSourceErrorState = true;
                        }
                    }
                    break;
                case ReasonToFinishPlaying.StoppedByUser:
                    Camobject.settings.active = false;
                    break;
            }
            if (!ShuttingDown)
                Invalidate();
        }
Example no. 51
        private void FfmpegListener()
        {
            _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            _afr = null;
            bool open = false;
            string errmsg = "";

            try
            {
                Program.FFMPEGMutex.WaitOne();
                _afr = new AudioFileReader();
                int i = _source.IndexOf("://", StringComparison.Ordinal);
                if (i > -1)
                {
                    _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
                }
                _afr.Timeout = Timeout;
                _afr.AnalyzeDuration = AnalyseDuration;
                _afr.Open(_source);

                open = true;
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex, "FFMPEG");
            }
            finally
            {
                try
                {
                    Program.FFMPEGMutex.ReleaseMutex();
                }
                catch (ObjectDisposedException)
                {
                    //can happen on shutdown
                }
            }

            if (_afr == null || !_afr.IsOpen || !open)
            {
                ShutDown("Could not open audio stream" + ": " + _source);
                return;
            }

            RecordingFormat = new WaveFormat(_afr.SampleRate, 16, _afr.Channels);
            _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };

            _sampleChannel = new SampleChannel(_waveProvider);
            _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;

            int mult = _afr.BitsPerSample / 8;
            double btrg = Convert.ToDouble(_afr.SampleRate * mult * _afr.Channels);
            LastFrame = DateTime.UtcNow;
            bool realTime = !IsFileSource;

            try
            {
                DateTime req = DateTime.UtcNow;
                while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
                {
                    byte[] data = _afr.ReadAudioFrame();
                    if (data == null || data.Length == 0)
                    {
                        if (!realTime)
                        {
                            break;
                        }
                    }
                    if (data != null && data.Length > 0)
                    {
                        LastFrame = DateTime.UtcNow;
                        var da = DataAvailable;
                        if (da != null)
                        {
                            //forces processing of volume level without piping it out
                            _waveProvider.AddSamples(data, 0, data.Length);

                            var sampleBuffer = new float[data.Length];
                            _sampleChannel.Read(sampleBuffer, 0, data.Length);

                            da(this, new DataAvailableEventArgs((byte[])data.Clone()));

                            if (WaveOutProvider != null && Listening)
                            {
                                WaveOutProvider.AddSamples(data, 0, data.Length);
                            }

                        }

                        if (realTime)
                        {
                            if (_stopEvent.WaitOne(30, false))
                                break;
                        }
                        else
                        {
                            //
                            double f = (data.Length / btrg) * 1000;
                            if (f > 0)
                            {
                                var span = DateTime.UtcNow.Subtract(req);
                                var msec = Convert.ToInt32(f - (int)span.TotalMilliseconds);
                                if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                                    break;
                                req = DateTime.UtcNow;
                            }
                        }
                    }
                    else
                    {
                        if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout)
                        {
                            throw new Exception("Audio source timeout");
                        }
                        if (_stopEvent.WaitOne(30, false))
                            break;
                    }

                }

            }
            catch (Exception e)
            {
                MainForm.LogExceptionToFile(e, "FFMPEG");
                errmsg = e.Message;
            }

            if (_sampleChannel != null)
            {
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                _sampleChannel = null;
            }

            if (_waveProvider != null)
            {
                if (_waveProvider.BufferedBytes > 0)
                    _waveProvider.ClearBuffer();
            }

            ShutDown(errmsg);
        }
Example no. 52
        private void ShutDown(string errmsg)
        {
            bool err=!string.IsNullOrEmpty(errmsg);
            if (err)
            {
                _reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }

            try
            {
                if (_vfr != null && _vfr.IsOpen)
                {
                    _vfr?.Dispose(); //calls close
                }
            }
            catch (Exception ex)
            {
                Logger.LogExceptionToFile(ex, "FFMPEG");
            }

            PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_reasonToStop));
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_reasonToStop));

            _stopEvent.Close();
            _stopEvent = null;
            _stopping = false;
        }
Example no. 53
        private void HandleFinishedPlaying(object sender, ReasonToFinishPlaying e)
        {
            string dateTime = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss");

            Bitmap bitmap = new Bitmap(CameraResolution.Width, CameraResolution.Height);
            Graphics graphics = Graphics.FromImage(bitmap);

            graphics.FillRectangle(Brushes.Blue, frameRectF);

            Overlays.Draw(ref bitmap, dateTime + "\nNO SIGNAL");

            if (FrameUpdate != null)
            {
                CurrentFrame = (Bitmap)bitmap.Clone();

                frameUpdateEventArgs.Frame = bitmap;
                frameUpdateEventArgs.IsError = true;

                FrameUpdate(this, frameUpdateEventArgs);
            }
        }
Example no. 54
 void VideoSourcePlayingFinished(object sender, ReasonToFinishPlaying reason)
 {
     if (PlayingFinished != null)
         PlayingFinished(sender, reason);
 }
 public void InvokePlayingFinished(ReasonToFinishPlaying reason)
 {
     PlayingFinishedEventHandler handler = PlayingFinished;
     if (handler != null) handler(this, reason);
 }