// Audio capture loop for the Kinect sensor: pumps raw samples from the
// sensor's audio stream into the wave provider until aborted, then tears the
// sensor down and raises the completion event.
private void AudioThread()
{
    _abort = new ManualResetEvent(false);
    // Pump until Stop() signals the abort event or the application is closing.
    while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
    {
        // Blocking read from the Kinect audio stream.
        int dataLength = _audioStream.Read(_audioBuffer, 0, _audioBuffer.Length);
        // Only do the (relatively expensive) sample processing when someone
        // is actually subscribed to DataAvailable.
        if (DataAvailable != null)
        {
            _waveProvider.AddSamples(_audioBuffer, 0, dataLength);
            if (Listening)
            {
                // Mirror samples to the listen-through output as well.
                WaveOutProvider.AddSamples(_audioBuffer, 0, dataLength);
            }
            //forces processing of volume level without piping it out
            var sampleBuffer = new float[dataLength];
            int read = _sampleChannel.Read(sampleBuffer, 0, dataLength);
            // Hand subscribers their own copy of the buffer; 'read' is the
            // number of samples pulled through the metering channel.
            DataAvailable?.Invoke(this, new DataAvailableEventArgs((byte[])_audioBuffer.Clone(), read));
        }
    }
    // Teardown: stop and release the sensor, detaching all frame handlers so
    // the sensor object cannot keep this instance alive.
    try
    {
        if (_sensor != null)
        {
            _sensor.AudioSource?.Stop();
            _sensor.Stop();
            _sensor.SkeletonFrameReady -= SensorSkeletonFrameReady;
            _sensor.ColorFrameReady -= SensorColorFrameReady;
            _sensor.DepthFrameReady -= SensorDepthFrameReady;
            _sensor.Dispose();
            _sensor = null;
        }
    }
    catch
    {
        // ignored - best-effort shutdown; the sensor may already be gone.
    }
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
    }
    if (_waveProvider != null && _waveProvider.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    Listening = false;
    // Report why playback ended (_res is set elsewhere, e.g. by Stop()).
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    _abort.Close();
}
// Video source has finished playing video: record a human-readable status
// message, repaint, and forward the event to subscribers.
private void VideoSource_PlayingFinished(object sender, ReasonToFinishPlaying reason)
{
    if (reason == ReasonToFinishPlaying.EndOfStreamReached)
    {
        lastMessage = "Video has finished";
    }
    else if (reason == ReasonToFinishPlaying.StoppedByUser)
    {
        lastMessage = "Video was stopped";
    }
    else if (reason == ReasonToFinishPlaying.DeviceLost)
    {
        lastMessage = "Video device was unplugged";
    }
    else if (reason == ReasonToFinishPlaying.VideoSourceError)
    {
        lastMessage = "Video has finished because of error in video source";
    }
    else
    {
        lastMessage = "Video has finished for unknown reason";
    }
    Invalidate();
    // notify users
    PlayingFinished?.Invoke(this, reason);
}
// Playback-finished handler: maps the finish reason to a (localized) status
// message, repaints, and re-raises the event for subscribers.
private void VideoSourcePlayingFinished(object sender, ReasonToFinishPlaying reason)
{
    if (reason == ReasonToFinishPlaying.EndOfStreamReached)
    {
        lastMessage = "Показ видео окончен!";
    }
    else if (reason == ReasonToFinishPlaying.StoppedByUser)
    {
        lastMessage = "Показ видео остановлен!";
    }
    else if (reason == ReasonToFinishPlaying.DeviceLost)
    {
        lastMessage = "Устройство отключено!";
    }
    else if (reason == ReasonToFinishPlaying.VideoSourceError)
    {
        lastMessage = "Ошибка источника видео!";
    }
    else
    {
        lastMessage = "Неизвестная ошибка!";
    }
    Invalidate();
    PlayingFinished?.Invoke(this, reason);
}
// Shuts the reader down and raises completion events for both the video and
// audio streams. A non-empty errmsg marks the source as lost first.
private void ShutDown(string errmsg)
{
    if (!string.IsNullOrEmpty(errmsg))
    {
        _reasonToStop = ReasonToFinishPlaying.DeviceLost;
    }
    try
    {
        // Snapshot the field so the null/IsOpen check and Dispose act on the
        // same instance.
        var reader = _vfr;
        if (reader != null && reader.IsOpen)
        {
            reader.Dispose(); //calls close
        }
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "FFMPEG");
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_reasonToStop));
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_reasonToStop));
    _stopEvent.Close();
    _stopEvent = null;
    _stopping = false;
}
// Capture loop: repeatedly grabs the configured screen region into a reused
// bitmap and raises NewFrame, pacing itself by frameInterval, until the stop
// event is signalled.
private void WorkerThread()
{
    int captureWidth = region.Width;
    int captureHeight = region.Height;
    int originX = region.Location.X;
    int originY = region.Location.Y;
    Size captureSize = region.Size;
    var frame = new Bitmap(captureWidth, captureHeight, PixelFormat.Format32bppArgb);
    var surface = Graphics.FromImage(frame);
    while (!stopEvent.WaitOne(0, false))
    {
        DateTime grabStarted = DateTime.Now;
        try
        {
            surface.CopyFromScreen(originX, originY, 0, 0, captureSize, CopyPixelOperation.SourceCopy);
            framesReceived++;
            NewFrame?.Invoke(this, new NewFrameEventArgs(frame));
            if (frameInterval > 0)
            {
                // Sleep off whatever remains of the frame interval; if the
                // stop event fires during the wait, leave the loop.
                int remaining = frameInterval - (int)DateTime.Now.Subtract(grabStarted).TotalMilliseconds;
                if ((remaining > 0) && (stopEvent.WaitOne(remaining, false)))
                {
                    break;
                }
            }
        }
        catch (ThreadAbortException)
        {
            break;
        }
        catch (Exception exception)
        {
            // Report the failure and back off briefly before retrying.
            VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(exception.Message));
            Thread.Sleep(250);
        }
        if (stopEvent.WaitOne(0, false))
        {
            break;
        }
    }
    surface.Dispose();
    frame.Dispose();
    PlayingFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser);
}
// Worker thread: acquires frames from the XIMEA camera until stopped or a
// source error occurs, then reports why playback ended.
private void WorkerThread( )
{
    var stopReason = ReasonToFinishPlaying.StoppedByUser;
    try
    {
        _camera.StartAcquisition( );
        // Run until a stop is requested.
        while (!_stopEvent.WaitOne(0, false))
        {
            DateTime cycleStart = DateTime.Now;
            // Grab the next frame (up to 15s timeout) and publish it.
            Bitmap frame = _camera.GetImage(15000, false);
            _framesReceived++;
            _bytesReceived += frame.Width * frame.Height * (Image.GetPixelFormatSize(frame.PixelFormat) >> 3);
            NewFrame?.Invoke(this, new NewFrameEventArgs(frame));
            frame.Dispose( );
            if (_frameInterval > 0)
            {
                // Pace the loop; a signalled stop event during the wait exits.
                int remaining = _frameInterval - (int)DateTime.Now.Subtract(cycleStart).TotalMilliseconds;
                if ((remaining > 0) && (_stopEvent.WaitOne(remaining, false)))
                {
                    break;
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "XIMEA");
        stopReason = ReasonToFinishPlaying.VideoSourceError;
    }
    finally
    {
        try
        {
            _camera?.StopAcquisition( );
        }
        catch
        {
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(stopReason));
}
/// <summary>
/// Connects to the roboRIO camera server over TCP, sends the fps /
/// compression / resolution handshake, then reads frames until cancelled.
/// Raises PlayingFinished (and VideoSourceError on failure) when done.
/// </summary>
/// <param name="token">Cancels the read loop; cancellation reports
/// StoppedByUser rather than an error.</param>
private async Task ProcessVideoAsync(CancellationToken token)
{
    await Task.Yield();
    try
    {
        using (Socket clientSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
        {
            await Task.Factory.FromAsync(clientSocket.BeginConnect, clientSocket.EndConnect, $"roborio-{teamNumber}-FRC.local", CameraServerPort, null);
            using (var socketStream = new NetworkStream(clientSocket))
            {
                // FIX: NetworkStream.ReadAsync may return fewer bytes than
                // requested; the previous code assumed each 4-byte header
                // arrived in one read. Loop until 'count' bytes are in, and
                // report false if the server closes the connection.
                async Task<bool> TryReadExactAsync(byte[] target, int count)
                {
                    int offset = 0;
                    while (offset < count)
                    {
                        int read = await socketStream.ReadAsync(target, offset, count - offset, token);
                        if (read == 0)
                        {
                            return false; // remote end closed the stream
                        }
                        offset += read;
                    }
                    return true;
                }

                // Handshake: requested fps, compression mode and frame size,
                // each sent as a 4-byte big-endian integer.
                await socketStream.WriteAsync(BitConverter.GetBytes(IPAddress.HostToNetworkOrder(framesPerSecond)), 0, 4, token);
                bytesReceived += 4;
                await socketStream.WriteAsync(BitConverter.GetBytes(IPAddress.HostToNetworkOrder(HwCompressionId)), 0, 4, token);
                bytesReceived += 4;
                await socketStream.WriteAsync(BitConverter.GetBytes(IPAddress.HostToNetworkOrder(Size640x480)), 0, 4, token);
                bytesReceived += 4;
                while (true)
                {
                    token.ThrowIfCancellationRequested();
                    var magicToken = new byte[4];
                    if (!await TryReadExactAsync(magicToken, 4))
                    {
                        return; // connection closed by server
                    }
                    bytesReceived += 4;
                    if (BitConverter.ToInt32(magicToken, 0) != 0x1000)
                    {
                        //Magic token did not match
                        return;
                    }
                    var imageLengthBytes = new byte[4];
                    if (!await TryReadExactAsync(imageLengthBytes, 4))
                    {
                        return; // connection closed by server
                    }
                    bytesReceived += 4;
                    // Bitmap decodes the JPEG payload directly off the stream.
                    using (var frame = new System.Drawing.Bitmap(socketStream))
                    {
                        NewFrame?.Invoke(this, new NewFrameEventArgs(frame));
                    }
                    bytesReceived += IPAddress.NetworkToHostOrder(BitConverter.ToInt32(imageLengthBytes, 0));
                    framesReceived++;
                }
            }
        }
    }
    catch (OperationCanceledException)
    {
        PlayingFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser);
    }
    catch (Exception ex)
    {
        PlayingFinished?.Invoke(this, ReasonToFinishPlaying.VideoSourceError);
        VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(ex.Message));
    }
}
// Resets the connection-lifecycle flags and notifies subscribers that both
// the video and audio streams have finished.
private void Cleanup()
{
    Debug.WriteLine("CLEANUP");
    // Clear state flags before raising the completion events.
    _quit = false;
    _connecting = false;
    IsRunning = false;
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
// Worker thread: acquires frames from the XIMEA camera, optionally emitting
// them, and paces the loop by FrameInterval until aborted or the app closes.
private void WorkerThread( )
{
    try
    {
        _camera.StartAcquisition( );
        // while there is no request for stop
        while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
        {
            // start time
            // FIX: this was DateTime.Now but is subtracted from
            // DateTime.UtcNow below, which skewed the computed wait by the
            // local UTC offset. Use UTC consistently.
            DateTime start = DateTime.UtcNow;
            // get next frame
            if (EmitFrame)
            {
                using (var bitmap = _camera.GetImage(15000, false))
                {
                    NewFrame?.Invoke(this, new NewFrameEventArgs(bitmap));
                }
            }
            // wait for a while ?
            if (FrameInterval > 0)
            {
                // get download duration
                var span = DateTime.UtcNow.Subtract(start);
                // milliseconds to sleep
                var msec = FrameInterval - (int)span.TotalMilliseconds;
                if (msec > 0)
                {
                    _abort.WaitOne(msec);
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex, "XIMEA");
        _res = ReasonToFinishPlaying.VideoSourceError;
    }
    finally
    {
        try
        {
            _camera?.StopAcquisition( );
        }
        catch
        {
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
/// <summary>
/// Stop video source.
/// </summary>
/// <remarks>
/// <para>Stops video source aborting its thread.</para>
/// <para>
/// <note>
/// Since the method aborts background thread, its usage is highly not preferred
/// and should be done only if there are no other options. The correct way of stopping camera
/// is <see cref="SignalToStop">signaling it stop</see> and then
/// <see cref="WaitForStop">waiting</see> for background thread's completion.
/// </note>
/// </para>
/// </remarks>
public void Stop()
{
    // Both branches stop for the same reason; record it once.
    _res = ReasonToFinishPlaying.StoppedByUser;
    if (IsRunning)
    {
        // Worker thread is live: signal it and let it raise PlayingFinished.
        _abort.Set();
    }
    else
    {
        // No worker to signal: raise the completion event directly.
        PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    }
}
// Worker thread: polls the XIMEA camera every 10ms (the WaitOne(10) doubles
// as the pacing delay) and emits frames while ShouldEmitFrame is set.
private void WorkerThread( )
{
    try
    {
        _camera.StartAcquisition( );
        // while there is no request for stop
        while (!_abort.WaitOne(10) && !MainForm.ShuttingDown)
        {
            // FIX: removed dead code - a 'start' timestamp was captured on
            // every iteration but never read (this variant has no
            // frame-interval logic).
            // get next frame
            if (ShouldEmitFrame)
            {
                using (var bitmap = _camera.GetImage(15000, false))
                {
                    NewFrame?.Invoke(this, new NewFrameEventArgs(bitmap));
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex, "XIMEA");
        _res = ReasonToFinishPlaying.VideoSourceError;
    }
    finally
    {
        try
        {
            _camera?.StopAcquisition( );
        }
        catch
        {
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
/// <summary>
/// Worker thread. Runs the DirectShow graph and polls the media event
/// interface until the stream completes or a stop is requested, then tears
/// the filter graph down and raises PlayingFinished with the reason.
/// </summary>
///
private void WorkerThread()
{
    ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    try
    {
        // run
        mediaControl.Run();
        while (!stopEvent.WaitOne(0, true))
        {
            // Poll roughly 10x/second for graph events.
            Thread.Sleep(100);
            if (mediaEvent != null)
            {
                if (mediaEvent.GetEvent(out DsEvCode code, out IntPtr p1, out IntPtr p2, 0) >= 0)
                {
                    // Event parameters must always be freed after GetEvent.
                    mediaEvent.FreeEventParams(code, p1, p2);
                    if (code == DsEvCode.Complete)
                    {
                        // Natural end of stream - stop with that reason.
                        reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                        break;
                    }
                }
            }
        }
        mediaControl.StopWhenReady();
    }
    catch (Exception exception)
    {
        // provide information to clients
        VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(exception.Message));
    }
    finally
    {
        // Always release the COM filter graph, even on error.
        DestroyFilters();
    }
    PlayingFinished?.Invoke(this, reasonToStop);
}
// Worker thread: repeatedly downloads a single JPEG snapshot from the HTTP
// source (with a cache-busting query parameter), decodes it, and raises
// NewFrame. Gives up and reports DeviceLost after 4 consecutive failures.
private void WorkerThread()
{
    _abort = new ManualResetEvent(false);
    // buffer to read stream
    var buffer = new byte[BufferSize];
    // HTTP web request
    HttpWebRequest request = null;
    // web responce
    WebResponse response = null;
    // stream for JPEG downloading
    Stream stream = null;
    // random generator to add fake parameter for cache preventing
    var rand = new Random((int)DateTime.UtcNow.Ticks);
    // download start time and duration
    var err = 0;
    var connectionFactory = new ConnectionFactory();
    // Poll every 10ms until aborted or the application shuts down.
    while (!_abort.WaitOne(10) && !MainForm.ShuttingDown)
    {
        var total = 0;
        if (ShouldEmitFrame)
        {
            try
            {
                // set download start time
                // NOTE(review): 'start' is never read afterwards - verify
                // whether interval pacing was intended here.
                var start = DateTime.UtcNow;
                var vss = Tokenise(_source.settings.videosourcestring);
                // Append a random "fake" parameter so proxies/caches don't
                // serve a stale snapshot.
                var url = vss + (vss.IndexOf('?') == -1 ? '?' : '&') + "fake=" + rand.Next();
                response = connectionFactory.GetResponse(url, _cookies, _headers, _httpUserAgent, _login, _password, "GET", "", "", _useHttp10, out request);
                // get response stream
                if (response == null)
                {
                    throw new Exception("Connection failed");
                }
                stream = response.GetResponseStream();
                stream.ReadTimeout = _requestTimeout;
                bool frameComplete = false;
                // loop: accumulate the whole response body into 'buffer'.
                while (!_abort.WaitOne(0))
                {
                    // check total read - wrap back to the start if the next
                    // chunk would overflow the buffer.
                    if (total > BufferSize - ReadSize)
                    {
                        total = 0;
                    }
                    // read next portion from stream; 0 bytes = end of body.
                    int read;
                    if ((read = stream.Read(buffer, total, ReadSize)) == 0)
                    {
                        frameComplete = true;
                        break;
                    }
                    total += read;
                }
                // provide new image to clients
                if (frameComplete && NewFrame != null)
                {
                    using (var ms = new MemoryStream(buffer, 0, total))
                    {
                        using (var bitmap = (Bitmap)Image.FromStream(ms))
                        {
                            NewFrame(this, new NewFrameEventArgs(bitmap));
                        }
                    }
                }
                // Successful fetch resets the consecutive-error counter.
                err = 0;
            }
            catch (ThreadAbortException)
            {
                break;
            }
            catch (Exception ex)
            {
                Logger.LogException(ex, "JPEG");
                err++;
                if (err > 3)
                {
                    // Too many consecutive failures - treat the device as lost.
                    _res = ReasonToFinishPlaying.DeviceLost;
                    break;
                }
                // Back off briefly (abortable) before retrying.
                _abort.WaitOne(250);
            }
            finally
            {
                // Best-effort teardown of this attempt's connection objects.
                request?.Abort();
                stream?.Flush();
                stream?.Close();
                response?.Close();
            }
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    _abort.Close();
}
// Releases all FFmpeg native resources (format context, codecs, hardware
// device, resampler, scaler) and the managed audio metering hookup, then
// raises the finished events. Serialized via the global mutex because
// FFmpeg open/close calls are not safe to interleave across threads here.
private void CleanUp()
{
    try
    {
        Program.MutexHelper.Wait();
        if (pConvertedFrameBuffer != IntPtr.Zero)
        {
            // Buffer was allocated with AllocHGlobal; free and clear it.
            Marshal.FreeHGlobal(pConvertedFrameBuffer);
            pConvertedFrameBuffer = IntPtr.Zero;
        }
        if (_formatContext != null)
        {
            if (_formatContext->streams != null)
            {
                // Close every stream codec (in reverse order), discarding any
                // pending packets first.
                var j = (int)_formatContext->nb_streams;
                for (var i = j - 1; i >= 0; i--)
                {
                    var stream = _formatContext->streams[i];
                    if (stream != null && stream->codec != null && stream->codec->codec != null)
                    {
                        stream->discard = AVDiscard.AVDISCARD_ALL;
                        ffmpeg.avcodec_close(stream->codec);
                    }
                }
            }
            // avformat_close_input needs the address of the pointer so it can
            // null it out; hence the fixed block.
            fixed(AVFormatContext **f = &_formatContext)
            {
                ffmpeg.avformat_close_input(f);
            }
            _formatContext = null;
        }
        if (_hwDeviceCtx != null)
        {
            // av_buffer_unref takes a pointer-to-pointer; use a local copy.
            var f = _hwDeviceCtx;
            ffmpeg.av_buffer_unref(&f);
            _hwDeviceCtx = null;
        }
        // These were views into the (now closed) format context.
        _videoStream = null;
        _audioStream = null;
        _audioCodecContext = null;
        _videoCodecContext = null;
        if (_swrContext != null)
        {
            // Free the audio resampler.
            fixed(SwrContext **s = &_swrContext)
            {
                ffmpeg.swr_free(s);
            }
            _swrContext = null;
        }
        if (pConvertContext != null)
        {
            // Free the pixel-format/scaling context.
            ffmpeg.sws_freeContext(pConvertContext);
            pConvertContext = null;
        }
        if (sampleChannel != null)
        {
            // Detach the volume meter so the channel can be collected.
            sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
            sampleChannel = null;
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex, SourceName + ": Media Stream (close)");
    }
    finally
    {
        // Always release the global mutex, even if Wait() itself failed.
        try
        {
            Program.MutexHelper.Release();
        }
        catch
        {
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
// Worker thread: captures the selected desktop screen (or sub-area) into a
// 24bpp bitmap, optionally draws the mouse cursor, raises NewFrame, and
// paces itself by _frameInterval. Any capture failure is logged once and
// ends the loop with DeviceLost.
private void WorkerThread()
{
    var res = ReasonToFinishPlaying.StoppedByUser;
    while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
    {
        try
        {
            DateTime start = DateTime.UtcNow;
            // increment frames counter
            _framesReceived++;
            // provide new image to clients
            if (NewFrame != null)
            {
                Screen s = Screen.AllScreens[_screenindex];
                // Lazily resolve the capture rectangle on the first frame:
                // an explicit area wins, otherwise the full screen bounds.
                if (_screenSize == Rectangle.Empty)
                {
                    if (_area != Rectangle.Empty)
                    {
                        _screenSize = _area;
                    }
                    else
                    {
                        _screenSize = s.Bounds;
                        //virtual clients can have odd dimensions
                        // Round odd dimensions down to even (encoder-friendly).
                        if (_screenSize.Width % 2 != 0)
                        {
                            _screenSize.Width = _screenSize.Width - 1;
                        }
                        if (_screenSize.Height % 2 != 0)
                        {
                            _screenSize.Height = _screenSize.Height - 1;
                        }
                    }
                }
                using (var target = new Bitmap(_screenSize.Width, _screenSize.Height, PixelFormat.Format24bppRgb))
                {
                    using (Graphics g = Graphics.FromImage(target))
                    {
                        try
                        {
                            // Capture relative to the screen's own origin.
                            g.CopyFromScreen(s.Bounds.X + _screenSize.X, s.Bounds.Y + _screenSize.Y, 0, 0, new Size(_screenSize.Width, _screenSize.Height));
                        }
                        catch (Exception ex)
                        {
                            throw new Exception("Error grabbing screen (" + ex.Message + ") - disable screensaver.");
                            //probably remote desktop or screensaver has kicked in
                        }
                        if (MousePointer)
                        {
                            // Overlay the default cursor at its screen-relative
                            // position within the captured area.
                            var cursorBounds = new Rectangle(
                                Cursor.Position.X - s.Bounds.X - _screenSize.X,
                                Cursor.Position.Y - s.Bounds.Y - _screenSize.Y,
                                Cursors.Default.Size.Width,
                                Cursors.Default.Size.Height);
                            Cursors.Default.Draw(g, cursorBounds);
                        }
                    }
                    // notify client
                    NewFrame?.Invoke(this, new NewFrameEventArgs(target));
                    _error = false;
                }
            }
            // wait for a while ?
            if (_frameInterval > 0)
            {
                // get download duration
                TimeSpan span = DateTime.UtcNow.Subtract(start);
                // milliseconds to sleep
                int msec = _frameInterval - (int)span.TotalMilliseconds;
                if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                {
                    break;
                }
            }
        }
        catch (Exception ex)
        {
            // Log only the first failure in a run to avoid log spam.
            if (!_error)
            {
                MainForm.LogExceptionToFile(ex, "Desktop");
                _error = true;
            }
            // provide information to clients
            res = ReasonToFinishPlaying.DeviceLost;
            // wait for a while before the next try
            // (the break below means this delay just precedes shutdown)
            Thread.Sleep(250);
            break;
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(res));
}
/// <summary>
/// Creates, configures and starts a BASS channel for the given sound file.
/// Handles web-radio vs. file sources, optional multi-speaker splitting,
/// cue-in/cue-out points, loop/end syncs, fade-out scheduling and the
/// pitch/tempo/balance/volume/reverb effects.
/// </summary>
/// <param name="file">Sound file plus its effect settings.</param>
/// <param name="fadeInTime">Fade-in duration passed to SetStartVolume.</param>
/// <param name="callback">Invoked (via info.EndAction) when the file ends.</param>
/// <param name="loop">True to loop the channel instead of chaining to the next file.</param>
/// <returns>The BASS channel handle, or 0 on any failure (after reporting the error).</returns>
public int PlayFile(ISoundFile file, int fadeInTime, PlayingFinished callback, bool loop)
{
    int channel = 0;
    BASSFlag speakerFlag = GetSpeakerFlag(file);
    BASSFlag decodeFlag = BASSFlag.BASS_STREAM_DECODE;
    // Music routed to a single specific speaker is decoded mono; the
    // multi-speaker / default assignments keep the original channel count.
    if (file.SoundFileType == SoundFileType.Music && file.Effects.SpeakerAssignment.Active)
    {
        switch (file.Effects.SpeakerAssignment.Assignment)
        {
            case Data.SpeakerAssignment.AllSpeakers:
            case Data.SpeakerAssignment.BothCenterRears:
            case Data.SpeakerAssignment.BothFronts:
            case Data.SpeakerAssignment.BothRears:
            case Data.SpeakerAssignment.CenterAndSubwoofer:
            case Data.SpeakerAssignment.Default:
                break;
            default:
                decodeFlag |= BASSFlag.BASS_SAMPLE_MONO;
                break;
        }
    }
#if MONO
    // On Mono the file is read into a pinned managed buffer (BASS reads from
    // the raw pointer), so the GCHandle must outlive the channel.
    System.Runtime.InteropServices.GCHandle gcHandle = new System.Runtime.InteropServices.GCHandle();
#endif
    if (file.SoundFileType == SoundFileType.WebRadio)
    {
        channel = Bass.BASS_StreamCreateURL(file.Path, 0, decodeFlag | BASSFlag.BASS_STREAM_BLOCK, null, IntPtr.Zero);
    }
    else
    {
#if MONO
        byte[] buffer = null;
        long length = 0;
        try
        {
#if ANDROID
            if (file.Path.IsSmbFile())
            {
                buffer = SambaHelpers.GetFileContent(file.Path);
                length = buffer.Length;
            }
            else
            {
#endif
            System.IO.FileStream fs = System.IO.File.OpenRead(file.Path);
            length = fs.Length;
            buffer = new byte[length];
            fs.Read(buffer, 0, (int)length);
            fs.Close();
#if ANDROID
            }
#endif
        }
        catch (System.IO.IOException e)
        {
            ErrorHandling.ErrorOccurred(file.Id, e.Message);
            return(0);
        }
        gcHandle = System.Runtime.InteropServices.GCHandle.Alloc(buffer, System.Runtime.InteropServices.GCHandleType.Pinned);
        channel = Bass.BASS_StreamCreateFile(gcHandle.AddrOfPinnedObject(), 0L, length, decodeFlag);
#else // #if MONO
        channel = Bass.BASS_StreamCreateFile(file.Path, 0, 0, decodeFlag);
#endif
    }
    if (channel == 0)
    {
#if MONO
        if (gcHandle.IsAllocated) { gcHandle.Free(); }
#endif
        ErrorHandling.BassErrorOccurred(file.Id, StringResources.FilePlayingError);
        return(0);
    }
    RunningFileInfo info = new RunningFileInfo();
    bool isStreaming = BassStreamer.Instance.IsStreaming;
    bool useMultiSpeakerChannels = false;
    int speakers = 2;
    // NOTE(review): origChannel is assigned but never read in this method.
    int origChannel = channel;
    // "All speakers" playback via split streams is only possible when no
    // effect that operates on the tempo channel (balance/pitch/tempo) is
    // active and we are not routing through the network streamer.
    if (!isStreaming && file.Effects != null && file.Effects.SpeakerAssignment.Active && file.Effects.SpeakerAssignment.Assignment == Data.SpeakerAssignment.AllSpeakers && !file.Effects.Balance.Active && !file.Effects.Pitch.Active && !file.Effects.Tempo.Active)
    {
        speakers = Bass.BASS_GetInfo().speakers;
        if (speakers > 2)
        {
            useMultiSpeakerChannels = true;
        }
    }
    // Wrap the decode channel in a tempo channel; decode output is kept when
    // it feeds the streamer or the speaker splitter, otherwise it plays
    // directly (auto-freed) on the assigned speaker.
    Un4seen.Bass.BASSFlag flags = BASSFlag.BASS_DEFAULT;
    if (isStreaming)
    {
        flags = BASSFlag.BASS_FX_FREESOURCE | BASSFlag.BASS_STREAM_DECODE;
    }
    else if (useMultiSpeakerChannels)
    {
        flags = BASSFlag.BASS_FX_FREESOURCE | BASSFlag.BASS_STREAM_DECODE;
    }
    else
    {
        flags = BASSFlag.BASS_STREAM_AUTOFREE | BASSFlag.BASS_FX_FREESOURCE | speakerFlag;
    }
    channel = Un4seen.Bass.AddOn.Fx.BassFx.BASS_FX_TempoCreate(channel, flags);
    if (channel == 0)
    {
#if MONO
        if (gcHandle.IsAllocated) { gcHandle.Free(); }
#endif
        ErrorHandling.BassErrorOccurred(file.Id, StringResources.FilePlayingError);
        return(0);
    }
    bool result = true;
    if (useMultiSpeakerChannels)
    {
        // Fan the channel out to front / rear (and optionally center-LFE and
        // rear2) split streams, linked so they start together.
        int splitStream = Un4seen.Bass.AddOn.Mix.BassMix.BASS_Split_StreamCreate(channel, BASSFlag.BASS_STREAM_AUTOFREE | BASSFlag.BASS_SPEAKER_FRONT, null);
        if (splitStream == 0)
        {
            result = false;
        }
        else
        {
            int splitStream2 = Un4seen.Bass.AddOn.Mix.BassMix.BASS_Split_StreamCreate(channel, BASSFlag.BASS_STREAM_AUTOFREE | BASSFlag.BASS_SPEAKER_REAR, null);
            if (splitStream2 == 0)
            {
                result = false;
            }
            else
            {
                Bass.BASS_ChannelSetLink(splitStream, splitStream2);
                info.LinkedChannels = new List <int>();
                info.LinkedChannels.Add(splitStream2);
            }
            if (result && speakers > 4)
            {
                int splitStream3 = Un4seen.Bass.AddOn.Mix.BassMix.BASS_Split_StreamCreate(channel, BASSFlag.BASS_STREAM_AUTOFREE | BASSFlag.BASS_SPEAKER_CENLFE, null);
                if (splitStream3 == 0)
                {
                    result = false;
                }
                else
                {
                    Bass.BASS_ChannelSetLink(splitStream, splitStream3);
                    info.LinkedChannels.Add(splitStream3);
                }
            }
            if (result && speakers > 6)
            {
                int splitStream4 = Un4seen.Bass.AddOn.Mix.BassMix.BASS_Split_StreamCreate(channel, BASSFlag.BASS_STREAM_AUTOFREE | BASSFlag.BASS_SPEAKER_REAR2, null);
                if (splitStream4 == 0)
                {
                    result = false;
                }
                else
                {
                    Bass.BASS_ChannelSetLink(splitStream, splitStream4);
                    info.LinkedChannels.Add(splitStream4);
                }
            }
            if (result)
            {
                // The front split becomes the primary handle from here on.
                channel = splitStream;
            }
        }
    }
    if (result)
    {
        lock (m_Mutex)
        {
            info.EndAction = new Action(() => { callback(file.Id, channel); });
            info.Volume = file.Volume;
        }
        if (!loop)
        {
            // Non-looping files need an "end" sync so the next file can start.
            int sync = 0;
            // If CueOut is active ...
            if (file.Effects.CueOut.Active)
            {
                // Convert the CueOut position (seconds) into a byte offset
                long cueOutPos = Bass.BASS_ChannelSeconds2Bytes(channel, file.Effects.CueOut.Position);
                // Set the "end" sync to that position
                sync = Bass.BASS_ChannelSetSync(channel, BASSSync.BASS_SYNC_POS, cueOutPos, m_CueOutSync, new IntPtr(file.Id));
            }
            else
            {
                long totalLength = Bass.BASS_ChannelGetLength(channel);
                long endingTime = Bass.BASS_ChannelSeconds2Bytes(channel, 0.1);
                // Default: set the "end" sync to the end of the stream, minus one ms
                sync = Bass.BASS_ChannelSetSync(channel, BASSSync.BASS_SYNC_POS, totalLength - endingTime, m_StartNextSync, IntPtr.Zero);
            }
            if (sync == 0)
            {
#if MONO
                if (gcHandle.IsAllocated) { gcHandle.Free(); }
#endif
                ErrorHandling.BassErrorOccurred(file.Id, StringResources.FilePlayingError);
                return(0);
            }
            else
            {
                lock (m_Mutex)
                {
                    m_NotLoops[channel] = sync;
                }
            }
        }
        if (!SetStartVolume(file, fadeInTime, channel, info))
        {
            return(0);
        }
        info.CrossFade = false;
        if (file.Effects != null && file.Effects.FadeOutTime != 0)
        {
            // Schedule the fade-out so it completes exactly at the cue-out
            // point (or the natural end of the stream).
            long totalLength = Bass.BASS_ChannelGetLength(channel);
            if (totalLength == -1)
            {
                ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetVolumeError);
                return(0);
            }
            long fadeOutLength = Bass.BASS_ChannelSeconds2Bytes(channel, 0.001 * file.Effects.FadeOutTime);
            if (fadeOutLength == -1)
            {
                ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetVolumeError);
                return(0);
            }
            if (fadeOutLength > totalLength)
            {
                fadeOutLength = totalLength;
            }
            // If CueOut is active ...
            if (file.Effects.CueOut.Active)
            {
                // Convert the CueOut position (seconds) into a byte offset
                long cueOutPos = Bass.BASS_ChannelSeconds2Bytes(channel, file.Effects.CueOut.Position);
                // Set the "end" sync to that position
                if (Bass.BASS_ChannelSetSync(channel, BASSSync.BASS_SYNC_POS, cueOutPos - fadeOutLength, m_FadeOutSync, new IntPtr(file.Effects.FadeOutTime)) == 0)
                {
                    ErrorHandling.BassErrorOccurred(file.Id, StringResources.FilePlayingError);
                    return(0);
                }
            }
            else
            {
                if (Bass.BASS_ChannelSetSync(channel, BASSSync.BASS_SYNC_POS, totalLength - fadeOutLength, m_FadeOutSync, new IntPtr(file.Effects.FadeOutTime)) == 0)
                {
                    ErrorHandling.BassErrorOccurred(file.Id, StringResources.FilePlayingError);
                    return(0);
                }
            }
            if (loop)
            {
                // Looping with fade-out: add a loop sync at the cue-out point
                // (or stream end) so playback restarts there.
                // If CueOut is active ...
                if (file.Effects.CueOut.Active)
                {
                    // Convert the CueOut position (seconds) into a byte offset
                    long cueOutPos = Bass.BASS_ChannelSeconds2Bytes(channel, file.Effects.CueOut.Position);
                    // Set the "end" sync to that position
                    if (Bass.BASS_ChannelSetSync(channel, BASSSync.BASS_SYNC_POS, cueOutPos, m_LoopSync, new IntPtr(file.Id)) == 0)
                    {
                        ErrorHandling.BassErrorOccurred(file.Id, StringResources.FilePlayingError);
                        return(0);
                    }
                }
                else
                {
                    if (Bass.BASS_ChannelSetSync(channel, BASSSync.BASS_SYNC_POS, totalLength, m_LoopSync, new IntPtr(file.Id)) == 0)
                    {
                        ErrorHandling.BassErrorOccurred(file.Id, StringResources.FilePlayingError);
                        return(0);
                    }
                }
            }
            else
            {
                info.CrossFade = file.Effects.CrossFading;
            }
        }
        // Per-effect channel attributes / DSPs follow; each failure aborts.
        if (file.Effects != null && file.Effects.Pitch.Active)
        {
            float pitchValue = DetermineIntEffectValue(file.Effects.Pitch);
            if (!Bass.BASS_ChannelSetAttribute(channel, BASSAttribute.BASS_ATTRIB_TEMPO_PITCH, pitchValue))
            {
                ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                return(0);
            }
        }
        if (file.Effects != null && file.Effects.Tempo.Active)
        {
            float tempoValue = DetermineIntEffectValue(file.Effects.Tempo);
            if (!Bass.BASS_ChannelSetAttribute(channel, BASSAttribute.BASS_ATTRIB_TEMPO, tempoValue))
            {
                ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                return(0);
            }
        }
        if (file.Effects != null && file.Effects.Balance.Active && !useMultiSpeakerChannels)
        {
            SetBalanceEffect(channel, file.Id, file.Effects.Balance);
        }
        if (file.Effects != null && file.Effects.VolumeDB.Active)
        {
            // dB -> linear gain, applied to the main channel and every
            // linked split stream.
            float volumeDB = DetermineIntEffectValue(file.Effects.VolumeDB);
            float linear = (float)Math.Pow(10d, volumeDB / 20);
            int volFx = Bass.BASS_ChannelSetFX(channel, BASSFXType.BASS_FX_BFX_VOLUME, 1);
            if (volFx == 0)
            {
                ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                return(0);
            }
            Un4seen.Bass.AddOn.Fx.BASS_BFX_VOLUME fxVol = new Un4seen.Bass.AddOn.Fx.BASS_BFX_VOLUME(linear, Un4seen.Bass.AddOn.Fx.BASSFXChan.BASS_BFX_CHANALL);
            if (!Bass.BASS_FXSetParameters(volFx, fxVol))
            {
                ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                return(0);
            }
            if (info.LinkedChannels != null)
            {
                foreach (int splitStream2 in info.LinkedChannels)
                {
                    int volFx2 = splitStream2 != 0 ? Bass.BASS_ChannelSetFX(splitStream2, BASSFXType.BASS_FX_BFX_VOLUME, 1) : 0;
                    if (splitStream2 != 0 && volFx2 == 0)
                    {
                        ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                        return(0);
                    }
                    if (volFx2 != 0 && !Bass.BASS_FXSetParameters(volFx2, fxVol))
                    {
                        ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                        return(0);
                    }
                }
            }
        }
#pragma warning disable CS0618 // Type or member is obsolete
        if (file.Effects != null && file.Effects.Reverb.Active)
        {
            // Same pattern as the volume FX, using the (obsolete) BFX reverb.
            float linearLevel = (float)Math.Pow(10d, file.Effects.Reverb.Level / 20);
            int reverbFx = Bass.BASS_ChannelSetFX(channel, BASSFXType.BASS_FX_BFX_REVERB, 1);
            if (reverbFx == 0)
            {
                ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                return(0);
            }
            Un4seen.Bass.AddOn.Fx.BASS_BFX_REVERB fxReverb = new Un4seen.Bass.AddOn.Fx.BASS_BFX_REVERB(linearLevel, file.Effects.Reverb.Delay);
            if (!Bass.BASS_FXSetParameters(reverbFx, fxReverb))
            {
                ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                return(0);
            }
            if (info.LinkedChannels != null)
            {
                foreach (int splitStream2 in info.LinkedChannels)
                {
                    int reverbFx2 = splitStream2 != 0 ? Bass.BASS_ChannelSetFX(splitStream2, BASSFXType.BASS_FX_BFX_REVERB, 1) : 0;
                    if (splitStream2 != 0 && reverbFx2 == 0)
                    {
                        ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                        return(0);
                    }
                    if (reverbFx2 != 0 && !Bass.BASS_FXSetParameters(reverbFx2, fxReverb))
                    {
                        ErrorHandling.BassErrorOccurred(file.Id, StringResources.SetEffectError);
                        return(0);
                    }
                }
            }
        }
#pragma warning restore CS0618 // Type or member is obsolete
        if (loop)
        {
            Bass.BASS_ChannelFlags(channel, BASSFlag.BASS_SAMPLE_LOOP, BASSFlag.BASS_SAMPLE_LOOP);
        }
        lock (m_Mutex)
        {
            m_Loops[file.Id] = file;
            if (file.Effects != null && file.Effects.CueOut.Active)
            {
                m_CueOutRepeats[channel] = loop;
            }
        }
        if (file.Effects.CueIn.Active)
        {
            // Start playback from the cue-in position.
            Bass.BASS_ChannelSetPosition(channel, file.Effects.CueIn.Position);
        }
        if (isStreaming)
        {
            result = BassStreamer.Instance.AddChannel(channel);
        }
        else
        {
            result = Bass.BASS_ChannelPlay(channel, false);
        }
        if (!result)
        {
            ErrorHandling.BassErrorOccurred(file.Id, StringResources.FilePlayingError);
            Bass.BASS_StreamFree(channel);
#if MONO
            if (gcHandle.IsAllocated) { gcHandle.Free(); }
#endif
            return(0);
        }
        lock (m_Mutex)
        {
            m_RunningFiles[channel] = info;
#if MONO
            // Keep the pinned buffer alive for the lifetime of the channel.
            if (gcHandle.IsAllocated)
            {
                m_GCHandles[channel] = gcHandle;
            }
#endif
        }
        return(channel);
    }
    else
    {
        ErrorHandling.BassErrorOccurred(file.Id, StringResources.FilePlayingError);
        return(0);
    }
}
// (Re)creates the libVLC media player for the current source: wires up the
// video/audio callback delegates (held in fields so the GC cannot collect
// them while native code holds the pointers), resolves an ONVIF stream URL
// override where applicable, and starts playback.
private void Init()
{
    _mediaPlayer?.Dispose();
    // Keep delegate instances in fields - VLC stores the native function
    // pointers and would otherwise call into collected delegates.
    _videoFormat = VideoFormat;
    _lockCB = LockVideo;
    _unlockCB = UnlockVideo;
    _displayCB = DisplayVideo;
    //_cleanupVideoCB = CleanupVideo;
    _audioSetup = AudioSetup;
    _processAudio = ProcessAudio;
    _cleanupAudio = CleanupAudio;
    _pauseAudio = PauseAudio;
    _resumeAudio = ResumeAudio;
    _flushAudio = FlushAudio;
    _drainAudio = DrainAudio;
    // Source index 9 means an ONVIF camera: prefer its discovered stream URI.
    string overrideURL = null;
    if (_camera != null)
    {
        switch (_camera.Camobject.settings.sourceindex)
        {
            case 9:
                var od = _camera.ONVIFDevice;
                if (od != null)
                {
                    var ep = od.StreamEndpoint;
                    if (ep != null)
                    {
                        var u = ep.Uri.Uri;
                        overrideURL = u;
                    }
                }
                break;
        }
    }
    FromType ftype = FromType.FromLocation;
    Seekable = false;
    var murl = overrideURL ?? Source;
    if (string.IsNullOrEmpty(murl))
    {
        throw new Exception("Video source is empty");
    }
    // If the source parses as a filesystem path, treat it as a seekable
    // local file; network URLs throw here and stay non-seekable.
    try
    {
        var p = Path.GetFullPath(overrideURL ?? Source);
        Seekable = !string.IsNullOrEmpty(p);
        if (Seekable)
        {
            ftype = FromType.FromPath;
        }
    }
    catch (Exception)
    {
        Seekable = false;
    }
    using (var media = new Media(LibVLC, murl, ftype))
    {
        Duration = Time = 0;
        foreach (var opt in _options)
        {
            media.AddOption(opt);
        }
        _mediaPlayer = new MediaPlayer(media);
        _mediaPlayer.SetVideoFormatCallbacks(_videoFormat, null);// _cleanupVideoCB);
        _mediaPlayer.SetVideoCallbacks(_lockCB, _unlockCB, _displayCB);
        _mediaPlayer.TimeChanged += _mediaPlayer_TimeChanged;
        _mediaPlayer.EnableHardwareDecoding = false;
        _mediaPlayer.SetAudioFormatCallback(_audioSetup, _cleanupAudio);
        _mediaPlayer.SetAudioCallbacks(_processAudio, _pauseAudio, _resumeAudio, _flushAudio, _drainAudio);
        _mediaPlayer.EncounteredError += (sender, e) =>
        {
            ErrorHandler?.Invoke("VLC Error");
            _res = ReasonToFinishPlaying.VideoSourceError;
        };
        _mediaPlayer.EndReached += (sender, e) =>
        {
            // NOTE(review): end-of-stream is recorded as VideoSourceError -
            // confirm whether this is intentional (e.g. to force a camera
            // reconnect) rather than EndOfStreamReached.
            _res = ReasonToFinishPlaying.VideoSourceError;
        };
        _mediaPlayer.Stopped += (sender, e) =>
        {
            Logger.LogMessage("VLC stopped");
            // Raise completion off the VLC callback thread.
            Task.Run(() =>
            {
                Debug.WriteLine("CLEANUP");
                IsRunning = false;
                _stopping = false;
                _stopped?.Set();
                PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
                AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
            });
        };
    }
    _lastTimeUpdate = DateTime.UtcNow;
    _mediaPlayer.Play();
}
// Opens the FFmpeg input for the configured source: builds an AVDictionary
// of protocol-specific options (timeouts, cookies, headers, user agent),
// merges user-supplied name=value options, installs the interrupt callback,
// and calls avformat_open_input under the global FFmpeg mutex. On failure,
// the error is reported and both finished events are raised.
private void DoStart()
{
    var vss = Source;
    if (!_modeAudio)
    {
        vss = Tokenise(vss);
    }
    AVDictionary *options = null;
    // Only auto-configure protocol options when no explicit input format
    // was forced (e.g. a capture device).
    if (_inputFormat == null)
    {
        ffmpeg.av_dict_set(&options, "analyzeduration", _analyzeDuration.ToString(), 0);
        // URL scheme (text before the first ':') selects the option set.
        string prefix = vss.ToLower().Substring(0, vss.IndexOf(":", StringComparison.Ordinal));
        switch (prefix)
        {
            case "http":
            case "mmsh":
            case "mms":
                ffmpeg.av_dict_set(&options, "timeout", _timeout.ToString(), 0);
                ffmpeg.av_dict_set(&options, "stimeout", (_timeout * 1000).ToString(), 0);
                if (_cookies != "")
                {
                    ffmpeg.av_dict_set(&options, "cookies", _cookies, 0);
                }
                if (_headers != "")
                {
                    ffmpeg.av_dict_set(&options, "headers", _headers, 0);
                }
                if (_userAgent != "")
                {
                    ffmpeg.av_dict_set(&options, "user-agent", _userAgent, 0);
                }
                break;
            case "tcp":
            case "udp":
            case "rtp":
            case "sdp":
            case "mmst":
            case "ftp":
                ffmpeg.av_dict_set(&options, "timeout", _timeout.ToString(), 0);
                break;
            case "rtsp":
            case "rtmp":
                // stimeout is in microseconds-of-milliseconds here; value is
                // _timeout * 1000 as elsewhere in this method.
                ffmpeg.av_dict_set(&options, "stimeout", (_timeout * 1000).ToString(), 0);
                if (_userAgent != "")
                {
                    ffmpeg.av_dict_set(&options, "user-agent", _userAgent, 0);
                }
                break;
        }
        ffmpeg.av_dict_set(&options, "rtsp_transport", RTSPmode, 0);
    }
    ffmpeg.av_dict_set(&options, "rtbufsize", "10000000", 0);
    // Merge user-specified options ("name=value" per line); numeric values
    // are set as integers.
    var lo = _options.Split(Environment.NewLine.ToCharArray());
    foreach (var nv in lo)
    {
        if (!string.IsNullOrEmpty(nv))
        {
            var i = nv.IndexOf('=');
            if (i > -1)
            {
                var n = nv.Substring(0, i).Trim();
                var v = nv.Substring(i + 1).Trim();
                if (!string.IsNullOrEmpty(n) && !string.IsNullOrEmpty(v))
                {
                    int j;
                    if (int.TryParse(v, out j))
                    {
                        ffmpeg.av_dict_set_int(&options, n, j, 0);
                    }
                    else
                    {
                        ffmpeg.av_dict_set(&options, n, v, 0);
                    }
                }
            }
        }
    }
    _stopReadingFrames = false;
    try
    {
        // FFmpeg open calls are serialized process-wide.
        Program.FfmpegMutex.WaitOne();
        var pFormatContext = ffmpeg.avformat_alloc_context();
        _lastPacket = DateTime.UtcNow;
        // Keep the delegate in a field so the native pointer stays valid.
        _interruptCallback = InterruptCb;
        _interruptCallbackAddress = Marshal.GetFunctionPointerForDelegate(_interruptCallback);
        pFormatContext->interrupt_callback.callback = _interruptCallbackAddress;
        pFormatContext->interrupt_callback.opaque = null;
        if (ffmpeg.avformat_open_input(&pFormatContext, vss, _inputFormat, &options) != 0)
        {
            throw new ApplicationException(@"Could not open source");
        }
        _formatContext = pFormatContext;
        SetupFormat();
    }
    catch (Exception ex)
    {
        ErrorHandler?.Invoke(ex.Message);
        _res = ReasonToFinishPlaying.VideoSourceError;
        PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    }
    finally
    {
        try
        {
            Program.FfmpegMutex.ReleaseMutex();
        }
        catch
        {
        }
    }
    _starting = false;
}
// Worker thread: connects to a Kinect network server over HTTP and demuxes a
// "--myboundary"-delimited multipart stream into JPEG video frames
// (image/jpeg), raw 16kHz mono audio (audio/raw) and alert notifications
// (alert/text), raising NewFrame / DataAvailable / AlertHandler accordingly.
// Loops with reconnects until _stopEvent is set or the app shuts down, then
// raises PlayingFinished with the reason.
private void WorkerThread()
{
    // buffer to read stream
    var buffer = new byte[BufSize];
    var encoding = new ASCIIEncoding();
    var res = ReasonToFinishPlaying.StoppedByUser;
    while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
    {
        // reset reload event
        _reloadEvent.Reset();
        // HTTP web request
        HttpWebRequest request = null;
        // web responce
        WebResponse response = null;
        // stream for MJPEG downloading
        Stream stream = null;
        try
        {
            // create request
            request = (HttpWebRequest)WebRequest.Create(_source);
            // set user agent
            if (_userAgent != null)
            {
                request.UserAgent = _userAgent;
            }
            // set proxy
            if (_proxy != null)
            {
                request.Proxy = _proxy;
            }
            if (_usehttp10)
            {
                request.ProtocolVersion = HttpVersion.Version10;
            }
            // set timeout value for the request; also bound connection reuse so
            // stale keep-alive connections get recycled on the same schedule
            request.Timeout = request.ServicePoint.ConnectionLeaseTimeout = request.ServicePoint.MaxIdleTime = _requestTimeout;
            request.AllowAutoRedirect = true;
            // set login and password
            if ((_login != null) && (_password != null) && (_login != string.Empty))
            {
                request.Credentials = new NetworkCredential(_login, _password);
            }
            // set connection group name so this source gets its own connection pool
            if (_useSeparateConnectionGroup)
            {
                request.ConnectionGroupName = GetHashCode().ToString();
            }
            // get response
            response = request.GetResponse();
            // get response stream
            stream = response.GetResponseStream();
            stream.ReadTimeout = _requestTimeout;

            // boundary between parts is fixed for this server; "\r\n\r\n"
            // separates each part's headers from its payload
            byte[] boundary = encoding.GetBytes("--myboundary");
            byte[] sep = encoding.GetBytes("\r\n\r\n");

            // ttl = number of valid bytes currently buffered;
            // startPacket/endPacket = offsets of two consecutive boundaries
            int startPacket = -1;
            int endPacket = -1;
            int ttl = 0;
            bool hasaudio = false;

            while ((!_stopEvent.WaitOne(0, false)) && (!_reloadEvent.WaitOne(0, false)))
            {
                int read;
                if ((read = stream.Read(buffer, ttl, ReadSize)) == 0)
                {
                    // zero-byte read = remote closed; trigger the reconnect path
                    throw new ApplicationException();
                }
                ttl += read;
                if (startPacket == -1)
                {
                    startPacket = ByteArrayUtils.Find(buffer, boundary, 0, ttl);
                }
                else
                {
                    if (endPacket == -1)
                    {
                        endPacket = ByteArrayUtils.Find(buffer, boundary, startPacket + boundary.Length, ttl - (startPacket + boundary.Length));
                    }
                }
                var nf = NewFrame;
                // Only process once a complete part (both boundaries) is buffered.
                if (startPacket > -1 && endPacket > startPacket)
                {
                    // br = start of header/payload separator, searched within the
                    // first 100 bytes of the part
                    int br = ByteArrayUtils.Find(buffer, sep, startPacket, 100);
                    if (br != -1)
                    {
                        // NOTE(review): arr is sized br but only br - startPacket
                        // bytes are copied, so the ASCII conversion below includes
                        // startPacket trailing NUL bytes — harmless for the
                        // Content-type search but looks unintended; verify.
                        var arr = new byte[br];
                        System.Array.Copy(buffer, startPacket, arr, 0, br - startPacket);
                        string s = Encoding.ASCII.GetString(arr);
                        int k = s.IndexOf("Content-type: ", StringComparison.Ordinal);
                        if (k != -1)
                        {
                            // isolate the media type value from the part headers
                            s = s.Substring(k + 14);
                            s = s.Substring(0, s.IndexOf("\r\n", StringComparison.Ordinal));
                            s = s.Trim();
                        }
                        switch (s)
                        {
                            case "image/jpeg":
                                // payload starts after the 4-byte separator; the
                                // -8 trims separator plus trailing CRLFs before the
                                // next boundary
                                try
                                {
                                    using (var ms = new MemoryStream(buffer, br + 4, endPacket - br - 8))
                                    {
                                        using (var bmp = (Bitmap)Image.FromStream(ms))
                                        {
                                            var dae = new NewFrameEventArgs(bmp);
                                            // NOTE(review): nf is invoked without a
                                            // null check here, unlike the audio path
                                            // below — NRE if no subscriber; confirm.
                                            nf.Invoke(this, dae);
                                        }
                                    }
                                }
                                catch (Exception ex)
                                {
                                    //sometimes corrupted packets come through...
                                    Logger.LogExceptionToFile(ex, "KinectNetwork");
                                }
                                break;
                            case "audio/raw":
                                if (!hasaudio)
                                {
                                    hasaudio = true;
                                    //fixed 16khz 1 channel format
                                    RecordingFormat = new WaveFormat(16000, 16, 1);
                                    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
                                    _sampleChannel = new SampleChannel(_waveProvider);
                                    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                                    // one-shot notification that audio exists
                                    if (HasAudioStream != null)
                                    {
                                        HasAudioStream(this, EventArgs.Empty);
                                        HasAudioStream = null;
                                    }
                                }
                                var da = DataAvailable;
                                if (da != null)
                                {
                                    int l = endPacket - br - 8;
                                    var data = new byte[l];
                                    int d;
                                    using (var ms = new MemoryStream(buffer, br + 4, l))
                                    {
                                        d = ms.Read(data, 0, l);
                                    }
                                    if (d > 0)
                                    {
                                        _waveProvider.AddSamples(data, 0, data.Length);
                                        if (Listening)
                                        {
                                            WaveOutProvider.AddSamples(data, 0, data.Length);
                                        }
                                        //forces processing of volume level without piping it out
                                        var sampleBuffer = new float[data.Length];
                                        int r = _sampleChannel.Read(sampleBuffer, 0, data.Length);
                                        da(this, new DataAvailableEventArgs((byte[])data.Clone(), r));
                                    }
                                }
                                break;
                            case "alert/text":
                                // code to handle alert notifications goes here
                                if (AlertHandler != null)
                                {
                                    int dl = endPacket - br - 8;
                                    var data2 = new byte[dl];
                                    using (var ms = new MemoryStream(buffer, br + 4, dl))
                                    {
                                        ms.Read(data2, 0, dl);
                                    }
                                    string alerttype = Encoding.ASCII.GetString(data2);
                                    AlertHandler(this, new AlertEventArgs(alerttype));
                                }
                                break;
                        }
                    }
                    // slide the unconsumed tail (from endPacket onward) to the
                    // front of the buffer and restart boundary search
                    ttl -= endPacket;
                    System.Array.Copy(buffer, endPacket, buffer, 0, ttl);
                    startPacket = -1;
                    endPacket = -1;
                }
            }
        }
        catch (ApplicationException)
        {
            // do nothing for Application Exception, which we raised on our own
            // wait for a while before the next try
            Thread.Sleep(250);
        }
        catch (ThreadAbortException)
        {
            break;
        }
        catch (Exception ex)
        {
            // provide information to clients
            Logger.LogExceptionToFile(ex, "KinectNetwork");
            res = ReasonToFinishPlaying.DeviceLost;
            break;
            // wait for a while before the next try
            //Thread.Sleep(250);
        }
        finally
        {
            request?.Abort();
            stream?.Flush();
            stream?.Close();
            response?.Close();
        }
        // need to stop ?
        if (_stopEvent.WaitOne(0, false))
        {
            break;
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(res));
}
// Worker thread: captures the target window/region via GDI BitBlt into a
// rotating pool of 10 reusable bitmaps. On .NET 4.0+ the capture of frame N
// and the delivery of frame N-1 to NewFrame subscribers run concurrently as
// two tasks; on NET35 the same code runs sequentially (the #if lattice strips
// the task wrappers). Exits when stopEvent is set, then disposes the pool and
// raises PlayingFinished(StoppedByUser).
private void WorkerThread()
{
    int width = region.Width;
    int height = region.Height;
    int x = region.Location.X;
    int y = region.Location.Y;
    Size size = region.Size;

    // Create 10 frames (which we will keep overwriting and reusing)
    Context[] buffer = new Context[10];
    for (int i = 0; i < buffer.Length; i++)
    {
        // Note: It's important to use 32-bpp ARGB to avoid problems with FFmpeg later
        //var bmp = new Bitmap(width, height, PixelFormat.Format24bppRgb);
        var bmp = new Bitmap(width, height, PixelFormat.Format32bppArgb);
        buffer[i] = new Context
        {
            original = bmp,
            hwndGraphics = Graphics.FromHwnd(windowHandle),
            imgGraphics = Graphics.FromImage(bmp),
            args = new NewFrameEventArgs(bmp)
        };
    }

    // download start time and duration
    DateTime start;
    TimeSpan span;
    int counter = 0;

    // captureContext is written this iteration; displayContext holds the frame
    // captured last iteration, now safe to publish
    int bufferPos = 0;
    Context captureContext = buffer[bufferPos];
    Context displayContext = null;

    while (!stopEvent.WaitOne(0, false))
    {
        // set download start time
        start = DateTime.Now;
        try
        {
            // Start capturing a new frame at the same
            // time we send the previous one to listeners
#if !NET35
            Task.WaitAll(
#if NET40
                Task.Factory.StartNew(() =>
#else
                Task.Run(() =>
#endif
                {
#endif
                    // wait for a while ?
                    if (frameInterval > 0)
                    {
                        // get download duration
                        span = DateTime.Now.Subtract(start);
                        // miliseconds to sleep
                        int msec = frameInterval - (int)span.TotalMilliseconds;
                        // if we should sleep, then sleep as long as needed
                        // (returns early if stop is signalled during the wait)
                        if ((msec > 0) && (stopEvent.WaitOne(msec, false)))
                            return;
                    }
                    // capture the screen: copy the window DC into the bitmap DC
                    var wndHdc = captureContext.hwndGraphics.GetHdc();
                    var imgHdc = captureContext.imgGraphics.GetHdc();
                    captureContext.args.CaptureStarted = DateTime.Now;
                    BitBlt(imgHdc, 0, 0, width, height, wndHdc, x, y, (int)CopyPixelOperation.SourceCopy);
                    captureContext.args.FrameSize = size;
                    captureContext.args.CaptureFinished = DateTime.Now;
                    captureContext.hwndGraphics.ReleaseHdc(wndHdc);
                    captureContext.imgGraphics.ReleaseHdc(imgHdc);
                    // increment frames counter
                    captureContext.args.FrameIndex = counter++;
                    framesReceived++;
#if !NET35
                }),
#if NET40
                Task.Factory.StartNew(() =>
#else
                Task.Run(() =>
#endif
                {
#endif
                    // provide new image to clients (the frame captured last pass)
                    if (displayContext != null)
                    {
                        // reset whatever listeners had done with the frame
                        displayContext.args.Frame = displayContext.original;
                        NewFrame?.Invoke(this, displayContext.args);
                    }
#if !NET35
                }));
#endif
            // Update buffer position: what we just captured becomes next pass's
            // display frame; advance to the next pool slot for capture
            displayContext = buffer[bufferPos];
            bufferPos = (bufferPos + 1) % buffer.Length;
            captureContext = buffer[bufferPos];
            Debug.Assert(displayContext != captureContext);
        }
        catch (ThreadAbortException)
        {
            break;
        }
        catch (Exception exception)
        {
#if !NET35
            // unwrap a single-cause AggregateException from Task.WaitAll
            AggregateException ae = exception as AggregateException;
            if (ae != null && ae.InnerExceptions.Count == 1)
                exception = ae.InnerExceptions[0];
#endif
            // provide information to clients; rethrow if nobody listens
            if (VideoSourceError == null)
                throw;
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception));
            // wait for a while before the next try
            Thread.Sleep(250);
        }
        // need to stop ?
        if (stopEvent.WaitOne(0, false))
            break;
    }

    // release resources
    foreach (var c in buffer)
    {
        c.imgGraphics.Dispose();
        c.args.Frame.Dispose();
    }
    PlayingFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser);
}
void VideoSourcePlayingFinished(object sender, PlayingFinishedEventArgs e) { PlayingFinished?.Invoke(sender, e); }
// Main decode loop (send_packet/receive_frame API): reads packets from
// _formatContext until aborted, resamples audio to 16-bit PCM via swresample
// and publishes it through DataAvailable, converts video to BGR24 via
// swscale and publishes it through NewFrame. A video stall longer than
// _timeout ms sets DeviceLost and aborts. On exit, tears down all FFmpeg
// state under the global mutex and raises PlayingFinished/AudioFinished.
private void ReadFrames()
{
    var pConvertedFrameBuffer = IntPtr.Zero;
    SwsContext* pConvertContext = null;
    var audioInited = false;
    var videoInited = false;
    byte[] buffer = null, tbuffer = null;
    var dstData = new byte_ptrArray4();
    var dstLinesize = new int_array4();
    BufferedWaveProvider waveProvider = null;
    SampleChannel sampleChannel = null;
    var packet = new AVPacket();
    do
    {
        ffmpeg.av_init_packet(&packet);
        // Lazily size the PCM scratch buffers off the audio sample rate.
        if (_audioCodecContext != null && buffer == null)
        {
            buffer = new byte[_audioCodecContext->sample_rate * 2];
            tbuffer = new byte[_audioCodecContext->sample_rate * 2];
        }
        if (Log("AV_READ_FRAME", ffmpeg.av_read_frame(_formatContext, &packet)))
        {
            break;
        }
        // drop out on corrupt packets rather than feeding the decoder
        if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
        {
            break;
        }
        // snapshot event handlers once per packet
        var nf = NewFrame;
        var da = DataAvailable;
        _lastPacket = DateTime.UtcNow;
        int ret;
        if (_audioStream != null && packet.stream_index == _audioStream->index && _audioCodecContext != null)
        {
            // one-shot "this source has audio" notification
            if (HasAudioStream != null)
            {
                HasAudioStream?.Invoke(this, EventArgs.Empty);
                HasAudioStream = null;
            }
            if (da != null)
            {
                var s = 0;
                // outPtrs[0] points at tbuffer; swr_convert writes converted
                // samples there, which are then accumulated into buffer
                fixed (byte** outPtrs = new byte*[32])
                {
                    fixed (byte* bPtr = &tbuffer[0])
                    {
                        outPtrs[0] = bPtr;
                        ffmpeg.avcodec_send_packet(_audioCodecContext, &packet);
                        // drain every frame the decoder produces for this packet
                        do
                        {
                            ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, _audioFrame);
                            if (ret == 0)
                            {
                                var dat = _audioFrame->data[0];
                                var numSamplesOut = ffmpeg.swr_convert(_swrContext, outPtrs, _audioCodecContext->sample_rate, &dat, _audioFrame->nb_samples);
                                // 2 bytes per sample (16-bit) per channel
                                var l = numSamplesOut * 2 * _audioCodecContext->channels;
                                Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                                s += l;
                            }
                            if (_audioFrame->decode_error_flags > 0)
                            {
                                break;
                            }
                        } while (ret == 0);
                        if (s > 0)
                        {
                            var ba = new byte[s];
                            Buffer.BlockCopy(buffer, 0, ba, 0, s);
                            // first audio data: build the NAudio pipeline
                            if (!audioInited)
                            {
                                audioInited = true;
                                RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16, _audioCodecContext->channels);
                                waveProvider = new BufferedWaveProvider(RecordingFormat)
                                {
                                    DiscardOnBufferOverflow = true,
                                    BufferDuration = TimeSpan.FromMilliseconds(500)
                                };
                                sampleChannel = new SampleChannel(waveProvider);
                                sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                            }
                            waveProvider.AddSamples(ba, 0, s);
                            // read through the sample channel so volume metering
                            // fires even when nobody is listening
                            var sampleBuffer = new float[s];
                            var read = sampleChannel.Read(sampleBuffer, 0, s);
                            da(this, new DataAvailableEventArgs(ba, read));
                            if (Listening)
                            {
                                WaveOutProvider?.AddSamples(ba, 0, read);
                            }
                        }
                    }
                }
            }
        }
        if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index && _videoCodecContext != null)
        {
            ffmpeg.avcodec_send_packet(_videoCodecContext, &packet);
            do
            {
                ret = ffmpeg.avcodec_receive_frame(_videoCodecContext, _videoFrame);
                if (ret == 0 && EmitFrame)
                {
                    // first decoded frame: allocate the BGR24 conversion
                    // buffer and swscale context once
                    if (!videoInited)
                    {
                        videoInited = true;
                        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGR24, _videoCodecContext->width, _videoCodecContext->height, 1);
                        pConvertedFrameBuffer = Marshal.AllocHGlobal(convertedFrameBufferSize);
                        ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte*)pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _videoCodecContext->width, _videoCodecContext->height, 1);
                        pConvertContext = ffmpeg.sws_getContext(_videoCodecContext->width, _videoCodecContext->height, _videoCodecContext->pix_fmt, _videoCodecContext->width, _videoCodecContext->height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                    }
                    Log("SWS_SCALE", ffmpeg.sws_scale(pConvertContext, _videoFrame->data, _videoFrame->linesize, 0, _videoCodecContext->height, dstData, dstLinesize));
                    if (_videoFrame->decode_error_flags > 0)
                    {
                        break;
                    }
                    // Bitmap wraps the native buffer (no copy); subscribers must
                    // not retain it past the event call
                    using (var mat = new Bitmap(_videoCodecContext->width, _videoCodecContext->height, dstLinesize[0], PixelFormat.Format24bppRgb, pConvertedFrameBuffer))
                    {
                        var nfe = new NewFrameEventArgs(mat);
                        nf.Invoke(this, nfe);
                    }
                    _lastVideoFrame = DateTime.UtcNow;
                }
            } while (ret == 0);
        }
        // watchdog: no video frame within _timeout ms means the device is gone
        if (nf != null && _videoStream != null)
        {
            if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds > _timeout)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                _abort = true;
            }
        }
        ffmpeg.av_packet_unref(&packet);
    } while (!_abort && !MainForm.ShuttingDown);

    // null frame signals end-of-stream to subscribers
    NewFrame?.Invoke(this, new NewFrameEventArgs(null));

    try
    {
        // teardown is serialised with other FFmpeg users
        Program.MutexHelper.Wait();
        if (pConvertedFrameBuffer != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(pConvertedFrameBuffer);
        }
        if (_formatContext != null)
        {
            if (_formatContext->streams != null)
            {
                var j = (int)_formatContext->nb_streams;
                for (var i = j - 1; i >= 0; i--)
                {
                    var stream = _formatContext->streams[i];
                    if (stream != null && stream->codec != null && stream->codec->codec != null)
                    {
                        stream->discard = AVDiscard.AVDISCARD_ALL;
                        ffmpeg.avcodec_close(stream->codec);
                    }
                }
            }
            fixed (AVFormatContext** f = &_formatContext)
            {
                ffmpeg.avformat_close_input(f);
            }
            _formatContext = null;
        }
        if (_videoFrame != null)
        {
            fixed (AVFrame** pinprt = &_videoFrame)
            {
                ffmpeg.av_frame_free(pinprt);
                _videoFrame = null;
            }
        }
        if (_audioFrame != null)
        {
            fixed (AVFrame** pinprt = &_audioFrame)
            {
                ffmpeg.av_frame_free(pinprt);
                _audioFrame = null;
            }
        }
        _videoStream = null;
        _audioStream = null;
        _audioCodecContext = null;
        _videoCodecContext = null;
        if (_swrContext != null)
        {
            fixed (SwrContext** s = &_swrContext)
            {
                ffmpeg.swr_free(s);
            }
            _swrContext = null;
        }
        if (pConvertContext != null)
        {
            ffmpeg.sws_freeContext(pConvertContext);
        }
        if (sampleChannel != null)
        {
            sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
            sampleChannel = null;
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex, "Media Stream (close)");
    }
    finally
    {
        try
        {
            Program.MutexHelper.Release();
        }
        catch
        {
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
public void Start() { Debug.WriteLine("START"); if (IsRunning) { return; } IsRunning = true; try { if (_failedLoad || string.IsNullOrEmpty(VlcHelper.VLCLocation)) { throw new ApplicationException("VLC not found. Set location in settings."); } _quit = false; _commands.Clear(); Task.Run(async() => { while (!_quit) { string cmd; if (_commands.TryDequeue(out cmd)) { switch (cmd) { case "init": try { Init(); } catch (ApplicationException ex) { Logger.LogException(ex, "VLC"); _res = ReasonToFinishPlaying.VideoSourceError; _quit = true; } break; case "stop": if (_mediaPlayer != null && _mediaPlayer.IsPlaying) { _mediaPlayer.Stop(); } else { _quit = true; } break; } } await Task.Delay(500); } Cleanup(); }); _commands.Enqueue("init"); _lastFrame = DateTime.UtcNow; _connecting = true; } catch (Exception ex) { Logger.LogException(ex, "VLCStream"); ErrorHandler?.Invoke("Invalid Source (" + Source + ")"); _connecting = false; IsRunning = false; _quit = false; _res = ReasonToFinishPlaying.VideoSourceError; PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res)); AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res)); } }
// VLC worker: builds the (cached) MediaPlayerFactory, creates media + player,
// wires audio/video callback renderers, starts playback and a dedicated
// "vlc eventing" thread, then blocks on _stopEvent. On wake it joins the
// eventing thread, raises PlayingFinished/AudioFinished (DeviceLost for a
// non-seekable stream that was not stopped by the user, StoppedByUser
// otherwise), disposes the player and closes _stopEvent.
private void WorkerThread()
{
    bool file = false;
    try
    {
        if (File.Exists(_source))
        {
            file = true;
        }
    }
    catch
    {
        // ignored
    }
    if (_mFactory == null)
    {
        var args = new List<string>
        {
            "-I",
            "dumy",
            "--ignore-config",
            "--no-osd",
            "--disable-screensaver",
            "--plugin-path=./plugins"
        };
        if (file)
        {
            args.Add("--file-caching=3000");
        }
        try
        {
            // merge in user-supplied VLC arguments, de-duplicated
            var l2 = args.ToList();
            l2.AddRange(_arguments);
            l2 = l2.Distinct().ToList();
            _mFactory = new MediaPlayerFactory(l2.ToArray());
        }
        catch (Exception ex)
        {
            // bad custom arguments: log and fall back to the defaults
            MainForm.LogExceptionToFile(ex, "VLC Stream");
            MainForm.LogMessageToFile("VLC arguments are: " + string.Join(",", args.ToArray()), "VLC Stream");
            MainForm.LogMessageToFile("Using default VLC configuration.", "VLC Stream");
            _mFactory = new MediaPlayerFactory(args.ToArray());
        }
        GC.KeepAlive(_mFactory);
    }
    _mMedia = file ? _mFactory.CreateMedia<IMediaFromFile>(_source) : _mFactory.CreateMedia<IMedia>(_source);
    _mMedia.Events.DurationChanged += EventsDurationChanged;
    _mMedia.Events.StateChanged += EventsStateChanged;
    // dispose any previous player instance before creating a fresh one
    if (_mPlayer != null)
    {
        try
        {
            _mPlayer?.Dispose();
        }
        catch
        {
            // ignored
        }
        _mPlayer = null;
    }
    _mPlayer = _mFactory.CreatePlayer<IVideoPlayer>();
    _mPlayer.Events.TimeChanged += EventsTimeChanged;
    // custom renderers route decoded audio/video back into managed callbacks
    var fc = new Func<SoundFormat, SoundFormat>(SoundFormatCallback);
    _mPlayer.CustomAudioRenderer.SetFormatCallback(fc);
    var ac = new AudioCallbacks { SoundCallback = SoundCallback };
    _mPlayer.CustomAudioRenderer.SetCallbacks(ac);
    _mPlayer.CustomAudioRenderer.SetExceptionHandler(Handler);
    _mPlayer.CustomRenderer.SetCallback(FrameCallback);
    _mPlayer.CustomRenderer.SetExceptionHandler(Handler);
    GC.KeepAlive(_mPlayer);
    _needsSetup = true;
    _stopping = false;
    _mPlayer.CustomRenderer.SetFormat(new BitmapFormat(FormatWidth, FormatHeight, ChromaType.RV24));
    _mPlayer.Open(_mMedia);
    _mMedia.Parse(true);
    _mPlayer.Delay = 0;
    _framesReceived = 0;
    Duration = Time = 0;
    LastFrame = DateTime.MinValue;

    //check if file source (isseekable in _mPlayer is not reliable)
    Seekable = false;
    try
    {
        var p = Path.GetFullPath(_mMedia.Input);
        Seekable = !string.IsNullOrEmpty(p);
    }
    catch (Exception)
    {
        Seekable = false;
    }
    _mPlayer.WindowHandle = IntPtr.Zero;
    _videoQueue = new ConcurrentQueue<Bitmap>();
    _audioQueue = new ConcurrentQueue<byte[]>();
    _eventing = new Thread(EventManager) { Name = "vlc eventing", IsBackground = true };
    _eventing.Start();
    _mPlayer.Play();
    // block until Stop is requested
    _stopEvent.WaitOne();
    // Join(0) probes liveness; a full Join waits for the pump to drain
    if (_eventing != null && !_eventing.Join(0))
    {
        _eventing.Join();
    }
    if (!Seekable && !_stopRequested)
    {
        // live stream ended without a user stop = device lost
        PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
    }
    else
    {
        PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    DisposePlayer();
    _stopEvent?.Close();
    _stopEvent = null;
}
// Worker thread: MJPEG-over-HTTP reader. Connects via ConnectionFactory,
// determines the multipart boundary from the Content-Type header (or runs
// boundary-less for application/octet-stream), auto-corrects boundaries that
// cameras report without their leading "--", then scans the rolling buffer
// for JPEG SOI magic (FF D8 FF) / boundary pairs and raises NewFrame per
// image. Supports an optional _decodeKey scheme where the JPEG magic is
// replaced by a key that must match. Reconnects until _abort is set; finally
// raises PlayingFinished with the accumulated reason.
private void WorkerThread()
{
    // buffer to read stream
    var buffer = new byte[BufSize];
    // JPEG magic number (start-of-image marker)
    var jpegMagic = new byte[] { 0xFF, 0xD8, 0xFF };
    var encoding = new ASCIIEncoding();
    while (!_abort.WaitOne(20) && !MainForm.ShuttingDown)
    {
        // HTTP web request
        HttpWebRequest request = null;
        // web response
        WebResponse response = null;
        // stream for MJPEG downloading
        Stream stream = null;
        // boundary between images (string and binary versions)
        string boudaryStr = null;
        // flag signaling if boundary was checked or not
        var boundaryIsChecked = false;
        // read amounts and positions:
        // total = valid bytes in buffer, pos = current scan offset,
        // todo = unscanned bytes, start = offset of current JPEG start
        int todo = 0, total = 0, pos = 0, align = 1;
        var start = 0;
        ConnectionFactory connectionFactory = new ConnectionFactory();
        // align
        // 1 = searching for image start
        // 2 = searching for image end
        try
        {
            // create request
            // get response
            var vss = Tokenise();
            response = connectionFactory.GetResponse(vss, _cookies, _headers, _httpUserAgent, _login, _password, "GET", "", "", _useHttp10, out request);
            if (response == null)
            {
                throw new Exception("Stream could not connect");
            }
            // check content type
            var contentType = response.ContentType;
            var contentTypeArray = contentType.Split('/');
            // "application/octet-stream"
            int boundaryLen;
            byte[] boundary;
            if ((contentTypeArray[0] == "application") && (contentTypeArray[1] == "octet-stream"))
            {
                // raw JPEG stream: frames delimited by JPEG magic alone
                boundaryLen = 0;
                boundary = new byte[0];
            }
            else if ((contentTypeArray[0] == "multipart") && contentType.Contains("mixed"))
            {
                // get boundary from the Content-Type parameter
                var boundaryIndex = contentType.IndexOf("boundary", 0, StringComparison.Ordinal);
                if (boundaryIndex != -1)
                {
                    boundaryIndex = contentType.IndexOf("=", boundaryIndex + 8, StringComparison.Ordinal);
                }
                if (boundaryIndex == -1)
                {
                    // try same scenario as with octet-stream, i.e. without boundaries
                    boundaryLen = 0;
                    boundary = new byte[0];
                }
                else
                {
                    boudaryStr = contentType.Substring(boundaryIndex + 1);
                    // remove spaces and double quotes, which may be added by some IP cameras
                    boudaryStr = boudaryStr.Trim(' ', '"');
                    boundary = encoding.GetBytes(boudaryStr);
                    boundaryLen = boundary.Length;
                    boundaryIsChecked = false;
                }
            }
            else
            {
                throw new Exception("Invalid content type.");
            }
            // get response stream
            try
            {
                stream = response.GetResponseStream();
            }
            catch (NullReferenceException)
            {
                throw new Exception("Connection failed");
            }
            stream.ReadTimeout = _requestTimeout;
            // loop
            while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
            {
                // check total read: reset the window if the buffer is nearly full
                if (total > BufSize - ReadSize)
                {
                    total = pos = todo = 0;
                }
                // read next portion from stream
                int read;
                if ((read = stream.Read(buffer, total, ReadSize)) == 0)
                {
                    throw new ApplicationException();
                }
                total += read;
                todo += read;
                // increment received bytes counter
                _bytesReceived += read;
                // do we need to check boundary ?
                if ((boundaryLen != 0) && !boundaryIsChecked)
                {
                    // some IP cameras, like AirLink, claim that boundary is "myboundary",
                    // when it is really "--myboundary". this needs to be corrected.
                    pos = ByteArrayUtils.Find(buffer, boundary, 0, todo);
                    // continue reading if boudary was not found
                    if (pos == -1)
                    {
                        continue;
                    }
                    // prepend any characters found between the preceding line break
                    // and the advertised boundary (typically "--")
                    for (var i = pos - 1; i >= 0; i--)
                    {
                        var ch = buffer[i];
                        if ((ch == (byte)'\n') || (ch == (byte)'\r'))
                        {
                            break;
                        }
                        boudaryStr = (char)ch + boudaryStr;
                    }
                    boundary = encoding.GetBytes(boudaryStr);
                    boundaryLen = boundary.Length;
                    boundaryIsChecked = true;
                }
                // search for image start
                if ((align == 1) && (todo >= jpegMagic.Length))
                {
                    start = ByteArrayUtils.Find(buffer, jpegMagic, pos, todo);
                    if (start != -1)
                    {
                        // found JPEG start
                        pos = start + jpegMagic.Length;
                        todo = total - pos;
                        align = 2;
                    }
                    else
                    {
                        // delimiter not found: keep the tail that could be a
                        // partial magic sequence
                        todo = jpegMagic.Length - 1;
                        pos = total - todo;
                    }
                }
                var decode = !string.IsNullOrEmpty(_decodeKey);
                // search for image end ( boundaryLen can be 0, so need extra check )
                while ((align == 2) && (todo != 0) && (todo >= boundaryLen))
                {
                    var stop = ByteArrayUtils.Find(buffer, boundaryLen != 0 ? boundary : jpegMagic, pos, todo);
                    if (stop != -1)
                    {
                        // increment frames counter
                        _framesReceived++;
                        var nf = NewFrame;
                        // image at stop
                        if (nf != null)
                        {
                            if (decode)
                            {
                                // keyed stream: bytes after the JPEG magic must
                                // match the configured decode key
                                var marker = Encoding.ASCII.GetBytes(_decodeKey);
                                using (var ms = new MemoryStream(buffer, start + jpegMagic.Length, jpegMagic.Length + marker.Length))
                                {
                                    var key = new byte[marker.Length];
                                    ms.Read(key, 0, marker.Length);
                                    if (!ByteArrayUtils.UnsafeCompare(marker, key))
                                    {
                                        throw new Exception("Image Decode Failed - Check the decode key matches the encode key on ispy server");
                                    }
                                }
                                // rewrite the first three bytes back to the JPEG
                                // magic, then decode the frame
                                using (var ms = new MemoryStream(buffer, start + marker.Length, stop - start - marker.Length))
                                {
                                    ms.Seek(0, SeekOrigin.Begin);
                                    ms.WriteByte(jpegMagic[0]);
                                    ms.WriteByte(jpegMagic[1]);
                                    ms.WriteByte(jpegMagic[2]);
                                    ms.Seek(0, SeekOrigin.Begin);
                                    using (var bmp = (Bitmap)Image.FromStream(ms))
                                    {
                                        var da = new NewFrameEventArgs(bmp);
                                        nf.Invoke(this, da);
                                    }
                                }
                            }
                            else
                            {
                                using (var ms = new MemoryStream(buffer, start, stop - start))
                                {
                                    using (var bmp = (Bitmap)Image.FromStream(ms))
                                    {
                                        var da = new NewFrameEventArgs(bmp);
                                        nf.Invoke(this, da);
                                    }
                                }
                            }
                        }
                        // shift array: drop the consumed frame, keep the tail
                        pos = stop + boundaryLen;
                        todo = total - pos;
                        Array.Copy(buffer, pos, buffer, 0, todo);
                        total = todo;
                        pos = 0;
                        align = 1;
                    }
                    else
                    {
                        // boundary not found: retain only the bytes that could be
                        // the beginning of the delimiter
                        if (boundaryLen != 0)
                        {
                            todo = boundaryLen - 1;
                            pos = total - todo;
                        }
                        else
                        {
                            todo = 0;
                            pos = total;
                        }
                    }
                }
            }
        }
        catch (ApplicationException)
        {
            // do nothing for Application Exception, which we raised on our own
            // wait for a while before the next try
            Thread.Sleep(250);
        }
        catch (ThreadAbortException)
        {
            break;
        }
        catch (Exception ex)
        {
            // provide information to clients
            Logger.LogException(ex, "MJPEG");
            _res = ReasonToFinishPlaying.DeviceLost;
            break;
            // wait for a while before the next try
            //Thread.Sleep(250);
        }
        finally
        {
            // abort request
            request?.Abort();
            stream?.Flush();
            stream?.Close();
            response?.Close();
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
// Opens the FFmpeg input (newer-bindings variant of DoStart): sets integer
// protocol options per URL scheme, installs the interrupt callback via an
// AVIOInterruptCB_callback_func wrapper, then opens the input under
// Program.MutexHelper. Throw() converts a non-zero avformat_open_input
// result into an exception; failures surface through ErrorHandler and the
// PlayingFinished/AudioFinished events. Clears _starting when done.
private void DoStart()
{
    var vss = Source;
    // audio-only sources skip URL token substitution
    if (!IsAudio)
    {
        vss = Tokenise(vss);
    }
    AVDictionary* options = null;
    if (_inputFormat == null)
    {
        // NOTE(review): assumes vss contains ':'; a scheme-less path would
        // make IndexOf return -1 and Substring throw — confirm callers.
        var prefix = vss.ToLower().Substring(0, vss.IndexOf(":", StringComparison.Ordinal));
        switch (prefix)
        {
            case "https":
            case "http":
            case "mmsh":
            case "mms":
                // stimeout is in microseconds, hence * 1000
                ffmpeg.av_dict_set_int(&options, "timeout", _timeout, 0);
                ffmpeg.av_dict_set_int(&options, "stimeout", _timeout * 1000, 0);
                if (_cookies != "")
                {
                    ffmpeg.av_dict_set(&options, "cookies", _cookies, 0);
                }
                if (_headers != "")
                {
                    ffmpeg.av_dict_set(&options, "headers", _headers, 0);
                }
                if (_userAgent != "")
                {
                    ffmpeg.av_dict_set(&options, "user-agent", _userAgent, 0);
                }
                break;
            // default sits mid-switch deliberately: any scheme other than the
            // HTTP family or rtsp/rtmp just gets a plain timeout
            default:
                ffmpeg.av_dict_set_int(&options, "timeout", _timeout, 0);
                break;
            case "rtsp":
            case "rtmp":
                ffmpeg.av_dict_set_int(&options, "stimeout", _timeout * 1000, 0);
                if (_userAgent != "")
                {
                    ffmpeg.av_dict_set(&options, "user-agent", _userAgent, 0);
                }
                break;
        }
        // ignored by FFmpeg for non-RTSP inputs
        ffmpeg.av_dict_set(&options, "rtsp_transport", _modeRTSP, 0);
    }
    ffmpeg.av_dict_set_int(&options, "rtbufsize", 10000000, 0);

    // user-supplied extra options, one "name=value" per line
    var lo = _options.Split(Environment.NewLine.ToCharArray());
    foreach (var nv in lo)
    {
        if (!string.IsNullOrEmpty(nv))
        {
            var i = nv.IndexOf('=');
            if (i > -1)
            {
                var n = nv.Substring(0, i).Trim();
                var v = nv.Substring(i + 1).Trim();
                if (!string.IsNullOrEmpty(n) && !string.IsNullOrEmpty(v))
                {
                    int j;
                    if (int.TryParse(v, out j))
                    {
                        ffmpeg.av_dict_set_int(&options, n, j, 0);
                    }
                    else
                    {
                        ffmpeg.av_dict_set(&options, n, v, 0);
                    }
                }
            }
        }
    }

    _abort = false;
    try
    {
        // serialise FFmpeg open/close operations process-wide
        Program.MutexHelper.Wait();
        var pFormatContext = ffmpeg.avformat_alloc_context();
        _lastPacket = DateTime.UtcNow;
        // keep delegate + wrapper referenced so the GC cannot collect them
        // while native code holds the function pointer
        _interruptCallback = InterruptCb;
        _interruptCallbackAddress = Marshal.GetFunctionPointerForDelegate(_interruptCallback);
        _aviocb = new AVIOInterruptCB_callback_func { Pointer = _interruptCallbackAddress };
        pFormatContext->interrupt_callback.callback = _aviocb;
        pFormatContext->interrupt_callback.opaque = null;
        pFormatContext->max_analyze_duration = 0;
        Throw("OPEN_INPUT", ffmpeg.avformat_open_input(&pFormatContext, vss, _inputFormat, &options));
        _formatContext = pFormatContext;
        SetupFormat();
    }
    catch (Exception ex)
    {
        // report and signal both video and audio consumers
        ErrorHandler?.Invoke(ex.Message);
        _res = ReasonToFinishPlaying.VideoSourceError;
        PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    }
    finally
    {
        try
        {
            Program.MutexHelper.Release();
        }
        catch
        {
        }
    }
    _starting = false;
}
// Main decode loop (legacy avcodec_decode_audio4/avcodec_decode_video2 API,
// sbyte pointer bindings): reads packets until stopped, resamples audio to
// 16-bit PCM via swresample for DataAvailable, converts video to BGR24 via
// swscale for NewFrame (cloning the Bitmap since it wraps a native buffer
// freed later). A video stall longer than _timeout ms sets DeviceLost. On
// exit, frees all FFmpeg state under Program.FfmpegMutex and raises
// PlayingFinished/AudioFinished.
private void ReadFrames()
{
    AVFrame* pConvertedFrame = null;
    sbyte* pConvertedFrameBuffer = null;
    SwsContext* pConvertContext = null;
    BufferedWaveProvider waveProvider = null;
    SampleChannel sampleChannel = null;
    bool audioInited = false;
    bool videoInited = false;
    var packet = new AVPacket();
    do
    {
        ffmpeg.av_init_packet(&packet);
        // a fresh decode frame is allocated per packet and freed on every exit path
        AVFrame* frame = ffmpeg.av_frame_alloc();
        ffmpeg.av_frame_unref(frame);
        if (ffmpeg.av_read_frame(_formatContext, &packet) < 0)
        {
            _stopReadingFrames = true;
            _res = ReasonToFinishPlaying.VideoSourceError;
            break;
        }
        if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
        {
            break;
        }
        // packetTemp is a consumable copy; its data/size are advanced as the
        // audio decoder eats bytes, while packet stays intact for freeing
        AVPacket packetTemp = packet;
        var nf = NewFrame;
        var da = DataAvailable;
        _lastPacket = DateTime.UtcNow;
        if (_audioStream != null && packetTemp.stream_index == _audioStream->index)
        {
            // one-shot "this source has audio" notification
            if (HasAudioStream != null)
            {
                HasAudioStream?.Invoke(this, EventArgs.Empty);
                HasAudioStream = null;
            }
            if (da != null)
            {
                int s = 0;
                var buffer = new sbyte[_audioCodecContext->sample_rate * 2];
                var tbuffer = new sbyte[_audioCodecContext->sample_rate * 2];
                bool b = false;
                // outPtrs[0] points at tbuffer; swr_convert writes converted
                // samples there, which are accumulated into buffer
                fixed (sbyte** outPtrs = new sbyte*[32])
                {
                    fixed (sbyte* bPtr = &tbuffer[0])
                    {
                        outPtrs[0] = bPtr;
                        do
                        {
                            int gotFrame = 0;
                            int inUsed = ffmpeg.avcodec_decode_audio4(_audioCodecContext, frame, &gotFrame, &packetTemp);
                            if (inUsed < 0 || gotFrame == 0)
                            {
                                b = true;
                                break;
                            }
                            int numSamplesOut = ffmpeg.swr_convert(_swrContext, outPtrs, _audioCodecContext->sample_rate, &frame->data0, frame->nb_samples);
                            // 2 bytes per 16-bit sample per channel
                            var l = numSamplesOut * 2 * _audioCodecContext->channels;
                            Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                            s += l;
                            packetTemp.data += inUsed;
                            packetTemp.size -= inUsed;
                        } while (packetTemp.size > 0);
                    }
                }
                // decode error: free and stop the whole loop
                if (b)
                {
                    break;
                }
                ffmpeg.av_free_packet(&packet);
                ffmpeg.av_frame_free(&frame);
                // first audio data: build the NAudio pipeline
                if (!audioInited)
                {
                    audioInited = true;
                    RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16, _audioCodecContext->channels);
                    waveProvider = new BufferedWaveProvider(RecordingFormat)
                    {
                        DiscardOnBufferOverflow = true,
                        BufferDuration = TimeSpan.FromMilliseconds(500)
                    };
                    sampleChannel = new SampleChannel(waveProvider);
                    sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                }
                byte[] ba = new byte[s];
                Buffer.BlockCopy(buffer, 0, ba, 0, s);
                waveProvider.AddSamples(ba, 0, s);
                // read through the sample channel so volume metering fires
                // even when nobody is listening
                var sampleBuffer = new float[s];
                int read = sampleChannel.Read(sampleBuffer, 0, s);
                da(this, new DataAvailableEventArgs(ba, read));
                if (Listening)
                {
                    WaveOutProvider?.AddSamples(ba, 0, read);
                }
            }
        }
        if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index)
        {
            int frameFinished = 0;
            //decode video frame
            int ret = ffmpeg.avcodec_decode_video2(_codecContext, frame, &frameFinished, &packetTemp);
            if (ret < 0)
            {
                ffmpeg.av_free_packet(&packet);
                ffmpeg.av_frame_free(&frame);
                break;
            }
            if (frameFinished == 1)
            {
                // first decoded frame: allocate the BGR24 conversion frame,
                // buffer and swscale context once
                if (!videoInited)
                {
                    videoInited = true;
                    pConvertedFrame = ffmpeg.av_frame_alloc();
                    var convertedFrameBufferSize = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_BGR24, _codecContext->width, _codecContext->height);
                    pConvertedFrameBuffer = (sbyte*)ffmpeg.av_malloc((ulong)convertedFrameBufferSize);
                    ffmpeg.avpicture_fill((AVPicture*)pConvertedFrame, pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _codecContext->width, _codecContext->height);
                    pConvertContext = ffmpeg.sws_getContext(_codecContext->width, _codecContext->height, _codecContext->pix_fmt, _codecContext->width, _codecContext->height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                }
                var src = &frame->data0;
                var dst = &pConvertedFrame->data0;
                var srcStride = frame->linesize;
                var dstStride = pConvertedFrame->linesize;
                ffmpeg.sws_scale(pConvertContext, src, srcStride, 0, _codecContext->height, dst, dstStride);
                var convertedFrameAddress = pConvertedFrame->data0;
                if (convertedFrameAddress != null)
                {
                    var imageBufferPtr = new IntPtr(convertedFrameAddress);
                    var linesize = dstStride[0];
                    if (frame->decode_error_flags > 0)
                    {
                        ffmpeg.av_free_packet(&packet);
                        ffmpeg.av_frame_free(&frame);
                        break;
                    }
                    // the Bitmap wraps native memory, so a Clone() is handed to
                    // subscribers while the wrapper is disposed immediately
                    using (var mat = new Bitmap(_codecContext->width, _codecContext->height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        var nfe = new NewFrameEventArgs((Bitmap)mat.Clone());
                        nf.Invoke(this, nfe);
                    }
                    _lastVideoFrame = DateTime.UtcNow;
                }
            }
        }
        // watchdog: no video frame within _timeout ms means the device is gone
        if (_videoStream != null)
        {
            if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds > _timeout)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                _stopReadingFrames = true;
            }
        }
        // NOTE(review): on the audio path above, packet/frame were already freed
        // before reaching here — this looks like a potential double free in the
        // original code; verify against the binding's av_free_packet semantics.
        ffmpeg.av_free_packet(&packet);
        ffmpeg.av_frame_free(&frame);
    } while (!_stopReadingFrames && !MainForm.ShuttingDown);

    try
    {
        // teardown is serialised with other FFmpeg users
        Program.FfmpegMutex.WaitOne();
        if (pConvertedFrame != null)
        {
            ffmpeg.av_free(pConvertedFrame);
        }
        if (pConvertedFrameBuffer != null)
        {
            ffmpeg.av_free(pConvertedFrameBuffer);
        }
        if (_formatContext != null)
        {
            if (_formatContext->streams != null)
            {
                int j = (int)_formatContext->nb_streams;
                for (var i = j - 1; i >= 0; i--)
                {
                    AVStream* stream = _formatContext->streams[i];
                    if (stream != null && stream->codec != null && stream->codec->codec != null)
                    {
                        stream->discard = AVDiscard.AVDISCARD_ALL;
                        ffmpeg.avcodec_close(stream->codec);
                    }
                }
            }
            fixed (AVFormatContext** f = &_formatContext)
            {
                ffmpeg.avformat_close_input(f);
            }
            _formatContext = null;
        }
        _videoStream = null;
        _audioStream = null;
        _audioCodecContext = null;
        _codecContext = null;
        if (_swrContext != null)
        {
            fixed (SwrContext** s = &_swrContext)
            {
                ffmpeg.swr_free(s);
            }
            _swrContext = null;
        }
        if (pConvertContext != null)
        {
            ffmpeg.sws_freeContext(pConvertContext);
        }
        if (sampleChannel != null)
        {
            sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
            sampleChannel = null;
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex, "Media Stream (close)");
    }
    finally
    {
        try
        {
            Program.FfmpegMutex.ReleaseMutex();
        }
        catch
        {
        }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
/// <summary>
/// VLC worker thread: builds the libVLC factory (once), opens the media and
/// player, wires audio/video render callbacks, starts playback and pumps
/// events until the stream finishes, then raises PlayingFinished/AudioFinished
/// and disposes the player.
/// Every exit path is expected to raise PlayingFinished and AudioFinished so
/// subscribers are never left waiting.
/// </summary>
private void WorkerThread()
{
    bool file = false;
    if (string.IsNullOrEmpty(Source))
    {
        // No source configured: report the error and notify listeners before exiting.
        Logger.LogError("Source not found", "VLC");
        _res = ReasonToFinishPlaying.VideoSourceError;
        PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        return;
    }

    try
    {
        if (File.Exists(Source))
        {
            file = true;
        }
    }
    catch
    {
        // ignored - Source may be a URL or device moniker File.Exists cannot parse
    }

    if (_mFactory == null)
    {
        var args = new List<string>
        {
            "-I",
            "dumy",
            "--ignore-config",
            "--no-osd",
            "--disable-screensaver",
            "--plugin-path=./plugins"
        };
        if (file)
        {
            args.Add("--file-caching=3000");
        }
        try
        {
            // merge user-supplied arguments, dropping duplicates
            var l2 = args.ToList();
            l2.AddRange(_arguments);
            l2 = l2.Distinct().ToList();
            _mFactory = new MediaPlayerFactory(l2.ToArray());
        }
        catch (Exception ex)
        {
            Logger.LogException(ex, "VLC Stream");
            Logger.LogMessage("VLC arguments are: " + string.Join(",", args.ToArray()), "VLC Stream");
            Logger.LogMessage("Using default VLC configuration.", "VLC Stream");
            // FIX: previously this returned without raising any events, leaving
            // subscribers waiting forever. Notify them of the failure, matching
            // the empty-source exit path above.
            _res = ReasonToFinishPlaying.VideoSourceError;
            PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
            return;
        }
        GC.KeepAlive(_mFactory);
    }

    var vss = Source;
    if (!_modeAudio)
    {
        vss = Tokenise(vss);
    }

    // File sources need the file-specific media type for seeking/duration support.
    _mMedia = file
        ? _mFactory.CreateMedia<IMediaFromFile>(vss)
        : _mFactory.CreateMedia<IMedia>(vss);
    _mMedia.Events.DurationChanged += EventsDurationChanged;
    _mMedia.Events.StateChanged += EventsStateChanged;

    // Dispose any player left over from a previous run before creating a new one.
    if (_mPlayer != null)
    {
        try
        {
            _mPlayer?.Dispose();
        }
        catch
        {
            // ignored
        }
        _mPlayer = null;
    }

    _mPlayer = _mFactory.CreatePlayer<IVideoPlayer>();
    _mPlayer.Events.TimeChanged += EventsTimeChanged;

    // Route decoded audio through our own callbacks instead of VLC's output.
    var fc = new Func<SoundFormat, SoundFormat>(SoundFormatCallback);
    _mPlayer.CustomAudioRenderer.SetFormatCallback(fc);
    var ac = new AudioCallbacks { SoundCallback = SoundCallback };
    _mPlayer.CustomAudioRenderer.SetCallbacks(ac);
    _mPlayer.CustomAudioRenderer.SetExceptionHandler(Handler);

    if (!_modeAudio)
    {
        // Video frames are also rendered through our callback.
        _mPlayer.CustomRenderer.SetCallback(FrameCallback);
        _mPlayer.CustomRenderer.SetExceptionHandler(Handler);
    }
    GC.KeepAlive(_mPlayer);

    _needsSetup = true;
    if (!_modeAudio)
    {
        _mPlayer.CustomRenderer.SetFormat(new BitmapFormat(_source.settings.vlcWidth, _source.settings.vlcHeight, ChromaType.RV32));
    }

    _mPlayer.Open(_mMedia);
    _mMedia.Parse(true);
    _mPlayer.Delay = 0;

    Duration = Time = 0;
    LastFrame = DateTime.MinValue;

    // check if file source (isseekable in _mPlayer is not reliable)
    Seekable = false;
    try
    {
        var p = Path.GetFullPath(_mMedia.Input);
        Seekable = !string.IsNullOrEmpty(p);
    }
    catch (Exception)
    {
        Seekable = false;
    }

    _videoQueue = new ConcurrentQueue<Bitmap>();
    _audioQueue = new ConcurrentQueue<byte[]>();

    _mPlayer.Play();
    _abort = new ManualResetEvent(false);

    // Blocks until playback terminates or abort is signalled.
    EventManager();

    if (Seekable)
    {
        // A seekable (file) source that reached this point is treated as a user stop.
        PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    else
    {
        PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    }

    DisposePlayer();
    _abort.Close();
}
/// <summary>
/// Worker thread: polls the camera URL for single JPEG snapshots, raising
/// NewFrame for each decoded image until stopped or too many consecutive
/// errors occur (which reports DeviceLost).
/// </summary>
private void WorkerThread()
{
    // whole snapshot is accumulated into this scratch buffer
    var frameBuffer = new byte[BufferSize];

    HttpWebRequest activeRequest = null;
    WebResponse activeResponse = null;
    Stream bodyStream = null;

    // random query-string suffix defeats proxy/browser caching
    var cacheBuster = new Random((int)DateTime.UtcNow.Ticks);

    var finishReason = ReasonToFinishPlaying.StoppedByUser;
    int consecutiveErrors = 0;

    while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
    {
        int total = 0;
        try
        {
            // remember when this download cycle started (for frame pacing below)
            DateTime cycleStart = DateTime.UtcNow;

            // append the cache-busting parameter with ? or & depending on URL shape
            var url = _source + ((_source.IndexOf('?') == -1) ? '?' : '&') + "fake=" + cacheBuster.Next();

            activeResponse = ConnectionFactory.GetResponse(url, Cookies, Headers, HttpUserAgent, Login, Password, "GET", "", UseHttp10, out activeRequest);

            try
            {
                bodyStream = activeResponse.GetResponseStream();
            }
            catch (NullReferenceException)
            {
                throw new Exception("Connection failed");
            }
            bodyStream.ReadTimeout = _requestTimeout;

            // pull the body in ReadSize chunks until the server closes the stream
            while (!_stopEvent.WaitOne(0, false))
            {
                if (total > BufferSize - ReadSize)
                {
                    // next chunk would overflow the buffer: restart accumulation
                    total = 0;
                }

                int read = bodyStream.Read(frameBuffer, total, ReadSize);
                if (read == 0)
                {
                    break;
                }

                total += read;
                _bytesReceived += read;
            }

            if (!_stopEvent.WaitOne(0, false))
            {
                _framesReceived++;

                // decode the accumulated bytes and hand the image to subscribers
                if (NewFrame != null)
                {
                    using (var bitmap = (Bitmap)Image.FromStream(new MemoryStream(frameBuffer, 0, total)))
                    {
                        NewFrame(this, new NewFrameEventArgs(bitmap));
                    }
                }
            }

            // throttle to the requested frame interval, if one is set
            if (_frameInterval > 0)
            {
                var elapsed = DateTime.UtcNow.Subtract(cycleStart);
                int msec = _frameInterval - (int)elapsed.TotalMilliseconds;
                if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                {
                    break;
                }
            }

            consecutiveErrors = 0;
        }
        catch (ThreadAbortException)
        {
            break;
        }
        catch (Exception ex)
        {
            Logger.LogExceptionToFile(ex, "JPEG");
            consecutiveErrors++;
            if (consecutiveErrors > 3)
            {
                // too many failures in a row: assume the device is gone
                finishReason = ReasonToFinishPlaying.DeviceLost;
                break;
            }
            // brief pause before retrying the connection
            Thread.Sleep(250);
        }
        finally
        {
            activeRequest?.Abort();
            bodyStream?.Flush();
            bodyStream?.Close();
            activeResponse?.Close();
        }

        if (_stopEvent.WaitOne(0, false))
        {
            break;
        }
    }

    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(finishReason));
}
/// <summary>
/// Worker thread: connects to an MJPEG (multipart) HTTP stream, extracts each
/// JPEG frame from the multipart body and raises NewFrame with the raw JPEG
/// bytes. Reconnects on error; exits when stopEvent is set. The frame
/// extractor is a two-state scanner over a sliding byte buffer:
/// align == 1 -> searching for a JPEG start marker,
/// align == 2 -> searching for the end of the current image.
/// </summary>
private void WorkerThread()
{
    // buffer to read stream
    byte[] buffer = new byte[bufSize];
    // JPEG magic number (SOI marker FF D8 followed by FF)
    byte[] jpegMagic = new byte[] { 0xFF, 0xD8, 0xFF };
    int jpegMagicLength = 3;
    ASCIIEncoding encoding = new ASCIIEncoding();

    while (!stopEvent.WaitOne(0, false))
    {
        // reset reload event
        reloadEvent.Reset();

        // HTTP web request
        HttpWebRequest request = null;
        // web response
        WebResponse response = null;
        // stream for MJPEG downloading
        Stream stream = null;
        // boundary between images (string and binary versions)
        byte[] boundary = null;
        string boudaryStr = null;
        // length of boundary
        int boundaryLen;
        // flag signaling if boundary was checked or not
        bool boundaryIsChecked = false;
        // read amounts and positions:
        //   total = valid bytes currently in buffer
        //   pos   = scan position, todo = unscanned bytes from pos
        int read, todo = 0, total = 0, pos = 0, align = 1;
        int start = 0, stop = 0;

        // align
        // 1 = searching for image start
        // 2 = searching for image end
        try
        {
            // create request
            request = (HttpWebRequest)WebRequest.Create(source);
            // set user agent
            if (HttpUserAgent != null)
            {
                request.UserAgent = HttpUserAgent;
            }
            // set proxy
            if (Proxy != null)
            {
                request.Proxy = Proxy;
            }
            // set timeout value for the request
            request.Timeout = RequestTimeout;
            // set login and password
            if ((Login != null) && (Password != null) && (Login != string.Empty))
            {
                request.Credentials = new NetworkCredential(Login, Password);
            }
            // set connection group name
            if (SeparateConnectionGroup)
            {
                request.ConnectionGroupName = GetHashCode().ToString();
            }
            // force basic authentication through extra headers if required
            // (some cameras never send the 401 challenge Credentials relies on)
            if (ForceBasicAuthentication)
            {
                string authInfo = string.Format("{0}:{1}", Login, Password);
                authInfo = Convert.ToBase64String(Encoding.Default.GetBytes(authInfo));
                request.Headers["Authorization"] = "Basic " + authInfo;
            }
            // get response
            response = request.GetResponse();

            // check content type
            string contentType = response.ContentType;
            string[] contentTypeArray = contentType.Split('/');

            // "application/octet-stream": no multipart boundaries, frames are
            // delimited only by JPEG magic numbers
            if ((contentTypeArray[0] == "application") && (contentTypeArray[1] == "octet-stream"))
            {
                boundaryLen = 0;
                boundary = new byte[0];
            }
            else if ((contentTypeArray[0] == "multipart") && (contentType.Contains("mixed")))
            {
                // get boundary
                int boundaryIndex = contentType.IndexOf("boundary", 0);
                if (boundaryIndex != -1)
                {
                    boundaryIndex = contentType.IndexOf("=", boundaryIndex + 8);
                }

                if (boundaryIndex == -1)
                {
                    // try same scenario as with octet-stream, i.e. without boundaries
                    boundaryLen = 0;
                    boundary = new byte[0];
                }
                else
                {
                    boudaryStr = contentType.Substring(boundaryIndex + 1);
                    // remove spaces and double quotes, which may be added by some IP cameras
                    boudaryStr = boudaryStr.Trim(' ', '"');

                    boundary = encoding.GetBytes(boudaryStr);
                    boundaryLen = boundary.Length;
                    boundaryIsChecked = false;
                }
            }
            else
            {
                throw new Exception("Invalid content type.");
            }

            // get response stream
            stream = response.GetResponseStream();
            stream.ReadTimeout = RequestTimeout;

            // loop until stopped or a reload is requested
            while ((!stopEvent.WaitOne(0, false)) && (!reloadEvent.WaitOne(0, false)))
            {
                // check total read: if the buffer is nearly full without a
                // complete frame, discard it and start over
                if (total > bufSize - readSize)
                {
                    total = pos = todo = 0;
                }

                // read next portion from stream; 0 bytes means the server
                // closed the connection, so trigger a reconnect via the
                // ApplicationException handler below
                if ((read = stream.Read(buffer, total, readSize)) == 0)
                {
                    throw new ApplicationException();
                }

                total += read;
                todo += read;

                // increment received bytes counter
                bytesReceived += read;

                // do we need to check boundary ?
                if ((boundaryLen != 0) && (!boundaryIsChecked))
                {
                    // some IP cameras, like AirLink, claim that boundary is "myboundary",
                    // when it is really "--myboundary". this needs to be corrected.
                    pos = ByteArrayUtils.Find(buffer, boundary, 0, todo);
                    // continue reading if boundary was not found
                    if (pos == -1)
                    {
                        continue;
                    }

                    // walk backwards from the match, prepending any extra
                    // characters (e.g. leading "--") until a line break
                    for (int i = pos - 1; i >= 0; i--)
                    {
                        byte ch = buffer[i];

                        if ((ch == (byte)'\n') || (ch == (byte)'\r'))
                        {
                            break;
                        }

                        boudaryStr = (char)ch + boudaryStr;
                    }

                    boundary = encoding.GetBytes(boudaryStr);
                    boundaryLen = boundary.Length;
                    boundaryIsChecked = true;
                }

                // search for image start (state 1)
                if ((align == 1) && (todo >= jpegMagicLength))
                {
                    start = ByteArrayUtils.Find(buffer, jpegMagic, pos, todo);
                    if (start != -1)
                    {
                        // found JPEG start - switch to searching for its end
                        pos = start + jpegMagicLength;
                        todo = total - pos;
                        align = 2;
                    }
                    else
                    {
                        // delimiter not found: keep only the last bytes that
                        // could be a partial magic number straddling the read
                        todo = jpegMagicLength - 1;
                        pos = total - todo;
                    }
                }

                // search for image end (state 2)
                // ( boundaryLen can be 0, so need extra check )
                while ((align == 2) && (todo != 0) && (todo >= boundaryLen))
                {
                    // without a boundary, the next JPEG magic marks the end
                    stop = ByteArrayUtils.Find(buffer, (boundaryLen != 0) ? boundary : jpegMagic, pos, todo);

                    if (stop != -1)
                    {
                        pos = stop;
                        todo = total - pos;

                        // increment frames counter
                        framesReceived++;

                        // image found between start and stop
                        if ((NewFrame != null) && (!stopEvent.WaitOne(0, false)))
                        {
                            var frame = new byte[stop - start];
                            Array.Copy(buffer, start, frame, 0, stop - start);
                            NewFrame(frame);
                        }

                        // shift the unconsumed tail to the front of the buffer
                        // and go back to searching for the next image start
                        pos = stop + boundaryLen;
                        todo = total - pos;
                        Array.Copy(buffer, pos, buffer, 0, todo);

                        total = todo;
                        pos = 0;
                        align = 1;
                    }
                    else
                    {
                        // boundary not found
                        if (boundaryLen != 0)
                        {
                            // keep a partial-boundary-sized tail unscanned
                            todo = boundaryLen - 1;
                            pos = total - todo;
                        }
                        else
                        {
                            todo = 0;
                            pos = total;
                        }
                    }
                }
            }
        }
        catch (ApplicationException)
        {
            // do nothing for Application Exception, which we raised on our own
            // wait for a while before the next try
            Thread.Sleep(250);
        }
        catch (ThreadAbortException)
        {
            break;
        }
        catch (Exception exception)
        {
            // provide information to clients
            VideoSourceError?.Invoke(exception.Message);
            // wait for a while before the next try
            Thread.Sleep(250);
        }
        finally
        {
            // abort request
            if (request != null)
            {
                request.Abort();
                request = null;
            }
            // close response stream
            if (stream != null)
            {
                stream.Close();
                stream = null;
            }
            // close response
            if (response != null)
            {
                response.Close();
                response = null;
            }
        }

        // need to stop ?
        if (stopEvent.WaitOne(0, false))
        {
            break;
        }
    }

    // NOTE(review): always reports StoppedByUser even after repeated errors —
    // confirm whether error exits should report a different reason
    PlayingFinished?.Invoke(ReasonToFinishPlaying.StoppedByUser);
}