/// <summary>
/// Opens the ffmpeg audio source and pumps decoded frames to subscribers
/// until stopped or the source errors out.
/// Fixes: mutex release moved into finally (was leaked if the catch body threw);
/// event handlers snapshotted before invoke (check-then-invoke race);
/// null guard on ReadAudioFrame (other versions of this method show it can return null).
/// </summary>
private void FfmpegListener()
{
    AudioFileReader afr = null;
    // Serialize ffmpeg open calls across the app.
    Program.WriterMutex.WaitOne();
    try
    {
        afr = new AudioFileReader();
        afr.Open(_source);
    }
    catch (Exception ex)
    {
        MainForm.LogExceptionToFile(ex);
    }
    finally
    {
        // FIX: previously released outside any finally, so an exception in the
        // catch path could leave the mutex held forever.
        Program.WriterMutex.ReleaseMutex();
    }
    if (afr == null || !afr.IsOpen)
    {
        AudioFinished?.Invoke(this, ReasonToFinishPlaying.AudioSourceError);
        return;
    }
    RecordingFormat = new WaveFormat(afr.SampleRate, 16, afr.Channels);
    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    // Milliseconds of audio per byte; the *4 assumes 4 bytes per sample-frame
    // (original pacing computation) — TODO confirm against the reader's output format.
    double brat = 1000d / Convert.ToDouble(afr.SampleRate * afr.Channels * 4);
    try
    {
        while (!_stopEvent.WaitOne(0, false))
        {
            DateTime start = DateTime.Now;
            byte[] data = afr.ReadAudioFrame();
            // FIX: guard against a null frame before touching data.Length.
            if (data != null && data.Length > 0)
            {
                // FIX: snapshot the delegate so a concurrent unsubscribe between
                // the null check and the invoke cannot throw.
                var da = DataAvailable;
                if (da != null)
                {
                    //forces processing of volume level without piping it out
                    _waveProvider.AddSamples(data, 0, data.Length);
                    var sampleBuffer = new float[data.Length];
                    _sampleChannel.Read(sampleBuffer, 0, data.Length);
                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(data, 0, data.Length);
                    }
                    da(this, new DataAvailableEventArgs((byte[])data.Clone()));
                }
                // Pace playback to roughly real time for file sources.
                int interval = Convert.ToInt32(Convert.ToDouble(data.Length) * brat);
                if (interval > 0)
                {
                    var span = DateTime.Now.Subtract(start);
                    int msec = interval - (int)span.TotalMilliseconds;
                    if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                    {
                        break;
                    }
                }
            }
        }
        AudioFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser);
    }
    catch (Exception e)
    {
        AudioSourceError?.Invoke(this, new AudioSourceErrorEventArgs(e.Message));
        MainForm.LogExceptionToFile(e);
    }
}
/// <summary>
/// Opens the ffmpeg audio source and pumps decoded PCM frames to subscribers
/// until stopped, shut down, or the source times out. Calls ShutDown() with an
/// error message (empty string on clean exit) when finished.
/// </summary>
private void FfmpegListener()
{
    _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    _afr = null;
    bool open = false;
    string errmsg = "";
    try
    {
        // Serialize ffmpeg open calls across the app.
        Program.FfmpegMutex.WaitOne();
        _afr = new AudioFileReader();
        // Lower-case the scheme portion of the URL only; the rest of the URI
        // (credentials, path) must keep its original casing.
        int i = _source.IndexOf("://", StringComparison.Ordinal);
        if (i > -1)
        {
            _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
        }
        _afr.Timeout = Timeout;
        _afr.AnalyzeDuration = AnalyseDuration;
        _afr.Headers = Headers;
        _afr.Cookies = Cookies;
        _afr.UserAgent = UserAgent;
        _afr.Open(_source);
        open = true;
    }
    catch (Exception ex)
    {
        MainForm.LogExceptionToFile(ex, "FFMPEG");
    }
    finally
    {
        try
        {
            Program.FfmpegMutex.ReleaseMutex();
        }
        catch (ObjectDisposedException)
        {
            //can happen on shutdown
        }
    }
    if (_afr == null || !_afr.IsOpen || !open)
    {
        ShutDown("Could not open audio stream" + ": " + _source);
        return;
    }
    // 16-bit PCM at the source's native rate/channel count.
    RecordingFormat = new WaveFormat(_afr.SampleRate, 16, _afr.Channels);
    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    LastFrame = DateTime.UtcNow;
    try
    {
        while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
        {
            byte[] data = _afr.ReadAudioFrame();
            if (data != null && data.Length > 0)
            {
                LastFrame = DateTime.UtcNow;
                // Snapshot the delegate to avoid a race with unsubscription.
                var da = DataAvailable;
                if (da != null)
                {
                    //forces processing of volume level without piping it out
                    _waveProvider.AddSamples(data, 0, data.Length);
                    var sampleBuffer = new float[data.Length];
                    _sampleChannel.Read(sampleBuffer, 0, data.Length);
                    da.Invoke(this, new DataAvailableEventArgs((byte[])data.Clone()));
                    // Mirror to the listen-live output only when requested.
                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(data, 0, data.Length);
                    }
                }
                if (_stopEvent.WaitOne(30, false))
                {
                    break;
                }
            }
            else
            {
                // No frame: treat a silent source as lost after Timeout ms.
                if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout)
                {
                    throw new Exception("Audio source timeout");
                }
                if (_stopEvent.WaitOne(30, false))
                {
                    break;
                }
            }
        }
    }
    catch (Exception e)
    {
        MainForm.LogExceptionToFile(e, "FFMPEG");
        errmsg = e.Message;
    }
    // Tear down: detach the meter, drain buffers, then report via ShutDown.
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        _sampleChannel = null;
    }
    if (_waveProvider?.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    if (WaveOutProvider?.BufferedBytes > 0)
    {
        WaveOutProvider?.ClearBuffer();
    }
    ShutDown(errmsg);
}
/// <summary>
/// Streams an MP3 source over HTTP, decompressing frames via ACM and pushing
/// PCM to the buffered provider / volume meter until stopped or the stream ends.
/// The decompressor and wave provider are created lazily from the first frame,
/// since the sample rate is unknown until then.
/// </summary>
private void StreamMP3()
{
    HttpWebRequest request = null;
    try
    {
        var resp = ConnectionFactory.GetResponse(_source, false, out request);
        var buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
        IMp3FrameDecompressor decompressor = null;
        using (var responseStream = resp.GetResponseStream())
        {
            var readFullyStream = new ReadFullyStream(responseStream);
            while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
            {
                // Back off when the playback buffer has less than ~250ms of headroom.
                if (_bufferedWaveProvider != null && _bufferedWaveProvider.BufferLength - _bufferedWaveProvider.BufferedBytes < _bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4)
                {
                    //Debug.WriteLine("Buffer getting full, taking a break");
                    Thread.Sleep(100);
                }
                else
                {
                    // Snapshot the delegate; only decode while someone is listening.
                    var da = DataAvailable;
                    if (da != null)
                    {
                        Mp3Frame frame;
                        try
                        {
                            frame = Mp3Frame.LoadFromStream(readFullyStream);
                        }
                        catch (EndOfStreamException)
                        {
                            // reached the end of the MP3 file / stream
                            break;
                        }
                        catch (WebException)
                        {
                            // probably we have aborted download from the GUI thread
                            break;
                        }
                        if (decompressor == null || _bufferedWaveProvider == null)
                        {
                            // don't think these details matter too much - just help ACM select the right codec
                            // however, the buffered provider doesn't know what sample rate it is working at
                            // until we have a frame
                            WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate, frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate);
                            RecordingFormat = new WaveFormat(frame.SampleRate, 16, frame.ChannelMode == ChannelMode.Mono ? 1 : 2);
                            decompressor = new AcmMp3FrameDecompressor(waveFormat);
                            _bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat) { BufferDuration = TimeSpan.FromSeconds(5) };
                            _sampleChannel = new SampleChannel(_bufferedWaveProvider);
                            _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                        }
                        int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                        _bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                        // NOTE(review): the meter read uses buffer.Length rather than
                        // the decompressed byte count — presumably intentional to drain
                        // the channel; confirm against SampleChannel semantics.
                        var sampleBuffer = new float[buffer.Length];
                        int read = _sampleChannel.Read(sampleBuffer, 0, buffer.Length);
                        da(this, new DataAvailableEventArgs((byte[])buffer.Clone(), read));
                        if (Listening)
                        {
                            WaveOutProvider?.AddSamples(buffer, 0, read);
                        }
                    }
                }
                if (_stopEvent.WaitOne(0, false))
                {
                    break;
                }
            }
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
            // was doing this in a finally block, but for some reason
            // we are hanging on response stream .Dispose so never get there
            if (decompressor != null)
            {
                decompressor.Dispose();
                decompressor = null;
            }
        }
    }
    catch (Exception ex)
    {
        var af = AudioFinished;
        af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        MainForm.LogExceptionToFile(ex, "MP3Stream");
    }
    finally
    {
        // Abort rather than dispose — see the hang note above.
        try
        {
            request?.Abort();
        }
        catch { }
        request = null;
    }
}
/// <summary>
/// Reads raw PCM packets from _stream and forwards them to subscribers,
/// routing through the wave provider so the volume meter fires. Runs until
/// aborted, the app shuts down, the stream ends, or an error occurs; always
/// closes the stream and raises AudioFinished on exit.
/// </summary>
private void DirectStreamListener()
{
    try
    {
        var readBuffer = new byte[PacketSize];
        if (_stream != null)
        {
            while (!_abort.WaitOne(20) && !MainForm.ShuttingDown)
            {
                var handler = DataAvailable;
                if (handler == null)
                {
                    // Nobody subscribed — idle this cycle without reading.
                    continue;
                }
                int bytesRead = _stream.Read(readBuffer, 0, PacketSize);
                if (bytesRead <= 0)
                {
                    // End of stream.
                    break;
                }
                if (_sampleChannel == null)
                {
                    continue;
                }
                // Feed the provider so the pre-volume meter processes the audio.
                _waveProvider.AddSamples(readBuffer, 0, bytesRead);
                var meterBuffer = new float[bytesRead];
                int read = _sampleChannel.Read(meterBuffer, 0, bytesRead);
                handler(this, new DataAvailableEventArgs((byte[])readBuffer.Clone(), read));
                if (Listening)
                {
                    WaveOutProvider?.AddSamples(readBuffer, 0, read);
                }
            }
        }
    }
    catch (Exception e)
    {
        _res = ReasonToFinishPlaying.DeviceLost;
        Logger.LogException(e, "Direct");
    }
    // Teardown: close the stream, detach the meter, drain buffers, notify.
    _stream?.Close();
    _stream = null;
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
    }
    if (_waveProvider != null && _waveProvider.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    if (WaveOutProvider?.BufferedBytes > 0)
    {
        WaveOutProvider.ClearBuffer();
    }
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
/// <summary>
/// Main demux/decode loop: reads packets from the ffmpeg format context,
/// decodes audio (resampled via swr to the output format) and video (converted
/// via sws to BGR24), and raises DataAvailable / NewFrame events. Exits on
/// read error, corrupt packet, abort or shutdown; always emits a final null
/// NewFrame and calls CleanUp().
/// </summary>
private void ReadFrames()
{
    pConvertedFrameBuffer = IntPtr.Zero;
    pConvertContext = null;
    var audioInited = false;
    var videoInited = false;
    byte[] ourBuffer = null, ffmpegBuffer = null;
    var dstData = new byte_ptrArray4();
    var dstLinesize = new int_array4();
    BufferedWaveProvider waveProvider = null;
    sampleChannel = null;
    var packet = new AVPacket();
    do
    {
        ffmpeg.av_init_packet(&packet);
        // Lazily size audio buffers once the audio codec context is known.
        if (_audioCodecContext != null && ourBuffer == null)
        {
            ourBuffer = new byte[OutFormat.AverageBytesPerSecond];
            ffmpegBuffer = new byte[Math.Max(48000 * 4, _audioCodecContext->sample_rate * 4)];//4 = handle fltp
        }
        if (Log("AV_READ_FRAME", ffmpeg.av_read_frame(_formatContext, &packet)))
        {
            break;
        }
        if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
        {
            break;
        }
        // Snapshot event delegates once per packet to avoid races.
        var nf = NewFrame;
        var da = DataAvailable;
        _lastPacket = DateTime.UtcNow;
        var ret = -11; //EAGAIN
        // ---- audio path ----
        if (_audioStream != null && packet.stream_index == _audioStream->index && _audioCodecContext != null && !_ignoreAudio)
        {
            if (HasAudioStream != null)
            {
                HasAudioStream?.Invoke(this, EventArgs.Empty);
                HasAudioStream = null;
            }
            if (da != null)
            {
                var s = 0;
                fixed(byte **outPtrs = new byte *[32])
                {
                    fixed(byte *bPtr = &ffmpegBuffer[0])
                    {
                        // Both plane pointers target the same buffer; swr writes
                        // interleaved output here.
                        outPtrs[0] = bPtr;
                        outPtrs[1] = bPtr;
                        var af = ffmpeg.av_frame_alloc();
                        ffmpeg.avcodec_send_packet(_audioCodecContext, &packet);
                        // Drain every frame the decoder produced for this packet.
                        do
                        {
                            ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, af);
                            if (ret == 0)
                            {
                                int numSamplesOut = 0;
                                try
                                {
                                    if (_swrContext == null)
                                    {
                                        //need to do this here as send_packet can change channel layout and throw an exception below
                                        initSWR();
                                    }
                                    // Gather per-channel plane pointers for planar input.
                                    fixed(byte **inbufs = new byte *[_audioCodecContext->channels])
                                    {
                                        for (uint i = 0; i < _audioCodecContext->channels; i++)
                                        {
                                            inbufs[i] = af->data[i];
                                        }
                                        numSamplesOut = ffmpeg.swr_convert(_swrContext, outPtrs, OutFormat.SampleRate, inbufs, af->nb_samples);
                                    }
                                }
                                catch (Exception ex)
                                {
                                    // A failing resample disables audio for this source
                                    // rather than killing the whole stream.
                                    Logger.LogException(ex, "MediaStream - Audio Read");
                                    _ignoreAudio = true;
                                    break;
                                }
                                if (numSamplesOut > 0)
                                {
                                    // 2 bytes per 16-bit sample per output channel.
                                    var l = numSamplesOut * 2 * OutFormat.Channels;
                                    Buffer.BlockCopy(ffmpegBuffer, 0, ourBuffer, s, l);
                                    s += l;
                                }
                                else
                                {
                                    ret = numSamplesOut; //(error)
                                }
                            }
                            if (af->decode_error_flags > 0)
                            {
                                break;
                            }
                        } while (ret == 0);
                        ffmpeg.av_frame_free(&af);
                        if (s > 0)
                        {
                            var ba = new byte[s];
                            Buffer.BlockCopy(ourBuffer, 0, ba, 0, s);
                            // First audio: create the provider/meter from the codec format.
                            if (!audioInited)
                            {
                                audioInited = true;
                                RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16, _audioCodecContext->channels);
                                waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(200) };
                                sampleChannel = new SampleChannel(waveProvider);
                                sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                            }
                            waveProvider.AddSamples(ba, 0, s);
                            var sampleBuffer = new float[s];
                            var read = sampleChannel.Read(sampleBuffer, 0, s);
                            da(this, new DataAvailableEventArgs(ba, s));
                            if (Listening)
                            {
                                WaveOutProvider?.AddSamples(ba, 0, read);
                            }
                        }
                    }
                }
            }
        }
        // ---- video path ----
        if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index && _videoCodecContext != null)
        {
            var ef = ShouldEmitFrame;
            ffmpeg.avcodec_send_packet(_videoCodecContext, &packet);
            do
            {
                var vf = ffmpeg.av_frame_alloc();
                ret = ffmpeg.avcodec_receive_frame(_videoCodecContext, vf);
                if (ret == 0 && ef)
                {
                    AVPixelFormat srcFmt;
                    if (_hwDeviceCtx != null)
                    {
                        // Hardware decode: copy the surface back to a system-memory
                        // frame (NV12 assumed here — TODO confirm for all hw types).
                        srcFmt = AVPixelFormat.AV_PIX_FMT_NV12;
                        var output = ffmpeg.av_frame_alloc();
                        ffmpeg.av_hwframe_transfer_data(output, vf, 0);
                        ffmpeg.av_frame_copy_props(output, vf);
                        ffmpeg.av_frame_free(&vf);
                        vf = output;
                    }
                    else
                    {
                        srcFmt = (AVPixelFormat)vf->format;
                    }
                    // First video: allocate the BGR24 conversion buffer and context.
                    if (!videoInited)
                    {
                        videoInited = true;
                        _finalSize = Helper.CalcResizeSize(_source.settings.resize, new Size(_videoCodecContext->width, _videoCodecContext->height), new Size(_source.settings.resizeWidth, _source.settings.resizeHeight));
                        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGR24, _finalSize.Width, _finalSize.Height, 1);
                        pConvertedFrameBuffer = Marshal.AllocHGlobal(convertedFrameBufferSize);
                        ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _finalSize.Width, _finalSize.Height, 1);
                        pConvertContext = ffmpeg.sws_getContext(_videoCodecContext->width, _videoCodecContext->height, NormalizePixelFormat(srcFmt), _finalSize.Width, _finalSize.Height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                    }
                    Log("SWS_SCALE", ffmpeg.sws_scale(pConvertContext, vf->data, vf->linesize, 0, _videoCodecContext->height, dstData, dstLinesize));
                    if (vf->decode_error_flags > 0)
                    {
                        ffmpeg.av_frame_free(&vf);
                        break;
                    }
                    // Bitmap wraps the unmanaged buffer; consumers must not keep it
                    // beyond the event call — hence the using.
                    using (var mat = new Bitmap(_finalSize.Width, _finalSize.Height, dstLinesize[0], PixelFormat.Format24bppRgb, pConvertedFrameBuffer))
                    {
                        var nfe = new NewFrameEventArgs(mat);
                        nf.Invoke(this, nfe);
                    }
                    _lastVideoFrame = DateTime.UtcNow;
                    ffmpeg.av_frame_free(&vf);
                    // Only one emitted frame per packet.
                    break;
                }
                ffmpeg.av_frame_free(&vf);
            } while (ret == 0);
        }
        // Video watchdog: abort if no frame arrived within the timeout window.
        if (nf != null && _videoStream != null)
        {
            if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds * 1000 > _timeoutMicroSeconds)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                _abort = true;
            }
        }
        ffmpeg.av_packet_unref(&packet);
        // EAGAIN from the last receive — nothing decoded, so back off briefly.
        if (ret == -11)
        {
            Thread.Sleep(10);
        }
    } while (!_abort && !MainForm.ShuttingDown);
    // Null frame signals end-of-stream to subscribers.
    NewFrame?.Invoke(this, new NewFrameEventArgs(null));
    CleanUp();
}
/// <summary>
/// Receives raw PCM over a socket and forwards it to subscribers via the
/// wave provider / volume meter until aborted, shut down, or the connection drops.
/// Fix: Socket.Receive returns 0 when the remote end performs a graceful
/// shutdown; previously that spun forever feeding empty buffers — now it
/// terminates with DeviceLost.
/// </summary>
private void WebStreamListener()
{
    _abort = new ManualResetEvent(false);
    try
    {
        var data = new byte[6400];
        if (_socket != null)
        {
            while (!_abort.WaitOne(20) && !MainForm.ShuttingDown)
            {
                var da = DataAvailable;
                if (da == null)
                {
                    // No subscribers — stop listening (original behavior).
                    break;
                }
                int recbytesize = _socket.Receive(data, 0, 6400, SocketFlags.None);
                if (recbytesize <= 0)
                {
                    // FIX: remote closed the connection.
                    _res = ReasonToFinishPlaying.DeviceLost;
                    break;
                }
                if (_sampleChannel != null)
                {
                    // Route through the provider so the volume meter fires.
                    _waveProvider.AddSamples(data, 0, recbytesize);
                    var sampleBuffer = new float[recbytesize];
                    int read = _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                    da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));
                    if (Listening)
                    {
                        WaveOutProvider?.AddSamples(data, 0, read);
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        _res = ReasonToFinishPlaying.DeviceLost;
        Logger.LogException(e, "WebStream");
    }
    // Teardown: close the socket, detach the meter, drain buffers, notify.
    if (_socket != null)
    {
        _socket.Close();
        _socket = null;
    }
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
    }
    if (_waveProvider != null && _waveProvider.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    if (WaveOutProvider?.BufferedBytes > 0)
    {
        WaveOutProvider.ClearBuffer();
    }
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    _abort.Close();
}
/// <summary>
/// Accepts command-audio connections on the server socket, times each session,
/// and plays received A-Law audio through the wave provider until the remote
/// side stops sending. Restarts the listener when the server socket is gone.
/// Fixes: the speakTime.Elapsed handler was re-subscribed on every accepted
/// connection (handlers stacked up, firing multiple times per tick); the inner
/// SocketException handler disposed the server socket but never left the
/// receive loop, causing an infinite exception loop and an NRE on the second
/// Dispose.
/// </summary>
private void SpyServerListener()
{
    while (true)
    {
        if (sSocket != null)
        {
            try
            {
                Socket clientSocket = sSocket.Accept();
                Logger.Info(string.Format("来自【{0}】新的指挥请示已接入!开启计时器!", clientSocket.RemoteEndPoint.ToString()));
                // A request was accepted: start the timer that shows command duration.
                // FIX: unsubscribe first so repeated accepts don't stack duplicate handlers.
                speakTime.Elapsed -= SpeakTime_Elapsed;
                speakTime.Elapsed += SpeakTime_Elapsed;
                sw.Start();
                speakTime.Start();
                SetForm(false);
                // Timer running: receive audio data and play it.
                while (true)
                {
                    try
                    {
                        byte[] dataSize = RecerveVarData(clientSocket);
                        if (dataSize.Length <= 0)
                        {
                            Logger.Info("无语音流,指挥结束!!!");
                            speakTime.Stop();
                            sw.Stop();
                            sw.Reset();
                            SetLB("00:00:00");
                            SetForm(true);
                            if (clientSocket != null)
                            {
                                // No audio stream received: close the client socket.
                                clientSocket.Shutdown(SocketShutdown.Both);
                                clientSocket.Close();
                                clientSocket.Dispose();
                                clientSocket = null;
                            }
                            break;
                        }
                        else
                        {
                            byte[] dec;
                            ALawDecoder.ALawDecode(dataSize, dataSize.Length, out dec);
                            var da = DataAvailable;
                            if (da != null)
                            {
                                if (_sampleChannel != null)
                                {
                                    // Route through the provider so the volume meter fires.
                                    _waveProvider.AddSamples(dec, 0, dec.Length);
                                    var sampleBuffer = new float[dec.Length];
                                    int read = _sampleChannel.Read(sampleBuffer, 0, dec.Length);
                                    da(this, new DataAvailableEventArgs((byte[])dec.Clone(), read));
                                    if (Listening)
                                    {
                                        WaveOutProvider?.AddSamples(dec, 0, read);
                                    }
                                }
                            }
                        }
                    }
                    catch (SocketException se)
                    {
                        Logger.Error("通信出现异常,退出Socket. " + se.Message);
                        sSocket?.Dispose();
                        sSocket = null;
                        // FIX: leave the receive loop — the socket is gone; the outer
                        // loop's restart path takes over.
                        break;
                    }
                }
            }
            catch (Exception e)
            {
                if (sSocket != null)
                {
                    Logger.Error("通信出现异常,关闭Socket. " + e.Message);
                    // Communication failed: close the server socket.
                    sSocket.Close();
                    sSocket.Dispose();
                    sSocket = null;
                }
            }
        }
        else
        {
            // Server socket gone: stop timers, reset the display, restart listening.
            if (speakTime != null)
            {
                Logger.Error("指挥端通信结束,计时器停止。");
                speakTime.Stop();
            }
            if (sw != null)
            {
                sw.Stop();
            }
            SetLB("00:00:00");
            SetForm(true);
            Logger.Info("ServerStream ReStart!!!");
            Start();
        }
    }
}
/// <summary>
/// Receives raw PCM over a socket and forwards it to subscribers until stopped
/// or the connection drops. Raises AudioFinished with StoppedByUser on normal
/// exit and DeviceLost on error.
/// Fixes: DataAvailable/AudioFinished were invoked via check-then-invoke (race
/// with unsubscription) — now snapshotted; a 0-byte Receive (remote graceful
/// close) previously looped forever — now treated as a lost stream.
/// </summary>
private void WebStreamListener()
{
    try
    {
        var data = new byte[6400];
        if (_socket != null)
        {
            while (!_stopEvent.WaitOne(0, false) && !MainForm.Reallyclose)
            {
                // FIX: snapshot the delegate to avoid a race between the null
                // check and the invoke.
                var da = DataAvailable;
                if (da == null)
                {
                    break;
                }
                int recbytesize = _socket.Receive(data, 0, 6400, SocketFlags.None);
                if (recbytesize == 0)
                {
                    // FIX: remote closed the connection; previously this spun
                    // forever adding empty buffers.
                    throw new Exception("lost stream");
                }
                if (_sampleChannel != null)
                {
                    // Route through the provider so the volume meter fires.
                    _waveProvider.AddSamples(data, 0, recbytesize);
                    var sampleBuffer = new float[recbytesize];
                    _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                    if (Listening && WaveOutProvider != null)
                    {
                        WaveOutProvider.AddSamples(data, 0, recbytesize);
                    }
                    da(this, new DataAvailableEventArgs((byte[])data.Clone()));
                }
                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                {
                    break;
                }
            }
        }
        AudioFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser);
    }
    catch (Exception e)
    {
        AudioFinished?.Invoke(this, ReasonToFinishPlaying.DeviceLost);
        MainForm.LogExceptionToFile(e);
    }
    if (_socket != null)
    {
        _socket.Close();
        _socket = null;
    }
}
/// <summary>
/// Streams raw WAV/PCM audio over HTTP and forwards it to subscribers until
/// aborted, shut down, or the stream is lost. Raises AudioFinished with the
/// final reason and disposes the abort handle.
/// Fix (consistency with the other listeners in this class): the
/// PreVolumeMeter handler is now unsubscribed and the buffers drained on exit —
/// previously the handler leaked across restarts.
/// </summary>
private void StreamWav()
{
    _abort = new ManualResetEvent(false);
    HttpWebRequest request = null;
    try
    {
        using (HttpWebResponse resp = _connectionFactory.GetResponse(_source, "GET", "", out request))
        {
            //1/4 of a second, 16 byte buffer
            var data = new byte[((RecordingFormat.SampleRate / 4) * 2) * RecordingFormat.Channels];
            using (var stream = resp.GetResponseStream())
            {
                if (stream == null)
                {
                    throw new Exception("Stream is null");
                }
                while (!_abort.WaitOne(20) && !MainForm.ShuttingDown)
                {
                    var da = DataAvailable;
                    if (da == null)
                    {
                        // No subscribers — stop streaming (original behavior).
                        break;
                    }
                    int recbytesize = stream.Read(data, 0, data.Length);
                    if (recbytesize == 0)
                    {
                        throw new Exception("lost stream");
                    }
                    if (_sampleChannel == null)
                    {
                        continue;
                    }
                    // Route through the provider so the volume meter fires.
                    _waveProvider.AddSamples(data, 0, recbytesize);
                    var sampleBuffer = new float[recbytesize];
                    int read = _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                    da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));
                    if (Listening)
                    {
                        WaveOutProvider?.AddSamples(data, 0, read);
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        _res = ReasonToFinishPlaying.DeviceLost;
        Logger.LogException(ex, "WavStream");
    }
    // FIX: teardown matching DirectStreamListener/WebStreamListener — detach
    // the meter handler and drain buffered audio.
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
    }
    if (_waveProvider != null && _waveProvider.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    if (WaveOutProvider?.BufferedBytes > 0)
    {
        WaveOutProvider.ClearBuffer();
    }
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    _abort.Close();
}
/// <summary>
/// Streams A-Law encoded audio over HTTP, decodes it to PCM, and forwards it
/// to subscribers until stopped or the stream is lost.
/// Fixes: the catch block silently swallowed the exception (the commented-out
/// log call is restored); the WebResponse was never disposed — now wrapped in
/// a using so the connection is released.
/// </summary>
private void SpyServerListener()
{
    var data = new byte[3200];
    try
    {
        var request = (HttpWebRequest)WebRequest.Create(_source);
        request.Timeout = 10000;
        request.ReadWriteTimeout = 5000;
        // FIX: dispose the response so the underlying connection is released.
        using (var response = request.GetResponse())
        using (Stream stream = response.GetResponseStream())
        {
            if (stream == null)
            {
                throw new Exception("Stream is null");
            }
            stream.ReadTimeout = 5000;
            while (!_stopEvent.WaitOne(0, false))
            {
                int recbytesize = stream.Read(data, 0, 3200);
                if (recbytesize == 0)
                {
                    throw new Exception("lost stream");
                }
                byte[] dec;
                ALawDecoder.ALawDecode(data, recbytesize, out dec);
                var da = DataAvailable;
                if (da == null)
                {
                    // No subscribers — stop streaming (original behavior).
                    break;
                }
                if (_sampleChannel != null)
                {
                    // Route through the provider so the volume meter fires.
                    _waveProvider.AddSamples(dec, 0, dec.Length);
                    var sampleBuffer = new float[dec.Length];
                    int read = _sampleChannel.Read(sampleBuffer, 0, dec.Length);
                    da(this, new DataAvailableEventArgs((byte[])dec.Clone(), read));
                    if (Listening)
                    {
                        WaveOutProvider?.AddSamples(dec, 0, read);
                    }
                }
                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                {
                    break;
                }
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        var af = AudioFinished;
        af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        // FIX: was swallowed silently — restore the logging that had been
        // commented out.
        Logger.LogExceptionToFile(e, "ispyServer");
    }
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        _sampleChannel = null;
    }
    if (_waveProvider?.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    if (WaveOutProvider?.BufferedBytes > 0)
    {
        WaveOutProvider?.ClearBuffer();
    }
}
/// <summary>
/// Opens the ffmpeg audio source and pumps decoded PCM frames to subscribers,
/// pacing file sources to real time and using timeouts for live sources.
/// Calls ShutDown() with an error message (empty on clean exit) when finished.
/// Fixes: `data.Equals(0)` on a byte[] compares an array reference to a boxed
/// int and is ALWAYS false — the intended empty-frame check is
/// `data.Length == 0`; open failures now log the full exception
/// (LogExceptionToFile) instead of just the message, matching the loop's own
/// catch handler.
/// </summary>
private void FfmpegListener()
{
    _isrunning = true;
    _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    _afr = null;
    bool open = false;
    string errmsg = "";
    try
    {
        // Serialize ffmpeg open calls across the app.
        Program.FFMPEGMutex.WaitOne();
        _afr = new AudioFileReader();
        // Lower-case only the scheme portion of the URL.
        int i = _source.IndexOf("://", StringComparison.Ordinal);
        if (i > -1)
        {
            _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
        }
        _afr.Timeout = Timeout;
        _afr.AnalyzeDuration = AnalyseDuration;
        _afr.Open(_source);
        open = true;
    }
    catch (Exception ex)
    {
        // FIX: keep the stack trace, consistent with the main loop's handler.
        MainForm.LogExceptionToFile(ex);
    }
    finally
    {
        try
        {
            Program.FFMPEGMutex.ReleaseMutex();
        }
        catch (ObjectDisposedException)
        {
            //can happen on shutdown
        }
    }
    if (_afr == null || !_afr.IsOpen || !open)
    {
        ShutDown("Could not open audio stream" + ": " + _source);
        return;
    }
    RecordingFormat = new WaveFormat(_afr.SampleRate, 16, _afr.Channels);
    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    // Bytes of audio per second; used to pace file playback to real time.
    int mult = _afr.BitsPerSample / 8;
    double btrg = Convert.ToDouble(_afr.SampleRate * mult * _afr.Channels);
    LastFrame = Helper.Now;
    bool realTime = !IsFileSource;
    try
    {
        DateTime req = Helper.Now;
        while (!_stopEvent.WaitOne(10, false) && !MainForm.Reallyclose)
        {
            byte[] data = _afr.ReadAudioFrame();
            // FIX: was `data == null || data.Equals(0)` — Equals(0) can never be
            // true for a byte[], so file playback never stopped at end of stream.
            if (data == null || data.Length == 0)
            {
                if (!realTime)
                {
                    // End of file source.
                    break;
                }
            }
            if (data != null && data.Length > 0)
            {
                LastFrame = Helper.Now;
                var da = DataAvailable;
                if (da != null)
                {
                    //forces processing of volume level without piping it out
                    _waveProvider.AddSamples(data, 0, data.Length);
                    var sampleBuffer = new float[data.Length];
                    _sampleChannel.Read(sampleBuffer, 0, data.Length);
                    da(this, new DataAvailableEventArgs((byte[])data.Clone()));
                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(data, 0, data.Length);
                    }
                }
                if (realTime)
                {
                    if (_stopEvent.WaitOne(30, false))
                    {
                        break;
                    }
                }
                else
                {
                    // Pace file sources: sleep for the duration this frame represents,
                    // minus the time already spent processing it.
                    double f = (data.Length / btrg) * 1000;
                    if (f > 0)
                    {
                        var span = Helper.Now.Subtract(req);
                        var msec = Convert.ToInt32(f - (int)span.TotalMilliseconds);
                        if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                        {
                            break;
                        }
                        req = Helper.Now;
                    }
                }
            }
            else
            {
                // No frame: treat a silent live source as lost after Timeout ms.
                if ((Helper.Now - LastFrame).TotalMilliseconds > Timeout)
                {
                    throw new Exception("Audio source timeout");
                }
                if (_stopEvent.WaitOne(30, false))
                {
                    break;
                }
            }
        }
    }
    catch (Exception e)
    {
        MainForm.LogExceptionToFile(e);
        errmsg = e.Message;
    }
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        _sampleChannel = null;
    }
    if (_waveProvider != null)
    {
        if (_waveProvider.BufferedBytes > 0)
        {
            _waveProvider.ClearBuffer();
        }
    }
    ShutDown(errmsg);
}
/// <summary>
/// Opens the ffmpeg audio source and pumps decoded frames to subscribers,
/// pacing non-URL (file) sources to real time and applying a 5s silence
/// timeout.
/// Fixes: mutex release moved into finally (leaked if the catch body threw);
/// DataAvailable / AudioFinished / AudioSourceError snapshotted before invoke
/// (check-then-invoke races); IndexOf("://") made ordinal (was
/// culture-sensitive); null guard on ReadAudioFrame before data.Length.
/// </summary>
private void FfmpegListener()
{
    AudioFileReader afr = null;
    // Serialize ffmpeg open calls across the app.
    Program.WriterMutex.WaitOne();
    try
    {
        afr = new AudioFileReader();
        afr.Open(_source);
    }
    catch (Exception ex)
    {
        Log.Error("", ex);//MainForm.LogExceptionToFile(ex);
    }
    finally
    {
        // FIX: release in finally so an exception path cannot leak the mutex.
        Program.WriterMutex.ReleaseMutex();
    }
    if (afr == null || !afr.IsOpen)
    {
        AudioFinished?.Invoke(this, ReasonToFinishPlaying.AudioSourceError);
        return;
    }
    RecordingFormat = new WaveFormat(afr.SampleRate, 16, afr.Channels);
    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    // Bytes of audio per second; used to pace file playback to real time.
    int mult = afr.BitsPerSample / 8;
    double btrg = Convert.ToDouble(afr.SampleRate * mult * afr.Channels);
    DateTime lastPacket = DateTime.Now;
    // URL sources (anything with a scheme) are treated as real time.
    bool realTime = _source.IndexOf("://", StringComparison.Ordinal) != -1;
    try
    {
        DateTime req = DateTime.Now;
        while (!_stopEvent.WaitOne(0, false))
        {
            byte[] data = afr.ReadAudioFrame();
            // FIX: null guard — ReadAudioFrame can return null on some sources.
            if (data != null && data.Length > 0)
            {
                lastPacket = DateTime.Now;
                // FIX: snapshot the delegate to avoid a race with unsubscription.
                var da = DataAvailable;
                if (da != null)
                {
                    //forces processing of volume level without piping it out
                    _waveProvider.AddSamples(data, 0, data.Length);
                    var sampleBuffer = new float[data.Length];
                    _sampleChannel.Read(sampleBuffer, 0, data.Length);
                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(data, 0, data.Length);
                    }
                    da(this, new DataAvailableEventArgs((byte[])data.Clone()));
                }
                if (realTime)
                {
                    if (_stopEvent.WaitOne(10, false))
                    {
                        break;
                    }
                }
                else
                {
                    // Pace file sources: wait out the duration of this frame.
                    double f = (data.Length / btrg) * 1000;
                    if (f > 0)
                    {
                        var span = DateTime.Now.Subtract(req);
                        var msec = Convert.ToInt32(f - (int)span.TotalMilliseconds);
                        if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                        {
                            break;
                        }
                        req = DateTime.Now;
                    }
                }
            }
            else
            {
                // 5 seconds of silence => source is dead.
                if ((DateTime.Now - lastPacket).TotalMilliseconds > 5000)
                {
                    afr.Close();
                    Stop();
                    throw new Exception("Audio source timeout");
                }
                if (_stopEvent.WaitOne(30, false))
                {
                    break;
                }
            }
        }
        AudioFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser);
    }
    catch (Exception e)
    {
        AudioSourceError?.Invoke(this, new AudioSourceErrorEventArgs(e.Message));
        Log.Error("", e);//MainForm.LogExceptionToFile(e);
    }
}
/// <summary>
/// Main demux/decode loop: reads packets from the ffmpeg format context,
/// decodes audio (resampled via swr) and video (converted via sws to BGR24),
/// and raises DataAvailable / NewFrame. Exits on read error, corrupt packet,
/// abort or shutdown; always emits a final null NewFrame and calls CleanUp().
/// </summary>
private void ReadFrames()
{
    pConvertedFrameBuffer = IntPtr.Zero;
    pConvertContext = null;
    var audioInited = false;
    var videoInited = false;
    byte[] buffer = null, tbuffer = null;
    var dstData = new byte_ptrArray4();
    var dstLinesize = new int_array4();
    BufferedWaveProvider waveProvider = null;
    sampleChannel = null;
    var packet = new AVPacket();
    do
    {
        ffmpeg.av_init_packet(&packet);
        // Lazily size audio buffers once the audio codec context is known.
        if (_audioCodecContext != null && buffer == null)
        {
            buffer = new byte[_audioCodecContext->sample_rate * 2];
            tbuffer = new byte[_audioCodecContext->sample_rate * 2];
        }
        if (Log("AV_READ_FRAME", ffmpeg.av_read_frame(_formatContext, &packet)))
        {
            break;
        }
        if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
        {
            break;
        }
        // Snapshot event delegates once per packet to avoid races.
        var nf = NewFrame;
        var da = DataAvailable;
        _lastPacket = DateTime.UtcNow;
        int ret = -11; //EAGAIN
        // ---- audio path ----
        if (_audioStream != null && packet.stream_index == _audioStream->index && _audioCodecContext != null)
        {
            if (HasAudioStream != null)
            {
                HasAudioStream?.Invoke(this, EventArgs.Empty);
                HasAudioStream = null;
            }
            if (da != null)
            {
                var s = 0;
                fixed(byte **outPtrs = new byte *[32])
                {
                    fixed(byte *bPtr = &tbuffer[0])
                    {
                        // swr writes converted samples into tbuffer via outPtrs[0].
                        outPtrs[0] = bPtr;
                        ffmpeg.avcodec_send_packet(_audioCodecContext, &packet);
                        // Drain every frame the decoder produced for this packet.
                        do
                        {
                            ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, _audioFrame);
                            if (ret == 0)
                            {
                                fixed(byte **datptr = _audioFrame->data.ToArray())
                                {
                                    var numSamplesOut = ffmpeg.swr_convert(_swrContext, outPtrs, _audioCodecContext->sample_rate, datptr, _audioFrame->nb_samples);
                                    if (numSamplesOut > 0)
                                    {
                                        // 2 bytes per 16-bit sample per output channel.
                                        var l = numSamplesOut * 2 * OutFormat.Channels;
                                        Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                                        s += l;
                                    }
                                    else
                                    {
                                        ret = numSamplesOut; //(error)
                                    }
                                }
                            }
                            if (_audioFrame->decode_error_flags > 0)
                            {
                                break;
                            }
                        } while (ret == 0);
                        if (s > 0)
                        {
                            var ba = new byte[s];
                            Buffer.BlockCopy(buffer, 0, ba, 0, s);
                            // First audio: create the provider/meter from the codec format.
                            if (!audioInited)
                            {
                                audioInited = true;
                                RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16, _audioCodecContext->channels);
                                waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(200) };
                                sampleChannel = new SampleChannel(waveProvider);
                                sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                            }
                            waveProvider.AddSamples(ba, 0, s);
                            var sampleBuffer = new float[s];
                            var read = sampleChannel.Read(sampleBuffer, 0, s);
                            da(this, new DataAvailableEventArgs(ba, s));
                            if (Listening)
                            {
                                WaveOutProvider?.AddSamples(ba, 0, read);
                            }
                        }
                    }
                }
            }
        }
        // ---- video path ----
        if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index && _videoCodecContext != null)
        {
            ffmpeg.avcodec_send_packet(_videoCodecContext, &packet);
            do
            {
                ret = ffmpeg.avcodec_receive_frame(_videoCodecContext, _videoFrame);
                var ef = EmitFrame;
                //Debug.WriteLine("ret: "+ret+", ef:"+ef);
                if (ret == 0 && ef)
                {
                    // First video: allocate the BGR24 conversion buffer and context.
                    if (!videoInited)
                    {
                        videoInited = true;
                        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGR24, _videoCodecContext->width, _videoCodecContext->height, 1);
                        pConvertedFrameBuffer = Marshal.AllocHGlobal(convertedFrameBufferSize);
                        ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _videoCodecContext->width, _videoCodecContext->height, 1);
                        pConvertContext = ffmpeg.sws_getContext(_videoCodecContext->width, _videoCodecContext->height, _videoCodecContext->pix_fmt, _videoCodecContext->width, _videoCodecContext->height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                    }
                    Log("SWS_SCALE", ffmpeg.sws_scale(pConvertContext, _videoFrame->data, _videoFrame->linesize, 0, _videoCodecContext->height, dstData, dstLinesize));
                    if (_videoFrame->decode_error_flags > 0)
                    {
                        break;
                    }
                    // Bitmap wraps the unmanaged buffer; valid only for the event call.
                    using (var mat = new Bitmap(_videoCodecContext->width, _videoCodecContext->height, dstLinesize[0], PixelFormat.Format24bppRgb, pConvertedFrameBuffer))
                    {
                        var nfe = new NewFrameEventArgs(mat);
                        nf.Invoke(this, nfe);
                    }
                    _lastVideoFrame = DateTime.UtcNow;
                }
            } while (ret == 0);
        }
        // Video watchdog: abort if no frame arrived within the timeout window.
        if (nf != null && _videoStream != null)
        {
            if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds * 1000 > _timeoutMicroSeconds)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                _abort = true;
            }
        }
        ffmpeg.av_packet_unref(&packet);
        // EAGAIN from the last receive — nothing decoded, so back off briefly.
        if (ret == -11)
        {
            Thread.Sleep(10);
        }
    } while (!_abort && !MainForm.ShuttingDown);
    // Null frame signals end-of-stream to subscribers.
    NewFrame?.Invoke(this, new NewFrameEventArgs(null));
    CleanUp();
}
/// <summary>
/// Legacy demux/decode loop using the deprecated avcodec_decode_audio4 /
/// avcodec_decode_video2 API. Reads packets, decodes audio (resampled via swr)
/// and video (converted via sws to BGR24), raises DataAvailable / NewFrame,
/// then tears down all ffmpeg state under the global ffmpeg mutex and raises
/// PlayingFinished / AudioFinished.
/// </summary>
private void ReadFrames()
{
    AVFrame *pConvertedFrame = null;
    sbyte *pConvertedFrameBuffer = null;
    SwsContext *pConvertContext = null;
    BufferedWaveProvider waveProvider = null;
    SampleChannel sampleChannel = null;
    bool audioInited = false;
    bool videoInited = false;
    do
    {
        AVPacket packet = new AVPacket();
        ffmpeg.av_init_packet(&packet);
        AVFrame *frame = ffmpeg.av_frame_alloc();
        ffmpeg.av_frame_unref(frame);
        if (ffmpeg.av_read_frame(_formatContext, &packet) < 0)
        {
            _abort = true;
            _res = ReasonToFinishPlaying.VideoSourceError;
            break;
        }
        if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
        {
            break;
        }
        // Working copy whose data/size are advanced as the decoder consumes bytes.
        AVPacket packetTemp = packet;
        // Snapshot event delegates once per packet to avoid races.
        var nf = NewFrame;
        var da = DataAvailable;
        _lastPacket = DateTime.UtcNow;
        // ---- audio path ----
        if (_audioStream != null && packetTemp.stream_index == _audioStream->index)
        {
            if (HasAudioStream != null)
            {
                HasAudioStream?.Invoke(this, EventArgs.Empty);
                HasAudioStream = null;
            }
            if (da != null)
            {
                int s = 0;
                var buffer = new sbyte[_audioCodecContext->sample_rate * 2];
                var tbuffer = new sbyte[_audioCodecContext->sample_rate * 2];
                bool b = false;
                fixed(sbyte **outPtrs = new sbyte *[32])
                {
                    fixed(sbyte *bPtr = &tbuffer[0])
                    {
                        // swr writes converted samples into tbuffer via outPtrs[0].
                        outPtrs[0] = bPtr;
                        // Decode the packet in chunks until all its bytes are consumed.
                        do
                        {
                            int gotFrame = 0;
                            int inUsed = ffmpeg.avcodec_decode_audio4(_audioCodecContext, frame, &gotFrame, &packetTemp);
                            if (inUsed < 0 || gotFrame == 0)
                            {
                                b = true;
                                break;
                            }
                            int numSamplesOut = ffmpeg.swr_convert(_swrContext, outPtrs, _audioCodecContext->sample_rate, &frame->data0, frame->nb_samples);
                            // 2 bytes per 16-bit sample per channel.
                            var l = numSamplesOut * 2 * _audioCodecContext->channels;
                            Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                            s += l;
                            packetTemp.data += inUsed;
                            packetTemp.size -= inUsed;
                        } while (packetTemp.size > 0);
                    }
                }
                // Decode failure aborts the whole loop.
                if (b)
                {
                    break;
                }
                // NOTE(review): packet and frame are freed here AND again at the
                // bottom of the loop body — looks like a double free; verify
                // against the ffmpeg binding semantics before changing.
                ffmpeg.av_free_packet(&packet);
                ffmpeg.av_frame_free(&frame);
                // First audio: create the provider/meter from the codec format.
                if (!audioInited)
                {
                    audioInited = true;
                    RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16, _audioCodecContext->channels);
                    waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
                    sampleChannel = new SampleChannel(waveProvider);
                    sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                }
                byte[] ba = new byte[s];
                Buffer.BlockCopy(buffer, 0, ba, 0, s);
                waveProvider.AddSamples(ba, 0, s);
                var sampleBuffer = new float[s];
                int read = sampleChannel.Read(sampleBuffer, 0, s);
                da(this, new DataAvailableEventArgs(ba, read));
                if (Listening)
                {
                    WaveOutProvider?.AddSamples(ba, 0, read);
                }
            }
        }
        // ---- video path ----
        if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index)
        {
            int frameFinished = 0;
            //decode video frame
            int ret = ffmpeg.avcodec_decode_video2(_codecContext, frame, &frameFinished, &packetTemp);
            if (ret < 0)
            {
                ffmpeg.av_free_packet(&packet);
                ffmpeg.av_frame_free(&frame);
                break;
            }
            if (frameFinished == 1)
            {
                // First video: allocate the BGR24 conversion frame/buffer/context.
                if (!videoInited)
                {
                    videoInited = true;
                    pConvertedFrame = ffmpeg.av_frame_alloc();
                    var convertedFrameBufferSize = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_BGR24, _codecContext->width, _codecContext->height);
                    pConvertedFrameBuffer = (sbyte *)ffmpeg.av_malloc((ulong)convertedFrameBufferSize);
                    ffmpeg.avpicture_fill((AVPicture *)pConvertedFrame, pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _codecContext->width, _codecContext->height);
                    pConvertContext = ffmpeg.sws_getContext(_codecContext->width, _codecContext->height, _codecContext->pix_fmt, _codecContext->width, _codecContext->height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                }
                var src = &frame->data0;
                var dst = &pConvertedFrame->data0;
                var srcStride = frame->linesize;
                var dstStride = pConvertedFrame->linesize;
                ffmpeg.sws_scale(pConvertContext, src, srcStride, 0, _codecContext->height, dst, dstStride);
                var convertedFrameAddress = pConvertedFrame->data0;
                if (convertedFrameAddress != null)
                {
                    var imageBufferPtr = new IntPtr(convertedFrameAddress);
                    var linesize = dstStride[0];
                    if (frame->decode_error_flags > 0)
                    {
                        ffmpeg.av_free_packet(&packet);
                        ffmpeg.av_frame_free(&frame);
                        break;
                    }
                    // Cloned because the Bitmap wraps unmanaged memory reused next frame.
                    using (var mat = new Bitmap(_codecContext->width, _codecContext->height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        var nfe = new NewFrameEventArgs((Bitmap)mat.Clone());
                        nf.Invoke(this, nfe);
                    }
                    _lastVideoFrame = DateTime.UtcNow;
                }
            }
        }
        // Video watchdog: abort if no frame arrived within the timeout window.
        if (_videoStream != null)
        {
            if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds > _timeout)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                _abort = true;
            }
        }
        ffmpeg.av_free_packet(&packet);
        ffmpeg.av_frame_free(&frame);
        Thread.SpinWait(20);
    } while (!_abort && !MainForm.ShuttingDown);
    // ---- teardown: all ffmpeg state released under the global mutex ----
    try
    {
        Program.FfmpegMutex.WaitOne();
        if (pConvertedFrame != null)
        {
            ffmpeg.av_free(pConvertedFrame);
        }
        if (pConvertedFrameBuffer != null)
        {
            ffmpeg.av_free(pConvertedFrameBuffer);
        }
        if (_formatContext != null)
        {
            if (_formatContext->streams != null)
            {
                // Close every stream codec before closing the input.
                int j = (int)_formatContext->nb_streams;
                for (var i = j - 1; i >= 0; i--)
                {
                    AVStream *stream = _formatContext->streams[i];
                    if (stream != null && stream->codec != null && stream->codec->codec != null)
                    {
                        stream->discard = AVDiscard.AVDISCARD_ALL;
                        ffmpeg.avcodec_close(stream->codec);
                    }
                }
            }
            fixed(AVFormatContext **f = &_formatContext)
            {
                ffmpeg.avformat_close_input(f);
            }
            _formatContext = null;
        }
        _videoStream = null;
        _audioStream = null;
        _audioCodecContext = null;
        _codecContext = null;
        if (_swrContext != null)
        {
            fixed(SwrContext **s = &_swrContext)
            {
                ffmpeg.swr_free(s);
            }
            _swrContext = null;
        }
        if (pConvertContext != null)
        {
            ffmpeg.sws_freeContext(pConvertContext);
        }
        if (sampleChannel != null)
        {
            sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
            sampleChannel = null;
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex, "Media Stream (close)");
    }
    finally
    {
        try
        {
            Program.FfmpegMutex.ReleaseMutex();
        }
        catch { }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
// Main demux/decode loop for a combined audio/video media stream.
// Reads packets from the already-opened ffmpeg format context, decodes audio
// into 16-bit PCM (raised via DataAvailable) and video into BGR24 bitmaps
// (raised via NewFrame), then releases all native resources under the global
// ffmpeg mutex before raising PlayingFinished/AudioFinished.
private void ReadFrames()
{
    var pConvertedFrameBuffer = IntPtr.Zero;
    SwsContext *pConvertContext = null;
    var audioInited = false;
    var videoInited = false;
    byte[] buffer = null, tbuffer = null;
    var dstData = new byte_ptrArray4();
    var dstLinesize = new int_array4();
    BufferedWaveProvider waveProvider = null;
    SampleChannel sampleChannel = null;
    var packet = new AVPacket();
    do
    {
        ffmpeg.av_init_packet(&packet);
        // Lazily allocate audio scratch buffers once the audio codec is known.
        // NOTE(review): sized as sample_rate * 2 bytes — presumably enough for
        // one second of mono 16-bit audio per packet; confirm against callers.
        if (_audioCodecContext != null && buffer == null)
        {
            buffer = new byte[_audioCodecContext->sample_rate * 2];
            tbuffer = new byte[_audioCodecContext->sample_rate * 2];
        }
        // Stop on read error/EOF (Log returns true on failure) or corrupt packet.
        if (Log("AV_READ_FRAME", ffmpeg.av_read_frame(_formatContext, &packet)))
        {
            break;
        }
        if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
        {
            break;
        }
        // Capture event delegates once per iteration so unsubscription on
        // another thread cannot race the invocations below.
        var nf = NewFrame;
        var da = DataAvailable;
        _lastPacket = DateTime.UtcNow;
        int ret;
        // ---- audio packet path ----
        if (_audioStream != null && packet.stream_index == _audioStream->index && _audioCodecContext != null)
        {
            if (HasAudioStream != null)
            {
                // one-shot notification that this source carries audio
                HasAudioStream?.Invoke(this, EventArgs.Empty);
                HasAudioStream = null;
            }
            if (da != null)
            {
                var s = 0;
                fixed(byte **outPtrs = new byte *[32])
                {
                    fixed(byte *bPtr = &tbuffer[0])
                    {
                        outPtrs[0] = bPtr;
                        // send/receive decode loop: one packet may yield several frames
                        ffmpeg.avcodec_send_packet(_audioCodecContext, &packet);
                        do
                        {
                            ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, _audioFrame);
                            if (ret == 0)
                            {
                                var dat = _audioFrame->data[0];
                                // resample to the output format; 2 bytes per sample per channel
                                var numSamplesOut = ffmpeg.swr_convert(_swrContext, outPtrs, _audioCodecContext->sample_rate, &dat, _audioFrame->nb_samples);
                                var l = numSamplesOut * 2 * _audioCodecContext->channels;
                                Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                                s += l;
                            }
                            if (_audioFrame->decode_error_flags > 0)
                            {
                                break;
                            }
                        } while (ret == 0);
                        if (s > 0)
                        {
                            var ba = new byte[s];
                            Buffer.BlockCopy(buffer, 0, ba, 0, s);
                            if (!audioInited)
                            {
                                // first audio data: build the wave pipeline for this stream
                                audioInited = true;
                                RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16, _audioCodecContext->channels);
                                waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
                                sampleChannel = new SampleChannel(waveProvider);
                                sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                            }
                            waveProvider.AddSamples(ba, 0, s);
                            // forces processing of the volume level without piping it out
                            var sampleBuffer = new float[s];
                            var read = sampleChannel.Read(sampleBuffer, 0, s);
                            da(this, new DataAvailableEventArgs(ba, read));
                            if (Listening)
                            {
                                WaveOutProvider?.AddSamples(ba, 0, read);
                            }
                        }
                    }
                }
            }
        }
        // ---- video packet path ----
        if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index && _videoCodecContext != null)
        {
            ffmpeg.avcodec_send_packet(_videoCodecContext, &packet);
            do
            {
                ret = ffmpeg.avcodec_receive_frame(_videoCodecContext, _videoFrame);
                if (ret == 0 && EmitFrame)
                {
                    if (!videoInited)
                    {
                        // first frame: allocate the BGR24 conversion buffer and sws context
                        videoInited = true;
                        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGR24, _videoCodecContext->width, _videoCodecContext->height, 1);
                        pConvertedFrameBuffer = Marshal.AllocHGlobal(convertedFrameBufferSize);
                        ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _videoCodecContext->width, _videoCodecContext->height, 1);
                        pConvertContext = ffmpeg.sws_getContext(_videoCodecContext->width, _videoCodecContext->height, _videoCodecContext->pix_fmt, _videoCodecContext->width, _videoCodecContext->height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                    }
                    Log("SWS_SCALE", ffmpeg.sws_scale(pConvertContext, _videoFrame->data, _videoFrame->linesize, 0, _videoCodecContext->height, dstData, dstLinesize));
                    if (_videoFrame->decode_error_flags > 0)
                    {
                        break;
                    }
                    // wrap the converted buffer in a Bitmap; handlers must not keep
                    // a reference past the event (buffer is reused next frame)
                    using ( var mat = new Bitmap(_videoCodecContext->width, _videoCodecContext->height, dstLinesize[0], PixelFormat.Format24bppRgb, pConvertedFrameBuffer))
                    {
                        var nfe = new NewFrameEventArgs(mat);
                        nf.Invoke(this, nfe);
                    }
                    _lastVideoFrame = DateTime.UtcNow;
                }
            } while (ret == 0);
        }
        // watchdog: treat a stalled video stream as a lost device
        if (nf != null && _videoStream != null)
        {
            if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds > _timeout)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                _abort = true;
            }
        }
        ffmpeg.av_packet_unref(&packet);
    } while (!_abort && !MainForm.ShuttingDown);
    // signal end-of-stream to frame consumers with a null frame
    NewFrame?.Invoke(this, new NewFrameEventArgs(null));
    try
    {
        // serialize native teardown with other ffmpeg users
        Program.MutexHelper.Wait();
        if (pConvertedFrameBuffer != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(pConvertedFrameBuffer);
        }
        if (_formatContext != null)
        {
            if (_formatContext->streams != null)
            {
                var j = (int)_formatContext->nb_streams;
                for (var i = j - 1; i >= 0; i--)
                {
                    var stream = _formatContext->streams[i];
                    if (stream != null && stream->codec != null && stream->codec->codec != null)
                    {
                        stream->discard = AVDiscard.AVDISCARD_ALL;
                        ffmpeg.avcodec_close(stream->codec);
                    }
                }
            }
            fixed(AVFormatContext **f = &_formatContext)
            {
                ffmpeg.avformat_close_input(f);
            }
            _formatContext = null;
        }
        if (_videoFrame != null)
        {
            fixed(AVFrame **pinprt = &_videoFrame)
            {
                ffmpeg.av_frame_free(pinprt);
                _videoFrame = null;
            }
        }
        if (_audioFrame != null)
        {
            fixed(AVFrame **pinprt = &_audioFrame)
            {
                ffmpeg.av_frame_free(pinprt);
                _audioFrame = null;
            }
        }
        _videoStream = null;
        _audioStream = null;
        _audioCodecContext = null;
        _videoCodecContext = null;
        if (_swrContext != null)
        {
            fixed(SwrContext **s = &_swrContext)
            {
                ffmpeg.swr_free(s);
            }
            _swrContext = null;
        }
        if (pConvertContext != null)
        {
            ffmpeg.sws_freeContext(pConvertContext);
        }
        if (sampleChannel != null)
        {
            sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
            sampleChannel = null;
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex, "Media Stream (close)");
    }
    finally
    {
        try
        {
            Program.MutexHelper.Release();
        }
        catch { }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
// Plays back a media file via ffmpeg (VideoFileReader): raises NewFrame for each
// decoded video frame and, when the file carries audio, pipes decoded PCM through
// DataAvailable. Paces playback to the file's frame rate and reports the reason
// for stopping through PlayingFinished.
// Fixes over the previous revision:
//  - ReadAudioFrame() can return null/empty near end of stream (the newer ffmpeg
//    listener already guards this); dereferencing data.Length unguarded could NRE.
//  - Event delegates are captured into locals before the null-check/invoke so an
//    unsubscribe on another thread cannot race the call.
private void FfmpegListener()
{
    ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    VideoFileReader vfr = null;
    Program.WriterMutex.WaitOne();
    try
    {
        vfr = new VideoFileReader();
        vfr.Open(_source);
    }
    catch (Exception ex)
    {
        Log.Error("", ex);//MainForm.LogExceptionToFile(ex);
    }
    Program.WriterMutex.ReleaseMutex();
    if (vfr == null || !vfr.IsOpen)
    {
        PlayingFinished?.Invoke(this, ReasonToFinishPlaying.VideoSourceError);
        return;
    }
    bool hasaudio = false;
    if (vfr.Channels > 0)
    {
        // file carries audio: build the wave pipeline and metering chain
        hasaudio = true;
        RecordingFormat = new WaveFormat(vfr.SampleRate, 16, vfr.Channels);
        WaveOutProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
        _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
        _sampleChannel = new SampleChannel(_waveProvider);
        _meteringProvider = new MeteringSampleProvider(_sampleChannel);
        _meteringProvider.StreamVolume += MeteringProviderStreamVolume;
        HasAudioStream?.Invoke(this, EventArgs.Empty);
    }
    // milliseconds per frame; default to 25fps when the rate is unknown
    int interval = 1000 / ((vfr.FrameRate == 0) ? 25 : vfr.FrameRate);
    try
    {
        while (!_stopEvent.WaitOne(0, false))
        {
            DateTime start = DateTime.Now;
            Bitmap frame = vfr.ReadVideoFrame();
            if (frame == null)
            {
                reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                break;
            }
            NewFrame?.Invoke(this, new NewFrameEventArgs(frame));
            frame.Dispose();
            if (hasaudio)
            {
                byte[] data = vfr.ReadAudioFrame();
                var da = DataAvailable;
                // guard: ReadAudioFrame can return null or an empty buffer near EOF
                if (da != null && data != null && data.Length > 0)
                {
                    _waveProvider.AddSamples(data, 0, data.Length);
                    if (Listening)
                    {
                        WaveOutProvider.AddSamples(data, 0, data.Length);
                    }
                    _mFramesReceived++;
                    //forces processing of volume level without piping it out
                    var sampleBuffer = new float[data.Length];
                    _meteringProvider.Read(sampleBuffer, 0, data.Length);
                    da(this, new DataAvailableEventArgs((byte[])data.Clone()));
                }
            }
            if (interval > 0)
            {
                // get frame extract duration
                TimeSpan span = DateTime.Now.Subtract(start);
                // milliseconds to sleep so playback runs at ~real time
                int msec = interval - (int)span.TotalMilliseconds;
                if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                {
                    break;
                }
            }
        }
    }
    catch (Exception e)
    {
        VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(e.Message));
        Log.Error("", e);//MainForm.LogExceptionToFile(e);
        reasonToStop = ReasonToFinishPlaying.DeviceLost;
    }
    PlayingFinished?.Invoke(this, reasonToStop);
}
// Listens to an iSpyServer audio stream over HTTP. The stream multiplexes simple
// text commands (volume mute / volume set) with raw a-law audio; any chunk that
// is not a recognized command is decoded to PCM and forwarded via DataAvailable.
// Fixes over the previous revision:
//  - The WebResponse is now disposed (only the stream was, leaking the connection).
//  - The caught exception is logged (the logger call was commented out, silently
//    swallowing errors), matching DirectStreamListener's logging style.
private void SpyServerListener()
{
    var data = new byte[3200];
    try
    {
        var request = (HttpWebRequest)WebRequest.Create(_source);
        request.Timeout = 10000;
        request.ReadWriteTimeout = 5000;
        // dispose the response as well as the stream so the connection is released
        using (var response = request.GetResponse())
        using (Stream stream = response.GetResponseStream())
        {
            if (stream == null)
            {
                throw new Exception("Stream is null");
            }
            stream.ReadTimeout = 5000;
            while (!_stopEvent.WaitOne(0, false))
            {
                int recbytesize = stream.Read(data, 0, 3200);
                if (recbytesize == 0)
                {
                    throw new Exception("lost stream");
                }
                var recive = Encoding.UTF8.GetString(data, 0, recbytesize);
                if (recive.Equals("setCurrentVolumeMute"))
                {
                    setVolumeMute();
                }
                else if (recive.StartsWith("setCurrentVolume"))
                {
                    // command format: setCurrentVolume#<value>
                    if (recive.IndexOf("#") > -1)
                    {
                        var cmd = recive.Split(new[] { '#' });
                        // apply the volume change off the listener thread
                        Thread setValueThread = new Thread(new ParameterizedThreadStart(this.setVolumeFromServer));
                        setValueThread.IsBackground = true;
                        setValueThread.Start(cmd[1]);
                        Thread setValueThread1 = new Thread(new ParameterizedThreadStart(this.setVolumeWINMM));
                        setValueThread1.IsBackground = true;
                        setValueThread1.Start(cmd[1]);
                    }
                }
                else
                {
                    // not a command: treat the chunk as a-law audio
                    byte[] dec;
                    ALawDecoder.ALawDecode(data, recbytesize, out dec);
                    var da = DataAvailable;
                    if (da != null)
                    {
                        if (_sampleChannel != null)
                        {
                            _waveProvider.AddSamples(dec, 0, dec.Length);
                            // forces processing of the volume level without piping it out
                            var sampleBuffer = new float[dec.Length];
                            int read = _sampleChannel.Read(sampleBuffer, 0, dec.Length);
                            da(this, new DataAvailableEventArgs((byte[])dec.Clone(), read));
                            if (Listening)
                            {
                                WaveOutProvider?.AddSamples(dec, 0, read);
                            }
                        }
                    }
                    else
                    {
                        break;
                    }
                    // need to stop ?
                    if (_stopEvent.WaitOne(0, false))
                    {
                        break;
                    }
                }
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        var af = AudioFinished;
        af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        // log instead of silently swallowing the error
        Logger.LogExceptionToFile(e, "ispyServer");
    }
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        _sampleChannel = null;
    }
    if (_waveProvider?.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    if (WaveOutProvider?.BufferedBytes > 0)
    {
        WaveOutProvider?.ClearBuffer();
    }
}
// Pumps raw PCM from an already-connected direct stream into the audio pipeline.
// Reads PacketSize chunks until the stream ends, stop is signalled, or the
// application shuts down; always closes the stream before returning and reports
// the outcome through AudioFinished.
private void DirectStreamListener()
{
    try
    {
        var chunk = new byte[PacketSize];
        if (_stream != null)
        {
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                // capture the delegate so unsubscription can't race the invoke
                var handler = DataAvailable;
                if (handler == null)
                {
                    continue;
                }
                int got = _stream.Read(chunk, 0, PacketSize);
                if (got <= 0)
                {
                    break;
                }
                if (_sampleChannel != null)
                {
                    _waveProvider.AddSamples(chunk, 0, got);
                    // run the samples through the metering channel to update levels
                    var levels = new float[got];
                    int metered = _sampleChannel.Read(levels, 0, got);
                    handler(this, new DataAvailableEventArgs((byte[])chunk.Clone(), metered));
                    if (Listening)
                    {
                        WaveOutProvider?.AddSamples(chunk, 0, metered);
                    }
                }
                if (_stopEvent.WaitOne(Interval, false))
                {
                    break;
                }
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        //if (AudioSourceError!=null)
        //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        Logger.LogExceptionToFile(e,"Direct");
    }
    _stream?.Close();
    _stream = null;
}
// Legacy iSpyServer listener: reads raw a-law audio over HTTP, decodes it to PCM
// and forwards it via DataAvailable until stopped, shut down, or the stream is lost.
// Fixes over the previous revision:
//  - The WebResponse is now closed (only the stream was, leaking the connection).
//  - The DataAvailable delegate is captured into a local before invocation, so an
//    unsubscribe between the null-check and the call cannot throw.
private void SpyServerListener()
{
    HttpWebRequest request = null;
    WebResponse response = null;
    Stream stream = null;
    var data = new byte[3200];
    try
    {
        request = (HttpWebRequest)WebRequest.Create(_source);
        request.Timeout = 10000;
        request.ReadWriteTimeout = 5000;
        response = request.GetResponse();
        stream = response.GetResponseStream();
        if (stream != null)
        {
            stream.ReadTimeout = 5000;
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                // capture once per iteration to avoid a check-then-invoke race
                var da = DataAvailable;
                if (da != null)
                {
                    int recbytesize = stream.Read(data, 0, 3200);
                    if (recbytesize == 0)
                    {
                        throw new Exception("lost stream");
                    }
                    byte[] dec;
                    ALawDecoder.ALawDecode(data, recbytesize, out dec);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(dec, 0, dec.Length);
                        // forces processing of the volume level without piping it out
                        var sampleBuffer = new float[dec.Length];
                        _sampleChannel.Read(sampleBuffer, 0, dec.Length);
                        if (Listening && WaveOutProvider != null)
                        {
                            WaveOutProvider.AddSamples(dec, 0, dec.Length);
                        }
                        da(this, new DataAvailableEventArgs((byte[])dec.Clone(), dec.Length));
                    }
                }
                else
                {
                    break;
                }
                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                {
                    break;
                }
            }
        }
        if (AudioFinished != null)
        {
            AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
        }
    }
    catch (Exception e)
    {
        //if (AudioSourceError!=null)
        //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        if (AudioFinished != null)
        {
            AudioFinished(this, ReasonToFinishPlaying.DeviceLost);
        }
        MainForm.LogExceptionToFile(e);
    }
    if (stream != null)
    {
        try { stream.Close(); } catch { }
        stream = null;
    }
    // previously leaked: close the response as well to release the connection
    if (response != null)
    {
        try { response.Close(); } catch { }
        response = null;
    }
}
// Worker thread: connects to an MJPEG-over-HTTP source and splits the multipart
// stream on the "--myboundary" marker. Each part's Content-type selects the
// handler: image/jpeg -> NewFrame, audio/raw -> DataAvailable (fixed 16kHz mono),
// alert/text -> AlertHandler. Reconnects on stream errors until stopped.
private void WorkerThread()
{
    // buffer to read stream
    var buffer = new byte[BufSize];
    var encoding = new ASCIIEncoding();
    var res = ReasonToFinishPlaying.StoppedByUser;
    while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
    {
        // reset reload event
        _reloadEvent.Reset();
        // HTTP web request
        HttpWebRequest request = null;
        // web response
        WebResponse response = null;
        // stream for MJPEG downloading
        Stream stream = null;
        // boundary between images (string and binary versions)
        try
        {
            // create request
            request = (HttpWebRequest)WebRequest.Create(_source);
            // set user agent
            if (_userAgent != null)
            {
                request.UserAgent = _userAgent;
            }
            // set proxy
            if (_proxy != null)
            {
                request.Proxy = _proxy;
            }
            if (_usehttp10)
            {
                request.ProtocolVersion = HttpVersion.Version10;
            }
            // set timeout value for the request (connection lease/idle too, so
            // stale keep-alive connections don't outlive the request timeout)
            request.Timeout = request.ServicePoint.ConnectionLeaseTimeout = request.ServicePoint.MaxIdleTime = _requestTimeout;
            request.AllowAutoRedirect = true;
            // set login and password
            if ((_login != null) && (_password != null) && (_login != string.Empty))
            {
                request.Credentials = new NetworkCredential(_login, _password);
            }
            // set connection group name
            if (_useSeparateConnectionGroup)
            {
                request.ConnectionGroupName = GetHashCode().ToString();
            }
            // get response
            response = request.GetResponse();
            // get response stream
            stream = response.GetResponseStream();
            stream.ReadTimeout = _requestTimeout;
            byte[] boundary = encoding.GetBytes("--myboundary");
            byte[] sep = encoding.GetBytes("\r\n\r\n");
            // loop state: start/end of the current multipart packet within
            // buffer, total bytes buffered, and whether audio has been seen
            int startPacket = -1;
            int endPacket = -1;
            int ttl = 0;
            bool hasaudio = false;
            while ((!_stopEvent.WaitOne(0, false)) && (!_reloadEvent.WaitOne(0, false)))
            {
                int read;
                if ((read = stream.Read(buffer, ttl, ReadSize)) == 0)
                {
                    // zero-byte read means the server closed the stream; the
                    // ApplicationException triggers the reconnect path below
                    throw new ApplicationException();
                }
                ttl += read;
                // locate the opening boundary, then the closing one after it
                if (startPacket == -1)
                {
                    startPacket = ByteArrayUtils.Find(buffer, boundary, 0, ttl);
                }
                else
                {
                    if (endPacket == -1)
                    {
                        endPacket = ByteArrayUtils.Find(buffer, boundary, startPacket + boundary.Length, ttl - (startPacket + boundary.Length));
                    }
                }
                // a complete part is buffered between startPacket and endPacket
                if (startPacket > -1 && endPacket > startPacket)
                {
                    // find the blank line ending the part headers
                    int br = ByteArrayUtils.Find(buffer, sep, startPacket, 100);
                    if (br != -1)
                    {
                        // parse the Content-type header out of the part headers
                        var arr = new byte[br];
                        System.Array.Copy(buffer, startPacket, arr, 0, br - startPacket);
                        string s = Encoding.ASCII.GetString(arr);
                        int k = s.IndexOf("Content-type: ", StringComparison.Ordinal);
                        if (k != -1)
                        {
                            s = s.Substring(k + 14);
                            s = s.Substring(0, s.IndexOf("\r\n", StringComparison.Ordinal));
                            s = s.Trim();
                        }
                        // NOTE(review): body offsets use br + 4 (skip "\r\n\r\n") and
                        // length endPacket - br - 8 — presumably trimming the trailing
                        // CRLFs before the next boundary; confirm against the server.
                        switch (s)
                        {
                            case "image/jpeg":
                                try
                                {
                                    using (var ms = new MemoryStream(buffer, br + 4, endPacket - br - 8))
                                    {
                                        var bitmap = (Bitmap)Image.FromStream(ms);
                                        // notify client
                                        NewFrame(this, new NewFrameEventArgs(bitmap));
                                        // release the image
                                        bitmap.Dispose();
                                    }
                                }
                                catch (Exception ex)
                                {
                                    //sometimes corrupted packets come through...
                                    MainForm.LogExceptionToFile(ex);
                                }
                                break;
                            case "audio/raw":
                                if (!hasaudio)
                                {
                                    hasaudio = true;
                                    //fixed 16khz 1 channel format
                                    RecordingFormat = new WaveFormat(16000, 16, 1);
                                    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
                                    _sampleChannel = new SampleChannel(_waveProvider);
                                    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                                    if (HasAudioStream != null)
                                    {
                                        // one-shot notification that this source has audio
                                        HasAudioStream(this, EventArgs.Empty);
                                        HasAudioStream = null;
                                    }
                                }
                                var da = DataAvailable;
                                if (da != null)
                                {
                                    int l = endPacket - br - 8;
                                    var data = new byte[l];
                                    int d = 0;
                                    using (var ms = new MemoryStream(buffer, br + 4, l))
                                    {
                                        d = ms.Read(data, 0, l);
                                    }
                                    if (d > 0)
                                    {
                                        _waveProvider.AddSamples(data, 0, data.Length);
                                        if (Listening)
                                        {
                                            WaveOutProvider.AddSamples(data, 0, data.Length);
                                        }
                                        //forces processing of volume level without piping it out
                                        var sampleBuffer = new float[data.Length];
                                        _sampleChannel.Read(sampleBuffer, 0, data.Length);
                                        da(this, new DataAvailableEventArgs((byte[])data.Clone()));
                                    }
                                }
                                break;
                            case "alert/text":
                                // code to handle alert notifications goes here
                                if (AlertHandler != null)
                                {
                                    int dl = endPacket - br - 8;
                                    var data2 = new byte[dl];
                                    using (var ms = new MemoryStream(buffer, br + 4, dl))
                                    {
                                        ms.Read(data2, 0, dl);
                                    }
                                    string alerttype = Encoding.ASCII.GetString(data2);
                                    AlertHandler(this, new AlertEventArgs(alerttype));
                                }
                                break;
                        }
                    }
                    // shift the unconsumed tail to the front of the buffer and
                    // restart the boundary search
                    ttl -= endPacket;
                    System.Array.Copy(buffer, endPacket, buffer, 0, ttl);
                    startPacket = -1;
                    endPacket = -1;
                }
            }
        }
        catch (ApplicationException)
        {
            // do nothing for Application Exception, which we raised on our own
            // wait for a while before the next try
            Thread.Sleep(250);
        }
        catch (ThreadAbortException)
        {
            break;
        }
        catch (Exception ex)
        {
            // provide information to clients
            MainForm.LogExceptionToFile(ex);
            res = ReasonToFinishPlaying.DeviceLost;
            break;
            // wait for a while before the next try
            //Thread.Sleep(250);
        }
        finally
        {
            // abort request
            if (request != null)
            {
                try { request.Abort(); } catch { }
                request = null;
            }
            // close response stream
            if (stream != null)
            {
                try { stream.Flush(); } catch { }
                try { stream.Close(); } catch { }
                try { stream.Dispose(); } catch { }
                stream = null;
            }
            // close response
            if (response != null)
            {
                try { response.Close(); } catch { }
                response = null;
            }
        }
        // need to stop ?
        if (_stopEvent.WaitOne(0, false))
        {
            break;
        }
    }
    if (PlayingFinished != null)
    {
        PlayingFinished(this, res);
    }
}
// Legacy direct-stream listener: reads PacketSize chunks of raw PCM from _stream
// and forwards them to DataAvailable until the stream ends or stop is requested;
// always closes the stream before returning.
// Fix over the previous revision: event delegates (DataAvailable, AudioFinished,
// AudioSourceError) are captured into locals before the null-check/invoke, so an
// unsubscribe on another thread between the check and the call cannot NRE.
private void DirectStreamListener()
{
    try
    {
        var data = new byte[PacketSize];
        if (_stream != null)
        {
            while (!_stopEvent.WaitOne(0, false))
            {
                var da = DataAvailable;
                if (da != null)
                {
                    int recbytesize = _stream.Read(data, 0, PacketSize);
                    if (recbytesize > 0)
                    {
                        if (_sampleChannel != null)
                        {
                            _waveProvider.AddSamples(data, 0, recbytesize);
                            // forces processing of the volume level without piping it out
                            var sampleBuffer = new float[recbytesize];
                            _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                            if (Listening && WaveOutProvider != null)
                            {
                                WaveOutProvider.AddSamples(data, 0, recbytesize);
                            }
                            da(this, new DataAvailableEventArgs((byte[])data.Clone()));
                        }
                    }
                    else
                    {
                        break;
                    }
                    if (_stopEvent.WaitOne(Interval, false))
                    {
                        break;
                    }
                }
            }
        }
        var af = AudioFinished;
        if (af != null)
        {
            af(this, ReasonToFinishPlaying.StoppedByUser);
        }
    }
    catch (Exception e)
    {
        var er = AudioSourceError;
        if (er != null)
        {
            er(this, new AudioSourceErrorEventArgs(e.Message));
        }
        Log.Error("", e);//MainForm.LogExceptionToFile(e);
    }
    if (_stream != null)
    {
        _stream.Close();
        _stream = null;
    }
}