/// <summary>
/// Kinect audio pump: reads raw audio from the sensor stream until aborted,
/// feeds the wave provider / volume meter, then tears the sensor down and
/// signals PlayingFinished.
/// </summary>
private void AudioThread()
{
    _abort = new ManualResetEvent(false);
    while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
    {
        int dataLength = _audioStream.Read(_audioBuffer, 0, _audioBuffer.Length);
        if (DataAvailable != null)
        {
            _waveProvider.AddSamples(_audioBuffer, 0, dataLength);
            if (Listening)
            {
                // FIX: WaveOutProvider can be null when no live listener is attached;
                // guard to avoid a NullReferenceException (sibling callbacks already do).
                WaveOutProvider?.AddSamples(_audioBuffer, 0, dataLength);
            }
            //forces processing of volume level without piping it out
            var sampleBuffer = new float[dataLength];
            int read = _sampleChannel.Read(sampleBuffer, 0, dataLength);
            DataAvailable?.Invoke(this, new DataAvailableEventArgs((byte[])_audioBuffer.Clone(), read));
        }
    }
    try
    {
        if (_sensor != null)
        {
            _sensor.AudioSource?.Stop();
            _sensor.Stop();
            _sensor.SkeletonFrameReady -= SensorSkeletonFrameReady;
            _sensor.ColorFrameReady -= SensorColorFrameReady;
            _sensor.DepthFrameReady -= SensorDepthFrameReady;
            _sensor.Dispose();
            _sensor = null;
        }
    }
    catch
    {
        // ignored - best-effort sensor shutdown
    }
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
    }
    if (_waveProvider != null && _waveProvider.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    Listening = false;
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    _abort.Close();
}
/// <summary>
/// Receives a decoded sound packet, downmixes >2 channels to stereo, pushes it
/// through the wave provider / volume meter and raises DataAvailable.
/// </summary>
private void SoundCallback(Sound soundData)
{
    // FIX: snapshot the delegate once - the original re-checked DataAvailable at
    // the end, leaving a window where a handler removed between check and invoke
    // would throw. A local copy is immune to that race.
    var da = DataAvailable;
    if (da == null || _needsSetup)
    {
        return;
    }
    var data = new byte[soundData.SamplesSize];
    Marshal.Copy(soundData.SamplesData, data, 0, (int)soundData.SamplesSize);
    if (_realChannels > 2)
    {
        //resample audio to 2 channels
        data = ToStereo(data, _realChannels);
    }
    _waveProvider.AddSamples(data, 0, data.Length);
    if (Listening && WaveOutProvider != null)
    {
        WaveOutProvider.AddSamples(data, 0, data.Length);
    }
    //forces processing of volume level without piping it out
    var sampleBuffer = new float[data.Length];
    _sampleChannel.Read(sampleBuffer, 0, data.Length);
    da(this, new DataAvailableEventArgs((byte[])data.Clone()));
}
/// <summary>
/// Audio pump: reads from the audio stream until the stop event is signalled,
/// feeding the wave provider, the optional live-listen output and the volume meter.
/// </summary>
private void AudioThread()
{
    while (_stopEvent != null && !_stopEvent.WaitOne(0, false))
    {
        int dataLength = _audioStream.Read(_audioBuffer, 0, _audioBuffer.Length);
        // FIX: capture the delegate once per iteration; the original read the event
        // twice, so a handler detaching mid-iteration could race the final invoke.
        var da = DataAvailable;
        if (da == null)
        {
            continue;
        }
        _waveProvider.AddSamples(_audioBuffer, 0, dataLength);
        if (Listening)
        {
            // FIX: WaveOutProvider may be null when nobody is listening out loud.
            WaveOutProvider?.AddSamples(_audioBuffer, 0, dataLength);
        }
        //forces processing of volume level without piping it out
        var sampleBuffer = new float[dataLength];
        _sampleChannel.Read(sampleBuffer, 0, dataLength);
        da(this, new DataAvailableEventArgs((byte[])_audioBuffer.Clone()));
    }
}
/// <summary>
/// Receives a decoded sound packet, feeds the wave provider / volume meter /
/// optional live-listen output, and raises DataAvailable.
/// </summary>
private void SoundCallback(Sound soundData)
{
    // FIX: snapshot the delegate once - the original checked DataAvailable twice
    // (top and bottom), so a handler removed in between could race the invoke.
    var da = DataAvailable;
    if (da == null || _needsSetup)
    {
        return;
    }
    if (_sampleChannel != null)
    {
        var samples = new byte[soundData.SamplesSize];
        Marshal.Copy(soundData.SamplesData, samples, 0, (int)soundData.SamplesSize);
        _waveProvider.AddSamples(samples, 0, samples.Length);
        //forces processing of volume level without piping it out
        var sampleBuffer = new float[samples.Length];
        _sampleChannel.Read(sampleBuffer, 0, samples.Length);
        if (Listening && WaveOutProvider != null)
        {
            WaveOutProvider.AddSamples(samples, 0, samples.Length);
        }
        da(this, new DataAvailableEventArgs((byte[])samples.Clone()));
    }
}
/// <summary>
/// Pushes a raw audio buffer through the wave provider and volume meter,
/// raises DataAvailable, and echoes to the live-listen output if active.
/// </summary>
/// <param name="data">Raw PCM bytes in the recording format.</param>
void ProcessAudio(byte[] data)
{
    try
    {
        // FIX: capture the delegate once so the invoke cannot race a handler
        // being removed after the null check.
        var da = DataAvailable;
        if (da != null)
        {
            _waveProvider.AddSamples(data, 0, data.Length);
            //forces processing of volume level without piping it out
            var sampleBuffer = new float[data.Length];
            SampleChannel.Read(sampleBuffer, 0, data.Length);
            da(this, new DataAvailableEventArgs((byte[])data.Clone()));
            if (WaveOutProvider != null && Listening)
            {
                WaveOutProvider.AddSamples(data, 0, data.Length);
            }
        }
    }
    catch (NullReferenceException)
    {
        // providers/channel can be torn down concurrently during shutdown
    }
    catch (Exception ex)
    {
        MainForm.LogExceptionToFile(ex);
    }
}
/// <summary>
/// WaveIn callback: meters volume via the sample channel, echoes to the
/// live-listen output when active, and raises DataAvailable with a copy of
/// the recorded buffer.
/// </summary>
/// <remarks>
/// Removed the commented-out duplicate of this handler and the byte-rate
/// debugging scaffolding that surrounded it (dead code).
/// </remarks>
void WaveInDataAvailable(object sender, WaveInEventArgs e)
{
    _isrunning = true;
    // FIX: snapshot the delegate so the invoke cannot race handler removal.
    var da = DataAvailable;
    if (da != null)
    {
        //forces processing of volume level without piping it out
        if (_sampleChannel != null)
        {
            var sampleBuffer = new float[e.BytesRecorded];
            _sampleChannel.Read(sampleBuffer, 0, e.BytesRecorded);
            if (Listening && WaveOutProvider != null)
            {
                WaveOutProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
            }
            da(this, new DataAvailableEventArgs((byte[])e.Buffer.Clone(), e.BytesRecorded));
        }
    }
}
/// <summary>
/// Pulls samples from the underlying sample channel into the caller's buffer.
/// Access to the channel is serialized with a lock.
/// </summary>
/// <param name="buffer">Destination sample buffer.</param>
/// <param name="offset">Index in <paramref name="buffer"/> to start writing at.</param>
/// <param name="count">Maximum number of samples to read.</param>
/// <returns>The number of samples actually read.</returns>
public int Read(float[] buffer, int offset, int count)
{
    int samplesRead;
    lock (lockObject)
    {
        samplesRead = sampleChannel.Read(buffer, offset, count);
    }
    return samplesRead;
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Copies <paramref name="inputStream"/> into the annotation output file, block by
/// block, placing the audio into one of three interleaved slots per output block:
/// the original-recording channels, the "careful" slot, or the "translation" slot.
/// The two slots not being written receive silence (0f) so every output block has
/// the same layout: [orig channels][careful][translation].
/// </summary>
/// <param name="channel">Which annotation slot the input audio belongs to.</param>
/// <param name="inputStream">Audio to copy; read one block (frame) at a time.</param>
private void WriteAudioStreamToChannel(AnnotationChannel channel, WaveStream inputStream)
{
    // One block of silence sized to the source recording's channel count.
    var silentBlocksForOrig = new float[_srcRecStreamProvider.Stream.WaveFormat.Channels];
    var blocksRead = 0;
    var totalBlocks = inputStream.Length / inputStream.WaveFormat.BlockAlign;
    var provider = new SampleChannel(inputStream);
    // Buffer holds exactly one block (one sample per channel) per Read call.
    var buffer = new float[provider.WaveFormat.Channels];
    while (provider.Read(buffer, 0, provider.WaveFormat.Channels) > 0 && blocksRead < totalBlocks)
    {
        blocksRead += 1;
        switch (channel)
        {
            case AnnotationChannel.Source:
                // Source audio goes in the original-channel slots; annotation slots silent.
                _audioFileWriter.WriteSamples(buffer, 0, _srcRecStreamProvider.Stream.WaveFormat.Channels);
                _audioFileWriter.WriteSample(0f);
                _audioFileWriter.WriteSample(0f);
                break;
            case AnnotationChannel.Careful:
                // NOTE(review): only buffer[0] is written - annotation tracks are
                // presumably mono; confirm against the recorder.
                _audioFileWriter.WriteSamples(silentBlocksForOrig, 0, silentBlocksForOrig.Length);
                _audioFileWriter.WriteSample(buffer[0]);
                _audioFileWriter.WriteSample(0f);
                break;
            case AnnotationChannel.Translation:
                _audioFileWriter.WriteSamples(silentBlocksForOrig, 0, silentBlocksForOrig.Length);
                _audioFileWriter.WriteSample(0f);
                _audioFileWriter.WriteSample(buffer[0]);
                break;
        }
    }
}
/// <summary>
/// Receives raw audio over a socket until the stop event fires, feeding the
/// wave provider / volume meter / live-listen output and raising DataAvailable.
/// Signals AudioFinished on exit and AudioSourceError on failure.
/// </summary>
private void WebStreamListener()
{
    try
    {
        var data = new byte[6400];
        if (_socket != null)
        {
            while (!stopEvent.WaitOne(0, false))
            {
                // FIX: capture the delegate once per iteration; the original read
                // the event twice, racing handler removal between check and invoke.
                var da = DataAvailable;
                if (da != null)
                {
                    int recbytesize = _socket.Receive(data, 0, 6400, SocketFlags.None);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(data, 0, recbytesize);
                        //forces processing of volume level without piping it out
                        var sampleBuffer = new float[recbytesize];
                        _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                        if (Listening && WaveOutProvider != null)
                        {
                            WaveOutProvider.AddSamples(data, 0, recbytesize);
                        }
                        da(this, new DataAvailableEventArgs((byte[])data.Clone()));
                    }
                }
                else
                {
                    // no subscribers left - stop listening
                    break;
                }
                // need to stop ?
                if (stopEvent.WaitOne(0, false))
                {
                    break;
                }
            }
        }
        AudioFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser);
    }
    catch (Exception e)
    {
        AudioSourceError?.Invoke(this, new AudioSourceErrorEventArgs(e.Message));
        Log.Error("", e);//MainForm.LogExceptionToFile(e);
    }
    if (_socket != null)
    {
        _socket.Close();
        _socket = null;
    }
}
/// <summary>
/// Buffers incoming raw audio and immediately pulls it through the sample
/// channel so volume metering runs; the float output itself is discarded.
/// </summary>
/// <param name="data">Raw audio bytes to enqueue.</param>
public void AddData(byte[] data)
{
    bufferedWaveProvider.AddSamples(data, 0, data.Length);
    // Drain through the sample channel purely for its side effects (metering).
    var scratch = new float[data.Length];
    sampleChannel.Read(scratch, 0, scratch.Length);
}
/// <summary>
/// Drains the VLC video and audio queues on a worker loop: raises NewFrame for
/// each dequeued bitmap (via a clone) and DataAvailable for each audio packet,
/// feeding the wave provider / volume meter / live-listen output. On exit,
/// disposes any bitmaps still queued.
/// </summary>
private void EventManager()
{
    Bitmap frame;
    while (_stopEvent != null && !_stopEvent.WaitOne(5, false) && !MainForm.ShuttingDown)
    {
        try
        {
            // Snapshot handlers once per iteration to avoid check/invoke races.
            var da = DataAvailable;
            var nf = NewFrame;
            if (_videoQueue.TryDequeue(out frame))
            {
                if (frame != null)
                {
                    // Hand the handler a clone; the clone is disposed right after.
                    // NOTE(review): the dequeued original is not disposed here - confirm
                    // ownership lies with the producer or this leaks.
                    using (var b = (Bitmap)frame.Clone())
                    {
                        //new frame
                        nf?.Invoke(this, new NewFrameEventArgs(b));
                    }
                }
            }
            byte[] audio;
            if (!_audioQueue.TryDequeue(out audio))
            {
                continue;
            }
            da?.Invoke(this, new DataAvailableEventArgs(audio));
            // Forces volume-level processing without piping it out.
            var sampleBuffer = new float[audio.Length];
            _sampleChannel.Read(sampleBuffer, 0, audio.Length);
            _waveProvider.AddSamples(audio, 0, audio.Length);
            if (WaveOutProvider != null && Listening)
            {
                WaveOutProvider.AddSamples(audio, 0, audio.Length);
            }
        }
        catch (Exception ex)
        {
            MainForm.LogExceptionToFile(ex, "VLC");
        }
    }
    // Drain and dispose any frames left in the queue after shutdown.
    try
    {
        while (_videoQueue != null && _videoQueue.TryDequeue(out frame))
        {
            frame?.Dispose();
        }
    }
    catch
    {
        // ignored
    }
}
/// <summary>
/// Reads fixed-size packets from a direct audio stream until stopped, feeding
/// the wave provider / volume meter / live-listen output and raising
/// DataAvailable. Raises AudioFinished (StoppedByUser on clean exit,
/// DeviceLost on error) and closes the stream on the way out.
/// </summary>
private void DirectStreamListener()
{
    try
    {
        var data = new byte[PacketSize];
        if (_stream != null)
        {
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                // Snapshot handler once per iteration (check/invoke race safety).
                var da = DataAvailable;
                if (da != null)
                {
                    int recbytesize = _stream.Read(data, 0, PacketSize);
                    if (recbytesize > 0)
                    {
                        if (_sampleChannel != null)
                        {
                            _waveProvider.AddSamples(data, 0, recbytesize);
                            // Forces volume-level processing without piping it out.
                            var sampleBuffer = new float[recbytesize];
                            int read = _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                            da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));
                            if (Listening)
                            {
                                // NOTE(review): 'read' is a sample count from the float
                                // channel, used here as a byte length - confirm intended.
                                WaveOutProvider?.AddSamples(data, 0, read);
                            }
                        }
                    }
                    else
                    {
                        // zero-length read: end of stream
                        break;
                    }
                    if (_stopEvent.WaitOne(Interval, false))
                    {
                        break;
                    }
                }
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        //if (AudioSourceError!=null)
        //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        MainForm.LogExceptionToFile(e, "Direct");
    }
    _stream?.Close();
    _stream = null;
}
/// <summary>
/// Receives raw audio over a socket until stopped, feeding the wave provider /
/// volume meter / live-listen output and raising DataAvailable. Raises
/// AudioFinished (StoppedByUser on clean exit, DeviceLost on error) and closes
/// the socket on the way out.
/// </summary>
private void WebStreamListener()
{
    try
    {
        var data = new byte[6400];
        if (_socket != null)
        {
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                // Snapshot handler once per iteration (check/invoke race safety).
                var da = DataAvailable;
                if (da != null)
                {
                    int recbytesize = _socket.Receive(data, 0, 6400, SocketFlags.None);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(data, 0, recbytesize);
                        // Forces volume-level processing without piping it out.
                        var sampleBuffer = new float[recbytesize];
                        int read = _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                        da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));
                        if (Listening)
                        {
                            // NOTE(review): 'read' is a float-sample count used as a
                            // byte length here - confirm intended.
                            WaveOutProvider?.AddSamples(data, 0, read);
                        }
                    }
                }
                else
                {
                    // no subscribers left - stop listening
                    break;
                }
                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                {
                    break;
                }
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        //if (AudioSourceError!=null)
        //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        Logger.LogExceptionToFile(e, "WebStream");
    }
    if (_socket != null)
    {
        _socket.Close();
        _socket = null;
    }
}
/// <summary>
/// Drains the FFMPEG video and audio queues on a worker loop: raises NewFrame
/// for each dequeued bitmap (then disposes it) and DataAvailable for each
/// audio packet, feeding the wave provider / volume meter / live-listen
/// output. On exit, disposes any bitmaps still queued.
/// </summary>
private void EventManager()
{
    Bitmap frame;
    while (!_stopEvent.WaitOne(5, false) && !MainForm.ShuttingDown)
    {
        try
        {
            if (_videoQueue.TryDequeue(out frame))
            {
                if (frame != null)
                {
                    // Handler must not keep a reference: frame is disposed right after.
                    NewFrame?.Invoke(this, new NewFrameEventArgs(frame));
                    frame.Dispose();
                }
            }
            byte[] audio;
            if (!_audioQueue.TryDequeue(out audio))
            {
                continue;
            }
            // Snapshot handler to avoid check/invoke race.
            var da = DataAvailable;
            da?.Invoke(this, new DataAvailableEventArgs(audio));
            // Forces volume-level processing without piping it out.
            var sampleBuffer = new float[audio.Length];
            int read = SampleChannel.Read(sampleBuffer, 0, audio.Length);
            // NOTE(review): 'read' is a float-sample count used as a byte length in
            // the AddSamples calls below - confirm intended.
            _waveProvider?.AddSamples(audio, 0, read);
            if (WaveOutProvider != null && Listening)
            {
                WaveOutProvider?.AddSamples(audio, 0, read);
            }
        }
        catch (Exception ex)
        {
            Logger.LogExceptionToFile(ex, "FFMPEG");
        }
    }
    // Drain and dispose any frames left in the queue after shutdown.
    try
    {
        while (_videoQueue != null && _videoQueue.TryDequeue(out frame))
        {
            frame?.Dispose();
        }
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "FFMPEG");
    }
}
/// <summary>
/// Read mono from file
/// </summary>
/// <param name = "filename">Name of the file</param>
/// <param name = "samplerate">Sample rate</param>
/// <param name = "milliseconds">milliseconds to read (&lt;= 0 means "to end of file")</param>
/// <param name = "startmillisecond">Start millisecond</param>
/// <returns>Array of samples, or null if the file is shorter than the requested range</returns>
public static float[] ReadMonoFromFile(string filename, int samplerate, int milliseconds, int startmillisecond)
{
    int totalmilliseconds = milliseconds <= 0 ? Int32.MaxValue : milliseconds + startmillisecond;
    float[] data = null;
    // read as mono file
    List<float> floatList = new List<float>();
    //WaveFormat waveFormat = new WaveFormat(samplerate, 1);
    WaveFormat waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(samplerate, 1);
    SampleChannel sampleChannel = ResampleToSampleChannel(filename, waveFormat);
    if (sampleChannel == null)
    {
        return (data);
    }
    int sampleCount = 0;
    int readCount = 0;
    int bufferSize = 16 * 1024;
    float[] buffer = new float[bufferSize];
    // read until we have read the number of samples (measured in ms) we are supposed to do
    while ((readCount = sampleChannel.Read(buffer, 0, bufferSize)) > 0
           && (float)(sampleCount) / samplerate * 1000 < totalmilliseconds)
    {
        floatList.AddRange(buffer.Take(readCount));
        // increment with size of data
        sampleCount += readCount;
    }
    data = floatList.ToArray();
    if ((float)(sampleCount) / samplerate * 1000 < (milliseconds + startmillisecond))
    {
        // not enough samples to return the requested data
        return (null);
    }
    // Select specific part of the song
    int start = (int)((float)startmillisecond * samplerate / 1000);
    int end = (milliseconds <= 0)
                  ? sampleCount
                  : (int)((float)(startmillisecond + milliseconds) * samplerate / 1000);
    if (start != 0 || end != sampleCount)
    {
        // copy only the [start, end) window out of the decoded data
        float[] temp = new float[end - start];
        Array.Copy(data, start, temp, 0, end - start);
        data = temp;
    }
    return (data);
}
/// <summary>
/// Decodes the configured audio file into the samples buffer and splits the
/// interleaved data into left/right channel arrays (even index = left,
/// odd index = right).
/// </summary>
void ReadAudioFile()
{
    // Pick a decoder by extension: FLAC needs its dedicated reader.
    WaveStream _audioFileReader;
    if (string.Equals(System.IO.Path.GetExtension(AudioFile), ".FLAC", StringComparison.OrdinalIgnoreCase))
    {
        _audioFileReader = new FlacReader(AudioFile);
    }
    else
    {
        _audioFileReader = new AudioFileReader(AudioFile);
    }
    // FIX: the reader was never disposed (leaked file handle); also removed an
    // unused MemoryStream local and dead commented-out code.
    using (_audioFileReader)
    {
        var sampleChannel = new SampleChannel(_audioFileReader, true);
        saplesCount = (int)(_audioFileReader.TotalTime.TotalSeconds
                            * sampleChannel.WaveFormat.SampleRate
                            * sampleChannel.WaveFormat.Channels);
        samples = new float[saplesCount];
        lsamples = new float[saplesCount / 2 + 1];
        rsamples = new float[saplesCount / 2 + 1];
        sampleChannel.Read(samples, 0, samples.Length);
        // De-interleave stereo: even indices -> left, odd indices -> right.
        for (int i = 0, j = 0, k = 0; i < samples.Length; i++)
        {
            if (i % 2 == 0)
            {
                lsamples[j] = samples[i];
                j++;
            }
            else
            {
                rsamples[k] = samples[i];
                k++;
            }
        }
    }
}
/// <summary>
/// Pulls samples from the underlying sample channel, then runs the dynamic
/// range compressor over each sample that was read, in place. Serialized
/// with a lock.
/// </summary>
/// <param name="buffer">Destination sample buffer.</param>
/// <param name="offset">Index in <paramref name="buffer"/> to start writing at.</param>
/// <param name="count">Maximum number of samples to read.</param>
/// <returns>The number of samples actually read.</returns>
public int Read(float[] buffer, int offset, int count)
{
    lock (lockObject)
    {
        int samplesRead = sampleChannel.Read(buffer, offset, count);
        int end = offset + samplesRead;
        // Compress each freshly-read sample in place.
        for (int i = offset; i < end; i++)
        {
            drcompressor.compressSample(ref buffer[i]);
        }
        return samplesRead;
    }
}
/// <summary>
/// Loads a WAV file into the shared buffer and collects the first channel's
/// samples into <c>Samples</c> (one entry per frame, stepping over the
/// interleaved channels).
/// </summary>
/// <param name="filePath">Path to the WAV file to read.</param>
public void ReadWaveDataFromFile(string filePath)
{
    using (var reader = new WaveFileReader(filePath))
    {
        SamplesPerMillisecond = reader.WaveFormat.SampleRate / 1000;
        var sampleChannel = new SampleChannel(reader);
        int samplesRead = sampleChannel.Read(_buffer, 0, BufferSize * 4);
        int stride = sampleChannel.WaveFormat.Channels;
        // Keep only the first channel of each interleaved frame.
        for (var i = 0; i < samplesRead; i += stride)
        {
            Samples.Add(_buffer[i]);
        }
    }
}
/// <summary>
/// Drains the video and audio queues on a worker loop: raises NewFrame for
/// each dequeued bitmap and DataAvailable for each audio packet, feeding the
/// wave provider / volume meter / live-listen output. Disposes any bitmaps
/// still queued on exit.
/// </summary>
private void EventManager()
{
    byte[] audio;
    Bitmap frame;
    while (!_stopEvent.WaitOne(5, false) && !MainForm.ShuttingDown)
    {
        var da = DataAvailable;
        var nf = NewFrame;
        if (_videoQueue.TryDequeue(out frame))
        {
            // FIX: the original cloned the frame "for some weird reason" but then
            // passed the ORIGINAL to the handler, disposed the clone, and never
            // disposed the original (leak). Hand the clone to the handler and
            // dispose both, matching the VLC EventManager.
            using (var b = (Bitmap)frame.Clone())
            {
                //new frame
                nf?.Invoke(this, new NewFrameEventArgs(b));
            }
            frame.Dispose();
        }
        if (_audioQueue.TryDequeue(out audio))
        {
            da?.Invoke(this, new DataAvailableEventArgs(audio));
            //forces processing of volume level without piping it out
            var sampleBuffer = new float[audio.Length];
            _sampleChannel.Read(sampleBuffer, 0, audio.Length);
            _waveProvider.AddSamples(audio, 0, audio.Length);
            if (WaveOutProvider != null && Listening)
            {
                WaveOutProvider.AddSamples(audio, 0, audio.Length);
            }
        }
    }
    // Drain and dispose any frames left after shutdown.
    while (_videoQueue.TryDequeue(out frame))
    {
        frame.Dispose();
    }
}
/// <summary>
/// Reads audio from this sample provider
/// </summary>
/// <param name="buffer">Sample buffer</param>
/// <param name="offset">Offset into sample buffer</param>
/// <param name="count">Number of samples required</param>
/// <returns>Number of samples read; 0 if the channel is gone or the read threw</returns>
public int Read(float[] buffer, int offset, int count)
{
    lock (lockObject)
    {
        try
        {
            // Sometimes a NullReferenceException will bubble up here from VorbisWaveReader (and others?)
            // Null channel (e.g. after teardown) is also reported as "no samples".
            return (sampleChannel?.Read(buffer, offset, count) ?? 0);
        }
        catch (Exception) // TODO: FIX FILTHY HACK
        {
            // Deliberate: any decoder failure is reported as end-of-data rather
            // than crashing the playback thread.
            return (0);
        }
    }
}
/// <summary>
/// VLC audio callback: copies the native sample buffer into managed memory,
/// lazily creates the wave provider / sample channel on first audio, feeds
/// the volume meter and raises DataAvailable. Also fires HasAudioStream once.
/// </summary>
/// <param name="data">Opaque callback data pointer (unused here).</param>
/// <param name="samples">Pointer to the native PCM sample buffer.</param>
/// <param name="count">Sample count; converted to bytes as count * 2 (16-bit mono).</param>
/// <param name="pts">Presentation timestamp (unused here).</param>
private void ProcessAudio(IntPtr data, IntPtr samples, uint count, long pts)
{
    if (!IsRunning || _ignoreAudio || _quit)
    {
        return;
    }
    _lastFrame = DateTime.UtcNow;
    _connecting = false;
    // Snapshot handler once (check/invoke race safety).
    var da = DataAvailable;
    int bytes = (int)count * 2;//(16 bit, 1 channel)
    if (HasAudioStream != null)
    {
        // One-shot notification that this source carries audio.
        HasAudioStream?.Invoke(this, EventArgs.Empty);
        HasAudioStream = null;
    }
    if (da != null)
    {
        var buf = new byte[bytes];
        Marshal.Copy(samples, buf, 0, bytes);
        if (!_audioInited)
        {
            // Lazy init: only build the provider chain once audio actually arrives.
            _audioInited = true;
            _waveProvider = new BufferedWaveProvider(RecordingFormat)
            {
                DiscardOnBufferOverflow = true,
                BufferDuration = TimeSpan.FromMilliseconds(200)
            };
            _sampleChannel = new SampleChannel(_waveProvider);
            _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
        }
        _waveProvider.AddSamples(buf, 0, bytes);
        // Forces volume-level processing without piping it out.
        var sampleBuffer = new float[bytes];
        var read = _sampleChannel.Read(sampleBuffer, 0, bytes);
        da(this, new DataAvailableEventArgs(buf, bytes));
        if (Listening)
        {
            WaveOutProvider?.AddSamples(buf, 0, bytes);
        }
    }
}
/// <summary>
/// Decodes an entire MP3 stream into memory as float samples, capturing the
/// post-decode wave format. The reader is disposed when construction finishes.
/// </summary>
/// <param name="s">Stream containing MP3 data.</param>
public CachedSound(Stream s)
{
    using (var reader = new Mp3FileReader(s))
    {
        var channel = new SampleChannel(reader, false);
        WaveFormat = channel.WaveFormat;
        // Pre-size roughly: 4 bytes per float sample.
        var allSamples = new List<float>((int)(reader.Length / 4));
        // One second of audio per read.
        var chunk = new float[channel.WaveFormat.SampleRate * channel.WaveFormat.Channels];
        int samplesRead;
        while ((samplesRead = channel.Read(chunk, 0, chunk.Length)) > 0)
        {
            for (int i = 0; i < samplesRead; i++)
            {
                allSamples.Add(chunk[i]);
            }
        }
        AudioData = allSamples.ToArray();
    }
}
/// <summary>
/// Resamples a WAV file to the given wave format and returns all of its
/// samples as a float array.
/// </summary>
/// <param name="wavInFilePath">Path to the input WAV file.</param>
/// <param name="waveFormat">Target wave format for resampling.</param>
/// <returns>All resampled samples as a float array.</returns>
public static float[] ResampleWavToFloats(string wavInFilePath, WaveFormat waveFormat)
{
    SampleChannel channel = ResampleToSampleChannel(wavInFilePath, waveFormat);
    const int chunkSize = 16 * 1024;
    var chunk = new float[chunkSize];
    var result = new List<float>();
    int samplesRead;
    // Drain the channel chunk by chunk until it is exhausted.
    while ((samplesRead = channel.Read(chunk, 0, chunkSize)) > 0)
    {
        result.AddRange(chunk.Take(samplesRead));
    }
    return result.ToArray();
}
/// <summary>
/// Decodes the embedded chat-message MP3 resource into float samples and
/// builds the PopAudio AudioClip from them.
/// </summary>
private void LoadChatAudio()
{
    using (var fileStream = new MemoryStream(Properties.Resources.chatmessage))
    // FIX: the Mp3FileReader was never disposed; stack it in the using chain.
    using (WaveStream readerStream = new Mp3FileReader(fileStream))
    {
        SampleChannel sampleChannel = new SampleChannel(readerStream);
        // SampleChannel emits 32-bit floats: 4 bytes per sample per channel.
        int destBytesPerSample = 4 * sampleChannel.WaveFormat.Channels;
        int sourceBytesPerSample = (readerStream.WaveFormat.BitsPerSample / 8) * readerStream.WaveFormat.Channels;
        int byteLength = (int)(destBytesPerSample * (readerStream.Length / sourceBytesPerSample));
        float[] audioFile = new float[byteLength / sizeof(float)];
        sampleChannel.Read(audioFile, 0, audioFile.Length);
        PopAudio = AudioClip.Create("test.mp3", byteLength, sampleChannel.WaveFormat.Channels,
            sampleChannel.WaveFormat.SampleRate, false);
        PopAudio.SetData(audioFile, 0);
    }
}
/// <summary>
/// WaveIn callback: meters volume via the sample channel, echoes to the
/// live-listen output when active, and raises DataAvailable with a copy of
/// the recorded buffer.
/// </summary>
void WaveInDataAvailable(object sender, WaveInEventArgs e)
{
    _isrunning = true;
    // FIX: snapshot the delegate once so the invoke at the end cannot race a
    // handler being removed after the null check.
    var da = DataAvailable;
    if (da == null)
    {
        return;
    }
    //forces processing of volume level without piping it out
    if (_sampleChannel != null)
    {
        var sampleBuffer = new float[e.BytesRecorded];
        _sampleChannel.Read(sampleBuffer, 0, e.BytesRecorded);
    }
    if (Listening && WaveOutProvider != null)
    {
        WaveOutProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
    }
    da(this, new DataAvailableEventArgs((byte[])e.Buffer.Clone(), e.BytesRecorded));
}
/// <summary>
/// Receives a decoded sound packet, downmixes >2 channels to stereo, feeds the
/// wave provider / volume meter / live-listen output and raises DataAvailable.
/// Any failure is swallowed because the callback can fire during shutdown.
/// </summary>
private void SoundCallback(Sound soundData)
{
    // Snapshot handler once (check/invoke race safety).
    var da = DataAvailable;
    if (da == null || _needsSetup)
    {
        return;
    }
    try
    {
        var data = new byte[soundData.SamplesSize];
        Marshal.Copy(soundData.SamplesData, data, 0, (int)soundData.SamplesSize);
        if (_realChannels > 2)
        {
            //resample audio to 2 channels
            data = ToStereo(data, _realChannels);
        }
        _waveProvider?.AddSamples(data, 0, data.Length);
        //forces processing of volume level without piping it out
        var sampleBuffer = new float[data.Length];
        int read = _sampleChannel.Read(sampleBuffer, 0, data.Length);
        da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));
        if (Listening)
        {
            // NOTE(review): 'read' is a float-sample count used as a byte
            // length here - confirm intended.
            WaveOutProvider?.AddSamples(data, 0, read);
        }
    }
    catch
    {
        //can fail at shutdown
    }
}
/// <summary>
/// Streams raw WAV audio over HTTP until stopped: reads ~1/10s blocks, feeds
/// the wave provider / volume meter / live-listen output and raises
/// DataAvailable. Raises AudioFinished (StoppedByUser on clean exit,
/// DeviceLost on error) and always aborts the HTTP request on the way out.
/// </summary>
private void StreamWav()
{
    var res = ReasonToFinishPlaying.StoppedByUser; // NOTE(review): unused local
    HttpWebRequest request = null;
    try
    {
        using (HttpWebResponse resp = ConnectionFactory.GetResponse(_source, out request))
        {
            //1/10 of a second, 16 byte buffer
            var data = new byte[((RecordingFormat.SampleRate / 4) * 2) * RecordingFormat.Channels];
            using (var stream = resp.GetResponseStream())
            {
                if (stream == null)
                {
                    throw new Exception("Stream is null");
                }
                while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
                {
                    // Snapshot handler once per iteration (check/invoke race safety).
                    var da = DataAvailable;
                    if (da != null)
                    {
                        int recbytesize = stream.Read(data, 0, data.Length);
                        if (recbytesize == 0)
                        {
                            // zero-length read means the remote closed the stream
                            throw new Exception("lost stream");
                        }
                        if (_sampleChannel != null)
                        {
                            _waveProvider.AddSamples(data, 0, recbytesize);
                            // Forces volume-level processing without piping it out.
                            var sampleBuffer = new float[recbytesize];
                            _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                            if (Listening && WaveOutProvider != null)
                            {
                                WaveOutProvider.AddSamples(data, 0, recbytesize);
                            }
                            var dae = new DataAvailableEventArgs((byte[])data.Clone(), recbytesize);
                            da(this, dae);
                        }
                    }
                    else
                    {
                        // no subscribers left - stop streaming
                        break;
                    }
                }
            }
        }
        if (AudioFinished != null)
        {
            AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
        }
    }
    catch (Exception ex)
    {
        var af = AudioFinished;
        if (af != null)
        {
            af(this, ReasonToFinishPlaying.DeviceLost);
        }
        MainForm.LogExceptionToFile(ex, "WavStream");
    }
    finally
    {
        // abort request
        if (request != null)
        {
            try
            {
                request.Abort();
            }
            catch
            {
            }
            request = null;
        }
    }
}
/// <summary>
/// Reads fixed-size packets from a direct audio stream until aborted, feeding
/// the wave provider / volume meter / live-listen output and raising
/// DataAvailable. On exit, closes the stream, detaches the volume meter,
/// clears buffers and raises AudioFinished with the final reason.
/// </summary>
private void DirectStreamListener()
{
    _abort = new ManualResetEvent(false);
    try
    {
        var data = new byte[PacketSize];
        if (_stream != null)
        {
            while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
            {
                // Snapshot handler once per iteration (check/invoke race safety).
                var da = DataAvailable;
                if (da != null)
                {
                    int recbytesize = _stream.Read(data, 0, PacketSize);
                    if (recbytesize > 0)
                    {
                        if (_sampleChannel != null)
                        {
                            _waveProvider.AddSamples(data, 0, recbytesize);
                            // Forces volume-level processing without piping it out.
                            var sampleBuffer = new float[recbytesize];
                            int read = _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                            da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));
                            if (Listening)
                            {
                                // NOTE(review): 'read' is a float-sample count used as
                                // a byte length here - confirm intended.
                                WaveOutProvider?.AddSamples(data, 0, read);
                            }
                        }
                    }
                    else
                    {
                        // zero-length read: end of stream
                        break;
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        _res = ReasonToFinishPlaying.DeviceLost;
        Logger.LogException(e, "Direct");
    }
    _stream?.Close();
    _stream = null;
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
    }
    if (_waveProvider != null && _waveProvider.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    if (WaveOutProvider?.BufferedBytes > 0)
    {
        WaveOutProvider.ClearBuffer();
    }
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    _abort.Close();
}
/// <summary>
/// Main ffmpeg demux/decode loop: reads packets until aborted, decoding audio
/// packets through swr_convert into 16-bit PCM (raising DataAvailable and
/// feeding the wave provider / volume meter), and video packets through
/// sws_scale into BGR24 bitmaps (raising NewFrame). Lazily initializes the
/// audio chain and the video scaler on first use. Raises a final null
/// NewFrame and cleans up on exit.
/// </summary>
private void ReadFrames()
{
    pConvertedFrameBuffer = IntPtr.Zero;
    pConvertContext = null;
    var audioInited = false;
    var videoInited = false;
    byte[] buffer = null, tbuffer = null;
    var dstData = new byte_ptrArray4();
    var dstLinesize = new int_array4();
    BufferedWaveProvider waveProvider = null;
    sampleChannel = null;
    var packet = new AVPacket();
    do
    {
        ffmpeg.av_init_packet(&packet);
        if (_audioCodecContext != null && buffer == null)
        {
            // One-second staging buffers (2 bytes per 16-bit sample).
            buffer = new byte[_audioCodecContext->sample_rate * 2];
            tbuffer = new byte[_audioCodecContext->sample_rate * 2];
        }
        if (Log("AV_READ_FRAME", ffmpeg.av_read_frame(_formatContext, &packet)))
        {
            break;
        }
        if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
        {
            break;
        }
        // Snapshot handlers once per packet (check/invoke race safety).
        var nf = NewFrame;
        var da = DataAvailable;
        _lastPacket = DateTime.UtcNow;
        var ret = -11; //EAGAIN
        // ---- audio packet path ----
        if (_audioStream != null && packet.stream_index == _audioStream->index
            && _audioCodecContext != null && !_ignoreAudio)
        {
            if (HasAudioStream != null)
            {
                // One-shot notification that this source carries audio.
                HasAudioStream?.Invoke(this, EventArgs.Empty);
                HasAudioStream = null;
            }
            if (da != null)
            {
                var s = 0;
                fixed (byte** outPtrs = new byte*[32])
                {
                    fixed (byte* bPtr = &tbuffer[0])
                    {
                        outPtrs[0] = bPtr;
                        var af = ffmpeg.av_frame_alloc();
                        ffmpeg.avcodec_send_packet(_audioCodecContext, &packet);
                        // Drain every frame the decoder produced for this packet.
                        do
                        {
                            ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, af);
                            if (ret == 0)
                            {
                                int numSamplesOut = 0;
                                try
                                {
                                    if (_swrContext == null)
                                    {
                                        //need to do this here as send_packet can change channel layout and throw an exception below
                                        initSWR();
                                    }
                                    var dat = af->data[0];
                                    numSamplesOut = ffmpeg.swr_convert(_swrContext,
                                        outPtrs,
                                        _audioCodecContext->sample_rate,
                                        &dat,
                                        af->nb_samples);
                                }
                                catch (Exception ex)
                                {
                                    Logger.LogException(ex, "MediaStream - Audio Read");
                                    // give up on audio for this source, keep video running
                                    _ignoreAudio = true;
                                    break;
                                }
                                if (numSamplesOut > 0)
                                {
                                    // Accumulate converted PCM into 'buffer' at offset s.
                                    var l = numSamplesOut * 2 * OutFormat.Channels;
                                    Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                                    s += l;
                                }
                                else
                                {
                                    ret = numSamplesOut; //(error)
                                }
                            }
                            if (af->decode_error_flags > 0)
                            {
                                break;
                            }
                        } while (ret == 0);
                        ffmpeg.av_frame_free(&af);
                        if (s > 0)
                        {
                            var ba = new byte[s];
                            Buffer.BlockCopy(buffer, 0, ba, 0, s);
                            if (!audioInited)
                            {
                                // Lazy init once real audio arrives.
                                audioInited = true;
                                RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16,
                                    _audioCodecContext->channels);
                                waveProvider = new BufferedWaveProvider(RecordingFormat)
                                {
                                    DiscardOnBufferOverflow = true,
                                    BufferDuration = TimeSpan.FromMilliseconds(200)
                                };
                                sampleChannel = new SampleChannel(waveProvider);
                                sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                            }
                            waveProvider.AddSamples(ba, 0, s);
                            // Forces volume-level processing without piping it out.
                            var sampleBuffer = new float[s];
                            var read = sampleChannel.Read(sampleBuffer, 0, s);
                            da(this, new DataAvailableEventArgs(ba, s));
                            if (Listening)
                            {
                                // NOTE(review): 'read' is a float-sample count used as a
                                // byte length here - confirm intended.
                                WaveOutProvider?.AddSamples(ba, 0, read);
                            }
                        }
                    }
                }
            }
        }
        // ---- video packet path ----
        if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index
            && _videoCodecContext != null)
        {
            var ef = ShouldEmitFrame;
            ffmpeg.avcodec_send_packet(_videoCodecContext, &packet);
            do
            {
                var vf = ffmpeg.av_frame_alloc();
                ret = ffmpeg.avcodec_receive_frame(_videoCodecContext, vf);
                if (ret == 0 && ef)
                {
                    AVPixelFormat srcFmt;
                    if (_hwDeviceCtx != null)
                    {
                        // Hardware decode: transfer the frame back to system memory.
                        srcFmt = AVPixelFormat.AV_PIX_FMT_NV12;
                        var output = ffmpeg.av_frame_alloc();
                        ffmpeg.av_hwframe_transfer_data(output, vf, 0);
                        ffmpeg.av_frame_copy_props(output, vf);
                        ffmpeg.av_frame_free(&vf);
                        vf = output;
                    }
                    else
                    {
                        srcFmt = (AVPixelFormat)vf->format;
                    }
                    if (!videoInited)
                    {
                        // Lazy init of the BGR24 conversion buffer and scaler context.
                        videoInited = true;
                        _finalSize = Helper.CalcResizeSize(_source.settings.resize,
                            new Size(_videoCodecContext->width, _videoCodecContext->height),
                            new Size(_source.settings.resizeWidth, _source.settings.resizeHeight));
                        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(
                            AVPixelFormat.AV_PIX_FMT_BGR24, _finalSize.Width, _finalSize.Height, 1);
                        pConvertedFrameBuffer = Marshal.AllocHGlobal(convertedFrameBufferSize);
                        ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize,
                            (byte*)pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24,
                            _finalSize.Width, _finalSize.Height, 1);
                        pConvertContext = ffmpeg.sws_getContext(_videoCodecContext->width,
                            _videoCodecContext->height, NormalizePixelFormat(srcFmt), _finalSize.Width,
                            _finalSize.Height, AVPixelFormat.AV_PIX_FMT_BGR24,
                            ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                    }
                    Log("SWS_SCALE",
                        ffmpeg.sws_scale(pConvertContext, vf->data, vf->linesize, 0,
                            _videoCodecContext->height, dstData, dstLinesize));
                    if (vf->decode_error_flags > 0)
                    {
                        ffmpeg.av_frame_free(&vf);
                        break;
                    }
                    // Wrap the converted buffer in a Bitmap only for the handler call.
                    using (var mat = new Bitmap(_finalSize.Width, _finalSize.Height, dstLinesize[0],
                        PixelFormat.Format24bppRgb, pConvertedFrameBuffer))
                    {
                        var nfe = new NewFrameEventArgs(mat);
                        nf.Invoke(this, nfe);
                    }
                    _lastVideoFrame = DateTime.UtcNow;
                    ffmpeg.av_frame_free(&vf);
                    break;
                }
                ffmpeg.av_frame_free(&vf);
            } while (ret == 0);
        }
        if (nf != null && _videoStream != null)
        {
            // Watchdog: no video frame within the timeout means the device is lost.
            if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds * 1000 > _timeoutMicroSeconds)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                _abort = true;
            }
        }
        ffmpeg.av_packet_unref(&packet);
        if (ret == -11)
        {
            // EAGAIN: decoder had nothing for us; back off briefly.
            Thread.Sleep(10);
        }
    } while (!_abort && !MainForm.ShuttingDown);
    // Null frame signals end-of-stream to subscribers.
    NewFrame?.Invoke(this, new NewFrameEventArgs(null));
    CleanUp();
}
/// <summary>
/// Streams an MP3 source over HTTP until aborted: loads MP3 frames from the
/// response stream, lazily builds the ACM decompressor and buffered provider
/// from the first frame, decompresses into PCM, feeds the wave provider /
/// volume meter / live-listen output and raises DataAvailable. Raises
/// AudioFinished with the final reason on exit.
/// </summary>
private void StreamMP3()
{
    _abort = new ManualResetEvent(false);
    HttpWebRequest request = null;
    try
    {
        var resp = _connFactory.GetResponse(_source, "GET", "", out request);
        var buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
        IMp3FrameDecompressor decompressor = null;
        using (var responseStream = resp.GetResponseStream())
        {
            var readFullyStream = new ReadFullyStream(responseStream);
            while (!_abort.WaitOne(20) && !MainForm.ShuttingDown)
            {
                // Back off while the playback buffer is nearly full.
                if (_bufferedWaveProvider != null &&
                    _bufferedWaveProvider.BufferLength - _bufferedWaveProvider.BufferedBytes <
                    _bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4)
                {
                    //Debug.WriteLine("Buffer getting full, taking a break");
                    Thread.Sleep(100);
                }
                else
                {
                    // Snapshot handler once per iteration (check/invoke race safety).
                    var da = DataAvailable;
                    if (da != null)
                    {
                        Mp3Frame frame;
                        try
                        {
                            frame = Mp3Frame.LoadFromStream(readFullyStream);
                        }
                        catch (EndOfStreamException)
                        {
                            // reached the end of the MP3 file / stream
                            break;
                        }
                        catch (WebException)
                        {
                            // probably we have aborted download from the GUI thread
                            break;
                        }
                        if (decompressor == null || _bufferedWaveProvider == null)
                        {
                            // don't think these details matter too much - just help ACM select the right codec
                            // however, the buffered provider doesn't know what sample rate it is working at
                            // until we have a frame
                            WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate,
                                frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength,
                                frame.BitRate);
                            RecordingFormat = new WaveFormat(frame.SampleRate, 16,
                                frame.ChannelMode == ChannelMode.Mono ? 1 : 2);
                            decompressor = new AcmMp3FrameDecompressor(waveFormat);
                            _bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat)
                            {
                                BufferDuration = TimeSpan.FromSeconds(5)
                            };
                            _sampleChannel = new SampleChannel(_bufferedWaveProvider);
                            _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                        }
                        int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                        _bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                        // Forces volume-level processing without piping it out.
                        var sampleBuffer = new float[buffer.Length];
                        int read = _sampleChannel.Read(sampleBuffer, 0, buffer.Length);
                        da(this, new DataAvailableEventArgs((byte[])buffer.Clone(), read));
                        if (Listening)
                        {
                            // NOTE(review): 'read' is a float-sample count used as a
                            // byte length here - confirm intended.
                            WaveOutProvider?.AddSamples(buffer, 0, read);
                        }
                    }
                }
            }
            // was doing this in a finally block, but for some reason
            // we are hanging on response stream .Dispose so never get there
            if (decompressor != null)
            {
                decompressor.Dispose();
                decompressor = null;
            }
        }
    }
    catch (Exception ex)
    {
        _res = ReasonToFinishPlaying.DeviceLost;
        Logger.LogException(ex, "MP3Stream");
    }
    try
    {
        request?.Abort();
    }
    catch
    {
    }
    request = null;
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    _abort.Close();
}