/// <summary>
/// Handles a native sound capture callback: copies the unmanaged sample data,
/// feeds the buffered wave provider, drains the sample channel (so volume
/// metering runs), optionally mirrors to the listen-out provider, and raises
/// <c>DataAvailable</c> with a defensive copy of the bytes.
/// </summary>
/// <param name="soundData">Native packet with a pointer to sample data and its size.</param>
private void SoundCallback(Sound soundData)
{
    // Snapshot the handler once: the original re-checked DataAvailable before
    // invoking, which still raced a concurrent unsubscribe between check and call.
    var handler = DataAvailable;
    if (handler == null || _needsSetup)
    {
        return;
    }

    if (_sampleChannel == null)
    {
        return;
    }

    // Copy the unmanaged buffer into managed memory.
    var samples = new byte[soundData.SamplesSize];
    Marshal.Copy(soundData.SamplesData, samples, 0, (int)soundData.SamplesSize);

    _waveProvider.AddSamples(samples, 0, samples.Length);

    // Drain through the sample channel so its volume meter fires.
    // NOTE(review): the float buffer is sized by the BYTE count, matching the
    // sibling handlers in this file — oversized but harmless.
    var sampleBuffer = new float[samples.Length];
    _sampleChannel.Read(sampleBuffer, 0, samples.Length);

    if (Listening && WaveOutProvider != null)
    {
        WaveOutProvider.AddSamples(samples, 0, samples.Length);
    }

    // Clone so subscribers cannot mutate our working buffer.
    handler(this, new DataAvailableEventArgs((byte[])samples.Clone()));
}
/// <summary>
/// New recording data is available, distribute the data.
/// </summary>
private void OnDataAvailable(object sender, DataAvailableEventArgs e)
{
    // Not ready until the conversion source exists and has a wave format.
    if (convertedSource == null || convertedSource.WaveFormat == null)
    {
        return;
    }

    // Liveness timestamp — presumably a watchdog elsewhere compares against
    // this to detect a stalled capture; TODO confirm.
    latestDataAvailable = DateTime.Now;

    if (dataAvailableCallback != null)
    {
        int read;
        lock (bufferSwapSync)
        {
            // Fill the capture-side buffer as far as it will go. A consumer
            // presumably swaps bufferCaptured under the same lock and invokes
            // dataAvailableCallback — verify against the rest of the class.
            var currentBuffer = bufferCaptured;
            var spaceLeft = bufferCaptured.Data.Length - bufferCaptured.Used;
            while (spaceLeft > 0 && (read = convertedSource.Read(currentBuffer.Data, currentBuffer.Used, spaceLeft)) > 0)
            {
                spaceLeft -= read;
                currentBuffer.Used += read;
            }
        }
    }
}
/// <summary>
/// Scans each incoming float-sample packet for full-scale sync pulses and
/// records their timestamps (in ticks) into _audioTicks; the first five
/// pulses act as a countdown before the real session starts.
/// </summary>
void OnData(object sender, DataAvailableEventArgs e)
{
    // Advance the shared time tracker by this packet's duration.
    _tracker.OnData(e.Format.BytesToMilliseconds(e.ByteCount));
    var startStep = _tracker.CurrentTimeAtBeginningOfLastRecordedSample;

    // Duration of one interleaved sample value in 100-ns ticks
    // (1 s = 10,000,000 ticks). NOTE(review): integer division truncates, so
    // this drifts slightly for rates that don't divide evenly — confirm acceptable.
    var stepTime = 1 * 1000 * 10000 / e.Format.SampleRate / e.Format.Channels;
    long tick = (long)startStep;

    // Reinterpret the raw bytes as 32-bit float samples.
    var target = new float[e.ByteCount / 4];
    System.Buffer.BlockCopy(e.Data, 0, target, 0, e.ByteCount);

    int i = 0;
    foreach (var ee in target)
    {
        i++;
        tick += stepTime;
        // A sample of exactly 1.0f marks a sync pulse; `i % 2 == 0`
        // presumably selects one channel of an interleaved stereo stream —
        // TODO confirm against the capture format.
        if (ee == 1.0f && i % 2 == 0)
        {
            _audioTicks.Add(tick);
            if (firstStart)
            {
                Console.WriteLine($"Start in... {5 - _audioTicks.Count}");
            }
            // After five countdown pulses, reset and begin for real.
            if (firstStart && _audioTicks.Count == 5)
            {
                _audioTicks.Clear();
                firstStart = false;
            }
        }
    }
}
/// <summary>
/// Writes converted audio to the wave file whenever the sound-in source
/// reports new data. Reads from _convertedSource rather than e.Data so the
/// written bytes are in the target format.
/// </summary>
private void OnDatAvailable(object sender, DataAvailableEventArgs e)
{
    //register an event handler for the DataAvailable event of
    //the soundInSource
    //Important: use the DataAvailable of the SoundInSource
    //If you use the DataAvailable event of the ISoundIn itself
    //the data recorded by that event might won't be available at the
    //soundInSource yet
    _logger.Debug($"Audio data available - Bytes: {e.ByteCount} Format: {e.Format}");

    //read data from the converedSource
    //important: don't use the e.Data here
    //the e.Data contains the raw data provided by the
    //soundInSource which won't have your target format
    // Half a second of audio per read.
    byte[] buffer = new byte[_convertedSource.WaveFormat.BytesPerSecond / 2];

    int read;
    //keep reading as long as we still get some data
    //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
    while ((read = _convertedSource.Read(buffer, 0, buffer.Length)) > 0)
    {
        //write the read data to a file
        // ReSharper disable once AccessToDisposedClosure
        _waveWriter.Write(buffer, 0, read);
    }
}
// Capturing EMG samples //private void Capture_DataAvailable(object sender, DataAvailableEventArgs e) //{ // int samplesPerChannel = e.ScanNumber; // float[] values = new float[samplesPerChannel*1]; // Change to add more sensors // for (int sampleNumber = 0; sampleNumber < samplesPerChannel; sampleNumber = sampleNumber+1) // This loops captures data from sensor # sampleNumber+1 // { // Console.WriteLine("Sensor #" + 1 + ": " + e.Samples[0, sampleNumber]); //write sensor values in console // Console.WriteLine("Sensor #" + 2 + ": " + e.Samples[1, sampleNumber]); //write sensor values in console // values[sampleNumber*1] = e.Samples[0, sampleNumber]; // //values[sampleNumber*4+1] = e.Samples[1, sampleNumber]; // //values[sampleNumber * 4 + 2] = e.Samples[2, sampleNumber]; // //values[sampleNumber * 4 + 3] = e.Samples[3, sampleNumber]; // //values[sampleNumber * 8 + 4] = e.Samples[4, sampleNumber]; // //values[sampleNumber * 8 + 5] = e.Samples[5, sampleNumber]; // //values[sampleNumber * 8 + 6] = e.Samples[6, sampleNumber]; // //values[sampleNumber * 8 + 7] = e.Samples[7, sampleNumber]; // } // Send(values); //} // capturing IMU samples. 
// Capturing IMU samples.
// GyroscopeSamples / AccelerometerSamples indices:
//   first index  — sensor (0 .. InstalledSensors - 1)
//   second index — axis: 0 = x, 1 = y, 2 = z (angular velocity / acceleration)
//   third index  — sample (0 .. SamplesNumber - 1)
// FIX: the index description above had lost its comment markers in the
// original source, which made the file uncompilable as written.
private void Capture_DataAvailable(object sender, DataAvailableEventArgs e)
{
    //int samplesPerChannel = e.ScanNumber;
    // The device caps ScanNumber at 20, so a fixed count is used here.
    // NOTE(review): if a packet ever carries fewer scans this reads
    // stale/invalid samples — confirm with the SDK.
    int samplesPerChannel = 20;

    // One row per scan; columns hold gyro X and Y for sensor #15 (index 14).
    // Change to add more sensors.
    float[,] values = new float[samplesPerChannel, 2];

    for (int sampleNumber = 0; sampleNumber < samplesPerChannel; sampleNumber = sampleNumber + 1)
    {
        Console.WriteLine("Sensor #" + 15 + " angular velocity X: " + e.GyroscopeSamples[14, 0, sampleNumber] + " " + sampleNumber);
        Console.WriteLine("Sensor #" + 15 + " angular velocity Y: " + e.GyroscopeSamples[14, 1, sampleNumber] + " " + sampleNumber);

        values[sampleNumber, 0] = e.GyroscopeSamples[14, 0, sampleNumber];
        values[sampleNumber, 1] = e.GyroscopeSamples[14, 1, sampleNumber];
    }

    // Forward the captured window downstream.
    Send(values);
}
/// <summary>
/// CSCore capture callback: marks capture as started, stops the capture when
/// the keyboard thread has been asked to exit, then walks the buffer one
/// block at a time feeding the first channel's float sample into the
/// aggregator until it signals its window is full.
/// </summary>
private static void CSCore_DataAvailable(object sender, DataAvailableEventArgs e)
{
    // Idiom fix: replaced `!= true`/`== true` comparisons and stray empty
    // statements (`;` after blocks) from the original; behavior unchanged.
    if (!Program.CSCore_CaptureStarted)
    {
        Program.CSCore_CaptureStarted = true;
    }

    // RunKeyboardThread != 2 signals shutdown; stop the device.
    if (Program.RunKeyboardThread != 2)
    {
        CSCore_StopCapture();
    }

    byte[] buffer = e.Data;
    int bytesRecorded = e.ByteCount;
    int bufferIncrement = capture.WaveFormat.BlockAlign;

    for (int index = 0; index < bytesRecorded; index += bufferIncrement)
    {
        float sample32 = BitConverter.ToSingle(buffer, index);
        if (sampleAggregator.Add(sample32))
        {
            break; // aggregator window full
        }
    }
}
/// <summary>
/// Forwards each captured packet to the writer, but only while this sink is enabled.
/// </summary>
private void OnCaptureDataAvailable(object sender, DataAvailableEventArgs e)
{
    if (!IsEnabled)
    {
        return;
    }

    Write(this, e.Format, e.Data, e.Offset, e.ByteCount);
}
/// <summary>
/// WaveIn capture callback: runs the recorded bytes through the sample
/// channel (volume metering), optionally mirrors them to the listen-out
/// provider, and raises <c>DataAvailable</c> with a copy of the buffer.
/// </summary>
void WaveInDataAvailable(object sender, WaveInEventArgs e)
{
    _isrunning = true;

    // Snapshot the handler so a concurrent unsubscribe between the null
    // check and the invoke cannot throw (the sibling handlers in this file
    // already use this pattern). Also removed the dead commented-out
    // duplicate of this method and the old byte-rate debug scaffolding.
    var handler = DataAvailable;
    if (handler == null || _sampleChannel == null)
    {
        return;
    }

    //forces processing of volume level without piping it out
    // NOTE(review): float buffer sized by the BYTE count, as elsewhere here.
    var sampleBuffer = new float[e.BytesRecorded];
    _sampleChannel.Read(sampleBuffer, 0, e.BytesRecorded);

    if (Listening && WaveOutProvider != null)
    {
        WaveOutProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
    }

    handler(this, new DataAvailableEventArgs((byte[])e.Buffer.Clone(), e.BytesRecorded));
}
/// <summary>
/// WaveIn capture callback: meters volume via the sample channel, mirrors
/// audio to the listen-out provider when listening, then raises
/// <c>DataAvailable</c> with a copy of the recorded bytes.
/// </summary>
void WaveInDataAvailable(object sender, WaveInEventArgs e)
{
    if (_waveIn == null)
    {
        return;
    }

    var handler = DataAvailable;
    var channel = _sampleChannel;
    if (handler == null || channel == null)
    {
        return;
    }

    // Drain through the sample channel so volume metering runs.
    var meterBuffer = new float[e.BytesRecorded];
    channel.Read(meterBuffer, 0, e.BytesRecorded);

    if (Listening)
    {
        WaveOutProvider?.AddSamples(e.Buffer, 0, e.BytesRecorded);
    }

    var args = new DataAvailableEventArgs((byte[])e.Buffer.Clone(), e.BytesRecorded);
    handler(this, args);
}
// Process audio data: runs beat detection on each WASAPI packet and redraws
// the chart on the UI thread.
private void Wasapi_DataAvailable(object sender, DataAvailableEventArgs e)
{
    // Only marshal onto the UI thread while the form handle is alive.
    if (IsHandleCreated && !Disposing && !IsDisposed)
    {
        try
        {
            Invoke(new Action(() =>
            {
                // Convert 32-bit PCM (2 channels) to samples and feed the detector.
                var samples = PCMUtils.PCM32ToSamples(e.Data, e.ByteCount, 2);
                detector.ProcessData(samples);

                // Redraw: series 0 = energy, 1 = beats, 2 = threshold line.
                chart1.Series[0].Points.Clear();
                chart1.Series[1].Points.Clear();
                chart1.Series[2].Points.Clear();
                chart1.Series[2].Points.AddXY(0, detector.BeatThreshold);
                chart1.Series[2].Points.AddXY(detector.WindowSize, detector.BeatThreshold);
                for (int i = 0; i < detector.energyBuffer.Count; i++)
                {
                    chart1.Series[0].Points.Add(detector.energyBuffer[i]);
                    chart1.Series[1].Points.Add(detector.beatBuffer[i]);
                }
                label1.Text = $"Gain: {detector.Gain:0.00}";
            }));
        }
        catch (ObjectDisposedException)
        {
            // Form was disposed between the check and the Invoke; safe to ignore.
        }
    }
}
/// <summary>
/// Blocking listener loop: receives raw audio from the web socket in
/// 6400-byte packets, meters each through the sample channel, optionally
/// mirrors to the listen-out provider, and raises DataAvailable with a copy.
/// Runs until stopEvent fires or all subscribers detach; errors raise
/// AudioSourceError and are logged. Always closes the socket on exit.
/// </summary>
private void WebStreamListener()
{
    try
    {
        var data = new byte[6400];
        if (_socket != null)
        {
            while (!stopEvent.WaitOne(0, false))
            {
                if (DataAvailable != null)
                {
                    int recbytesize = _socket.Receive(data, 0, 6400, SocketFlags.None);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(data, 0, recbytesize);
                        // Drain through the sample channel so volume metering runs.
                        var sampleBuffer = new float[recbytesize];
                        _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                        if (Listening && WaveOutProvider != null)
                        {
                            WaveOutProvider.AddSamples(data, 0, recbytesize);
                        }
                        // NOTE(review): clones the full 6400-byte buffer even when
                        // fewer bytes arrived — confirm consumers tolerate that.
                        var da = new DataAvailableEventArgs((byte[])data.Clone());
                        DataAvailable(this, da);
                    }
                }
                else
                {
                    // No subscribers left: end the listener.
                    break;
                }
                // need to stop ?
                if (stopEvent.WaitOne(0, false))
                {
                    break;
                }
            }
        }
        if (AudioFinished != null)
        {
            AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
        }
    }
    catch (Exception e)
    {
        if (AudioSourceError != null)
        {
            AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        }
        Log.Error("", e);//MainForm.LogExceptionToFile(e);
    }
    if (_socket != null)
    {
        _socket.Close();
        _socket = null;
    }
}
/// <summary>
/// Forwards microphone audio to the camera talk stream while a talk session
/// is active: converts the packet to the device format if needed, encodes it,
/// and writes it out in fixed 240-byte packets, carrying any remainder over
/// to the next call in _talkBuffer.
/// </summary>
private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
{
    try
    {
        lock (_obj)
        {
            if (_bTalking && _avstream != null)
            {
                byte[] bSrc = e.RawData;
                int totBytes = bSrc.Length;
                if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                {
                    // Resample/convert in place; 25000 bytes caps one packet.
                    using (var helper = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                    {
                        using (var helpStm = new WaveFormatConversionStream(_waveFormat, helper))
                        {
                            totBytes = helpStm.Read(bSrc, 0, 25000);
                        }
                    }
                }
                // NOTE(review): the mu-law encode result is immediately
                // overwritten by the A-law encode below — the first call
                // appears to serve only to size `enc`; confirm this is intended.
                var enc = _muLawCodec.Encode(bSrc, 0, totBytes);
                ALawEncoder.ALawEncode(bSrc, totBytes, enc);
                Buffer.BlockCopy(enc, 0, _talkBuffer, _talkDatalen, enc.Length);
                _talkDatalen += enc.Length;
                int j = 0;
                try
                {
                    while (j + 240 < _talkDatalen)
                    {
                        //need to write out in 240 byte packets
                        var pkt = new byte[240];
                        Buffer.BlockCopy(_talkBuffer, j, pkt, 0, 240);
                        // _avstream.Write(_hdr, 0, _hdr.Length);
                        _avstream.Write(pkt, 0, 240);
                        j += 240;
                    }
                    if (j < _talkDatalen)
                    {
                        // Shift the unsent tail to the front for the next call.
                        Buffer.BlockCopy(_talkBuffer, j, _talkBuffer, 0, _talkDatalen - j);
                        _talkDatalen = _talkDatalen - j;
                    }
                }
                catch (SocketException)
                {
                    StopTalk();
                }
            }
        }
    }
    catch (Exception ex)
    {
        MainForm.LogExceptionToFile(ex);
        StopTalk();
    }
}
/// <summary>
/// Encodes each incoming audio packet and re-raises it via <c>DataAvailable</c>.
/// </summary>
void AudioProviderOnDataAvailable(object Sender, DataAvailableEventArgs DataAvailableEventArgs)
{
    // Grow the shared output buffer if this packet needs more room.
    _audioEncoder.EnsureBufferIsSufficient(ref _encodedBuffer, DataAvailableEventArgs.Length);

    int encodedLength = _audioEncoder.Encode(DataAvailableEventArgs.Buffer, 0, DataAvailableEventArgs.Length, _encodedBuffer, 0);

    var outgoing = new DataAvailableEventArgs(_encodedBuffer, encodedLength);
    DataAvailable?.Invoke(this, outgoing);
}
/// <summary>
/// Buffers the incoming packet and re-raises <c>DataAvailable</c> for
/// non-empty packets.
/// </summary>
private void OnDataAvailable(object sender, DataAvailableEventArgs e)
{
    _buffer.Write(e.Data, 0, e.ByteCount);

    // Snapshot the delegate so a concurrent unsubscribe cannot null it
    // between the check and the call — matches the sibling overload in this
    // file that already uses this pattern.
    var handler = DataAvailable;
    if (e.ByteCount > 0 && handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Drains the sampled wave source so that SingleBlockRead keeps firing;
/// the bytes themselves are discarded.
/// </summary>
private void SoundInSource_DataAvailable(object sender, DataAvailableEventArgs e)
{
    // Half a second's worth of audio per read.
    var scratch = new byte[this.sampledWaveSource.WaveFormat.BytesPerSecond / 2];
    int consumed;
    do
    {
        consumed = this.sampledWaveSource.Read(scratch, 0, scratch.Length);
    } while (consumed > 0);
}
/// <summary>
/// Keeps the source drained; reading triggers downstream processing and the
/// data itself is thrown away.
/// </summary>
private void SoundInSource_DataAvailable(object sender, DataAvailableEventArgs e)
{
    // Loop until a read returns zero, meaning nothing more is queued.
    while (_source.Read(buffer, 0, buffer.Length) > 0)
    {
    }
}
/// <summary>
/// Discards everything currently buffered in the wave source so its
/// internal processing chain keeps advancing.
/// </summary>
public void DataAvailable(System.Object sender, DataAvailableEventArgs args)
{
    int consumed;
    do
    {
        consumed = waveSource.Read(buffer, 0, buffer.Length);
    } while (consumed > 0);
}
/// <summary>
/// Streams each captured packet over UDP, renegotiating the wave format
/// with the receiver whenever the capture format changes.
/// </summary>
public void AudioCaptureEvent(object sender, DataAvailableEventArgs e)
{
    bool formatChanged = oldFormat != e.Format;
    if (formatChanged)
    {
        convertWaveFormat(e.Format);
        sendWaveFormat();
    }

    udpSocket.Send(e.Data, e.ByteCount, SocketFlags.DontRoute);
}
/// <summary>
/// The byte sequence passed to the constructor must surface unchanged,
/// in order, through the MessageData property.
/// </summary>
public void CtorDataAvailableInProperty()
{
    var args = new DataAvailableEventArgs(new List<byte>(new byte[] { 3, 2, 1 }));

    Assert.Equal<int>(3, args.MessageData.Count());
    Assert.Equal<byte>(3, args.MessageData[0]);
    Assert.Equal<byte>(2, args.MessageData[1]);
    Assert.Equal<byte>(1, args.MessageData[2]);
}
/// <summary>
/// Pumps all currently buffered audio out of the stream, forwarding each
/// chunk to DataAvailable subscribers.
/// </summary>
private void OnSoundDataAvailable(object sender, DataAvailableEventArgs dataAvailableEventArgs)
{
    for (;;)
    {
        int chunk = _stream.Read(_readBuffer, 0, _readBuffer.Length);
        if (chunk <= 0)
        {
            break;
        }

        DataAvailable?.Invoke(_readBuffer, 0, chunk);
    }
}
/// <summary>
/// Copies everything currently available from the source into the writer.
/// </summary>
private void OnNewData(object sender, DataAvailableEventArgs e)
{
    for (;;)
    {
        int chunk = _source.Read(_writerBuffer, 0, _writerBuffer.Length);
        if (chunk <= 0)
        {
            break;
        }

        _writer.Write(_writerBuffer, 0, chunk);
    }
}
/// <summary>
/// Blocking listener loop for a direct stream: reads PacketSize chunks,
/// meters them through the sample channel, optionally mirrors to the
/// listen-out provider, and raises DataAvailable with a copy of each chunk.
/// Reports StoppedByUser on clean exit and DeviceLost on exception; always
/// closes the stream.
/// </summary>
private void DirectStreamListener()
{
    try
    {
        var data = new byte[PacketSize];
        if (_stream != null)
        {
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                if (DataAvailable != null)
                {
                    int recbytesize = _stream.Read(data, 0, PacketSize);
                    if (recbytesize > 0)
                    {
                        if (_sampleChannel != null)
                        {
                            _waveProvider.AddSamples(data, 0, recbytesize);
                            // Drain via the sample channel so volume metering runs.
                            var sampleBuffer = new float[recbytesize];
                            _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                            if (Listening)
                            {
                                WaveOutProvider?.AddSamples(data, 0, recbytesize);
                            }
                            var da = new DataAvailableEventArgs((byte[])data.Clone(), recbytesize);
                            DataAvailable(this, da);
                        }
                    }
                    else
                    {
                        // End of stream.
                        break;
                    }
                    if (_stopEvent.WaitOne(Interval, false))
                    {
                        break;
                    }
                }
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        //if (AudioSourceError!=null)
        //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        MainForm.LogExceptionToFile(e, "Direct");
    }
    _stream?.Close();
    _stream = null;
}
/// <summary>
/// Buffers the packet, then re-raises DataAvailable (snapshotted to avoid a
/// race with unsubscription) for non-empty packets.
/// </summary>
private void OnDataAvailable(object sender, DataAvailableEventArgs e)
{
    _buffer.Write(e.Data, 0, e.ByteCount);

    EventHandler<DataAvailableEventArgs> handler = DataAvailable;
    if (handler != null && e.ByteCount > 0)
    {
        handler(this, e);
    }
}
/// <summary>
/// Drains the final chain so upstream processing keeps running; the decoded
/// bytes are deliberately discarded.
/// </summary>
private static unsafe void encode(object sender, DataAvailableEventArgs e)
{
    while (final.Read(trashBuf, 0, trashBuf.Length) > 0)
    {
        // Data intentionally thrown away.
    }
}
/// <summary>
/// Write captured Audio to file
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Packet holding the raw bytes and their count.</param>
void WriteAudio(object sender, DataAvailableEventArgs e)
{
    // No target file yet — drop the packet.
    if (audioFile == null)
    {
        return;
    }

    audioFile.Write(e.Data, 0, e.ByteCount);
}
/// <summary>
/// Drains the wave source and logs how many bytes were consumed in total.
/// </summary>
public void DataAvailable(Object sender, DataAvailableEventArgs args)
{
    int read;
    int total = 0;
    while ((read = waveSource.Read(buffer, 0, buffer.Length)) > 0)
    {
        total += read;
    }

    // FIX: the original logged `read`, which is always 0 once the loop
    // exits; report the accumulated byte count instead.
    Debug.Log("Read " + total);
}
/// <summary>
/// Blocking listener loop: receives 6400-byte packets from the web socket,
/// meters them through the sample channel, optionally mirrors to the
/// listen-out provider, and raises DataAvailable with a copy of each packet.
/// Reports StoppedByUser on clean exit and DeviceLost on exception; always
/// closes the socket.
/// </summary>
private void WebStreamListener()
{
    try
    {
        var data = new byte[6400];
        if (_socket != null)
        {
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                if (DataAvailable != null)
                {
                    int recbytesize = _socket.Receive(data, 0, 6400, SocketFlags.None);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(data, 0, recbytesize);
                        // Drain through the sample channel so volume metering runs.
                        var sampleBuffer = new float[recbytesize];
                        _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                        if (Listening)
                        {
                            WaveOutProvider?.AddSamples(data, 0, recbytesize);
                        }
                        // NOTE(review): clones the whole 6400-byte buffer even
                        // when fewer bytes arrived — confirm consumers tolerate it.
                        var da = new DataAvailableEventArgs((byte[])data.Clone());
                        DataAvailable(this, da);
                    }
                }
                else
                {
                    // No subscribers: stop listening.
                    break;
                }
                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                {
                    break;
                }
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        //if (AudioSourceError!=null)
        //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        MainForm.LogExceptionToFile(e, "WebStream");
    }
    if (_socket != null)
    {
        _socket.Close();
        _socket = null;
    }
}
/// <summary>
/// Re-publishes device audio as a NewDataAvailableArgs event, cloning the
/// raw buffer so subscribers own their copy; failures are logged, not thrown.
/// </summary>
public void AudioDeviceDataAvailable(object sender, DataAvailableEventArgs e)
{
    try
    {
        var copy = (byte[])e.RawData.Clone();
        DataAvailable?.Invoke(this, new NewDataAvailableArgs(copy));
    }
    catch (Exception ee)
    {
        Logger.Error("AudioDeviceDataAvailable error occured:" + ee.Message);
    }
}
/// <summary>
/// Relays source audio to AudioAvailable subscribers; any subscriber
/// exception is routed to the error handler instead of propagating.
/// </summary>
void SourceDataAvailable(object sender, DataAvailableEventArgs eventArgs)
{
    try
    {
        var listeners = AudioAvailable;
        listeners?.Invoke(this, eventArgs);
    }
    catch (Exception ex)
    {
        ErrorHandler?.Invoke(ex.Message);
    }
}
/// <summary>
/// Raises DataAvailable with a defensive copy of the supplied samples;
/// empty arrays and the no-subscriber case are ignored.
/// </summary>
public void AddSamples(byte[] samples)
{
    var handler = DataAvailable;
    if (handler == null || samples.Length == 0)
    {
        return;
    }

    handler(this, new DataAvailableEventArgs((byte[])samples.Clone()));
}
/// <summary>
/// Drains one second of audio from the real-time source per event.
/// </summary>
void DataAvailable(object o, DataAvailableEventArgs data)
{
    // Earlier experiments kept below for reference; per the author's note,
    // the buffer size is sensitive: too small starves input, too large lags.
    //byte[] buffer = new byte[realTimeSource.WaveFormat.BytesPerSecond / 2];
    //while (realTimeSource.Read(buffer, 0, buffer.Length) > 0)
    //{
    //}
    //byte[] buffer = data.Data;
    //seems like buffer is something like the hz constant value, like 48000 works, low value no input, high value lags
    byte[] buffer = new byte[realTimeSource.WaveFormat.BytesPerSecond];
    realTimeSource.Read(buffer, 0, buffer.Length);
    // NOTE(review): single Read (not a drain loop) — any bytes beyond one
    // second stay queued until the next event; confirm this is intended.
}
/// <summary>
/// Pushes microphone audio to the camera talk stream: converts the packet to
/// the device wave format when needed (draining the conversion stream fully
/// into a scratch buffer), A-law encodes it, and writes it out. Socket
/// failures end the talk session.
/// </summary>
private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
{
    try
    {
        lock (_obj)
        {
            if (_bTalking && _avstream != null)
            {
                byte[] bSrc = e.RawData;
                int totBytes = bSrc.Length;
                if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                {
                    using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                    {
                        int j = -1;
                        // NOTE(review): fixed 44100-byte scratch — overflows if
                        // the converted packet is larger; confirm upstream caps it.
                        var bDst = new byte[44100];
                        totBytes = 0;
                        using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws))
                        {
                            // Read until the converter is exhausted.
                            while (j != 0)
                            {
                                j = helpStm.Read(bDst, totBytes, 10000);
                                totBytes += j;
                            }
                            helpStm.Close();
                        }
                        ws.Close();
                        bSrc = bDst;
                    }
                }
                // A-law: one output byte per 16-bit sample.
                var enc = new byte[totBytes / 2];
                ALawEncoder.ALawEncode(bSrc, totBytes, enc);
                try
                {
                    _avstream.Write(enc, 0, enc.Length);
                }
                catch (SocketException)
                {
                    StopTalk();
                }
            }
        }
    }
    catch (Exception ex)
    {
        MainForm.LogExceptionToFile(ex);
        StopTalk();
    }
}
/// <summary>
/// Pushes microphone audio to the camera's talk stream: converts the
/// recorded packet to the device wave format if needed, A-law encodes it,
/// and writes it to the AV stream. Socket failures end the talk session.
/// </summary>
private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
{
    try
    {
        lock (_obj)
        {
            if (_bTalking && _avstream != null)
            {
                byte[] bSrc = e.RawData;
                int totBytes = bSrc.Length;

                if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                {
                    // FIX: wrap both streams in using blocks so they are
                    // disposed even if Read throws — the original called
                    // Close/Dispose manually afterwards and leaked on exception.
                    using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                    using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws))
                    {
                        // Convert in place; 25000 bytes caps one packet.
                        totBytes = helpStm.Read(bSrc, 0, 25000);
                    }
                }

                // A-law: one output byte per 16-bit sample.
                var enc = new byte[totBytes / 2];
                ALawEncoder.ALawEncode(bSrc, totBytes, enc);

                try
                {
                    _avstream.Write(enc, 0, enc.Length);
                    _avstream.Flush();
                }
                catch (SocketException)
                {
                    StopTalk();
                }
            }
        }
    }
    catch (Exception ex)
    {
        MainForm.LogExceptionToFile(ex);
        StopTalk();
    }
}
/// <summary>
/// Connects to an iSpyServer HTTP audio endpoint and streams A-law encoded
/// audio: each packet (up to 3200 bytes) is decoded to PCM, metered through
/// the sample channel, optionally mirrored to the listen-out provider, and
/// raised via DataAvailable. Reports StoppedByUser on clean exit and
/// DeviceLost on error.
/// </summary>
private void SpyServerListener()
{
    HttpWebRequest request = null;
    WebResponse response = null;
    Stream stream = null;
    var data = new byte[3200];
    try
    {
        request = (HttpWebRequest)WebRequest.Create(_source);
        request.Timeout = 10000;
        request.ReadWriteTimeout = 5000;
        response = request.GetResponse();
        stream = response.GetResponseStream();
        if (stream != null)
        {
            stream.ReadTimeout = 5000;
            while (!_stopEvent.WaitOne(0, false) && !MainForm.Reallyclose)
            {
                if (DataAvailable != null)
                {
                    int recbytesize = stream.Read(data, 0, 3200);
                    // Zero bytes from an HTTP stream means the server went away.
                    if (recbytesize == 0)
                        throw new Exception("lost stream");
                    // Decode A-law bytes into PCM.
                    byte[] dec;
                    ALawDecoder.ALawDecode(data, recbytesize, out dec);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(dec, 0, dec.Length);
                        // Drain via the sample channel so volume metering runs.
                        var sampleBuffer = new float[dec.Length];
                        _sampleChannel.Read(sampleBuffer, 0, dec.Length);
                        if (Listening && WaveOutProvider != null)
                        {
                            WaveOutProvider.AddSamples(dec, 0, dec.Length);
                        }
                        var da = new DataAvailableEventArgs((byte[])dec.Clone(), dec.Length);
                        DataAvailable(this, da);
                    }
                }
                else
                {
                    break;
                }
                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                    break;
            }
        }
        if (AudioFinished != null)
            AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
    }
    catch (Exception e)
    {
        //if (AudioSourceError!=null)
        //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        if (AudioFinished != null)
            AudioFinished(this, ReasonToFinishPlaying.DeviceLost);
        MainForm.LogExceptionToFile(e);
    }
    // NOTE(review): only the stream is closed here — `response` and `request`
    // are never closed/aborted, a potential connection leak; confirm.
    if (stream != null)
    {
        try
        {
            stream.Close();
        }
        catch
        {
        }
        stream = null;
    }
}
/// <summary>
/// Device audio handler for a microphone object: maintains the rolling
/// pre-record buffer or the writer queue, syncs the mic settings with the
/// live recording format, streams MP3-encoded audio to connected listener
/// sockets (hex-length chunked framing), and re-raises the raw data to
/// DataAvailable subscribers. Errors are logged and swallowed.
/// </summary>
void AudioDeviceDataAvailable(object sender, DataAvailableEventArgs e)
{
    if (Levels == null)
        return;
    try
    {
        if (WriterBuffer == null)
        {
            // Not recording: trim the rolling buffer to the configured number
            // of seconds, then append this packet.
            var dt = DateTime.Now.AddSeconds(0 - Micobject.settings.buffer);
            AudioBuffer.RemoveAll(p => p.TimeStamp < dt);
            AudioBuffer.Add(new AudioAction(e.RawData, Levels.Max(), DateTime.Now));
        }
        else
        {
            // Recording: hand the packet to the writer thread.
            WriterBuffer.Enqueue(new AudioAction(e.RawData, Levels.Max(), DateTime.Now));
        }
        if (Micobject.settings.needsupdate)
        {
            Micobject.settings.samples = AudioSource.RecordingFormat.SampleRate;
            Micobject.settings.channels = AudioSource.RecordingFormat.Channels;
            Micobject.settings.needsupdate = false;
        }
        // Drop disconnected listeners before deciding whether to encode.
        OutSockets.RemoveAll(p => p.Connected == false);
        if (OutSockets.Count>0)
        {
            if (_mp3Writer == null)
            {
                //_as = new AudioStreamer();
                //_as.Open(AudioCodec.AAC, AudioSource.RecordingFormat.BitsPerSample * AudioSource.RecordingFormat.SampleRate * AudioSource.RecordingFormat.Channels, AudioSource.RecordingFormat.SampleRate, AudioSource.RecordingFormat.Channels);
                // Lazily create the MP3 encoder at 22.05 kHz, 16-bit.
                _audioStreamFormat = new WaveFormat(22050, 16, Micobject.settings.channels);
                var wf = new MP3Stream.WaveFormat(_audioStreamFormat.SampleRate, _audioStreamFormat.BitsPerSample, _audioStreamFormat.Channels);
                _mp3Writer = new Mp3Writer(_outStream, wf, false);
            }
            // Historical unsafe AAC streaming path, kept for reference:
            //unsafe
            //{
            //    fixed (byte* p = e.RawData)
            //    {
            //        int byteLength = 0;
            //        int* l = &byteLength;
            //        byte* outStream = _as.WriteAudio(p, e.RawData.Length, l);
            //        byteLength = *l;
            //        if (byteLength > 0)
            //        {
            //            var toSend = new byte[byteLength];
            //            for (var i = 0; i < byteLength;i++ )
            //            {
            //                if (i==0)
            //                    Debug.Write(toSend[0]);
            //                toSend[i] = *(outStream + i);
            //            }
            //            foreach (Socket s in OutSockets)
            //            {
            //                s.Send(Encoding.ASCII.GetBytes(byteLength.ToString("X") + "\r\n"));
            //                s.Send(toSend);
            //                s.Send(Encoding.ASCII.GetBytes("\r\n"));
            //            }
            //        }
            //    }
            //}
            byte[] bSrc = e.RawData;
            int totBytes = bSrc.Length;
            // Resample the packet to the stream format.
            // NOTE(review): ws/helpStm are not in using blocks — they leak if
            // Read throws; confirm or wrap.
            var ws = new TalkHelperStream(bSrc, totBytes, AudioSource.RecordingFormat);
            var helpStm = new WaveFormatConversionStream(_audioStreamFormat, ws);
            totBytes = helpStm.Read(_bResampled, 0, 25000);
            ws.Close();
            ws.Dispose();
            helpStm.Close();
            helpStm.Dispose();
            _mp3Writer.Write(_bResampled, 0, totBytes);
            if (_outStream.Length > 0)
            {
                // Flush the encoder's output and send it to every listener,
                // framed as <hex length>\r\n<payload>\r\n.
                var bout = new byte[(int) _outStream.Length];
                _outStream.Seek(0, SeekOrigin.Begin);
                _outStream.Read(bout, 0, (int) _outStream.Length);
                _outStream.SetLength(0);
                _outStream.Seek(0, SeekOrigin.Begin);
                foreach (Socket s in OutSockets)
                {
                    s.Send(Encoding.ASCII.GetBytes(bout.Length.ToString("X") + "\r\n"));
                    s.Send(bout);
                    s.Send(Encoding.ASCII.GetBytes("\r\n"));
                }
            }
        }
        else
        {
            // No listeners: release the encoder.
            if (_mp3Writer != null)
            {
                _mp3Writer.Close();
                _mp3Writer = null;
            }
            //if (_as!=null)
            //{
            //    _as.Close();
            //    _as.Dispose();
            //    _as = null;
            //}
        }
        if (DataAvailable != null)
        {
            DataAvailable(this, new NewDataAvailableArgs((byte[])e.RawData.Clone()));
        }
    }
    catch (Exception ex)
    {
        Log.Error("",ex);//MainForm.LogExceptionToFile(ex);
    }
}
/// <summary>
/// Blocking listener loop: receives 6400-byte packets from the web socket,
/// meters them through the sample channel, optionally mirrors them to the
/// listen-out provider, and raises DataAvailable with a copy of each packet.
/// Reports StoppedByUser on clean exit and DeviceLost on exception; always
/// closes the socket.
/// </summary>
private void WebStreamListener()
{
    try
    {
        var data = new byte[6400];
        if (_socket != null)
        {
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                if (DataAvailable != null)
                {
                    int recbytesize = _socket.Receive(data, 0, 6400, SocketFlags.None);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(data, 0, recbytesize);
                        // Drain through the sample channel so volume metering runs.
                        var sampleBuffer = new float[recbytesize];
                        _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                        if (Listening)
                        {
                            WaveOutProvider?.AddSamples(data, 0, recbytesize);
                        }
                        // NOTE(review): clones the whole 6400-byte buffer even
                        // when fewer bytes arrived — confirm consumers tolerate it.
                        var da = new DataAvailableEventArgs((byte[])data.Clone());
                        DataAvailable(this, da);
                    }
                }
                else
                {
                    // No subscribers: stop listening.
                    break;
                }
                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                    break;
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        //if (AudioSourceError!=null)
        //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        MainForm.LogExceptionToFile(e, "WebStream");
    }
    if (_socket != null)
    {
        _socket.Close();
        _socket = null;
    }
}
/// <summary>
/// Raises DataAvailable with a defensive clone of the given sample bytes.
/// Does nothing when nobody is subscribed or the array is empty.
/// </summary>
public void AddSamples(byte[] samples)
{
    if (DataAvailable == null)
    {
        return;
    }

    if (samples.Length == 0)
    {
        return;
    }

    var payload = (byte[])samples.Clone();
    DataAvailable(this, new DataAvailableEventArgs(payload));
}
/// <summary>
/// Intentionally empty: a handler must stay subscribed so the source's
/// DataAvailable null-check sees a listener and keeps piping data.
/// </summary>
void _audioSource_DataAvailablePipe(object sender, DataAvailableEventArgs eventArgs)
{
    //event here because it's checked for null
}
// Appends each recorded packet to the buffered wave provider for downstream consumption.
void AudioSourceDataAvailable(object sender, DataAvailableEventArgs eventArgs)
{
    _bwp.AddSamples(eventArgs.RawData, 0, eventArgs.BytesRecorded);
}
/// <summary>
/// WaveIn capture callback: meters volume when a sample channel exists,
/// mirrors audio to the listen-out provider when listening, and raises
/// <c>DataAvailable</c> with a copy of the recorded bytes.
/// </summary>
void WaveInDataAvailable(object sender, WaveInEventArgs e)
{
    _isrunning = true;

    // Snapshot the delegate so a concurrent unsubscribe between the null
    // check and the invoke cannot throw — matches the snapshot pattern the
    // sibling WaveInDataAvailable handlers in this file already use.
    var handler = DataAvailable;
    if (handler == null)
    {
        return;
    }

    //forces processing of volume level without piping it out
    if (_sampleChannel != null)
    {
        var sampleBuffer = new float[e.BytesRecorded];
        _sampleChannel.Read(sampleBuffer, 0, e.BytesRecorded);
    }

    if (Listening && WaveOutProvider != null)
    {
        WaveOutProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
    }

    var da = new DataAvailableEventArgs((byte[])e.Buffer.Clone(), e.BytesRecorded);
    handler(this, da);
}
/// <summary>
/// Handles raw text from a connected client: a "myid:N" message assigns the
/// client its ID; anything else non-empty is relayed to DataRevieved prefixed
/// with the sender's ID.
/// </summary>
void c_NewDataAvailable(object sender, DataAvailableEventArgs e)
{
    Client c = (Client)sender;
    string da = c.RecieveString();
    if (da.Contains("myid"))
    {
        // NOTE(review): assumes the message is exactly "myid:<int>" —
        // int.Parse throws on anything else; confirm the protocol guarantees it.
        c.ID = int.Parse(da.Split(':')[1]);
        if (DataRevieved != null)
        {
            DataRevieved(this, new NewDataEventArgs() { Data = "id " + c.ID.ToString() + " connected" });
        }
    }
    else if (DataRevieved != null && !string.IsNullOrEmpty(da))
    {
        DataRevieved(this, new NewDataEventArgs() { Data = c.ID.ToString() + ": " + da });
    }
}
/// <summary>
/// WaveIn capture callback: drains the sample channel for volume metering,
/// forwards audio to the listen-out provider while listening, and raises
/// <c>DataAvailable</c> with a cloned copy of the recorded buffer.
/// </summary>
void WaveInDataAvailable(object sender, WaveInEventArgs e)
{
    if (_waveIn == null)
    {
        return;
    }

    var subscribers = DataAvailable;
    if (subscribers == null)
    {
        return;
    }

    var meterChannel = _sampleChannel;
    if (meterChannel == null)
    {
        return;
    }

    // Pull the packet through the sample channel so metering happens.
    var scratch = new float[e.BytesRecorded];
    meterChannel.Read(scratch, 0, e.BytesRecorded);

    if (Listening)
    {
        WaveOutProvider?.AddSamples(e.Buffer, 0, e.BytesRecorded);
    }

    var cloned = (byte[])e.Buffer.Clone();
    subscribers(this, new DataAvailableEventArgs(cloned, e.BytesRecorded));
}
/// <summary>
/// Handles the host's reply during P2P setup: deserializes the init datagram,
/// connects to every advertised peer, rewires the host client's events onto
/// the normal peer handlers (the host becomes peer 0), or records our
/// assigned client ID. Unparsable replies are forwarded to DataRevieved as
/// plain text.
/// </summary>
void hostClient_NewDataAvailable(object sender, DataAvailableEventArgs e)
{
    Client h = (Client)sender;
    string s = h.RecieveString();
    if (!string.IsNullOrWhiteSpace(s))
    {
        // "succes(s)" acknowledgements carry no payload; skip them.
        if (!s.ToLower().Contains("succes"))
        {
            try
            {
                Console.WriteLine(s);
                Data.DataGram d = Data.DataGram.Deserialize(s);
                if (d.InitData != null)
                {
                    if (d.InitData.ConnectIpAdresses != null)
                    {
                        // Connect to every already-present peer on port 3031.
                        foreach (Data.ClientData sr in d.InitData.ConnectIpAdresses)
                        {
                            Console.WriteLine(sr.IpAdress);
                            IPAddress ip = IPAddress.Parse(sr.IpAdress);
                            Client c = new Client(new IPEndPoint(ip, 3031));
                            c.ID = sr.ID;
                            c.ExceptionOccured += new EventHandler<ClientExceptionEventArgs>(c_ExceptionOccured);
                            c.Connected += new EventHandler<ConnectedEventArgs>(c_Connected);
                            c.NewDataAvailable += new EventHandler<DataAvailableEventArgs>(c_NewDataAvailable);
                            c.ConnectAsync();
                            if (Peers == null)
                                Peers = new List<Client>();
                            Peers.Add(c);
                        }
                        // Setup done: move the host client onto the regular
                        // peer handlers and treat it as peer 0 from now on.
                        hostClient.NewDataAvailable -= hostClient_NewDataAvailable;
                        hostClient.ExceptionOccured -= hostClient_ExceptionOccured;
                        if (Peers == null)
                            Peers = new List<Client>();
                        Peers.Add(hostClient);
                        hostClient.NewDataAvailable += new EventHandler<DataAvailableEventArgs>(c_NewDataAvailable);
                        hostClient.ExceptionOccured += new EventHandler<ClientExceptionEventArgs>(c_ExceptionOccured);
                        hostClient.ID = 0;
                    }
                    else if (d.InitData.ClientID != null)
                    {
                        // The host assigned us an ID.
                        this.ID = d.InitData.ClientID;
                        if (DataRevieved != null)
                            DataRevieved(this, new NewDataEventArgs() { Data = "connected with id: " + ID.ToString() });
                    }
                }
            }
            catch
            {
                // Not a datagram — surface the raw text to subscribers.
                if (DataRevieved != null)
                {
                    Console.WriteLine("error sending");
                    DataRevieved(this, new NewDataEventArgs() { Data = s });
                }
            }
        }
    }
}
private void OnDataAvailable(object sender, DataAvailableEventArgs e)
{
    // Dump the incoming payload to the console.
    // NOTE(review): if e.Data is a byte[] this prints the type name, not the
    // contents — confirm e.Data is a string in this event-args variant.
    Console.Write(e.Data);
}
/// <summary>
/// Streams microphone audio to a Foscam camera while talking: converts the
/// packet to the device wave format if needed, runs the camera's native
/// encoder, accumulates encoded bytes in _talkBuffer, and sends them in
/// 160-byte packets (40 ms each) wrapped in the MoIP command framing.
/// </summary>
private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
{
    try
    {
        lock (_obj)
        {
            if (_bTalking && _avstream != null)
            {
                byte[] bSrc = e.RawData;
                int totBytes = bSrc.Length;
                int j = -1;
                if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                {
                    // Convert to the camera's format via a temporary stream.
                    using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                    {
                        var bDst = new byte[44100];
                        totBytes = 0;
                        using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws))
                        {
                            // Read until the converter is exhausted.
                            while (j != 0)
                            {
                                j = helpStm.Read(bDst, totBytes, 10000);
                                totBytes += j;
                            }
                            helpStm.Close();
                        }
                        ws.Close();
                        bSrc = bDst;
                    }
                }
                if (_needsencodeinit)
                {
                    // Seed the encoder with the first two 16-bit samples.
                    _enc.EncodeInit(BitConverter.ToInt16(e.RawData, 0), BitConverter.ToInt16(e.RawData, 2));
                    _needsencodeinit = false;
                }
                var buff = new byte[25000];
                int c;
                unsafe
                {
                    // Pin source and destination for the native encoder call.
                    fixed (byte* src = bSrc)
                    {
                        fixed (byte* dst = buff)
                        {
                            c = (int)_enc.EncodeFoscam(src, totBytes, dst);
                        }
                    }
                }
                Buffer.BlockCopy(buff, 0, _talkBuffer, _talkDatalen, c);
                _talkDatalen += c;
                var dtms = (int)(Helper.Now - _dt).TotalMilliseconds;
                int i = 0;
                j = 0;
                try
                {
                    while (j + 160 < _talkDatalen)
                    {
                        //need to write out in 160 byte packets for 40ms
                        byte[] cmd = SInit(TalkData, MoIPAvFlag);
                        cmd = AddNext(cmd, dtms + (i * 40));
                        cmd = AddNext(cmd, _seq);
                        cmd = AddNext(cmd, (int)(Helper.Now - _dt).TotalSeconds);
                        cmd = AddNext(cmd, (byte)0x0);
                        cmd = AddNext(cmd, 160);
                        var pkt = new byte[160];
                        Buffer.BlockCopy(_talkBuffer, j, pkt, 0, 160);
                        cmd = AddNext(cmd, pkt, 160);
                        Encode(ref cmd);
                        _avstream.Write(cmd, 0, cmd.Length);
                        j += 160;
                        _seq++;
                        i++;
                    }
                    if (j < _talkDatalen)
                    {
                        // Carry the unsent remainder to the front of the buffer.
                        Buffer.BlockCopy(_talkBuffer, j, _talkBuffer, 0, _talkDatalen - j);
                        _talkDatalen = _talkDatalen - j;
                    }
                }
                catch (SocketException)
                {
                    StopTalk(true);
                }
            }
        }
    }
    catch (Exception ex)
    {
        MainForm.LogExceptionToFile(ex);
        StopTalk(true);
    }
}
/// <summary>
/// Intentionally a no-op: staying subscribed keeps the source piping (its
/// DataAvailable null-check sees a listener); the data is consumed elsewhere.
/// </summary>
private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
{
    //let it just pipe through
}
/// <summary>
/// Parses replies from the cp750 audio processor: accumulates reply text,
/// extracts the current fader level with faderRegex_, applies the pending
/// Increase/Decrease task, and sends the new fader level back to the device.
/// </summary>
private void OnDataAvailable(object sender, DataAvailableEventArgs e)
{
    replyBuffer_ += e.Data;
    //Console.WriteLine("Full: " + replyBuffer_);

    // Don't do anything with this stuff.
    if (task_ == Task.None)
    {
        waitingForReply_ = false;
        replyBuffer_ = "";
        return;
    }

    Match match = faderRegex_.Match(replyBuffer_);
    if (match.Success)
    {
        Group g = match.Groups[1];
        uint faderLevel = 0;
        if (!UInt32.TryParse(g.ToString(), out faderLevel))
        {
            Console.WriteLine("Failed to parse fader level.");
            Reset();
            return;
        }

        // Increase or decrease the current fader level.
        // NOTE(review): no range clamping — Decrease at level 0 wraps to
        // uint.MaxValue; confirm the device rejects out-of-range values.
        switch (task_)
        {
            case Task.Increase:
                faderLevel++;
                break;
            case Task.Decrease:
                faderLevel--;
                break;
            default:
                break;
        }

        // Tell the cp750 to change volume.
        waitingForReply_ = false;
        SendCommand("cp750.sys.fader " + faderLevel);
        task_ = Task.None;
        replyBuffer_ = "";
    }
}
/// <summary>
/// Blocking listener loop for a direct stream: reads PacketSize chunks,
/// meters them through the sample channel, optionally mirrors to the
/// listen-out provider, and raises DataAvailable with a copy of each chunk.
/// Reports StoppedByUser on clean exit and DeviceLost on exception; always
/// closes the stream.
/// </summary>
private void DirectStreamListener()
{
    try
    {
        var data = new byte[PacketSize];
        if (_stream != null)
        {
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                if (DataAvailable != null)
                {
                    int recbytesize = _stream.Read(data, 0, PacketSize);
                    if (recbytesize > 0)
                    {
                        if (_sampleChannel != null)
                        {
                            _waveProvider.AddSamples(data, 0, recbytesize);
                            // Drain via the sample channel so volume metering runs.
                            var sampleBuffer = new float[recbytesize];
                            _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                            if (Listening)
                            {
                                WaveOutProvider?.AddSamples(data, 0, recbytesize);
                            }
                            var da = new DataAvailableEventArgs((byte[])data.Clone(), recbytesize);
                            DataAvailable(this, da);
                        }
                    }
                    else
                    {
                        // End of stream.
                        break;
                    }
                    if (_stopEvent.WaitOne(Interval, false))
                        break;
                }
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        //if (AudioSourceError!=null)
        //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        MainForm.LogExceptionToFile(e, "Direct");
    }
    _stream?.Close();
    _stream = null;
}
/// <summary>
/// Connects to an iSpyServer HTTP audio endpoint, decodes A-law packets to
/// PCM, meters/mirrors them, and raises DataAvailable (handler snapshotted
/// per iteration). On exit, detaches the volume-meter callback and clears
/// any buffered audio.
/// </summary>
private void SpyServerListener()
{
    var data = new byte[3200];
    try
    {
        var request = (HttpWebRequest)WebRequest.Create(_source);
        request.Timeout = 10000;
        request.ReadWriteTimeout = 5000;
        var response = request.GetResponse();
        using (Stream stream = response.GetResponseStream())
        {
            if (stream == null)
                throw new Exception("Stream is null");
            stream.ReadTimeout = 5000;
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                // Snapshot the handler for this iteration.
                var da = DataAvailable;
                if (da != null)
                {
                    int recbytesize = stream.Read(data, 0, 3200);
                    // Zero bytes from an HTTP stream means the server went away.
                    if (recbytesize == 0)
                        throw new Exception("lost stream");
                    // Decode A-law bytes into PCM.
                    byte[] dec;
                    ALawDecoder.ALawDecode(data, recbytesize, out dec);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(dec, 0, dec.Length);
                        // Drain via the sample channel so volume metering runs.
                        var sampleBuffer = new float[dec.Length];
                        _sampleChannel.Read(sampleBuffer, 0, dec.Length);
                        if (Listening)
                        {
                            WaveOutProvider?.AddSamples(dec, 0, dec.Length);
                        }
                        var dae = new DataAvailableEventArgs((byte[])dec.Clone(), dec.Length);
                        da(this, dae);
                    }
                }
                else
                {
                    break;
                }
                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                    break;
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        var af = AudioFinished;
        af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        MainForm.LogExceptionToFile(e, "ispyServer");
    }
    if (_sampleChannel != null)
    {
        // Detach the volume-meter callback before dropping the channel.
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        _sampleChannel = null;
    }
    if (_waveProvider?.BufferedBytes > 0)
        _waveProvider.ClearBuffer();
    if (WaveOutProvider?.BufferedBytes > 0)
        WaveOutProvider?.ClearBuffer();
}
/// <summary>
/// Streams a remote WAV source over HTTP: reads roughly 1/10 second of PCM
/// per iteration, meters it through the sample channel, optionally mirrors
/// it to the listen-out provider, and raises DataAvailable. Always aborts
/// the HTTP request on exit.
/// </summary>
private void StreamWav()
{
    // NOTE(review): `res` is assigned but never used — likely vestigial.
    var res = ReasonToFinishPlaying.StoppedByUser;
    HttpWebRequest request = null;
    try
    {
        using (HttpWebResponse resp = ConnectionFactory.GetResponse(_source, out request))
        {
            //1/10 of a second, 16 byte buffer
            var data = new byte[((RecordingFormat.SampleRate / 4) * 2) * RecordingFormat.Channels];
            using (var stream = resp.GetResponseStream())
            {
                if (stream == null)
                    throw new Exception("Stream is null");
                while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
                {
                    // Snapshot the handler for this iteration.
                    var da = DataAvailable;
                    if (da != null)
                    {
                        int recbytesize = stream.Read(data, 0, data.Length);
                        // Zero bytes from an HTTP stream means the server went away.
                        if (recbytesize == 0)
                            throw new Exception("lost stream");
                        if (_sampleChannel != null)
                        {
                            _waveProvider.AddSamples(data, 0, recbytesize);
                            // Drain via the sample channel so volume metering runs.
                            var sampleBuffer = new float[recbytesize];
                            _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                            if (Listening && WaveOutProvider != null)
                            {
                                WaveOutProvider.AddSamples(data, 0, recbytesize);
                            }
                            var dae = new DataAvailableEventArgs((byte[])data.Clone(), recbytesize);
                            da(this, dae);
                        }
                    }
                    else
                    {
                        break;
                    }
                }
            }
        }
        if (AudioFinished != null)
            AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
    }
    catch (Exception ex)
    {
        var af = AudioFinished;
        if (af != null)
            af(this, ReasonToFinishPlaying.DeviceLost);
        MainForm.LogExceptionToFile(ex, "WavStream");
    }
    finally
    {
        // abort request
        if (request != null)
        {
            try
            {
                request.Abort();
            }
            catch
            {
            }
            request = null;
        }
    }
}