/// <summary>
/// Capture loop: recycles any free buffers into the driver queue, then waits on
/// the callback event and dispatches every completed buffer via DataAvailable
/// until the capture state leaves Capturing.
/// </summary>
private void DoRecording()
{
    captureState = CaptureState.Capturing;

    // Hand every buffer that is not already queued back to the driver.
    foreach (var waveBuffer in buffers)
    {
        if (!waveBuffer.InQueue)
        {
            waveBuffer.Reuse();
        }
    }

    while (captureState == CaptureState.Capturing)
    {
        if (!callbackEvent.WaitOne())
        {
            continue;
        }

        // Requeue any buffers returned to us.
        foreach (var waveBuffer in buffers)
        {
            if (!waveBuffer.Done)
            {
                continue;
            }

            DataAvailable?.Invoke(this, new WaveInEventArgs(waveBuffer.Data, waveBuffer.BytesRecorded));

            // Only requeue while still capturing; a handler may have stopped us.
            if (captureState == CaptureState.Capturing)
            {
                waveBuffer.Reuse();
            }
        }
    }
}
/// <summary>
/// Raises the <c>DataAvailable</c> event.
/// </summary>
/// <remarks>
/// Uses a null-conditional invoke so the delegate field is read exactly once.
/// The original check-then-invoke pattern could throw a NullReferenceException
/// if the last handler unsubscribed between the null check and the call.
/// </remarks>
protected virtual void OnDataAvailable()
{
    DataAvailable?.Invoke(this, System.EventArgs.Empty);
}
/// <summary>
/// Native capture callback: peeks the platform's input buffer, copies it into
/// managed memory, releases the native buffer, and raises DataAvailable.
/// Errors are logged rather than propagated out of the callback.
/// </summary>
private void OnInputDataAvailable(IntPtr handle, uint length)
{
    if (length == 0U)
    {
        return;
    }

    IntPtr nativeBuffer = IntPtr.Zero;
    try
    {
        AudioIOUtil.ThrowIfError(AudioInput.Peek(_handle, out nativeBuffer, ref length));

        var managed = new byte[length];
        Marshal.Copy(nativeBuffer, managed, 0, (int)length);
        AudioInput.Drop(_handle);

        DataAvailable?.Invoke(this, new AudioDataAvailableEventArgs(managed));
    }
    catch (Exception e)
    {
        Log.Error(nameof(AsyncAudioCapture), e.Message);
    }
}
/// <summary>
/// Sensor polling loop: paces each read so a full cycle takes at least
/// ReadTime, raises DataAvailable with every sample while running, and puts
/// the sensor to sleep once the loop exits.
/// </summary>
private void Run()
{
    var stopwatch = new HighResolutionTimer();
    var previousElapsed = TimeSpan.Zero;

    while (IsRunning)
    {
        // Pad the cycle so reads happen no more often than ReadTime.
        if (previousElapsed < ReadTime)
        {
            Thread.Sleep(ReadTime - previousElapsed);
        }

        stopwatch.Start();
        var sensorData = RetrieveSensorData();
        previousElapsed = stopwatch.Elapsed;

        // Re-check: we may have been stopped while reading.
        if (IsRunning)
        {
            DataAvailable?.Invoke(this, sensorData);
        }

        stopwatch.Reset();
    }

    SetSleepMode(true);
}
/// <summary>
/// Initializes this resampler with the given input audio source and output format.
/// Attaches to the given source's event to start resampling as soon as <see cref="IStreamAudioSource.DataAvailable"/> is raised.
/// </summary>
/// <param name="audioSource">The source whose audio will be resampled.</param>
/// <param name="outputFormat">The format in which resampled audio is delivered.</param>
public void Initialize(IStreamAudioSource audioSource, WaveStreamAudioFormat outputFormat)
{
    this.WaveProviderAdapter = new NAudioStreamAudioSourceToWaveProviderAdapterSimple(audioSource);
    this.Resampler = new MediaFoundationResampler(this.WaveProviderAdapter, NAudioUtilities.ToNAudioWaveFormat(outputFormat));

    // Assign only after the resampler constructed successfully, so a throwing
    // constructor cannot leave us owning (and later disposing) the input source.
    this.WrappedAudioSource = audioSource;
    this.Format = outputFormat;

    // Forward data from the wrapped source through the resampler.
    audioSource.DataAvailable += (sender, args) =>
    {
        // Feed the incoming chunk into our adapter.
        WaveProviderAdapter.Write(args);

        // Drain the resampler and re-raise our own output event per chunk.
        for (int count = Resampler.Read(Buffer, 0, Buffer.Length);
             count > 0;
             count = Resampler.Read(Buffer, 0, Buffer.Length))
        {
            DataAvailable?.Invoke(this, new StreamAudioSourceDataEvent()
            {
                Buffer = new ArraySegment<byte>(Buffer, 0, count),
                Format = Format
            });
        }
    };
}
/// <summary>
/// Background capture loop for the Kinect audio source. Reads PCM from
/// _audioStream until aborted (or the app shuts down), forwards samples to the
/// wave/listening providers and raises DataAvailable; on exit it tears the
/// sensor down best-effort and signals PlayingFinished.
/// </summary>
private void AudioThread()
{
    _abort = new ManualResetEvent(false);

    while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
    {
        int dataLength = _audioStream.Read(_audioBuffer, 0, _audioBuffer.Length);

        // Skip the buffering work entirely when nobody is subscribed.
        if (DataAvailable != null)
        {
            _waveProvider.AddSamples(_audioBuffer, 0, dataLength);

            if (Listening)
            {
                WaveOutProvider.AddSamples(_audioBuffer, 0, dataLength);
            }

            //forces processing of volume level without piping it out
            // NOTE(review): sampleBuffer is sized in *bytes* but holds floats —
            // oversized rather than wrong; confirm _sampleChannel.Read semantics.
            var sampleBuffer = new float[dataLength];
            int read = _sampleChannel.Read(sampleBuffer, 0, dataLength);

            // Clone the byte buffer so handlers keep a stable copy while the
            // next Read overwrites _audioBuffer.
            DataAvailable?.Invoke(this, new DataAvailableEventArgs((byte[])_audioBuffer.Clone(), read));
        }
    }

    // Best-effort sensor teardown: unsubscribe frame events before Dispose.
    try
    {
        if (_sensor != null)
        {
            _sensor.AudioSource?.Stop();
            _sensor.Stop();
            _sensor.SkeletonFrameReady -= SensorSkeletonFrameReady;
            _sensor.ColorFrameReady -= SensorColorFrameReady;
            _sensor.DepthFrameReady -= SensorDepthFrameReady;
            _sensor.Dispose();
            _sensor = null;
        }
    }
    catch
    {
        // ignored
    }

    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
    }

    // Drop any samples still buffered so a restart begins clean.
    if (_waveProvider != null && _waveProvider.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }

    Listening = false;
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    _abort.Close();
}
/// <summary>
/// Decodes the (hard-coded) MP3 file to PCM in the target format and raises
/// DataAvailable once with the full payload, bracketing the work with
/// Busy/Finished StateChanged events.
/// </summary>
/// <param name="endpoint">Unused by this implementation; kept for interface compatibility.</param>
public void Start(string endpoint)
{
    StateChanged?.Invoke(this, new ConsumerStateEventArgs(State.Busy, "", SessionId));

    byte[] buffer = null;

    // TODO(review): hard-coded developer path — should come from configuration.
    using (var file = new Mp3FileReader(@"C:\Users\karel\Downloads\102-Keith-Million.mp3"))
    using (var stream = WaveFormatConversionStream.CreatePcmStream(file))
    using (var raw = new RawSourceWaveStream(stream, format))
    {
        buffer = new byte[raw.Length];

        // Stream.Read may return fewer bytes than requested; the original single
        // Read call could silently deliver a partially filled buffer. Loop until
        // the buffer is full or the stream ends.
        int total = 0;
        while (total < buffer.Length)
        {
            int read = raw.Read(buffer, total, buffer.Length - total);
            if (read == 0)
            {
                break; // end of stream
            }
            total += read;
        }
    }

    if (buffer != null && buffer.Length > 0)
    {
        DataAvailable?.Invoke(this, new DataEventArgs(buffer, buffer.Length));
    }

    StateChanged?.Invoke(this, new ConsumerStateEventArgs(State.Finished, "", SessionId));
}
/// <summary>
/// Network driver thread: connects to host (or address) on 'port', then pumps
/// queued outbound bytes to the socket and drains inbound bytes into
/// receiveBuffer until 'running' is cleared or the socket faults.
/// </summary>
private void ThreadProc()
{
    logger.DebugFormat("NetDriver thread start");
    running = true;

    Socket socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
    socket.NoDelay = true; // low latency preferred over coalesced sends

    try
    {
        // Prefer the host name when one was supplied, otherwise the raw address.
        if (!string.IsNullOrEmpty(host))
        {
            socket.Connect(host, port);
        }
        else
        {
            socket.Connect(address, port);
        }

        while (running)
        {
            // NOTE(review): sendBuffer is snapshotted and cleared without a lock;
            // if producers append from another thread, bytes added between
            // ToArray() and Clear() would be lost — confirm the locking contract.
            if (sendBuffer.Count > 0)
            {
                var data = sendBuffer.ToArray();
                sendBuffer.Clear();
                socket.Send(data, SocketFlags.None);
                //logger.DebugFormat("NetDriver send data: {0}", Utilities.ByteArrayToString(data));
            }

            // Poll waits up to 100 microseconds; this also paces the loop.
            if (socket.Poll(100, SelectMode.SelectRead) && socket.Available > 0)
            {
                //logger.DebugFormat("NetDriver data available...");
                while (socket.Available > 0)
                {
                    var data = new byte[socket.Available];
                    socket.Receive(data, SocketFlags.Partial);
                    receiveBuffer.AddRange(data);
                    //logger.DebugFormat("NetDriver received data: {0}", Utilities.ByteArrayToString(data));
                }

                notify?.Invoke();
                wait.Set(); // wake any thread blocked waiting for inbound data
            }
        }

        socket.Shutdown(SocketShutdown.Both);
    }
    catch (ThreadInterruptedException)
    {
        //logger.DebugFormat("NetDriver thread connect interrupted");
    }
    catch (SocketException ex)
    {
        logger.ErrorFormat("NetDriver socket error: {0}", ex.Message);
    }

    socket.Close();
    running = false;
    logger.DebugFormat("NetDriver thread exit");
}
/// <summary>
/// Recording callback: wraps the native buffer in a BufferProvider for
/// subscribers and asks the library to continue capturing.
/// </summary>
/// <returns>Always true so recording keeps running.</returns>
bool Processing(int Handle, IntPtr Buffer, int Length, IntPtr User)
{
    // Null-conditional invoke reads the delegate once, avoiding the
    // check-then-invoke race present in the original.
    DataAvailable?.Invoke(new BufferProvider(Buffer, Length, BufferKind));
    return true;
}
/// <summary>
/// Encodes each incoming audio chunk with the configured encoder and re-raises
/// DataAvailable carrying the encoded bytes.
/// </summary>
void AudioProviderOnDataAvailable(object Sender, DataAvailableEventArgs DataAvailableEventArgs)
{
    // Grow the shared output buffer if this chunk needs more room.
    _audioEncoder.EnsureBufferIsSufficient(ref _encodedBuffer, DataAvailableEventArgs.Length);

    int bytesEncoded = _audioEncoder.Encode(DataAvailableEventArgs.Buffer, 0, DataAvailableEventArgs.Length, _encodedBuffer, 0);

    DataAvailable?.Invoke(this, new DataAvailableEventArgs(_encodedBuffer, bytesEncoded));
}
/// <summary>
/// Capture callback: copies the native samples into a managed buffer, then
/// raises DataAvailable on the thread pool so the driver callback returns fast.
/// </summary>
private void Procedure(int handle, int channel, IntPtr buffer, int length, IntPtr user)
{
    byte[] managed = GetBuffer(length);
    Marshal.Copy(buffer, managed, 0, length);

    Task.Run(() => DataAvailable?.Invoke(this, new DataAvailableEventArgs(managed, length)));
}
/// <summary>
/// Capture callback: copies the native samples into a managed buffer, then
/// raises DataAvailable on the thread pool so the driver callback returns fast.
/// </summary>
void Procedure(int Handle, int Channel, IntPtr Buffer, int Length, IntPtr User)
{
    byte[] managed = GetBuffer(Length);
    Marshal.Copy(Buffer, managed, 0, Length);

    Task.Run(() => DataAvailable?.Invoke(this, new DataAvailableEventArgs(managed, Length)));
}
/// <summary>
/// Forwards captured wave data to subscribers as a streaming-audio buffer event.
/// </summary>
/// <remarks>Dead commented-out wave-file writing code removed.</remarks>
static void OnDataAvailable(object sender, WaveInEventArgs e)
{
    DataAvailable?.Invoke(sender, new StreamingAudioBufferAvailableEventArgs(e.Buffer, e.BytesRecorded));
}
/// <summary>
/// Drains the capture stream into the shared read buffer, raising
/// DataAvailable once per chunk until the stream reports no more data.
/// </summary>
private void OnSoundDataAvailable(object sender, DataAvailableEventArgs dataAvailableEventArgs)
{
    for (int count = _stream.Read(_readBuffer, 0, _readBuffer.Length);
         count > 0;
         count = _stream.Read(_readBuffer, 0, _readBuffer.Length))
    {
        DataAvailable?.Invoke(_readBuffer, 0, count);
    }
}
/// <summary>
/// Recording callback: copies the native block into a managed buffer, raises
/// DataAvailable synchronously, and requests that capture continue.
/// </summary>
bool RecordProcedure(int handle, IntPtr ptr, int length, IntPtr user)
{
    byte[] managed = GetBuffer(length);
    Marshal.Copy(ptr, managed, 0, length);

    DataAvailable?.Invoke(this, new DataAvailableEventArgs(managed, length));
    return true;
}
/// <summary>
/// Recording callback: copies the native block into a managed buffer, raises
/// DataAvailable synchronously, and requests that capture continue.
/// </summary>
bool RecordProcedure(int Handle, IntPtr Ptr, int Length, IntPtr User)
{
    byte[] managed = GetBuffer(Length);
    Marshal.Copy(Ptr, managed, 0, Length);

    DataAvailable?.Invoke(this, new DataAvailableEventArgs(managed, Length));
    return true;
}
/// <summary>
/// Splits interleaved float samples into (left, right) pairs and raises
/// DataAvailable once per frame, stepping by the capture channel count.
/// </summary>
private void ProcessStereoData(object sender, WaveInEventArgs e)
{
    WaveBuffer buffer = new(e.Buffer) { ByteBufferCount = e.BytesRecorded };
    int count = buffer.FloatBufferCount;

    // Hoisted: loop-invariant channel count was re-read every iteration.
    int channels = _capture.WaveFormat.Channels;

    // Bound by i + 1 so a trailing partial frame (or a mono format routed here
    // by mistake) cannot read FloatBuffer[i + 1] past the recorded samples.
    for (int i = 0; i + 1 < count; i += channels)
    {
        DataAvailable?.Invoke(buffer.FloatBuffer[i], buffer.FloatBuffer[i + 1]);
    }
}
/// <summary>
/// Timer tick: when the socket reports data pending, reads it and notifies
/// subscribers with the payload.
/// </summary>
private void Timer_Elapsed(object sender, ElapsedEventArgs e)
{
    if (socket.Available())
    {
        currentData = Read();
        if (currentData != null)
        {
            // Null-conditional invoke: the original unconditional Invoke threw
            // NullReferenceException whenever no handler was subscribed.
            DataAvailable?.Invoke(currentData, null);
        }
    }
}
/// <summary>
/// Capture callback: lazily grows the reusable managed buffer, copies the
/// native samples into it, and raises DataAvailable synchronously.
/// Note: the same array is handed to every event, so handlers must consume it
/// before the next callback overwrites it.
/// </summary>
void Procedure(int Handle, int Channel, IntPtr Buffer, int Length, IntPtr User)
{
    bool tooSmall = _buffer == null || _buffer.Length < Length;
    if (tooSmall)
    {
        _buffer = new byte[Length];
    }

    Marshal.Copy(Buffer, _buffer, 0, Length);
    DataAvailable?.Invoke(this, new DataAvailableEventArgs(_buffer, Length));
}
/// <summary>
/// Relays device audio data to subscribers, cloning the raw bytes so each
/// handler owns its copy; handler exceptions are logged, not propagated.
/// </summary>
public void AudioDeviceDataAvailable(object sender, DataAvailableEventArgs e)
{
    try
    {
        var rawCopy = (byte[])e.RawData.Clone();
        DataAvailable?.Invoke(this, new NewDataAvailableArgs(rawCopy));
    }
    catch (Exception ee)
    {
        Logger.Error("AudioDeviceDataAvailable error occured:" + ee.Message);
    }
}
/// <summary>
/// Copies the native audio block into the reusable managed buffer (growing it
/// when too small) and raises DataAvailable.
/// </summary>
void Processing(IntPtr Buffer, int Length)
{
    if (_buffer is null || _buffer.Length < Length)
    {
        _buffer = new byte[Length];
    }

    Marshal.Copy(Buffer, _buffer, 0, Length);
    DataAvailable?.Invoke(this, new DataAvailableEventArgs(_buffer, Length));
}
/// <summary>
/// Registers a subscription for the given system property on the sync context
/// and immediately publishes its current value via DataAvailable.
/// </summary>
public void SubscribeToSystemValue(SystemProperty property)
{
    _context.Post(state =>
    {
        _systemValueSubscriptions.Add(property, GetSystemValueProperty(property));

        var args = new DataAvailableEventArgs();
        args.SystemValues.Add(property, _systemValueSubscriptions[property].GetValue());

        DataAvailable?.Invoke(this, args);
    }, null);
}
/// <summary>
/// Handles mono capture data: every float sample is published as both the
/// left and the right channel value.
/// </summary>
private void ProcessMonoData(object sender, WaveInEventArgs e)
{
    WaveBuffer samples = new(e.Buffer) { ByteBufferCount = e.BytesRecorded };

    int total = samples.FloatBufferCount;
    for (int index = 0; index < total; index++)
    {
        float sample = samples.FloatBuffer[index];
        DataAvailable?.Invoke(sample, sample);
    }
}
/// <summary>
/// Registers a subscription for one position/column pair on the sync context
/// and immediately publishes its current value via DataAvailable.
/// </summary>
public void SubscribeToPosition(int positionId, string column)
{
    _context.Post(state =>
    {
        _positionSubscriptions.Add(positionId, column);

        var args = new DataAvailableEventArgs();
        args.PositionValues.Add((positionId, column), _positionSubscriptions.Get(positionId, column).GetValue());

        DataAvailable?.Invoke(this, args);
    }, null);
}
/// <summary>
/// Drains the capture source, prepends any bytes left over from the previous
/// callback, Opus-encodes whole segments of _bytesPerSegment bytes, stores the
/// remainder for next time, and raises DataAvailable with the encoded frames.
/// </summary>
private void SoundInSourceOnDataAvailable(object sender, DataAvailableEventArgs dataAvailableEventArgs)
{
    int read;
    var buffer = new byte[dataAvailableEventArgs.ByteCount];

    while ((read = _captureSource.Read(buffer, 0, buffer.Length)) > 0)
    {
        // Bytes that did not fill a whole segment last time are re-joined first.
        var notEncodedLength = _notEncodedBuffer?.Length ?? 0;
        var soundBuffer = new byte[read + notEncodedLength];

        //Fill the soundbuffer with _notEncodedBuffer
        if (notEncodedLength > 0)
        {
            Buffer.BlockCopy(_notEncodedBuffer, 0, soundBuffer, 0, notEncodedLength);
        }

        //Fill the soundbuffer with the data
        Buffer.BlockCopy(buffer, 0, soundBuffer, notEncodedLength, read);

        // How many whole segments we can encode, and where the leftover starts.
        var segmentCount = (int)Math.Floor((double)soundBuffer.Length / _bytesPerSegment);
        var segmentsEnd = segmentCount * _bytesPerSegment;
        var notEncodedCount = soundBuffer.Length - segmentsEnd;

        // Stash the trailing partial segment for the next callback.
        _notEncodedBuffer = new byte[notEncodedCount];
        Buffer.BlockCopy(soundBuffer, segmentsEnd, _notEncodedBuffer, 0, notEncodedCount);

        // NOTE(review): 'return' (not 'continue') abandons any data still
        // readable from _captureSource this callback — confirm intentional.
        if (segmentCount == 0)
        {
            return;
        }

        var dataBuffers = new byte[segmentCount][];
        var dataBufferLengths = new int[segmentCount];

        // Encode each whole segment directly from the pinned managed buffer.
        unsafe
        {
            fixed(byte *soundBufferPtr = soundBuffer)
            for (int i = 0; i < segmentCount; i++)
            {
                int len;
                dataBuffers[i] = _opusEncoder.Encode(soundBufferPtr + _bytesPerSegment * i, _bytesPerSegment, out len);
                dataBufferLengths[i] = len;
            }
        }

        DataAvailable?.Invoke(this, new DataInfoAvailableEventArgs(new VoiceChatDataInfo(dataBuffers, dataBufferLengths)));
    }
}
/// <summary>
/// Recording loop: keeps requesting samples from the Media Foundation source
/// reader and raises DataAvailable whenever the callback captured a new one.
/// </summary>
private void DoRecord()
{
    while (Recording)
    {
        sourceReader.ReadSample(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, 0, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);

        if (!callback.NewSample)
        {
            continue;
        }

        int byteCount = callback.Read(out byte[] sample);
        DataAvailable?.Invoke(this, new WaveInEventArgs(sample, byteCount));
    }
}
/// <summary>
/// Publishes a private copy of the recorded bytes; empty callbacks are ignored.
/// The copy ensures handlers are isolated from later reuse of 'buffer'.
/// </summary>
private void OnDataAvailable(byte[] buffer, int bytesRecorded)
{
    if (bytesRecorded <= 0)
    {
        return;
    }

    var snapshot = new byte[bytesRecorded];
    Array.Copy(buffer, snapshot, snapshot.Length);
    DataAvailable?.Invoke(snapshot);
}
/// <summary>
/// Wraps an NAudio capture device: re-raises its DataAvailable events in the
/// Captura event shape and mirrors its wave format.
/// </summary>
protected NAudioProvider(IWaveIn WaveIn)
{
    _waveIn = WaveIn;

    _waveIn.DataAvailable += (sender, args) =>
        DataAvailable?.Invoke(this, new DataAvailableEventArgs(args.Buffer, args.BytesRecorded));

    NAudioWaveFormat = WaveIn.WaveFormat;
    WaveFormat = WaveIn.WaveFormat.ToCaptura();
}
/// <summary>
/// Starts a fire-and-forget pump that reads SourceStream into a fixed buffer
/// and raises DataAvailable per chunk; raises Stopped with the cause when the
/// stream ends, is cancelled, or faults. Returns immediately.
/// </summary>
public Task Start()
{
    // Replace any previous cancellation source; Start may be called again.
    Cancellation?.Dispose();
    Cancellation = new CancellationTokenSource();
    var token = Cancellation.Token;

    //start a background task
    Threading.Tasks.FireAndForget(async() =>
    {
        try
        {
            // NOTE(review): the same array backs every event — handlers must
            // consume the segment before the next ReadAsync overwrites it.
            byte[] buffer = new byte[19200];

            while (!token.IsCancellationRequested)
            {
                int read = await SourceStream.ReadAsync(buffer, 0, buffer.Length, token);
                if (read == 0)
                {
                    //end of stream reached
                    Stopped?.Invoke(this, new StreamAudioSourceStoppedEvent()
                    {
                        Cause = StreamAudioSourceStoppedCause.End
                    });
                    return;
                }

                DataAvailable?.Invoke(this, new StreamAudioSourceDataEvent()
                {
                    Buffer = new ArraySegment <byte>(buffer, 0, read),
                    Format = Format
                });
            }
        }
        catch (OperationCanceledException)
        {
            // Cooperative cancellation via the token created above.
            Stopped?.Invoke(this, new StreamAudioSourceStoppedEvent()
            {
                Cause = StreamAudioSourceStoppedCause.Stopped
            });
        }
        catch (Exception ex)
        {
            // Surface the fault to subscribers, then rethrow so the
            // fire-and-forget wrapper can observe the exception too.
            Stopped?.Invoke(this, new StreamAudioSourceStoppedEvent()
            {
                Cause = StreamAudioSourceStoppedCause.Exception,
                Exception = ex
            });
            throw;
        }
    });

    return(Task.CompletedTask);
}
/// <summary>
/// Capture callback: grows the reusable buffer when needed, copies the native
/// block in, raises DataAvailable(buffer, length), and asks capture to continue.
/// </summary>
bool Procedure(int Handle, IntPtr Buffer, int Length, IntPtr User)
{
    bool needsRealloc = _buffer == null || _buffer.Length < Length;
    if (needsRealloc)
    {
        _buffer = new byte[Length];
    }

    Marshal.Copy(Buffer, _buffer, 0, Length);
    DataAvailable?.Invoke(_buffer, Length);
    return true;
}