/// <summary>
/// Receives raw microphone samples and writes them into the resampler input buffer.
/// Samples which do not fit in the (full) buffer are dropped and counted, so silence
/// can be injected later to compensate and keep the pipeline in sync.
/// </summary>
/// <param name="data">Samples to buffer; the underlying array must not be null.</param>
/// <param name="format">Format of the supplied samples; must match the preprocessor input format.</param>
void IMicrophoneSubscriber.ReceiveMicrophoneData(ArraySegment<float> data, [NotNull] WaveFormat format)
{
    if (data.Array == null)
        throw new ArgumentNullException("data");
    if (!format.Equals(_resamplerInput.WaveFormat))
        throw new ArgumentException("Incorrect format supplied to preprocessor", "format");

    lock (_inputWriteLock)
    {
        //Write as much data into the input buffer as possible
        var written = _resamplerInput.Write(data);

        //BUG FIX: the second "fill the remaining space" write previously passed
        //`Math.Min(..., _resamplerInput.Capacity - _resamplerInput.Capacity)` as the count, which is
        //always zero, so it could never write anything. If the first write came up short the buffer
        //is full; the only thing to do is throw away the excess audio and keep an exact count of how
        //much was lost, so the appropriate amount of silence can be injected later to compensate.
        if (written < data.Count)
        {
            var lost = data.Count - written;
            Interlocked.Add(ref _droppedSamples, lost);
            Log.Warn("Lost {0} samples in the preprocessor (buffer full), injecting silence to compensate", lost);
        }
    }

    //Wake up the processing thread to drain the buffer
    _threadEvent.Set();
}
/// <summary>
/// Receives raw microphone samples and buffers as many as possible for preprocessing. Any samples
/// which do not fit in the (full) buffer are discarded and counted, so silence can be injected
/// later to keep everything in sync.
/// </summary>
/// <param name="data">Samples to buffer; the underlying array must not be null.</param>
/// <param name="format">Format of the supplied samples; must match the preprocessor input format.</param>
void IMicrophoneSubscriber.ReceiveMicrophoneData(ArraySegment<float> data, [NotNull] WaveFormat format)
{
    if (data.Array == null)
        throw new ArgumentNullException("data");

    // ReSharper disable once InconsistentlySynchronizedField (Justification: `_resamplerInput` is itself thread safe, so using it with synchronisation is unnecessary)
    if (!format.Equals(_resamplerInput.WaveFormat))
        throw new ArgumentException("Incorrect format supplied to preprocessor", "format");

    lock (_inputWriteLock)
    {
        //Buffer up as many of the incoming samples as will fit
        var accepted = _resamplerInput.Write(data);
        var dropped = data.Count - accepted;

        //A shortfall means the input buffer was full. The excess audio is simply thrown away, but
        //an exact count of the loss is kept - that many samples of silence will be injected later.
        if (dropped > 0)
        {
            Interlocked.Add(ref _droppedSamples, dropped);
            Log.Warn("Lost {0} samples in the preprocessor (buffer full), injecting silence to compensate", dropped);
        }
    }

    //Wake up the processing thread
    _threadEvent.Set();
}
/// <summary>
/// Handles incoming audio: resets the pipeline if a reset was requested, optionally records the
/// samples to the diagnostics output, then writes them through the encoder pipeline one
/// buffer-full at a time.
/// </summary>
/// <param name="inputSamples">Samples to encode; the underlying array must not be null.</param>
/// <param name="format">Format of the supplied samples; must match the expected input format.</param>
public void Handle(ArraySegment<float> inputSamples, WaveFormat format)
{
    //Consistency fix: validate arguments explicitly (the other audio entry points in this file do)
    //instead of failing with a NullReferenceException inside format.Equals or the write loop
    if (inputSamples.Array == null)
        throw new ArgumentNullException("inputSamples");
    if (format == null)
        throw new ArgumentNullException("format");

    //Reset the entire pipeline if a reset has been requested
    if (_resetRequired)
    {
        Log.Trace("Resetting encoder pipeline");

        _resampler.Reset();
        _input.Reset();
        _output.Reset();

        _resetRequired = false;
    }

    if (!format.Equals(_inputFormat))
        throw new ArgumentException(string.Format("Samples expected in format {0}, but supplied with format {1}", _inputFormat, format), "format");

    //Write the incoming audio to the diagnostics recording, if one is running
    if (_microphoneDiagnosticOutput != null)
        _microphoneDiagnosticOutput.WriteSamples(inputSamples);

    //Write samples to the pipeline (keep a running total of how many we have sent)
    //Keep sending until we've sent all of these samples
    var offset = 0;
    while (offset != inputSamples.Count)
    {
        offset += _input.Write(inputSamples.Array, offset + inputSamples.Offset, inputSamples.Count - offset);

        //Drain some of those samples just written, encode them and send them off
        EncodeFrames();
    }
}
/// <summary>
/// Given some samples consume them (as many as possible at a time) and send frames downstream (as frequently as possible)
/// </summary>
/// <param name="samples">Samples to consume; the underlying array must not be null.</param>
private void ConsumeSamples(ArraySegment<float> samples)
{
    //Consistency fix: validate the segment up front (the other ConsumeSamples variant in this
    //codebase does) instead of failing with a NullReferenceException inside the write loop
    if (samples.Array == null)
        throw new ArgumentNullException("samples");

    while (samples.Count > 0)
    {
        //Write as many samples as possible (up to capacity of buffer)
        var written = _rawMicSamples.Write(samples.Array, samples.Offset, samples.Count);

        //Shrink the input segment to exclude the samples we just wrote
        samples = new ArraySegment<float>(samples.Array, samples.Offset + written, samples.Count - written);

        //Drain as many of those samples as possible in frame sized chunks
        SendFrame();
    }
}
/// <summary>
/// Receives microphone samples, pushes them through the encoder pipeline and transmits the
/// resulting frames. During shutdown frames are sent one at a time so the stream can be
/// terminated cleanly on the first frame sent after stopping begins.
/// </summary>
/// <param name="inputSamples">Samples to encode and send.</param>
/// <param name="format">Format of the supplied samples; must match the expected input format.</param>
public void ReceiveMicrophoneData(ArraySegment<float> inputSamples, [NotNull] WaveFormat format)
{
    if (format == null)
        throw new ArgumentNullException("format");
    if (!format.Equals(_inputFormat))
        throw new ArgumentException(string.Format("Samples expected in format {0}, but supplied with format {1}", _inputFormat, format), "format");

    using (var handle = _encoder.Lock())
    {
        var encoder = handle.Value;

        //Nothing to do if the encoder was disposed on the main thread...
        if (_disposed)
            return;

        //...or if the final frame of this stream has already been sent
        if (_stopped)
            return;

        //Propogate the loss value on to the encoder
        encoder.PacketLoss = TransmissionPacketLoss;

        //Push every sample into the pipeline, encoding and sending frames as the buffer fills
        var consumed = 0;
        while (consumed != inputSamples.Count)
        {
            // ReSharper disable once AssignNullToNotNullAttribute (Justification: Array segment cannot be null)
            var remainder = new ArraySegment<float>(inputSamples.Array, inputSamples.Offset + consumed, inputSamples.Count - consumed);
            consumed += _input.Write(remainder);

            //Drain some of those samples just written, encode them and send them off.
            //While shutting down send a maximum of 1 packet per iteration.
            var sentFrames = EncodeFrames(encoder, _stopping ? 1 : int.MaxValue);

            //If a frame went out while stopping, that was the final frame - halt immediately
            if (sentFrames > 0 && _stopping)
            {
                _stopped = true;
                Log.Debug("Encoder stopped");
                break;
            }
        }
    }
}
/// <summary>
/// Pushes a frame of input audio through the resampler and runs each complete resampled
/// intermediate frame through the preprocessing pipeline.
/// </summary>
/// <param name="frame">Raw input audio frame.</param>
private void ProcessInputAudio(float[] frame)
{
    //Feed the frame into the resampler, a chunk at a time, until it has all been consumed
    for (var consumed = 0; consumed != frame.Length;)
    {
        consumed += _resamplerInput.Write(frame, consumed, frame.Length - consumed);

        //Drain the resampled data and push every full intermediate frame through the pipeline
        var target = new ArraySegment<float>(_intermediateFrame, 0, _intermediateFrame.Length);
        while (_resampledOutput.Read(target))
            PreprocessAudioFrame(_intermediateFrame);
    }
}
/// <summary>
/// Given some samples consume them (as many as possible at a time) and send frames downstream (as frequently as possible)
/// </summary>
/// <param name="samples">Samples to consume; the underlying array must not be null.</param>
private void ConsumeSamples(ArraySegment<float> samples)
{
    if (samples.Array == null)
        throw new ArgumentNullException("samples");

    var remaining = samples;
    while (remaining.Count > 0)
    {
        //Buffer up as much of the remaining audio as will fit
        var accepted = _rawMicSamples.Write(remaining);

        //Slice the segment down to just the part that has not been buffered yet
        // ReSharper disable once AssignNullToNotNullAttribute (Justification: Array segment cannot be null)
        remaining = new ArraySegment<float>(remaining.Array, remaining.Offset + accepted, remaining.Count - accepted);

        //Drain as many of those samples as possible in frame sized chunks
        SendFrame();
    }
}