/// <summary>
/// ASIO driver callback: fills the output buffers for the next block,
/// raising AudioAvailable first when input channels are active.
/// </summary>
/// <param name="inputChannels">Pointers to the driver's input channel buffers.</param>
/// <param name="outputChannels">Pointers to the driver's output channel buffers.</param>
void driver_BufferUpdate(IntPtr[] inputChannels, IntPtr[] outputChannels)
{
    if (this.NumberOfInputChannels > 0)
    {
        // Snapshot the handler so a concurrent unsubscribe cannot null it mid-call.
        var handler = AudioAvailable;
        if (handler != null)
        {
            var eventArgs = new AsioAudioAvailableEventArgs(
                inputChannels,
                outputChannels,
                nbSamples,
                driver.Capabilities.InputChannelInfos[0].type);
            handler(this, eventArgs);
            // The subscriber may have filled the output buffers itself;
            // in that case there is nothing left to do.
            if (eventArgs.WrittenToOutputBuffers)
            {
                return;
            }
        }
    }

    if (this.NumberOfOutputChannels <= 0)
    {
        return;
    }

    int bytesRead = sourceStream.Read(waveBuffer, 0, waveBuffer.Length);
    if (bytesRead < waveBuffer.Length)
    {
        // End of the input data reached - zero the remainder so no stale audio plays.
        Array.Clear(waveBuffer, bytesRead, waveBuffer.Length - bytesRead);
    }

    // Convert the managed wave buffer into the driver's native output format.
    unsafe
    {
        // TODO : check if it's better to lock the buffer at initialization?
        fixed (void* pBuffer = &waveBuffer[0])
        {
            convertor(new IntPtr(pBuffer), outputChannels, NumberOfOutputChannels, nbSamples);
        }
    }

    if (bytesRead == 0)
    {
        if (AutoStop)
        {
            Stop(); // this can cause hanging issues
        }
        HasReachedEnd = true;
    }
}
/// <summary>
/// Driver buffer update callback to fill the wave buffer.
/// </summary>
/// <param name="inputChannels">The input channels.</param>
/// <param name="outputChannels">The output channels.</param>
void driver_BufferUpdate(IntPtr[] inputChannels, IntPtr[] outputChannels)
{
    if (this.NumberOfInputChannels > 0)
    {
        // Snapshot the delegate so a concurrent unsubscribe cannot race us to null.
        var audioAvailable = AudioAvailable;
        if (audioAvailable != null)
        {
            var args = new AsioAudioAvailableEventArgs(inputChannels, outputChannels, nbSamples,
                driver.Capabilities.InputChannelInfos[0].type);
            audioAvailable(this, args);
            // The subscriber may have filled the output buffers itself.
            if (args.WrittenToOutputBuffers)
            {
                return;
            }
        }
    }
    if (this.NumberOfOutputChannels > 0)
    {
        int read = sourceStream.Read(waveBuffer, 0, waveBuffer.Length);
        if (read < waveBuffer.Length)
        {
            // FIX: we reached the end of the input data - zero the tail of the
            // buffer so stale samples from the previous callback are not played
            // out through the convertor below.
            Array.Clear(waveBuffer, read, waveBuffer.Length - read);
        }
        // Call the convertor
        unsafe
        {
            // TODO : check if it's better to lock the buffer at initialization?
            fixed (void* pBuffer = &waveBuffer[0])
            {
                convertor(new IntPtr(pBuffer), outputChannels, NumberOfOutputChannels, nbSamples);
            }
        }
        if (read == 0)
        {
            Stop();
        }
    }
}
/// <summary>
/// ASIO input callback: for every mapped source, copies that line's native
/// samples into its float buffer and raises its BufferReady event.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">Event args carrying the raw input buffer pointers and block size.</param>
private void asioDriver_AudioAvailable(object sender, AsioAudioAvailableEventArgs e)
{
    var index = 0;
    foreach (var input in Sources)
    {
        if (input.IsMapped)
        {
            floatBuffer[index] = BufferHelpers.Ensure(floatBuffer[index], e.SamplesPerBuffer);
            copySamplesToManagedMemory(index, e);
            (input as SignalSource).RaiseBufferReady(new RoutingEventArgs
            {
                Index = index,
                Format = formatPerLine,
                Count = e.SamplesPerBuffer,
                Buffer = floatBuffer[index]
            });
        }
        // FIX: advance the line index for every source. Previously `index` was
        // never incremented, so every mapped source read from InputBuffers[0]
        // and overwrote floatBuffer[0].
        index++;
    }
}
private unsafe void mAsio_AudioAvailable(object sender, NAudio.Wave.AsioAudioAvailableEventArgs e) {
/// <summary>
/// Driver buffer update callback to fill the wave buffer.
/// </summary>
/// <param name="inputChannels">The input channels.</param>
/// <param name="outputChannels">The output channels.</param>
void driver_BufferUpdate(IntPtr[] inputChannels, IntPtr[] outputChannels)
{
    if (this.NumberOfInputChannels > 0)
    {
        // Snapshot the delegate so a concurrent unsubscribe cannot race us to null.
        var audioAvailable = AudioAvailable;
        if (audioAvailable != null)
        {
            var args = new AsioAudioAvailableEventArgs(inputChannels, outputChannels, nbSamples,
                driver.Capabilities.InputChannelInfos[0].type);
            audioAvailable(this, args);
            // The subscriber may have filled the output buffers itself.
            if (args.WrittenToOutputBuffers)
                return;
        }
    }
    if (this.NumberOfOutputChannels > 0)
    {
        int read = sourceStream.Read(waveBuffer, 0, waveBuffer.Length);
        if (read < waveBuffer.Length)
        {
            // FIX: we reached the end of the input data - zero the tail of the
            // buffer so stale samples from the previous callback are not played
            // out through the convertor below.
            Array.Clear(waveBuffer, read, waveBuffer.Length - read);
        }
        // Call the convertor
        unsafe
        {
            // TODO : check if it's better to lock the buffer at initialization?
            fixed (void* pBuffer = &waveBuffer[0])
            {
                convertor(new IntPtr(pBuffer), outputChannels, NumberOfOutputChannels, nbSamples);
            }
        }
        if (read == 0)
        {
            Stop();
        }
    }
}
/// <summary>
/// Handles the AudioAvailable event of the asioOut control.
/// Saves audio data (stored in buffers) into WaveFileWriter objects.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="AsioAudioAvailableEventArgs" /> instance containing the event data.</param>
private void asioOut_AudioAvailable(object sender, AsioAudioAvailableEventArgs e)
{
    sampleCount += e.SamplesPerBuffer;
    var inputBuffers = e.InputBuffers;
    for (int fileIndex = 0; fileIndex < ciNumberOfMicroconeAudioChannels; fileIndex++)
    {
        // For 7-channel (legacy) devices, duplicate buffer [0] so it exists in
        // Sound Files [0] & [1].
        int bufferIndex = (iNumberOfChannelsActual == 7 && fileIndex >= 1)
            ? fileIndex - 1
            : fileIndex;
        Marshal.Copy((IntPtr)inputBuffers[bufferIndex], (float[])audioBuff, 0, e.SamplesPerBuffer);
        soundFiles[fileIndex].WriteSamples(audioBuff, 0, e.SamplesPerBuffer);
    }
}
/// <summary>
/// Appends the current ASIO block to the output file as interleaved samples.
/// </summary>
void asioOut_AudioAvailable(object sender, AsioAudioAvailableEventArgs e)
{
    float[] interleaved = e.GetAsInterleavedSamples();
    writer.WriteSamples(interleaved, 0, interleaved.Length);
}
/// <summary>
/// Appends the current ASIO block to the output file as interleaved samples.
/// </summary>
void OnAsioOutAudioAvailable(object sender, AsioAudioAvailableEventArgs e)
{
    // GetAsInterleavedSamples() is flagged obsolete (warning 618) but remains
    // the simplest correct way to grab the whole block here.
#pragma warning disable 618
    float[] interleaved = e.GetAsInterleavedSamples();
#pragma warning restore 618
    writer.WriteSamples(interleaved, 0, interleaved.Length);
}
/// <summary>
/// ASIO input callback: extracts the right channel from the interleaved input,
/// runs it through the VST host, serializes the result to little-endian bytes
/// and sends one block over the connection when connected.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">Event args carrying the raw input buffer pointers and block size.</param>
private void AsioOut_AudioAvailable(object sender, AsioAudioAvailableEventArgs e)
{
    float[] sourceAudio = new float[e.SamplesPerBuffer * e.InputBuffers.Length];
    e.GetAsInterleavedSamples(sourceAudio);
    float[] proccesedAudio = new float[settings.BufferSize];
    byte[] result = new byte[settings.BufferSize * 4];
    byte[] sendingAudio = new byte[e.SamplesPerBuffer * 4];
    // Take every second interleaved sample (odd indices = right channel).
    for (int j = 1, q = 0; j < sourceAudio.Length; j += 2, q++)
    {
        proccesedAudio[q] = sourceAudio[j];
    }
    proccesedAudio = VSTHost.GetInstance().Procces(proccesedAudio);
    // FIX: serialize the whole float array with one raw block copy instead of
    // calling BitConverter.GetBytes four times per sample - each of those calls
    // allocated a fresh 4-byte array in a realtime audio callback. The byte
    // layout produced is identical.
    Buffer.BlockCopy(proccesedAudio, 0, result, 0, proccesedAudio.Length * 4);
    // NOTE(review): CopyTo requires result.Length <= sendingAudio.Length, i.e.
    // settings.BufferSize <= e.SamplesPerBuffer - confirm against the ASIO setup.
    result.CopyTo(sendingAudio, 0);
    if (connection.IsConnected)
    {
        connection.Send(sendingAudio);
    }
}
/// <summary>
/// Copies one ASIO input channel's native samples into floatBuffer[index],
/// converting each sample to a float (integer formats are scaled down by the
/// type's maximum magnitude; Float32LSB is copied as-is).
/// </summary>
/// <param name="index">Channel index into both floatBuffer and e.InputBuffers.</param>
/// <param name="e">Event args carrying the raw input buffer pointers, sample type and block size.</param>
/// <exception cref="NotSupportedException">Thrown for sample formats other than the four handled below.</exception>
private unsafe void copySamplesToManagedMemory(int index, AsioAudioAvailableEventArgs e)
{
    fixed (float* fixedFloatBuffer = floatBuffer[index])
    {
        float* pFloatBuffer = fixedFloatBuffer;
        // NOTE(review): the `index++` in each case below has no observable
        // effect - `index` is a by-value parameter and is never read again
        // after the InputBuffers lookup.
        switch (e.AsioSampleType)
        {
            case AsioSampleType.Int32LSB:
                int* pInt = (int*)e.InputBuffers[index++];
                for (int i = 0; i < e.SamplesPerBuffer; i++)
                {
                    *(pFloatBuffer++) = *(pInt++) / (float)Int32.MaxValue;
                }
                break;
            case AsioSampleType.Int16LSB:
                short* pShort = (short*)e.InputBuffers[index++];
                for (int i = 0; i < e.SamplesPerBuffer; i++)
                {
                    *(pFloatBuffer++) = *(pShort++) / (float)Int16.MaxValue;
                }
                break;
            case AsioSampleType.Int24LSB:
                byte* pByte = (byte*)e.InputBuffers[index++];
                for (int i = 0; i < e.SamplesPerBuffer; i++)
                {
                    // Assemble a signed 24-bit little-endian sample; the (sbyte)
                    // cast on the high byte sign-extends it before the shift.
                    int sample = pByte[0] | (pByte[1] << 8) | ((sbyte)pByte[2] << 16);
                    // 8388608 == 2^23, the magnitude of the 24-bit sample range.
                    *(pFloatBuffer++) = sample / 8388608f;
                    pByte += 3;
                }
                break;
            case AsioSampleType.Float32LSB:
                float* pFloat = (float*)e.InputBuffers[index++];
                for (int i = 0; i < e.SamplesPerBuffer; i++)
                {
                    *(pFloatBuffer++) = *(pFloat++);
                }
                break;
            default:
                throw new NotSupportedException("Unsupported audio format detected");
        }
    }
}
/// <summary>
/// ASIO input callback: appends each channel's incoming block to a circular
/// buffer (bytebuffer) that wraps at bbufferMax, and records the time spent
/// in the callback in timerlog.
/// </summary>
private void Input_AudioAvailable(object sender, AsioAudioAvailableEventArgs e)
{
    timer.Restart();
    // Lazily size the ring buffers from the first callback's block size.
    if (bytebuffer == null)
    {
        createByteBuffer(e.SamplesPerBuffer);
    }
    // over < 0 means this block does not fit before the end of the ring;
    // `first` is the portion that does fit before wrapping.
    over = bbufferMax - bwriteIndex - e.SamplesPerBuffer;
    first = e.SamplesPerBuffer + (over < 0 ? over : 0);
    for (int i = 0; i < e.InputBuffers.Length; i++)
    {
        Marshal.Copy(e.InputBuffers[i], bytebuffer[i], bwriteIndex, first);
    }
    bwriteIndex += first;
    if (bwriteIndex >= bbufferMax)
    {
        // Wrap the write cursor back into range.
        bwriteIndex -= bbufferMax;
    }
    if (over < 0)
    {
        // Copy the remainder of the block to the start of the ring.
        // NOTE(review): this second Marshal.Copy reads from the BEGINNING of
        // e.InputBuffers[i] again rather than from element `first` onward, so
        // the wrapped tail appears to duplicate the head of the block instead
        // of continuing it - verify whether the source pointer should be offset
        // (e.g. IntPtr.Add(e.InputBuffers[i], first * <element size>)).
        for (int i = 0; i < e.InputBuffers.Length; i++)
        {
            Marshal.Copy(e.InputBuffers[i], bytebuffer[i], bwriteIndex, over * -1);
        }
        bwriteIndex -= over;
    }
    bcount += e.SamplesPerBuffer;
    timer.Stop();
    timerlog.Add(timer.ElapsedMicroseconds);
}
/// <summary>
/// ASIO input callback: when input is not muted, extracts the right channel,
/// runs it through the VST host and serializes the result; sends a block over
/// the connection (silence when muted), mixes in any newly received remote
/// audio, and feeds local monitoring unless output is muted.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">Event args carrying the raw input buffer pointers and block size.</param>
private void AsioOut_AudioAvailable(object sender, AsioAudioAvailableEventArgs e)
{
    float[] sourceAudio = new float[e.SamplesPerBuffer * e.InputBuffers.Length];
    float[] proccesedAudio = new float[settings.BufferSize];
    byte[] result = new byte[settings.BufferSize * 4];
    byte[] sendingAudio = new byte[e.SamplesPerBuffer * 4];
    if (!IsInputMute)
    {
        //LRLRLRLRLRLR....
        e.GetAsInterleavedSamples(sourceAudio);
        // Take every second interleaved sample (odd indices = right channel).
        for (int j = 1, q = 0; j < sourceAudio.Length; j += 2, q++)
        {
            proccesedAudio[q] = sourceAudio[j];
        }
        proccesedAudio = VSTHost.GetInstance().Procces(proccesedAudio);
        // FIX: serialize the whole float array with one raw block copy instead
        // of calling BitConverter.GetBytes four times per sample - each of
        // those calls allocated a fresh 4-byte array in a realtime audio
        // callback. The byte layout produced is identical.
        Buffer.BlockCopy(proccesedAudio, 0, result, 0, proccesedAudio.Length * 4);
        // NOTE(review): CopyTo requires result.Length <= sendingAudio.Length,
        // i.e. settings.BufferSize <= e.SamplesPerBuffer - confirm setup.
        result.CopyTo(sendingAudio, 0);
    }
    // When muted, sendingAudio stays all zeros - silence is still transmitted.
    if (connection.IsConnected)
    {
        connection.Send(sendingAudio);
    }
    if (isAudioReceiving)
    {
        lock (receiveLocker)
        {
            if (isNewAudioAvailable)
            {
                incomingAudioBufferedWaveProvider.AddSamples(incomingBytes, 0, incomingBytes.Length);
                isNewAudioAvailable = false;
            }
        }
    }
    if (!IsOutputMute)
    {
        inputAudioBufferedWaveProvider.AddSamples(result, 0, result.Length);
    }
}