// Convert samples from the driver's native format to the 64-bit doubles used internally.
// Big-endian and packed 24-bit formats are not supported.
private static void ConvertSamples(IntPtr In, ASIOSampleType InType, Audio.SampleBuffer Out)
{
    switch (InType)
    {
        //case ASIOSampleType.Int16MSB:
        //case ASIOSampleType.Int24MSB:
        //case ASIOSampleType.Int32MSB:
        //case ASIOSampleType.Float32MSB:
        //case ASIOSampleType.Float64MSB:
        //case ASIOSampleType.Int32MSB16:
        //case ASIOSampleType.Int32MSB18:
        //case ASIOSampleType.Int32MSB20:
        //case ASIOSampleType.Int32MSB24:
        case ASIOSampleType.Int16LSB: Audio.Util.LEi16ToLEf64(In, Out.Raw, Out.Count); break;
        //case ASIOSampleType.Int24LSB:
        case ASIOSampleType.Int32LSB: Audio.Util.LEi32ToLEf64(In, Out.Raw, Out.Count); break;
        case ASIOSampleType.Float32LSB: Audio.Util.LEf32ToLEf64(In, Out.Raw, Out.Count); break;
        case ASIOSampleType.Float64LSB: Audio.Util.CopyMemory(Out.Raw, In, Out.Count * sizeof(double)); break;
        //case ASIOSampleType.Int32LSB16:
        //case ASIOSampleType.Int32LSB18:
        //case ASIOSampleType.Int32LSB20:
        //case ASIOSampleType.Int32LSB24:
        default: throw new NotImplementedException("Unsupported sample type");
    }
}
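// The buffer-switch handlers below also call ConvertSamples overloads that are not
// shown in this excerpt: a forward form taking a raw output pointer and count, and a
// reverse form converting doubles back to the device's sample format. A minimal
// sketch of both, assuming Audio.Util exposes mirror-image conversion helpers
// (LEf64ToLEi16, LEf64ToLEi32, and LEf64ToLEf32 are hypothetical names inferred by
// symmetry with the helpers used above):
private static void ConvertSamples(IntPtr In, ASIOSampleType InType, IntPtr Out, int Count)
{
    switch (InType)
    {
        case ASIOSampleType.Int16LSB: Audio.Util.LEi16ToLEf64(In, Out, Count); break;
        case ASIOSampleType.Int32LSB: Audio.Util.LEi32ToLEf64(In, Out, Count); break;
        case ASIOSampleType.Float32LSB: Audio.Util.LEf32ToLEf64(In, Out, Count); break;
        case ASIOSampleType.Float64LSB: Audio.Util.CopyMemory(Out, In, Count * sizeof(double)); break;
        default: throw new NotImplementedException("Unsupported sample type");
    }
}

private static void ConvertSamples(IntPtr In, IntPtr Out, ASIOSampleType OutType, int Count)
{
    switch (OutType)
    {
        case ASIOSampleType.Int16LSB: Audio.Util.LEf64ToLEi16(In, Out, Count); break;     // hypothetical helper
        case ASIOSampleType.Int32LSB: Audio.Util.LEf64ToLEi32(In, Out, Count); break;     // hypothetical helper
        case ASIOSampleType.Float32LSB: Audio.Util.LEf64ToLEf32(In, Out, Count); break;   // hypothetical helper
        case ASIOSampleType.Float64LSB: Audio.Util.CopyMemory(Out, In, Count * sizeof(double)); break;
        default: throw new NotImplementedException("Unsupported sample type");
    }
}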
private void OnBufferSwitch(int Index, ASIOBool Direct)
{
    // Convert the device's input buffers to doubles for the callback.
    Audio.SampleBuffer[] a = new Audio.SampleBuffer[input.Length];
    for (int i = 0; i < input.Length; ++i)
    {
        a[i] = input[i].Samples;
        ConvertSamples(input[i].Info.buffers[Index], input[i].Type, a[i].Raw, a[i].Count);
    }
    Audio.SampleBuffer[] b = new Audio.SampleBuffer[output.Length];
    for (int i = 0; i < output.Length; ++i)
        b[i] = output[i].Samples;

    callback(buffer, a, b, sampleRate);

    // Convert the callback's output back to the device's sample format.
    for (int i = 0; i < output.Length; ++i)
        ConvertSamples(b[i].Raw, output[i].Info.buffers[Index], output[i].Type, b[i].Count);
}
private void OnBufferSwitch(int Index, ASIOBool Direct)
{
    Audio.SampleBuffer[] a = new Audio.SampleBuffer[input.Length];
    for (int i = 0; i < input.Length; ++i)
    {
        a[i] = input[i].Samples;
        // Lock the buffer for writing and convert the device input into it.
        using (Audio.RawLock l = new Audio.RawLock(a[i], false, true))
            ConvertSamples(input[i].Info.buffers[Index], input[i].Type, l, l.Count);
    }
    Audio.SampleBuffer[] b = new Audio.SampleBuffer[output.Length];
    for (int i = 0; i < output.Length; ++i)
        b[i] = output[i].Samples;

    callback(buffer, a, b, sampleRate);

    for (int i = 0; i < output.Length; ++i)
    {
        // Lock the buffer for reading and convert it back to the device format.
        using (Audio.RawLock l = new Audio.RawLock(b[i], true, false))
            ConvertSamples(l, output[i].Info.buffers[Index], output[i].Type, l.Count);
    }
}
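// Audio.RawLock is not shown in this excerpt. From its usage above, it is an
// IDisposable that grants access to a SampleBuffer's raw memory, with the two bools
// presumably declaring read/write intent, and it converts implicitly to IntPtr so it
// can be passed straight to ConvertSamples. A minimal sketch under those assumptions:
public sealed class RawLock : IDisposable
{
    private readonly SampleBuffer buffer;

    public RawLock(SampleBuffer Buffer, bool Read, bool Write)
    {
        buffer = Buffer;
        // A real implementation would pin/synchronize the buffer here,
        // honoring the declared Read/Write intent.
    }

    public int Count { get { return buffer.Count; } }

    // Lets a RawLock be passed where an IntPtr is expected.
    public static implicit operator IntPtr(RawLock Lock) { return Lock.buffer.Raw; }

    public void Dispose() { /* unpin/release the buffer */ }
}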
public Stream(Guid DeviceId, Audio.Stream.SampleHandler Callback, Channel[] Input, Channel[] Output)
    : base(Input, Output)
{
    Log.Global.WriteLine(MessageType.Info, "Instantiating ASIO stream with {0} input channels and {1} output channels.", Input.Length, Output.Length);
    asio = new AsioObject(DeviceId);
    asio.Init(IntPtr.Zero);
    callback = Callback;

    // Just use the driver's preferred buffer size.
    bufferSize = asio.BufferSize.Preferred;

    ASIOBufferInfo[] infos = new ASIOBufferInfo[Input.Length + Output.Length];
    for (int i = 0; i < Input.Length; ++i)
    {
        infos[i].isInput = ASIOBool.True;
        infos[i].channelNum = Input[i].Index;
    }
    for (int i = 0; i < Output.Length; ++i)
    {
        infos[Input.Length + i].isInput = ASIOBool.False;
        infos[Input.Length + i].channelNum = Output[i].Index;
    }

    ASIOCallbacks callbacks = new ASIOCallbacks()
    {
        bufferSwitch = OnBufferSwitch,
        sampleRateDidChange = OnSampleRateChange,
        asioMessage = OnAsioMessage,
        bufferSwitchTimeInfo = OnBufferSwitchTimeInfo
    };
    asio.CreateBuffers(infos, bufferSize, callbacks);

    // Create input buffers.
    input = new BufferInfo[Input.Length];
    inputBuffers = new Audio.SampleBuffer[Input.Length];
    for (int i = 0; i < Input.Length; ++i)
    {
        input[i] = new BufferInfo(infos[i], Input[i].Type);
        inputBuffers[i] = new Audio.SampleBuffer(bufferSize);
    }

    // Create output buffers.
    output = new BufferInfo[Output.Length];
    outputBuffers = new Audio.SampleBuffer[Output.Length];
    for (int i = 0; i < Output.Length; ++i)
    {
        output[i] = new BufferInfo(infos[Input.Length + i], Output[i].Type);
        outputBuffers[i] = new Audio.SampleBuffer(bufferSize);
    }

    sampleRate = asio.SampleRate;
    asio.Start();
}
private static double AmplifySignal(Audio.SampleBuffer Samples, double Gain)
{
    double peak = 0.0;
    for (int i = 0; i < Samples.Count; ++i)
    {
        double v = Samples[i];
        v *= Gain;
        peak = Math.Max(peak, Math.Abs(v));
        Samples[i] = v;
    }
    return peak;
}
public Buffer(WAVEFORMATEX Format, int Count)
{
    handle = GCHandle.Alloc(this);
    size = BlockAlignedSize(Format, Count);
    samples = new Audio.SampleBuffer(Count) { Tag = this };
    header = new WAVEHDR();
    header.lpData = Marshal.AllocHGlobal(size);
    header.dwUser = (IntPtr)handle;
    header.dwBufferLength = (uint)size;
    header.dwFlags = 0;
    pin = GCHandle.Alloc(header, GCHandleType.Pinned);
}
private static double AmplifySignal(Audio.SampleBuffer Signal, double Gain)
{
    double peak = 0.0;
    using (Audio.SamplesLock samples = new Audio.SamplesLock(Signal, true, true))
    {
        for (int i = 0; i < samples.Count; ++i)
        {
            double v = samples[i];
            v *= Gain;
            peak = Math.Max(peak, Math.Abs(v));
            samples[i] = v;
        }
    }
    return peak;
}
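// A usage sketch (the 2x gain and the 1.0 clipping threshold are illustrative, not
// from the original code): returning the post-gain peak makes it cheap to detect
// clipping after amplifying in place.
//
//     double peak = AmplifySignal(signal, 2.0);
//     if (peak > 1.0)
//         Log.Global.WriteLine(MessageType.Info, "Signal is clipping (peak = {0}).", peak);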
public Buffer(WAVEFORMATEX Format, int Count)
{
    samples = new Audio.SampleBuffer(Count) { Tag = this };
    int size = BlockAlignedSize(Format, Count);
    // Use a pinned managed array for the sample data instead of unmanaged memory.
    data = new byte[size];
    dataPin = GCHandle.Alloc(data, GCHandleType.Pinned);
    header = new WAVEHDR();
    headerPin = GCHandle.Alloc(header, GCHandleType.Pinned);
    header.lpData = dataPin.AddrOfPinnedObject();
    header.dwBufferLength = (uint)size;
    header.dwFlags = 0;
}
private void Proc()
{
    Audio.SampleBuffer[] input = new Audio.SampleBuffer[] { };
    Audio.SampleBuffer[] output = new Audio.SampleBuffer[] { };
    long samples = 0;
    DateTime start = DateTime.Now;
    while (run)
    {
        // Run at ~50 callbacks/second. This doesn't need to be super precise. In
        // practice, Thread.Sleep is going to be +/- 10s of ms, but we'll still deliver
        // the right number of samples on average.
        Thread.Sleep(20);
        double elapsed = (DateTime.Now - start).TotalSeconds;
        int needed_samples = (int)(Math.Round(elapsed * SampleRate) - samples);
        callback(needed_samples, input, output, SampleRate);
        samples += needed_samples;
    }
}
private void Proc()
{
    // Send 60 chunks/second. This code won't be perfectly accurate if 60 doesn't divide SampleRate.
    int count = (int)(SampleRate / 60);
    Audio.SampleBuffer[] input = new Audio.SampleBuffer[] { };
    Audio.SampleBuffer[] output = new Audio.SampleBuffer[] { };
    long t0 = Util.Timer.Counter;
    while (run)
    {
        long t1 = Util.Timer.Counter;
        // Divide in floating point: if Frequency is an integral type, integer division
        // would truncate the elapsed time to whole seconds and starve the callback.
        if ((t1 - t0) / (double)Util.Timer.Frequency > count / SampleRate)
        {
            callback(count, input, output, SampleRate);
            t0 = t1;
        }
        else
        {
            Thread.Sleep(0);
        }
    }
}
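// Util.Timer is not shown in this excerpt. A minimal sketch of what it plausibly
// wraps, using .NET's high-resolution Stopwatch timestamps (the names and placement
// are assumptions, not the original implementation):
static class Timer
{
    // Raw high-resolution tick count (QueryPerformanceCounter on Windows).
    public static long Counter { get { return System.Diagnostics.Stopwatch.GetTimestamp(); } }

    // Ticks per second, for converting Counter deltas to elapsed seconds.
    public static long Frequency { get { return System.Diagnostics.Stopwatch.Frequency; } }
}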
private void Proc() { Thread.CurrentThread.Name = "WaveAudio Stream"; try { Log.Global.WriteLine(MessageType.Info, "Entering streaming thread"); EventWaitHandle[] events = waveIn.Select(i => i.Callback).Concat(waveOut.Select(i => i.Callback)).ToArray(); Audio.SampleBuffer[] input = new Audio.SampleBuffer[waveIn.Length]; Audio.SampleBuffer[] output = new Audio.SampleBuffer[waveOut.Length]; while (!stop) { // TODO: Why can't we use this? //if (!WaitHandle.WaitAll(events, 100)) // continue; // Read from the inputs. for (int i = 0; i < waveIn.Length; ++i) { InBuffer b = waveIn[i].GetBuffer(); if (b == null) { return; } using (Audio.RawLock l = new Audio.RawLock(b.Samples, false, true)) ConvertSamples(b.Data, format, l, l.Count); b.Record(); input[i] = b.Samples; } // Get an available buffer from the outputs. for (int i = 0; i < waveOut.Length; ++i) { OutBuffer b = waveOut[i].GetBuffer(); if (b == null) { return; } output[i] = b.Samples; } // Call the callback. callback(buffer, input, output, format.nSamplesPerSec); // Play the results. for (int i = 0; i < output.Length; ++i) { OutBuffer b = (OutBuffer)output[i].Tag; using (Audio.RawLock l = new Audio.RawLock(b.Samples, true, false)) ConvertSamples(l, b.Data, format, l.Count); b.Play(); } } } catch (Exception Ex) { Log.Global.WriteLine(MessageType.Error, "Unhandled exception on streaming thread '{0}': {1}", Ex.GetType().FullName, Ex.ToString()); } Log.Global.WriteLine(MessageType.Info, "Exiting streaming thread"); }
public Buffer(ASIOBufferInfo Info, ASIOSampleType Type, int Count)
{
    info = Info;
    type = Type;
    samples = new Audio.SampleBuffer(Count);
}
private void Proc() { Thread.CurrentThread.Name = "WaveAudio Stream"; try { Log.Global.WriteLine(MessageType.Info, "Entering streaming thread"); Audio.SampleBuffer[] input = new Audio.SampleBuffer[waveIn.Length]; Audio.SampleBuffer[] output = new Audio.SampleBuffer[waveOut.Length]; while (!stop) { // Read from the inputs. for (int i = 0; i < waveIn.Length; ++i) { InBuffer b = null; do { b = waveIn[i].GetBuffer(); } while (b == null && !stop); if (b != null) { ConvertSamples(b.Data, format, b.Samples.Raw, b.Samples.Count); b.Record(); input[i] = b.Samples; } } // Get an available buffer from the outputs. for (int i = 0; i < waveOut.Length; ++i) { OutBuffer b = null; do { b = waveOut[i].GetBuffer(); } while (b == null && !stop); if (b != null) { output[i] = b.Samples; } } if (!stop) { Debug.Assert(input.All(i => i != null)); Debug.Assert(output.All(i => i != null)); // Call the callback. callback(buffer, input, output, format.nSamplesPerSec); // Play the results. for (int i = 0; i < output.Length; ++i) { OutBuffer b = (OutBuffer)output[i].Tag; ConvertSamples(b.Samples.Raw, b.Data, format, b.Samples.Count); b.Play(); } } } } catch (Exception Ex) { Log.Global.WriteLine(MessageType.Error, "Unhandled exception on streaming thread '{0}': {1}", Ex.GetType().FullName, Ex.ToString()); } Log.Global.WriteLine(MessageType.Info, "Exiting streaming thread"); }
private void OnBufferSwitch(int Index, ASIOBool Direct)
{
    Audio.SampleBuffer[] a = new Audio.SampleBuffer[input.Length];
    for (int i = 0; i < input.Length; ++i)
    {
        a[i] = input[i].Samples;
        using (Audio.RawLock l = new Audio.RawLock(a[i], false, true))
            ConvertSamples(input[i].Info.buffers[Index], input[i].Type, l, l.Count);
    }
    Audio.SampleBuffer[] b = new Audio.SampleBuffer[output.Length];
    for (int i = 0; i < output.Length; ++i)
        b[i] = output[i].Samples;

    callback(buffer, a, b, sampleRate);

    for (int i = 0; i < output.Length; ++i)
    {
        using (Audio.RawLock l = new Audio.RawLock(b[i], true, false))
            ConvertSamples(l, output[i].Info.buffers[Index], output[i].Type, l.Count);
    }
}