static void RunGame()
{
    // init picovoice platform
    string keywordPath = $"pico_chess_{_platform}.ppn";
    string contextPath = $"chess_{_platform}.rhn";
    using Picovoice picovoice = new Picovoice(keywordPath, WakeWordCallback, contextPath, InferenceCallback);

    DrawBoard("\n");

    // create and start recording
    short[] recordingBuffer = new short[picovoice.FrameLength];
    ALCaptureDevice captureDevice = ALC.CaptureOpenDevice(null, picovoice.SampleRate, ALFormat.Mono16, picovoice.FrameLength * 2);
    {
        ALC.CaptureStart(captureDevice);
        while (!_quitGame)
        {
            int samplesAvailable = ALC.GetAvailableSamples(captureDevice);
            if (samplesAvailable > picovoice.FrameLength)
            {
                ALC.CaptureSamples(captureDevice, ref recordingBuffer[0], picovoice.FrameLength);
                picovoice.Process(recordingBuffer);
            }
            Thread.Yield();
        }

        // stop and clean up resources
        Console.WriteLine("Bye!");
        ALC.CaptureStop(captureDevice);
        ALC.CaptureCloseDevice(captureDevice);
    }
}
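RunGame relies on several members that are not part of this snippet (_platform, _quitGame, WakeWordCallback, InferenceCallback, DrawBoard). A hedged sketch of what they might look like, assuming the Picovoice .NET SDK's callback shapes (a parameterless wake-word callback and an inference callback that receives an Inference); the platform string, intent name, and board drawing are purely illustrative:

// Hypothetical supporting members for RunGame(); names and values are illustrative.
private static readonly string _platform = "linux";   // suffix used in the keyword/context file names
private static volatile bool _quitGame = false;

// Invoked when the wake word is detected.
private static void WakeWordCallback()
{
    Console.WriteLine("Wake word detected; listening for a chess command...");
}

// Invoked when Rhino finalizes an inference from the follow-on command.
private static void InferenceCallback(Inference inference)
{
    if (inference.IsUnderstood && inference.Intent == "quit")
    {
        _quitGame = true;
    }
}

// Renders the current board state to the console.
private static void DrawBoard(string prefix)
{
    Console.Write(prefix);
    // ... board drawing omitted ...
}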
// ALC_API void ALC_APIENTRY alcCaptureSamples( ALCdevice *device, ALCvoid *buffer, ALCsizei samples );

/// <summary>This function completes a capture operation, and does not block.</summary>
/// <typeparam name="T">The buffer datatype.</typeparam>
/// <param name="device">A pointer to a capture device.</param>
/// <param name="buffer">A reference to a buffer, which must be large enough to accommodate the number of samples.</param>
/// <param name="samples">The number of samples to be retrieved.</param>
public static unsafe void CaptureSamples<T>(ALCaptureDevice device, ref T buffer, int samples) where T : unmanaged
{
    fixed (T* ptr = &buffer)
    {
        CaptureSamples(device, ptr, samples);
    }
}
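The ref overload above lets a caller read directly into the middle of a managed array without pinning it manually, which is how the recording snippets in this listing fill a fixed-length buffer incrementally. A minimal sketch of that pattern; the device settings and buffer length are illustrative:

// Fill a fixed-length recording incrementally using the ref overload.
short[] recording = new short[16000 * 5];   // five seconds at 16 kHz mono
ALCaptureDevice captureDevice = ALC.CaptureOpenDevice(null, 16000, ALFormat.Mono16, 2048);
ALC.CaptureStart(captureDevice);

int written = 0;
while (written < recording.Length)
{
    int available = ALC.GetAvailableSamples(captureDevice);
    if (available > 0)
    {
        int toRead = Math.Min(available, recording.Length - written);
        // ref recording[written] makes each read land directly after the previous one.
        ALC.CaptureSamples(captureDevice, ref recording[written], toRead);
        written += toRead;
    }
    Thread.Yield();
}

ALC.CaptureStop(captureDevice);
ALC.CaptureCloseDevice(captureDevice);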
private void DoRecording()
{
    int sampleRate = 44100;
    using MemoryStream stream = new MemoryStream();
    using BinaryWriter writer = new BinaryWriter(stream);
    short[] recording = new short[1024];
    int numSamples = 0;

    Thread.Sleep(200);

    ALCaptureDevice captureDevice = ALC.CaptureOpenDevice(null, sampleRate, ALFormat.Mono16, 1024);
    {
        ALC.CaptureStart(captureDevice);
        while (_record)
        {
            int current = 0;
            while (current < recording.Length)
            {
                int samplesAvailable = ALC.GetAvailableSamples(captureDevice);
                if (samplesAvailable > 512)
                {
                    int samplesToRead = Math.Min(samplesAvailable, recording.Length - current);
                    ALC.CaptureSamples(captureDevice, ref recording[current], samplesToRead);
                    current += samplesToRead;
                }
                Thread.Yield();
            }

            byte[] result = new byte[current * sizeof(short)];
            Buffer.BlockCopy(recording, 0, result, 0, result.Length);
            writer.Write(result);
            numSamples += current;
        }
        ALC.CaptureStop(captureDevice);
        ALC.CaptureCloseDevice(captureDevice);
    }

    writer.Flush();
    stream.Flush();
    WriteDataToFile(stream, numSamples, sampleRate);
}
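WriteDataToFile is referenced above but not included in this listing. A minimal sketch of what such a helper could look like, wrapping the captured 16-bit mono PCM in a WAV container; the method body, output path, and file layout are assumptions, not the original implementation:

// Hypothetical helper: wraps the raw 16-bit mono PCM accumulated in `stream` in a WAV file.
private void WriteDataToFile(MemoryStream stream, int numSamples, int sampleRate)
{
    const short channels = 1;
    const short bitsPerSample = 16;
    int dataSize = numSamples * (bitsPerSample / 8) * channels;

    using FileStream file = File.Create("recording.wav");   // output path is an assumption
    using BinaryWriter writer = new BinaryWriter(file);

    writer.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
    writer.Write(36 + dataSize);                               // RIFF chunk size
    writer.Write(System.Text.Encoding.ASCII.GetBytes("WAVE"));
    writer.Write(System.Text.Encoding.ASCII.GetBytes("fmt "));
    writer.Write(16);                                          // fmt chunk size (PCM)
    writer.Write((short)1);                                    // audio format: PCM
    writer.Write(channels);
    writer.Write(sampleRate);
    writer.Write(sampleRate * channels * (bitsPerSample / 8)); // byte rate
    writer.Write((short)(channels * (bitsPerSample / 8)));     // block align
    writer.Write(bitsPerSample);
    writer.Write(System.Text.Encoding.ASCII.GetBytes("data"));
    writer.Write(dataSize);
    writer.Write(stream.ToArray(), 0, dataSize);               // PCM payload captured above
}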
public static bool InitDevice()
{
    ALDevice audioDev = ALC.OpenDevice(null);
    AlcError err = ALC.GetError(audioDev);
    if (err != AlcError.NoError)
    {
        return false;
    }

    ALContext aLContext = ALC.CreateContext(audioDev, new int[0]);
    bool makeRs = ALC.MakeContextCurrent(aLContext);
    err = ALC.GetError(audioDev);
    if (!makeRs || err != AlcError.NoError)
    {
        return false;
    }

    //ALCdevice* inputDevice = alcCaptureOpenDevice(NULL, FREQ, AL_FORMAT_MONO16, FREQ / 2);
    ALCaptureDevice captureDev = ALC.CaptureOpenDevice(null, FREQ, ALFormat.Mono16, FREQ / 2); // FREQ
    ALC.CaptureStart(captureDev);
    err = ALC.GetError(audioDev);
    if (err != AlcError.NoError)
    {
        return false;
    }

    int[] buffer = AL.GenBuffers(16);
    err = ALC.GetError(audioDev);
    if (err != AlcError.NoError)
    {
        return false;
    }

    return true;
}
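InitDevice acquires a playback device, a context, a capture device, and a set of buffers, but stores none of the handles and has no teardown path. A hedged counterpart sketch, assuming the handles from InitDevice were saved to static fields so they can be released later; the field names are hypothetical:

// Hypothetical teardown matching InitDevice(); assumes the handles were kept in static fields.
private static ALDevice _audioDev;
private static ALContext _aLContext;
private static ALCaptureDevice _captureDev;
private static int[] _buffers;

public static void ShutdownDevice()
{
    ALC.CaptureStop(_captureDev);
    ALC.CaptureCloseDevice(_captureDev);

    if (_buffers != null)
    {
        AL.DeleteBuffers(_buffers);
    }

    ALC.MakeContextCurrent(ALContext.Null);
    ALC.DestroyContext(_aLContext);
    ALC.CloseDevice(_audioDev);
}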
public static extern unsafe void CaptureSamples(ALCaptureDevice device, void *buffer, int samples);
/// <summary>
/// Checks to see that the ALC_ENUMERATION_EXT extension is present. This will always be available in 1.1 devices or later.
/// </summary>
/// <param name="device">The device to check the extension is present for.</param>
/// <returns>If the ALC_ENUMERATION_EXT extension was present.</returns>
public static bool IsEnumerationExtensionPresent(ALCaptureDevice device) => IsExtensionPresent(device, "ALC_ENUMERATION_EXT");
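In practice this check is usually made before enumerating capture devices by name. A small sketch using the enumeration API shown elsewhere in this listing; querying the extension through ALCaptureDevice.Null (i.e. globally) is an assumption:

// List capture devices only if the enumeration extension is available.
if (ALC.IsEnumerationExtensionPresent(ALCaptureDevice.Null))
{
    foreach (string name in ALC.GetStringList(GetEnumerationStringList.CaptureDeviceSpecifier))
    {
        Console.WriteLine($"Capture device: {name}");
    }
}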
public static extern void GetInteger(ALCaptureDevice device, AlcGetInteger param, int size, out int data);
// ALC_API void ALC_APIENTRY alcGetIntegerv( ALCdevice *device, ALCenum param, ALCsizei size, ALCint *buffer );

/// <summary>
/// Gets the current number of available capture samples.
/// </summary>
/// <param name="device">The device.</param>
/// <returns>The number of capture samples available.</returns>
public static int GetAvailableSamples(ALCaptureDevice device)
{
    GetInteger(device, AlcGetInteger.CaptureSamples, 1, out int result);
    return result;
}
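The pattern used by every demo in this listing is to poll GetAvailableSamples and only call CaptureSamples once a full frame has accumulated, yielding in between so the loop does not spin a core. A condensed sketch of that loop; the frame size and exit condition are illustrative:

// Frame-by-frame poll-and-drain loop.
const int frameLength = 512;
short[] frame = new short[frameLength];

ALCaptureDevice captureDevice = ALC.CaptureOpenDevice(null, 16000, ALFormat.Mono16, frameLength * 2);
ALC.CaptureStart(captureDevice);

while (!Console.KeyAvailable)
{
    // Only drain once a full frame is sitting in the capture ring buffer.
    if (ALC.GetAvailableSamples(captureDevice) >= frameLength)
    {
        ALC.CaptureSamples(captureDevice, ref frame[0], frameLength);
        // ... hand `frame` to a consumer here ...
    }
    Thread.Yield();   // avoid busy-waiting for samples
}

ALC.CaptureStop(captureDevice);
ALC.CaptureCloseDevice(captureDevice);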
/// <summary>This function completes a capture operation, and does not block.</summary>
/// <typeparam name="T">The buffer datatype.</typeparam>
/// <param name="device">A pointer to a capture device.</param>
/// <param name="buffer">A buffer, which must be large enough to accommodate the number of samples.</param>
/// <param name="samples">The number of samples to be retrieved.</param>
public static void CaptureSamples<T>(ALCaptureDevice device, T[] buffer, int samples) where T : unmanaged
{
    CaptureSamples(device, ref buffer[0], samples);
}
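One caveat worth noting: on the native side, `samples` counts sample frames, so with a multi-channel format the T[] buffer must hold more elements than frames. A short sketch for stereo 16-bit capture, where each frame occupies two short elements; the sample rate, frame count, and ring-buffer size are assumptions:

// Stereo capture: `samples` counts frames, so the short[] needs 2 shorts per frame.
int frames = 512;
short[] stereoBuffer = new short[frames * 2];

ALCaptureDevice stereoDevice = ALC.CaptureOpenDevice(null, 44100, ALFormat.Stereo16, frames * 2);
ALC.CaptureStart(stereoDevice);

if (ALC.GetAvailableSamples(stereoDevice) >= frames)
{
    // Array overload shown above: forwards to the ref overload with buffer[0].
    ALC.CaptureSamples(stereoDevice, stereoBuffer, frames);
}

ALC.CaptureStop(stereoDevice);
ALC.CaptureCloseDevice(stereoDevice);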
public static extern unsafe void GetInteger(ALCaptureDevice device, AlcGetInteger param, int size, int *data);
public static extern void CaptureSamples(ALCaptureDevice device, ref short buffer, int samples);
/// <summary>
/// Creates an input audio stream, instantiates an instance of Porcupine object, and monitors the audio stream for
/// occurrences of the wake word(s). It prints the time of detection for each occurrence and the wake word.
/// </summary>
/// <param name="modelPath">Absolute path to the file containing model parameters. If not set it will be set to the default location.</param>
/// <param name="keywordPaths">Absolute paths to keyword model files. If not set it will be populated from the `keywords` argument.</param>
/// <param name="sensitivities">
/// Sensitivities for detecting keywords. Each value should be a number within [0, 1]. A higher sensitivity results in fewer
/// misses at the cost of increasing the false alarm rate. If not set, 0.5 will be used.
/// </param>
/// <param name="keywords">
/// List of keywords (phrases) for detection. The list of available (default) keywords can be retrieved
/// using `Porcupine.KEYWORDS`. If `keywordPaths` is set then this argument will be ignored.
/// </param>
/// <param name="audioDeviceIndex">Optional argument. If provided, audio is recorded from this input device. Otherwise, the default audio input device is used.</param>
/// <param name="outputPath">Optional argument. If provided, recorded audio will be stored in this location at the end of the run.</param>
public static void RunDemo(string modelPath, List<string> keywordPaths, List<string> keywords, List<float> sensitivities, int? audioDeviceIndex = null, string outputPath = null)
{
    Porcupine porcupine = null;
    BinaryWriter outputFileWriter = null;
    int totalSamplesWritten = 0;
    try
    {
        // init porcupine wake word engine
        porcupine = Porcupine.Create(modelPath, keywordPaths, keywords, sensitivities);

        // get keyword names for labeling detection results
        if (keywords == null)
        {
            keywords = keywordPaths.Select(k => Path.GetFileNameWithoutExtension(k).Split("_")[0]).ToList();
        }

        // open stream to output file
        if (!string.IsNullOrWhiteSpace(outputPath))
        {
            outputFileWriter = new BinaryWriter(new FileStream(outputPath, FileMode.OpenOrCreate, FileAccess.Write));
            WriteWavHeader(outputFileWriter, 1, 16, 16000, 0);
        }

        // choose audio device
        string deviceName = null;
        if (audioDeviceIndex != null)
        {
            List<string> captureDeviceList = ALC.GetStringList(GetEnumerationStringList.CaptureDeviceSpecifier).ToList();
            if (captureDeviceList != null && audioDeviceIndex.Value < captureDeviceList.Count)
            {
                deviceName = captureDeviceList[audioDeviceIndex.Value];
            }
            else
            {
                throw new ArgumentException("No input device found with the specified index. Use --show_audio_devices to show available inputs", "--audio_device_index");
            }
        }

        Console.Write("Listening for {");
        for (int i = 0; i < keywords.Count; i++)
        {
            Console.Write($" {keywords[i]}({sensitivities[i]})");
        }
        Console.Write(" }\n");

        // create and start recording
        short[] recordingBuffer = new short[porcupine.FrameLength];
        ALCaptureDevice captureDevice = ALC.CaptureOpenDevice(deviceName, 16000, ALFormat.Mono16, porcupine.FrameLength * 2);
        {
            ALC.CaptureStart(captureDevice);
            while (!Console.KeyAvailable)
            {
                int samplesAvailable = ALC.GetAvailableSamples(captureDevice);
                if (samplesAvailable > porcupine.FrameLength)
                {
                    ALC.CaptureSamples(captureDevice, ref recordingBuffer[0], porcupine.FrameLength);
                    int result = porcupine.Process(recordingBuffer);
                    if (result >= 0)
                    {
                        Console.WriteLine($"[{DateTime.Now.ToLongTimeString()}] Detected '{keywords[result]}'");
                    }

                    if (outputFileWriter != null)
                    {
                        foreach (short sample in recordingBuffer)
                        {
                            outputFileWriter.Write(sample);
                        }
                        totalSamplesWritten += recordingBuffer.Length;
                    }
                }
                Thread.Yield();
            }

            // stop and clean up resources
            Console.WriteLine("Stopping...");
            ALC.CaptureStop(captureDevice);
            ALC.CaptureCloseDevice(captureDevice);
        }
    }
    finally
    {
        if (outputFileWriter != null)
        {
            // write size to header and clean up
            WriteWavHeader(outputFileWriter, 1, 16, 16000, totalSamplesWritten);
            outputFileWriter.Flush();
            outputFileWriter.Dispose();
        }
        porcupine?.Dispose();
    }
}
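WriteWavHeader is called here and in the Rhino demo below but is not part of this listing. A minimal sketch of a compatible helper, assuming the parameters are (writer, channel count, bit depth, sample rate, total sample count) and that the header is rewritten at the start of the file once the final sample count is known; this is an illustration, not the original helper:

// Hypothetical 16-bit PCM WAV header writer matching the WriteWavHeader calls above.
private static void WriteWavHeader(BinaryWriter writer, ushort channelCount, ushort bitDepth, int sampleRate, int totalSampleCount)
{
    if (writer == null)
    {
        return;
    }

    writer.Seek(0, SeekOrigin.Begin);
    writer.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
    writer.Write((bitDepth / 8 * totalSampleCount) + 36);       // RIFF chunk size
    writer.Write(System.Text.Encoding.ASCII.GetBytes("WAVE"));
    writer.Write(System.Text.Encoding.ASCII.GetBytes("fmt "));
    writer.Write(16);                                           // fmt chunk size (PCM)
    writer.Write((ushort)1);                                    // audio format: PCM
    writer.Write(channelCount);
    writer.Write(sampleRate);
    writer.Write(sampleRate * channelCount * bitDepth / 8);     // byte rate
    writer.Write((ushort)(channelCount * bitDepth / 8));        // block align
    writer.Write(bitDepth);
    writer.Write(System.Text.Encoding.ASCII.GetBytes("data"));
    writer.Write(bitDepth / 8 * totalSampleCount);              // data chunk size
}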
public static extern void CaptureSamples(ALCaptureDevice device, IntPtr buffer, int samples);
public static void Main(string[] args)
{
    Console.WriteLine("Hello!");

    IEnumerable<string> devices = ALC.ALC.GetStringList(GetEnumerationStringList.DeviceSpecifier);
    Console.WriteLine($"Devices: {string.Join(", ", devices)}");

    // Get the default device, then go through all devices and select the AL soft device if it exists.
    string deviceName = ALC.ALC.GetString(ALDevice.Null, AlcGetString.DefaultDeviceSpecifier);
    foreach (string d in devices)
    {
        if (d.Contains("OpenAL Soft"))
        {
            deviceName = d;
        }
    }

    IEnumerable<string> allDevices = EnumerateAll.GetStringList(GetEnumerateAllContextStringList.AllDevicesSpecifier);
    Console.WriteLine($"All Devices: {string.Join(", ", allDevices)}");

    ALDevice device = ALC.ALC.OpenDevice(deviceName);
    ALContext context = ALC.ALC.CreateContext(device, (int[])null);
    ALC.ALC.MakeContextCurrent(context);

    CheckALError("Start");

    ALC.ALC.GetInteger(device, AlcGetInteger.MajorVersion, 1, out int alcMajorVersion);
    ALC.ALC.GetInteger(device, AlcGetInteger.MinorVersion, 1, out int alcMinorVersion);
    string alcExts = ALC.ALC.GetString(device, AlcGetString.Extensions);

    ALContextAttributes attrs = ALC.ALC.GetContextAttributes(device);
    Console.WriteLine($"Attributes: {attrs}");

    string exts = AL.AL.Get(ALGetString.Extensions);
    string rend = AL.AL.Get(ALGetString.Renderer);
    string vend = AL.AL.Get(ALGetString.Vendor);
    string vers = AL.AL.Get(ALGetString.Version);
    Console.WriteLine(
        $"Vendor: {vend}, \nVersion: {vers}, \nRenderer: {rend}, \nExtensions: {exts}, \nALC Version: {alcMajorVersion}.{alcMinorVersion}, \nALC Extensions: {alcExts}");

    Console.WriteLine("Available devices: ");
    IEnumerable<string> list = EnumerateAll.GetStringList(GetEnumerateAllContextStringList.AllDevicesSpecifier);
    foreach (string item in list)
    {
        Console.WriteLine(" " + item);
    }

    Console.WriteLine("Available capture devices: ");
    list = ALC.ALC.GetStringList(GetEnumerationStringList.CaptureDeviceSpecifier);
    foreach (string item in list)
    {
        Console.WriteLine(" " + item);
    }

    int auxSlot = 0;
    if (EFX.IsExtensionPresent(device))
    {
        Console.WriteLine("EFX extension is present!!");
        EFX.GenEffect(out int effect);
        EFX.Effect(effect, EffectInteger.EffectType, (int)EffectType.Reverb);
        EFX.GenAuxiliaryEffectSlot(out auxSlot);
        EFX.AuxiliaryEffectSlot(auxSlot, EffectSlotInteger.Effect, effect);
    }

    // Record four seconds of data
    CheckALError("Before record");
    short[] recording = new short[44100 * 4];
    ALCaptureDevice captureDevice = ALC.ALC.CaptureOpenDevice(null, 44100, ALFormat.Mono16, 1024);
    {
        ALC.ALC.CaptureStart(captureDevice);

        int current = 0;
        while (current < recording.Length)
        {
            int samplesAvailable = ALC.ALC.GetAvailableSamples(captureDevice);
            if (samplesAvailable > 512)
            {
                int samplesToRead = Math.Min(samplesAvailable, recording.Length - current);
                ALC.ALC.CaptureSamples(captureDevice, ref recording[current], samplesToRead);
                current += samplesToRead;
            }
            Thread.Yield();
        }

        ALC.ALC.CaptureStop(captureDevice);
    }
    CheckALError("After record");

    // Playback the recorded data
    CheckALError("Before data");
    AL.AL.GenBuffer(out int alBuffer);
    // short[] sine = new short[44100 * 1];
    // FillSine(sine, 4400, 44100);
    // FillSine(recording, 440, 44100);
    AL.AL.BufferData(alBuffer, ALFormat.Mono16, ref recording[0], recording.Length * 2, 44100);
    CheckALError("After data");

    AL.AL.Listener(ALListenerf.Gain, 0.1f);

    AL.AL.GenSource(out int alSource);
    AL.AL.Source(alSource, ALSourcef.Gain, 1f);
    AL.AL.Source(alSource, ALSourcei.Buffer, alBuffer);
    if (EFX.IsExtensionPresent(device))
    {
        EFX.Source(alSource, EFXSourceInteger3.AuxiliarySendFilter, auxSlot, 0, 0);
    }
    AL.AL.SourcePlay(alSource);

    Console.WriteLine("Before Playing: " + AL.AL.GetErrorString(AL.AL.GetError()));

    if (DeviceClock.IsExtensionPresent(device))
    {
        long[] clockLatency = new long[2];
        DeviceClock.GetInteger(device, GetInteger64.DeviceClock, clockLatency);
        Console.WriteLine("Clock: " + clockLatency[0] + ", Latency: " + clockLatency[1]);
        CheckALError(" ");
    }

    if (SourceLatency.IsExtensionPresent())
    {
        SourceLatency.GetSource(alSource, SourceLatencyVector2d.SecOffsetLatency, out Vector2d values);
        SourceLatency.GetSource(alSource, SourceLatencyVector2i.SampleOffsetLatency, out int values1, out int values2, out long values3);
        Console.WriteLine("Source latency: " + values);
        Console.WriteLine($"Source latency 2: {Convert.ToString(values1, 2)}, {values2}; {values3}");
        CheckALError(" ");
    }

    while (AL.AL.GetSourceState(alSource) == ALSourceState.Playing)
    {
        if (SourceLatency.IsExtensionPresent())
        {
            SourceLatency.GetSource(alSource, SourceLatencyVector2d.SecOffsetLatency, out Vector2d values);
            SourceLatency.GetSource(alSource, SourceLatencyVector2i.SampleOffsetLatency, out int values1, out int values2, out long values3);
            Console.WriteLine("Source latency: " + values);
            Console.WriteLine($"Source latency 2: {Convert.ToString(values1, 2)}, {values2}; {values3}");
            CheckALError(" ");
        }

        if (DeviceClock.IsExtensionPresent(device))
        {
            long[] clockLatency = new long[2];
            DeviceClock.GetInteger(device, GetInteger64.DeviceClock, 1, clockLatency);
            Console.WriteLine("Clock: " + clockLatency[0] + ", Latency: " + clockLatency[1]);
            CheckALError(" ");
        }

        Thread.Sleep(10);
    }

    AL.AL.SourceStop(alSource);

    // Test float32 format extension
    if (EXTFloat32.IsExtensionPresent())
    {
        Console.WriteLine("Testing float32 format extension with a sine wave...");

        float[] sine = new float[44100 * 2];
        for (int i = 0; i < sine.Length; i++)
        {
            sine[i] = MathF.Sin(440 * MathF.PI * 2 * (i / (float)sine.Length));
        }

        int buffer = AL.AL.GenBuffer();
        EXTFloat32.BufferData(buffer, FloatBufferFormat.Mono, sine, 44100);

        AL.AL.Listener(ALListenerf.Gain, 0.1f);

        AL.AL.Source(alSource, ALSourcef.Gain, 1f);
        AL.AL.Source(alSource, ALSourcei.Buffer, buffer);

        AL.AL.SourcePlay(alSource);

        while (AL.AL.GetSourceState(alSource) == ALSourceState.Playing)
        {
            Thread.Sleep(10);
        }

        AL.AL.SourceStop(alSource);
    }

    // Test double format extension
    if (EXTDouble.IsExtensionPresent())
    {
        Console.WriteLine("Testing float64 format extension with a saw wave...");

        double[] saw = new double[44100 * 2];
        for (int i = 0; i < saw.Length; i++)
        {
            double t = i / (double)saw.Length * 440;
            saw[i] = t - Math.Floor(t);
        }

        int buffer = AL.AL.GenBuffer();
        EXTDouble.BufferData(buffer, DoubleBufferFormat.Mono, saw, 44100);

        AL.AL.Listener(ALListenerf.Gain, 0.1f);

        AL.AL.Source(alSource, ALSourcef.Gain, 1f);
        AL.AL.Source(alSource, ALSourcei.Buffer, buffer);

        AL.AL.SourcePlay(alSource);

        while (AL.AL.GetSourceState(alSource) == ALSourceState.Playing)
        {
            Thread.Sleep(10);
        }

        AL.AL.SourceStop(alSource);
    }

    ALC.ALC.MakeContextCurrent(ALContext.Null);
    ALC.ALC.DestroyContext(context);
    ALC.ALC.CloseDevice(device);

    Console.WriteLine("Goodbye!");

    Console.WriteLine("Playing sound...");
    ExampleSound();
    Console.WriteLine("Done!");
}
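CheckALError and ExampleSound are used above but are not part of this listing. A plausible sketch of CheckALError, assuming it simply reports the current AL error with a label (the original program's version may behave differently):

// Hypothetical error-check helper matching the CheckALError(string) calls above.
public static void CheckALError(string str)
{
    ALError error = AL.AL.GetError();
    if (error != ALError.NoError)
    {
        Console.WriteLine($"ALError at '{str}': {AL.AL.GetErrorString(error)}");
    }
}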
public static extern void CaptureStop([In] ALCaptureDevice device);
public static extern bool CaptureCloseDevice([In] ALCaptureDevice device);
/// <summary>
/// Checks to see that the ALC_EXT_CAPTURE extension is present. This will always be available in 1.1 devices or later.
/// </summary>
/// <param name="device">The device to check the extension is present for.</param>
/// <returns>If the ALC_EXT_CAPTURE extension was present.</returns>
public static bool IsCaptureExtensionPresent(ALCaptureDevice device) => IsExtensionPresent(device, "ALC_EXT_CAPTURE");
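A guard like this is typically placed before opening a capture device so that missing capture support fails gracefully instead of producing ALC errors later. A brief sketch; querying the extension through ALCaptureDevice.Null (i.e. globally) and the device settings are assumptions:

// Fail gracefully when capture is unavailable.
if (!ALC.IsCaptureExtensionPresent(ALCaptureDevice.Null))
{
    Console.WriteLine("ALC_EXT_CAPTURE is not supported; recording is disabled.");
    return;
}

// Safe to open a capture device now.
ALCaptureDevice captureDevice = ALC.CaptureOpenDevice(null, 16000, ALFormat.Mono16, 2048);
ALC.CaptureStart(captureDevice);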
public static extern bool IsExtensionPresent([In] ALCaptureDevice device, [In] string extname);
/// <summary>
/// Creates an input audio stream, instantiates an instance of Rhino object, and infers the intent from spoken commands.
/// </summary>
/// <param name="contextPath">
/// Absolute path to the file containing the context model (file with `.rhn` extension). A context represents the set of
/// expressions (spoken commands), intents, and intent arguments (slots) within a domain of interest.
/// </param>
/// <param name="modelPath">Absolute path to the file containing model parameters. If not set it will be set to the default location.</param>
/// <param name="sensitivity">
/// Inference sensitivity. It should be a number within [0, 1]. A higher sensitivity value results in
/// fewer misses at the cost of (potentially) increasing the erroneous inference rate. If not set, the default value of 0.5 will be used.
/// </param>
/// <param name="audioDeviceIndex">Optional argument. If provided, audio is recorded from this input device. Otherwise, the default audio input device is used.</param>
/// <param name="outputPath">Optional argument. If provided, recorded audio will be stored in this location at the end of the run.</param>
public static void RunDemo(string contextPath, string modelPath, float sensitivity, int? audioDeviceIndex = null, string outputPath = null)
{
    Rhino rhino = null;
    BinaryWriter outputFileWriter = null;
    int totalSamplesWritten = 0;
    try
    {
        // init rhino speech-to-intent engine
        rhino = Rhino.Create(contextPath, modelPath, sensitivity);

        // open stream to output file
        if (!string.IsNullOrWhiteSpace(outputPath))
        {
            outputFileWriter = new BinaryWriter(new FileStream(outputPath, FileMode.OpenOrCreate, FileAccess.Write));
            WriteWavHeader(outputFileWriter, 1, 16, 16000, 0);
        }

        // choose audio device
        string deviceName = null;
        if (audioDeviceIndex != null)
        {
            List<string> captureDeviceList = ALC.GetStringList(GetEnumerationStringList.CaptureDeviceSpecifier).ToList();
            if (captureDeviceList != null && audioDeviceIndex.Value < captureDeviceList.Count)
            {
                deviceName = captureDeviceList[audioDeviceIndex.Value];
            }
            else
            {
                throw new ArgumentException("No input device found with the specified index. Use --show_audio_devices to show available inputs", "--audio_device_index");
            }
        }

        Console.WriteLine(rhino.ContextInfo);
        Console.WriteLine("Listening...\n");

        // create and start recording
        short[] recordingBuffer = new short[rhino.FrameLength];
        ALCaptureDevice captureDevice = ALC.CaptureOpenDevice(deviceName, 16000, ALFormat.Mono16, rhino.FrameLength * 2);
        {
            ALC.CaptureStart(captureDevice);
            while (!Console.KeyAvailable)
            {
                int samplesAvailable = ALC.GetAvailableSamples(captureDevice);
                if (samplesAvailable > rhino.FrameLength)
                {
                    ALC.CaptureSamples(captureDevice, ref recordingBuffer[0], rhino.FrameLength);
                    bool isFinalized = rhino.Process(recordingBuffer);
                    if (isFinalized)
                    {
                        Inference inference = rhino.GetInference();
                        if (inference.IsUnderstood)
                        {
                            Console.WriteLine("{");
                            Console.WriteLine($"  intent : '{inference.Intent}'");
                            Console.WriteLine("  slots : {");
                            foreach (KeyValuePair<string, string> slot in inference.Slots)
                            {
                                Console.WriteLine($"    {slot.Key} : '{slot.Value}'");
                            }
                            Console.WriteLine("  }");
                            Console.WriteLine("}");
                        }
                        else
                        {
                            Console.WriteLine("Didn't understand the command.");
                        }
                    }

                    if (outputFileWriter != null)
                    {
                        foreach (short sample in recordingBuffer)
                        {
                            outputFileWriter.Write(sample);
                        }
                        totalSamplesWritten += recordingBuffer.Length;
                    }
                }
                Thread.Yield();
            }

            // stop and clean up resources
            Console.WriteLine("Stopping...");
            ALC.CaptureStop(captureDevice);
            ALC.CaptureCloseDevice(captureDevice);
        }
    }
    finally
    {
        if (outputFileWriter != null)
        {
            // write size to header and clean up
            WriteWavHeader(outputFileWriter, 1, 16, 16000, totalSamplesWritten);
            outputFileWriter.Flush();
            outputFileWriter.Dispose();
        }
        rhino?.Dispose();
    }
}