/// <summary>
/// Factory method that creates an instance of the Picovoice end-to-end platform
/// by wiring a Porcupine wake-word engine to a Rhino speech-to-intent engine.
/// </summary>
/// <param name="keywordPath">Absolute path to Porcupine's keyword model file.</param>
/// <param name="wakeWordCallback">
/// User-defined callback invoked upon detection of the wake phrase.
/// The callback accepts no input arguments.
/// </param>
/// <param name="contextPath">
/// Absolute path to file containing context parameters. A context represents the set of
/// expressions (spoken commands), intents, and intent arguments (slots) within a domain of interest.
/// </param>
/// <param name="inferenceCallback">
/// User-defined callback invoked upon completion of intent inference. The callback
/// accepts a single input argument of type <see cref="Inference"/> that is populated with the following items:
/// (1) IsUnderstood: whether Rhino understood what it heard based on the context
/// (2) Intent: if IsUnderstood, name of the intent that was inferred
/// (3) Slots: if IsUnderstood, dictionary of slot keys and values that were inferred
/// </param>
/// <param name="porcupineModelPath">Absolute path to the file containing Porcupine's model parameters.</param>
/// <param name="porcupineSensitivity">
/// Wake word detection sensitivity. It should be a number within [0, 1]. A higher
/// sensitivity results in fewer misses at the cost of increasing the false alarm rate.
/// </param>
/// <param name="rhinoModelPath">Absolute path to the file containing Rhino's model parameters.</param>
/// <param name="rhinoSensitivity">
/// Inference sensitivity. It should be a number within [0, 1]. A higher sensitivity value
/// results in fewer misses at the cost of (potentially) increasing the erroneous inference rate.
/// </param>
/// <returns>An instance of the Picovoice platform.</returns>
/// <exception cref="ArgumentException">
/// Thrown when the underlying Porcupine and Rhino engines disagree on frame length or sample rate.
/// </exception>
public static Picovoice Create(
    string keywordPath,
    Action wakeWordCallback,
    string contextPath,
    Action<Inference> inferenceCallback,
    string porcupineModelPath = null,
    float porcupineSensitivity = 0.5f,
    string rhinoModelPath = null,
    float rhinoSensitivity = 0.5f)
{
    Porcupine porcupine = Porcupine.Create(
        keywordPaths: new List<string> { keywordPath },
        modelPath: porcupineModelPath,
        sensitivities: new List<float> { porcupineSensitivity });

    Rhino rhino = Rhino.Create(
        contextPath: contextPath,
        modelPath: rhinoModelPath,
        sensitivity: rhinoSensitivity);

    // Picovoice feeds the same audio frames to both engines in sequence,
    // so their expected frame lengths and sample rates must agree.
    if (porcupine.FrameLength != rhino.FrameLength)
    {
        throw new ArgumentException("Porcupine and Rhino frame lengths are different");
    }

    if (porcupine.SampleRate != rhino.SampleRate)
    {
        throw new ArgumentException("Porcupine and Rhino sample rate are different");
    }

    return new Picovoice(porcupine, wakeWordCallback, rhino, inferenceCallback);
}
/// <summary>
/// Creates an instance of the Rhino inference engine with built-in audio processing.
/// </summary>
/// <param name="contextPath">Absolute path to the Rhino context file (.rhn).</param>
/// <param name="inferenceCallback">A callback for when Rhino has made an intent inference.</param>
/// <param name="modelPath">(Optional) Absolute path to the file containing model parameters. If not set it will be set to the default location.</param>
/// <param name="sensitivity">
/// (Optional) Inference sensitivity. A higher sensitivity value results in
/// fewer misses at the cost of (potentially) increasing the erroneous inference rate.
/// Sensitivity should be a floating-point number within 0 and 1.
/// </param>
/// <param name="errorCallback">(Optional) Callback that triggers if the engine experiences a problem while processing audio.</param>
/// <returns>An instance of RhinoManager.</returns>
public static RhinoManager Create(
    string contextPath,
    Action<Inference> inferenceCallback,
    string modelPath = null,
    float sensitivity = 0.5f,
    Action<Exception> errorCallback = null)
{
    Rhino rhino = Rhino.Create(contextPath, modelPath: modelPath, sensitivity: sensitivity);
    return new RhinoManager(rhino, inferenceCallback, errorCallback);
}
/// <summary>
/// Creates an instance of the Rhino inference engine with built-in audio processing.
/// </summary>
/// <param name="accessKey">AccessKey obtained from Picovoice Console (https://console.picovoice.ai/).</param>
/// <param name="contextPath">Absolute path to the Rhino context file (.rhn).</param>
/// <param name="inferenceCallback">A callback for when Rhino has made an intent inference.</param>
/// <param name="modelPath">(Optional) Absolute path to the file containing model parameters. If not set it will be set to the default location.</param>
/// <param name="sensitivity">
/// (Optional) Inference sensitivity. A higher sensitivity value results in
/// fewer misses at the cost of (potentially) increasing the erroneous inference rate.
/// Sensitivity should be a floating-point number within 0 and 1.
/// </param>
/// <param name="requireEndpoint">
/// (Optional) Boolean variable to indicate if Rhino should wait for a chunk of silence before finishing inference.
/// </param>
/// <param name="processErrorCallback">(Optional) Reports errors that are encountered while the engine is processing audio.</param>
/// <returns>An instance of RhinoManager.</returns>
public static RhinoManager Create(
    string accessKey,
    string contextPath,
    Action<Inference> inferenceCallback,
    string modelPath = null,
    float sensitivity = 0.5f,
    bool requireEndpoint = true,
    Action<RhinoException> processErrorCallback = null)
{
    Rhino rhino = Rhino.Create(
        accessKey,
        contextPath,
        modelPath: modelPath,
        sensitivity: sensitivity,
        requireEndpoint: requireEndpoint);
    return new RhinoManager(rhino, inferenceCallback, processErrorCallback);
}