        /// <summary>
        /// The method reads user-entered settings and creates a new instance of the DialogServiceConnector object
        /// when the "Reconnect" button is pressed (or the microphone button is pressed for the first time).
        /// </summary>
        private void InitSpeechConnector()
        {
            DialogServiceConfig config = null;
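            // The configuration object is created below from the user-entered profile settings; it requires a
            // Speech subscription key plus either an Azure region or a custom service URL (URL override).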

            var hasSubscription = !string.IsNullOrWhiteSpace(this.settings.RuntimeSettings.Profile.SubscriptionKey);
            var hasRegion       = !string.IsNullOrWhiteSpace(this.settings.RuntimeSettings.Profile.SubscriptionKeyRegion);
            var hasBotId        = !string.IsNullOrWhiteSpace(this.settings.RuntimeSettings.Profile.BotId);
            var hasUrlOverride  = !string.IsNullOrWhiteSpace(this.settings.RuntimeSettings.Profile.UrlOverride);

            if (hasSubscription && (hasRegion || hasUrlOverride))
            {
                if (!string.IsNullOrWhiteSpace(this.settings.RuntimeSettings.Profile.CustomCommandsAppId))
                {
                    // NOTE: Custom Commands is a preview Azure service.
                    // Set the Custom Commands configuration object based on three items:
                    // - The Custom Commands application ID.
                    // - The Cognitive Services Speech subscription key.
                    // - The Azure region of the subscription key (e.g. "westus").
                    config = CustomCommandsConfig.FromSubscription(this.settings.RuntimeSettings.Profile.CustomCommandsAppId, this.settings.RuntimeSettings.Profile.SubscriptionKey, this.settings.RuntimeSettings.Profile.SubscriptionKeyRegion);
                }
                else if (hasBotId)
                {
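                    // When a Bot ID is entered, pass it as the optional third argument so the service
                    // routes requests to that specific bot registration.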
                    config = BotFrameworkConfig.FromSubscription(this.settings.RuntimeSettings.Profile.SubscriptionKey, this.settings.RuntimeSettings.Profile.SubscriptionKeyRegion, this.settings.RuntimeSettings.Profile.BotId);
                }
                else
                {
                    // Set the Bot Framework configuration object based on two items:
                    // - The Cognitive Services Speech subscription key. It is needed for billing and is tied to the bot registration.
                    // - The Azure region of the subscription key (e.g. "westus").
                    config = BotFrameworkConfig.FromSubscription(this.settings.RuntimeSettings.Profile.SubscriptionKey, this.settings.RuntimeSettings.Profile.SubscriptionKeyRegion);
                }
            }

            if (!string.IsNullOrWhiteSpace(this.settings.RuntimeSettings.Profile.ConnectionLanguage))
            {
                // Set the speech recognition language. If not set, the default is "en-us".
                config.Language = this.settings.RuntimeSettings.Profile.ConnectionLanguage;
            }

            if (this.settings.RuntimeSettings.Profile.CustomSpeechEnabled)
            {
                // Set your Custom Speech endpoint ID here, as given to you by the Speech portal (https://speech.microsoft.com/portal).
                // Otherwise, the standard speech endpoint will be used.
                config.SetServiceProperty("cid", this.settings.RuntimeSettings.Profile.CustomSpeechEndpointId, ServicePropertyChannel.UriQueryParameter);

                // Custom Speech does not currently support cloud keyword verification. If this property is not set, the service
                // returns an error and the connection closes. Remove the line below once it is supported.
                config.SetProperty("KeywordConfig_EnableKeywordVerification", "false");
            }

            if (this.settings.RuntimeSettings.Profile.VoiceDeploymentEnabled)
            {
                // Set one or more IDs associated with the custom TTS voice your bot will use.
                // The format of the string is one or more GUIDs separated by commas (no spaces). You get these GUIDs from
                // your custom voice deployment on the Speech portal (https://speech.microsoft.com/portal).
                config.SetProperty(PropertyId.Conversation_Custom_Voice_Deployment_Ids, this.settings.RuntimeSettings.Profile.VoiceDeploymentIds);
            }

            if (!string.IsNullOrEmpty(this.settings.RuntimeSettings.Profile.FromId))
            {
                // Set the from.id in the Bot Framework Activity sent by this tool.
                // The from.id field identifies who generated the activity and may be required by some bots.
                // See https://github.com/microsoft/botframework-sdk/blob/master/specs/botframework-activity/botframework-activity.md
                // for the Bot Framework Activity schema and from.id.
                config.SetProperty(PropertyId.Conversation_From_Id, this.settings.RuntimeSettings.Profile.FromId);
            }

            if (!string.IsNullOrWhiteSpace(this.settings.RuntimeSettings.Profile.LogFilePath))
            {
                // The Speech SDK supports verbose logging to a local file, which may be useful when reporting issues.
                // Supply the path to a text file on disk here. By default, no logging happens.
                config.SetProperty(PropertyId.Speech_LogFilename, this.settings.RuntimeSettings.Profile.LogFilePath);
            }

            if (hasUrlOverride)
            {
                // For prototyping new Direct Line Speech channel service features, a custom service URL may be
                // provided by Microsoft and entered in this tool.
                config.SetProperty("SPEECH-Endpoint", this.settings.RuntimeSettings.Profile.UrlOverride);
            }

            if (!string.IsNullOrWhiteSpace(this.settings.RuntimeSettings.Profile.ProxyHostName) &&
                !string.IsNullOrWhiteSpace(this.settings.RuntimeSettings.Profile.ProxyPortNumber) &&
                int.TryParse(this.settings.RuntimeSettings.Profile.ProxyPortNumber, out var proxyPortNumber))
            {
                // To funnel network traffic via a proxy, set the host name and port number here
                config.SetProxy(this.settings.RuntimeSettings.Profile.ProxyHostName, proxyPortNumber, string.Empty, string.Empty);
            }

            // If the DialogServiceConnector object already exists, destroy it first
            if (this.connector != null)
            {
                // First, unregister all events
                this.connector.ActivityReceived -= this.Connector_ActivityReceived;
                this.connector.Recognizing      -= this.Connector_Recognizing;
                this.connector.Recognized       -= this.Connector_Recognized;
                this.connector.Canceled         -= this.Connector_Canceled;
                this.connector.SessionStarted   -= this.Connector_SessionStarted;
                this.connector.SessionStopped   -= this.Connector_SessionStopped;

                // Then dispose the object
                this.connector.Dispose();
                this.connector = null;
            }

            // Create a new Dialog Service Connector for the above configuration and register to receive events
            this.connector = new DialogServiceConnector(config, AudioConfig.FromDefaultMicrophoneInput());
            this.connector.ActivityReceived += this.Connector_ActivityReceived;
            this.connector.Recognizing      += this.Connector_Recognizing;
            this.connector.Recognized       += this.Connector_Recognized;
            this.connector.Canceled         += this.Connector_Canceled;
            this.connector.SessionStarted   += this.Connector_SessionStarted;
            this.connector.SessionStopped   += this.Connector_SessionStopped;

            // Open a connection to Direct Line Speech channel
            this.connector.ConnectAsync();

            if (this.settings.RuntimeSettings.Profile.CustomSpeechEnabled)
            {
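                // Cache the Custom Speech endpoint ID for later use by this tool
                // (CustomSpeechConfiguration is a helper type defined in this client, not part of the Speech SDK).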
                this.customSpeechConfig = new CustomSpeechConfiguration(this.settings.RuntimeSettings.Profile.CustomSpeechEndpointId);
            }

            if (this.settings.RuntimeSettings.Profile.WakeWordEnabled)
            {
                // Configure wake word (also known as "keyword")
                this.activeWakeWordConfig = new WakeWordConfiguration(this.settings.RuntimeSettings.Profile.WakeWordPath);
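                // Start listening for the keyword locally; the connector begins a dialog interaction
                // (streaming audio to the service) once the keyword is detected.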
                this.connector.StartKeywordRecognitionAsync(this.activeWakeWordConfig.WakeWordModel);
            }
        }
Example #2
        /// <summary>
        /// The method reads user-entered settings and creates a new instance of the DialogServiceConnector object
        /// when the "Reconnect" button is pressed (or the microphone button is pressed for the first time).
        /// </summary>
        private void InitSpeechConnector()
        {
            DialogServiceConfig config = null;

            // Save the Direct Line Speech channel secret key. This is one of two keys you get when you register your bot with the Direct Line Speech
            // channel. It uniquely identifies the bot. Here we call it the bot secret for short.
            this.botSecret = this.botSecretLabel.Text;

            if (!string.IsNullOrWhiteSpace(this.settings.Settings.SubscriptionKey) &&
                !string.IsNullOrWhiteSpace(this.botSecret))
            {
                // Set the dialog service configuration object based on three items:
                // - The Direct Line Speech channel secret (aka "bot secret").
                // - The Cognitive Services Speech subscription key. It is needed for billing.
                // - The Azure region of the subscription key (e.g. "westus").
                config = DialogServiceConfig.FromBotSecret(this.botSecret, this.settings.Settings.SubscriptionKey, this.settings.Settings.SubscriptionKeyRegion);
            }

            if (!string.IsNullOrWhiteSpace(this.settings.Settings.Language))
            {
                // Set the speech recognition language. If not set, the default is "en-us".
                config.SetProperty("SPEECH-RecoLanguage", this.settings.Settings.Language);
            }

            if (!string.IsNullOrEmpty(this.settings.Settings.FromId))
            {
                // Set the from.id in the Bot Framework Activity sent by this tool.
                // The from.id field identifies who generated the activity and may be required by some bots.
                // See https://github.com/microsoft/botframework-sdk/blob/master/specs/botframework-activity/botframework-activity.md
                // for the Bot Framework Activity schema and from.id.
                config.SetProperty("BOT-FromId", this.settings.Settings.FromId);
            }

            if (!string.IsNullOrWhiteSpace(this.settings.Settings.LogFilePath))
            {
                // The Speech SDK supports verbose logging to a local file, which may be useful when reporting issues.
                // Supply the path to a text file on disk here. By default, no logging happens.
                config.SetProperty("SPEECH-LogFilename", this.settings.Settings.LogFilePath);
            }

            if (!string.IsNullOrWhiteSpace(this.settings.Settings.UrlOverride))
            {
                // For prototyping new Direct Line Speech channel service features, a custom service URL may be
                // provided by Microsoft and entered in this tool.
                config.SetProperty("SPEECH-Endpoint", this.settings.Settings.UrlOverride);
            }

            if (!string.IsNullOrWhiteSpace(this.settings.Settings.ProxyHostName) &&
                !string.IsNullOrWhiteSpace(this.settings.Settings.ProxyPortNumber) &&
                int.TryParse(this.settings.Settings.ProxyPortNumber, out var proxyPortNumber))
            {
                // To funnel network traffic via a proxy, set the host name and port number here
                config.SetProxy(this.settings.Settings.ProxyHostName, proxyPortNumber, string.Empty, string.Empty);
            }

            // If the DialogServiceConnector object already exists, destroy it first
            if (this.connector != null)
            {
                // First, unregister all events
                this.connector.ActivityReceived -= this.Connector_ActivityReceived;
                this.connector.Recognizing      -= this.Connector_Recognizing;
                this.connector.Recognized       -= this.Connector_Recognized;
                this.connector.Canceled         -= this.Connector_Canceled;
                this.connector.SessionStarted   -= this.Connector_SessionStarted;
                this.connector.SessionStopped   -= this.Connector_SessionStopped;

                // Then dispose the object
                this.connector.Dispose();
                this.connector = null;
            }

            // Create a new Dialog Service Connector for the above configuration and register to receive events
            this.connector = new DialogServiceConnector(config, AudioConfig.FromDefaultMicrophoneInput());
            this.connector.ActivityReceived += this.Connector_ActivityReceived;
            this.connector.Recognizing      += this.Connector_Recognizing;
            this.connector.Recognized       += this.Connector_Recognized;
            this.connector.Canceled         += this.Connector_Canceled;
            this.connector.SessionStarted   += this.Connector_SessionStarted;
            this.connector.SessionStopped   += this.Connector_SessionStopped;

            // Open a connection to Direct Line Speech channel
            this.connector.ConnectAsync();

            // Save the recent bot secret in the history, so it can easily be retrieved later on
            this.AddBotIdEntryIntoHistory(this.botSecret);

            if (this.settings.Settings.WakeWordEnabled)
            {
                // Configure wake word (also known as "keyword")
                this.activeWakeWordConfig = new WakeWordConfiguration(this.settings.Settings.WakeWordPath);
                this.connector.StartKeywordRecognitionAsync(this.activeWakeWordConfig.WakeWordModel);
            }
        }