/// <summary>
/// Sends the given audio stream to the AI service for recognition and
/// natural language processing, blocking until the response arrives.
/// </summary>
/// <param name="voiceStream">Audio stream uploaded as the "voiceData" multipart part (sent as voice.wav).</param>
/// <param name="requestExtras">Optional contexts/entities copied into the request.</param>
/// <returns>The parsed service response.</returns>
/// <exception cref="AIServiceException">Wraps any error raised while performing the request.</exception>
public AIResponse VoiceRequest(Stream voiceStream, RequestExtras requestExtras = null)
        {
            var request = new AIRequest();

            request.Language  = config.Language.code;
            // Fix: TimeZone.CurrentTimeZone is an obsolete API; TimeZoneInfo.Local is the
            // supported replacement and matches the async overloads in this codebase.
            request.Timezone  = TimeZoneInfo.Local.StandardName;
            request.SessionId = sessionId;

            if (requestExtras != null)
            {
                requestExtras.CopyTo(request);
            }

            try
            {
                var httpRequest = (HttpWebRequest)WebRequest.Create(config.RequestUrl);
                httpRequest.Method = "POST";
                httpRequest.Accept = "application/json";

                httpRequest.Headers.Add("Authorization", "Bearer " + config.ClientAccessToken);
                httpRequest.Headers.Add("ocp-apim-subscription-key", config.SubscriptionKey);

                // Omit null properties so optional fields are not serialized as JSON nulls.
                var jsonSettings = new JsonSerializerSettings
                {
                    NullValueHandling = NullValueHandling.Ignore
                };

                var jsonRequest = JsonConvert.SerializeObject(request, Formatting.None, jsonSettings);

                if (config.DebugLog)
                {
                    Debug.WriteLine("Request: " + jsonRequest);
                }

                // Stream the JSON metadata part and the audio part as one multipart body.
                var multipartClient = new MultipartHttpClient(httpRequest);
                multipartClient.connect();

                multipartClient.addStringPart("request", jsonRequest);
                multipartClient.addFilePart("voiceData", "voice.wav", voiceStream);

                multipartClient.finish();

                var responseJsonString = multipartClient.getResponse();

                if (config.DebugLog)
                {
                    Debug.WriteLine("Response: " + responseJsonString);
                }

                var aiResponse = JsonConvert.DeserializeObject<AIResponse>(responseJsonString);

                // Surface server-side errors as exceptions before returning.
                CheckForErrors(aiResponse);

                return aiResponse;
            }
            catch (Exception e)
            {
                throw new AIServiceException(e);
            }
        }
// Example #2 (score: 0)
        public AIResponse VoiceRequest(Stream voiceStream, RequestExtras requestExtras = null)
        {
            // Speaktoit recognition has no Italian model; delegate for every
            // other configured language.
            if (config.Language != SupportedLanguage.Italian)
            {
                return dataService.VoiceRequest(voiceStream, requestExtras);
            }

            throw new AIServiceException("Sorry, but Italian language now is not supported in Speaktoit recognition. Please use some another speech recognition engine.");
        }
// Example #3 (score: 0)
        /// <summary>
        /// Sends a text request built from the given query and optional extras.
        /// </summary>
        /// <param name="text">Request in text form; must be non-empty.</param>
        /// <param name="requestExtras">Optional request parameters such as Entities and Contexts.</param>
        /// <returns>Server response.</returns>
        /// <exception cref="ArgumentNullException">If text is null or empty.</exception>
        public async Task <AIResponse> TextRequestAsync(string text, RequestExtras requestExtras)
        {
            if (string.IsNullOrEmpty(text))
            {
                // nameof keeps the reported parameter name correct under renames.
                throw new ArgumentNullException(nameof(text));
            }

            return await TextRequestAsync(new AIRequest(text, requestExtras));
        }
        /// <summary>
        /// Send simple text request. This method does not use OnResult and OnSuccess callbacks for simplicity
        /// </summary>
        /// <param name="text">Request in text form</param>
        /// <param name="requestExtras">Optional request parameters such as Entities and Contexts</param>
        /// <returns>Server response</returns>
        /// <exception cref="ArgumentNullException">If text null or empty</exception>
        /// <exception cref="AIServiceException">If any error appears while request</exception>
        public async Task<AIResponse> TextRequestAsync(string text, RequestExtras requestExtras = null)
        {
            if (string.IsNullOrEmpty(text))
            {
                // nameof keeps the reported parameter name correct under renames.
                throw new ArgumentNullException(nameof(text));
            }

            return await TextRequestAsync(new AIRequest(text, requestExtras));
        }
// Example #5 (score: 0)
        /// <summary>
        /// Sends a simple synchronous text request to the AI service.
        /// </summary>
        /// <param name="text">Request in text form; must be non-empty.</param>
        /// <param name="requestExtras">Optional request parameters such as Entities and Contexts.</param>
        /// <returns>Server response.</returns>
        /// <exception cref="ArgumentNullException">If text is null or empty.</exception>
        public AIResponse TextRequest(string text, RequestExtras requestExtras)
        {
            if (string.IsNullOrEmpty(text))
            {
                // nameof keeps the reported parameter name correct under renames.
                throw new ArgumentNullException(nameof(text));
            }

            return TextRequest(new AIRequest(text, requestExtras));
        }
        /// <summary>
        /// Uploads a voice stream plus request metadata as multipart form data
        /// and returns the processed service response.
        /// </summary>
        public async Task<AIResponse> VoiceRequestAsync(Stream voiceStream, RequestExtras requestExtras = null)
        {
            // Base request built from the service configuration.
            var aiRequest = new AIRequest();
            aiRequest.Language  = config.Language.code;
            aiRequest.Timezone  = TimeZoneInfo.Local.StandardName;
            aiRequest.SessionId = sessionId;

            if (requestExtras != null)
            {
                if (requestExtras.HasContexts)
                {
                    aiRequest.Contexts = requestExtras.Contexts;
                }

                if (requestExtras.HasEntities)
                {
                    aiRequest.Entities = requestExtras.Entities;
                }
            }

            try
            {
                var requestJson = JsonConvert.SerializeObject(aiRequest, Formatting.None, jsonSettings);

                if (config.DebugLog)
                {
                    Debug.WriteLine($"Request: {requestJson}");
                }

                // One part carries the JSON metadata, the other the raw audio.
                var formContent = new HttpMultipartFormDataContent
                {
                    { new HttpStringContent(requestJson, UnicodeEncoding.Utf8, "application/json"), "request" },
                    { new HttpStreamContent(voiceStream.AsInputStream()), "voiceData", "voice.wav" }
                };

                var httpResponse = await httpClient.PostAsync(new Uri(config.RequestUrl), formContent);

                return await ProcessResponse(httpResponse);
            }
            catch (Exception e)
            {
                throw new AIServiceException(e);
            }
        }
        /// <summary>
        /// Turns a speech recognition result into an AI request, sends it, and
        /// reports the outcome through the OnResult/OnError callbacks.
        /// </summary>
        private async Task ProcessRecognitionResultsAsync(SpeechRecognitionResult results, RequestExtras requestExtras)
        {
            // No recognized text means there is nothing to send.
            if (string.IsNullOrWhiteSpace(results.Text))
            {
                return;
            }

            var request = new AIRequest
            {
                Query = new[] { results.Text }
            };

            try
            {
                request.Confidence = new[] { Convert.ToSingle(results.RawConfidence) };
            }
            catch
            {
                // Best effort: confidence is optional metadata, so conversion
                // problems are deliberately ignored.
            }

            try
            {
                requestExtras?.CopyTo(request);

                var response = await dataService.RequestAsync(request);
                FireOnResult(response);
            }
            catch (Exception e)
            {
                FireOnError(new AIServiceException(e));
            }
        }
// Example #8 (score: 0)
		public AIResponse VoiceRequest(Stream voiceStream, RequestExtras requestExtras = null)
		{
			// Speaktoit recognition cannot handle Italian; reject up front
			// instead of letting the request fail remotely.
			var isItalian = config.Language == SupportedLanguage.Italian;
			if (isItalian)
			{
				throw new AIServiceException("Sorry, but Italian language now is not supported in Speaktoit recognition. Please use some another speech recognition engine.");
			}

			return dataService.VoiceRequest(voiceStream, requestExtras);
		}
 /// <summary>
 /// Start listening for speech and process the recognized utterance.
 /// </summary>
 /// <param name="requestExtras">Optional contexts/entities attached to the resulting request.</param>
 /// <returns>The AI service response for the recognized speech.</returns>
 public abstract Task <AIResponse> StartRecognitionAsync(RequestExtras requestExtras = null);
        /// <summary>
        /// Builds an AI request from the recognition result and forwards it to the
        /// data service; returns null when nothing was recognized.
        /// </summary>
        private async Task<AIResponse> ProcessRecognitionResultsAsync(SpeechRecognitionResult results, RequestExtras requestExtras, CancellationToken cancellationToken)
        {
            // Nothing recognized — no request to make.
            if (string.IsNullOrWhiteSpace(results.Text))
            {
                return null;
            }

            var request = CreateAIRequest(results);

            requestExtras?.CopyTo(request);

            return await DataService.RequestAsync(request, cancellationToken);
        }
// Example #11 (score: 0)
 /// <summary>Delegates the voice request to the underlying data service.</summary>
 public async Task<AIResponse> VoiceRequestAsync(Stream voiceStream, RequestExtras requestExtras = null)
 {
     var serviceResponse = await dataService.VoiceRequestAsync(voiceStream, requestExtras);
     return serviceResponse;
 }
// Example #12 (score: 0)
        /// <summary>
        /// Sends the recognized text to the data service and raises the
        /// OnResult/OnError callbacks with the outcome.
        /// </summary>
        private async Task ProcessRecognitionResultsAsync(SpeechRecognitionResult results, RequestExtras requestExtras)
        {
            if (string.IsNullOrWhiteSpace(results.Text))
            {
                // No recognized text — skip the service round-trip.
                return;
            }

            var aiRequest = new AIRequest { Query = new[] { results.Text } };

            try
            {
                aiRequest.Confidence = new[] { Convert.ToSingle(results.RawConfidence) };
            }
            catch
            {
                // Confidence is optional; conversion problems are ignored on purpose.
            }

            try
            {
                if (requestExtras != null)
                {
                    requestExtras.CopyTo(aiRequest);
                }

                FireOnResult(await dataService.RequestAsync(aiRequest));
            }
            catch (Exception e)
            {
                FireOnError(new AIServiceException(e));
            }
        }
// Example #13 (score: 0)
 /// <summary>
 /// Forwards the voice stream and optional extras to the configured data service.
 /// </summary>
 public async Task <AIResponse> VoiceRequestAsync(Stream voiceStream, RequestExtras requestExtras = null)
 {
     var result = await dataService.VoiceRequestAsync(voiceStream, requestExtras);
     return result;
 }
// Example #14 (score: 0)
        /// <summary>
        /// Posts the voice stream and a JSON request part as a multipart upload
        /// and deserializes the AI service response.
        /// </summary>
        public async Task <AIResponse> VoiceRequestAsync(Stream voiceStream, RequestExtras requestExtras = null)
        {
            var request = new AIRequest
            {
                Language  = config.Language.code,
                Timezone  = TimeZoneInfo.Local.StandardName,
                SessionId = sessionId
            };

            if (requestExtras != null)
            {
                if (requestExtras.HasContexts)
                {
                    request.Contexts = requestExtras.Contexts;
                }

                if (requestExtras.HasEntities)
                {
                    request.Entities = requestExtras.Entities;
                }
            }

            try
            {
                var webRequest = (HttpWebRequest)WebRequest.Create(config.RequestUrl);
                webRequest.Method = "POST";
                webRequest.Accept = "application/json";
                webRequest.Headers["Authorization"] = "Bearer " + config.ClientAccessToken;

                // Skip null members so optional fields are left out of the JSON body.
                var serializerSettings = new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore };
                var payload = JsonConvert.SerializeObject(request, Formatting.None, serializerSettings);

                if (config.DebugLog)
                {
                    Debug.WriteLine("Request: " + payload);
                }

                // Stream the JSON part and the audio part as one multipart body.
                var client = new MultipartHttpClient(webRequest);
                client.Connect();
                client.AddStringPart("request", payload);
                client.AddFilePart("voiceData", "voice.wav", voiceStream);
                client.Finish();

                var responseBody = await client.GetResponse();

                if (config.DebugLog)
                {
                    Debug.WriteLine("Response: " + responseBody);
                }

                var aiResponse = JsonConvert.DeserializeObject<AIResponse>(responseBody);
                CheckForErrors(aiResponse);
                return aiResponse;
            }
            catch (Exception e)
            {
                throw new AIServiceException(e);
            }
        }
        /// <summary>
        /// Runs one speech recognition pass and dispatches the outcome through the
        /// FireOnResult/FireOnError callbacks. A call is ignored while a previous
        /// recognition operation is still in flight (currentOperation != null).
        /// </summary>
        /// <param name="requestExtras">Optional contexts/entities copied into the AI request.</param>
        /// <exception cref="PrivacyStatementDeclinedException">If the OS privacy statement was not accepted.</exception>
        /// <exception cref="AIServiceException">For any other recognition failure.</exception>
        public override async Task StartRecognitionAsync(RequestExtras requestExtras = null)
        {
            if (currentOperation == null)
            {
                try
                {
                    var speechRecognitionResultTask = speechRecognizer.RecognizeAsync();
                    currentOperation = speechRecognitionResultTask;

                    var results = await speechRecognitionResultTask;
                    currentOperation = null;

                    switch (results.Status)
                    {
                        case SpeechRecognitionResultStatus.Success:
                            // Fix: await the processing task. The original discarded it
                            // (fire-and-forget, compiler warning CS4014), so exceptions
                            // inside could go unobserved and callers could not tell when
                            // processing had completed.
                            await ProcessRecognitionResultsAsync(results, requestExtras);
                            break;
                        case SpeechRecognitionResultStatus.TopicLanguageNotSupported:
                            FireOnError(new AIServiceException("This language is not supported"));
                            break;
                        case SpeechRecognitionResultStatus.GrammarLanguageMismatch:
                            FireOnError(new AIServiceException("GrammarLanguageMismatch"));
                            break;
                        case SpeechRecognitionResultStatus.GrammarCompilationFailure:
                            FireOnError(new AIServiceException("GrammarCompilationFailure"));
                            break;
                        case SpeechRecognitionResultStatus.AudioQualityFailure:
                            FireOnError(new AIServiceException("AudioQualityFailure"));
                            break;
                        case SpeechRecognitionResultStatus.UserCanceled:
                            // do nothing
                            break;
                        case SpeechRecognitionResultStatus.Unknown:
                            FireOnError(new AIServiceException("Unknown recognition error"));
                            break;
                        default:
                            throw new ArgumentOutOfRangeException();
                    }
                }
                catch (TaskCanceledException)
                {
                    // Cancellation is a normal outcome; just clear the in-flight marker.
                    currentOperation = null;
                }
                catch (Exception e)
                {
                    currentOperation = null;

                    if ((uint) e.HResult == HRESULT_PRIVACY_STATEMENT_DECLINED)
                    {
                        throw new PrivacyStatementDeclinedException(
                            "You must accept privacy statement before using speech recognition.", e);
                    }
                    else
                    {
                        throw new AIServiceException("Exception while recognition", e);
                    }
                }
            }
        }
        /// <summary>
        /// Sends the given audio stream to the AI service synchronously and
        /// returns the parsed response.
        /// </summary>
        /// <param name="voiceStream">Audio stream uploaded as the "voiceData" multipart part (sent as voice.wav).</param>
        /// <param name="requestExtras">Optional contexts/entities copied into the request.</param>
        /// <returns>The parsed service response.</returns>
        /// <exception cref="AIServiceException">Wraps any error raised while performing the request.</exception>
        public AIResponse VoiceRequest(Stream voiceStream, RequestExtras requestExtras = null)
        {
            var request = new AIRequest();
            request.Language = config.Language.code;
            // Fix: TimeZone.CurrentTimeZone is an obsolete API; TimeZoneInfo.Local is the
            // supported replacement and matches the async overloads in this codebase.
            request.Timezone = TimeZoneInfo.Local.StandardName;
            request.SessionId = sessionId;

            if (requestExtras != null)
            {
                requestExtras.CopyTo(request);
            }

            try
            {
                var httpRequest = (HttpWebRequest)WebRequest.Create(config.RequestUrl);
                httpRequest.Method = "POST";
                httpRequest.Accept = "application/json";

                httpRequest.Headers.Add("Authorization", "Bearer " + config.ClientAccessToken);
                httpRequest.Headers.Add("ocp-apim-subscription-key", config.SubscriptionKey);

                // Omit null properties so optional fields are not serialized as JSON nulls.
                var jsonSettings = new JsonSerializerSettings
                {
                    NullValueHandling = NullValueHandling.Ignore
                };

                var jsonRequest = JsonConvert.SerializeObject(request, Formatting.None, jsonSettings);

                if (config.DebugLog)
                {
                    Debug.WriteLine("Request: " + jsonRequest);
                }

                var multipartClient = new MultipartHttpClient(httpRequest);
                multipartClient.connect();

                multipartClient.addStringPart("request", jsonRequest);
                multipartClient.addFilePart("voiceData", "voice.wav", voiceStream);

                multipartClient.finish();

                var responseJsonString = multipartClient.getResponse();

                if (config.DebugLog)
                {
                    Debug.WriteLine("Response: " + responseJsonString);
                }

                var aiResponse = JsonConvert.DeserializeObject<AIResponse>(responseJsonString);

                // Surface server-side errors as exceptions before returning.
                CheckForErrors(aiResponse);

                return aiResponse;
            }
            catch (Exception e)
            {
                throw new AIServiceException(e);
            }
        }
// Example #17 (score: 0)
 /// <summary>
 /// Start listening for speech; results are delivered via callbacks rather
 /// than a return value.
 /// </summary>
 /// <param name="requestExtras">Optional contexts/entities attached to the resulting request.</param>
 /// <returns>A task that completes when the recognition pass has finished.</returns>
 public abstract Task StartRecognitionAsync(RequestExtras requestExtras = null);
// Example #18 (score: 0)
 /// <summary>
 /// Start listening for speech; results are delivered via callbacks rather
 /// than a return value.
 /// </summary>
 /// <param name="requestExtras">Optional contexts/entities attached to the resulting request.</param>
 /// <returns>A task that completes when the recognition pass has finished.</returns>
 public abstract Task StartRecognitionAsync(RequestExtras requestExtras = null);
        /// <summary>
        /// Uploads the voice stream and request metadata as multipart form data
        /// and returns the processed service response.
        /// </summary>
        public async Task<AIResponse> VoiceRequestAsync(Stream voiceStream, RequestExtras requestExtras = null)
        {
            var voiceRequest = new AIRequest
            {
                Language = config.Language.code,
                Timezone = TimeZoneInfo.Local.StandardName,
                SessionId = sessionId
            };

            if (requestExtras?.HasContexts == true)
            {
                voiceRequest.Contexts = requestExtras.Contexts;
            }

            if (requestExtras?.HasEntities == true)
            {
                voiceRequest.Entities = requestExtras.Entities;
            }

            try
            {
                var serialized = JsonConvert.SerializeObject(voiceRequest, Formatting.None, jsonSettings);

                if (config.DebugLog)
                {
                    Debug.WriteLine($"Request: {serialized}");
                }

                // One part carries the JSON metadata, the other the raw audio.
                var multipartContent = new HttpMultipartFormDataContent();
                multipartContent.Add(new HttpStringContent(serialized, UnicodeEncoding.Utf8, "application/json"), "request");
                multipartContent.Add(new HttpStreamContent(voiceStream.AsInputStream()), "voiceData", "voice.wav");

                var postResponse = await httpClient.PostAsync(new Uri(config.RequestUrl), multipartContent);
                return await ProcessResponse(postResponse);
            }
            catch (Exception e)
            {
                throw new AIServiceException(e);
            }
        }
// Example #20 (score: 0)
        /// <summary>
        /// Runs one speech recognition pass and dispatches the outcome through the
        /// FireOnResult/FireOnError callbacks. A call is ignored while a previous
        /// recognition operation is still in flight (currentOperation != null).
        /// </summary>
        /// <param name="requestExtras">Optional contexts/entities copied into the AI request.</param>
        /// <exception cref="PrivacyStatementDeclinedException">If the OS privacy statement was not accepted.</exception>
        /// <exception cref="AIServiceException">For any other recognition failure.</exception>
        public override async Task StartRecognitionAsync(RequestExtras requestExtras = null)
        {
            if (currentOperation == null)
            {
                try
                {
                    var speechRecognitionResultTask = speechRecognizer.RecognizeAsync();
                    currentOperation = speechRecognitionResultTask;

                    var results = await speechRecognitionResultTask;
                    currentOperation = null;

                    switch (results.Status)
                    {
                    case SpeechRecognitionResultStatus.Success:
                        // Fix: await the processing task. The original discarded it
                        // (fire-and-forget, compiler warning CS4014), so exceptions
                        // inside could go unobserved and callers could not tell when
                        // processing had completed.
                        await ProcessRecognitionResultsAsync(results, requestExtras);
                        break;

                    case SpeechRecognitionResultStatus.TopicLanguageNotSupported:
                        FireOnError(new AIServiceException("This language is not supported"));
                        break;

                    case SpeechRecognitionResultStatus.GrammarLanguageMismatch:
                        FireOnError(new AIServiceException("GrammarLanguageMismatch"));
                        break;

                    case SpeechRecognitionResultStatus.GrammarCompilationFailure:
                        FireOnError(new AIServiceException("GrammarCompilationFailure"));
                        break;

                    case SpeechRecognitionResultStatus.AudioQualityFailure:
                        FireOnError(new AIServiceException("AudioQualityFailure"));
                        break;

                    case SpeechRecognitionResultStatus.UserCanceled:
                        // do nothing
                        break;

                    case SpeechRecognitionResultStatus.Unknown:
                        FireOnError(new AIServiceException("Unknown recognition error"));
                        break;

                    default:
                        throw new ArgumentOutOfRangeException();
                    }
                }
                catch (TaskCanceledException)
                {
                    // Cancellation is a normal outcome; just clear the in-flight marker.
                    currentOperation = null;
                }
                catch (Exception e)
                {
                    currentOperation = null;

                    if ((uint)e.HResult == HRESULT_PRIVACY_STATEMENT_DECLINED)
                    {
                        throw new PrivacyStatementDeclinedException(
                                  "You must accept privacy statement before using speech recognition.", e);
                    }
                    else
                    {
                        throw new AIServiceException("Exception while recognition", e);
                    }
                }
            }
        }
        /// <summary>
        /// Starts a new recognition session, cancelling any session still in
        /// flight, and returns the AI service response for the recognized
        /// utterance (null when the user cancelled recognition).
        /// </summary>
        /// <param name="requestExtras">Optional contexts/entities merged into the request.</param>
        /// <returns>Service response, or null for user-cancelled recognition.</returns>
        /// <exception cref="AIServiceException">For unsupported language, grammar, audio-quality, or privacy failures.</exception>
        public override async Task<AIResponse> StartRecognitionAsync(RequestExtras requestExtras = null)
        {
            // A previous session may still be running: cancel and dispose it so
            // only one recognition operation is active at a time.
            if (cancellationTokenSource != null)
            {
                cancellationTokenSource.Cancel();
                cancellationTokenSource.Dispose();
            }

            try
            {
                cancellationTokenSource = new CancellationTokenSource();

                // AsTask ties the WinRT recognition operation to our token so a
                // subsequent StartRecognitionAsync call can cancel it.
                var speechRecognitionResultTask = speechRecognizer.RecognizeAsync().AsTask(cancellationTokenSource.Token);
                var results = await speechRecognitionResultTask;

                switch (results.Status)
                {
                    case SpeechRecognitionResultStatus.Success:
                        var response = await ProcessRecognitionResultsAsync(results, requestExtras, cancellationTokenSource.Token);
                        return response;
                    case SpeechRecognitionResultStatus.TopicLanguageNotSupported:
                        throw new AIServiceException("This language is not supported");
                    case SpeechRecognitionResultStatus.GrammarLanguageMismatch:
                        throw new AIServiceException("GrammarLanguageMismatch");
                    case SpeechRecognitionResultStatus.GrammarCompilationFailure:
                        throw new AIServiceException("GrammarCompilationFailure");
                    case SpeechRecognitionResultStatus.AudioQualityFailure:
                        throw new AIServiceException("AudioQualityFailure");
                    case SpeechRecognitionResultStatus.UserCanceled:
                        // do nothing
                        return null;
                    case SpeechRecognitionResultStatus.Unknown:
                        throw new AIServiceException("Unknown recognition error");
                    default:
                        throw new ArgumentOutOfRangeException();
                }
            }
            catch (OperationCanceledException)
            {
                // Cancellation propagates unchanged; drop the source so the next
                // call starts fresh.
                cancellationTokenSource = null;
                throw;
            }
            catch (Exception e)
            {
                cancellationTokenSource = null;

                if ((uint) e.HResult == HRESULT_PRIVACY_STATEMENT_DECLINED)
                {
                    // NOTE(review): a sibling implementation in this codebase throws
                    // PrivacyStatementDeclinedException for this HRESULT; here it is
                    // wrapped as AIServiceException — confirm which is intended.
                    throw new AIServiceException(
                        "You must accept privacy statement before using speech recognition.", e);
                }
                else
                {
                    throw new AIServiceException("Exception while recognition", e);
                }
            }
        }
        /// <summary>
        /// Builds an AI request from the recognition result and forwards it to the
        /// data service; returns null when nothing was recognized.
        /// </summary>
        private async Task <AIResponse> ProcessRecognitionResultsAsync(SpeechRecognitionResult results, RequestExtras requestExtras, CancellationToken cancellationToken)
        {
            // Without recognized text there is nothing to send to the service.
            if (string.IsNullOrWhiteSpace(results.Text))
            {
                return null;
            }

            var aiRequest = CreateAIRequest(results);
            requestExtras?.CopyTo(aiRequest);

            return await DataService.RequestAsync(aiRequest, cancellationToken);
        }
        /// <summary>
        /// Starts a new recognition session, cancelling any session still in
        /// flight, and returns the AI service response for the recognized
        /// utterance (null when the user cancelled recognition).
        /// </summary>
        /// <param name="requestExtras">Optional contexts/entities merged into the request.</param>
        /// <returns>Service response, or null for user-cancelled recognition.</returns>
        /// <exception cref="AIServiceException">For unsupported language, grammar, audio-quality, or privacy failures.</exception>
        public override async Task <AIResponse> StartRecognitionAsync(RequestExtras requestExtras = null)
        {
            // A previous session may still be running: cancel and dispose it so
            // only one recognition operation is active at a time.
            if (cancellationTokenSource != null)
            {
                cancellationTokenSource.Cancel();
                cancellationTokenSource.Dispose();
            }

            try
            {
                cancellationTokenSource = new CancellationTokenSource();

                // AsTask ties the WinRT recognition operation to our token so a
                // subsequent StartRecognitionAsync call can cancel it.
                var speechRecognitionResultTask = speechRecognizer.RecognizeAsync().AsTask(cancellationTokenSource.Token);
                var results = await speechRecognitionResultTask;

                switch (results.Status)
                {
                case SpeechRecognitionResultStatus.Success:
                    var response = await ProcessRecognitionResultsAsync(results, requestExtras, cancellationTokenSource.Token);

                    return(response);

                case SpeechRecognitionResultStatus.TopicLanguageNotSupported:
                    throw new AIServiceException("This language is not supported");

                case SpeechRecognitionResultStatus.GrammarLanguageMismatch:
                    throw new AIServiceException("GrammarLanguageMismatch");

                case SpeechRecognitionResultStatus.GrammarCompilationFailure:
                    throw new AIServiceException("GrammarCompilationFailure");

                case SpeechRecognitionResultStatus.AudioQualityFailure:
                    throw new AIServiceException("AudioQualityFailure");

                case SpeechRecognitionResultStatus.UserCanceled:
                    // do nothing
                    return(null);

                case SpeechRecognitionResultStatus.Unknown:
                    throw new AIServiceException("Unknown recognition error");

                default:
                    throw new ArgumentOutOfRangeException();
                }
            }
            catch (OperationCanceledException)
            {
                // Cancellation propagates unchanged; drop the source so the next
                // call starts fresh.
                cancellationTokenSource = null;
                throw;
            }
            catch (Exception e)
            {
                cancellationTokenSource = null;

                if ((uint)e.HResult == HRESULT_PRIVACY_STATEMENT_DECLINED)
                {
                    // NOTE(review): a sibling implementation in this codebase throws
                    // PrivacyStatementDeclinedException for this HRESULT; here it is
                    // wrapped as AIServiceException — confirm which is intended.
                    throw new AIServiceException(
                              "You must accept privacy statement before using speech recognition.", e);
                }
                else
                {
                    throw new AIServiceException("Exception while recognition", e);
                }
            }
        }
// Example #24 (score: 0)
 /// <summary>
 /// Start listening for speech and process the recognized utterance.
 /// </summary>
 /// <param name="requestExtras">Optional contexts/entities attached to the resulting request.</param>
 /// <returns>The AI service response for the recognized speech.</returns>
 public abstract Task<AIResponse> StartRecognitionAsync(RequestExtras requestExtras = null);
// Example #25 (score: -1)
        /// <summary>
        /// Sends a simple synchronous text request to the AI service.
        /// </summary>
        /// <param name="text">Request in text form; must be non-empty.</param>
        /// <param name="requestExtras">Optional request parameters such as Entities and Contexts.</param>
        /// <returns>Server response.</returns>
        /// <exception cref="ArgumentNullException">If text is null or empty.</exception>
        public AIResponse TextRequest(string text, RequestExtras requestExtras)
        {
            if (string.IsNullOrEmpty(text)) {
                // nameof keeps the reported parameter name correct under renames.
                throw new ArgumentNullException(nameof(text));
            }

            return TextRequest(new AIRequest(text, requestExtras));
        }