Пример #1
0
 // Registers a one-shot wait on the reco context's completion event and invokes
 // <paramref name="callback"/> with either the recognition results or an
 // internal-server-error result when the wait times out. If the context's event
 // has already been disposed (null), the callback is invoked immediately with an
 // internal error so the reco request loop still completes.
 // <param name="callback">Delegate to invoke with the completed-recognition args.</param>
 // <param name="asyncRecoType">Human-readable reco type name, used only for tracing/logging.</param>
 private void HandleTimeoutAndAsyncRecoComplete(SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedDelegate callback, string asyncRecoType)
 {
     // Maximum time to wait for the async recognition event before failing the request.
     const double AsyncRecoTimeoutMilliseconds = 30000.0;
     lock (this.thisLock)
     {
         if (this.recoContext.Event == null)
         {
             // Context already disposed: complete the request loop with an internal error.
             ExTraceGlobals.SpeechRecognitionTracer.TraceWarning<string>(0L, "The RecoContext for RecoType:'{0}' has already been disposed of, sending internal error to complete the reco request loop", asyncRecoType);
             callback(new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(string.Empty, SpeechRecognitionProcessor.SpeechHttpStatus.InternalServerError));
         }
         else
         {
             // One-shot registration (executeOnlyOnce: true): the delegate runs on a
             // thread-pool thread when the event is signaled OR the timeout elapses,
             // whichever happens first — never both.
             ThreadPool.RegisterWaitForSingleObject(this.recoContext.Event, delegate(object state, bool timedOut)
             {
                 SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs args;
                 if (timedOut)
                 {
                     // Timed out: log the event and synthesize an internal-server-error result.
                     ExTraceGlobals.SpeechRecognitionTracer.TraceError<string, string>(0L, "The Async call:'{0}' for Recognition:'{1}' Timed out", asyncRecoType, this.RequestType.ToString());
                     UmGlobals.ExEvent.LogEvent(UMEventLogConstants.Tuple_MobileSpeechRecoClientAsyncCallTimedOut, null, new object[]
                     {
                         this.RequestId,
                         asyncRecoType,
                         this.RequestType.ToString()
                     });
                     args = new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(string.Empty, SpeechRecognitionProcessor.SpeechHttpStatus.InternalServerError);
                 }
                 else
                 {
                     // Event was signaled: propagate the results/status stashed on the context
                     // (set by SetAsyncArgsAndSignalEvent before it signals the event).
                     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug<string, string>(0L, "The {0} for Recognition:{1} did not time out", asyncRecoType, this.RequestType.ToString());
                     args = new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(this.recoContext.Results, this.recoContext.Status);
                 }
                 callback(args);
             }, null, TimeSpan.FromMilliseconds(AsyncRecoTimeoutMilliseconds), true);
         }
     }
 }
Пример #2
0
 // Token: 0x06001D61 RID: 7521 RVA: 0x000754DB File Offset: 0x000736DB
 // Completion callback for the recognize phase. On success, result handling is
 // offloaded to a thread-pool work item; on any failure status the request is
 // completed immediately with the failing args.
 private void OnRecognizeCompleted(SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs args)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug((long)this.GetHashCode(), "Entering SpeechRecognitionProcessor.OnRecognizeCompleted");
     if (args.HttpStatus == SpeechRecognitionProcessor.SpeechHttpStatus.Success)
     {
         // Success: parse/handle the raw results off this callback thread.
         ThreadPool.QueueUserWorkItem(new WaitCallback(this.HandleRecoResults), args);
     }
     else
     {
         // Failure: finish the request with the error status as-is.
         this.CompleteRequest(args);
     }
 }
Пример #3
0
 // Token: 0x06001CE0 RID: 7392 RVA: 0x00073BF0 File Offset: 0x00071DF0
 // Central failure path: traces the exception, logs a SpeechRecoRequestFailed
 // event, then completes the scenario with an empty result carrying the given
 // HTTP status and tears down the recognition helpers.
 // <param name="e">The exception that failed the request.</param>
 // <param name="status">HTTP status to report back to the caller.</param>
 private void HandleException(Exception e, SpeechRecognitionProcessor.SpeechHttpStatus status)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceError<Exception, int, string>((long)this.GetHashCode(), "SpeechRecognitionScenarioBase - Exception='{0}', Status Code='{1}', Status Description='{2}'", e, status.StatusCode, status.StatusDescription);
     object[] eventParameters = new object[]
     {
         this.Parameters.RequestId,
         this.Parameters.UserObjectGuid,
         this.Parameters.TenantGuid,
         CommonUtil.ToEventLogString(e)
     };
     UmGlobals.ExEvent.LogEvent(UMEventLogConstants.Tuple_SpeechRecoRequestFailed, null, eventParameters);
     // Empty response text: callers only get the status on the failure path.
     SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs failureArgs = new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(string.Empty, status);
     this.InvokeHandlerCallbackAndDisposeHelpers(failureArgs);
 }
Пример #4
0
 // Token: 0x06001C80 RID: 7296 RVA: 0x00071E90 File Offset: 0x00070090
 // Stashes the completed-call status/results on this context and signals the
 // completion event so any registered waiter (see the timeout handler) can pick
 // them up. Taking thisLock guards against the event being disposed concurrently;
 // a null Event means the context was already torn down, in which case the
 // results are silently dropped.
 private void SetAsyncArgsAndSignalEvent(SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs args)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug<int, string>((long)this.GetHashCode(), "SpeechRecoContext HttpStatus code:{0} ,  ResponseText:{1}", args.HttpStatus.StatusCode, args.ResponseText);
     lock (this.thisLock)
     {
         if (this.Event == null)
         {
             // Context disposed before completion — nothing to signal.
             return;
         }
         this.Status = args.HttpStatus;
         this.Results = args.ResponseText;
         // Publish first, then wake the waiter: Status/Results must be set
         // before Set() releases anyone blocked on the event.
         this.Event.Set();
     }
 }
Пример #5
0
        // Token: 0x06001C72 RID: 7282 RVA: 0x00071B3C File Offset: 0x0006FD3C
        // APM completion callback for the local RPC recognize call: harvests the RPC
        // result, maps its error code to an HTTP status, and forwards both to the
        // completion delegate that was threaded through the RPC client's State.
        // Any exception (including EndRecognize failures) is routed to
        // HandleUnexpectedException along with the callback so the request still completes.
        private void OnRecognizeCompleted(IAsyncResult asyncResult)
        {
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug<Guid, string>((long)this.GetHashCode(), "LocalSpeechRecognition.OnRecognizeCompleted - RequestId='{0}' RequestType='{1}'", base.Parameters.RequestId, base.Parameters.RequestType.ToString());
            base.CollectAndLogStatisticsInformation(MobileSpeechRecoRequestStepLogId.RecognizeCompleted, -1);
            MobileSpeechRecoRpcClient rpcClient = (MobileSpeechRecoRpcClient)asyncResult.AsyncState;
            // NOTE(review): 'as' yields null if State holds an unexpected type; a null
            // callback would then fault inside the try and be handled below.
            SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedDelegate completedCallback = rpcClient.State as SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedDelegate;
            try
            {
                MobileRecoRPCAsyncCompletedArgs rpcArgs = rpcClient.EndRecognize(asyncResult);
                SpeechRecognitionProcessor.SpeechHttpStatus mappedStatus = LocalSpeechRecognition.MapRpcErrorCodeToHttpErrorCode(rpcArgs.ErrorCode);
                completedCallback(new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(rpcArgs.Result, mappedStatus));
            }
            catch (Exception e)
            {
                this.HandleUnexpectedException(e, completedCallback);
            }
        }
Пример #6
0
 // Token: 0x06001D63 RID: 7523 RVA: 0x000755B4 File Offset: 0x000737B4
 // Finalizes the speech request exactly once: copies the status/response onto the
 // async result, settles the throttling budget (enforcing any required delay), and
 // marks the result completed. If the request was already completed, this is a no-op
 // (duplicate completions are traced and ignored).
 // <param name="args">Final status and response text for the request.</param>
 private void CompleteRequest(SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs args)
 {
     if (this.IsSpeechRequestNotCompleted())
     {
         ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int, string>((long)this.GetHashCode(), "SpeechRecognitionProcessor - Status code='{0}', Status message='{1}'", args.HttpStatus.StatusCode, args.HttpStatus.StatusDescription);
         this.CollectAndLogStatisticsInformation(SpeechLoggerProcessType.RequestCompleted, -1);
         // Copy the outcome onto the async result; throttling fields default to
         // "no delay applied" and are overwritten below if a delay was enforced.
         this.asyncResult.StatusCode                  = args.HttpStatus.StatusCode;
         this.asyncResult.StatusDescription           = args.HttpStatus.StatusDescription;
         this.asyncResult.ResponseText                = args.ResponseText;
         this.asyncResult.ThrottlingDelay             = -1.0;
         this.asyncResult.ThrottlingNotEnforcedReason = string.Empty;
         if (this.budget != null)
         {
             try
             {
                 // Ordering matters: end the local charge, enforce any resource-load
                 // delay (may block up to the enforced delay), then end the connection.
                 this.budget.EndLocal();
                 DelayEnforcementResults delayEnforcementResults = ResourceLoadDelayInfo.EnforceDelay(this.budget, new WorkloadSettings(WorkloadType.OwaVoice, false), null, TimeSpan.MaxValue, null);
                 if (delayEnforcementResults != null && delayEnforcementResults.DelayInfo != null)
                 {
                     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug((long)this.GetHashCode(), "SpeechRecognitionProcessor - Request id={0}, Delayed amount={1}s, Capped delay={2}s, Delay Required={3}, NotEnforcedReason={4}", new object[]
                     {
                         this.RequestId,
                         delayEnforcementResults.DelayedAmount.TotalSeconds,
                         delayEnforcementResults.DelayInfo.Delay.TotalSeconds,
                         delayEnforcementResults.DelayInfo.Required,
                         delayEnforcementResults.NotEnforcedReason
                     });
                     // Surface the actual delay applied (and why it wasn't, if skipped).
                     this.asyncResult.ThrottlingDelay             = delayEnforcementResults.DelayedAmount.TotalSeconds;
                     this.asyncResult.ThrottlingNotEnforcedReason = delayEnforcementResults.NotEnforcedReason;
                 }
                 this.budget.EndConnection();
             }
             finally
             {
                 // Always dispose and null the budget, even if delay enforcement threw,
                 // so a later completion attempt cannot touch a disposed budget.
                 this.budget.Dispose();
                 this.budget = null;
             }
         }
         // Mark completed last: once this flips, observers may consume the result.
         this.asyncResult.IsCompleted = true;
         return;
     }
     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug((long)this.GetHashCode(), "SpeechRecognitionProcessor.CompleteRequest: speech request already completed, ignoring this request.");
 }
Пример #7
0
 // Token: 0x06001D62 RID: 7522 RVA: 0x0007551C File Offset: 0x0007371C
 // Thread-pool work item: post-processes raw recognition results into the final
 // response and completes the request. ArgumentException maps to 400 Bad Request;
 // anything else goes through the unexpected-exception path.
 // <param name="state">Boxed SpeechProcessorAsyncCompletedArgs from the recognize phase.</param>
 private void HandleRecoResults(object state)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug((long)this.GetHashCode(), "Entering SpeechRecognitionProcessor.HandleRecoResults");
     try
     {
         SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs recoArgs =
             (SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs)state;
         string finalResponseText;
         SpeechRecognitionProcessor.SpeechHttpStatus finalStatus;
         // Transform the raw reco result into the caller-facing response/status.
         SpeechRecognitionResultHandler.HandleRecoResult(recoArgs.ResponseText, this.parameters, this.HttpContext, this.userContext, out finalResponseText, out finalStatus);
         this.CompleteRequest(new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(finalResponseText, finalStatus));
     }
     catch (ArgumentException e)
     {
         // Malformed input/result — report as a client error.
         this.HandleException(e, SpeechRecognitionProcessor.SpeechHttpStatus.BadRequest);
     }
     catch (Exception e2)
     {
         this.HandleUnexpectedException(e2);
     }
 }
Пример #8
0
 // Completion callback for this recognition helper. Both outcomes funnel into a
 // HandleResults work item: on success the original args are forwarded (after
 // recording the highest-confidence result), on failure a fresh args instance
 // with an empty response and the failing status is queued instead.
 private void OnRecognizeCompleted(SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs args)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug<MobileSpeechRecoRequestType>((long)this.GetHashCode(), "Entering SpeechRecognition.OnRecognizeCompleted request type:{0}", this.RequestType);
     try
     {
         SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs workItemArgs;
         if (args.HttpStatus == SpeechRecognitionProcessor.SpeechHttpStatus.Success)
         {
             // Record confidence/result type from the raw response before handing off.
             this.GetHighestConfidenceValueAndResultTypeFromResult(args.ResponseText);
             workItemArgs = args;
         }
         else
         {
             ExTraceGlobals.SpeechRecognitionTracer.TraceDebug<int, string>((long)this.GetHashCode(), "SpeechRecognition.OnRecognizeCompleted not successful,  HttpStatus Code:{0} HttpStatus Description:{1}", args.HttpStatus.StatusCode, args.HttpStatus.StatusDescription);
             workItemArgs = new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(string.Empty, args.HttpStatus);
         }
         ThreadPool.QueueUserWorkItem(new WaitCallback(this.HandleResults), workItemArgs);
     }
     catch (Exception e)
     {
         this.HandleUnexpectedException(e);
     }
 }
Пример #9
0
 // Completion callback for the add-reco-request phase. On success, marks the
 // request ready under the lock and starts consuming audio if both sides are
 // ready; on failure, queues a HandleResults work item carrying the failing
 // status with an empty response. Any exception is routed to
 // HandleUnexpectedException.
 //
 // Rewritten without the decompiler's `goto IL_A3` artifact: the original control
 // flow is exactly "success => lock/ready/consume, failure => trace/queue", which
 // a plain if/else expresses with identical behavior.
 private void OnAddRecoRequestCompleted(SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs args)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug<MobileSpeechRecoRequestType>((long)this.GetHashCode(), "Entering SpeechRecognition.OnAddRecoRequestCompleted request type:{0}", this.RequestType);
     try
     {
         if (args.HttpStatus == SpeechRecognitionProcessor.SpeechHttpStatus.Success)
         {
             lock (this.thisLock)
             {
                 this.addRecoRequestReady = true;
                 // Audio may already be buffered; kick off consumption now that
                 // the reco request is registered.
                 this.ConsumeAudioIfReady();
             }
         }
         else
         {
             ExTraceGlobals.SpeechRecognitionTracer.TraceDebug<int, string>((long)this.GetHashCode(), "SpeechRecognition.OnAddRecoRequestCompleted not successful,  HttpStatus Code:{0} HttpStatus Description:{1}", args.HttpStatus.StatusCode, args.HttpStatus.StatusDescription);
             SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs state = new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(string.Empty, args.HttpStatus);
             ThreadPool.QueueUserWorkItem(new WaitCallback(this.HandleResults), state);
         }
     }
     catch (Exception e)
     {
         this.HandleUnexpectedException(e);
     }
 }
Пример #10
0
 // Token: 0x06001CDD RID: 7389 RVA: 0x00073AF8 File Offset: 0x00071CF8
 // Delivers the final results to the registered handler callback, then disposes
 // the recognition helpers. Order matters: the callback must see the results
 // before helper teardown begins.
 // <param name="argResults">Final results/status to hand to the callback.</param>
 private void InvokeHandlerCallbackAndDisposeHelpers(SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs argResults)
 {
     this.resultHandlerCallback(argResults);
     this.DisposeRecognitionHelpers();
 }
Пример #11
0
 // Token: 0x06001CDC RID: 7388 RVA: 0x00073974 File Offset: 0x00071B74
 // Called as each recognition helper finishes: decides whether the scenario can
 // produce its final result yet. If the finishing helper detected no speech, that
 // result wins immediately. Otherwise all helpers' results must be in; the one
 // with the highest confidence is formatted and delivered. Entirely guarded by
 // thisLock so only one caller can win the race to process final results.
 // <param name="helper">The recognition helper whose results just arrived.</param>
 private void ProcessWaitingPriorityProcessor(SpeechRecognition helper)
 {
     lock (this.thisLock)
     {
         if (!this.resultsProcessed)
         {
             // Optimistically claim processing; reverted below if some helper's
             // results are still outstanding.
             this.resultsProcessed = true;
             if (helper.Results.HttpStatus == SpeechRecognitionProcessor.SpeechHttpStatus.NoSpeechDetected)
             {
                 // No speech short-circuits everything — deliver immediately
                 // without waiting for the other helpers.
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>((long)this.GetHashCode(), "ProcessWaitingPriorityProcessor: No Speech Detected from Speech Recognition:'{0}'", helper.RequestType.ToString());
                 this.InvokeHandlerCallbackAndDisposeHelpers(helper.Results);
             }
             else
             {
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug((long)this.GetHashCode(), "ProcessWaitingPriorityProcessor: Final Results hasnt been processed yet. Initiate processing...");
                 // Track the best candidate so far, seeded with the caller's helper.
                 SpeechRecognition speechRecognition = helper;
                 foreach (KeyValuePair <MobileSpeechRecoRequestType, SpeechRecognition> keyValuePair in this.RecognitionHelpers)
                 {
                     if (!keyValuePair.Value.ResultsReceived)
                     {
                         // A helper is still running: undo the claim and bail out;
                         // that helper's own completion will re-enter this method.
                         ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <MobileSpeechRecoRequestType>((long)this.GetHashCode(), "ProcessWaitingPriorityProcessor Recognition: {0} result is not available yet and will be waited on", keyValuePair.Key);
                         this.resultsProcessed = false;
                         break;
                     }
                     // Keep whichever helper reports the higher confidence.
                     if (speechRecognition.HighestConfidenceResult < keyValuePair.Value.HighestConfidenceResult)
                     {
                         speechRecognition = keyValuePair.Value;
                     }
                 }
                 if (this.resultsProcessed)
                 {
                     // All results are in: deliver the highest-confidence winner.
                     SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs formattedResultsForHighestConfidenceProcessor = this.GetFormattedResultsForHighestConfidenceProcessor(speechRecognition);
                     this.InvokeHandlerCallbackAndDisposeHelpers(formattedResultsForHighestConfidenceProcessor);
                 }
             }
         }
         else
         {
             // Another caller already delivered final results — nothing to do.
             ExTraceGlobals.SpeechRecognitionTracer.TraceDebug((long)this.GetHashCode(), "ProcessWaitingPriorityProcessor: Final Results already processed. Skip processing stage.");
         }
     }
 }