Example #1
        // Token: 0x06001D38 RID: 7480 RVA: 0x00074628 File Offset: 0x00072828
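        // Merges the GAL and personal-contacts recognition results. A helper's response text is
        // used only when its request succeeded and its result type matches the highest-confidence
        // result type; if both requests failed, the personal-contacts status is returned with an
        // empty response.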
        internal static SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs GetCombinedPeopleSearchResult(SpeechRecognition galRecoHelper, SpeechRecognition personalContactsRecoHelper, MobileSpeechRecoResultType highestRecoResultType)
        {
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>(0L, "Gal Result response text:'{0}'", galRecoHelper.Results.ResponseText);
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>(0L, "Personal Contacts Result response text:'{0}'", personalContactsRecoHelper.Results.ResponseText);
            string galResults = string.Empty;
            string personalContactsResults = string.Empty;

            if (galRecoHelper.Results.HttpStatus == SpeechRecognitionProcessor.SpeechHttpStatus.Success && galRecoHelper.ResultType == highestRecoResultType)
            {
                galResults = galRecoHelper.Results.ResponseText;
            }
            if (personalContactsRecoHelper.Results.HttpStatus == SpeechRecognitionProcessor.SpeechHttpStatus.Success && personalContactsRecoHelper.ResultType == highestRecoResultType)
            {
                personalContactsResults = personalContactsRecoHelper.Results.ResponseText;
            }
            string text = SpeechRecognitionUtils.CombineGALandPersonalContactXMLResults(galResults, personalContactsResults, highestRecoResultType);

            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>(0L, "Response Text to send to ResultHandler:'{0}'", text);
            SpeechRecognitionProcessor.SpeechHttpStatus httpStatus = SpeechRecognitionProcessor.SpeechHttpStatus.Success;
            if (galRecoHelper.Results.HttpStatus != SpeechRecognitionProcessor.SpeechHttpStatus.Success && personalContactsRecoHelper.Results.HttpStatus != SpeechRecognitionProcessor.SpeechHttpStatus.Success)
            {
                httpStatus = personalContactsRecoHelper.Results.HttpStatus;
                text       = string.Empty;
            }
            return new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(text, httpStatus);
        }
Example #2
 // Token: 0x06001D68 RID: 7528 RVA: 0x000758D8 File Offset: 0x00073AD8
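 // Logs the recognition failure to the tracer and event log, then completes the request
 // with an empty result carrying the supplied HTTP status.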
 private void HandleException(Exception e, SpeechRecognitionProcessor.SpeechHttpStatus status)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceError <Exception, int, string>((long)this.GetHashCode(), "SpeechRecognitionProcessor - Exception='{0}', Status Code='{1}', Status Description='{2}'", e, status.StatusCode, status.StatusDescription);
     UmGlobals.ExEvent.LogEvent(UMEventLogConstants.Tuple_SpeechRecoRequestFailed, null, new object[]
     {
         this.RequestId,
         this.UserObjectGuid,
         this.TenantGuid,
         CommonUtil.ToEventLogString(e)
     });
     this.CompleteRequest(new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(string.Empty, status));
 }
Example #3
 // Token: 0x06001CE0 RID: 7392 RVA: 0x00073BF0 File Offset: 0x00071DF0
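 // Failure path for the scenario base class: logs the event using the scenario's request
 // parameters, then invokes the handler callback with an empty result and disposes the
 // recognition helpers.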
 private void HandleException(Exception e, SpeechRecognitionProcessor.SpeechHttpStatus status)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceError <Exception, int, string>((long)this.GetHashCode(), "SpeechRecognitionScenarioBase - Exception='{0}', Status Code='{1}', Status Description='{2}'", e, status.StatusCode, status.StatusDescription);
     UmGlobals.ExEvent.LogEvent(UMEventLogConstants.Tuple_SpeechRecoRequestFailed, null, new object[]
     {
         this.Parameters.RequestId,
         this.Parameters.UserObjectGuid,
         this.Parameters.TenantGuid,
         CommonUtil.ToEventLogString(e)
     });
     SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs argResults = new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(string.Empty, status);
     this.InvokeHandlerCallbackAndDisposeHelpers(argResults);
 }
Example #4
 // Token: 0x06001C74 RID: 7284 RVA: 0x00071C18 File Offset: 0x0006FE18
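 // RPC variant of the failure path: logs the event, maps the RPC error code to an HTTP
 // status, and invokes the completion callback with an empty result.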
 private void HandleException(Exception e, int errorCode, SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedDelegate callback)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceError <Exception, int>((long)this.GetHashCode(), "LocalSpeechRecognition - Exception='{0}', Error Code='{1}'", e, errorCode);
     UmGlobals.ExEvent.LogEvent(UMEventLogConstants.Tuple_SpeechRecoRequestFailed, null, new object[]
     {
         base.RequestId,
         base.Parameters.UserObjectGuid,
         base.Parameters.TenantGuid,
         CommonUtil.ToEventLogString(e)
     });
     SpeechRecognitionProcessor.SpeechHttpStatus httpStatus = LocalSpeechRecognition.MapRpcErrorCodeToHttpErrorCode(errorCode);
     callback(new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(string.Empty, httpStatus));
 }
Example #5
        // Token: 0x06001C72 RID: 7282 RVA: 0x00071B3C File Offset: 0x0006FD3C
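        // Completion callback for the async RPC recognize call: retrieves the completion delegate
        // stashed in the RPC client's state, maps the RPC error code to an HTTP status, and forwards
        // the recognition result; any exception is routed to HandleUnexpectedException.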
        private void OnRecognizeCompleted(IAsyncResult asyncResult)
        {
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <Guid, string>((long)this.GetHashCode(), "LocalSpeechRecognition.OnRecognizeCompleted - RequestId='{0}' RequestType='{1}'", base.Parameters.RequestId, base.Parameters.RequestType.ToString());
            base.CollectAndLogStatisticsInformation(MobileSpeechRecoRequestStepLogId.RecognizeCompleted, -1);
            MobileSpeechRecoRpcClient mobileSpeechRecoRpcClient = (MobileSpeechRecoRpcClient)asyncResult.AsyncState;

            SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedDelegate speechProcessorAsyncCompletedDelegate = mobileSpeechRecoRpcClient.State as SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedDelegate;
            try
            {
                MobileRecoRPCAsyncCompletedArgs             mobileRecoRPCAsyncCompletedArgs = mobileSpeechRecoRpcClient.EndRecognize(asyncResult);
                SpeechRecognitionProcessor.SpeechHttpStatus httpStatus            = LocalSpeechRecognition.MapRpcErrorCodeToHttpErrorCode(mobileRecoRPCAsyncCompletedArgs.ErrorCode);
                SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs args = new SpeechRecognitionProcessor.SpeechProcessorAsyncCompletedArgs(mobileRecoRPCAsyncCompletedArgs.Result, httpStatus);
                speechProcessorAsyncCompletedDelegate(args);
            }
            catch (Exception e)
            {
                this.HandleUnexpectedException(e, speechProcessorAsyncCompletedDelegate);
            }
        }
Example #6
 public void ProcessAndFormatSpeechRecognitionResults(string result, out string jsonResponse, out SpeechRecognitionProcessor.SpeechHttpStatus httpStatus)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug((long)this.GetHashCode(), "Entering DateTimeandDurationRecognitionResultHandler.ProcessAndFormatSpeechRecognitionResults");
     jsonResponse = null;
     httpStatus   = SpeechRecognitionProcessor.SpeechHttpStatus.Success;
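      // Pull the recognized fields out of the grammar's XML output. Expected elements are
      // Day, Month, Year, DurationInMinutes, StartHour, StartMinute and RecoEvent; each value
      // is range-checked, and an out-of-range value throws ArgumentException.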
     using (XmlReader xmlReader = XmlReader.Create(new StringReader(result)))
     {
         int num  = 0;
         int num2 = 0;
         int num3 = 0;
         int num4 = 0;
         CalendarSpeechRecoResultType calendarSpeechRecoResultType = CalendarSpeechRecoResultType.None;
         int num5 = 0;
         int num6 = 0;
         while (xmlReader.Read())
         {
             if (xmlReader.IsStartElement("Day"))
             {
                 string text = xmlReader.ReadString();
                 if (!int.TryParse(text, NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out num) || num < 1 || num > 31)
                 {
                     throw new ArgumentException("Invalid day value " + text);
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int>((long)this.GetHashCode(), "Valid Day value read: {0}", num);
             }
             if (xmlReader.IsStartElement("Month"))
             {
                 string text2 = xmlReader.ReadString();
                 if (!int.TryParse(text2, NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out num2) || num2 < 1 || num2 > 12)
                 {
                     throw new ArgumentException("Invalid month value " + text2);
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int>((long)this.GetHashCode(), "Valid Month value read: {0}", num2);
             }
             if (xmlReader.IsStartElement("Year"))
             {
                 string text3 = xmlReader.ReadString();
                 if (!int.TryParse(text3, NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out num3) || num3 < 2000 || num3 > 2099)
                 {
                     throw new ArgumentException("Invalid year value " + text3);
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int>((long)this.GetHashCode(), "Valid Year value read: {0}", num3);
             }
             if (xmlReader.IsStartElement("DurationInMinutes"))
             {
                 string text4 = xmlReader.ReadString();
                 if (!int.TryParse(text4, NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out num4) || num4 < 0)
                 {
                     throw new ArgumentException("Invalid duration value " + text4);
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int>((long)this.GetHashCode(), "Valid Duration value read: {0}", num4);
             }
             if (xmlReader.IsStartElement("StartHour"))
             {
                 string text5 = xmlReader.ReadString();
                 if (!int.TryParse(text5, NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out num5) || num5 < 0 || num5 > 24)
                 {
                     throw new ArgumentException("Invalid hour value " + text5);
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int>((long)this.GetHashCode(), "Valid Start Hour value read: {0}", num5);
             }
             if (xmlReader.IsStartElement("StartMinute"))
             {
                 string text6 = xmlReader.ReadString();
                 if (!int.TryParse(text6, NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out num6) || num6 < 0 || num6 > 60)
                 {
                     throw new ArgumentException("Invalid hour value " + text6);
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int>((long)this.GetHashCode(), "Valid Start Minute value read: {0}", num6);
             }
             if (xmlReader.IsStartElement("RecoEvent"))
             {
                 string text7 = xmlReader.ReadString();
                 if (string.Equals(text7, "recoCompleteDateWithStartTime", StringComparison.OrdinalIgnoreCase))
                 {
                     calendarSpeechRecoResultType = CalendarSpeechRecoResultType.CompleteDateWithStartTime;
                 }
                 else if (string.Equals(text7, "recoCompleteDate", StringComparison.OrdinalIgnoreCase))
                 {
                     calendarSpeechRecoResultType = CalendarSpeechRecoResultType.CompleteDate;
                 }
                 else
                 {
                     if (!string.Equals(text7, "recoCompleteDateWithStartTimeAndDuration", StringComparison.OrdinalIgnoreCase))
                     {
                         throw new ArgumentException("Invalid RecoResultType: " + text7);
                     }
                     calendarSpeechRecoResultType = CalendarSpeechRecoResultType.CompleteDateWithStartTimeAndDuration;
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <CalendarSpeechRecoResultType>((long)this.GetHashCode(), "Valid RecoEvent value read: {0}", calendarSpeechRecoResultType);
             }
         }
         DayTimeDurationRecoResult[] obj;
         if (calendarSpeechRecoResultType == CalendarSpeechRecoResultType.None)
         {
             obj = new DayTimeDurationRecoResult[0];
         }
         else
         {
             if (num == 0 || num2 == 0 || num3 == 0 || num4 == 0)
             {
                 throw new ArgumentException("No valid results from speech recognition");
             }
             DayTimeDurationRecoResult dayTimeDurationRecoResult = new DayTimeDurationRecoResult();
             dayTimeDurationRecoResult.ResultType  = calendarSpeechRecoResultType;
             dayTimeDurationRecoResult.Date        = new ExDateTime(this.timeZone, num3, num2, num, num5, num6, 0).ToString("s");
             dayTimeDurationRecoResult.AllDayEvent = (num5 == 0 && num6 == 0 && num4 >= 1440 && num4 % 1440 == 0);
             if (dayTimeDurationRecoResult.AllDayEvent)
             {
                 dayTimeDurationRecoResult.Duration = num4 - 1;
             }
             else
             {
                 dayTimeDurationRecoResult.Duration = num4;
             }
             obj = new DayTimeDurationRecoResult[]
             {
                 dayTimeDurationRecoResult
             };
         }
         jsonResponse = DayTimeDurationRecoResult.JsonSerialize(obj);
     }
 }
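For reference, a minimal sketch of the input this parser appears to expect, built only from the element names read above (the root element name and the exact grammar output schema are assumptions; they are not shown in this example):

    <DateTimeDuration>
        <RecoEvent>recoCompleteDateWithStartTimeAndDuration</RecoEvent>
        <Day>14</Day>
        <Month>3</Month>
        <Year>2013</Year>
        <StartHour>9</StartHour>
        <StartMinute>30</StartMinute>
        <DurationInMinutes>60</DurationInMinutes>
    </DateTimeDuration>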
Example #7
        public void ProcessAndFormatSpeechRecognitionResults(string result, out string jsonResponse, out SpeechRecognitionProcessor.SpeechHttpStatus httpStatus)
        {
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>((long)this.GetHashCode(), "Entering CombinedScenariosRecognitionResultHandler.ProcessAndFormatSpeechRecognitionResults with results '{0}'", result);
            jsonResponse = null;
            httpStatus   = SpeechRecognitionProcessor.SpeechHttpStatus.Success;
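            // Parse the scenario type from the recognizer's XML output, then dispatch to the
            // matching per-scenario result handler; MobileSpeechRecoResultType.None yields an
            // empty result array below.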
            MobileSpeechRecoResultType            mobileSpeechRecoResultType           = SpeechRecognitionUtils.ParseMobileScenarioXML(result);
            IMobileSpeechRecognitionResultHandler mobileSpeechRecognitionResultHandler = null;

            switch (mobileSpeechRecoResultType)
            {
            case MobileSpeechRecoResultType.DaySearch:
                mobileSpeechRecognitionResultHandler = new DaySearchRecognitionResultHandler(this.parameters.TimeZone);
                break;

            case MobileSpeechRecoResultType.AppointmentCreation:
                mobileSpeechRecognitionResultHandler = new DateTimeandDurationRecognitionResultHandler(this.parameters.TimeZone);
                break;

            case MobileSpeechRecoResultType.FindPeople:
                mobileSpeechRecognitionResultHandler = new FindPeopleSpeechRecognitionResultHandler(this.parameters, this.userContext, this.httpContext);
                break;

            case MobileSpeechRecoResultType.EmailPeople:
                mobileSpeechRecognitionResultHandler = new EmailPeopleSpeechRecognitionResultHandler(this.parameters, this.userContext, this.httpContext);
                break;

            case MobileSpeechRecoResultType.None:
                mobileSpeechRecognitionResultHandler = null;
                break;

            default:
                ExAssert.RetailAssert(false, "Invalid result type '{0}'", new object[]
                {
                    mobileSpeechRecoResultType.ToString()
                });
                break;
            }
            string text = string.Empty;

            CombinedScenarioRecoResult[] obj;
            if (mobileSpeechRecoResultType == MobileSpeechRecoResultType.None)
            {
                obj = new CombinedScenarioRecoResult[0];
            }
            else
            {
                mobileSpeechRecognitionResultHandler.ProcessAndFormatSpeechRecognitionResults(result, out jsonResponse, out httpStatus);
                if (httpStatus != SpeechRecognitionProcessor.SpeechHttpStatus.Success)
                {
                    return;
                }
                CombinedScenarioRecoResult combinedScenarioRecoResult = new CombinedScenarioRecoResult();
                combinedScenarioRecoResult.RequestId = this.parameters.RequestId.ToString("N", CultureInfo.InvariantCulture);
                text = this.GetResultTextForLogging(result);
                combinedScenarioRecoResult.Text         = text;
                combinedScenarioRecoResult.JsonResponse = jsonResponse;
                combinedScenarioRecoResult.ResultType   = CombinedScenarioRecoResult.MapSpeechRecoResultTypeToCombinedRecoResultType(mobileSpeechRecoResultType);
                obj = new CombinedScenarioRecoResult[]
                {
                    combinedScenarioRecoResult
                };
            }
            jsonResponse = CombinedScenarioRecoResult.JsonSerialize(obj);
            this.CollectAndLogStatisticsInformation(mobileSpeechRecoResultType, text);
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>((long)this.GetHashCode(), "Return json from CombinedScenarioResult: '{0}'", jsonResponse);
        }
Example #8
        public override void ProcessAndFormatSpeechRecognitionResults(string result, out string jsonResponse, out SpeechRecognitionProcessor.SpeechHttpStatus httpStatus)
        {
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>((long)this.GetHashCode(), "Entering EmailPeopleSearchRecognitionResultHandler.ProcessAndFormatSpeechRecognitionResults with results '{0}'", result);
            jsonResponse = null;
            httpStatus   = SpeechRecognitionProcessor.SpeechHttpStatus.Success;
            List <Persona> uniquePersonaList = base.GetUniquePersonaList(result);
            List <Persona> list = new List <Persona>();
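            // Keep only the personas that have at least one e-mail address; if none remain,
            // report NoContactWithEmailAddress instead of a result set.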

            foreach (Persona persona in uniquePersonaList)
            {
                if (persona.EmailAddresses != null && persona.EmailAddresses.Length > 0)
                {
                    list.Add(persona);
                }
            }
            if (list.Count == 0)
            {
                httpStatus = SpeechRecognitionProcessor.SpeechHttpStatus.NoContactWithEmailAddress;
                return;
            }
            jsonResponse = SpeechRecognitionResultHandler.JsonSerialize(list.ToArray());
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>((long)this.GetHashCode(), "Persona array json:{0}", jsonResponse);
        }
Example #9
        // Token: 0x06001CCB RID: 7371 RVA: 0x00073480 File Offset: 0x00071680
        public static void HandleRecoResult(string result, RequestParameters parameters, HttpContext httpContext, UserContext userContext, out string jsonResponse, out SpeechRecognitionProcessor.SpeechHttpStatus httpStatus)
        {
            jsonResponse = null;
            httpStatus   = SpeechRecognitionProcessor.SpeechHttpStatus.Success;
            if (string.IsNullOrEmpty(result))
            {
                return;
            }
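            // Dispatch to the result handler that matches the request type; an unrecognized
            // type trips the retail assert below.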
            IMobileSpeechRecognitionResultHandler mobileSpeechRecognitionResultHandler = null;

            switch (parameters.RequestType)
            {
            case MobileSpeechRecoRequestType.FindPeople:
                mobileSpeechRecognitionResultHandler = new FindPeopleSpeechRecognitionResultHandler(parameters, userContext, httpContext);
                break;

            case MobileSpeechRecoRequestType.CombinedScenarios:
                mobileSpeechRecognitionResultHandler = new CombinedScenarioRecognitionResultHandler(parameters, userContext, httpContext);
                break;

            case MobileSpeechRecoRequestType.DaySearch:
                mobileSpeechRecognitionResultHandler = new DaySearchRecognitionResultHandler(parameters.TimeZone);
                break;

            case MobileSpeechRecoRequestType.AppointmentCreation:
                mobileSpeechRecognitionResultHandler = new DateTimeandDurationRecognitionResultHandler(parameters.TimeZone);
                break;

            default:
                ExAssert.RetailAssert(false, "Invalid request type '{0}'", new object[]
                {
                    parameters.RequestType
                });
                break;
            }
            mobileSpeechRecognitionResultHandler.ProcessAndFormatSpeechRecognitionResults(result, out jsonResponse, out httpStatus);
        }
Example #10
 public void ProcessAndFormatSpeechRecognitionResults(string result, out string jsonResponse, out SpeechRecognitionProcessor.SpeechHttpStatus httpStatus)
 {
     ExTraceGlobals.SpeechRecognitionTracer.TraceDebug((long)this.GetHashCode(), "Entering DaySearchRecognitionResultHandler.ProcessAndFormatSpeechRecognitionResults");
     jsonResponse = null;
     httpStatus   = SpeechRecognitionProcessor.SpeechHttpStatus.Success;
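      // DaySearch variant: only Day, Month, Year and the RecoEvent (partial vs. complete date)
      // are read; the emitted result always has a zero duration and is not an all-day event.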
     using (XmlReader xmlReader = XmlReader.Create(new StringReader(result)))
     {
         int num  = 0;
         int num2 = 0;
         int num3 = 0;
         CalendarSpeechRecoResultType calendarSpeechRecoResultType = CalendarSpeechRecoResultType.None;
         while (xmlReader.Read())
         {
             if (xmlReader.IsStartElement("Day"))
             {
                 string text = xmlReader.ReadString();
                 if (!int.TryParse(text, NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out num) || num < 1 || num > 31)
                 {
                     throw new ArgumentException("Invalid day value " + text);
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int>((long)this.GetHashCode(), "Valid day value read: {0}", num);
             }
             if (xmlReader.IsStartElement("Month"))
             {
                 string text2 = xmlReader.ReadString();
                 if (!int.TryParse(text2, NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out num2) || num2 < 1 || num2 > 12)
                 {
                     throw new ArgumentException("Invalid month value " + text2);
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int>((long)this.GetHashCode(), "Valid month value read: {0}", num2);
             }
             if (xmlReader.IsStartElement("Year"))
             {
                 string text3 = xmlReader.ReadString();
                 if (!int.TryParse(text3, NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out num3) || num3 < 1999 || num3 > 2100)
                 {
                     throw new ArgumentException("Invalid year value " + text3);
                 }
                  ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <int>((long)this.GetHashCode(), "Valid year value read: {0}", num3);
             }
             if (xmlReader.IsStartElement("RecoEvent"))
             {
                 string a = xmlReader.ReadString();
                 if (string.Equals(a, "recoPartialDate", StringComparison.OrdinalIgnoreCase))
                 {
                     calendarSpeechRecoResultType = CalendarSpeechRecoResultType.PartialDate;
                 }
                 else
                 {
                     if (!string.Equals(a, "recoCompleteDate", StringComparison.OrdinalIgnoreCase))
                     {
                         throw new ArgumentException("Invalid RecoResultType");
                     }
                     calendarSpeechRecoResultType = CalendarSpeechRecoResultType.CompleteDate;
                 }
                 ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <CalendarSpeechRecoResultType>((long)this.GetHashCode(), "Valid RecoEvent value read: {0}", calendarSpeechRecoResultType);
             }
         }
         DayTimeDurationRecoResult[] obj;
         if (calendarSpeechRecoResultType == CalendarSpeechRecoResultType.None)
         {
             obj = new DayTimeDurationRecoResult[0];
         }
         else
         {
             if (num == 0 || num2 == 0 || num3 == 0)
             {
                 throw new ArgumentException("No valid results from speech recognition");
             }
             obj = new DayTimeDurationRecoResult[]
             {
                 new DayTimeDurationRecoResult
                 {
                     ResultType  = calendarSpeechRecoResultType,
                     Date        = new ExDateTime(this.timeZone, num3, num2, num).ToString("s"),
                     Duration    = 0,
                     AllDayEvent = false
                 }
             };
         }
         jsonResponse = DayTimeDurationRecoResult.JsonSerialize(obj);
     }
 }
Example #11
 public InvalidRequestSpeechRecognitionScenario(SpeechRecognitionProcessor.SpeechHttpStatus status) : base(null, null)
 {
     ValidateArgument.NotNull(status, "status");
     this.status = status;
 }
Example #12
        // Token: 0x06001CAB RID: 7339 RVA: 0x00072A34 File Offset: 0x00070C34
        public virtual void ProcessAndFormatSpeechRecognitionResults(string result, out string jsonResponse, out SpeechRecognitionProcessor.SpeechHttpStatus httpStatus)
        {
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>((long)this.GetHashCode(), "Entering FindPeopleSearchRecognitionResultHandler.ProcessAndFormatSpeechRecognitionResults with results '{0}'", result);
            jsonResponse = null;
            httpStatus   = SpeechRecognitionProcessor.SpeechHttpStatus.Success;
            List <Persona> uniquePersonaList = this.GetUniquePersonaList(result);

            jsonResponse = SpeechRecognitionResultHandler.JsonSerialize(uniquePersonaList.ToArray());
            ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <string>((long)this.GetHashCode(), "Persona array json:{0}", jsonResponse);
        }
Example #13
        // Token: 0x06001D57 RID: 7511 RVA: 0x00074C30 File Offset: 0x00072E30
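        // Builds the scenario for an incoming request: parses the query-string parameters and
        // user identity, constructs the RequestParameters, selects a scenario implementation by
        // request type, and initializes the throttling budget. Any failure is logged and mapped
        // to an InvalidRequestSpeechRecognitionScenario carrying the corresponding HTTP status.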
        private static SpeechRecognitionScenarioBase CreateSpeechRecognitionScenario(HttpContext httpContext, out IStandardBudget budget)
        {
            ValidateArgument.NotNull(httpContext, "httpContext is null");
            budget = null;
            Exception ex = null;
            SpeechRecognitionScenarioBase result = null;

            try
            {
                Guid   guid = Guid.NewGuid();
                string text;
                MobileSpeechRecoRequestType mobileSpeechRecoRequestType;
                CultureInfo cultureInfo;
                ExTimeZone  exTimeZone;
                SpeechRecognitionProcessor.GetQueryStringParameters(httpContext.Request, out text, out mobileSpeechRecoRequestType, out cultureInfo, out exTimeZone);
                ExTraceGlobals.SpeechRecognitionTracer.TraceDebug(0L, "SpeechRecognitionProcessor.CreateSpeechRecognitionProcessor - requestId='{0}', tag='{1}', requestType='{2}', culture='{3}', timeZone='{4}'", new object[]
                {
                    guid,
                    text,
                    mobileSpeechRecoRequestType,
                    cultureInfo,
                    exTimeZone
                });
                Guid           guid2;
                Guid           guid3;
                OrganizationId organizationId;
                UserContext    userContext;
                SpeechRecognitionProcessor.GetUserIdentity(httpContext, out guid2, out guid3, out organizationId, out userContext);
                ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <Guid, Guid, OrganizationId>(0L, "SpeechRecognitionProcessor.CreateSpeechRecognitionProcessor - userObjectGuid='{0}', tenantGuid='{1}', orgId='{2}'", guid2, guid3, organizationId);
                RequestParameters requestParameters = new RequestParameters(guid, text, mobileSpeechRecoRequestType, cultureInfo, exTimeZone, guid2, guid3, organizationId);
                switch (mobileSpeechRecoRequestType)
                {
                case MobileSpeechRecoRequestType.FindInGAL:
                case MobileSpeechRecoRequestType.FindInPersonalContacts:
                case MobileSpeechRecoRequestType.StaticGrammarsCombined:
                    throw new ArgumentOutOfRangeException("operation", mobileSpeechRecoRequestType, "Invalid parameter");

                case MobileSpeechRecoRequestType.FindPeople:
                    result = new FindPeopleSpeechRecognitionScenario(requestParameters, userContext);
                    break;

                case MobileSpeechRecoRequestType.CombinedScenarios:
                    result = new CombinedSpeechRecognitionScenario(requestParameters, userContext);
                    break;

                case MobileSpeechRecoRequestType.DaySearch:
                case MobileSpeechRecoRequestType.AppointmentCreation:
                    result = new SingleSpeechRecognitionScenario(requestParameters, userContext);
                    break;

                default:
                    ExAssert.RetailAssert(false, "Invalid request type '{0}'", new object[]
                    {
                        mobileSpeechRecoRequestType
                    });
                    break;
                }
                UmGlobals.ExEvent.LogEvent(UMEventLogConstants.Tuple_SpeechRecoRequestParams, null, new object[]
                {
                    guid,
                    text,
                    mobileSpeechRecoRequestType,
                    cultureInfo,
                    exTimeZone,
                    guid2,
                    guid3,
                    organizationId
                });
                string      text2   = null;
                HttpRequest request = httpContext.Request;
                if (request.QueryString != null)
                {
                    text2 = request.QueryString.ToString();
                }
                if (request.Headers != null && !string.IsNullOrEmpty(request.Headers["X-OWA-CorrelationId"]))
                {
                    text2 = text2 + "." + request.Headers["X-OWA-CorrelationId"];
                }
                SpeechRecognitionProcessor.InitializeThrottlingBudget(userContext, text2, out budget);
            }
            catch (OverBudgetException ex2)
            {
                ex = ex2;
            }
            catch (ArgumentOutOfRangeException ex3)
            {
                ex = ex3;
            }
            catch (Exception ex4)
            {
                ex = ex4;
                ExWatson.SendReport(ex4, ReportOptions.None, null);
            }
            finally
            {
                if (ex != null)
                {
                    ExTraceGlobals.SpeechRecognitionTracer.TraceDebug <Exception>(0L, "SpeechRecognitionProcessor.CreateSpeechRecognitionProcessor - Exception='{0}'", ex);
                    UmGlobals.ExEvent.LogEvent(UMEventLogConstants.Tuple_InvalidSpeechRecoRequest, null, new object[]
                    {
                        CommonUtil.ToEventLogString(ex)
                    });
                    SpeechRecognitionProcessor.SpeechHttpStatus status = SpeechRecognitionProcessor.MapInvalidRequestToHttpStatus(ex);
                    result = new InvalidRequestSpeechRecognitionScenario(status);
                }
            }
            return result;
        }
Example #14
 // Token: 0x06001D73 RID: 7539 RVA: 0x00075A2B File Offset: 0x00073C2B
 public SpeechProcessorAsyncCompletedArgs(string responseText, SpeechRecognitionProcessor.SpeechHttpStatus httpStatus)
 {
     this.ResponseText = responseText;
     this.HttpStatus   = httpStatus;
 }
Example #15
 // Token: 0x06001D5F RID: 7519 RVA: 0x00075404 File Offset: 0x00073604
 private void SignalRecognizeWithEmptyAudioAndBailOut(SpeechRecognitionProcessor.SpeechStreamBuffer speechStreamBuffer, Exception e, SpeechRecognitionProcessor.SpeechHttpStatus status, bool expectedException)
 {
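      // Substitutes an empty audio stream and signals recognition so the pending recognize call
      // can complete, disposes the speech stream buffer, and then routes the exception to either
      // the expected or the unexpected exception handler.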
     ExTraceGlobals.SpeechRecognitionTracer.TraceError <string, int, string>((long)this.GetHashCode(), "SignalRecognizeWithEmptyAudioAndBailOut - exception='{0}' Status code='{1}', Status message='{2}'", e.Message, status.StatusCode, status.StatusDescription);
     if (this.audioMemoryStream != null)
     {
         this.audioMemoryStream.Close();
     }
     this.audioMemoryStream = new MemoryStream();
     this.SignalRecognizeAsync();
     if (speechStreamBuffer != null)
     {
         speechStreamBuffer.Dispose();
     }
     if (expectedException)
     {
         this.HandleException(e, status);
         return;
     }
     this.HandleUnexpectedException(e);
 }