Example #1
        /// <summary>
        /// Updates or creates a resource based on the resource identifier. The PUT operation is used to update or create a resource by identifier.  If the resource doesn't exist, the resource will be created using that identifier.  Additionally, natural key values cannot be changed using this operation, and will not be modified in the database.  If the resource &quot;id&quot; is provided in the JSON body, it will be ignored as well.
        /// </summary>
        /// <param name="id">A resource identifier specifying the resource to be updated.</param>
        /// <param name="IfMatch">The ETag header value used to prevent the PUT from updating a resource modified by another consumer.</param>
        /// <param name="body">The JSON representation of the &quot;recognitionType&quot; resource to be updated.</param>
        /// <returns>A RestSharp <see cref="IRestResponse"/> instance containing the API response details.</returns>
        public IRestResponse PutRecognitionType(string id, string IfMatch, RecognitionType body)
        {
            var request = new RestRequest("/recognitionTypes/{id}", Method.PUT);

            request.RequestFormat = DataFormat.Json;

            // verify required params are set before they are used
            if (id == null || body == null)
            {
                throw new ArgumentException("API method call is missing required parameters");
            }

            request.AddUrlSegment("id", id);
            request.AddHeader("If-Match", IfMatch);
            request.AddBody(body);
            request.Parameters.First(param => param.Type == ParameterType.RequestBody).Name = "application/json";
            var response = client.Execute(request);

            var location = response.Headers.FirstOrDefault(x => x.Name == "Location");

            if (location != null && !string.IsNullOrWhiteSpace(location.Value.ToString()))
            {
                body.id = location.Value.ToString().Split('/').Last();
            }
            return(response);
        }
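A minimal usage sketch for this PUT wrapper, assuming `recognitionTypesApi` is an instance of the API client class shown above and that the identifier and ETag values are purely illustrative:

    // Hypothetical caller; the instance name, identifier and ETag are assumptions for illustration.
    var body = new RecognitionType();
    // ... populate the natural key and descriptive properties of the resource ...

    IRestResponse response = recognitionTypesApi.PutRecognitionType(
        "b0a1c2d3e4f5",                    // identifier of the resource to update (illustrative)
        "\"5250159352800270276\"",         // If-Match: ETag obtained from a previous GET
        body);

    if (response.StatusCode == System.Net.HttpStatusCode.Created)
    {
        // When the PUT creates the resource, PutRecognitionType parses the Location header
        // and writes the new identifier back into body.id.
        Console.WriteLine("Created resource with id: " + body.id);
    }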
Example #2
        public ActionResult DeleteConfirmed(int id)
        {
            RecognitionType recognitiontype = db.RecognitionTypes.Single(r => r.RecognitionTypeId == id);

            db.RecognitionTypes.DeleteObject(recognitiontype);
            db.SaveChanges();
            return(RedirectToAction("Index"));
        }
Example #3
        //
        // GET: /RecognitionType/Delete/5

        public ActionResult Delete(int id = 0)
        {
            RecognitionType recognitiontype = db.RecognitionTypes.SingleOrDefault(r => r.RecognitionTypeId == id);

            if (recognitiontype == null)
            {
                return(HttpNotFound());
            }
            return(View(recognitiontype));
        }
Example #4
 public ActionResult Edit(RecognitionType recognitiontype)
 {
     if (ModelState.IsValid)
     {
         db.RecognitionTypes.Attach(recognitiontype);
         db.ObjectStateManager.ChangeObjectState(recognitiontype, EntityState.Modified);
         db.SaveChanges();
         return(RedirectToAction("Index"));
     }
     return(View(recognitiontype));
 }
Example #5
        public ActionResult Create(RecognitionType recognitiontype)
        {
            if (ModelState.IsValid)
            {
                db.RecognitionTypes.AddObject(recognitiontype);
                db.SaveChanges();
                return(RedirectToAction("Index"));
            }

            return(View(recognitiontype));
        }
Example #6
        public static RecognitionType Recognize(string businessCardText)
        {
            RecognitionType type = RecognitionType.Other;

            foreach (KeyValuePair <RecognitionType, string> expression in expressions)
            {
                if (Regex.IsMatch(businessCardText, expression.Value))
                {
                    type = expression.Key;
                    break;
                }
            }
            return(type);
        }
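The `expressions` field used above is not part of this listing. A plausible shape for it, assuming the enum members referenced in the later examples (Email, Number, PhoneNumber, WebPage) and purely illustrative patterns, is a collection of regular expressions keyed by recognition type:

    // Illustrative only: the real patterns live in the CardRecognizer source, which is not shown here.
    private static readonly Dictionary<RecognitionType, string> expressions =
        new Dictionary<RecognitionType, string>
        {
            { RecognitionType.Email,       @"^\S+@\S+\.\S+$" },
            { RecognitionType.WebPage,     @"^(https?://)?(www\.)?\S+\.\S+$" },
            { RecognitionType.PhoneNumber, @"^\+?[\d\s\-\(\)]{7,}$" },
            { RecognitionType.Number,      @"^\d+$" }
        };

Because the loop returns the first pattern that matches, an ordered collection (for example a `List<KeyValuePair<RecognitionType, string>>`) may be safer than a `Dictionary`, whose enumeration order is not guaranteed.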
Example #7
        public static RecognitionType Recognize(string businessCardText)
        {
            RecognitionType type = RecognitionType.Other;

            //iterate through each type to try and find a match.
            // once a match is found stop and return the type
            foreach (KeyValuePair <RecognitionType, string> expression in expressions)
            {
                if (Regex.IsMatch(businessCardText, expression.Value))
                {
                    type = expression.Key;
                    break;
                }
            }

            return(type);
        }
        IObservable <string> Listen(RecognitionType type, SilenceDetection?silence = null) => Observable.Create <string>(ob =>
        {
            var handler = new EventHandler <RecognitionResultEventArgs>((sender, args) =>
            {
                //args.Result == ResultEvent.ResultEvent.FinalResult
                //args.Result == ResultEvent.PartialResult
            });

            var client = new SttClient();
            client.RecognitionResult += handler;
            client.Start("en-US", type);
            if (silence != null)
            {
                client.SetSilenceDetection(silence.Value);
            }

            return(() =>
            {
                client.Stop();
                client.RecognitionResult -= handler;
            });
        });
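Note that the event handler above is stubbed out; a complete implementation would forward final results to `ob.OnNext` before anything reaches a subscriber. A possible way to consume `Listen`, assuming it is exposed by the surrounding speech class and that `RecognitionType.Free` and `SilenceDetection.Auto` are valid member names, could look like this:

    // Hypothetical consumer; "speech" stands for whatever object exposes Listen.
    IDisposable subscription = speech
        .Listen(RecognitionType.Free, SilenceDetection.Auto)
        .Subscribe(
            text => Console.WriteLine("Recognized: " + text),
            error => Console.WriteLine("Recognition failed: " + error.Message));

    // Disposing the subscription runs the teardown returned from Observable.Create,
    // which stops the SttClient and detaches the event handler.
    subscription.Dispose();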
Example #9
        /// <summary>
        /// Creates or updates resources based on the natural key values of the supplied resource. The POST operation can be used to create or update resources. In database terms, this is often referred to as an &quot;upsert&quot; operation (insert + update).  Clients should NOT include the resource &quot;id&quot; in the JSON body because it will result in an error (you must use a PUT operation to update a resource by &quot;id&quot;). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately.
        /// </summary>
        /// <param name="body">The JSON representation of the &quot;recognitionType&quot; resource to be created or updated.</param>
        /// <returns>A RestSharp <see cref="IRestResponse"/> instance containing the API response details.</returns>
        public IRestResponse PostRecognitionTypes(RecognitionType body)
        {
            var request = new RestRequest("/recognitionTypes", Method.POST);

            request.RequestFormat = DataFormat.Json;

            // verify required params are set
            if (body == null)
            {
                throw new ArgumentException("API method call is missing required parameters");
            }
            request.AddBody(body);
            var response = client.Execute(request);

            var location = response.Headers.FirstOrDefault(x => x.Name == "Location");

            if (location != null && !string.IsNullOrWhiteSpace(location.Value.ToString()))
            {
                body.id = location.Value.ToString().Split('/').Last();
            }
            return(response);
        }
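A usage sketch for the POST wrapper, again assuming a hypothetical `recognitionTypesApi` client instance; unlike the PUT example, the resource id must not be set on the body:

    // Hypothetical caller; the instance name is an assumption.
    var newType = new RecognitionType();
    // ... set the natural key values; do NOT set newType.id, the service rejects it on POST ...

    IRestResponse response = recognitionTypesApi.PostRecognitionTypes(newType);

    if (response.StatusCode == System.Net.HttpStatusCode.Created)
    {
        // PostRecognitionTypes copies the trailing segment of the Location header into newType.id.
        Console.WriteLine("Upserted resource id: " + newType.id);
    }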
Example #10
 public Task <CognitiveResult> RecognizeAsync(string language, byte[] buffer, RecognitionType recognitionType = RecognitionType.Vision | RecognitionType.Emotion, Func <RecognitionPhase, Task> onProgress = null)
 => RecognizeAsync(new MemoryStream(buffer), language, recognitionType, onProgress);
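A sketch of calling this convenience overload from an async method; the client instance, file path and progress handler are assumptions:

    // Hypothetical caller (requires System.IO and an async context).
    byte[] imageBytes = File.ReadAllBytes("photo.jpg");

    CognitiveResult result = await cognitiveClient.RecognizeAsync(
        "en",                                              // language for the translated description
        imageBytes,
        RecognitionType.Vision | RecognitionType.Emotion,  // same flags as the overload's default
        phase => { Console.WriteLine("Phase: " + phase); return Task.CompletedTask; });

    Console.WriteLine(result.VisionResult.Description);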
Example #11
 /// <summary>
 /// Handles completed scanning events.
 /// Navigates to results page if scanning was successful.
 /// </summary>
 /// <param name="resultList">list of recognition results</param>
 /// <param name="recognitionType">type of recognition</param>
 void mRecognizer_OnScanningDone(IList <Microblink.IRecognitionResult> resultList, RecognitionType recognitionType)
 {
     // navigate to results page if type of recognition is SUCCESSFUL
     if (recognitionType == RecognitionType.SUCCESSFUL)
     {
         // Find barcode results in list of results.
         bool resultFound = false;
         foreach (var result in resultList)
         {
             if (result.Valid && !result.Empty)
             {
                 // check if result is a PDF417 result
                 if (result is Microblink.PDF417RecognitionResult)
                 {
                     // obtain the PDF417 result
                     Microblink.PDF417RecognitionResult pdf417Result = (Microblink.PDF417RecognitionResult)result;
                     // set it as input for results page
                     ResultsPage.dataType      = "PDF417";
                     ResultsPage.uncertain     = pdf417Result.Uncertain ? "yes" : "no";
                     ResultsPage.raw           = pdf417Result.RawData;
                     ResultsPage.rawExt        = null;
                     ResultsPage.stringData    = pdf417Result.StringData;
                     ResultsPage.stringDataExt = null;
                     // mark as found
                     resultFound = true;
                     break;
                 }
                 // check if result is a ZXing result
                 else if (result is Microblink.ZXingRecognitionResult)
                 {
                     // obtain the ZXing result
                     Microblink.ZXingRecognitionResult zxingResult = (Microblink.ZXingRecognitionResult)result;
                     // set it as input for results page
                     ResultsPage.dataType      = zxingResult.BarcodeTypeString;
                     ResultsPage.uncertain     = null;
                     ResultsPage.raw           = zxingResult.RawData;
                     ResultsPage.rawExt        = zxingResult.ExtendedRawData;
                     ResultsPage.stringData    = zxingResult.StringData;
                     ResultsPage.stringDataExt = zxingResult.ExtendedStringData;
                     // mark as found
                     resultFound = true;
                     break;
                 }
             }
         }
         // navigate to results page if some result was found
         if (resultFound)
         {
             NavigationService.Navigate(new Uri("/ResultsPage.xaml", UriKind.Relative));
         }
     }
 }
 /// <summary>
 /// Handles completed scanning events.
 /// Navigates to results page if scanning was successful.
 /// </summary>
 /// <param name="resultList">list of recognition results</param>
 /// <param name="recognitionType">type of recognition</param>
 void mRecognizer_OnScanningDone(IList <Microblink.IRecognitionResult> resultList, RecognitionType recognitionType)
 {
     if (recognitionType == RecognitionType.SUCCESSFUL)
     {
         if (OnComplete != null)
         {
             OnComplete(resultList);
             if (Option_Beep)
             {
                 mBeepSound.Play();
             }
         }
         if (!Option_NoDialog)
         {
             // Find barcode results in list of results.
             bool resultFound = false;
             IReadOnlyDictionary <string, object> elements = null;
             foreach (var result in resultList)
             {
                 if (result.Valid && !result.Empty)
                 {
                     // check if result is a PDF417 result
                     if (result is Microblink.PDF417RecognitionResult)
                     {
                         // obtain the PDF417 result
                         Microblink.PDF417RecognitionResult pdf417Result = (Microblink.PDF417RecognitionResult)result;
                         elements = pdf417Result.Elements;
                         // mark as found
                         resultFound = true;
                         break;
                     }
                     // check if result is a ZXing result
                     else if (result is Microblink.ZXingRecognitionResult)
                     {
                         // obtain the ZXing result
                         Microblink.ZXingRecognitionResult zxingResult = (Microblink.ZXingRecognitionResult)result;
                         elements = zxingResult.Elements;
                         // mark as found
                         resultFound = true;
                         break;
                     }
                     // check if result is a Bardecoder result
                     else if (result is Microblink.BarDecoderRecognitionResult)
                     {
                         // obtain the Bardecoder result
                         Microblink.BarDecoderRecognitionResult bdecoderResult = (Microblink.BarDecoderRecognitionResult)result;
                         elements = bdecoderResult.Elements;
                         // mark as found
                         resultFound = true;
                         break;
                     }
                     // check if result is a USDL result
                     else if (result is Microblink.USDLRecognitionResult)
                     {
                         // obtain the USDL result
                         Microblink.USDLRecognitionResult usdlResult = (Microblink.USDLRecognitionResult)result;
                         elements = usdlResult.Elements;
                         // mark as found
                         resultFound = true;
                         break;
                     }
                 }
             }
              // display dialog if results are found
             if (resultFound && elements != null)
             {
                 StringBuilder msg = new StringBuilder();
                 foreach (string key in elements.Keys)
                 {
                     msg.Append(key);
                     msg.Append(": ");
                     msg.Append(elements[key] != null ? elements[key].ToString().Trim() : "");
                     msg.Append("\n");
                 }
                 MessageBox.Show(msg.ToString());
             }
         }
         // navigate back to caller page
         NavigationService.GoBack();
     }
 }
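The handlers in this example are presumably attached to the recognizer's scanning-done event during page initialization; that wiring is not part of the listing, but based on the handler naming convention it might look like the following sketch:

    // Sketch only: mRecognizer's type and construction come from the Microblink SDK and are not shown here;
    // the event name is inferred from the handler name mRecognizer_OnScanningDone.
    mRecognizer.OnScanningDone += mRecognizer_OnScanningDone;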
Example #14
        //Matches data from bitmap to contact info
        private void ApplyPatternMatching(OcrResult ocrResult)
        {
            Contact contact = new Contact();

            contact.SourceDisplayPicture = _photoFile;

            this.RepeatForOcrWords(ocrResult, (result, word) =>
            {
                switch (CardRecognizer.Recognize(word.Text))
                {
                case RecognitionType.Other:
                    break;

                case RecognitionType.Email:
                    contact.Emails.Add(new ContactEmail()
                    {
                        Address = word.Text
                    });
                    break;

                case RecognitionType.Name:
                    contact.FirstName = word.Text;
                    break;

                case RecognitionType.Number:
                    _phoneNumber        += word.Text;
                    RecognitionType type = CardRecognizer.Recognize(_phoneNumber);
                    if (type == RecognitionType.PhoneNumber)
                    {
                        contact.Phones.Add(new ContactPhone()
                        {
                            Number = _phoneNumber
                        });
                    }
                    break;

                case RecognitionType.WebPage:
                    try
                    {
                        contact.Websites.Add(new ContactWebsite()
                        {
                            Uri = new Uri(word.Text)
                        });
                    }
                    catch (Exception)
                    {
                        Debug.WriteLine("OCR Result cannot be converted to a URI");
                    }
                    break;

                default:
                    break;
                }
            });
            #region //This requires at least a phone or email to be valid
            if (!contact.Phones.Any())
            {
                if (!contact.Emails.Any())
                {
                    Debug.WriteLine("Contact must have phone or email info.");
                    return;
                }
            }
            #endregion
            Rect rect = GetElementRect(GetDetailsButton);
            ContactManager.ShowContactCard(contact, rect, Windows.UI.Popups.Placement.Default);
        }
Example #15
        private void ApplyPatternMatching(OcrResult ocrResult)
        {
            Contact contact = new Contact();

            //set the picture
            contact.SourceDisplayPicture = _photoFile;
            // this method uses an action that will run as a 'callback' for the method
            // more info here https://msdn.microsoft.com/en-us/library/018hxwa8(v=vs.110).aspx
            RepeatForOcrWords(ocrResult, (result, word) =>
            {
                bool isNumber = false;
                //check the recognized type and then add the type to the contact
                switch (CardRecognizer.Recognize(word.Text))
                {
                case RecognitionType.Other:
                    break;

                case RecognitionType.Email:
                    contact.Emails.Add(new ContactEmail()
                    {
                        Address = word.Text
                    });
                    break;

                case RecognitionType.Name:
                    contact.FirstName = word.Text;
                    break;

                case RecognitionType.Number:
                    isNumber = true;
                    //NOTE: Phone numbers are not as easy to validate because OCR splits numbers that contain spaces into separate words.
                    _phoneNumber        += word.Text;
                    RecognitionType type = CardRecognizer.Recognize(_phoneNumber);
                    if (type == RecognitionType.PhoneNumber)
                    {
                        contact.Phones.Add(new ContactPhone()
                        {
                            Number = _phoneNumber
                        });
                    }
                    break;

                case RecognitionType.WebPage:
                    try
                    {
                        contact.Websites.Add(new ContactWebsite()
                        {
                            Uri = new Uri(word.Text)
                        });
                    }
                    catch (Exception)
                    {
                        Debug.WriteLine("OCR Result cannot be converted to a URI");
                    }
                    break;

                default:
                    break;
                }

                //Encountered a word or a value other than a number.
                //If we haven't validated a phone number by this stage, it is clearly not a phone number, so clear the string.
                if (!isNumber)
                {
                    _phoneNumber = string.Empty;
                }
            });

            if (!contact.Phones.Any()) //contact must have either a phone or email when calling ContactManager.ShowContactCard.
            {
                if (!contact.Emails.Any())
                {
                    Debug.WriteLine("Contact must have phone or email info.");

                    return;
                }
            }

            Rect rect = GetElementRect(GetDetailsButton);

            ContactManager.ShowContactCard(contact, rect, Windows.UI.Popups.Placement.Default);
        }
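The `RepeatForOcrWords` helper is referenced here but not included in the listing. A plausible implementation against the UWP `Windows.Media.Ocr.OcrResult` type, assuming the callback should fire once per recognized word, could be:

    // Assumed helper: iterates every word of every OCR line and invokes the supplied callback.
    // The real project may filter or normalize words differently.
    private void RepeatForOcrWords(OcrResult ocrResult, Action<OcrResult, OcrWord> action)
    {
        foreach (OcrLine line in ocrResult.Lines)
        {
            foreach (OcrWord word in line.Words)
            {
                action(ocrResult, word);
            }
        }
    }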
Example #16
 /// <summary>
 /// Handles completed scanning events.
 /// Navigates to results page if scanning was successful.
 /// </summary>
 /// <param name="resultList">list of recognition results</param>
 /// <param name="recognitionType">type of recognition</param>
 void mRecognizer_OnScanningDone(IList<Microblink.IRecognitionResult> resultList, RecognitionType recognitionType)
 {
     // navigate to results page if type of recognition is SUCCESSFUL
     if (recognitionType == RecognitionType.SUCCESSFUL) {
         // Find barcode results in list of results.
         bool resultFound = false;
         foreach (var result in resultList) {
             if (result.Valid && !result.Empty) {
                 // check if result is a PDF417 result
                 if (result is Microblink.PDF417RecognitionResult) {
                     // obtain the PDF417 result
                     Microblink.PDF417RecognitionResult pdf417Result = (Microblink.PDF417RecognitionResult)result;
                     // set it as input for results page
                     ResultsPage.dataType = "PDF417";
                     ResultsPage.uncertain = pdf417Result.Uncertain ? "yes" : "no";
                     ResultsPage.raw = pdf417Result.RawData;
                     ResultsPage.rawExt = null;
                     ResultsPage.stringData = pdf417Result.StringData;
                     ResultsPage.stringDataExt = null;
                     // mark as found
                     resultFound = true;
                     break;
                 }
                 // check if result is a ZXing result
                 else if (result is Microblink.ZXingRecognitionResult) {
                     // obtain the ZXing result
                     Microblink.ZXingRecognitionResult zxingResult = (Microblink.ZXingRecognitionResult)result;
                     // set it as input for results page
                     ResultsPage.dataType = zxingResult.BarcodeTypeString;
                     ResultsPage.uncertain = null;
                     ResultsPage.raw = zxingResult.RawData;
                     ResultsPage.rawExt = zxingResult.ExtendedRawData;
                     ResultsPage.stringData = zxingResult.StringData;
                     ResultsPage.stringDataExt = zxingResult.ExtendedStringData;
                     // mark as found
                     resultFound = true;
                     break;
                 }
             }
         }
         // navigate to results page if some result was found
         if (resultFound) {
             NavigationService.Navigate(new Uri("/ResultsPage.xaml", UriKind.Relative));
         }
     }
 }
Example #17
 /// <summary>
 /// Handles completed scanning events.
 /// Navigates to results page if scanning was successful.
 /// </summary>
 /// <param name="resultList">list of recognition results</param>
 /// <param name="recognitionType">type of recognition</param>
 void mRecognizer_OnScanningDone(IList<Microblink.IRecognitionResult> resultList, RecognitionType recognitionType)
 {
     // terminate direct API
     Recognizer.GetSingletonInstance().Terminate();
     // navigate to results page
     bool resultFound = false;
     if (recognitionType == RecognitionType.SUCCESSFUL) {
          // Find MRTD results in the list of results.
         foreach (var result in resultList) {
             if (result.Valid && !result.Empty) {
                 // check if result is a MRTD result
                  if (result is Microblink.MRTDRecognitionResult) {
                     // obtain the MRTD result
                     Microblink.MRTDRecognitionResult mrtdResult = (Microblink.MRTDRecognitionResult)result;
                     // set it as input for results page
                     ResultsPage.results = mrtdResult.Elements;
                     // mark as found
                     resultFound = true;
                     break;
                 }
             }
         }
     }
     // send scan status to results page
     ResultsPage.resultFound = resultFound;
     // navigate to results page
     NavigationService.Navigate(new Uri("/ResultsPage.xaml", UriKind.Relative));
     // reenable photo choosing
     ReenableButton();
 }
Example #18
 public Task <CognitiveResult> AnalyzeAsync(byte[] buffer, string language, RecognitionType recognitionType = RecognitionType.All, Func <RecognitionPhase, Task> onProgress = null)
 => AnalyzeAsync(new MemoryStream(buffer), language, recognitionType, onProgress);
 /// <summary>
 /// Handles completed scanning events.
 /// Navigates to results page if scanning was successful.
 /// </summary>
 /// <param name="resultList">list of recognition results</param>
 /// <param name="recognitionType">type of recognition</param>
 void mRecognizer_OnScanningDone(IList<Microblink.IRecognitionResult> resultList, RecognitionType recognitionType)
 {
     // navigate to results page if type of recognition is SUCCESSFUL
     if (recognitionType == RecognitionType.SUCCESSFUL) {
         // Find MRTD result in list of results. Should be the only result in the list.
         bool resultFound = false;
         foreach (var result in resultList) {
             if (result.Valid && !result.Empty) {
                 // check if result is a MRTD result
                 if (result is Microblink.MRTDRecognitionResult) {
                     // obtain the MRTD result
                     Microblink.MRTDRecognitionResult mrtdResult = (Microblink.MRTDRecognitionResult)result;
                     // set it as input for results page
                     ResultsPage.results = mrtdResult.Elements;
                     ResultsPage.resultsType = "MRTD";
                     // mark as found
                     resultFound = true;
                     break;
                 }
                     // check if result is a MyKad result
                 else if (result is Microblink.MyKadRecognitionResult) {
                     // obtain the MyKad result
                     Microblink.MyKadRecognitionResult mykadResult = (Microblink.MyKadRecognitionResult)result;
                     // set it as input for results page
                     ResultsPage.results = mykadResult.Elements;
                     ResultsPage.resultsType = "MyKad";
                     // mark as found
                     resultFound = true;
                     break;
                 }
                     // check if result is a EUDL result
                 else if (result is Microblink.EUDLRecognitionResult) {
                     // obtain the EUDL result
                     Microblink.EUDLRecognitionResult eudlResult = (Microblink.EUDLRecognitionResult)result;
                     // set it as input for results page
                     ResultsPage.results = eudlResult.Elements;
                     ResultsPage.resultsType = "EUDL";
                     // mark as found
                     resultFound = true;
                     break;
                 }
             }
         }
         // navigate to results page if MRTD result was found
         if (resultFound) {
             NavigationService.Navigate(new Uri("/ResultsPage.xaml", UriKind.Relative));
         }
     }
 }
Example #20
        public async Task <CognitiveResult> AnalyzeAsync(Stream stream, string language, RecognitionType recognitionType = RecognitionType.All, Func <RecognitionPhase, Task> onProgress = null)
        {
            var result = new CognitiveResult();

            var imageBytes = await stream.ToArrayAsync();

            await RaiseOnProgressAsync(onProgress, RecognitionPhase.QueryingService);

            var            visionService      = new VisionServiceClient(Settings.VisionSubscriptionKey);
            AnalysisResult analyzeImageResult = null;

            if (recognitionType.HasFlag(RecognitionType.Vision))
            {
                var features = new HashSet <VisualFeature> {
                    VisualFeature.Description
                };

                if (recognitionType.HasFlag(RecognitionType.Face) || recognitionType.HasFlag(RecognitionType.Emotion))
                {
                    // If recognition types include face or emotions, add also the Faces Visual Feature, so Face and Emotion services are called
                    // only if really needed.
                    features.Add(VisualFeature.Faces);
                }

                try
                {
                    analyzeImageResult = await visionService.AnalyzeImageAsync(stream, features);
                }
                catch (ClientException ex)
                {
                    var exception = await CreateExceptionAsync(ex.Error.Code, ex.Error.Message, "Vision", ex.GetHttpStatusCode(), ex, language, onProgress);

                    throw exception;
                }

                Caption originalDescription;
                Caption filteredDescription;
                var     visionSettings = VisionSettingsProvider != null ? await VisionSettingsProvider.GetSettingsAsync() : null;

                var isValid = analyzeImageResult.IsValid(out originalDescription, out filteredDescription, visionSettings);

                var visionResult = result.VisionResult;
                visionResult.IsValid        = isValid;
                visionResult.RawDescription = originalDescription.Text;
                visionResult.Confidence     = originalDescription.Confidence;

                if (isValid)
                {
                    visionResult.Description           = filteredDescription.Text;
                    visionResult.TranslatedDescription = await TranslateAsync(filteredDescription.Text, language, onProgress);
                }
            }

            if ((recognitionType.HasFlag(RecognitionType.Face) || recognitionType.HasFlag(RecognitionType.Emotion)) &&
                (analyzeImageResult?.Faces.Any() ?? true))      // If Vision service was previously called, checks if any face was detected.
            {
                var faceService = new FaceServiceClient(Settings.FaceSubscriptionKey);

                await RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingFaces);

                try
                {
                    stream.Position = 0;

                    var attributes = new HashSet <FaceAttributeType> {
                        FaceAttributeType.Gender, FaceAttributeType.Age
                    };

                    if (recognitionType.HasFlag(RecognitionType.Emotion))
                    {
                        // If recognition types include emotions, add also the Emotion Face Attribute Type, so this feature is called
                        // only if really needed.
                        attributes.Add(FaceAttributeType.Emotion);
                    }

                    var faces = await faceService.DetectAsync(stream, returnFaceAttributes : attributes);

                    if (faces.Any())
                    {
                        if (!faceServiceInitialized)
                        {
                            // If necessary, initializes face service by obtaining the face group used for identification, if any.
                            await InitializeFaceServiceAsync(faceService);
                        }

                        // Tries to identify faces in the image.
                        IdentifyResult[] faceIdentificationResult = null;

                        if (!string.IsNullOrWhiteSpace(identifyPersonGroupId))
                        {
                            var faceIds = faces.Select(face => face.FaceId).ToArray();
                            faceIdentificationResult = await faceService.IdentifyAsync(identifyPersonGroupId, faceIds);
                        }

                        var faceTasks = new List <Task>();

                        foreach (var face in faces)
                        {
                            await RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingFaces);

                            // Runs face identification in parallel.
                            var task = Task.Run(async() =>
                            {
                                var faceResult = face.GetFaceResult();

                                // Checks if there is a candidate (i.e. a known person) in the identification result.
                                var candidate = faceIdentificationResult?.FirstOrDefault(r => r.FaceId == face.FaceId)?.Candidates.FirstOrDefault();
                                if (candidate != null)
                                {
                                    // Gets the person name.
                                    var person = await faceService.GetPersonAsync(identifyPersonGroupId, candidate.PersonId);
                                    faceResult.IdentifyConfidence = candidate.Confidence;
                                    faceResult.Name = person?.Name;
                                }

                                result.FaceResults.Add(faceResult);
                            });

                            faceTasks.Add(task);
                        }

                        await Task.WhenAll(faceTasks);
                    }
                }
                catch (FaceAPIException ex)
                {
                    var exception = await CreateExceptionAsync(ex.ErrorCode, ex.ErrorMessage, "Face", ex.HttpStatus, ex, language, onProgress);

                    throw exception;
                }
            }

            if (recognitionType.HasFlag(RecognitionType.Text))
            {
                await RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingText);

                try
                {
                    using (var ms = new MemoryStream(imageBytes))
                    {
                        var results = await visionService.RecognizeTextAsync(ms);

                        var text = results.GetRecognizedText();
                        result.OcrResult.Text = text;
                    }
                }
                catch (Microsoft.ProjectOxford.Vision.ClientException ex)
                {
                    var exception = await CreateExceptionAsync(ex.Error.Code, ex.Error.Message, "Vision", ex.GetHttpStatusCode(), ex, language, onProgress);

                    throw exception;
                }
            }

            return(result);
        }
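A usage sketch for AnalyzeAsync from an async method, showing how the recognition flags and the progress callback drive which services are called; the client instance and file path are assumptions:

    // Hypothetical caller; "cognitiveClient" exposes the AnalyzeAsync method shown above.
    using (var stream = File.OpenRead("business-card.jpg"))
    {
        CognitiveResult result = await cognitiveClient.AnalyzeAsync(
            stream,
            "it",                                                           // language for the translated description
            RecognitionType.Vision | RecognitionType.Face | RecognitionType.Text,
            phase =>
            {
                Console.WriteLine("Current phase: " + phase);               // QueryingService, RecognizingFaces, RecognizingText, ...
                return Task.CompletedTask;
            });

        Console.WriteLine(result.VisionResult.Description);
        foreach (var face in result.FaceResults)
        {
            Console.WriteLine(face.Name ?? "Unknown person");
        }
        Console.WriteLine(result.OcrResult.Text);
    }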
Example #21
        public async Task <CognitiveResult> RecognizeAsync(Stream stream, string language, RecognitionType recognitionType = RecognitionType.Vision | RecognitionType.Emotion, Func <RecognitionPhase, Task> onProgress = null)
        {
            await this.RaiseOnProgressAsync(onProgress, RecognitionPhase.QueryingService);

            var visionService = new VisionServiceClient(Settings.VisionSubscriptionKey);
            var result        = new CognitiveResult();

            if (recognitionType.HasFlag(RecognitionType.Vision) || recognitionType.HasFlag(RecognitionType.Emotion))
            {
                var imageBytes = await stream.ToArrayAsync();

                var features = new HashSet <VisualFeature> {
                    VisualFeature.Description
                };
                if (recognitionType.HasFlag(RecognitionType.Emotion))
                {
                    features.Add(VisualFeature.Faces);
                }

                var visionSettings = VisionSettingsProvider != null ? await VisionSettingsProvider.GetSettingsAsync() : null;

                var analyzeImageResult = await visionService.AnalyzeImageAsync(stream, features);

                var visionResult = result.VisionResult;

                Caption originalDescription;
                Caption filteredDescription;

                var isValid = analyzeImageResult.IsValid(out originalDescription, out filteredDescription, visionSettings);

                visionResult.IsValid        = isValid;
                visionResult.RawDescription = originalDescription.Text;
                visionResult.Confidence     = originalDescription.Confidence;

                if (isValid)
                {
                    visionResult.Description = filteredDescription.Text;

                    if (language != DefaultLanguge && IsTranslatorServiceRegistered)
                    {
                        // Make sure to use the updated translator subscription key.
                        translatorService.SubscriptionKey = Settings.TranslatorSubscriptionKey;

                        // The description needs to be translated.
                        await this.RaiseOnProgressAsync(onProgress, RecognitionPhase.Translating);

                        var translation = await translatorService.TranslateAsync(filteredDescription.Text, from : DefaultLanguge, to : language);

                        visionResult.TranslatedDescription = translation;
                    }
                }

                if (recognitionType.HasFlag(RecognitionType.Emotion))
                {
                    // If there is one or more faces, asks the service information about them.
                    if (IsEmotionServiceRegistered && (analyzeImageResult.Faces?.Any() ?? false))
                    {
                        await this.RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingFaces);

                        var emotionService = new EmotionServiceClient(Settings.EmotionSubscriptionKey);

                        foreach (var face in analyzeImageResult.Faces)
                        {
                            using (var ms = new MemoryStream(imageBytes))
                            {
                                var emotions = await emotionService.RecognizeAsync(ms, face.FaceRectangle.ToRectangle());

                                var emotionResult = emotions.GetEmotionResult(face);
                                result.EmotionResults.Add(emotionResult);
                            }
                        }
                    }
                }
            }

            if (recognitionType.HasFlag(RecognitionType.Text))
            {
                await this.RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingText);

                var results = await visionService.RecognizeTextAsync(stream);

                var text = results.GetRecognizedText();

                result.OcrResult.Text = text;
            }

            return(result);
        }
        /// <summary>
        /// Handles completed scanning events.
        /// Navigates to results page if scanning was successful.
        /// </summary>
        /// <param name="resultList">list of recognition results</param>
        /// <param name="recognitionType">type of recognition</param>
        void mRecognizer_OnScanningDone(IList <Microblink.IRecognitionResult> resultList, RecognitionType recognitionType)
        {
            // terminate direct API
            Recognizer.GetSingletonInstance().Terminate();
            // navigate to results page
            bool resultFound = false;

            if (recognitionType == RecognitionType.SUCCESSFUL)
            {
                // Find Croatian payslip results in the list of results.
                foreach (var result in resultList)
                {
                    if (result.Valid && !result.Empty)
                    {
                        // check if result is a PDF417 result
                        if (result is Microblink.PDF417RecognitionResult)
                        {
                            // obtain the PDF417 result
                            Microblink.PDF417RecognitionResult pdf417Result = (Microblink.PDF417RecognitionResult)result;
                            // set it as input for results page
                            ResultsPage.results = pdf417Result.Elements;
                            // mark as found
                            resultFound = true;
                            break;
                        }
                        // check if result is a ZXing result
                        else if (result is Microblink.ZXingRecognitionResult)
                        {
                            // obtain the ZXing result
                            Microblink.ZXingRecognitionResult zxingResult = (Microblink.ZXingRecognitionResult)result;
                            // set it as input for results page
                            ResultsPage.results = zxingResult.Elements;
                            resultFound         = true;
                            break;
                        }
                    }
                }
            }
            // send scan status to results page
            ResultsPage.resultFound = resultFound;
            // navigate to results page
            NavigationService.Navigate(new Uri("/ResultsPage.xaml", UriKind.Relative));
            // reenable photo choosing
            ReenableButton();
        }