private void OnMicShortPhraseReceiveHandler(object sender, SpeechResponseEventArgs e)
{
    MicClient.EndMicAndRecognition();
    IsMicroUse = false;

    for (int i = 0; i < e.PhraseResponse.Results.Length; i++)
    {
        RecognizeText.Add(e.PhraseResponse.Results[i].DisplayText);
    }
}
private string ExtractTextByRegions(RecognizeText ocrOutput)
{
    string resultText = String.Empty;
    resultText += string.Join("\n", from LineNew sline in ocrOutput.RecognitionResult.Lines
                                    select sline.Text);
    return resultText;
}
// NEW API: extract text and text regions from the OCR response
private void ExtractTextAndRegionsNewAPIFromResponse()
{
    var response = JObject.Parse(OCRResponse);
    RecognizeText ocrOutput = JsonConvert.DeserializeObject<RecognizeText>(OCRResponse);

    if (ocrOutput != null && ocrOutput.RecognitionResult != null && ocrOutput.RecognitionResult.Lines != null)
    {
        foreach (LineNew sline in ocrOutput.RecognitionResult.Lines)
        {
            // Draw rectangles for the lines
            CreateRectangle("line", sline.BoundingBox, _lineColor);

            foreach (WordNew sword in sline.Words)
            {
                // Draw rectangles and labels for the words
                CreateImageLabelsAndRectangle(sword.BoundingBox, sword.Text);
            }
        }

        // Visibility check for the bounding box layers
        if (regionVis) { canvasRegions.Visibility = Visibility.Visible; }
        if (lineVis) { canvasLines.Visibility = Visibility.Visible; }
        if (wordVis) { canvasWords.Visibility = Visibility.Visible; }
        if (textVis) { canvasText.Visibility = Visibility.Visible; }

        // Visibility check for the text output
        stckOutput.Visibility = Visibility.Visible;
        txtOcrOutput.Text = ExtractTextByRegions(ocrOutput);

        // Search key-value pairs if keys are defined in Settings.
        if (!string.IsNullOrEmpty(searchKeys))
        {
            ExtractKeyValuePairs(ocrOutput.RecognitionResult.Lines);
        }
    }
    else
    {
        MessageBox.Show("Output can't be extracted");
    }
}
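The methods above deserialize the Recognize Text JSON into RecognizeText, RecognitionResult, LineNew, and WordNew model classes that are not shown in this section (the Azure Function samples further down read the same result through equivalent Line and Word classes). The following is a minimal sketch of the shape these classes would need, assuming the standard Recognize Text result layout; only the members the samples actually read are included.

using System.Collections.Generic;

// Sketch of the response models implied by the code above (assumed shape).
public class RecognizeText
{
    public string Status { get; set; }
    public RecognitionResult RecognitionResult { get; set; }
}

public class RecognitionResult
{
    public List<LineNew> Lines { get; set; }
}

public class LineNew
{
    public int[] BoundingBox { get; set; }
    public string Text { get; set; }
    public List<WordNew> Words { get; set; }
}

public class WordNew
{
    public int[] BoundingBox { get; set; }
    public string Text { get; set; }
}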
private void OnPartialReponseReceiveHandler(object sender, PartialSpeechResponseEventArgs e)
{
    RecognizeText.Add(e.PartialResult);
}
private void OnConversationErrorHandler(object sender, SpeechErrorEventArgs e)
{
    RecognizeText.Add(e.SpeechErrorText);
    RecognizeText.Add(e.SpeechErrorCode.ToString());
    Error = true;
}
private void onIntentHandler(object sender, SpeechIntentEventArgs e)
{
    RecognizeText.Add(e.Payload);
    MicClient.EndMicAndRecognition();
}
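The speech handlers above (OnMicShortPhraseReceiveHandler, OnPartialReponseReceiveHandler, OnConversationErrorHandler, onIntentHandler) follow the event signatures of the older Project Oxford / Bing Speech MicrophoneRecognitionClient. A rough wiring sketch is shown below; the CreateMicrophoneClient method name, the language, and the placeholder subscription key are assumptions, and onIntentHandler would additionally require the "with intent" client variant.

// Rough wiring sketch (assumption: Microsoft.ProjectOxford.SpeechRecognition client;
// the key and language below are placeholders).
private void CreateMicrophoneClient()
{
    MicClient = SpeechRecognitionServiceFactory.CreateMicrophoneClient(
        SpeechRecognitionMode.ShortPhrase,
        "en-US",
        "<speech-subscription-key>");

    // Hook the handlers defined above.
    MicClient.OnPartialResponseReceived += OnPartialReponseReceiveHandler;
    MicClient.OnResponseReceived += OnMicShortPhraseReceiveHandler;
    MicClient.OnConversationError += OnConversationErrorHandler;

    IsMicroUse = true;
    MicClient.StartMicAndRecognition();
}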
/// <summary>
/// Gets the analysis of the specified image file by using
/// the Computer Vision REST API.
/// </summary>
/// <param name="imageFilePath">The image file to analyze.</param>
static async Task MakeAnalysisRequest(string imageFilePath)
{
    try
    {
        HttpClient client = new HttpClient();

        // Request headers.
        client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", subscriptionKey);

        var queryString = HttpUtility.ParseQueryString(string.Empty);

        // Request parameters. The Recognize Text method takes a single
        // parameter, "mode", which is either "Printed" or "Handwritten".
        string requestParameters = "?mode=Printed";

        // Assemble the URI for the REST API method.
        string uri = uriBase + requestParameters;

        HttpResponseMessage response;

        // Read the contents of the specified local image into a byte array.
        byte[] byteData = GetImageAsByteArray(imageFilePath);

        // Add the byte array as an octet stream to the request body.
        using (ByteArrayContent content = new ByteArrayContent(byteData))
        {
            // This example uses the "application/octet-stream" content type.
            // The other content types you can use are "application/json"
            // and "multipart/form-data".
            content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

            // Asynchronously call the REST API method.
            response = await client.PostAsync(uri, content);
        }

        string operationLocation = null;

        // The response contains the URI to retrieve the result of the process.
        if (response.IsSuccessStatusCode)
        {
            operationLocation = response.Headers.GetValues("Operation-Location").FirstOrDefault();
        }

        // Poll the operation until it succeeds or ten attempts have been made.
        string contentString;
        int s = 0;
        do
        {
            System.Threading.Thread.Sleep(1000);
            response = await client.GetAsync(operationLocation);
            contentString = await response.Content.ReadAsStringAsync();
            ++s;
        }
        while (s < 10 && contentString.IndexOf("\"status\":\"Succeeded\"") == -1);

        // contentString already holds the final JSON payload.
        string json = contentString;
        json = json.TrimStart(new char[] { '[' }).TrimEnd(new char[] { ']' });
        RecognizeText ocrOutput = JsonConvert.DeserializeObject<RecognizeText>(json);

        // Asynchronously get the JSON response.
        //string contentString = await response.Content.ReadAsStringAsync();
        //string contentString_parsed = JToken.Parse(contentString).ToString()

        List<Word> textvalues = new List<Word>();
        List<Line> linevalues = new List<Line>();
        List<string> result = new List<string>();

        // Flatten the recognition result into line and word lists.
        foreach (Line sline in ocrOutput.RecognitionResult.Lines)
        {
            int[] lvalues = sline.BoundingBox;
            linevalues.Add(new Line { Text = sline.Text, BoundingBox = lvalues });

            foreach (Word sword in sline.Words)
            {
                int[] wvalues = sword.BoundingBox;
                textvalues.Add(new Word { Text = sword.Text, BoundingBox = wvalues });
            }
        }

        var csv = new StringBuilder();
        string fileName = imageFilePath;
        string csvname = Path.GetFileNameWithoutExtension(fileName);
        string filePath = @outputfilepath + "result_" + csvname + ".csv";

        // TARIH (date)
        var date = new System.Text.StringBuilder();
        bool success = false;
        string date_pattern = @"(\d{2})[-.\/](\d{2})[-.\/](\d{4})";

        for (int i = 0; i < textvalues.Count - 1; i++)
        {
            date.Append(textvalues[i + 1].Text.ToString());
            Match m1 = Regex.Match(date.ToString(), date_pattern, RegexOptions.IgnoreCase);

            if (m1.Success)
            {
                success = true;
                var date_key = "TARIH";
                var dateLine = string.Format("{0},{1}", date_key, m1.Value);
                csv.AppendLine(dateLine);
                //Console.WriteLine("\nTARIH", dateLine);
                break;
            }
        }

        if (success == false)
        {
            var date_key = "TARIH";
            var dateLine = string.Format("{0},{1}", date_key, "Not Found");
            csv.AppendLine(dateLine);
        }

        // FIS NO (receipt number)
        bool IsDigitsOnly(string str)
        {
            foreach (char c in str)
            {
                if (c < '0' || c > '9')
                {
                    return false;
                }
            }

            return true;
        }

        var receipt_number = new List<string>();
        bool afterFis = false;

        for (int i = 0; i < textvalues.Count; i++)
        {
            if (textvalues[i].Text == "Fis" || textvalues[i].Text == "FIS" ||
                textvalues[i].Text == "FiS" || textvalues[i].Text == "Fi$")
            {
                afterFis = true;
            }

            if (afterFis && (textvalues[i].Text == "NO" || textvalues[i].Text == "NO:"))
            {
                afterFis = false;

                if (IsDigitsOnly(textvalues[i + 1].Text))
                {
                    receipt_number.Add(textvalues[i + 1].Text);
                }
                else if (IsDigitsOnly(textvalues[i + 2].Text))
                {
                    receipt_number.Add(textvalues[i + 2].Text);
                }
                else
                {
                    string fis_no = "FIS NO";
                    string csv2 = string.Format("{0},{1}\n ", fis_no, "Not found");
                    csv.Append(csv2);
                    break;
                }

                string fisno = "FIS NO";
                string csv3 = string.Format("{0},{1}\n ", fisno, receipt_number[0]);
                csv.Append(csv3);
            }
        }

        // TOPLAM (total)
        var newtoplam = new System.Text.StringBuilder();
        bool success2 = false;
        string total_pattern = @"\d+[,]\d{2}";

        for (int i = 0; i < textvalues.Count - 1; i++)
        {
            if (textvalues[i].Text == "TOPLAM" && (i == 0 || textvalues[i - 1].Text != "ARA"))
            {
                // Scan at most the next five words for an amount.
                for (int k = 0; k < 5 && i + 1 < textvalues.Count; k++, i++)
                {
                    newtoplam.Append(textvalues[i + 1].Text.ToString());
                    Match m2 = Regex.Match(newtoplam.ToString(), total_pattern, RegexOptions.IgnoreCase);

                    if (m2.Success)
                    {
                        success2 = true;
                        var total_key = "TOPLAM";
                        var newValue = m2.Value.Replace(",", ".");
                        var totalLine = string.Format("{0},{1}", total_key, newValue);
                        csv.AppendLine(totalLine);
                        break;
                    }
                }

                break;
            }
        }

        if (success2 == false)
        {
            var total_key = "TOPLAM";
            var totalLine = string.Format("{0},{1} ", total_key, "Not Found");
            //csv.AppendLine(totalLine);
        }

        // TOP (abbreviated total)
        var newtop = new System.Text.StringBuilder();
        bool success3 = false;
        string top_pattern = @"\d+[,]\d{2}";

        for (int i = 0; i < textvalues.Count - 1; i++)
        {
            if (textvalues[i].Text == "TOP" && (i == 0 || textvalues[i - 1].Text != "ARA"))
            {
                // Scan at most the next five words for an amount.
                for (int k = 0; k < 5 && i + 1 < textvalues.Count; k++, i++)
                {
                    newtop.Append(textvalues[i + 1].Text.ToString());
                    Match m3 = Regex.Match(newtop.ToString(), top_pattern, RegexOptions.IgnoreCase);

                    if (m3.Success)
                    {
                        success3 = true;
                        var top_key = "TOP";
                        var replaced_value = m3.Value.Replace(",", ".");
                        var totalLine = string.Format("{0},{1} ", top_key, replaced_value);
                        csv.AppendLine(totalLine);
                        break;
                    }
                }

                break;
            }
        }

        if (success3 == false)
        {
            var top_key = "TOP";
            var topLine = string.Format("{0},{1} ", top_key, "Not Found");
            //csv.AppendLine(topLine);
        }

        // TUTAR (amount)
        var newtutar = new System.Text.StringBuilder();
        bool success4 = false;
        string tutar_pattern = @"\d+[,]\d{2}";

        for (int i = 0; i < textvalues.Count - 1; i++)
        {
            if (textvalues[i].Text == "TUTAR")
            {
                // Scan at most the next five words for an amount.
                for (int k = 0; k < 5 && i + 1 < textvalues.Count; k++, i++)
                {
                    newtutar.Append(textvalues[i + 1].Text.ToString());
                    Match m4 = Regex.Match(newtutar.ToString(), tutar_pattern, RegexOptions.IgnoreCase);

                    if (m4.Success)
                    {
                        success4 = true;
                        var tutar_key = "TUTAR";
                        var replaced_value = m4.Value.Replace(",", ".");
                        var tutarLine = string.Format("{0},{1} ", tutar_key, replaced_value);
                        csv.AppendLine(tutarLine);
                        break;
                    }
                }

                break;
            }
        }

        if (success4 == false)
        {
            var tutar_key = "TUTAR";
            var tutarLine = string.Format("{0},{1} ", tutar_key, "Not Found");
            //csv.AppendLine(tutarLine);
        }

        File.AppendAllText(filePath, csv.ToString());

        // Display the extracted key-value pairs and the raw JSON response.
        Console.WriteLine("\nResponse:\n\n{0}\n", csv.ToString());
        Console.WriteLine("\nResponse:\n\n{0}\n", JToken.Parse(contentString).ToString());
    }
    catch (Exception e)
    {
        Console.WriteLine("\n" + e.Message);
    }
}
// OCR request by URL & extract key-value pairs
public static async Task<List<string>> MakeOCRRequestByUrl(string imageUrl, string formType, ExecutionContext executionContext)
{
    Console.Write("C# HTTP trigger function processed: MakeOCRRequestByUrl");

    string urlBase = Environment.GetEnvironmentVariable("CognitiveServicesUrlBase");
    string key = Environment.GetEnvironmentVariable("CognitiveServicesKey");

    var client = new HttpClient();
    var queryString = HttpUtility.ParseQueryString(string.Empty);

    // Request headers
    client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", key);

    // Request parameters
    queryString["mode"] = "Printed";
    Uri uri = new Uri(urlBase + "recognizeText?" + queryString);

    HttpResponseMessage response;

    // Request body
    var requestBody = "{\"url\":\"" + $"{imageUrl}" + "\"}";
    byte[] byteData = Encoding.UTF8.GetBytes(requestBody);

    using (var content = new ByteArrayContent(byteData))
    {
        content.Headers.ContentType = new MediaTypeHeaderValue("application/json");
        response = await client.PostAsync(uri, content);
    }

    string operationLocation = null;

    // The response contains the URI to retrieve the result of the process.
    if (response.IsSuccessStatusCode)
    {
        operationLocation = response.Headers.GetValues("Operation-Location").FirstOrDefault();
    }

    // Poll the operation until it succeeds or ten attempts have been made.
    string contentString;
    int i = 0;
    do
    {
        System.Threading.Thread.Sleep(1000);
        response = await client.GetAsync(operationLocation);
        contentString = await response.Content.ReadAsStringAsync();
        ++i;
    }
    while (i < 10 && contentString.IndexOf("\"status\":\"Succeeded\"") == -1);

    // contentString already holds the final JSON payload.
    string json = contentString;
    json = json.TrimStart(new char[] { '[' }).TrimEnd(new char[] { ']' });
    RecognizeText ocrOutput = JsonConvert.DeserializeObject<RecognizeText>(json);

    if (ocrOutput != null && ocrOutput.RecognitionResult != null && ocrOutput.RecognitionResult.Lines != null)
    {
        List<string> resultText = (from Line sline in ocrOutput.RecognitionResult.Lines
                                   select (string)sline.Text).ToList<string>();

        // Extract key-value pairs
        resultText = OCRHelper.ExtractKeyValuePairs(ocrOutput.RecognitionResult.Lines, formType, executionContext);
        return resultText;
    }
    else
    {
        return null;
    }
}
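MakeOCRRequestByUrl reads its endpoint and key from application settings and logs itself as part of an HTTP-triggered function, so a caller would typically look something like the sketch below. The function name, query parameter names, and return shape are assumptions for illustration.

// Hypothetical HTTP-triggered caller for MakeOCRRequestByUrl
// (requires Microsoft.Azure.WebJobs and Microsoft.AspNetCore.Mvc;
// function name and query parameters are assumptions).
[FunctionName("RecognizeFormText")]
public static async Task<IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Function, "get")] HttpRequest req,
    ExecutionContext executionContext)
{
    string imageUrl = req.Query["imageUrl"];
    string formType = req.Query["formType"];

    List<string> keyValuePairs = await MakeOCRRequestByUrl(imageUrl, formType, executionContext);

    return keyValuePairs != null
        ? (IActionResult)new OkObjectResult(keyValuePairs)
        : new BadRequestObjectResult("OCR output could not be extracted.");
}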
public static async Task<List<string>> MakeAnalysisWithImage(Stream stream, string subscriptionKey, string uriEndPoint)
{
    List<string> result = new List<string>();
    HttpClient client = new HttpClient();

    // Request headers
    client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", subscriptionKey);

    string requestParameters = "?mode=Printed";

    // Assemble the URI for the REST API method.
    string uri = uriEndPoint + requestParameters;

    HttpResponseMessage response;

    // Read the image stream into a byte array.
    BinaryReader binaryReader = new BinaryReader(stream);
    byte[] byteData = binaryReader.ReadBytes((int)stream.Length);

    using (ByteArrayContent content = new ByteArrayContent(byteData))
    {
        content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

        // Asynchronously call the REST API method.
        response = await client.PostAsync(uri, content);
    }

    string operationLocation = null;

    // The response contains the URI to retrieve the result of the process.
    if (response.IsSuccessStatusCode)
    {
        operationLocation = response.Headers.GetValues("Operation-Location").FirstOrDefault();
    }

    // Poll the operation until it succeeds or ten attempts have been made.
    string contentString;
    int i = 0;
    do
    {
        System.Threading.Thread.Sleep(1000);
        response = await client.GetAsync(operationLocation);
        contentString = await response.Content.ReadAsStringAsync();
        ++i;
    }
    while (i < 10 && contentString.IndexOf("\"status\":\"Succeeded\"") == -1);

    // contentString already holds the final JSON payload.
    string json = contentString;
    json = json.TrimStart(new char[] { '[' }).TrimEnd(new char[] { ']' });
    RecognizeText ocrOutput = JsonConvert.DeserializeObject<RecognizeText>(json);

    if (ocrOutput != null && ocrOutput.RecognitionResult != null && ocrOutput.RecognitionResult.Lines != null)
    {
        List<string> resultText = (from Line sline in ocrOutput.RecognitionResult.Lines
                                   select (string)sline.Text).ToList<string>();

        resultText = OCRHelper.ExtractKeyValuePairs(ocrOutput.RecognitionResult.Lines);
        return resultText;
    }
    else
    {
        return null;
    }
}
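As a usage sketch, MakeAnalysisWithImage can be fed any readable stream; the example below opens a local file and prints the extracted key-value pairs. The file path, key, and endpoint values are placeholders, and the endpoint is assumed to end with the recognizeText route because the method only appends the mode parameter.

// Hypothetical caller for MakeAnalysisWithImage (placeholder key, endpoint, and path).
static async Task PrintExtractedPairsAsync()
{
    string subscriptionKey = "<computer-vision-key>";
    string uriEndPoint = "https://<region>.api.cognitive.microsoft.com/vision/v2.0/recognizeText";

    using (FileStream stream = File.OpenRead(@"C:\receipts\sample-receipt.jpg"))
    {
        List<string> pairs = await MakeAnalysisWithImage(stream, subscriptionKey, uriEndPoint);

        if (pairs != null)
        {
            pairs.ForEach(Console.WriteLine);
        }
    }
}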