public int detectCheckType(string check) // Code 1: Banco Comercial, Code 2: BBVA
{
    System.Drawing.Image img = System.Drawing.Image.FromFile(check);

    // Crop the bank logo area (middle third of the width, top quarter of the height)
    var imageFactory = new ImageFactory(false);
    var croppedImg = imageFactory.Load(check);
    int x, y, width, height;
    x = Convert.ToInt32(img.Width * 0.33);
    y = 0;
    width = Convert.ToInt32(img.Width * 0.33);
    height = Convert.ToInt32(img.Height * 0.25);
    Rectangle logoZone = new Rectangle(x, y, width, height);
    croppedImg.Crop(logoZone);

    // Save the cropped area as a PNG file
    croppedImg.Format(new PngFormat { Quality = 100 });
    string logoImg = string.Concat(workingDirectory, "\\ManusE1_temporal\\logo.png");
    croppedImg.Save(logoImg);

    // Process the logo
    try
    {
        // Evaluate what Vision returned
        var image = Google.Cloud.Vision.V1.Image.FromFile(logoImg);
        var response = client.DetectText(image, ic);
        string OCRtext = response.ElementAt(0).Description;

        // Delete the cropped file once it has been processed
        croppedImg.Dispose();
        File.Delete(string.Concat(workingDirectory, "\\ManusE1_temporal\\logo.png"));

        if (OCRtext.Contains("MER"))
        {
            return 1;
        }
        else if (OCRtext.Contains("BBVA"))
        {
            return 2;
        }
    }
    catch (ArgumentOutOfRangeException)
    {
        // If ElementAt(0) throws because Vision returned no text, assume a Banco Comercial
        // check, since that is the more likely case
        return 1;
    }

    return 0; // Should never be reached, but the compiler requires a return here
}
public static string Recognize(string imagePath)
{
    if (credential == null)
    {
        var googleCredsPath = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
            ? @"C:\mydev\My Project-77101559a6d3.json"
            : "/Users/slav/Downloads/My Project-d1092d64586a.json";

        credential = GoogleCredential.FromFile(googleCredsPath)
            .CreateScoped(ImageAnnotatorClient.DefaultScopes);
        channel = new Grpc.Core.Channel(
            ImageAnnotatorClient.DefaultEndpoint.ToString(),
            credential.ToChannelCredentials());
        // Assign the field (not a new local) so the client is available below and on later calls
        client = ImageAnnotatorClient.Create(channel);
    }

    var image = Image.FromFile(imagePath);
    var response = client.DetectText(image);

    foreach (var annotation in response)
    {
        if (annotation.Description != null)
        {
            return annotation.Description;
        }
    }

    return null;
}
public override async Task RunCommand(object sender)
{
    var engine = (IAutomationEngineInstance)sender;
    var vAPICredentials = (string)await v_APICredentials.EvaluateCode(engine);

    string vFilePath = null;
    if (v_ImageType == "Filepath")
    {
        vFilePath = (string)await v_FilePath.EvaluateCode(engine);
    }
    else
    {
        // Persist the in-memory bitmap to a temporary file so Vision can read it from disk
        Bitmap vBitmap = (Bitmap)await v_Bitmap.EvaluateCode(engine);
        vFilePath = engine.EngineContext.ProjectPath + "\\tempOCRBitmap.bmp";
        using (FileStream imageStream = new FileStream(vFilePath, FileMode.OpenOrCreate))
        {
            vBitmap.Save(imageStream, ImageFormat.Bmp);
        }
    }

    Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", vAPICredentials);

    string foundText = "";
    GoogleImage image = GoogleImage.FromFile(vFilePath);
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<EntityAnnotation> textAnnotations = client.DetectText(image);
    foundText = textAnnotations[0].Description;

    foundText.SetVariableValue(engine, v_OutputUserVariableName);
}
/// <summary>
/// Sends a request to Google Vision to extract text from an image represented as a series of bytes.
/// </summary>
/// <param name="imageBytes">The image to analyze, represented as a series of bytes.</param>
/// <returns>The text extracted from the image, or an empty string if no text was found.</returns>
public static string ExtractText(byte[] imageBytes)
{
    if (imageBytes == null)
    {
        throw new ArgumentNullException(nameof(imageBytes), "GoogleVisionConnectionManager.ExtractText(Image): Image provided is null");
    }

    Image image = Image.FromBytes(imageBytes);
    System.Environment.SetEnvironmentVariable(GOOGLE_APPLICATION_CREDENTIALS, JsonPath);
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<EntityAnnotation> textAnnotations = client.DetectText(image);

    if (textAnnotations.Count <= 0)
    {
        return "";
    }

    // The first annotation holds the full detected text; trim a trailing newline
    // and flatten remaining newlines to spaces.
    string text = textAnnotations.First().Description;
    if (text.Last() == '\n')
    {
        text = text.Substring(0, text.Length - 1);
    }

    return text.Replace('\n', ' ');
}
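A minimal usage sketch for ExtractText, assuming the GoogleVisionConnectionManager class above is already configured with a valid JsonPath; the image path and the PrintTextFromFile helper are hypothetical.

// Hypothetical caller: read an image from disk and run it through ExtractText.
public static void PrintTextFromFile()
{
    byte[] imageBytes = System.IO.File.ReadAllBytes(@"C:\samples\receipt.jpg"); // hypothetical image path
    string text = GoogleVisionConnectionManager.ExtractText(imageBytes);
    System.Console.WriteLine(text);
}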
public IReadOnlyList<EntityAnnotation> GetTextDetectionResponse(Image image)
{
    if (image == null)
    {
        throw new ArgumentNullException(nameof(image));
    }

    var textDetectionResponse = _imageAnnotatorClient.DetectText(image);
    return textDetectionResponse;
}
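A short usage sketch for the wrapper above; the _visionService instance name is an assumption standing in for whichever class holds the _imageAnnotatorClient field.

// Hypothetical caller for GetTextDetectionResponse.
var image = Google.Cloud.Vision.V1.Image.FromFile("sign.jpg"); // hypothetical image
var annotations = _visionService.GetTextDetectionResponse(image);
foreach (var annotation in annotations)
{
    System.Console.WriteLine(annotation.Description); // index 0 holds the full detected text, later entries are individual words
}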
private string RecognizeFromBytes(byte[] data)
{
    var image = Image.FromBytes(data);
    var response = googleClient.DetectText(image, context);
    if (response.Count == 0)
    {
        return "";
    }

    return response.FirstOrDefault().Description;
}
public IActionResult CaptureImage(IFormFile file)
{
    if (file == null)
    {
        return Json(new { Status = 0 });
    }

    byte[] imageBytes = null;
    using (var ms = new MemoryStream())
    {
        file.CopyTo(ms);
        imageBytes = ms.ToArray();
    }

    Image image5 = Image.FromBytes(imageBytes);
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();

    IReadOnlyList<EntityAnnotation> labels = client.DetectLabels(image5);
    var faceAttributeList = new List<FaceAttribute>();
    foreach (EntityAnnotation label in labels)
    {
        faceAttributeList.Add(new FaceAttribute()
        {
            Score = ((int)(label.Score * 100)).ToString(),
            Description = label.Description
        });
        // Console.WriteLine($"Score: {(int)(label.Score * 100)}%; Description: {label.Description}");
    }

    IReadOnlyList<FaceAnnotation> faceAnnotations = client.DetectFaces(image5);

    IReadOnlyList<EntityAnnotation> texts = client.DetectText(image5);
    var textList = new List<FaceAttribute>();
    foreach (EntityAnnotation label in texts)
    {
        textList.Add(new FaceAttribute()
        {
            Score = ((int)(label.Score * 100)).ToString(),
            Description = label.Description
        });
    }

    SafeSearchAnnotation searchAnnotations = client.DetectSafeSearch(image5);

    return Json(new FaceResponse()
    {
        labels = faceAttributeList,
        faceAnnotations = faceAnnotations,
        texts = textList,
        searchAnnotations = searchAnnotations
    });
}
static void LoadImg(string imgPath)
{
    Image image = Image.FromFile(imgPath);
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<EntityAnnotation> response = client.DetectText(image);

    // Start at index 1: the first annotation is the full text block,
    // the remaining ones are the individual words.
    for (int i = 1; i < response.Count; i++)
    {
        if (response[i].Description != null)
        {
            Console.WriteLine(response[i].Description);
        }
    }
}
public void DetectText()
{
    Image image = LoadResourceImage("Ellesborough.png");
    // Snippet: DetectText
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<EntityAnnotation> textAnnotations = client.DetectText(image);
    foreach (EntityAnnotation text in textAnnotations)
    {
        Console.WriteLine($"Description: {text.Description}");
    }
    // End snippet

    var descriptions = textAnnotations.Select(t => t.Description).ToList();
    Assert.Contains("Ellesborough", descriptions);
}
public ActionResult Capture(string base64String)
{
    byte[] imageBytes = null;
    if (!string.IsNullOrEmpty(base64String))
    {
        // The data URI prefix (e.g. "data:image/png;base64,") comes before the comma
        var imageParts = base64String.Split(',').ToList<string>();
        imageBytes = Convert.FromBase64String(imageParts[1]);
    }

    Image image5 = Image.FromBytes(imageBytes);
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();

    IReadOnlyList<EntityAnnotation> labels = client.DetectLabels(image5);
    var faceAttributeList = new List<FaceAttribute>();
    foreach (EntityAnnotation label in labels)
    {
        faceAttributeList.Add(new FaceAttribute()
        {
            Score = ((int)(label.Score * 100)).ToString(),
            Description = label.Description
        });
        // Console.WriteLine($"Score: {(int)(label.Score * 100)}%; Description: {label.Description}");
    }

    IReadOnlyList<FaceAnnotation> faceAnnotations = client.DetectFaces(image5);

    IReadOnlyList<EntityAnnotation> texts = client.DetectText(image5);
    var textList = new List<FaceAttribute>();
    foreach (EntityAnnotation label in texts)
    {
        textList.Add(new FaceAttribute()
        {
            Score = ((int)(label.Score * 100)).ToString(),
            Description = label.Description
        });
    }

    SafeSearchAnnotation searchAnnotations = client.DetectSafeSearch(image5);

    return Json(new FaceResponse()
    {
        labels = faceAttributeList,
        faceAnnotations = faceAnnotations,
        texts = textList,
        searchAnnotations = searchAnnotations
    });
}
private void ExecuteProcessCommand(object obj)
{
    var image = Google.Cloud.Vision.V1.Image.FromFile(Image);
    var documentText = _client.DetectDocumentText(image, _imageContext);
    var detection = _client.DetectWebInformation(image, _imageContext);
    var text = _client.DetectText(image, _imageContext);
    var label = detection.BestGuessLabels.FirstOrDefault().Label.ToUpper();

    Detector mainDetector = new Detector(documentText.Text.ToUpper(), label, detection, text);
    var document = mainDetector.Execute();

    DocumentType = document.Type.ToString();
    DocumentData = document.ToString();
    OnPropertyChanged("DocumentData");
    OnPropertyChanged("DocumentType");
}
private void processButton_Click(object sender, EventArgs e)
{
    textBox1.Text = "Procesando...";

    // Call the Vision API
    var image = Image.FromFile(fotoPath);
    var response = client.DetectText(image);

    // Create the txt file
    CreateTXT(directory);

    // Write what Vision returned
    string text = "";
    text = response.ElementAt(0).Description;
    writeTXT(txtPath, text);

    textBox1.Text = "Se creó un txt con el resultado.";
    processButton.Enabled = false;

    // Delete the image created by improveImage in the working directory
    File.Delete(string.Concat(directory, "\\improvedImg.jpg"));
}
public string GetText(string fileName)
{
    // Set this variable in the environment (Linux or Windows) based on the service account created in Google
    // https://cloud.google.com/docs/authentication/production#linux-or-macos
    string value = Environment.GetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS");

    Image img = Image.FromFile(fileName);
    var result = string.Empty;

    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<EntityAnnotation> textAnnotations = client.DetectText(img);
    foreach (EntityAnnotation text in textAnnotations)
    {
        result += text.Description;
    }

    return result;
}
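A minimal sketch of the credential setup the comment above refers to, performed before GetText is called; the service-account key path and the OcrService class name are assumptions for illustration.

// Hypothetical setup: point GOOGLE_APPLICATION_CREDENTIALS at the service-account key
// before the ImageAnnotatorClient is created.
Environment.SetEnvironmentVariable(
    "GOOGLE_APPLICATION_CREDENTIALS",
    "/home/app/credentials/service-account.json"); // hypothetical key file

var ocr = new OcrService();                        // hypothetical class containing GetText
Console.WriteLine(ocr.GetText("invoice.png"));     // hypothetical image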
private List<MediaProperties> AnalyseLabels(ImageAnnotatorClient client, Image image)
{
    var facesList = new List<MediaProperties>();
    var imageLabels = client.DetectText(image);
    if (imageLabels != null && imageLabels.Count > 0)
    {
        foreach (var label in imageLabels)
        {
            facesList.Add(new MediaProperties
            {
                Type = MediaPropertyType.Labels,
                Value = label.Description,
                Score = label.Confidence
            });
        }
    }

    return facesList;
}
public ResultFromOCRBindingModel GetData(
    ResultFromOCRBindingModel model,
    IMatFileUploadEntry file)
{
    ResultFromOCRBindingModel result = new ResultFromOCRBindingModel();

    Regex snRegex = SerialNumberRegexes
        .GetSNRegex(model.ApplianceBrand);
    Regex modelRegex = LGModels
        .GetModelRegex(model.ApplianceType);

    ImageAnnotatorClient client = ImageAnnotatorClient.Create();

    MemoryStream stream = new MemoryStream();
    // Wait for the upload to finish and rewind the stream before handing it to Vision
    file.WriteToStreamAsync(stream).GetAwaiter().GetResult();
    stream.Position = 0;

    Google.Cloud.Vision.V1.Image image =
        Google.Cloud.Vision.V1.Image.FromStream(stream);

    var annotations = client.DetectText(image);
    foreach (var annotation in annotations)
    {
        if (snRegex.Match(annotation.Description).Success)
        {
            result.ApplianceSerialNumber = annotation.Description;
        }
        else if (modelRegex.Match(annotation.Description).Success)
        {
            result.ApplianceModel = annotation.Description;
        }
    }

    return result;
}
private IReadOnlyList<EntityAnnotation> GetImageData()
{
    Image image = Image.FromFile(@"E:\ALEKS\test_photo4.jpg");
    return client.DetectText(image);
}
public static async Task SendStreaks(string AmountOfStreaks, string StartOfStreak)
{
    AdbServer server = new AdbServer();
    StartServerResult Result;
    try
    {
        Result = server.StartServer($"{Directory.GetParent(System.Reflection.Assembly.GetEntryAssembly().Location)}\\adb.exe", restartServerIfNewer: false);
        Console.WriteLine($"Server now running on {AdbClient.AdbServerPort}");
    }
    catch
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("Failed to start the server");
        Console.ResetColor();
        return;
    }

    var Devices = AdbClient.Instance.GetDevices();
    if (Devices.Count > 1)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("You cannot have more than 1 phone plugged in");
        Console.ResetColor();
        return;
    }

    Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", $"{Directory.GetParent(System.Reflection.Assembly.GetEntryAssembly().Location)}/Auth.json");
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();

    int StreaksToSend = 0;
    try
    {
        StreaksToSend = int.Parse(AmountOfStreaks);
    }
    catch
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("Failed to parse AmountOfStreaks");
        Console.ResetColor();
        return;
    }

    List<string> Names = new List<string>();
    int StreaksSent = 0;
    DeviceData Device = Devices[0];

    // Read the device resolution from "wm size" so tap coordinates can be computed
    var GetSizeReceiver = new ConsoleOutputReceiver();
    Device.ExecuteShellCommand($"wm size", GetSizeReceiver);
    string[] lines = GetSizeReceiver.ToString().Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
    string DeviceSize = lines[1];
    DeviceSize = DeviceSize.Replace("Override size: ", string.Empty);
    string[] Split = DeviceSize.Split("x");
    string Width = Split[0];
    string Height = Split[1];

    // Tap through the UI to reach the recipient list
    Device.ExecuteShellCommand($"input tap {int.Parse(Width) / 2} {int.Parse(Height) - 150}", GetSizeReceiver);
    Thread.Sleep(150);
    Device.ExecuteShellCommand($"input tap {(int.Parse(Width) / 2) - 50} {int.Parse(Height) - 290}", GetSizeReceiver);
    Thread.Sleep(1200);
    Device.ExecuteShellCommand($"input tap {int.Parse(Width) - 50} {int.Parse(Height) - 100}", GetSizeReceiver);
    Thread.Sleep(500);

    for (int i = 0; i <= 20; i++)
    {
        var Receiver = new ConsoleOutputReceiver();
        string RandomFileName = GenerateRandomString(25);

        // Take a screenshot on the device
        Device.ExecuteShellCommand($"exec screencap -p > /storage/emulated/0/Pictures/{RandomFileName}.png", Receiver);
        if (string.IsNullOrWhiteSpace(Receiver.ToString()))
        {
            Console.WriteLine("Screenshot taken");
        }
        else
        {
            Console.ForegroundColor = ConsoleColor.Red;
            Console.WriteLine(Receiver.ToString());
            Console.ResetColor();
            return;
        }

        // Pull the screenshot to the local Screenshots folder, then remove it from the device
        using (SyncService service = new SyncService(new AdbSocket(new IPEndPoint(IPAddress.Loopback, AdbClient.AdbServerPort)), Device))
        using (Stream stream = File.OpenWrite($@"{Directory.GetParent(System.Reflection.Assembly.GetEntryAssembly().Location)}/Screenshots/{RandomFileName}.png"))
        {
            service.Pull($"/storage/emulated/0/Pictures/{RandomFileName}.png", stream, null, CancellationToken.None);
        }

        Device.ExecuteShellCommand($"rm /storage/emulated/0/Pictures/{RandomFileName}.png", Receiver);
        if (!string.IsNullOrWhiteSpace(Receiver.ToString()))
        {
            Console.ForegroundColor = ConsoleColor.Red;
            Console.WriteLine("Failed to delete screenshot on phone while moving it locally");
            Console.ResetColor();
            return;
        }

        // Run OCR on the screenshot and tap every name that starts with the expected prefix
        var image = Google.Cloud.Vision.V1.Image.FromFile($@"{Directory.GetParent(System.Reflection.Assembly.GetEntryAssembly().Location)}/Screenshots/{RandomFileName}.png");
        var response = client.DetectText(image);
        foreach (var annotation in response)
        {
            if (annotation != null)
            {
                if (annotation.Description.StartsWith(StartOfStreak))
                {
                    if (annotation.BoundingPoly.Vertices[0].Y <= 2100)
                    {
                        if (!Names.Contains(annotation.Description))
                        {
                            if (StreaksSent >= StreaksToSend)
                            {
                                i = 21;
                                break;
                            }
                            Console.WriteLine(annotation.Description);
                            Names.Add(annotation.Description);
                            Device.ExecuteShellCommand($"input tap {annotation.BoundingPoly.Vertices[0].X} {annotation.BoundingPoly.Vertices[0].Y}", Receiver);
                            StreaksSent++;
                        }
                    }
                }
            }
            Thread.Sleep(50);
        }

        // Scroll down to reveal the next page of names
        Device.ExecuteShellCommand($"input swipe 250 1100 250 400 300", Receiver);
        Thread.Sleep(950);
    }

    Console.WriteLine("Streaks have been selected, make sure the application didn't skip anyone");
}