/// <summary>Snippet for AsyncBatchAnnotateFilesAsync</summary>
        public async Task AsyncBatchAnnotateFilesAsync()
        {
            // Snippet: AsyncBatchAnnotateFilesAsync(IEnumerable<AsyncAnnotateFileRequest>, CallSettings)
            // Additional: AsyncBatchAnnotateFilesAsync(IEnumerable<AsyncAnnotateFileRequest>, CancellationToken)
            // Create the client used to issue the request.
            var imageAnnotatorClient = await ImageAnnotatorClient.CreateAsync();

            // Build the request argument(s): one empty file-annotation request.
            IEnumerable<AsyncAnnotateFileRequest> requests = new[]
            {
                new AsyncAnnotateFileRequest(),
            };
            // Start the long-running operation.
            Operation<AsyncBatchAnnotateFilesResponse, OperationMetadata> response = await imageAnnotatorClient.AsyncBatchAnnotateFilesAsync(requests);

            // Block until the long-running operation finishes.
            Operation<AsyncBatchAnnotateFilesResponse, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();

            // Read the final result of the completed operation.
            AsyncBatchAnnotateFilesResponse result = completedResponse.Result;

            // Alternatively, capture the operation name...
            string operationName = response.Name;
            // ...which can be stored and used later to re-attach to the same operation.
            Operation<AsyncBatchAnnotateFilesResponse, OperationMetadata> retrievedResponse = await imageAnnotatorClient.PollOnceAsyncBatchAnnotateFilesAsync(operationName);

            // Only read the result once the re-attached operation has completed.
            if (retrievedResponse.IsCompleted)
            {
                AsyncBatchAnnotateFilesResponse retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }
// Example #2
 /// <summary>
 /// Best-effort factory for an <see cref="ImageAnnotatorClient"/>: any failure
 /// during client creation is swallowed and surfaced to the caller as null.
 /// </summary>
 /// <returns>The created client, or null when creation fails.</returns>
 public async Task <ImageAnnotatorClient> CreateApp()
 {
     try
     {
         var client = await ImageAnnotatorClient.CreateAsync();
         return client;
     }
     catch (Exception)
     {
         // NOTE(review): the exception is intentionally discarded; callers
         // must handle a null client.
         return null;
     }
 }
// Example #3
        /// <summary>Snippet for BatchAnnotateImagesAsync</summary>
        public async Task BatchAnnotateImagesAsync()
        {
            // Snippet: BatchAnnotateImagesAsync(IEnumerable<AnnotateImageRequest>,CallSettings)
            // Additional: BatchAnnotateImagesAsync(IEnumerable<AnnotateImageRequest>,CancellationToken)
            // Create the client.
            var imageAnnotatorClient = await ImageAnnotatorClient.CreateAsync();

            // Build an (empty) batch of annotate-image requests.
            IEnumerable<AnnotateImageRequest> requests = new List<AnnotateImageRequest>();
            // Issue the call and await the batch response.
            var response = await imageAnnotatorClient.BatchAnnotateImagesAsync(requests);

            // End snippet
        }
        /// <summary>Snippet for BatchAnnotateImagesAsync</summary>
        public async Task BatchAnnotateImagesAsync_RequestObject()
        {
            // Snippet: BatchAnnotateImagesAsync(BatchAnnotateImagesRequest,CallSettings)
            // Create the client.
            var imageAnnotatorClient = await ImageAnnotatorClient.CreateAsync();

            // Build a request object with no individual image requests yet.
            var request = new BatchAnnotateImagesRequest
            {
                Requests = { },
            };
            // Send the batch and await its response.
            var response = await imageAnnotatorClient.BatchAnnotateImagesAsync(request);

            // End snippet
        }
// Example #5
        /// <summary>
        /// Detects printed text (OCR) in a local image file using Google Cloud Vision.
        /// </summary>
        /// <param name="localImagePath">Path to the image file on disk.</param>
        /// <returns>The full detected text (the first, whole-image annotation).</returns>
        /// <exception cref="TextDetectorException">Thrown when no text is detected.</exception>
        public static async Task <string> ReadPrintedText(string localImagePath)
        {
            ImageAnnotatorClient client = await ImageAnnotatorClient.CreateAsync();

            IReadOnlyList <EntityAnnotation> textAnnotations = await client.DetectTextAsync(await Google.Cloud.Vision.V1.Image.FromFileAsync(localImagePath));

            // BUG FIX: the original indexed textAnnotations[0] before checking for
            // any result, so an image with no text threw ArgumentOutOfRangeException
            // instead of the intended TextDetectorException.
            if (textAnnotations.Count == 0 || textAnnotations[0].Description == null)
            {
                throw new TextDetectorException();
            }

            return textAnnotations[0].Description;
        }
// Example #6
        /// <summary>
        /// OCRs a fixed local image and extracts an LG TV's serial number and
        /// model string via regex matching over the detected text annotations.
        /// </summary>
        /// <returns>A binding model carrying any serial number / model found.</returns>
        public async Task <ResultFromOCRBindingModel> GetData()
        {
            // Point the Google client libraries at the service-account JSON.
            Environment.SetEnvironmentVariable(
                "GOOGLE_APPLICATION_CREDENTIALS",
                CommonSecurityConstants.PathToGoogleCloudJson);

            var result = new ResultFromOCRBindingModel();

            // Patterns used to pick the serial number and model out of the OCR text.
            Regex snRegex = SerialNumberRegexes.GetSNRegex("LG");
            Regex modelRegex = UnitModels.GetModelRegex("LG", "TV");

            ImageAnnotatorClient client = await ImageAnnotatorClient.CreateAsync();

            Image image = await Image.FromFileAsync(
                @"E:\ALEKS\Images\pictures-diploma-project\1.jpg");

            IReadOnlyList <EntityAnnotation> annotations = await client.DetectTextAsync(image);

            // Scan every annotation; a later match overwrites an earlier one.
            foreach (EntityAnnotation annotation in annotations)
            {
                string text = annotation.Description;
                if (snRegex.Match(text).Success)
                {
                    result.ApplianceSerialNumber = text;
                }
                else if (modelRegex.Match(text).Success)
                {
                    result.ApplianceModel = text;
                }
            }

            return result;
        }
        /// <summary>Snippet for BatchAnnotateImagesAsync</summary>
        public async Task BatchAnnotateImagesRequestObjectAsync()
        {
            // Snippet: BatchAnnotateImagesAsync(BatchAnnotateImagesRequest, CallSettings)
            // Additional: BatchAnnotateImagesAsync(BatchAnnotateImagesRequest, CancellationToken)
            // Create the client.
            var imageAnnotatorClient = await ImageAnnotatorClient.CreateAsync();

            // Build a request holding a single (empty) image request.
            var request = new BatchAnnotateImagesRequest
            {
                Requests =
                {
                    new AnnotateImageRequest(),
                },
                Parent = "",
            };
            // Send the batch and await its response.
            var response = await imageAnnotatorClient.BatchAnnotateImagesAsync(request);

            // End snippet
        }
// Example #8
        /// <summary>
        /// Runs Google Cloud Vision text detection (OCR) over a raw image buffer.
        /// </summary>
        /// <param name="imageArr">Raw image bytes (any format Vision accepts).</param>
        /// <returns>The full detected text, or "No text found" when nothing is detected.</returns>
        public async Task <string> RecognizeImage(byte[] imageArr)
        {
            Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", AppSettings.Default.GOOGLE_APPLICATION_CREDENTIALS);

            var image = await Task.Run(() => Image.FromBytes(imageArr));

            var client = await ImageAnnotatorClient.CreateAsync();

            var textAnnotations = await client.DetectTextAsync(image);

            // BUG FIX: the original tested textAnnotations.ToString() == "[ ]",
            // which is not a reliable way to detect an empty result list; check
            // the element count instead.
            return textAnnotations.Count == 0 ? "No text found" : textAnnotations[0].Description;
        }
        /// <summary>Snippet for AsyncBatchAnnotateImagesAsync</summary>
        public async Task AsyncBatchAnnotateImagesAsync_RequestObject()
        {
            // Snippet: AsyncBatchAnnotateImagesAsync(AsyncBatchAnnotateImagesRequest,CallSettings)
            // Create the client.
            var imageAnnotatorClient = await ImageAnnotatorClient.CreateAsync();

            // Build the request: no per-image requests yet, plus an output config.
            var request = new AsyncBatchAnnotateImagesRequest
            {
                Requests     = { },
                OutputConfig = new OutputConfig(),
            };
            // Start the long-running operation.
            Operation<AsyncBatchAnnotateImagesResponse, OperationMetadata> response =
                await imageAnnotatorClient.AsyncBatchAnnotateImagesAsync(request);

            // Block until the long-running operation finishes.
            Operation<AsyncBatchAnnotateImagesResponse, OperationMetadata> completedResponse =
                await response.PollUntilCompletedAsync();

            // Read the final result of the completed operation.
            AsyncBatchAnnotateImagesResponse result = completedResponse.Result;

            // Alternatively, capture the operation name...
            string operationName = response.Name;
            // ...which can be stored and used later to re-attach to the same operation.
            Operation<AsyncBatchAnnotateImagesResponse, OperationMetadata> retrievedResponse =
                await imageAnnotatorClient.PollOnceAsyncBatchAnnotateImagesAsync(operationName);

            // Only read the result once the re-attached operation has completed.
            if (retrievedResponse.IsCompleted)
            {
                AsyncBatchAnnotateImagesResponse retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }
// Example #10
        /// <summary>
        /// Blob-triggered Azure Function: runs a Custom Vision detector over the
        /// uploaded image, crops each high-confidence "Completed Route" region,
        /// OCRs the crop with Google Vision, and queues the recognized routes.
        /// </summary>
        /// <param name="image">The uploaded blob stream ("pending/{name}").</param>
        /// <param name="applicationQueue">Output queue for the serialized result.</param>
        /// <param name="name">Blob name; text before the first '_' is the user id.</param>
        /// <param name="log">Function logger (currently unused).</param>
        /// <param name="executionContext">Used to locate the bundled credentials file.</param>
        public static async System.Threading.Tasks.Task Run(
            [BlobTrigger("pending/{name}")] Stream image,
            [Queue(Constants.QUEUE_NAME)] IAsyncCollector <string> applicationQueue,
            string name,
            ILogger log,
            ExecutionContext executionContext)
        {
            // FIX: streams and bitmaps are IDisposable (GDI+ handles) and were
            // never disposed; using declarations release them deterministically.
            using var sourceStream = new MemoryStream();
            await image.CopyToAsync(sourceStream);

            using var bitmap = new Bitmap(sourceStream);

            var customVisionPredictionClient = new CustomVisionPredictionClient
            {
                ApiKey   = Environment.GetEnvironmentVariable("CustomVisionPredictionClient_ApiKey"),
                Endpoint = Environment.GetEnvironmentVariable("CustomVisionPredictionClient_Endpoint")
            };

            // Rewind before handing the buffered image to the prediction client.
            sourceStream.Position = 0;

            var response = await customVisionPredictionClient.DetectImageAsync(Guid.Parse(Environment.GetEnvironmentVariable("CustomVisionPredictionClient_ProjectId")), "Completed Route",
                                                                               sourceStream);

            var routes = new List <string>();

            // FIX: the original created a brand-new ImageAnnotatorClient (and
            // re-set the credentials variable) on every qualifying prediction.
            // Create it lazily, once, on first use — still no Google credentials
            // are touched when nothing is detected, matching the old behavior.
            ImageAnnotatorClient visionClient = null;

            foreach (var predictionModel in response.Predictions)
            {
                if (predictionModel.TagName == "Completed Route" && predictionModel.Probability > 0.85)
                {
                    using var cropped = CropBitmap(bitmap,
                                                   predictionModel.BoundingBox.Left,
                                                   predictionModel.BoundingBox.Top,
                                                   predictionModel.BoundingBox.Width,
                                                   predictionModel.BoundingBox.Height);

                    using var memoryStream = new MemoryStream();
                    cropped.Save(memoryStream, System.Drawing.Imaging.ImageFormat.Jpeg);
                    memoryStream.Position = 0;

                    if (visionClient == null)
                    {
                        //https://stackoverflow.com/questions/53367132/where-to-store-files-for-azure-function
                        var path = Path.Combine(executionContext.FunctionAppDirectory, "Zwift-5c2367dfe003.json");

                        Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", path);

                        visionClient = await ImageAnnotatorClient.CreateAsync();
                    }

                    Image tmpImage = await Image.FromStreamAsync(memoryStream);

                    var tmp = await visionClient.DetectTextAsync(tmpImage);

                    // First annotation is the full detected text for the crop.
                    var annotation = tmp.FirstOrDefault();

                    if (annotation?.Description != null)
                    {
                        routes.Add(annotation.Description.Replace("\n", " ").Trim());
                    }
                }
            }

            if (routes.Count > 0)
            {
                // Blob name convention: "<userId>_<rest>".
                var user = name.Split("_").First();
                await applicationQueue.AddAsync(JsonConvert.SerializeObject(new MultipleRoutesCompletedModel
                {
                    UserId = user,
                    Routes = routes
                }));

                await applicationQueue.FlushAsync();
            }
        }
// Example #11
        /// <summary>
        /// Detects labels for an image via Google Vision and maps them onto one
        /// of the caller's categories, returning label suggestions plus a "best"
        /// display label.
        /// </summary>
        /// <param name="image">Image to analyze.</param>
        /// <param name="categories">Candidate categories; each must be a key in
        /// Constants.AllCategoriesToKeywords.</param>
        /// <returns>Suggestions, the resolved category, and the best label name.</returns>
        /// <exception cref="ArgumentException">A category is not recognized.</exception>
        public async Task <AnalysisResult> AnalyzeAsync(Image image, ICollection <string> categories)
        {
            var client = await ImageAnnotatorClient.CreateAsync().ConfigureAwait(false);

            var labels = await client.DetectLabelsAsync(image).ConfigureAwait(false);

            var suggestions    = new List <string>();
            var category       = Constants.NoValueString;
            var categoryFound  = false;
            var bestLabel      = Constants.NoValueString;
            var bestLabelFound = false;

            foreach (var label in labels)
            {
                // The first label not on the invalid-suggestion list wins as "best".
                if (!bestLabelFound)
                {
                    var invalid = false;

                    foreach (var word in Constants.InvalidSuggestions)
                    {
                        // FIX: culture-insensitive comparison instead of ToLower()
                        // (avoids the Turkish-I pitfall and per-call allocations).
                        if (string.Equals(label.Description, word, StringComparison.OrdinalIgnoreCase))
                        {
                            invalid = true;
                            break; // FIX: no need to scan the rest once matched.
                        }
                    }

                    if (!invalid)
                    {
                        bestLabel      = label.Description;
                        bestLabelFound = true;
                    }
                }

                suggestions.Add(label.Description);

                if (!categoryFound)
                {
                    foreach (var cat in categories)
                    {
                        // Validation intentionally stays inside the loop so an
                        // empty label list never throws, exactly as before.
                        if (!Constants.AllCategoriesToKeywords.ContainsKey(cat))
                        {
                            throw new ArgumentException(Constants.InvalidCategories);
                        }

                        foreach (var keyword in Constants.AllCategoriesToKeywords[cat])
                        {
                            // FIX: ordinal case-insensitive substring search
                            // instead of ToLower().Contains(...).
                            if (!categoryFound && label.Description.IndexOf(keyword, StringComparison.OrdinalIgnoreCase) >= 0)
                            {
                                category      = cat;
                                categoryFound = true;
                            }
                        }
                    }
                }
            }

            if (!categoryFound)
            {
                category = Constants.NoCategory;
            }

            // Refactor: apply logic to read labels
            if (category.Equals(Constants.NoCategory) || category.Equals(Constants.ManufacturedCategory))
            {
                bestLabel = "";
            }

            // NOTE(review): debug output kept to preserve observable behavior;
            // consider routing through a proper logger.
            Console.WriteLine(category);

            var name = bestLabel;

            AnalysisResult result = new AnalysisResult(suggestions, category, name);

            return(result);
        }
// Example #12
        /// <summary>
        /// Analyzes a base64-encoded image with Google Vision (faces, labels,
        /// landmarks, logos) and — when faces are present — Microsoft Face,
        /// persists all detector results, and returns a spoken description of
        /// the image as base64 audio.
        /// </summary>
        /// <param name="imageToVoice">Carries the requesting user's id and the base64 image.</param>
        /// <returns>200 OK with a DescribeImage payload, or 400 "Error" on any failure.</returns>
        public async Task <IHttpActionResult> describeImageWithVoice([FromBody] ImageToVoice imageToVoice)
        {
            try
            {
                var user = await UserManager.FindByIdAsync(imageToVoice.userId);

                var faceDescription = new FaceDescription();
                var googleClient    = await ImageAnnotatorClient.CreateAsync();

                // Decode the upload once; the bytes are reused by every detector below.
                var byteImage = Convert.FromBase64String(imageToVoice.base64Image);
                var image     = Image.FromBytes(byteImage);

                // Run the four Google Vision detectors sequentially.
                var responseForFacesGoogle = await googleClient.DetectFacesAsync(image);

                var responseForLabels = await googleClient.DetectLabelsAsync(image);

                var responseForLandmark = await googleClient.DetectLandmarksAsync(image);

                var responseForLogo = await googleClient.DetectLogosAsync(image);

                // Collect everything for the voice-description generator.
                var analyzeImage = new AnalyzeImage();
                analyzeImage.responseFaceGoogle     = responseForFacesGoogle;
                analyzeImage.responseForLabels      = responseForLabels;
                analyzeImage.responseForLogoGoogle  = responseForLogo;
                analyzeImage.responseLandMarkGoogle = responseForLandmark;

                // Only call the Microsoft Face API when Google saw at least one face.
                var responseFormMicrosoftFace = new List <CognitiveMicrosoft>();
                if (responseForFacesGoogle.Count > 0)
                {
                    responseFormMicrosoftFace = await faceDescription.MakeAnalysisRequestAsync(byteImage);

                    analyzeImage.responseForFacesMicrosft = responseFormMicrosoftFace;
                }

                string base64Voice = analyzeImage.describeImageWithVoice();

                // Persist the processed image first so its generated id can be
                // attached to the per-detector rows below.
                rela.ImagesProceseds.Add(new ImagesProcesed {
                    UserId = user.Id, date = DateTime.Now, image = imageToVoice.base64Image
                });
                await rela.SaveChangesAsync();

                // NOTE(review): recovering the new row's id by "newest date" assumes
                // no concurrent inserts land between SaveChanges and this query —
                // verify, or read the id back from the tracked entity instead.
                int imageId = rela.ImagesProceseds.OrderByDescending(img => img.date).ToList()[0].imageId;

                // Store each non-empty detector result as serialized JSON tied to the image.
                if (responseForFacesGoogle.Count > 0)
                {
                    rela.GoogleFaces.Add(new GoogleFace {
                        GoogleFace1 = JsonConvert.SerializeObject(responseForFacesGoogle), imageId = imageId
                    });
                }

                if (responseForLabels.Count > 0)
                {
                    rela.GoogleLabels.Add(new GoogleLabel {
                        GoogleLabel1 = JsonConvert.SerializeObject(responseForLabels), imageId = imageId
                    });
                }

                if (responseForLandmark.Count > 0)
                {
                    rela.GoogleLandmarks.Add(new GoogleLandmark {
                        GoogleLandamark = JsonConvert.SerializeObject(responseForLandmark), imageId = imageId
                    });
                }

                if (responseForLogo.Count > 0)
                {
                    rela.GoogleLogoes.Add(new GoogleLogo {
                        GoogleLogo1 = JsonConvert.SerializeObject(responseForLogo), imageId = imageId
                    });
                }

                if (responseFormMicrosoftFace.Count > 0)
                {
                    rela.MicrosoftFaces.Add(new MicrosoftFace {
                        imageId = imageId, MicrosoftFace1 = JsonConvert.SerializeObject(responseFormMicrosoftFace)
                    });
                }

                // The generated voice is always stored, even with no detections.
                rela.Voices.Add(new Voice {
                    imageId = imageId, GoogleVoice = base64Voice
                });

                await rela.SaveChangesAsync();

                // Build the response payload mirroring what was persisted.
                DescribeImage describeImage = new DescribeImage()
                {
                    googleFace    = responseForFacesGoogle,
                    label         = responseForLabels,
                    landmark      = responseForLandmark,
                    logo          = responseForLogo,
                    voiceBase64   = base64Voice,
                    microsoftFace = responseFormMicrosoftFace
                };

                return(Ok(describeImage));
            }
            catch (Exception ex) {
                // NOTE(review): all failures collapse into a generic 400 and the
                // exception is discarded — consider logging ex before returning.
                return(BadRequest("Error"));
            }
        }
// Example #13
        /// <summary>
        /// OCRs an uploaded image, groups the detected words into rows by their
        /// vertical position, splits each row into name/price/bonus columns, and
        /// stores the resulting table as JSON in Output.
        /// </summary>
        public async Task OnPostAsync()
        {
            // Nothing uploaded: leave Output untouched.
            if (Upload == null)
            {
                return;
            }
            var image = await Image.FromStreamAsync(Upload.OpenReadStream());

            var client = await ImageAnnotatorClient.CreateAsync();

            var response = await client.DetectTextAsync(image);

            // Skip(1) drops the first annotation (the whole-image text blob);
            // the rest are individual words, sorted here top-to-bottom.
            var responseList = response.Skip(1).OrderBy(x => x.BoundingPoly.Vertices.First().Y).ToList();
            var height       = 0;
            var oldY         = 0;
            var result       = new List <List <EntityAnnotation> >();
            List <EntityAnnotation> currentList = null;

            // Group words into rows: start a new row whenever a word's top sits
            // more than half a row-height below the current row's top.
            foreach (var item in responseList)
            {
                if (oldY == 0 || item.BoundingPoly.Vertices[0].Y > oldY + height * 0.5)
                {
                    oldY   = item.BoundingPoly.Vertices[0].Y;
                    // Row height from the word's vertical extent — assumes
                    // vertex 0 and vertex 3 are vertically opposite corners
                    // (TODO confirm against the Vision BoundingPoly ordering).
                    height = item.BoundingPoly.Vertices[0].Y - item.BoundingPoly.Vertices[3].Y;
                    if (height < 0)
                    {
                        height *= -1;
                    }

                    currentList = new List <EntityAnnotation>();
                    result.Add(currentList);
                }

                currentList.Add(item);
            }

            var rows = new List <string>();

            // Within each row, order words left-to-right and join with spaces.
            foreach (var cells in result.Select(row => row.OrderBy(x => x.BoundingPoly.Vertices.First().X).ToArray()))
            {
                var stringBuilder = new StringBuilder();
                foreach (var cell in cells)
                {
                    stringBuilder.Append(cell.Description);
                    stringBuilder.Append(" ");
                }

                rows.Add(stringBuilder.ToString().Trim());
            }

            var output = new List <List <string> >();

            // Split each row into [name, price, bonus] using a trailing
            // "-?DDD,DD" price and an optional "B"/"PB" bonus marker.
            foreach (var row in rows)
            {
                var regex  = Regex.Match(row, @"(?<price>-?\d{1,3},\d{2})?\s?(?<bonus>P?B)?$");
                var length = regex.Groups["price"].Index;
                // Index 0 means no price was captured: keep the whole row as the name.
                length = length == 0 ? row.Length : length;

                var currentRow = new List <string>
                {
                    row.Substring(0, length),
                    regex.Groups["price"].Value,
                    regex.Groups["bonus"].Value
                };
                output.Add(currentRow);
            }

            Output = JsonConvert.SerializeObject(output);
        }