public void BatchAnnotateImages()
        {
            moq::Mock <ImageAnnotator.ImageAnnotatorClient> mockGrpcClient = new moq::Mock <ImageAnnotator.ImageAnnotatorClient>(moq::MockBehavior.Strict);

            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock <lro::Operations.OperationsClient>().Object);
            BatchAnnotateImagesRequest request = new BatchAnnotateImagesRequest
            {
                Requests =
                {
                    new AnnotateImageRequest(),
                },
            };
            BatchAnnotateImagesResponse expectedResponse = new BatchAnnotateImagesResponse
            {
                Responses =
                {
                    new AnnotateImageResponse(),
                },
            };

            mockGrpcClient.Setup(x => x.BatchAnnotateImages(request, moq::It.IsAny <grpccore::CallOptions>())).Returns(expectedResponse);
            ImageAnnotatorClient        client   = new ImageAnnotatorClientImpl(mockGrpcClient.Object, null);
            BatchAnnotateImagesResponse response = client.BatchAnnotateImages(request.Requests);

            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }
Example #2
        public void ThrowOnAnyError()
        {
            // Snippet: ThrowOnAnyError
            Image image = new Image(); // No content or source!
            // Just a single request in this example, but usually BatchAnnotateImages would be
            // used with multiple requests.
            var request = new AnnotateImageRequest
            {
                Image    = image,
                Features = { new Feature {
                                 Type = Feature.Types.Type.SafeSearchDetection
                             } }
            };
            ImageAnnotatorClient client = ImageAnnotatorClient.Create();

            try
            {
                BatchAnnotateImagesResponse response = client.BatchAnnotateImages(new[] { request });
                // ThrowOnAnyError will throw if any individual response in response.Responses
                // contains an error. Other responses may still have useful results.
                // Errors can be detected manually by checking the Error property in each
                // individual response.
                response.ThrowOnAnyError();
            }
            catch (AggregateException e)
            {
                // Because a batch can have multiple errors, the exception thrown is AggregateException.
                // Each inner exception is an AnnotateImageException
                foreach (AnnotateImageException innerException in e.InnerExceptions)
                {
                    Console.WriteLine(innerException.Response.Error);
                }
            }
            // End snippet
        }
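The snippet's comments also note that errors can be checked manually instead of calling ThrowOnAnyError; a minimal sketch of that approach (reusing a client and request as above; this helper is not part of the original sample) could look like:

        public void CheckBatchErrorsManually(ImageAnnotatorClient client, AnnotateImageRequest request)
        {
            // Inspect each response's Error field directly, so responses without
            // errors can still be consumed even when others failed.
            BatchAnnotateImagesResponse response = client.BatchAnnotateImages(new[] { request });
            foreach (AnnotateImageResponse single in response.Responses)
            {
                if (single.Error != null)
                {
                    Console.WriteLine($"Error: {single.Error.Message}");
                }
                else
                {
                    Console.WriteLine(single.SafeSearchAnnotation);
                }
            }
        }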
        public void ThrowOnAnyError_MultipleErrors()
        {
            var response = new BatchAnnotateImagesResponse
            {
                Responses =
                {
                    new AnnotateImageResponse {
                        Error = new Rpc.Status{
                            Message = "Boom"
                        }
                    },
                    new AnnotateImageResponse {
                        TextAnnotations =     { new EntityAnnotation{
                                                    Description = "X"
                                                } }
                    },
                    new AnnotateImageResponse {
                        Error = new Rpc.Status{
                            Message = "Bang"
                        }
                    }
                }
            };
            var exception        = Assert.Throws <AggregateException>(() => response.ThrowOnAnyError());
            var nestedExceptions = exception.InnerExceptions.Cast <AnnotateImageException>().ToList();

            Assert.Equal(2, nestedExceptions.Count);
            Assert.Equal("Boom", nestedExceptions[0].Message);
            Assert.Equal("Bang", nestedExceptions[1].Message);
            Assert.Same(response.Responses[0], nestedExceptions[0].Response);
            // response.Responses[1] is skipped as it had no error.
            Assert.Same(response.Responses[2], nestedExceptions[1].Response);
        }
        public async stt::Task BatchAnnotateImagesRequestObjectAsync()
        {
            moq::Mock <ImageAnnotator.ImageAnnotatorClient> mockGrpcClient = new moq::Mock <ImageAnnotator.ImageAnnotatorClient>(moq::MockBehavior.Strict);

            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock <lro::Operations.OperationsClient>().Object);
            BatchAnnotateImagesRequest request = new BatchAnnotateImagesRequest
            {
                Requests =
                {
                    new AnnotateImageRequest(),
                },
                Parent = "parent7858e4d0",
            };
            BatchAnnotateImagesResponse expectedResponse = new BatchAnnotateImagesResponse
            {
                Responses =
                {
                    new AnnotateImageResponse(),
                },
            };

            mockGrpcClient.Setup(x => x.BatchAnnotateImagesAsync(request, moq::It.IsAny <grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall <BatchAnnotateImagesResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            ImageAnnotatorClient        client = new ImageAnnotatorClientImpl(mockGrpcClient.Object, null);
            BatchAnnotateImagesResponse responseCallSettings = await client.BatchAnnotateImagesAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));

            xunit::Assert.Same(expectedResponse, responseCallSettings);
            BatchAnnotateImagesResponse responseCancellationToken = await client.BatchAnnotateImagesAsync(request, st::CancellationToken.None);

            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }
Example #5
        public BatchAnnotateImagesResponse SendGoogleOcrRequest(List <StorageFile> files)
        {
            VisionService service  = this.CreateService();
            var           annotate = service.Images.Annotate(this.CreateRequest(files));
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();

            return(batchAnnotateImagesResponse);
        }
Example #6
        public static void IniciarServicio()
        {
            // Prepare the service
            VisionSettings vs         = new VisionSettings();
            var            credential = vs.CreateCredential();
            var            service    = vs.CreateService(credential);

            // Prepare the request
            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();


            var infoImagenes = ObtenerInfoArchivos();

            foreach (var imagen in infoImagenes)
            {
                byte[] file = File.ReadAllBytes(imagen.FullName.ToString());
                batchRequest.Requests.Add(new AnnotateImageRequest()
                {
                    Features = new List <Feature>()
                    {
                        new Feature()
                        {
                            Type = "TEXT_DETECTION", MaxResults = 1
                        },
                    },
                    ImageContext = new ImageContext()
                    {
                        LanguageHints = new List <string>()
                        {
                            "es"
                        }
                    },
                    Image = new Image()
                    {
                        Content = Convert.ToBase64String(file)
                    }
                });
            }
            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();
            var cantidadRespuestasImagenes = batchAnnotateImagesResponse.Responses.Count();

            for (int i = 0; i < cantidadRespuestasImagenes; i++)
            {
                AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[i];
                if (annotateImageResponse.TextAnnotations != null)
                {
                    var texto = annotateImageResponse.TextAnnotations[0].Description;
                    using (var tw = new StreamWriter(@"D:\" + infoImagenes[i].Name.ToString() + ".txt", true))
                    {
                        tw.WriteLine(texto);
                    }
                    Console.WriteLine(texto);
                }
            }
        }
Example #7
        static void Main(string[] args)
        {
            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();

            var service = CreateService(CreateCredential());


            var file = File.ReadAllBytes(@"C:\Users\Loui\Downloads\IMG_20171207_212819.jpg");

            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = "TEXT_DETECTION"
                    },
                },
                ImageContext = new ImageContext()
                {
                },
                Image = new Image()
                {
                    Content = Convert.ToBase64String(file)
                }
            });

            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();

            if (batchAnnotateImagesResponse.Responses.Any())
            {
                foreach (var res in batchAnnotateImagesResponse.Responses)
                {
                    if (res.Error != null)
                    {
                        if (res.Error.Message != null)
                        {
                            Console.WriteLine(res.Error.Message);
                        }
                    }
                    else
                    {
                        if (res.TextAnnotations != null && res.TextAnnotations.Any())
                        {
                            foreach (var text in res.TextAnnotations)
                            {
                                //text.BoundingPoly.Vertices[0].X // y
                                Console.WriteLine("Description:" + text.Description + " - Etag:" + text.ETag + " - Locale:" + text.Locale + " - Score:" + text.Score);
                            }
                        }
                    }
                }
            }
        }
        /// <summary>
        /// Reads the image as a byte array and sends it to the Google Vision API.
        /// </summary>
        /// <param name="imgPath">Path to the image file to annotate.</param>
        /// <param name="language">Language hint passed to the API.</param>
        /// <param name="type">Vision feature type, e.g. "TEXT_DETECTION".</param>
        /// <param name="jsonPath">Path to the service account credentials JSON file.</param>
        /// <returns>The annotation response, or an empty response if an error occurred.</returns>
        public AnnotateImageResponse AnalyseImage(string imgPath, string language, string type, string jsonPath)
        {
            OAuthService oAuth = new OAuthService();

            var credential = oAuth.CreateCredential(jsonPath);
            var service    = oAuth.CreateService(credential);

            service.HttpClient.Timeout = new TimeSpan(1, 1, 1);
            byte[] file = File.ReadAllBytes(imgPath);

            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();
            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = type
                    },
                },
                ImageContext = new ImageContext()
                {
                    LanguageHints = new List <string>()
                    {
                        language
                    }
                },
                Image = new Image()
                {
                    Content = Convert.ToBase64String(file)
                }
            });

            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();

            if (batchAnnotateImagesResponse.Responses.Any())
            {
                AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[0];
                if (annotateImageResponse.Error != null)
                {
                    if (annotateImageResponse.Error.Message != null)
                    {
                        Error = annotateImageResponse.Error.Message;
                    }
                }
                else
                {
                    return(annotateImageResponse);
                }
            }

            return(new AnnotateImageResponse());
        }
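A hypothetical caller for AnalyseImage might look like the sketch below; the class name VisionOcrService and the file paths are illustrative assumptions, and only the AnalyseImage signature comes from the example above.

        public void AnalyseImageUsageSketch()
        {
            // VisionOcrService is an assumed name for the class that hosts AnalyseImage.
            var ocr = new VisionOcrService();
            AnnotateImageResponse result = ocr.AnalyseImage(
                @"C:\images\receipt.jpg",          // imgPath
                "en",                              // language hint
                "TEXT_DETECTION",                  // feature type
                @"C:\keys\service-account.json");  // jsonPath
            if (result.TextAnnotations != null && result.TextAnnotations.Any())
            {
                Console.WriteLine(result.TextAnnotations[0].Description);
            }
        }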
Example #9
        /// <summary>
        /// Converts the bitmap to a byte array and sends it to the Google Vision API.
        /// </summary>
        /// <param name="imagem">The bitmap to annotate.</param>
        /// <param name="language">Language hint passed to the API.</param>
        /// <param name="type">Vision feature type, e.g. "TEXT_DETECTION".</param>
        /// <returns>The detected text, or an empty string if nothing was recognized.</returns>
        public async Task <string> GetText(System.Drawing.Bitmap imagem, string language, string type)
        {
            TextResult = JsonResult = "";
            var credential = CreateCredential();
            var service    = CreateService(credential);

            service.HttpClient.Timeout = new TimeSpan(1, 1, 1);
            byte[] file = ImageToByte(imagem);


            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();
            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = type, MaxResults = 1
                    },
                },
                ImageContext = new ImageContext()
                {
                    LanguageHints = new List <string>()
                    {
                        language
                    }
                },
                Image = new Image()
                {
                    Content = Convert.ToBase64String(file)
                }
            });

            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();

            if (batchAnnotateImagesResponse.Responses.Any())
            {
                AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[0];
                if (annotateImageResponse.Error != null)
                {
                    if (annotateImageResponse.Error.Message != null)
                    {
                        Error = annotateImageResponse.Error.Message;
                    }
                }
                else if (annotateImageResponse.TextAnnotations != null && annotateImageResponse.TextAnnotations.Any())
                {
                    TextResult = annotateImageResponse.TextAnnotations[0].Description.Replace("\n", ";");
                }
            }
            return(TextResult);
        }
 public void BatchAnnotateImages()
 {
     // Snippet: BatchAnnotateImages(IEnumerable<AnnotateImageRequest>,CallSettings)
     // Create client
     ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.Create();
     // Initialize request argument(s)
     IEnumerable <AnnotateImageRequest> requests = new List <AnnotateImageRequest>();
     // Make the request
     BatchAnnotateImagesResponse response = imageAnnotatorClient.BatchAnnotateImages(requests);
     // End snippet
 }
        public async Task BatchAnnotateImagesAsync()
        {
            // Snippet: BatchAnnotateImagesAsync(IEnumerable<AnnotateImageRequest>,CallSettings)
            // Additional: BatchAnnotateImagesAsync(IEnumerable<AnnotateImageRequest>,CancellationToken)
            // Create client
            ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.Create();
            // Initialize request argument(s)
            IEnumerable <AnnotateImageRequest> requests = new List <AnnotateImageRequest>();
            // Make the request
            BatchAnnotateImagesResponse response = await imageAnnotatorClient.BatchAnnotateImagesAsync(requests);

            // End snippet
        }
Example #12
 /// <summary>Snippet for BatchAnnotateImages</summary>
 public void BatchAnnotateImages_RequestObject()
 {
     // Snippet: BatchAnnotateImages(BatchAnnotateImagesRequest,CallSettings)
     // Create client
     ImageAnnotatorClient imageAnnotatorClient = ImageAnnotatorClient.Create();
     // Initialize request argument(s)
     BatchAnnotateImagesRequest request = new BatchAnnotateImagesRequest
     {
         Requests = { },
     };
     // Make the request
     BatchAnnotateImagesResponse response = imageAnnotatorClient.BatchAnnotateImages(request);
     // End snippet
 }
        /// <summary>Snippet for BatchAnnotateImagesAsync</summary>
        public async Task BatchAnnotateImagesAsync_RequestObject()
        {
            // Snippet: BatchAnnotateImagesAsync(BatchAnnotateImagesRequest,CallSettings)
            // Create client
            ImageAnnotatorClient imageAnnotatorClient = await ImageAnnotatorClient.CreateAsync();

            // Initialize request argument(s)
            BatchAnnotateImagesRequest request = new BatchAnnotateImagesRequest
            {
                Requests = { },
            };
            // Make the request
            BatchAnnotateImagesResponse response = await imageAnnotatorClient.BatchAnnotateImagesAsync(request);

            // End snippet
        }
Example #14
        public void BatchAnnotateImages2()
        {
            Mock <ImageAnnotator.ImageAnnotatorClient> mockGrpcClient = new Mock <ImageAnnotator.ImageAnnotatorClient>(MockBehavior.Strict);
            BatchAnnotateImagesRequest request = new BatchAnnotateImagesRequest
            {
                Requests = { },
            };
            BatchAnnotateImagesResponse expectedResponse = new BatchAnnotateImagesResponse();

            mockGrpcClient.Setup(x => x.BatchAnnotateImages(request, It.IsAny <CallOptions>()))
            .Returns(expectedResponse);
            ImageAnnotatorClient        client   = new ImageAnnotatorClientImpl(mockGrpcClient.Object, null);
            BatchAnnotateImagesResponse response = client.BatchAnnotateImages(request);

            Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }
Example #15
        public async Task BatchAnnotateImagesAsync2()
        {
            Mock <ImageAnnotator.ImageAnnotatorClient> mockGrpcClient = new Mock <ImageAnnotator.ImageAnnotatorClient>(MockBehavior.Strict);
            BatchAnnotateImagesRequest request = new BatchAnnotateImagesRequest
            {
                Requests = { },
            };
            BatchAnnotateImagesResponse expectedResponse = new BatchAnnotateImagesResponse();

            mockGrpcClient.Setup(x => x.BatchAnnotateImagesAsync(request, It.IsAny <CallOptions>()))
            .Returns(new Grpc.Core.AsyncUnaryCall <BatchAnnotateImagesResponse>(Task.FromResult(expectedResponse), null, null, null, null));
            ImageAnnotatorClient        client   = new ImageAnnotatorClientImpl(mockGrpcClient.Object, null);
            BatchAnnotateImagesResponse response = await client.BatchAnnotateImagesAsync(request);

            Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }
        public void BatchAnnotateImages()
        {
            Image image1 = LoadResourceImage("SchmidtBrinPage.jpg");
            Image image2 = LoadResourceImage("Chrome.png");
            // Sample: BatchAnnotateImages
            // Additional: BatchAnnotateImages(IEnumerable<AnnotateImageRequest>,*)
            ImageAnnotatorClient client = ImageAnnotatorClient.Create();
            // Perform face recognition on one image, and logo recognition on another.
            AnnotateImageRequest request1 = new AnnotateImageRequest
            {
                Image    = image1,
                Features = { new Feature {
                                 Type = Feature.Types.Type.FaceDetection
                             } }
            };
            AnnotateImageRequest request2 = new AnnotateImageRequest
            {
                Image    = image2,
                Features = { new Feature {
                                 Type = Feature.Types.Type.LogoDetection
                             } }
            };

            BatchAnnotateImagesResponse response = client.BatchAnnotateImages(new[] { request1, request2 });

            Console.WriteLine("Faces in image 1:");
            foreach (FaceAnnotation face in response.Responses[0].FaceAnnotations)
            {
                string poly = string.Join(" - ", face.BoundingPoly.Vertices.Select(v => $"({v.X}, {v.Y})"));
                Console.WriteLine($"  Confidence: {(int)(face.DetectionConfidence * 100)}%; BoundingPoly: {poly}");
            }
            Console.WriteLine("Logos in image 2:");
            foreach (EntityAnnotation logo in response.Responses[1].LogoAnnotations)
            {
                Console.WriteLine($"Description: {logo.Description}");
            }
            foreach (Status error in response.Responses.Select(r => r.Error).Where(e => e != null))
            {
                Console.WriteLine($"Error detected: {error}");
            }
            // End sample

            Assert.Equal(3, response.Responses[0].FaceAnnotations.Count);
            Assert.Equal(1, response.Responses[1].LogoAnnotations.Count);
        }
        public string RecognizeText(Bitmap image, List <string> languages, int timeout)
        {
            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();

            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = "TEXT_DETECTION", MaxResults = 1
                    },
                },
                ImageContext = new ImageContext()
                {
                    LanguageHints = languages
                },

                Image = new GoogleOCR.Image()
                {
                    Content = Convert.ToBase64String(image.ImageToBytes())
                },
            });

            string output = string.Empty;

            using (var visioService = this.CreateService())
            {
                visioService.HttpClient.Timeout = new TimeSpan(0, 0, 0, 0, timeout);
                var annotate = visioService.Images.Annotate(batchRequest);
                BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();
                var annotations = batchAnnotateImagesResponse.Responses;
                if (annotations.Count > 0 &&
                    annotations[0].TextAnnotations != null &&
                    annotations[0].TextAnnotations.Count > 0 &&
                    annotations[0].TextAnnotations[0].Description != null)
                {
                    output = annotations[0].TextAnnotations[0].Description.TrimEnd().TrimStart();
                }
            }
            return(output);
        }
        private IEnumerable <AnalysisResponse> Analyze(ImageAnnotatorClient client, List <AnnotateImageRequest> requests, List <Guid> requestSystemIds)
        {
            BatchAnnotateImagesResponse response = client.BatchAnnotateImages(requests);
            var requestIndex = 0;

            foreach (var res in response.Responses)
            {
                var imageSystemId = requestSystemIds[requestIndex];
                var labels        = res.LabelAnnotations.Select(a => a.Description);
                var landmarks     = res.LandmarkAnnotations.Select(a => a.Description);
                requestIndex++;
                yield return(new AnalysisResponse()
                {
                    SystemId = imageSystemId, Tags = landmarks.Concat(labels)
                });
            }
            requests.Clear();
            requestSystemIds.Clear();
        }
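The iterator above could be driven as in the sketch below; the file paths, feature choices, and Guid source are assumptions, and only the Analyze signature and the AnalysisResponse fields come from the example.

        private void AnalyzeUsageSketch()
        {
            ImageAnnotatorClient client = ImageAnnotatorClient.Create();
            var requests = new List <AnnotateImageRequest>();
            var ids      = new List <Guid>();

            foreach (string path in new[] { "photo1.jpg", "photo2.jpg" })
            {
                requests.Add(new AnnotateImageRequest
                {
                    Image    = Google.Cloud.Vision.V1.Image.FromFile(path),
                    Features =
                    {
                        new Feature { Type = Feature.Types.Type.LabelDetection },
                        new Feature { Type = Feature.Types.Type.LandmarkDetection },
                    },
                });
                ids.Add(Guid.NewGuid());
            }

            foreach (AnalysisResponse analysis in Analyze(client, requests, ids))
            {
                Console.WriteLine($"{analysis.SystemId}: {string.Join(", ", analysis.Tags)}");
            }
        }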
Example #19
        public IList <FaceAnnotation> detectFaces(string path, int maxResults)
        {
            byte[] data = File.ReadAllBytes(path);

            AnnotateImageRequest request = new AnnotateImageRequest();

            Google.Apis.Vision.v1.Data.Image img = new Google.Apis.Vision.v1.Data.Image();
            img.Content   = Convert.ToBase64String(data);
            request.Image = img;

            Feature feature = new Feature();

            feature.Type       = "FACE_DETECTION";
            feature.MaxResults = maxResults;

            request.Features = new List <Feature>()
            {
                feature
            };



            BatchAnnotateImagesRequest batchAnnotate = new BatchAnnotateImagesRequest();

            batchAnnotate.Requests = new List <AnnotateImageRequest>()
            {
                request
            };
            ImagesResource.AnnotateRequest annotate = _vision.Images.Annotate(batchAnnotate);

            BatchAnnotateImagesResponse batchResponse = annotate.Execute();

            AnnotateImageResponse response = batchResponse.Responses[0];

            if (response.FaceAnnotations == null)
            {
                throw new Exception(response.Error.Message);
            }

            return(response.FaceAnnotations);
        }
        /// <summary>Snippet for BatchAnnotateImagesAsync</summary>
        public async Task BatchAnnotateImagesRequestObjectAsync()
        {
            // Snippet: BatchAnnotateImagesAsync(BatchAnnotateImagesRequest, CallSettings)
            // Additional: BatchAnnotateImagesAsync(BatchAnnotateImagesRequest, CancellationToken)
            // Create client
            ImageAnnotatorClient imageAnnotatorClient = await ImageAnnotatorClient.CreateAsync();

            // Initialize request argument(s)
            BatchAnnotateImagesRequest request = new BatchAnnotateImagesRequest
            {
                Requests =
                {
                    new AnnotateImageRequest(),
                },
                Parent = "",
            };
            // Make the request
            BatchAnnotateImagesResponse response = await imageAnnotatorClient.BatchAnnotateImagesAsync(request);

            // End snippet
        }
        private string convertResponseToString(BatchAnnotateImagesResponse response)
        {
            var message = "I found these things:\n\n";

            List <EntityAnnotation> labels = response.Responses.First().LabelAnnotations?.ToList();

            if (labels != null && labels.Any())
            {
                foreach (var label in labels)
                {
                    message += string.Format("{0:0.000}: {1}", label.Score, label.Description);
                    message += "\n";
                }
            }
            else
            {
                message += "nothing";
            }

            return(message);
        }
        public void ThrowOnAnyError_NoErrors()
        {
            var response = new BatchAnnotateImagesResponse
            {
                Responses =
                {
                    new AnnotateImageResponse {
                        TextAnnotations =     { new EntityAnnotation {
                                                    Description = "X"
                                                } }
                    },
                    new AnnotateImageResponse {
                        LandmarkAnnotations = { new EntityAnnotation {
                                                    Description = "Y"
                                                } }
                    },
                }
            };

            Assert.Same(response, response.ThrowOnAnyError());
        }
Example #23
        /// <summary>
        /// Performs image detection and annotation for a batch of images.
        /// </summary>
        private object DetectAnnotateImagesInBatch(List <System.IO.Stream> fileStreamCollection)
        {
            try
            {
                var collectionAnnotateImageRequest   = new List <AnnotateImageRequest>();
                BatchAnnotateImagesResponse response = new BatchAnnotateImagesResponse();

                foreach (Stream item in fileStreamCollection)
                {
                    var image   = Google.Cloud.Vision.V1.Image.FromStream(item);
                    var request = new AnnotateImageRequest
                    {
                        Image    = image,
                        Features = { new Feature {
                                         Type = Feature.Types.Type.TextDetection
                                     } }
                    };

                    collectionAnnotateImageRequest.Add(request);
                }

                response = clienteService.BatchAnnotateImages(collectionAnnotateImageRequest);

                return(response);
            }
            catch (AnnotateImageException e)
            {
                //AnnotateImageResponse response = e.Response;
                //return (response.Error);
                throw new FaultException(e.Response.ToString());
            }
            //catch (AggregateException e)
            //{
            //    foreach (AnnotateImageException innerException in e.InnerExceptions)
            //    {
            //        return (innerException.Response.Error);
            //    }
            //}
        }
Example #24
        public static int Main(string[] args)
        {
            // Create client
            ImageAnnotatorClient client = ImageAnnotatorClient.Create();

            // Initialize request argument(s)
            IEnumerable <AnnotateImageRequest> requests = new[]
            {
                new AnnotateImageRequest
                {
                    Image = new Image
                    {
                        Source = new ImageSource
                        {
                            GcsImageUri = "gs://gapic-toolkit/President_Barack_Obama.jpg",
                        },
                    },
                    Features =
                    {
                        new Feature
                        {
                            Type = Feature.Types.Type.FaceDetection,
                        },
                    },
                },
            };

            // Call API method
            BatchAnnotateImagesResponse response = client.BatchAnnotateImages(requests);

            // Show the result
            Console.WriteLine(response);

            // Success
            Console.WriteLine("Smoke test passed OK");
            return(0);
        }
        public static int Main(string[] args)
        {
            // Create client
            ImageAnnotatorClient client = ImageAnnotatorClient.Create();

            // Initialize request argument(s)
            IEnumerable <AnnotateImageRequest> requests = new[]
            {
                new AnnotateImageRequest
                {
                    Image = new Image
                    {
                        Source = new ImageSource
                        {
                            GcsImageUri = "gs://cloud-samples-data/vision/face_detection/celebrity_recognition/sergey.jpg",
                        },
                    },
                    Features =
                    {
                        new Feature
                        {
                            Type = Feature.Types.Type.FaceDetection,
                        },
                    },
                },
            };

            // Call API method
            BatchAnnotateImagesResponse response = client.BatchAnnotateImages(requests);

            // Show the result
            Console.WriteLine(response);

            // Success
            Console.WriteLine("Smoke test passed OK");
            return(0);
        }
        public void ThrowOnAnyError_OneError()
        {
            var response = new BatchAnnotateImagesResponse
            {
                Responses =
                {
                    new AnnotateImageResponse {
                        TextAnnotations =     { new EntityAnnotation{
                                                    Description = "X"
                                                } }
                    },
                    new AnnotateImageResponse {
                        Error = new Rpc.Status{
                            Message = "Bang"
                        }
                    }
                }
            };
            var exception       = Assert.Throws <AggregateException>(() => response.ThrowOnAnyError());
            var nestedException = (AnnotateImageException)exception.InnerExceptions[0];

            Assert.Equal("Bang", nestedException.Message);
            Assert.Same(response.Responses[1], nestedException.Response);
        }
Example #27
        IEnumerable <FaceAnnotation> detectFaces(IEnumerable <byte> inputImage, int maxResults)
        {
            var img = new Google.Apis.Vision.v1.Data.Image();

            img.Content = Convert.ToBase64String(inputImage.ToArray());

            AnnotateImageRequest request = new AnnotateImageRequest();

            request.Image    = img;
            request.Features = new List <Feature>();
            request.Features.Add(new Feature()
            {
                MaxResults = maxResults, Type = "FACE_DETECTION"
            });

            var batch = new BatchAnnotateImagesRequest();

            batch.Requests = new List <AnnotateImageRequest>();
            batch.Requests.Add(request);
            var annotate = _visionService.Images.Annotate(batch);
            //annotate.Key = "YOUR_API_KEY";

            BatchAnnotateImagesResponse batchResponse = annotate.Execute();

            AnnotateImageResponse response = batchResponse.Responses[0];

            if (response.FaceAnnotations == null)
            {
                throw new IOException(
                          response.Error != null
                        ? response.Error.Message
                        : "Unknown error getting image annotations");
            }

            return(response.FaceAnnotations);
        }
Example #28
        public void GetText(byte[] imgData, string language)
        {
            TextResult = JsonResult = "";

            var credential = CreateCredential();
            var service    = CreateService(credential);

            service.HttpClient.Timeout = new TimeSpan(1, 1, 1);

            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();
            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = "TEXT_DETECTION", MaxResults = 1
                    },
                },
                ImageContext = new ImageContext()
                {
                    LanguageHints = new List <string>()
                    {
                        language
                    }
                },
                Image = new Image()
                {
                    Content = Convert.ToBase64String(imgData)
                }
            });

            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();

            if (batchAnnotateImagesResponse.Responses.Any())
            {
                AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[0];
                if (annotateImageResponse.Error != null)
                {
                    if (annotateImageResponse.Error.Message != null)
                    {
                        Error = annotateImageResponse.Error.Message;
                    }
                }
                else
                {
                    if (annotateImageResponse.TextAnnotations != null && annotateImageResponse.TextAnnotations.Any())
                    {
                        TextResult = annotateImageResponse.TextAnnotations[0].Description.Replace("\n", "\r\n");
                        JsonResult = JsonConvert.SerializeObject(annotateImageResponse.TextAnnotations[0]);
                    }
                    return;
                }
            }

            return;
            //return TextResult;
        }
Example #29
        /// <summary>
        /// Converts the image to a byte array and sends it to the Google Vision API.
        /// </summary>
        /// <param name="img">The OpenCV image (Mat) to annotate.</param>
        /// <param name="language">Language hint passed to the API.</param>
        /// <param name="type">Vision feature type, e.g. "TEXT_DETECTION".</param>
        /// <returns>The detected text or serialized annotation, or an empty string on error.</returns>
        public async Task <string> GetText(Mat img, string language, string type)
        {
            TextResult = JsonResult = "";
            var credential = CreateCredential();
            var service    = CreateService(credential);

            service.HttpClient.Timeout = new TimeSpan(1, 1, 1);
            byte[] file = img.ToBytes();

            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();
            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = type, MaxResults = 1
                    },
                },
                ImageContext = new ImageContext()
                {
                    LanguageHints = new List <string>()
                    {
                        language
                    }
                },
                Image = new Image()
                {
                    Content = Convert.ToBase64String(file)
                }
            });

            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();

            if (batchAnnotateImagesResponse.Responses.Any())
            {
                AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[0];
                if (annotateImageResponse.Error != null)
                {
                    if (annotateImageResponse.Error.Message != null)
                    {
                        Error = annotateImageResponse.Error.Message;
                    }
                }
                else
                {
                    switch (type)
                    {
                    case "TEXT_DETECTION":
                        if (annotateImageResponse.TextAnnotations != null && annotateImageResponse.TextAnnotations.Any())
                        {
                            TextResult = annotateImageResponse.TextAnnotations[0].Description.Replace("\n", "\r\n");
                        }
                        break;

                    case "DOCUMENT_TEXT_DETECTION":
                        if (annotateImageResponse.TextAnnotations != null && annotateImageResponse.TextAnnotations.Any())
                        {
                            TextResult = annotateImageResponse.TextAnnotations[0].Description.Replace("\n", "\r\n");
                        }
                        break;

                    case "FACE_DETECTION":
                        if (annotateImageResponse.FaceAnnotations != null && annotateImageResponse.FaceAnnotations.Any())
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.FaceAnnotations[0]);
                        }
                        break;

                    case "LOGO_DETECTION":
                        if (annotateImageResponse.LogoAnnotations != null && annotateImageResponse.LogoAnnotations.Any())
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.LogoAnnotations[0]);
                        }
                        break;

                    case "LABEL_DETECTION":
                        if (annotateImageResponse.LabelAnnotations != null && annotateImageResponse.LabelAnnotations.Any())
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.LabelAnnotations[0]);
                        }
                        break;

                    case "LANDMARK_DETECTION":
                        if (annotateImageResponse.LandmarkAnnotations != null && annotateImageResponse.LandmarkAnnotations.Any())
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.LandmarkAnnotations[0]);
                        }
                        break;

                    case "SAFE_SEARCH_DETECTION":
                        if (annotateImageResponse.SafeSearchAnnotation != null)
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.SafeSearchAnnotation);
                        }
                        break;

                    case "IMAGE_PROPERTIES":
                        if (annotateImageResponse.ImagePropertiesAnnotation != null)
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.ImagePropertiesAnnotation);
                        }
                        break;
                    }


                    Response = annotateImageResponse;
                    return(TextResult);
                }
            }
            Response = null;
            return("");
        }
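A hedged usage sketch for the Mat-based GetText above; OcrService is an assumed name for the hosting class, and the OpenCvSharp Cv2.ImRead call is only one possible way to obtain a Mat.

        public async Task GetTextUsageSketch(OcrService ocr)
        {
            // Load an image with OpenCvSharp and request plain text detection.
            using (Mat img = Cv2.ImRead("document.png"))
            {
                string text = await ocr.GetText(img, "en", "TEXT_DETECTION");
                Console.WriteLine(text);
            }
        }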
 private static IList <EntityAnnotation> GetTextResult(BatchAnnotateImagesResponse result)
 {
     return(result?.Responses?.Count > 0 ? result.Responses[0]?.TextAnnotations ?? new List <EntityAnnotation>() : new List <EntityAnnotation>());
 }
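A short caller sketch for GetTextResult; the batchAnnotateImagesResponse variable is assumed to come from an Images.Annotate(...).Execute() call as in the earlier examples.

 IList <EntityAnnotation> textAnnotations = GetTextResult(batchAnnotateImagesResponse);
 string fullText = textAnnotations.FirstOrDefault()?.Description ?? string.Empty;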