Пример #1
0
        /// <summary>
        /// Finds image files under the "Images" folder next to the application,
        /// runs Google Cloud Vision face detection on them via the helper, and
        /// writes the results as JSON.
        /// </summary>
        public void Run()
        {
            try
            {
                Console.WriteLine("Hold on, we're finding faces...");

                var client = ImageAnnotatorClient.Create();

                // BUG FIX: the original wrote `@$ "..."` with a space between
                // the prefix and the string literal, which does not compile.
                // The interpolated verbatim prefix must be attached directly.
                var sourceDirectory = $@"{AppContext.BaseDirectory}\Images";

                var files = _helper.GetImageFilePaths(sourceDirectory);

                Console.WriteLine("Analyzing faces");

                var faceResults = _helper.GetFaceResults(files, client);

                _helper.WriteJsonResults(faceResults);

                Console.WriteLine("All done, the faces have been analyzed");
            }
            catch (AnnotateImageException e)
            {
                // The Vision API reported a per-image error; surface its details.
                AnnotateImageResponse response = e.Response;
                Console.WriteLine(response.Error);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        }
Пример #2
0
        /// <summary>
        /// Verifies that the fake annotator returns all face annotations plus
        /// exactly one logo annotation (MaxResults = 1), both via the
        /// synchronous and the asynchronous entry points.
        /// </summary>
        public async Task Annotate()
        {
            ImageAnnotatorClient client = new FakeImageAnnotator();

            var faceFeature = new Feature { Type = FeatureType.FaceDetection };
            var logoFeature = new Feature { Type = FeatureType.LogoDetection, MaxResults = 1 };

            var request = new AnnotateImageRequest
            {
                Image    = s_allAnnotationsImage,
                Features = { faceFeature, logoFeature }
            };

            var expectedResponse = new AnnotateImageResponse
            {
                FaceAnnotations = { s_allAnnotationsResponse.FaceAnnotations },
                LogoAnnotations = { s_allAnnotationsResponse.LogoAnnotations.Take(1) }
            };

            Assert.Equal(expectedResponse, client.Annotate(request));
            Assert.Equal(expectedResponse, await client.AnnotateAsync(request));
        }
Пример #3
0
        /// <summary>
        /// Groups the per-word text annotations of a Vision API response into
        /// text lines by comparing the bottom y coordinate of each word's
        /// bounding box against existing lines (tolerance: 10 px).
        /// </summary>
        /// <param name="response">Vision API response; TextAnnotations[0] is
        /// the whole-blob summary and is skipped.</param>
        /// <returns>Lines sorted top-to-bottom, each with its words sorted
        /// left-to-right and concatenated into Text.</returns>
        private List <TextLine> MakeLinesFromWord(AnnotateImageResponse response)
        {
            List <TextLine> lines = new List <TextLine>();

            // Start at 1: index 0 describes the entire text blob.
            for (int i = 1; i < response.TextAnnotations.Count; i++)
            {
                if (lines.Count == 0)
                {
                    // The very first word always starts a new line.
                    TextLine l = new TextLine();
                    l.Words = new List <Classes.Word>();
                    GetDataFromResponse(response, lines, i, l);
                }
                else
                {
                    // A word joins an existing line when its bottom edge lies
                    // within 10 px of that line's bottom edge.
                    int bottom = response.TextAnnotations[i].BoundingPoly.Vertices[2].Y.Value;
                    var line   = lines.Where(c => Math.Abs(c.Bounds.Bottom - bottom) <= 10).FirstOrDefault();
                    if (line == null)
                    {
                        // No line matched: open a new one for this word.
                        TextLine l = new TextLine();
                        l.Words = new List <Classes.Word>();
                        GetDataFromResponse(response, lines, i, l);
                    }
                    else
                    {
                        // Append the word to the matched line. Rectangle is
                        // derived from vertices 0 (top-left), 1 (top-right)
                        // and 2 (bottom-right).
                        Classes.Word w = new Classes.Word();

                        w.Text       = response.TextAnnotations[i].Description;
                        w.Confidence = 100;
                        int x      = response.TextAnnotations[i].BoundingPoly.Vertices[0].X.Value;
                        int y      = response.TextAnnotations[i].BoundingPoly.Vertices[0].Y.Value;
                        int width  = response.TextAnnotations[i].BoundingPoly.Vertices[1].X.Value - response.TextAnnotations[i].BoundingPoly.Vertices[0].X.Value;
                        int height = response.TextAnnotations[i].BoundingPoly.Vertices[2].Y.Value - response.TextAnnotations[i].BoundingPoly.Vertices[0].Y.Value;
                        w.Bounds = new System.Drawing.Rectangle(x, y, width, height);

                        line.Words.Add(w);
                    }
                }
            }

            // NOTE(review): All() is used here purely for its side effects;
            // the lambda always returns true so every line is visited.
            lines.All(l =>
            {
                // Order the words left-to-right before building the line text.
                l.Words.Sort(delegate(Classes.Word w1, Classes.Word w2)
                {
                    return(w1.Bounds.X.CompareTo(w2.Bounds.X));
                });
                int x    = l.Words.FirstOrDefault().Bounds.X;
                int y    = l.Words.FirstOrDefault().Bounds.Y;
                int x2   = l.Words.LastOrDefault().Bounds.Right;
                l.Text   = GetTextForLine(l);
                // Recompute the line rectangle from first/last word; height
                // keeps the bottom recorded when the line was first created.
                l.Bounds = new System.Drawing.Rectangle(x, y, x2 - x, l.Bounds.Bottom - y);
                return(true);
            });

            // Finally order the lines top-to-bottom.
            lines.Sort(delegate(TextLine l1, TextLine l2)
            {
                return(l1.Bounds.Y.CompareTo(l2.Bounds.Y));
            });

            return(lines);
        }
        /// <summary>
        /// Entry point for line segmentation: inverts the y axis of the
        /// response, splits the summary annotation into raw lines and merges
        /// the per-word annotations back into lines with bounding polygons.
        /// </summary>
        /// <param name="data">Google vision api AnnotateImage response.</param>
        /// <returns>The reconstructed text lines.</returns>
        public static IEnumerable <string> InitLineSegmentation(AnnotateImageResponse data)
        {
            var yMax = CoordinateHelpers.GetYMax(data);

            data = CoordinateHelpers.InvertAxis(data, yMax);

            // TextAnnotations[0] holds the auto-identified full text; each
            // '\n'-separated chunk corresponds to a single detected line.
            var lines = data.TextAnnotations[0].Description.Split('\n');

            // Per-word annotations (gcp vision full text) start at index 1.
            var rawText = new List <EntityAnnotation>();

            for (int i = 1; i < data.TextAnnotations.Count; i++)
            {
                rawText.Add(data.TextAnnotations[i]);
            }

            // Reverse both so the merge step can consume from the end (LIFO);
            // removing from the front of a list costs O(n) per removal.
            lines = lines.Reverse().ToArray();
            rawText.Reverse();

            var mergedArray = GetMergedLines(lines, rawText);

            var boundingPolygon = CoordinateHelpers.GetBoundingPolygon(mergedArray);

            var combinedPolygon = CoordinateHelpers.CombineBoundingPolygon(boundingPolygon);

            return(ConstructLineWithBoundingPolygon(combinedPolygon));
        }
Пример #5
0
        /// <summary>
        /// Runs Spanish TEXT_DETECTION over every image returned by
        /// ObtenerInfoArchivos in one batch request, writing each result to a
        /// .txt file on D:\ and echoing it to the console.
        /// </summary>
        public static void IniciarServicio()
        {
            // Service setup.
            VisionSettings vs         = new VisionSettings();
            var            credential = vs.CreateCredential();
            var            service    = vs.CreateService(credential);

            // Build a single batch request containing every image.
            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();


            var infoImagenes = ObtenerInfoArchivos();

            foreach (var imagen in infoImagenes)
            {
                byte[] file = File.ReadAllBytes(imagen.FullName.ToString());
                batchRequest.Requests.Add(new AnnotateImageRequest()
                {
                    Features = new List <Feature>()
                    {
                        new Feature()
                        {
                            Type = "TEXT_DETECTION", MaxResults = 1
                        },
                    },
                    ImageContext = new ImageContext()
                    {
                        // Hint the OCR towards Spanish.
                        LanguageHints = new List <string>()
                        {
                            "es"
                        }
                    },
                    Image = new Image()
                    {
                        Content = Convert.ToBase64String(file)
                    }
                });
            }
            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();
            var cantidadRespuestasImagenes = batchAnnotateImagesResponse.Responses.Count();

            // Responses come back in request order, so Responses[i] matches
            // infoImagenes[i] — assumes ObtenerInfoArchivos returns an
            // indexable collection; TODO confirm.
            for (int i = 0; i < cantidadRespuestasImagenes; i++)
            {
                AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[i];
                if (annotateImageResponse.TextAnnotations != null)
                {
                    // TextAnnotations[0] holds the full recognized text.
                    var texto = annotateImageResponse.TextAnnotations[0].Description;
                    using (var tw = new StreamWriter(@"D:\" + infoImagenes[i].Name.ToString() + ".txt", true))
                    {
                        tw.WriteLine(texto);
                    }
                    Console.WriteLine(texto);
                }
            }
        }
Пример #6
0
        /// <summary>
        /// Wraps a non-null Vision API response.
        /// </summary>
        /// <param name="annotateImageResponse">Response to wrap; must not be null.</param>
        /// <exception cref="ArgumentNullException">When the response is null.</exception>
        public AnnotatedImage(AnnotateImageResponse annotateImageResponse)
        {
            this.annotateImageResponse = annotateImageResponse
                ?? throw new ArgumentNullException(nameof(annotateImageResponse));
        }
        /// <summary>
        /// Reads the image file as bytes and sends it to the Google Vision API
        /// with a single feature of the given type and a language hint.
        /// Returns the first response on success; on an API error the message
        /// is stored in Error and an empty response is returned instead.
        /// </summary>
        /// <param name="imgPath">Path of the image file to analyze.</param>
        /// <param name="language">Language hint for the OCR (e.g. "en").</param>
        /// <param name="type">Vision feature type string (e.g. "TEXT_DETECTION").</param>
        /// <param name="jsonPath">Path of the service-account JSON credential file.</param>
        /// <returns>The annotation response, or a fresh empty response on error.</returns>
        public AnnotateImageResponse AnalyseImage(string imgPath, string language, string type, string jsonPath)
        {
            OAuthService oAuth = new OAuthService();

            var credential = oAuth.CreateCredential(jsonPath);
            var service    = oAuth.CreateService(credential);

            // 1h 1m 1s — a generous timeout for large images / slow links.
            service.HttpClient.Timeout = new TimeSpan(1, 1, 1);
            byte[] file = File.ReadAllBytes(imgPath);

            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();
            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = type
                    },
                },
                ImageContext = new ImageContext()
                {
                    LanguageHints = new List <string>()
                    {
                        language
                    }
                },
                Image = new Image()
                {
                    Content = Convert.ToBase64String(file)
                }
            });

            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();

            if (batchAnnotateImagesResponse.Responses.Any())
            {
                AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[0];
                if (annotateImageResponse.Error != null)
                {
                    if (annotateImageResponse.Error.Message != null)
                    {
                        Error = annotateImageResponse.Error.Message;
                    }
                }
                else
                {
                    return(annotateImageResponse);
                }
            }

            // Reached when there were no responses or the API reported an error.
            return(new AnnotateImageResponse());
        }
Пример #8
0
        /// <summary>
        /// ThrowOnError must return the same instance when the response
        /// carries no error.
        /// </summary>
        public void ThrowOnError_NoError()
        {
            var annotation = new EntityAnnotation { Description = "X" };
            var response = new AnnotateImageResponse
            {
                TextAnnotations = { annotation }
            };

            Assert.Same(response, response.ThrowOnError());
        }
Пример #9
0
        /// <summary>
        /// Converts the bitmap to bytes and sends it to the Google Vision API
        /// with the given feature type and language hint. On success the
        /// recognized text (newlines replaced by ';') is stored in TextResult;
        /// API error messages are stored in Error.
        /// NOTE(review): declared async but contains no await, so it runs
        /// synchronously; it also always returns "" — callers must read the
        /// TextResult/Error properties instead of the return value.
        /// </summary>
        /// <param name="imagem">Bitmap to analyze.</param>
        /// <param name="language">Language hint for the OCR.</param>
        /// <param name="type">Vision feature type string (e.g. "TEXT_DETECTION").</param>
        /// <returns>Always the empty string; results are exposed via properties.</returns>
        public async Task <string> GetText(System.Drawing.Bitmap imagem, string language, string type)
        {
            TextResult = JsonResult = "";
            var credential = CreateCredential();
            var service    = CreateService(credential);

            // 1h 1m 1s timeout.
            service.HttpClient.Timeout = new TimeSpan(1, 1, 1);
            byte[] file = ImageToByte(imagem);


            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();
            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = type, MaxResults = 1
                    },
                },
                ImageContext = new ImageContext()
                {
                    LanguageHints = new List <string>()
                    {
                        language
                    }
                },
                Image = new Image()
                {
                    Content = Convert.ToBase64String(file)
                }
            });

            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();

            if (batchAnnotateImagesResponse.Responses.Any())
            {
                AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[0];
                if (annotateImageResponse.Error != null)
                {
                    if (annotateImageResponse.Error.Message != null)
                    {
                        Error = annotateImageResponse.Error.Message;
                    }
                }
                else
                {
                    // NOTE(review): assumes TextAnnotations is non-empty; an
                    // empty result would throw here — confirm with callers.
                    TextResult = annotateImageResponse.TextAnnotations[0].Description.Replace("\n", ";");
                }
            }
            return("");
        }
        /// <summary>
        /// Computes the maximum y coordinate of the first (summary) text
        /// annotation's bounding polygon.
        /// </summary>
        /// <param name="data">Google vision api AnnotateImage response</param>
        /// <returns>Largest Y among the first four vertices</returns>
        public static int GetYMax(AnnotateImageResponse data)
        {
            var vertices = data.TextAnnotations[0].BoundingPoly.Vertices;

            // Only the first four vertices are considered, matching the
            // quadrilateral shape of the API's bounding polygon.
            return Enumerable.Range(0, 4).Max(i => vertices[i].Y);
        }
        /// <summary>
        /// Inverts the y axis coordinates for easier computation, since the
        /// google vision output starts its y axis from the bottom.
        /// </summary>
        /// <param name="data">Google vision api AnnotateImage response</param>
        /// <param name="yMax">Maximum y coordinate from the identified text blob</param>
        /// <returns>The same response instance, mutated in place</returns>
        public static AnnotateImageResponse InvertAxis(AnnotateImageResponse data, int yMax)
        {
            // Skip index 0: it is the aggregate annotation for the whole blob.
            for (int wordIndex = 1; wordIndex < data.TextAnnotations.Count; wordIndex++)
            {
                var vertices = data.TextAnnotations[wordIndex].BoundingPoly.Vertices;

                // Only the four quadrilateral corners are mirrored.
                for (int corner = 0; corner < 4; corner++)
                {
                    vertices[corner].Y = yMax - vertices[corner].Y;
                }
            }

            return data;
        }
Пример #12
0
        /// <summary>
        /// ThrowOnError must raise AnnotateImageException carrying the error
        /// message and the original response when Error is set.
        /// </summary>
        public void ThrowOnError_Error()
        {
            var status = new Rpc.Status { Message = "Bang" };
            var response = new AnnotateImageResponse { Error = status };

            var exception = Assert.Throws <AnnotateImageException>(() => response.ThrowOnError());

            Assert.Equal("Bang", exception.Message);
            Assert.Same(response, exception.Response);
        }
Пример #13
0
 /// <summary>
 /// Copies a singular annotation field from the canned full response into the
 /// actual response, but only when the request asked for that feature type.
 /// </summary>
 private static void AddSingularFeature <T>(
     AnnotateImageRequest request,
     AnnotateImageResponse fullResponse,
     AnnotateImageResponse actualResponse,
     FeatureType featureType,
     Func <AnnotateImageResponse, T> extractor,
     Action <AnnotateImageResponse, T> assigner)
 {
     bool requested = request.Features.Any(f => f.Type == featureType);
     if (!requested)
     {
         // Feature not requested: leave the target field untouched.
         return;
     }

     assigner(actualResponse, extractor(fullResponse));
 }
        /// <summary>
        /// Copies up to MaxResults entries of a repeated annotation field from
        /// the canned full response into the actual response, but only when
        /// the request asked for that feature type.
        /// </summary>
        private static void AddRepeatedFeatures <T>(
            AnnotateImageRequest request,
            AnnotateImageResponse fullResponse,
            AnnotateImageResponse actualResponse,
            FeatureType featureType,
            Func <AnnotateImageResponse, RepeatedField <T> > extractor)
        {
            var featureRequest = request.Features.FirstOrDefault(f => f.Type == featureType);
            if (featureRequest == null)
            {
                // Feature not requested: nothing to copy.
                return;
            }

            // MaxResults == 0 means "no limit".
            int limit = featureRequest.MaxResults != 0 ? featureRequest.MaxResults : int.MaxValue;
            var source = extractor(fullResponse);

            extractor(actualResponse).AddRange(source.Take(limit));
        }
Пример #15
0
        /// <summary>
        /// Builds a Word from the i-th text annotation, seeds the given line
        /// with it (the word's bounds become the line's initial bounds) and
        /// appends the line to the list.
        /// </summary>
        private static void GetDataFromResponse(AnnotateImageResponse response, List <TextLine> lines, int i, TextLine l)
        {
            var annotation = response.TextAnnotations[i];
            var vertices = annotation.BoundingPoly.Vertices;

            // Rectangle from vertices 0 (top-left), 1 (top-right), 2 (bottom-right).
            int left = vertices[0].X.Value;
            int top = vertices[0].Y.Value;
            var bounds = new System.Drawing.Rectangle(
                left,
                top,
                vertices[1].X.Value - left,
                vertices[2].Y.Value - top);

            var word = new Classes.Word
            {
                Text = annotation.Description,
                Confidence = 100,
                Bounds = bounds
            };

            l.Words.Add(word);
            l.Bounds = bounds;
            lines.Add(l);
        }
Пример #16
0
        /// <summary>
        /// Doc sample: demonstrates catching AnnotateImageException for a
        /// single invalid image and reading the error from the attached
        /// response. The "Sample:" markers delimit the published snippet.
        /// </summary>
        public void ErrorHandling_SingleImage()
        {
            // Sample: ErrorHandling_SingleImage
            Image image = new Image(); // No content or source!
            ImageAnnotatorClient client = ImageAnnotatorClient.Create();

            try
            {
                IReadOnlyList <EntityAnnotation> logos = client.DetectLogos(image);
                // Normally use logos here...
            }
            catch (AnnotateImageException e)
            {
                // The failed response (including its Error status) travels
                // with the exception.
                AnnotateImageResponse response = e.Response;
                Console.WriteLine(response.Error);
            }
            // End sample
        }
Пример #17
0
        /// <summary>
        /// Builds the fake response for one request: the "image" content is a
        /// serialized full AnnotateImageResponse, from which only the
        /// requested feature results are copied into the returned response.
        /// </summary>
        private static AnnotateImageResponse ProcessRequest(AnnotateImageRequest request)
        {
            AnnotateImageResponse fullResponse = AnnotateImageResponse.Parser.ParseFrom(request.Image.Content);

            var actualResponse = new AnnotateImageResponse();
            actualResponse.Error = fullResponse.Error;

            // Repeated annotation fields honor MaxResults.
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.FaceDetection, r => r.FaceAnnotations);
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.LabelDetection, r => r.LabelAnnotations);
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.LandmarkDetection, r => r.LandmarkAnnotations);
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.LogoDetection, r => r.LogoAnnotations);
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.TextDetection, r => r.TextAnnotations);

            // Singular annotation fields are copied wholesale when requested.
            AddSingularFeature(request, fullResponse, actualResponse, FeatureType.CropHints, r => r.CropHintsAnnotation, (r, v) => r.CropHintsAnnotation = v);
            AddSingularFeature(request, fullResponse, actualResponse, FeatureType.WebDetection, r => r.WebDetection, (r, v) => r.WebDetection = v);
            AddSingularFeature(request, fullResponse, actualResponse, FeatureType.DocumentTextDetection, r => r.FullTextAnnotation, (r, v) => r.FullTextAnnotation = v);
            AddSingularFeature(request, fullResponse, actualResponse, FeatureType.SafeSearchDetection, r => r.SafeSearchAnnotation, (r, v) => r.SafeSearchAnnotation = v);
            AddSingularFeature(request, fullResponse, actualResponse, FeatureType.ImageProperties, r => r.ImagePropertiesAnnotation, (r, v) => r.ImagePropertiesAnnotation = v);

            return actualResponse;
        }
        /// <summary>
        /// Doc sample: a request that passes client-side validation but is not
        /// a decodable image triggers AnnotateImageException; the error
        /// details are read from the attached response.
        /// </summary>
        public void ErrorHandling_SingleImage()
        {
            // Sample: ErrorHandling_SingleImage
            // We create a request which passes simple validation, but isn't a valid image.
            Image image = Image.FromBytes(new byte[10]);
            ImageAnnotatorClient client = ImageAnnotatorClient.Create();

            try
            {
                IReadOnlyList <EntityAnnotation> logos = client.DetectLogos(image);
                // Normally use logos here...
            }
            catch (AnnotateImageException e)
            {
                // The failed response (including its Error status) travels
                // with the exception.
                AnnotateImageResponse response = e.Response;
                Console.WriteLine(response.Error);
            }
            // End sample
        }
        /// <summary>
        /// Doc snippet: annotates one image with face detection (unbounded)
        /// and landmark detection (MaxResults = 5), prints both result sets,
        /// then asserts the counts expected for the test image.
        /// </summary>
        public void Annotate()
        {
            Image image = LoadResourceImage("SchmidtBrinPage.jpg");
            // Snippet: Annotate
            ImageAnnotatorClient client  = ImageAnnotatorClient.Create();
            AnnotateImageRequest request = new AnnotateImageRequest
            {
                Image    = image,
                Features =
                {
                    new Feature {
                        Type = Feature.Types.Type.FaceDetection
                    },
                    // By default, no limits are put on the number of results per annotation.
                    // Use the MaxResults property to specify a limit.
                    new Feature {
                        Type = Feature.Types.Type.LandmarkDetection, MaxResults = 5
                    },
                }
            };
            AnnotateImageResponse response = client.Annotate(request);

            Console.WriteLine("Faces:");
            foreach (FaceAnnotation face in response.FaceAnnotations)
            {
                string poly = string.Join(" - ", face.BoundingPoly.Vertices.Select(v => $"({v.X}, {v.Y})"));
                Console.WriteLine($"  Confidence: {(int)(face.DetectionConfidence * 100)}%; BoundingPoly: {poly}");
            }
            Console.WriteLine("Landmarks:");
            foreach (EntityAnnotation landmark in response.LandmarkAnnotations)
            {
                Console.WriteLine($"Score: {(int)(landmark.Score * 100)}%; Description: {landmark.Description}");
            }
            if (response.Error != null)
            {
                Console.WriteLine($"Error detected: {response.Error}");
            }
            // End snippet

            // The test image contains three faces and no landmarks.
            Assert.Equal(3, response.FaceAnnotations.Count);
            Assert.Equal(0, response.LandmarkAnnotations.Count);
        }
Пример #20
0
        /// <summary>
        /// Runs Vision API face detection on the image file at the given path.
        /// </summary>
        /// <param name="path">Path of the image file to analyze.</param>
        /// <param name="maxResults">Maximum number of faces to return.</param>
        /// <returns>The face annotations reported by the API.</returns>
        /// <exception cref="InvalidOperationException">
        /// When the API returns no face annotations (includes the API's error
        /// message when one is available).
        /// </exception>
        public IList <FaceAnnotation> detectFaces(string path, int maxResults)
        {
            byte[] data = File.ReadAllBytes(path);

            var img = new Google.Apis.Vision.v1.Data.Image
            {
                Content = Convert.ToBase64String(data)
            };

            var request = new AnnotateImageRequest
            {
                Image    = img,
                Features = new List <Feature>
                {
                    new Feature { Type = "FACE_DETECTION", MaxResults = maxResults }
                }
            };

            var batchAnnotate = new BatchAnnotateImagesRequest
            {
                Requests = new List <AnnotateImageRequest> { request }
            };

            ImagesResource.AnnotateRequest annotate = _vision.Images.Annotate(batchAnnotate);

            BatchAnnotateImagesResponse batchResponse = annotate.Execute();

            AnnotateImageResponse response = batchResponse.Responses[0];

            if (response.FaceAnnotations == null)
            {
                // BUG FIX: the original dereferenced response.Error without a
                // null check (NRE when the API simply found no faces) and
                // threw the non-specific Exception type.
                throw new InvalidOperationException(
                          response.Error?.Message ?? "Unknown error getting image annotations");
            }

            return response.FaceAnnotations;
        }
        /// <summary>
        /// Runs Vision API document text detection on a single OpenCV frame.
        /// </summary>
        /// <param name="ar_frame">Frame to analyze; encoded as PNG before upload.</param>
        /// <returns>The raw annotation response from the API.</returns>
        public static AnnotateImageResponse f_find_brand_by_DetectDocumentText_in(Mat ar_frame)
        {
            var client = ImageAnnotatorClient.Create();

            byte[] encodedFrame = ar_frame.ToBytes(".png");
            Image visionImage = Image.FromBytes(encodedFrame);

            var documentTextFeature = new Feature { Type = Feature.Types.Type.DocumentTextDetection };
            var request = new AnnotateImageRequest
            {
                Image    = visionImage,
                Features = { documentTextFeature }
            };

            return client.Annotate(request);
        }
        /// <summary>
        /// Builds the fake response for one request: the "image" content is a
        /// serialized full AnnotateImageResponse, from which only the
        /// requested feature results are copied into the returned response.
        /// </summary>
        private static AnnotateImageResponse ProcessRequest(AnnotateImageRequest request)
        {
            AnnotateImageResponse fullResponse = AnnotateImageResponse.Parser.ParseFrom(request.Image.Content);

            var actualResponse = new AnnotateImageResponse();
            actualResponse.Error = fullResponse.Error;

            // Repeated annotation fields honor MaxResults.
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.FaceDetection, r => r.FaceAnnotations);
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.LabelDetection, r => r.LabelAnnotations);
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.LandmarkDetection, r => r.LandmarkAnnotations);
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.LogoDetection, r => r.LogoAnnotations);
            AddRepeatedFeatures(request, fullResponse, actualResponse, FeatureType.TextDetection, r => r.TextAnnotations);

            // Singular annotation fields are copied wholesale when requested.
            bool wantsImageProperties = request.Features.Any(f => f.Type == FeatureType.ImageProperties);
            if (wantsImageProperties)
            {
                actualResponse.ImagePropertiesAnnotation = fullResponse.ImagePropertiesAnnotation;
            }

            bool wantsSafeSearch = request.Features.Any(f => f.Type == FeatureType.SafeSearchDetection);
            if (wantsSafeSearch)
            {
                actualResponse.SafeSearchAnnotation = fullResponse.SafeSearchAnnotation;
            }

            return actualResponse;
        }
Пример #23
0
        /// <summary>
        /// Detects faces in the given image bytes via the Vision v1 REST API.
        /// Throws IOException when no face annotations come back, using the
        /// API's error message when one is available.
        /// </summary>
        IEnumerable <FaceAnnotation> detectFaces(IEnumerable <byte> inputImage, int maxResults)
        {
            var img = new Google.Apis.Vision.v1.Data.Image
            {
                Content = Convert.ToBase64String(inputImage.ToArray())
            };

            var faceFeature = new Feature { MaxResults = maxResults, Type = "FACE_DETECTION" };

            var request = new AnnotateImageRequest
            {
                Image    = img,
                Features = new List <Feature> { faceFeature }
            };

            var batch = new BatchAnnotateImagesRequest
            {
                Requests = new List <AnnotateImageRequest> { request }
            };

            var annotate = _visionService.Images.Annotate(batch);

            BatchAnnotateImagesResponse batchResponse = annotate.Execute();
            AnnotateImageResponse response = batchResponse.Responses[0];

            if (response.FaceAnnotations == null)
            {
                string message = response.Error != null
                        ? response.Error.Message
                        : "Unknown error getting image annotations";
                throw new IOException(message);
            }

            return response.FaceAnnotations;
        }
Пример #24
0
        /// <summary>
        /// Sends image bytes to the Google Vision API for TEXT_DETECTION with
        /// the given language hint. On success the recognized text (newlines
        /// normalized to CRLF) is stored in TextResult and the raw annotation
        /// JSON in JsonResult; API error messages are stored in Error.
        /// </summary>
        /// <param name="imgData">Raw image bytes to analyze.</param>
        /// <param name="language">Language hint passed to the API (e.g. "en").</param>
        public void GetText(byte[] imgData, string language)
        {
            TextResult = JsonResult = "";

            var credential = CreateCredential();
            var service    = CreateService(credential);

            // 1h 1m 1s timeout.
            service.HttpClient.Timeout = new TimeSpan(1, 1, 1);

            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();
            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = "TEXT_DETECTION", MaxResults = 1
                    },
                },
                ImageContext = new ImageContext()
                {
                    LanguageHints = new List <string>()
                    {
                        language
                    }
                },
                Image = new Image()
                {
                    Content = Convert.ToBase64String(imgData)
                }
            });

            var annotate = service.Images.Annotate(batchRequest);
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = annotate.Execute();

            // Guard clauses replace the original's nested if/else and its
            // redundant trailing `return;` statements / commented-out code.
            if (!batchAnnotateImagesResponse.Responses.Any())
            {
                return;
            }

            AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[0];
            if (annotateImageResponse.Error != null)
            {
                if (annotateImageResponse.Error.Message != null)
                {
                    Error = annotateImageResponse.Error.Message;
                }
                return;
            }

            if (annotateImageResponse.TextAnnotations != null && annotateImageResponse.TextAnnotations.Any())
            {
                TextResult = annotateImageResponse.TextAnnotations[0].Description.Replace("\n", "\r\n");
                JsonResult = JsonConvert.SerializeObject(annotateImageResponse.TextAnnotations[0]);
            }
        }
Пример #25
0
 /// <summary>
 /// Builds a fake Image whose content is the serialized response itself, so
 /// the fake annotator can later parse the expected response back out of it.
 /// </summary>
 private static Image CreateFakeImage(AnnotateImageResponse response)
 {
     return new Image
     {
         Content = response.ToByteString()
     };
 }
Пример #26
0
        /// <summary>
        /// read image as byte and send to google api
        /// </summary>
        /// <param name="imgPath"></param>
        /// <param name="language"></param>
        /// <param name="type"></param>
        /// <returns></returns>
        public async Task <string> GetText(Mat img, string language, string type)
        {
            // Sends a single Google Cloud Vision annotate request of the given
            // feature type against img. For the two OCR feature types the
            // recognised text (with LF normalised to CRLF) is returned; for the
            // other feature types a JSON dump of the first annotation is
            // returned. On an API error the message is stored in Error and ""
            // is returned; Response holds the raw per-image response (null on
            // error or an empty batch).
            TextResult = JsonResult = "";
            var credential = CreateCredential();
            var service    = CreateService(credential);

            // Vision calls on large images can be slow; allow just over an hour.
            service.HttpClient.Timeout = new TimeSpan(1, 1, 1);
            byte[] file = img.ToBytes();

            BatchAnnotateImagesRequest batchRequest = new BatchAnnotateImagesRequest();

            batchRequest.Requests = new List <AnnotateImageRequest>();
            batchRequest.Requests.Add(new AnnotateImageRequest()
            {
                Features = new List <Feature>()
                {
                    new Feature()
                    {
                        Type = type, MaxResults = 1
                    },
                },
                ImageContext = new ImageContext()
                {
                    LanguageHints = new List <string>()
                    {
                        language
                    }
                },
                Image = new Image()
                {
                    Content = Convert.ToBase64String(file)
                }
            });

            var annotate = service.Images.Annotate(batchRequest);
            // Await the request instead of blocking with Execute(): this method
            // is declared async, and the original synchronous Execute() call
            // blocked the caller's thread and left the method with no await at
            // all (compiler warning CS1998).
            BatchAnnotateImagesResponse batchAnnotateImagesResponse = await annotate.ExecuteAsync();

            if (batchAnnotateImagesResponse.Responses.Any())
            {
                AnnotateImageResponse annotateImageResponse = batchAnnotateImagesResponse.Responses[0];
                if (annotateImageResponse.Error != null)
                {
                    if (annotateImageResponse.Error.Message != null)
                    {
                        Error = annotateImageResponse.Error.Message;
                    }
                }
                else
                {
                    switch (type)
                    {
                    // Both OCR flavours expose the full recognised text in
                    // TextAnnotations[0].Description, so they share one case.
                    case "TEXT_DETECTION":
                    case "DOCUMENT_TEXT_DETECTION":
                        if (annotateImageResponse.TextAnnotations != null && annotateImageResponse.TextAnnotations.Any())
                        {
                            TextResult = annotateImageResponse.TextAnnotations[0].Description.Replace("\n", "\r\n");
                        }
                        break;

                    case "FACE_DETECTION":
                        if (annotateImageResponse.FaceAnnotations != null && annotateImageResponse.FaceAnnotations.Any())
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.FaceAnnotations[0]);
                        }
                        break;

                    case "LOGO_DETECTION":
                        if (annotateImageResponse.LogoAnnotations != null && annotateImageResponse.LogoAnnotations.Any())
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.LogoAnnotations[0]);
                        }
                        break;

                    case "LABEL_DETECTION":
                        if (annotateImageResponse.LabelAnnotations != null && annotateImageResponse.LabelAnnotations.Any())
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.LabelAnnotations[0]);
                        }
                        break;

                    case "LANDMARK_DETECTION":
                        if (annotateImageResponse.LandmarkAnnotations != null && annotateImageResponse.LandmarkAnnotations.Any())
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.LandmarkAnnotations[0]);
                        }
                        break;

                    case "SAFE_SEARCH_DETECTION":
                        if (annotateImageResponse.SafeSearchAnnotation != null)
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.SafeSearchAnnotation);
                        }
                        break;

                    case "IMAGE_PROPERTIES":
                        if (annotateImageResponse.ImagePropertiesAnnotation != null)
                        {
                            TextResult = JsonConvert.SerializeObject(annotateImageResponse.ImagePropertiesAnnotation);
                        }
                        break;
                    }


                    Response = annotateImageResponse;
                    return(TextResult);
                }
            }
            Response = null;
            return("");
        }
        public static DataResult f_hilight_brand_and_save_frame(Mat ar_frame, AnnotateImageResponse ar_AnnotateImageResponse, string ar_brand_name, double ar_cost_of_1_second, string ar_result_path, string ar_picture_name, int ar_second_number)
        {
            // Draws a red polygon around every OCR text annotation whose
            // description contains ar_brand_name (case-insensitive), saves the
            // annotated frame under ar_result_path/ar_brand_name, and returns a
            // DataResult describing the matches found in this frame.
            // File name suffix: "_pm" (positive match) when at least one
            // occurrence was found, "_fm" (false match) otherwise.
            Mat l_frame = new Mat();
            ar_frame.CopyTo(l_frame);

            int  li_count_of_brand_occurences_found = 0;
            bool l_at_least_1_occurence_found       = false;

            // TextAnnotations is null when the Vision API found no text at all;
            // the original dereferenced .Count unconditionally and crashed with
            // a NullReferenceException on text-free frames.
            int li_count_of_all_occurences_found =
                ar_AnnotateImageResponse.TextAnnotations == null
                    ? 0
                    : ar_AnnotateImageResponse.TextAnnotations.Count;

            Point p = new Point();

            for (int i = 1; i < li_count_of_all_occurences_found; i++) //=== skip i=0 as it contains the concatenation of all occurrences
            {
                EntityAnnotation annotation = ar_AnnotateImageResponse.TextAnnotations[i];

                if (annotation.Description != null && annotation.Description.ToLower().Contains(ar_brand_name.ToLower()))
                {
                    l_at_least_1_occurence_found = true;
                    li_count_of_brand_occurences_found++;

                    // Copy the four bounding-box corners into an OpenCV polygon.
                    List <Point> l_list_points = new List <Point>();
                    for (int v = 0; v < 4; v++)
                    {
                        p.X = annotation.BoundingPoly.Vertices.ElementAt(v).X;
                        p.Y = annotation.BoundingPoly.Vertices.ElementAt(v).Y;
                        l_list_points.Add(new Point(p.X, p.Y));
                    }

                    Cv2.Polylines(l_frame, new List <List <Point> > {
                        l_list_points
                    }, true, Scalar.Red, 1);
                }
            }

            string l_picutre_name;

            if (l_at_least_1_occurence_found)
            {
                l_picutre_name = ar_picture_name + "_pm.png"; //===pm as Positive match
            }
            else
            {
                l_picutre_name = ar_picture_name + "_fm.png"; //===fm as False match
            }

            //===save picture (CreateDirectory is a no-op if the folder exists)
            string ls_brand_result_path = Path.Combine(ar_result_path, ar_brand_name);

            Directory.CreateDirectory(ls_brand_result_path);
            string ls_brand_result_path_and_name = Path.Combine(ls_brand_result_path, l_picutre_name);

            Cv2.ImWrite(ls_brand_result_path_and_name, l_frame);

            //===log result into DataResult
            DataResult l_current_data_result = new DataResult();

            l_current_data_result.BrandName       = ar_brand_name;
            l_current_data_result.FrameRef        = l_picutre_name;
            l_current_data_result.SecondRef       = ar_second_number;
            l_current_data_result.TotalOccurences = li_count_of_brand_occurences_found;
            l_current_data_result.Cost            = li_count_of_brand_occurences_found * ar_cost_of_1_second;

            return(l_current_data_result);
        }
Пример #28
0
        private static List <ImageInfo> GetInfo(AnnotateImageResponse result, string name)
        {
            // Flattens the label, web-entity, landmark and text annotations of
            // a Vision API response into a single list of ImageInfo entries,
            // each tagged with its AnnotationCategory.
            // NOTE(review): the 'name' parameter is currently unused (it was
            // only referenced by previously commented-out code); it is kept for
            // interface compatibility with existing callers.
            var infoList = new List <ImageInfo>();

            if (result.LabelAnnotations != null)
            {
                infoList.AddRange(result.LabelAnnotations
                                  .Select(s => new ImageInfo()
                {
                    Description        = s.Description,
                    Score              = s.Score,
                    AnnotationCategory = AnnotationCategory.LabelAnnotations
                }));
            }

            if (result.WebDetection != null && result.WebDetection.WebEntities != null)
            {
                infoList.AddRange(result.WebDetection.WebEntities
                                  .Select(s => new ImageInfo()
                {
                    Description        = s.Description,
                    Score              = s.Score,
                    AnnotationCategory = AnnotationCategory.WebEntities
                }));
            }

            if (result.LandmarkAnnotations != null)
            {
                infoList.AddRange(result.LandmarkAnnotations
                                  .Select(s => new ImageInfo()
                {
                    Description        = s.Description,
                    Score              = s.Score,
                    AnnotationCategory = AnnotationCategory.LandmarkAnnotations
                }));
            }

            if (result.TextAnnotations != null)
            {
                infoList.AddRange(result.TextAnnotations
                                  .Select(s => new ImageInfo()
                {
                    Description        = s.Description,
                    Score              = s.Score,
                    AnnotationCategory = AnnotationCategory.TextAnnotations
                }));
            }

            return(infoList);
        }
        public static void f_main(String ar_path_for_uploading_videos, String ar_working_folder_name, String ar_uploaded_video_name, String[] ar_brand_names, double ar_cost_of_1_second)
        {
            // Samples one frame per second from the uploaded video, runs Vision
            // OCR on each sampled frame, highlights every requested brand name,
            // writes the per-brand annotated frames plus a Results.txt JSON log
            // into the working folder, and stores an EvaluationResults object
            // in the current HTTP session under the "results" key.

            //===initializing google API key
            string ls_google_app_credentials_path_and_filename = HostingEnvironment.MapPath("~/CloudVision/google_cloud_credential_for_logo_detection-nowting-bd7886019869.json");

            Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", ls_google_app_credentials_path_and_filename);

            String ls_result_folder_path = Path.Combine(ar_path_for_uploading_videos, ar_working_folder_name);

            Directory.CreateDirectory(ls_result_folder_path);

            List <DataResult> l_list_DataResult = new List <DataResult>();

            // VideoCapture and Mat hold native OpenCV resources; dispose them
            // deterministically (the original leaked both).
            using (VideoCapture l_capture = VideoCapture.FromFile(Path.Combine(ar_path_for_uploading_videos, ar_working_folder_name, ar_uploaded_video_name)))
            using (Mat l_frame = new Mat())
            {
                Double l_framerate = l_capture.Get(5); // OpenCV property id 5 = CAP_PROP_FPS
                Double l_frameid;
                int    li_counter = 0;

                while (true)
                {
                    l_capture.Read(l_frame);
                    if (l_frame.Empty())
                    {
                        break;
                    }

                    l_frameid = l_capture.Get(1);       // OpenCV property id 1 = CAP_PROP_POS_FRAMES
                    if (l_frameid % l_framerate != 0)
                    {
                        continue;                       //===getting 1 frame per second
                    }
                    //======================================================================================================
                    li_counter++;
                    // TODO(review): temporary debug limit — only the first 3
                    // sampled frames are processed; remove for full runs.
                    if (li_counter != 1 && li_counter != 2 && li_counter != 3)
                    {
                        continue;
                    }
                    //======================================================================================================

                    //===find all texts in frame
                    AnnotateImageResponse l_response = f_find_brand_by_DetectDocumentText_in(l_frame);

                    //===set a rectangle over each corresponding brand-text and save the frame
                    foreach (string l_brand_name in ar_brand_names)
                    {
                        if (String.IsNullOrEmpty(l_brand_name.Trim()))
                        {
                            continue;
                        }

                        DataResult l_current_data_result = f_hilight_brand_and_save_frame(l_frame, l_response, l_brand_name, ar_cost_of_1_second, ls_result_folder_path, "pic_" + li_counter.ToString(), li_counter);
                        l_list_DataResult.Add(l_current_data_result);
                    }
                }
            }

            //===write result into file
            using (StreamWriter l_file = File.AppendText(Path.Combine(ls_result_folder_path, "Results.txt")))
            {
                JsonSerializer serializer = new JsonSerializer();
                serializer.Serialize(l_file, l_list_DataResult);
            }


            string l_domainnamefordownloadingresults = ConfigurationManager.AppSettings["domainnamefordownloadingresults"];

            EvaluationResults l_EvaluationResults = new EvaluationResults();

            l_EvaluationResults.ResultPathURL    = l_domainnamefordownloadingresults + "/" + ar_working_folder_name;
            l_EvaluationResults.BrandNames       = ar_brand_names;
            l_EvaluationResults.BrandIndexToShow = 0;
            l_EvaluationResults.array_DataResult = l_list_DataResult.ToArray();

            HttpContext.Current.Session["results"] = l_EvaluationResults;

            return;
        }
Пример #30
0
 // Constructs an Item for the named image.
 // NOTE(review): only imageName is stored; the 'response' parameter is ignored
 // in this constructor — the rest of the class is outside this view, so confirm
 // whether the annotation data is meant to be captured here.
 public Item(AnnotateImageResponse response, string imageName)
 {
     this.imageName = imageName;
 }