        public async Task TestImageModeration()
        {
            // Open the sample image and stream it through the moderation check.
            using (var stream = new FileStream(@"TestImages\moxie.jpg", FileMode.Open))
            {
                var response = await AnalyzeImage.PassesImageModerationAsync(stream);

                // Item1 indicates whether the image passed moderation.
                Assert.IsTrue(response.Item1);
            }
        }
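For context, `PassesImageModerationAsync` is the method under test and its implementation is not shown here. Below is a minimal sketch of what such a method could look like, assuming it wraps Azure Content Moderator's image evaluation; the `key` and `endpoint` placeholders and the tuple shape are assumptions inferred from the test, not the project's actual code:

        // Hypothetical sketch only -- not the project's actual implementation.
        public static async Task<Tuple<bool, Evaluate>> PassesImageModerationAsync(Stream image)
        {
            // Placeholder credentials; substitute your Content Moderator endpoint and key.
            var client = new ContentModeratorClient(new ApiKeyServiceClientCredentials(key))
            {
                Endpoint = endpoint
            };

            var result = await client.ImageModeration.EvaluateFileInputAsync(image);

            // Item1 is the pass/fail flag the test asserts on.
            bool passes = result.IsImageAdultClassified != true && result.IsImageRacyClassified != true;
            return Tuple.Create(passes, result);
        }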
Example #2
        public async Task<IActionResult> AnalyzeImage(string url)
        {
            AnalyzeImage analyzeImage = new AnalyzeImage(AppConstant.Endpoint, AppConstant.Key);

            // Await the analysis rather than blocking on .Result, which ties up a thread
            // and risks deadlocks in synchronization-context environments.
            var json = await analyzeImage.GetImageAnalysisCustomModel(url);

            ImageInfo imageInfo = new ImageInfo()
            {
                ImageAnalysis = json,
                URL           = url
            };

            return View(imageInfo);
        }
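The `ImageInfo` view model is not shown in this example. A minimal sketch of the shape the action implies (the real class may carry more members):

        // Hypothetical view model inferred from the action above.
        public class ImageInfo
        {
            public string ImageAnalysis { get; set; } // JSON returned by the custom-model analysis
            public string URL           { get; set; } // URL of the analyzed image
        }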
Example #3
        static async Task Main(string[] args)
        {
            Console.WriteLine("Azure Cognitive Services Computer Vision - .NET quickstart example");
            Console.WriteLine();

            // Create a client
            AnalyzeImage analyzeImage = new AnalyzeImage(endpoint, subscriptionKey);

            // Analyze the image, then fetch the custom-model result without blocking.
            await analyzeImage.AnalyzeImageUrl(ANALYZE_URL_IMAGE);
            var customModelResult = await analyzeImage.GetImageAnalysisCustomModel(ANALYZE_URL_IMAGE);

            //analyzeImage.DetectObjectsUrl(DETECT_URL_IMAGE).Wait();
            //analyzeImage.ReadFileUrl(READ_TEXT_URL_IMAGE).Wait();
            Console.WriteLine("Press enter to exit...");
            Console.ReadLine();
        }
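The quickstart assumes `endpoint`, `subscriptionKey`, and `ANALYZE_URL_IMAGE` are declared elsewhere in the class. Placeholder declarations would look like this; the values are illustrative only:

        // Placeholders only -- substitute your own resource values.
        private const string subscriptionKey   = "<your-computer-vision-key>";
        private const string endpoint          = "<your-computer-vision-endpoint>";
        private const string ANALYZE_URL_IMAGE = "<url-of-an-image-to-analyze>";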
Example #4

        public async Task TestTextModeration()
        {
            // "Donna" is benign text, so the moderator should let it pass.
            bool passes = await AnalyzeImage.PassesTextModeratorAsync(new ReviewPoco { ReviewText = "Donna" });

            Assert.IsTrue(passes);
        }
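`ReviewPoco` is used here only as a carrier for the text under moderation. A minimal sketch consistent with the test (the real type may have more fields):

        // Hypothetical minimal shape of ReviewPoco as used by the test.
        public class ReviewPoco
        {
            public string ReviewText { get; set; }
        }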
Example #5

        public async Task<IHttpActionResult> describeImageWithVoice([FromBody] ImageToVoice imageToVoice)
        {
            try
            {
                // Look up the requesting user and decode the incoming base64 image.
                var user = await UserManager.FindByIdAsync(imageToVoice.userId);

                var faceDescription = new FaceDescription();
                var googleClient    = await ImageAnnotatorClient.CreateAsync();

                var byteImage = Convert.FromBase64String(imageToVoice.base64Image);
                var image     = Image.FromBytes(byteImage);

                // Run the Google Vision detectors against the image.
                var responseForFacesGoogle = await googleClient.DetectFacesAsync(image);
                var responseForLabels      = await googleClient.DetectLabelsAsync(image);
                var responseForLandmark    = await googleClient.DetectLandmarksAsync(image);
                var responseForLogo        = await googleClient.DetectLogosAsync(image);

                // Aggregate the detector outputs for description and voice generation.
                var analyzeImage = new AnalyzeImage();
                analyzeImage.responseFaceGoogle     = responseForFacesGoogle;
                analyzeImage.responseForLabels      = responseForLabels;
                analyzeImage.responseForLogoGoogle  = responseForLogo;
                analyzeImage.responseLandMarkGoogle = responseForLandmark;

                // Only call the Microsoft Face API when Google found at least one face.
                var responseFromMicrosoftFace = new List<CognitiveMicrosoft>();
                if (responseForFacesGoogle.Count > 0)
                {
                    responseFromMicrosoftFace = await faceDescription.MakeAnalysisRequestAsync(byteImage);

                    analyzeImage.responseForFacesMicrosft = responseFromMicrosoftFace;
                }

                // Build the spoken description and synthesize it as base64-encoded audio.
                string base64Voice = analyzeImage.describeImageWithVoice();

                // Persist the processed image and read back its generated key directly,
                // instead of re-querying and sorting the whole table.
                var processedImage = new ImagesProcesed {
                    UserId = user.Id, date = DateTime.Now, image = imageToVoice.base64Image
                };
                rela.ImagesProceseds.Add(processedImage);
                await rela.SaveChangesAsync();

                int imageId = processedImage.imageId;

                // Persist each non-empty detector result as serialized JSON, keyed by imageId.
                if (responseForFacesGoogle.Count > 0)
                {
                    rela.GoogleFaces.Add(new GoogleFace {
                        GoogleFace1 = JsonConvert.SerializeObject(responseForFacesGoogle), imageId = imageId
                    });
                }

                if (responseForLabels.Count > 0)
                {
                    rela.GoogleLabels.Add(new GoogleLabel {
                        GoogleLabel1 = JsonConvert.SerializeObject(responseForLabels), imageId = imageId
                    });
                }

                if (responseForLandmark.Count > 0)
                {
                    rela.GoogleLandmarks.Add(new GoogleLandmark {
                        GoogleLandamark = JsonConvert.SerializeObject(responseForLandmark), imageId = imageId
                    });
                }

                if (responseForLogo.Count > 0)
                {
                    rela.GoogleLogoes.Add(new GoogleLogo {
                        GoogleLogo1 = JsonConvert.SerializeObject(responseForLogo), imageId = imageId
                    });
                }

                if (responseFromMicrosoftFace.Count > 0)
                {
                    rela.MicrosoftFaces.Add(new MicrosoftFace {
                        imageId = imageId, MicrosoftFace1 = JsonConvert.SerializeObject(responseFromMicrosoftFace)
                    });
                }

                // Store the synthesized audio alongside the image record.
                rela.Voices.Add(new Voice {
                    imageId = imageId, GoogleVoice = base64Voice
                });

                await rela.SaveChangesAsync();

                // Shape the combined response returned to the caller.
                DescribeImage describeImage = new DescribeImage()
                {
                    googleFace    = responseForFacesGoogle,
                    label         = responseForLabels,
                    landmark      = responseForLandmark,
                    logo          = responseForLogo,
                    voiceBase64   = base64Voice,
                    microsoftFace = responseFromMicrosoftFace
                };

                return Ok(describeImage);
            }
            catch (Exception)
            {
                // Surface any failure in the pipeline as a generic bad request.
                return BadRequest("Error");
            }
        }
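The `ImageToVoice` request body is not defined in this example. A minimal sketch inferred from the two members the action reads (the real DTO may differ):

        // Hypothetical request DTO inferred from the action's usage.
        public class ImageToVoice
        {
            public string userId      { get; set; }
            public string base64Image { get; set; }
        }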