/// <summary>
        /// Uploads the image to Project Oxford and performs analysis
        /// </summary>
        /// <param name="imageFilePath">The image file path.</param>
        /// <returns></returns>
        private async Task <AnalysisResult> UploadAndAnalyzeImage(string imageFilePath)
        {
            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE STARTS HERE
            // -----------------------------------------------------------------------

            //
            // Create Project Oxford Vision API Service client
            //
            VisionServiceClient VisionServiceClient = new VisionServiceClient(SubscriptionKey, "https://westcentralus.api.cognitive.microsoft.com/vision/v1.0");

            Log("VisionServiceClient is created");

            using (Stream imageFileStream = File.OpenRead(imageFilePath))
            {
                //
                // Analyze the image for all visual features
                //
                Log("Calling VisionServiceClient.AnalyzeImageAsync()...");
                VisualFeature[] visualFeatures = new VisualFeature[] { VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags };
                AnalysisResult  analysisResult = await VisionServiceClient.AnalyzeImageAsync(imageFileStream, visualFeatures);

                return(analysisResult);
            }

            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE ENDS HERE
            // -----------------------------------------------------------------------
        }
        /// <summary>
        /// Runs image moderation through the Vision API: the image is allowed only when
        /// one of its top description tags is "dog".
        /// </summary>
        /// <param name="image">Stream containing the image to analyze.</param>
        /// <param name="kvService">Key Vault accessor used to resolve the API secrets.</param>
        /// <param name="log">Trace writer for diagnostic output.</param>
        /// <returns>
        /// Tuple of (allowd, message, tags): whether the image passed, the first caption
        /// text (may be null), and the fetched tags. NOTE: "allowd" is a typo, but tuple
        /// element names are part of the public interface, so it is kept for callers.
        /// </returns>
        public static async Task <(bool allowd, string message, string[] tags)> PassesImageModerationAsync(Stream image, KeyVault kvService, TraceWriter log)
        {
            try
            {
                log.Info("--- Creating VisionApi client and analyzing image");

                var key = await GetSecret("MicrosoftVisionApiKey", kvService);

                var endpoint = await GetSecret("MicrosoftVisionApiEndpoint", kvService);

                var numTags = await GetSecret("MicrosoftVisionNumTags", kvService);

                var client   = new VisionServiceClient(key, endpoint);
                var features = new VisualFeature[] { VisualFeature.Description };
                var result   = await client.AnalyzeImageAsync(image, features);

                // BUGFIX: guard Description/Tags before logging; the original dereferenced
                // result.Description.Tags unconditionally here even though the lines below
                // treat both as potentially null.
                log.Info($"--- Image analyzed with tags: {String.Join(",", result?.Description?.Tags ?? new string[0])}");

                // Number of tags to surface; defaults to 5 when the secret is missing/invalid.
                if (!int.TryParse(numTags, out var tagsToFetch))
                {
                    tagsToFetch = 5;
                }
                // BUGFIX: also guard Tags itself (the original would NRE if the API
                // returned a Description with no Tags array).
                var    fetchedTags = result?.Description?.Tags?.Take(tagsToFetch).ToArray() ?? new string[0];
                bool   isAllowed   = fetchedTags.Contains("dog");
                string message     = result?.Description?.Captions?.FirstOrDefault()?.Text;
                return(isAllowed, message, fetchedTags);
            }
            catch (Exception ex)
            {
                // Fail closed: any API or secret-retrieval error rejects the image.
                log.Info("Vision API error! " + ex.Message);
                return(false, "error " + ex.Message, new string[0]);
            }
        }
        /// <summary>
        /// Sends a url to Project Oxford and performs analysis
        /// </summary>
        /// <param name="imageUrl">The url of the image to analyze</param>
        /// <returns>The analysis result for all visual features.</returns>
        private async Task<AnalysisResult> AnalyzeUrl(string imageUrl)
        {
            // Build the Vision API client from the configured subscription key.
            var client = new VisionServiceClient(SubscriptionKey);
            Log("VisionServiceClient is created");

            // Ask for the full set of visual features on the remote image.
            Log("Calling VisionServiceClient.AnalyzeImageAsync()...");
            var features = new[]
            {
                VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color,
                VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType,
                VisualFeature.Tags
            };
            return await client.AnalyzeImageAsync(imageUrl, features);
        }
        /// <summary>
        /// Analyzes a storage file with the Vision API and shows its tags in the UI,
        /// ranked by confidence (highest first).
        /// </summary>
        /// <param name="imageFile">The image file to analyze.</param>
        private async Task AnalyzeImage(StorageFile imageFile)
        {
            resultText.Text = "Analyzing...";
            try
            {
                // NOTE: the subscription key is an empty placeholder in this sample.
                var client = new VisionServiceClient("");
                using (var stream = await imageFile.OpenStreamForReadAsync())
                {
                    var features = new[]
                    {
                        VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description,
                        VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags
                    };

                    var analysisResult = await client.AnalyzeImageAsync(stream, features);

                    // Render one "name (confidence)" line per tag, best first.
                    var lines = analysisResult.Tags
                        .OrderByDescending(t => t.Confidence)
                        .Select(t => $"{t.Name} ({t.Confidence:F})");
                    resultText.Text = string.Join("\n", lines);
                }
            }
            catch (Exception ex)
            {
                // Surface the failure in the same text block used for results.
                resultText.Text = $"Failed: {ex.Message}";
            }
        }
Beispiel #5
0
        /// <summary>
        /// Analyzes an image url and returns its first caption, translated.
        /// </summary>
        /// <param name="imageUrl">Url of the image to analyze.</param>
        /// <returns>The first caption of the image, run through TranslateText.</returns>
        public async Task<string> AnalyzeUrl(string imageUrl)
        {
            var features = new[]
            {
                VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color,
                VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType,
                VisualFeature.Tags
            };
            var analysis = await this.visionServiceClient.AnalyzeImageAsync(imageUrl, features);

            // Translate the first caption before handing it back.
            return await TranslateText(analysis.Description.Captions.First().Text);
        }
Beispiel #6
0
        /// <summary>
        /// Analyzes a stream for tags and faces and packages them into a VisionResult.
        /// </summary>
        /// <param name="stream">Image stream; disposed by this method.</param>
        /// <returns>A VisionResult holding tag attributes and face rectangles
        /// (both empty when the API returns no tags).</returns>
        public async Task <VisionResult> Analyze(Stream stream)
        {
            var client = new VisionServiceClient(Constants.VisionApiKey, Constants.VisionApiEndpoint);

            var attributes = new List <VisionAttribute>();
            var rectangles = new List <Rectangle>();

            using (stream)
            {
                var features      = new VisualFeature[] { VisualFeature.Tags, VisualFeature.Faces };
                var visionsResult = await client.AnalyzeImageAsync(stream, features, null);

                // BUGFIX: the original condition "visionsResult != null && visionsResult?.Tags.Length > 0"
                // still throws when Tags is null; guard Tags itself with a null-conditional.
                if (visionsResult?.Tags?.Length > 0)
                {
                    if (visionsResult.Faces != null)
                    {
                        foreach (var face in visionsResult.Faces)
                        {
                            rectangles.Add(face.FaceRectangle.ToRectangle());
                        }
                    }

                    foreach (var tag in visionsResult.Tags)
                    {
                        attributes.Add(new VisionAttribute(tag.Name, tag.Hint, tag.Confidence));
                    }
                }
            }
            return(new VisionResult {
                Attributes = attributes, Rectangles = rectangles
            });
        }
Beispiel #7
0
        /// <summary>
        /// Loads a photo from disk, announces the analysis via speech, and runs the
        /// Vision API over every visual feature.
        /// </summary>
        /// <param name="imageFilePath">Absolute path of the photo to analyze.</param>
        /// <returns>The analysis result.</returns>
        private async Task <AnalysisResult> UploadAndAnalyzeImage(string imageFilePath)
        {
            // Tell the user what is happening, then create the client.
            // NOTE: the subscription key is a placeholder in this sample.
            Speak("Ok, I am analysing your photo!");
            var client = new VisionServiceClient("---------");
            Debug.WriteLine("VisionServiceClient is created");

            StorageFile file = await StorageFile.GetFileFromPathAsync(imageFilePath);

            using (Stream imageFileStream = (await file.OpenReadAsync()).AsStreamForRead())
            {
                // Request the complete feature set for the photo.
                Debug.WriteLine("Calling VisionServiceClient.AnalyzeImageAsync()...");
                var features = new[]
                {
                    VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description,
                    VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags
                };

                var analysisResult = await client.AnalyzeImageAsync(imageFileStream, features);
                Debug.WriteLine(analysisResult);
                return analysisResult;
            }
        }
Beispiel #8
0
        /// <summary>
        /// Synchronously analyzes a bitmap for adult/color/description/image-type features.
        /// </summary>
        /// <param name="sourceImage">Bitmap to encode as PNG in memory and analyze.</param>
        /// <returns>The Vision API analysis result.</returns>
        /// <exception cref="Microsoft.ProjectOxford.Vision.ClientException">
        /// Rethrown after logging when the Vision API reports an error.
        /// </exception>
        public static AnalysisResult AnalyzeImage(Bitmap sourceImage)
        {
            VisionServiceClient VisionServiceClient = ComputerVisionService.GetClient();

            using (MemoryStream memoryStream = new MemoryStream())
            {
                // Encode to PNG in memory and rewind so the API reads from the start.
                sourceImage.SaveAsPng(memoryStream);
                memoryStream.Position = 0;

                Console.WriteLine("Calling VisionServiceClient.AnalyzeImageAsync()...");
                VisualFeature[] visualFeatures = new VisualFeature[] {
                    VisualFeature.Adult,
                    VisualFeature.Color,
                    VisualFeature.Description,
                    VisualFeature.ImageType
                };

                try
                {
                    // Sync-over-async is kept because the public signature is synchronous.
                    AnalysisResult analysisResult = VisionServiceClient.AnalyzeImageAsync(memoryStream, visualFeatures).GetAwaiter().GetResult();
                    return(analysisResult);
                }
                catch (Microsoft.ProjectOxford.Vision.ClientException exception)
                {
                    Console.WriteLine(exception.Error.Message);
                    Console.WriteLine(exception.Error.Code);

                    // BUGFIX: "throw;" instead of "throw exception;" so the original
                    // stack trace is preserved on rethrow.
                    throw;
                }
            }
        }
Beispiel #9
0
        /// <summary>
        /// Handles the open-file dialog's FileOk event: previews the chosen image,
        /// sends it to the Vision API (with celebrity detection), and shows a
        /// content-moderation summary in the label.
        /// </summary>
        /// <param name="sender">The dialog raising the event.</param>
        /// <param name="e">Cancel event args (unused).</param>
        private void openFileDialog1_FileOk(object sender, CancelEventArgs e)
        {
            try
            {
                pictureBox1.ImageLocation = openFileDialog1.FileName;

                // BUGFIX: dispose the file stream when done; the original leaked the handle.
                using (var file = System.IO.File.OpenRead(openFileDialog1.FileName))
                {
                    // Refer to this blog for all the preceding steps and more info:
                    // https://devonblog.com/software-development/artificial-intelligence/censor-pictures-using-ai-from-computer-vision-api/

                    // TODO: you have to use your own KEY here.
                    var client = new VisionServiceClient("ye2f5cb4ee15341ba8f7c4f0d90ff542c",
                                                         "https://southeastasia.api.cognitive.microsoft.com/vision/v1.0");

                    var visualFeatures = new VisualFeature[] { VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags };

                    // Blocking wait is acceptable here only because this is a WinForms handler sample.
                    var analysisResult = client.AnalyzeImageAsync(file, visualFeatures, new[] { "celebrities" });
                    analysisResult.Wait();

                    var result = analysisResult.Result;

                    // check out these extension methods on AnalysisResultExtension class

                    var resultText = $@"Adult/racy content: {result.IsAdultOrRacyContent()}
Clear human face found: {result.HasAFace()} 
Is there a child: {!result.IsYoungerThan(14)},
Celebrity: {result.DetectCelebrity()}";

                    label1.Text = resultText;
                }
            }
            catch (AggregateException ex)
            {
                // Task.Wait wraps failures in AggregateException; show the root cause.
                MessageBox.Show(ex.InnerException.Message + " Make sure you are using a valid API KEY ");
            }
        }
Beispiel #10
0
        /// <summary>
        /// Uploads an image file to the Computer Vision API and analyzes it for every
        /// visual feature.
        /// </summary>
        /// <param name="imageFilePath">Path of the image file to upload.</param>
        /// <returns>The analysis result.</returns>
        private static async Task <AnalysisResult> UploadAndAnalyzeImage(string imageFilePath)
        {
            // Client authenticated with the configured Computer Vision key.
            var client = new VisionServiceClient(ComKey);
            Log("VisionServiceClient is created");

            using (Stream imageFileStream = File.OpenRead(imageFilePath))
            {
                // Request the full visual-feature set for the uploaded image.
                Log("Calling VisionServiceClient.AnalyzeImageAsync()...");
                var features = new[]
                {
                    VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color,
                    VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType,
                    VisualFeature.Tags
                };
                return await client.AnalyzeImageAsync(imageFileStream, features);
            }
        }
        /// <summary>
        /// Uploads the image to Project Oxford and performs analysis
        /// </summary>
        /// <param name="imageFilePath">The image file path.</param>
        /// <returns>The analysis result for all visual features.</returns>
        private async Task<AnalysisResult> UploadAndAnalyzeImage(string imageFilePath)
        {
            var client = new VisionServiceClient(SubscriptionKey);
            Log("VisionServiceClient is created");

            using (Stream imageFileStream = File.OpenRead(imageFilePath))
            {
                Log("Calling VisionServiceClient.AnalyzeImageAsync()...");
                // Every visual feature the v1.0 API exposes.
                var features = new[]
                {
                    VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color,
                    VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType,
                    VisualFeature.Tags
                };
                return await client.AnalyzeImageAsync(imageFileStream, features);
            }
        }
Beispiel #12
0
        /// <summary>
        /// Analyzes the local sample image "satyaNadella.jpg" for description and faces
        /// and prints each caption to the console. Errors are logged, not thrown.
        /// </summary>
        private static void AnalyzeImage()
        {
            var apiKey       = ConfigurationManager.AppSettings["VisionApiSubscriptionKey"];
            var apiRoot      = "https://eastus2.api.cognitive.microsoft.com/vision/v1.0";
            var visionClient = new VisionServiceClient(apiKey, apiRoot);

            var visualFeats = new VisualFeature[]
            {
                VisualFeature.Description,
                VisualFeature.Faces
            };

            try
            {
                // BUGFIX: dispose the image stream; the original leaked the file handle.
                // Opening inside the try also routes a missing-file error to the generic
                // catch below instead of crashing the caller.
                using (Stream imageStream = File.OpenRead("satyaNadella.jpg"))
                {
                    // Sync-over-async is acceptable in this console sample.
                    AnalysisResult analysisResult = visionClient.AnalyzeImageAsync(imageStream, visualFeats).Result;
                    foreach (var caption in analysisResult.Description.Captions)
                    {
                        Console.WriteLine("Description: " + caption.Text);
                    }
                }
            }
            catch (ClientException e)
            {
                Console.WriteLine("Vision client error: " + e.Error.Message);
            }
            catch (Exception e)
            {
                Console.WriteLine("Error: " + e.Message);
            }
        }
Beispiel #13
0
        /// <summary>
        /// MVC action: stores an uploaded photo in the configured "photos" blob
        /// container, writes a 192px-wide PNG thumbnail to the thumbnail container,
        /// captions/tags the photo with the Computer Vision API, and records the
        /// results as blob metadata before redirecting to the details page.
        /// </summary>
        /// <param name="file">The posted file; ignored unless it is a non-empty image.</param>
        /// <returns>A redirect to Pictures/Details for the uploaded file name.</returns>
        public async Task <ActionResult> Upload(HttpPostedFileBase file)
        {
            if (file != null && file.ContentLength > 0)
            {
                // Make sure the user selected an image file
                if (!file.ContentType.StartsWith("image"))
                {
                    TempData["Message"] = "Only image files may be uploaded";
                }
                else
                {
                    // Save the original image in the "photos" container
                    string             photoContainer = CloudConfigurationManager.GetSetting("storage:photocontainer");
                    string             thumbContainer = CloudConfigurationManager.GetSetting("storage:thumbnailcontainer");
                    CloudBlobClient    client         = _account.CreateCloudBlobClient();
                    CloudBlobContainer container      = client.GetContainerReference(photoContainer);
                    CloudBlockBlob     photo          = container.GetBlockBlobReference(Path.GetFileName(file.FileName));
                    photo.Properties.ContentType = file.ContentType;
                    await photo.UploadFromStreamAsync(file.InputStream);

                    await photo.SetPropertiesAsync();

                    // Rewind so the same input stream can be read again for the thumbnail.
                    file.InputStream.Seek(0L, SeekOrigin.Begin);

                    // Generate a thumbnail and save it in the "thumbnails" container
                    using (var outputStream = new MemoryStream())
                    {
                        var settings = new ResizeSettings {
                            MaxWidth = 192, Format = "png"
                        };
                        ImageBuilder.Current.Build(file.InputStream, outputStream, settings);
                        outputStream.Seek(0L, SeekOrigin.Begin);
                        container = client.GetContainerReference(thumbContainer);
                        CloudBlockBlob thumbnail = container.GetBlockBlobReference(Path.GetFileName(file.FileName));
                        await thumbnail.UploadFromStreamAsync(outputStream);
                    }

                    // Submit the image to Azure's Computer Vision API
                    VisionServiceClient vision = new VisionServiceClient(CloudConfigurationManager.GetSetting("vision:key"),
                                                                         CloudConfigurationManager.GetSetting("vision:rootUrl"));
                    VisualFeature[] features = new VisualFeature[] { VisualFeature.Description };
                    var             result   = await vision.AnalyzeImageAsync(photo.Uri.ToString(), features);

                    // Record the image description and tags in blob metadata
                    // NOTE(review): Captions[0] throws when the API returns no caption — confirm acceptable.
                    photo.Metadata.Add("Caption", result.Description.Captions[0].Text);

                    // One metadata entry per description tag: Tag0, Tag1, ...
                    for (int i = 0; i < result.Description.Tags.Length; i++)
                    {
                        string key = $"Tag{i}";
                        photo.Metadata.Add(key, result.Description.Tags[i]);
                    }

                    await photo.SetMetadataAsync();
                }
            }

            // redirect back to the index action to show the form once again
            return(RedirectToAction("Details", "Pictures", new { name = file?.FileName }));
        }
Beispiel #14
0
        /// <summary>
        /// Analyzes the given image (url string passed straight to the Vision client)
        /// for categories and tags.
        /// </summary>
        /// <param name="imageString">Image url to analyze.</param>
        /// <returns>The raw analysis result.</returns>
        public async Task <AnalysisResult> AnalyzeImageAsync(string imageString)
        {
            var features = new[] { VisualFeature.Categories, VisualFeature.Tags };

            return await _visionServiceClient.AnalyzeImageAsync(imageString, features);
        }
Beispiel #15
0
        /// <summary>
        /// Analyzes the image at the given url for all visual features.
        /// </summary>
        /// <param name="imageUrl">Url of the image to analyze.</param>
        /// <returns>The analysis result.</returns>
        private async Task <AnalysisResult> AnalyzeUrl(string imageUrl)
        {
            Log("Calling VisionServiceClient.AnalyzeImageAsync()...");
            var features = new[]
            {
                VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color,
                VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType,
                VisualFeature.Tags
            };
            return await VisionServiceClient.AnalyzeImageAsync(imageUrl, features);
        }
Beispiel #16
0
        /// <summary>
        /// Analyzes raw image bytes and returns the highest-confidence caption text.
        /// </summary>
        /// <param name="image">Encoded image bytes.</param>
        /// <returns>The most confident caption, or null when none is available.</returns>
        public static async Task <string> AnalyzeAsync(byte[] image)
        {
            var visualFeatures = new VisualFeature[] { VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags };
            var vision         = new Microsoft.ProjectOxford.Vision.VisionServiceClient(SubscriptionKeys.ComputerVisionId, "https://westeurope.api.cognitive.microsoft.com/vision/v1.0");
            var result         = await vision.AnalyzeImageAsync(new MemoryStream(image) { Position = 0 }, visualFeatures);

            // BUGFIX: the original used OrderBy (ascending), which returned the LEAST
            // confident caption; sort descending so the best caption wins.
            return(result.Description.Captions.OrderByDescending(c => c.Confidence).Select(c => c.Text).FirstOrDefault());
        }
Beispiel #17
0
        /// <summary>
        /// Describes the image at a url using the color and description features.
        /// </summary>
        /// <param name="url">Url of the image.</param>
        /// <returns>The analysis result.</returns>
        public static async Task <AnalysisResult> DescribeImage(string url)
        {
            // NOTE: placeholder key — replace with a real subscription key.
            var visionClient = new VisionServiceClient("<Project Oxford Subscription key here>");
            var wanted       = new[] { VisualFeature.Color, VisualFeature.Description };

            return await visionClient.AnalyzeImageAsync(url, wanted);
        }
        /// <summary>
        /// Runs full-feature analysis on an image stream and returns the logged summary.
        /// </summary>
        /// <param name="stream">Image stream to analyze.</param>
        /// <returns>The string produced by LogAnalysisResult for the analysis.</returns>
        public async Task <string> RecognizeImageFromStream(Stream stream)
        {
            var features = new[]
            {
                VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color,
                VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType,
                VisualFeature.Tags
            };
            AnalysisResult analysis = await VisionServiceClient.AnalyzeImageAsync(stream, features);

            // Format/log the result and hand back the summary text.
            return LogAnalysisResult(analysis);
        }
        /// <summary>
        /// Analyzes the image at a url against the West Central US v1.0 endpoint.
        /// </summary>
        /// <param name="imageUrl">Url of the image to analyze.</param>
        /// <returns>The analysis result for all visual features.</returns>
        private async Task <AnalysisResult> AnalyzeUrl(string imageUrl)
        {
            // NOTE: placeholder key — replace with a real subscription key.
            var client = new VisionServiceClient("SUA_CHAVE2_AQUI", "https://westcentralus.api.cognitive.microsoft.com/vision/v1.0");

            var features = new[]
            {
                VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color,
                VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType,
                VisualFeature.Tags
            };
            return await client.AnalyzeImageAsync(imageUrl, features);
        }
        /// <summary>
        /// Opens the given image file in preparation for a Cognitive Services call.
        /// The actual API call is still commented out in this sample.
        /// </summary>
        /// <param name="imagePath">Path of the image file to open.</param>
        public static void CallCognitiveService(string imagePath)
        {
            var features = new VisualFeature[] { VisualFeature.Tags, VisualFeature.Description };

            // BUGFIX: open the caller-supplied path; the original ignored the imagePath
            // parameter and always opened a hard-coded C:\Vision\Sample.jpg.
            using (var fs = new FileStream(imagePath, FileMode.Open))
            {
                //analysisResult = await visionClient.AnalyzeImageAsync(fs, features);
                //
            }
        }
        /// <summary>
        /// Captures a photo to the Pictures library, previews it on screen, analyzes it
        /// with the Vision API, and speaks the top caption (with a hedge when the
        /// confidence is 0.6 or lower). Always clears the isProcessing flag when done.
        /// </summary>
        // NOTE(review): async void means exceptions can't be awaited by callers; they
        // are caught locally here, so only the local catch protects the process.
        private async void _TakePhoto()
        {
            Debug.WriteLine("Taking Photo");

            try
            {
                // Overwrite any previous capture with the same file name.
                photoFile = await KnownFolders.PicturesLibrary.CreateFileAsync(PHOTO_FILE_NAME, CreationCollisionOption.ReplaceExisting);

                var imageProperties = ImageEncodingProperties.CreateJpeg();

                await mediaCapture.CapturePhotoToStorageFileAsync(imageProperties, photoFile);

                // Show the captured photo in the UI preview.
                var photoStream = await photoFile.OpenReadAsync();

                var bitmap = new BitmapImage();
                bitmap.SetSource(photoStream);
                captureImage.Source = bitmap;

                // Re-open the file for analysis (separate stream from the preview one).
                using (Stream imageFileStream = (await photoFile.OpenReadAsync()).AsStreamForRead())
                {
                    // Analyze the image for all visual features
                    Debug.WriteLine("Calling VisionServiceClient.AnalyzeImageAsync()...");
                    VisualFeature[] visualFeatures = new VisualFeature[]
                    {
                        VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description,
                        VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags
                    };

                    // add your API key in here
                    VisionServiceClient VisionServiceClient = new VisionServiceClient("");

                    analysisResult =
                        await VisionServiceClient.AnalyzeImageAsync(imageFileStream, visualFeatures);

                    Debug.WriteLine("photo: " + analysisResult.Description.Captions[0].Text + " , " + analysisResult.Description.Captions[0].Confidence);

                    // Speak confidently only above 0.6; otherwise hedge the caption.
                    // NOTE(review): Captions[0] throws if the API returns no caption — confirm.
                    if (analysisResult.Description.Captions[0].Confidence > 0.6d)
                    {
                        Speak("I see, " + analysisResult.Description.Captions[0].Text);
                    }
                    else
                    {
                        Speak("I'm not quite sure but it could be, " + analysisResult.Description.Captions[0].Text);
                    }
                }
            }
            catch (Exception ex)
            {
                // Capture/analysis failures are only logged; the app keeps running.
                Debug.WriteLine(ex.Message);
            }
            finally
            {
                // Always clear the busy flag so another capture can start.
                isProcessing = false;
            }
        }
Beispiel #22
0
        /// <summary>
        /// Worker-role entry point: pumps Service Bus messages whose bodies are UTF-8
        /// image urls, asks the Vision API whether each image shows a cat (substring
        /// "cat" in any caption or tag), and stores the verdict in table storage.
        /// Blocks until CompletedEvent is signaled.
        /// </summary>
        public override void Run()
        {
            Trace.WriteLine("Starting processing of messages");

            // Initiates the message pump and callback is invoked for each message that is received, calling close on the client will stop the pump.
            Client.OnMessage((receivedMessage) =>
            {
                try
                {
                    // Process the message
                    Trace.WriteLine("Processing Service Bus message: " + receivedMessage.SequenceNumber.ToString());
                    // The message body carries the image url as UTF-8 bytes.
                    var stream = receivedMessage.GetBody <Stream>();
                    var bytes  = ReadFully(stream);
                    var uri    = Encoding.UTF8.GetString(bytes);

                    AnalysisResult analysisResult;
                    var features     = new VisualFeature[] { VisualFeature.Tags, VisualFeature.Description };
                    // NOTE(review): placeholder API key, and blocking .Result inside the
                    // pump callback — confirm this is acceptable for this worker's load.
                    var visionClient = new VisionServiceClient("<-- ADD API KEY -->", "https://southcentralus.api.cognitive.microsoft.com/vision/v1.0");
                    analysisResult   = visionClient.AnalyzeImageAsync(uri, features).Result;   //

                    // Speak a string.

                    // A "cat" verdict: any caption or description tag containing "cat".
                    bool isCat = false;

                    foreach (var caption in analysisResult.Description.Captions)
                    {
                        if (caption.Text.Contains("cat"))
                        {
                            isCat = true;
                        }
                    }

                    foreach (var tag in analysisResult.Description.Tags)
                    {
                        if (tag.Contains("cat"))
                        {
                            isCat = true;
                        }
                    }


                    // Persist the verdict keyed by the image uri.
                    var tableStorageService = new TableStorageService();
                    tableStorageService.InsertResult(uri, isCat);
                    Console.WriteLine("Is Cat:     " + isCat.ToString());
                    // NOTE(review): Console.Read() blocks this callback until a key press — verify intended.
                    Console.Read();
                }
                catch
                {
                    // Handle any message processing specific exceptions here
                    // NOTE(review): all exceptions are swallowed silently — consider logging.
                }
            });

            CompletedEvent.WaitOne();
        }
        /// <summary>
        /// Opens a storage file and analyzes it for every visual feature.
        /// </summary>
        /// <param name="imageFile">File to analyze.</param>
        /// <returns>The analysis result.</returns>
        private async Task <AnalysisResult> UploadAndAnalyzeImage(StorageFile imageFile)
        {
            // BUGFIX: dispose the stream after the call; the original leaked it.
            using (var stream = await imageFile.OpenStreamForReadAsync())
            {
                Log("Calling VisionServiceClient.AnalyzeImageAsync()...");
                VisualFeature[] visualFeatures = new VisualFeature[] { VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags };
                AnalysisResult  analysisResult = await VisionServiceClient.AnalyzeImageAsync(stream, visualFeatures);

                return(analysisResult);
            }
        }
Beispiel #24
0
        /// <summary>
        /// ASP.NET Core action: uploads an image to the "photos" blob container, writes
        /// a 252x252 PNG thumbnail to "thumbnails", captions/tags the photo via the
        /// Vision API, and saves the results as blob metadata before redirecting to Index.
        /// </summary>
        /// <param name="file">Posted form file; skipped with a TempData message unless it is a non-empty image.</param>
        /// <returns>A redirect to the Index action.</returns>
        public async Task <IActionResult> Upload(IFormFile file)
        {
            if (file != null && file.Length > 0)
            {
                if (!file.ContentType.StartsWith("image"))
                {
                    TempData["Message"] = "Only image files may be uploaded.";
                }
                else
                {
                    // Upload the original bytes to the "photos" container.
                    var client    = _account.CreateCloudBlobClient();
                    var container = client.GetContainerReference("photos");
                    var photo     = container.GetBlockBlobReference(Path.GetFileName(file.FileName));
                    await photo.UploadFromStreamAsync(file.OpenReadStream());

                    // NOTE(review): OpenReadStream() likely returns a fresh stream each
                    // call, which would make this Seek a no-op — confirm IFormFile's contract.
                    file.OpenReadStream().Seek(0L, SeekOrigin.Begin);

                    // Build and upload the PNG thumbnail.
                    using (var outputStream = new MemoryStream())
                    {
                        Configuration.Default.AddImageFormat(new PngFormat());
                        using (var image = Image.Load(file.OpenReadStream()))
                        {
                            image.Resize(new Size {
                                Width = 252, Height = 252
                            }).SaveAsPng(outputStream);
                            container = client.GetContainerReference("thumbnails");
                            var thumbnail = container.GetBlockBlobReference(Path.GetFileName(file.FileName));
                            outputStream.Seek(0L, SeekOrigin.Begin);
                            await thumbnail.UploadFromStreamAsync(outputStream);
                        }
                    }

                    // Caption and tag the stored photo with the Vision API.
                    var features = new VisualFeature[] { VisualFeature.Description };
                    var result   = await _vision.AnalyzeImageAsync(photo.Uri.ToString(), features);

                    // NOTE(review): First() throws if the API returns no caption — confirm acceptable.
                    photo.Metadata.Add("Caption", result.Description.Captions.First().Text);

                    // One metadata entry per description tag: tag0, tag1, ...
                    for (int i = 0; i < result.Description.Tags.Length; i++)
                    {
                        var key = $"tag{i}";
                        photo.Metadata.Add(key, result.Description.Tags[i]);
                    }

                    await photo.SetMetadataAsync();
                }
            }
            else
            {
                TempData["Message"] = "Not uploaded! The file is empty :c";
            }

            return(RedirectToAction(nameof(Index)));
        }
        /// <summary>
        /// Gets the first description caption for the image at the given url.
        /// </summary>
        /// <param name="url">Url of the image.</param>
        /// <returns>The caption text, or null when the API returns none.</returns>
        public async Task <string> getcaption(String url)
        {
            // NOTE: placeholder key — replace with a real subscription key.
            var vision = new VisionServiceClient("****************************************");

            var wanted   = new[] { VisualFeature.Description };
            var analysis = await vision.AnalyzeImageAsync(url, wanted);

            // Null-safe: a missing description or empty caption list yields null.
            return analysis?.Description?.Captions.FirstOrDefault()?.Text;
        }
Beispiel #26
0
        /// <summary>
        /// Analyzes ImageUrl with celebrity detection and stores the category detail,
        /// first caption, and its confidence on the corresponding properties.
        /// Properties are left null when the API omits the respective data.
        /// </summary>
        private async Task MakeAnalysisRequest()
        {
            var features       = new VisualFeature[] { VisualFeature.Description };
            var details        = new string[] { "Celebrities" };
            var visionClient   = new VisionServiceClient(subscriptionKey, uriBase);
            var analysisResult = await visionClient.AnalyzeImageAsync(ImageUrl, features, details);

            // BUGFIX: guard Categories/Captions like the original already guarded
            // Description; the original dereferenced analysisResult.Categories[0]
            // unconditionally and threw when no category (or caption) was returned.
            Detail     = analysisResult?.Categories?.FirstOrDefault()?.Detail?.ToString();
            Caption    = analysisResult?.Description?.Captions?.FirstOrDefault()?.Text;
            Confidence = analysisResult?.Description?.Captions?.FirstOrDefault()?.Confidence;
        }
        /// <summary>
        /// Uploads a local image file to the Computer Vision service and returns the analysis result.
        /// </summary>

        //private Stream stream = new MemoryStream();

        /// <summary>
        /// Opens the image file at <paramref name="imageFilePath"/> and analyzes it with
        /// the Computer Vision API, reporting progress via Debug output and the
        /// <c>status</c> text block.
        /// </summary>
        /// <param name="imageFilePath">Full path of the local image file to analyze.</param>
        /// <returns>The analysis result, or null when the call fails.</returns>
        private async Task <AnalysisResult> UploadAndAnalyzeImage(string imageFilePath)
        {
            try
            {
                // SECURITY NOTE(review): the subscription key is hard-coded in source;
                // move it to secure configuration before shipping.
                // (Local renamed — the original "VisionServiceClient" shadowed its own type name.)
                VisionServiceClient visionClient = new VisionServiceClient("5cd43034665d42bb912d04a946ac6512");

                Debug.WriteLine("VisionServiceClient is created");
                status.Text = "VisionServiceClient is created";

                StorageFile file = await StorageFile.GetFileFromPathAsync(imageFilePath);

                using (Stream imageFileStream = (await file.OpenReadAsync()).AsStreamForRead())
                {
                    // Analyze the image for all visual features.
                    Debug.WriteLine("Calling VisionServiceClient.AnalyzeImageAsync()...");
                    status.Text = "Calling VisionServiceClient.AnalyzeImageAsync()...";

                    VisualFeature[] visualFeatures = new VisualFeature[]
                    {
                        VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description,
                        VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags
                    };

                    AnalysisResult analysisResult =
                        await visionClient.AnalyzeImageAsync(imageFileStream, visualFeatures);

                    Debug.WriteLine(analysisResult);
                    status.Text = "Image analyzed!";
                    return analysisResult;
                }
            }
            catch (Exception ex)
            {
                // Sample-style error handling: failures are logged and surfaced as null.
                Debug.WriteLine(ex.Message);
                return null;
            }
        }
Example #28
0
        /// <summary>
        /// Analyzes the image at <paramref name="imageUrl"/> for its description and
        /// tags, then reports the outcome through ShowAnalysisResult.
        /// </summary>
        /// <param name="apiKey">Computer Vision subscription key.</param>
        /// <param name="apiUrl">Service endpoint URL.</param>
        /// <param name="imageUrl">URL of the image to analyze.</param>
        private static async Task AnalyzeImage(string apiKey, string apiUrl, string imageUrl)
        {
            var client = new VisionServiceClient(apiKey, apiUrl);

            var requestedFeatures = new[]
            {
                VisualFeature.Description, VisualFeature.Tags
            };

            ShowAnalysisResult(await client.AnalyzeImageAsync(imageUrl, requestedFeatures));
        }
Example #29
0
        // Microsoft Vision API function
        /// <summary>
        /// Opens the image file at the given path and analyzes it for all visual features.
        /// </summary>
        /// <param name="imageFilePath">Path of the local image file.</param>
        /// <returns>The full analysis result from the service.</returns>
        public static async Task <AnalysisResult> UploadAndAnalyzeImage(string imageFilePath)
        {
            // NOTE(review): fixed one-second pause kept from the original; presumably
            // crude rate limiting — confirm before removing.
            await Task.Delay(1000);

            var client = new VisionServiceClient("1b1fde153ba546dbbc6676d168854b1e", "https://westcentralus.api.cognitive.microsoft.com/vision/v1.0");

            using (var imageStream = File.OpenRead(imageFilePath))
            {
                var features = new[]
                {
                    VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description,
                    VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags
                };
                return await client.AnalyzeImageAsync(imageStream, features);
            }
        }
Example #30
0
        /// <summary>
        /// Analyzes the photo at <paramref name="photoUrl"/> for all visual features.
        /// </summary>
        /// <param name="photoUrl">URL of the photo to analyze.</param>
        /// <returns>The non-null analysis result.</returns>
        /// <exception cref="InvalidOperationException">Thrown when the service returns no result.</exception>
        private async Task <AnalysisResult> CheckImage(string photoUrl)
        {
            VisualFeature[] visualFeatures = new VisualFeature[] { VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags };

            AnalysisResult analysisResult = await visionClient.AnalyzeImageAsync(photoUrl, visualFeatures);

            if (analysisResult == null)
            {
                // More specific than the bare System.Exception originally thrown;
                // still caught by any caller handling Exception.
                throw new InvalidOperationException("Can't detect image");
            }
            return analysisResult;
        }
        /// <summary>
        /// Handles the file chosen in the picker: shows it in the preview image,
        /// sends a clone of its stream to the Vision service, and toggles the
        /// preview panel. (async void is acceptable here as a UI event-style handler.)
        /// </summary>
        async void SelectFile()
        {
            Picker_Hide();
            try
            {
                if (Picker_SelectedFile != null)
                {
                    txtFileName.Text = Picker_SelectedFile.Path;
                    var stream = await Picker_SelectedFile.OpenAsync(Windows.Storage.FileAccessMode.Read);

                    // A clone goes to the service so the original stream can feed the preview.
                    var stream_send = stream.CloneStream();
                    var image       = new BitmapImage();
                    image.SetSource(stream);
                    imgPhoto.Source = image;
                    size_image      = new Size(image.PixelWidth, image.PixelHeight);

                    ringLoading.IsActive = true;
                    try
                    {
                        //Vision Service
                        VisionServiceClient client = new VisionServiceClient(key);
                        var feature = new VisualFeature[] { VisualFeature.Tags, VisualFeature.Faces, VisualFeature.Description, VisualFeature.Adult, VisualFeature.Categories };

                        var result = await client.AnalyzeImageAsync(stream_send.AsStream(), feature);

                        thisresult = result;
                        if (result != null)
                        {
                            DisplayData(result);
                        }

                        // Toggle the preview panel visibility and its button label.
                        if (stpPreview.Visibility == Visibility.Collapsed)
                        {
                            stpPreview.Visibility = Visibility.Visible;
                            btnShow.Content       = "Hide Preview";
                        }
                        else
                        {
                            stpPreview.Visibility = Visibility.Collapsed;
                            btnShow.Content       = "Show Preview";
                        }
                    }
                    finally
                    {
                        // BUG FIX: the loading ring previously stayed active forever when
                        // the analysis threw — always turn it off.
                        ringLoading.IsActive = false;
                    }
                }
            }
            catch (Exception ex)
            {
                // NOTE(review): the exception is still swallowed as in the original UI
                // sample; consider surfacing it to the user (e.g. the lines below).
                //lblError.Text = ex.Message;
                //lblError.Visibility = Visibility.Visible;
            }
        }
        /// <summary>
        /// Sends a url to Project Oxford and performs analysis
        /// </summary>
        /// <param name="imageUrl">The url of the image to analyze</param>
        /// <returns>The analysis result covering every visual feature.</returns>
        private async Task<AnalysisResult> AnalyzeUrl(string imageUrl)
        {
            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE STARTS HERE
            // -----------------------------------------------------------------------

            // Create the Vision API client from the configured subscription key.
            var client = new VisionServiceClient(SubscriptionKey);
            Log("VisionServiceClient is created");

            // Request every visual feature for the given url.
            Log("Calling VisionServiceClient.AnalyzeImageAsync()...");
            var requested = new[]
            {
                VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description,
                VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags
            };
            return await client.AnalyzeImageAsync(imageUrl, requested);

            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE ENDS HERE
            // -----------------------------------------------------------------------
        }
        /// <summary>
        /// Analyzes the supplied image stream for all visual features.
        /// </summary>
        /// <param name="imageStream">Readable stream containing the image; disposed by this method.</param>
        /// <returns>The analysis result from the service.</returns>
        private async Task<AnalysisResult> UploadAndAnalyzeImage(Stream imageStream)
        {
            var visionServiceClient = new VisionServiceClient(Constants.CuomputerVisionApiKey);
            // (Removed an unused "assembley" local that fetched the executing assembly
            //  via reflection and was never read.)

            // NOTE(review): the caller's stream is disposed here when the call completes —
            // confirm callers do not reuse it.
            using (Stream imageFileStream = imageStream)
            {
                VisualFeature[] visualFeatures = new VisualFeature[]
                {
                    VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description,
                    VisualFeature.Faces, VisualFeature.ImageType, VisualFeature.Tags
                };
                AnalysisResult analysisResult =
                    await visionServiceClient.AnalyzeImageAsync(imageFileStream, visualFeatures);
                return analysisResult;
            }
        }