Example #1
        public static async Task<ImageAnalysisResult> GetImageAnalysisAsync(Guid id, byte[] bytes)
        {
            HttpClient client = new HttpClient();

            client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", CognitiveConstants.ComputerVisionApiSubscriptionKey);

            HttpContent payload = new ByteArrayContent(bytes);

            payload.Headers.ContentType = new MediaTypeWithQualityHeaderValue("application/octet-stream");

            string analysisFeatures = "Color,ImageType,Tags,Categories,Description,Adult";

            var results = await client.PostAsync($"https://api.projectoxford.ai/vision/v1.0/analyze?visualFeatures={analysisFeatures}", payload);

            ImageAnalysisResult result = null;

            try
            {
                var imageAnalysisResult = JsonConvert.DeserializeObject<ImageAnalysisInfo>(await results.Content.ReadAsStringAsync());

                result = new ImageAnalysisResult()
                {
                    id      = id.ToString(),
                    details = imageAnalysisResult,
                    caption = imageAnalysisResult.description.captions.FirstOrDefault().text,
                    tags    = imageAnalysisResult.description.tags.ToList(),
                };
            }
            catch (Exception)
            {
                // Deserialization failed; fall through and return null to the caller.
            }

            return result;
        }
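
A minimal calling sketch for the helper above. The containing class name (ComputerVisionService), the file path, and the use of Guid.NewGuid() for the id are illustrative assumptions, not taken from the example:

        // Hypothetical caller: read image bytes from disk and request an analysis.
        byte[] imageBytes = File.ReadAllBytes("photo.jpg");
        ImageAnalysisResult analysis = await ComputerVisionService.GetImageAnalysisAsync(Guid.NewGuid(), imageBytes);

        if (analysis != null)
        {
            Console.WriteLine($"Caption: {analysis.caption}");
            Console.WriteLine($"Tags: {string.Join(", ", analysis.tags)}");
        }
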
Example #2
        internal static void ApplyAnalysis(this ImageInformation image, ImageAnalysisResult analysis)
        {
            image.Caption    = analysis.caption;
            image.Tags       = analysis.tags;
            image.Categories = analysis.details.categories?.Select(s => s.name.Replace("_", " ")).ToList() ??
                               new List<string>();

            image.IsAdult    = analysis.details.adult.isAdultContent;
            image.AdultScore = analysis.details.adult.adultScore;
            image.IsRacy     = analysis.details.adult.isRacyContent;
            image.RacyScore  = analysis.details.adult.racyScore;

            image.Faces = (from face in analysis.details.faces
                           select new FaceInformation
            {
                Age = face.age,
                Gender = face.gender
            }).ToList();

            image.AccentColor     = $"#{analysis.details.color.accentColor}";
            image.BackgroundColor = $"{analysis.details.color.dominantColorBackground}";
            image.ForegroundColor = $"{analysis.details.color.dominantColorForeground}";
            image.IsBlackAndWhite = analysis.details.color.isBWImg;
            image.IsClipArt       = analysis.details.imageType.clipArtType > 0;
            image.IsLineDrawing   = analysis.details.imageType.lineDrawingType > 0;

            image.IsProcessing = false;
            image.IsTagged     = true;
        }
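
A short sketch of how the ApplyAnalysis extension method might be combined with the AnalyzeImageAsync helper below; the imageBytes and imageInformation variables are assumed to exist in the caller:

        // Hypothetical usage: analyze the raw bytes, then copy the results onto the ImageInformation instance.
        ImageAnalysisResult analysis = await AnalyzeImageAsync(imageBytes);
        if (analysis != null)
        {
            imageInformation.ApplyAnalysis(analysis);
        }
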
        public static async Task<ImageAnalysisResult> AnalyzeImageAsync(byte[] bytes)
        {
            HttpClient client = new HttpClient();

            client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", Common.CoreConstants.ComputerVisionApiSubscriptionKey);

            var payload = new HttpBufferContent(bytes.AsBuffer());

            payload.Headers.ContentType = new HttpMediaTypeHeaderValue("application/octet-stream");

            string visualFeatures = "Color,ImageType,Tags,Categories,Description,Adult";

            var results = await client.PostAsync(new Uri($"{Common.CoreConstants.CognitiveServicesBaseUrl}/vision/v1.0/analyze?visualFeatures={visualFeatures}"), payload);

            ImageAnalysisResult result = null;

            try
            {
                var analysisResults = await results.Content.ReadAsStringAsync();

                var imageAnalysisResult = JsonConvert.DeserializeObject<ImageAnalysisInfo>(analysisResults);

                result = new ImageAnalysisResult()
                {
                    id      = imageAnalysisResult.requestId,
                    details = imageAnalysisResult,
                    caption = imageAnalysisResult.description.captions.FirstOrDefault().text,
                    tags    = imageAnalysisResult.description.tags.ToList(),
                };
            }
            catch (Exception)
            {
                // Deserialization failed; return null to the caller.
            }
            return result;
        }
        internal static void LogImageAnalysisResultCode(ImageAnalysisResult resultCode, ImageAnalysisLogData logData)
        {
            AttachmentProcessingLogger.Initialize();
            switch (resultCode)
            {
            case ImageAnalysisResult.ThumbnailSuccess:
                AttachmentProcessingLogger.LogEvent("StorageAttachmentImageAnalysis", "ThumbnailResult", "Success", "ThumbnailOperationTime", logData.operationTimeMs.ToString());
                AttachmentProcessingLogger.LogEvent("StorageAttachmentImageAnalysis", "ThumbnailSize", logData.thumbnailSize.ToString());
                AttachmentProcessingLogger.LogEvent("StorageAttachmentImageAnalysis", "ThumbnailWidth", logData.thumbnailWidth.ToString(), "ThumbnailHeight", logData.thumbnailHeight.ToString());
                return;

            case ImageAnalysisResult.SalientRegionSuccess:
                AttachmentProcessingLogger.LogEvent("StorageAttachmentImageAnalysis", "SalientRegionResult", "Success", "SalientRegionOperationTime", logData.operationTimeMs.ToString());
                return;

            case ImageAnalysisResult.UnknownFailure:
                AttachmentProcessingLogger.LogEvent("StorageAttachmentImageAnalysis", "Failure", logData.operationTimeMs.ToString());
                return;

            case ImageAnalysisResult.ImageTooSmallForAnalysis:
                AttachmentProcessingLogger.LogEvent("StorageAttachmentImageAnalysis", "ThumbnailResult", "ImageTooSmall", "ThumbnailOperationTime", logData.operationTimeMs.ToString());
                return;

            case ImageAnalysisResult.UnableToPerformSalientRegionAnalysis:
                AttachmentProcessingLogger.LogEvent("StorageAttachmentImageAnalysis", "SalientRegionResult", "Failure", "SalientRegionOperationTime", logData.operationTimeMs.ToString());
                return;

            case ImageAnalysisResult.ImageTooBigForAnalysis:
                AttachmentProcessingLogger.LogEvent("StorageAttachmentImageAnalysis", "ThumbnailResult", "ImageTooBigForAnalysis", "ImageSize", logData.thumbnailSize.ToString());
                return;

            default:
                return;
            }
        }
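
A hedged call-site sketch for the logging helper above. ImageAnalysisLogData is assumed to be a plain data holder exposing the fields the method reads, and the values are illustrative:

        // Hypothetical call site: log a successful thumbnail operation.
        var logData = new ImageAnalysisLogData
        {
            operationTimeMs = 42,
            thumbnailSize   = 8192,
            thumbnailWidth  = 320,
            thumbnailHeight = 240
        };
        LogImageAnalysisResultCode(ImageAnalysisResult.ThumbnailSuccess, logData);
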
Example #5
        private async Task<ImageAnalysisResult> GetImageAnalysisAsync(byte[] bytes)
        {
            ImageAnalysisResult result = null;

            try
            {
                using (var client = new HttpClient())
                {
                    client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", CoreConstants.ComputerVisionApiSubscriptionKey);

                    byte[] lowQualityImageBytes = null;

                    using (var data = NSData.FromArray(bytes))
                    {
                        var image            = UIImage.LoadFromData(data);
                        var lowerQualityData = image.AsJPEG(0.1f);

                        lowQualityImageBytes = new byte[lowerQualityData.Length];
                        System.Runtime.InteropServices.Marshal.Copy(lowerQualityData.Bytes, lowQualityImageBytes, 0, Convert.ToInt32(lowerQualityData.Length));
                    }

                    var payload = new ByteArrayContent(lowQualityImageBytes);

                    payload.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

                    var analysisFeatures = "Color,ImageType,Tags,Categories,Description,Adult,Faces";

                    var uri = new Uri($"{CoreConstants.CognitiveServicesBaseUrl}/analyze?visualFeatures={analysisFeatures}");

                    using (var results = await client.PostAsync(uri, payload))
                    {
                        var analysisResults = await results.Content.ReadAsStringAsync();

                        var imageAnalysisResult = JsonConvert.DeserializeObject<ImageAnalysisInfo>(analysisResults);

                        result = new ImageAnalysisResult
                        {
                            id      = Guid.NewGuid().ToString(),
                            details = imageAnalysisResult,
                            caption = imageAnalysisResult.description?.captions.FirstOrDefault()?.text,
                            tags    = imageAnalysisResult.description?.tags.ToList()
                        };

                        if (string.IsNullOrEmpty(result.caption))
                        {
                            result.caption = "No caption";
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine($"ComputerVisionService.GetImageAnalysisAsync Exception: {ex}");
            }

            return result;
        }
        private async Task<ImageAnalysisResult> GetImageAnalysisAsync(byte[] bytes)
        {
            ImageAnalysisResult result = null;

            try
            {
                using (var client = new HttpClient())
                {
                    client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", CoreConstants.ComputerVisionApiSubscriptionKey);

                    var payload = new HttpBufferContent(bytes.AsBuffer());

                    payload.Headers.ContentType = new HttpMediaTypeHeaderValue("application/octet-stream");

                    var analysisFeatures = "Color,ImageType,Tags,Categories,Description,Adult,Faces";

                    var uri = new Uri($"{CoreConstants.CognitiveServicesBaseUrl}/analyze?visualFeatures={analysisFeatures}");

                    using (var results = await client.PostAsync(uri, payload))
                    {
                        var analysisResults = await results.Content.ReadAsStringAsync();

                        var imageAnalysisResult = JsonConvert.DeserializeObject<ImageAnalysisInfo>(analysisResults);

                        result = new ImageAnalysisResult
                        {
                            id      = Guid.NewGuid().ToString(),
                            details = imageAnalysisResult,
                            caption = imageAnalysisResult.description.captions.FirstOrDefault()?.text,
                            tags    = imageAnalysisResult.description.tags.ToList()
                        };

                        if (string.IsNullOrEmpty(result.caption))
                        {
                            result.caption = "No caption";
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine($"ComputerVisionService.GetImageAnalysisAsync Exception: {ex}");
            }

            return result;
        }
        public KeyValuePair<List<RegionRect>, ImageAnalysisResult> GetSalientRectsAsList()
        {
            ImageAnalysisResult imageAnalysisResult = this.EnsureSalientObjectAnalysisWrapper();

            if (this.salientRegions == null && imageAnalysisResult == ImageAnalysisResult.SalientRegionSuccess)
            {
                this.salientRegions = new List<RegionRect>();
                foreach (SalientObject salientObject in this.salientAnalysis.SalientObjects)
                {
                    RegionRect item = new RegionRect((int)salientObject.Region.BestOutline.Left, (int)salientObject.Region.BestOutline.Top, (int)salientObject.Region.BestOutline.Right, (int)salientObject.Region.BestOutline.Bottom);
                    this.salientRegions.Add(item);
                }
                imageAnalysisResult = ImageAnalysisResult.SalientRegionSuccess;
            }
            else
            {
                imageAnalysisResult = ImageAnalysisResult.UnableToPerformSalientRegionAnalysis;
            }
            return new KeyValuePair<List<RegionRect>, ImageAnalysisResult>(this.salientRegions, imageAnalysisResult);
        }
Example #8
        public KeyValuePair<byte[], ImageAnalysisResult> GenerateThumbnail(Stream imageStream, int minImageWidth, int minImageHeight, int maxThumbnailWidth, int maxThumbnailHeight, out int width, out int height)
        {
            ImageAnalysisResult value = ImageAnalysisResult.UnknownFailure;

            byte[] key = null;
            width  = 0;
            height = 0;
            using (Image image = Image.FromStream(imageStream, true, false))
            {
                if (image.Width < minImageWidth || image.Height < minImageHeight)
                {
                    value = ImageAnalysisResult.ImageTooSmallForAnalysis;
                }
                else
                {
                    this.RotateImageIfNeeded(image);
                    key   = this.PreviewImage(image, maxThumbnailWidth, maxThumbnailHeight, out width, out height);
                    value = ImageAnalysisResult.ThumbnailSuccess;
                }
            }
            return new KeyValuePair<byte[], ImageAnalysisResult>(key, value);
        }
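
A usage sketch for GenerateThumbnail, assuming an instance of the declaring class (here called analyzer); the minimum-size and maximum-thumbnail dimensions are illustrative:

        // Hypothetical caller: produce a thumbnail no larger than 320x240 from a file on disk.
        using (FileStream stream = File.OpenRead("photo.jpg"))
        {
            int width, height;
            KeyValuePair<byte[], ImageAnalysisResult> thumbnail =
                analyzer.GenerateThumbnail(stream, 100, 100, 320, 240, out width, out height);

            if (thumbnail.Value == ImageAnalysisResult.ThumbnailSuccess)
            {
                File.WriteAllBytes("thumbnail.jpg", thumbnail.Key);
            }
        }
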
        public KeyValuePair<byte[], ImageAnalysisResult> GetSalientRectsAsByteArray()
        {
            ImageAnalysisResult imageAnalysisResult = this.EnsureSalientObjectAnalysisWrapper();

            if (this.salientRegionsAsByte == null && imageAnalysisResult == ImageAnalysisResult.SalientRegionSuccess)
            {
                this.salientRegionsAsByte = new byte[this.salientAnalysis.SalientObjects.Count * 4];
                int num = 0;
                foreach (SalientObject salientObject in this.salientAnalysis.SalientObjects)
                {
                    this.salientRegionsAsByte[num]     = (byte)((int)salientObject.Region.BestOutline.Top * 255 / this.imageHeight);
                    this.salientRegionsAsByte[num + 1] = (byte)((int)salientObject.Region.BestOutline.Left * 255 / this.imageWidth);
                    this.salientRegionsAsByte[num + 2] = (byte)((int)salientObject.Region.BestOutline.Bottom * 255 / this.imageHeight);
                    this.salientRegionsAsByte[num + 3] = (byte)((int)salientObject.Region.BestOutline.Right * 255 / this.imageWidth);
                    num += 4;
                }
                imageAnalysisResult = ImageAnalysisResult.SalientRegionSuccess;
            }
            else
            {
                imageAnalysisResult = ImageAnalysisResult.UnableToPerformSalientRegionAnalysis;
            }
            return new KeyValuePair<byte[], ImageAnalysisResult>(this.salientRegionsAsByte, imageAnalysisResult);
        }
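
The byte array returned above packs each salient region as four bytes (top, left, bottom, right), each scaled to the 0-255 range relative to the image height or width. A minimal decoding sketch, assuming the original image dimensions are known and using System.Drawing.Rectangle, which the thumbnail example already depends on:

        // Hypothetical decoder for the packed salient-region byte array.
        static IEnumerable<Rectangle> DecodeSalientRects(byte[] packed, int imageWidth, int imageHeight)
        {
            for (int i = 0; i + 3 < packed.Length; i += 4)
            {
                int top    = packed[i]     * imageHeight / 255;
                int left   = packed[i + 1] * imageWidth  / 255;
                int bottom = packed[i + 2] * imageHeight / 255;
                int right  = packed[i + 3] * imageWidth  / 255;
                yield return Rectangle.FromLTRB(left, top, right, bottom);
            }
        }
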
Example #10
        public async Task<ImageAnalysisResult> AnalyzeImageAsync(string url)
        {
            var analysisResult = new ImageAnalysisResult();

            try
            {
                // Using the Microsoft-provided VisionClientLibrary does not work in .NET Core as-is; a fix is required for ExpandoObject.
                // see: https://github.com/Microsoft/Cognitive-Vision-DotNetCore/pull/1/commits/9c4647edb400aecd4def330537d5bcd74f126111

                Console.WriteLine("\t\t\tContentAnalyzer.AnalyzeImageAsync(): initializing VisionAPI client");

                var visionApiClient = new VisionServiceClient(m_VisionAPISubscriptionKey, "https://westeurope.api.cognitive.microsoft.com/vision/v1.0");

                var visualFeatures = new List<VisualFeature> {
                    VisualFeature.Adult, VisualFeature.Categories, VisualFeature.Color, VisualFeature.Description, VisualFeature.Faces, VisualFeature.ImageType                                           /*, VisualFeature.Tags */
                };
                var details = new List<string> {
                    "Celebrities", "Landmarks"
                };

                Console.WriteLine("\t\t\tContentAnalyzer.AnalyzeImageAsync(): started image analysis");

                var visionApiResult = await visionApiClient.AnalyzeImageAsync(url, visualFeatures, details).ConfigureAwait(false);

                Console.WriteLine("\t\t\tContentAnalyzer.AnalyzeImageAsync(): executing OCR");

                var ocrResult = await visionApiClient.RecognizeTextAsync(url).ConfigureAwait(false);

                Console.WriteLine("\t\t\tContentAnalyzer.AnalyzeImageAsync(): performing tag identification");

                var tagsResult = await visionApiClient.GetTagsAsync(url).ConfigureAwait(false);

                Console.WriteLine("\t\t\tContentAnalyzer.AnalyzeImageAsync(): analysis completed");

                // Mapping VisionAPI Client entity to domain entity
                analysisResult.AdultContent = new ImageAnalysisAdultContentResult {
                    AdultScore = visionApiResult.Adult.AdultScore, IsAdultContent = visionApiResult.Adult.IsAdultContent, IsRacyContent = visionApiResult.Adult.IsRacyContent, RacyScore = visionApiResult.Adult.RacyScore
                };
                analysisResult.Colors = new ImageAnalysisColorResult {
                    AccentColor = visionApiResult.Color.AccentColor, DominantColorBackground = visionApiResult.Color.DominantColorBackground, DominantColorForeground = visionApiResult.Color.DominantColorForeground, IsBWImg = visionApiResult.Color.IsBWImg
                };
                analysisResult.Categories = visionApiResult.Categories.Select(c => new ImageAnalysisCategoryResult {
                    Text = c.Name, Score = c.Score
                }).OrderByDescending(c => c.Score).ToList();
                analysisResult.Descriptions = visionApiResult.Description.Captions.Select(c => new ImageAnalysisDescriptionResult {
                    Text = c.Text, Score = c.Confidence
                }).OrderByDescending(c => c.Score).ToList();

                // Merge detected tags from image analysis and image tags
                analysisResult.Tags = tagsResult.Tags.Select(t => new ImageAnalysisTagResult {
                    Text = t.Name, Score = t.Confidence, Hint = t.Hint
                }).ToList();
                foreach (var t in visionApiResult.Description.Tags)
                {
                    analysisResult.Tags.Add(new ImageAnalysisTagResult {
                        Text = t, Score = 0.0, Hint = string.Empty
                    });
                }

                analysisResult.Faces = visionApiResult.Faces.Select(f => new ImageAnalysisFaceResult {
                    Age = f.Age, Gender = f.Gender == "Male" ? Gender.Male : f.Gender == "Female" ? Gender.Female : Gender.Unknown
                }).ToList();
                analysisResult.Text = ocrResult.Regions.Select(r => new ImageAnalysisTextResult()
                {
                    Language = ocrResult.Language, Orientation = ocrResult.Orientation, TextAngle = ocrResult.TextAngle.GetValueOrDefault(), Text = string.Join(" ", r.Lines.Select(l => string.Join(" ", l.Words.Select(w => w.Text))))
                }).ToList();

                // Extend analysis by estimating reading time for each transcribed text
                foreach (var text in analysisResult.Text)
                {
                    text.WordCount                        = TextTokenizer.GetWordCount(text.Text);
                    text.ReadingTimeInMinutes             = ReadingTimeEstimator.GetEstimatedReadingTime(text.WordCount, text.Language);
                    analysisResult.WatchingTimeInMinutes += text.ReadingTimeInMinutes;
                }

                // Add an additional default time for estimating how long it will take to the user to watch the picture
                analysisResult.WatchingTimeInMinutes += DefaultImageWatchingTime;
            }
            catch (Exception ex)
            {
                Console.WriteLine($"\t\t\tContentAnalyzer.AnalyzeImageAsync(): an error occured while analyzing image - {ex.Message}");
            }

            return analysisResult;
        }
        static async Task<ImageAnalysisResult> AnalyzeImage(string filePath, string factoryId, string cameraId, EnvSettings.AIModule module, EnvSettings.AIModule.Tag tag, string outputFolder)
        {
            try
            {
                ImageAnalysisResult analyzeResult = null;

                // Get output directory details
                string storageAccountName    = _envSettings.GetProperty("StorageAccountName");
                string dbeShareContainerName = _envSettings.GetProperty("DBEShareContainerName");
                string flaggedFolder         = "flagged";
                string nonFlaggedFolder      = "safe";

                // Read image
                RecognitionResults recognitionResults = null;
                byte[]             byteArray          = File.ReadAllBytes(filePath);
                using (ByteArrayContent content = new ByteArrayContent(byteArray))
                {
                    content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

                    try
                    {
                        var response = await _httpClient.PostAsync(module.ScoringEndpoint, content);

                        if (!response.IsSuccessStatusCode)
                        {
                            _consoleLogger.LogError($"Failed to make POST request to module {module.ScoringEndpoint}. Response: {response.ReasonPhrase}");
                            return null;
                        }
                        else
                        {
                            _consoleLogger.LogDebug($"POST request to module {module.ScoringEndpoint} was successful");

                            var contentString = await response.Content.ReadAsStringAsync();

                            recognitionResults = JsonConvert.DeserializeObject <RecognitionResults>(contentString);
                        }
                    }
                    catch (Exception e)
                    {
                        _consoleLogger.LogError($"AnalyzeImage:PostAsync failed to make POST request to module {module.ScoringEndpoint}. Exception: {e}");
                        return null;
                    }
                }

                // We need to differentiate between the current tag being flagged and
                // any other flagged tags from this module in order to mark the image appropriately.
                // If the current tag is flagged, it will also appear in the list of all flagged tags.
                var currentFlaggedTag = recognitionResults.Predictions.Where(x => x.TagName == tag.Name && x.Probability >= tag.Probability);
                var allFlaggedTags    = recognitionResults.Predictions.Where(x => module.Tags.Where(y => x.TagName == y.Name && x.Probability >= y.Probability).Count() > 0);

                // Create analyze result object
                string fileName = Path.GetFileName(filePath);
                if (currentFlaggedTag.Count() > 0)
                {
                    string imageUri = $"https://{storageAccountName}.blob.core.windows.net/{dbeShareContainerName}/{factoryId}/{cameraId}/{flaggedFolder}/{fileName}";

                    _consoleLogger.LogInformation($"---> Found tags in image {filePath}: {string.Join(", ", currentFlaggedTag.Select(x => x.TagName))}");

                    // Create message content
                    string   datePattern = @"^(\d{4})(\d{2})(\d{2})T(\d{2})(\d{2})(\d{2})(\d{3})$";
                    var      match       = Regex.Match(Path.GetFileNameWithoutExtension(fileName), datePattern);
                    DateTime timestamp   = new DateTime(
                        Convert.ToInt32(match.Groups[1].Value),
                        Convert.ToInt32(match.Groups[2].Value),
                        Convert.ToInt32(match.Groups[3].Value),
                        Convert.ToInt32(match.Groups[4].Value),
                        Convert.ToInt32(match.Groups[5].Value),
                        Convert.ToInt32(match.Groups[6].Value),
                        Convert.ToInt32(match.Groups[7].Value));

                    analyzeResult = new ImageAnalysisResult()
                    {
                        ImageUri  = imageUri,
                        Timestamp = timestamp,
                        Results   = ImageAnalysisResult.Result.Results(currentFlaggedTag, module),
                    };

                    // Get flat results for reporting purposes
                    FlatImageAnalysisResult[] flatImageResults = FlatImageAnalysisResult.Convert(factoryId, cameraId, analyzeResult);
                    foreach (var flatResult in flatImageResults)
                    {
                        // Create hub message and set its properties
                        var message = new Message(Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(flatResult)));
                        message.Properties.Add("messageType", "reporting");

                        // Send reporting message
                        await SendMessageToHub(message);

                        // Log it
                        _consoleLogger.LogTrace($"Sent reporting message for camera {cameraId}");
                    }
                }
                else
                {
                    _consoleLogger.LogTrace($"No tags were found in image {filePath}");
                }

                // Save image to output directory
                string destinationFolder = allFlaggedTags.Count() > 0 ? flaggedFolder : nonFlaggedFolder;

                // Set output directory
                string outputDirectory = Path.Combine(outputFolder, factoryId, cameraId, destinationFolder);
                if (!Directory.Exists(outputDirectory))
                {
                    Directory.CreateDirectory(outputDirectory);
                }

                // Save image
                string imageOutputPath = Path.Combine(outputDirectory, fileName);
                File.WriteAllBytes(imageOutputPath, byteArray);
                _consoleLogger.LogTrace($"Moving image to final destination folder {imageOutputPath}");

                // Save payload
                string fileOutputPath = Path.Combine(outputDirectory, Path.ChangeExtension(fileName, "json"));
                File.WriteAllText(fileOutputPath, JsonConvert.SerializeObject(recognitionResults.Predictions));

                // Delete image from local folder
                File.Delete(filePath);

                return analyzeResult;
            }
            catch (Exception e)
            {
                _consoleLogger.LogCritical("AnalyzeImage caught an exception: {0}", e);
                return null;
            }
        }