Example #1
        public async Task<CognitiveResult> AnalyzeAsync(Stream stream, string language, RecognitionType recognitionType = RecognitionType.All, Func<RecognitionPhase, Task> onProgress = null)
        {
            var result = new CognitiveResult();

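            // Reads the whole image into a byte array, so it can be reused later by the text recognition step.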
            var imageBytes = await stream.ToArrayAsync();

            await RaiseOnProgressAsync(onProgress, RecognitionPhase.QueryingService);

            var            visionService      = new VisionServiceClient(Settings.VisionSubscriptionKey);
            AnalysisResult analyzeImageResult = null;

            if (recognitionType.HasFlag(RecognitionType.Vision))
            {
                var features = new HashSet<VisualFeature> {
                    VisualFeature.Description
                };

                if (recognitionType.HasFlag(RecognitionType.Face) || recognitionType.HasFlag(RecognitionType.Emotion))
                {
                    // If the recognition types include face or emotion, the Faces visual feature is also added,
                    // so that the Face and Emotion services are called only if really needed.
                    features.Add(VisualFeature.Faces);
                }

                try
                {
                    analyzeImageResult = await visionService.AnalyzeImageAsync(stream, features);
                }
                catch (ClientException ex)
                {
                    var exception = await CreateExceptionAsync(ex.Error.Code, ex.Error.Message, "Vision", ex.GetHttpStatusCode(), ex, language, onProgress);

                    throw exception;
                }

                Caption originalDescription;
                Caption filteredDescription;
                var     visionSettings = VisionSettingsProvider != null ? await VisionSettingsProvider.GetSettingsAsync() : null;

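                // Checks whether the description returned by the Vision service is usable, taking the optional vision settings into account.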
                var isValid = analyzeImageResult.IsValid(out originalDescription, out filteredDescription, visionSettings);

                var visionResult = result.VisionResult;
                visionResult.IsValid        = isValid;
                visionResult.RawDescription = originalDescription.Text;
                visionResult.Confidence     = originalDescription.Confidence;

                if (isValid)
                {
                    visionResult.Description           = filteredDescription.Text;
                    visionResult.TranslatedDescription = await TranslateAsync(filteredDescription.Text, language, onProgress);
                }
            }

            if ((recognitionType.HasFlag(RecognitionType.Face) || recognitionType.HasFlag(RecognitionType.Emotion)) &&
                (analyzeImageResult?.Faces.Any() ?? true))      // If the Vision service was called above, checks whether any face was detected; otherwise, proceeds anyway.
            {
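                // Creates the Face client used for face detection and, when a person group is configured, identification.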
                var faceService = new FaceServiceClient(Settings.FaceSubscriptionKey);

                await RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingFaces);

                try
                {
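                    // Rewinds the stream, which may have been consumed by the Vision call above.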
                    stream.Position = 0;

                    var attributes = new HashSet<FaceAttributeType> {
                        FaceAttributeType.Gender, FaceAttributeType.Age
                    };

                    if (recognitionType.HasFlag(RecognitionType.Emotion))
                    {
                        // If the recognition types include emotion, the Emotion face attribute is also added,
                        // so that this feature is requested only if really needed.
                        attributes.Add(FaceAttributeType.Emotion);
                    }

                    var faces = await faceService.DetectAsync(stream, returnFaceAttributes: attributes);

                    if (faces.Any())
                    {
                        if (!faceServiceInitialized)
                        {
                            // If necessary, initializes the Face service by obtaining the person group used for identification, if any.
                            await InitializeFaceServiceAsync(faceService);
                        }

                        // Tries to identify faces in the image.
                        IdentifyResult[] faceIdentificationResult = null;

                        if (!string.IsNullOrWhiteSpace(identifyPersonGroupId))
                        {
                            var faceIds = faces.Select(face => face.FaceId).ToArray();
                            faceIdentificationResult = await faceService.IdentifyAsync(identifyPersonGroupId, faceIds);
                        }

                        var faceTasks = new List<Task>();

                        foreach (var face in faces)
                        {
                            await RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingFaces);

                            // Processes each face (including the person lookup for identified faces) in parallel.
                            var task = Task.Run(async () =>
                            {
                                var faceResult = face.GetFaceResult();

                                // Checks if there is a candidate (i.e. a known person) in the identification result.
                                var candidate = faceIdentificationResult?.FirstOrDefault(r => r.FaceId == face.FaceId)?.Candidates.FirstOrDefault();
                                if (candidate != null)
                                {
                                    // Gets the person name.
                                    var person = await faceService.GetPersonAsync(identifyPersonGroupId, candidate.PersonId);
                                    faceResult.IdentifyConfidence = candidate.Confidence;
                                    faceResult.Name = person?.Name;
                                }

                                result.FaceResults.Add(faceResult);
                            });

                            faceTasks.Add(task);
                        }

                        await Task.WhenAll(faceTasks);
                    }
                }
                catch (FaceAPIException ex)
                {
                    var exception = await CreateExceptionAsync(ex.ErrorCode, ex.ErrorMessage, "Face", ex.HttpStatus, ex, language, onProgress);

                    throw exception;
                }
            }

            if (recognitionType.HasFlag(RecognitionType.Text))
            {
                await RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingText);

                try
                {
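                    // Uses the buffered image bytes, because the original stream may already have been consumed.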
                    using (var ms = new MemoryStream(imageBytes))
                    {
                        var results = await visionService.RecognizeTextAsync(ms);

                        var text = results.GetRecognizedText();
                        result.OcrResult.Text = text;
                    }
                }
                catch (Microsoft.ProjectOxford.Vision.ClientException ex)
                {
                    var exception = await CreateExceptionAsync(ex.Error.Code, ex.Error.Message, "Vision", ex.GetHttpStatusCode(), ex, language, onProgress);

                    throw exception;
                }
            }

            return result;
        }
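
A minimal calling sketch for the method above, assuming a service instance (here called cognitiveService) exposing AnalyzeAsync; GetPhotoStreamAsync, photoStream, and the progress handler are illustrative names only, not part of the original sample (Debug.WriteLine comes from System.Diagnostics):

        // Hypothetical caller: cognitiveService and GetPhotoStreamAsync are assumed names used for illustration only.
        using (var photoStream = await GetPhotoStreamAsync())
        {
            var analysis = await cognitiveService.AnalyzeAsync(photoStream, "en", RecognitionType.All,
                phase =>
                {
                    // Reports each recognition phase as it is raised.
                    Debug.WriteLine($"Recognition phase: {phase}");
                    return Task.CompletedTask;
                });

            if (analysis.VisionResult.IsValid)
            {
                // Prefers the translated description when one was produced.
                Debug.WriteLine(analysis.VisionResult.TranslatedDescription ?? analysis.VisionResult.Description);
            }

            foreach (var face in analysis.FaceResults)
            {
                Debug.WriteLine($"Face: {face.Name ?? "unknown"}");
            }
        }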