Example 1
0
        /// <summary>
        /// Durable Functions activity: runs Computer Vision printed-text OCR over the file
        /// referenced by <paramref name="input"/> and stores the raw JSON result on the step.
        /// </summary>
        /// <param name="input">Pipeline step carrying the file URL; updated in place with the OCR outcome.</param>
        /// <param name="log">Function logger.</param>
        /// <returns>The same <see cref="CognitiveStep"/> instance, updated with the result.</returns>
        public async Task <CognitiveStep> CognitivePipeline_OCR([ActivityTrigger] CognitiveStep input, ILogger log)
        {
            log.LogInformation($"******* Starting OCR");

            string key      = GlobalSettings.GetKeyValue("computerVisionKey");
            string endpoint = GlobalSettings.GetKeyValue("computerVisionEndpoint");

            // ComputerVisionClient is IDisposable (it owns an HttpClient); dispose it and the
            // image stream deterministically instead of leaking them as the original did.
            using (ComputerVisionClient computerVision = new ComputerVisionClient(
                new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ApiKeyServiceClientCredentials(key),
                new System.Net.Http.DelegatingHandler[] { })
            {
                Endpoint = endpoint
            })
            {
                var data = await filesStorageRepo.GetFileAsync(input.FileUrl);

                try
                {
                    using (var imageStream = new MemoryStream(data))
                    {
                        // First argument (true) asks the service to detect text orientation.
                        var ocrResult = await computerVision.RecognizePrintedTextInStreamAsync(true, imageStream);

                        input.IsSuccessful  = true;
                        // -1 appears to be a sentinel for "no overall confidence reported by OCR"
                        // — TODO confirm against the pipeline's consumers.
                        input.Confidence    = -1;
                        input.LastUpdatedAt = DateTime.UtcNow;
                        input.RawOutput     = JsonConvert.SerializeObject(ocrResult);
                    }
                }
                catch (ComputerVisionErrorException e)
                {
                    // Mirror the error handling already used by CognitivePipeline_FaceDetection:
                    // record the failure on the step instead of faulting the orchestration.
                    log.LogError($"####### Failed to run OCR: {e.Message}");
                    input.IsSuccessful  = false;
                    input.Confidence    = 0;
                    input.LastUpdatedAt = DateTime.UtcNow;
                    input.Status        = e.Message;
                }

                return input;
            }
        }
Example 2
0
        /// <summary>
        /// Durable Functions activity: runs Computer Vision image analysis requesting only the
        /// Faces feature, and records whether any face was found on the step.
        /// </summary>
        /// <param name="input">Pipeline step carrying the file URL; updated in place with the detection outcome.</param>
        /// <param name="log">Function logger.</param>
        /// <returns>The same <see cref="CognitiveStep"/> instance, updated with the result.</returns>
        public async Task <CognitiveStep> CognitivePipeline_FaceDetectionBasic([ActivityTrigger] CognitiveStep input, ILogger log)
        {
            log.LogInformation($"******* Starting Face Detection");

            string key      = GlobalSettings.GetKeyValue("computerVisionKey");
            string endpoint = GlobalSettings.GetKeyValue("computerVisionEndpoint");

            // ComputerVisionClient is IDisposable (it owns an HttpClient); dispose it and the
            // image stream deterministically instead of leaking them as the original did.
            using (ComputerVisionClient computerVision = new ComputerVisionClient(
                new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ApiKeyServiceClientCredentials(key),
                new System.Net.Http.DelegatingHandler[] { })
            {
                Endpoint = endpoint
            })
            {
                var data = await filesStorageRepo.GetFileAsync(input.FileUrl);

                try
                {
                    using (var imageStream = new MemoryStream(data))
                    {
                        var detectionResult = await computerVision.AnalyzeImageInStreamAsync(imageStream, new List <VisualFeatureTypes> {
                            VisualFeatureTypes.Faces
                        });

                        input.IsSuccessful  = true;
                        // Binary confidence: 1 when at least one face was detected, otherwise 0.
                        input.Confidence    = detectionResult.Faces.Count > 0 ? 1 : 0;
                        input.LastUpdatedAt = DateTime.UtcNow;
                        input.RawOutput     = JsonConvert.SerializeObject(detectionResult);
                    }
                }
                catch (ComputerVisionErrorException e)
                {
                    // Mirror the error handling already used by CognitivePipeline_FaceDetection:
                    // record the failure on the step instead of faulting the orchestration.
                    log.LogError($"####### Failed to detect faces: {e.Message}");
                    input.IsSuccessful  = false;
                    input.Confidence    = 0;
                    input.LastUpdatedAt = DateTime.UtcNow;
                    input.Status        = e.Message;
                }

                return input;
            }
        }
Example 3
0
        /// <summary>
        /// Durable Functions activity: detects faces via the Face API, requesting a fixed set of
        /// face attributes, and stores the serialized detection list on the step.
        /// </summary>
        /// <param name="input">Pipeline step carrying the file URL; updated in place with the detection outcome.</param>
        /// <param name="log">Function logger.</param>
        /// <returns>The same <see cref="CognitiveStep"/> instance, updated with success or failure details.</returns>
        public async Task <CognitiveStep> CognitivePipeline_FaceDetection([ActivityTrigger] CognitiveStep input, ILogger log)
        {
            log.LogInformation($"******* Starting Face Detection");

            // Face API credentials come from app configuration.
            string faceKey      = GlobalSettings.GetKeyValue("faceKey");
            string faceEndpoint = GlobalSettings.GetKeyValue("faceEndpoint");

            IFaceClient client = new FaceClient(
                new Microsoft.Azure.CognitiveServices.Vision.Face.ApiKeyServiceClientCredentials(faceKey),
                new System.Net.Http.DelegatingHandler[] { })
            {
                Endpoint = faceEndpoint
            };

            var imageBytes = await filesStorageRepo.GetFileAsync(input.FileUrl);

            // Attributes requested from the detection call.
            IList <FaceAttributeType> requestedAttributes = new FaceAttributeType[]
            {
                FaceAttributeType.Gender, FaceAttributeType.Age,
                FaceAttributeType.Smile, FaceAttributeType.Emotion,
                FaceAttributeType.Glasses, FaceAttributeType.Hair
            };

            try
            {
                using (Stream imageStream = new MemoryStream(imageBytes))
                {
                    // returnFaceId: true, returnFaceLandmarks: false.
                    IList <DetectedFace> detectedFaces =
                        await client.Face.DetectWithStreamAsync(
                            imageStream, true, false, requestedAttributes);

                    input.IsSuccessful  = true;
                    input.Confidence    = detectedFaces.Count > 0 ? 1 : 0;
                    input.LastUpdatedAt = DateTime.UtcNow;
                    input.RawOutput     = JsonConvert.SerializeObject(detectedFaces);
                    return input;
                }
            }
            catch (APIErrorException e)
            {
                // Face API failure: record the error on the step rather than faulting the orchestration.
                log.LogError($"####### Failed to detect faces: {e.Message}");
                input.IsSuccessful  = false;
                input.Confidence    = 0;
                input.LastUpdatedAt = DateTime.UtcNow;
                input.Status        = e.Message;
                return input;
            }
        }