Example 1
        /*
         * END - LARGE PERSON GROUP
         */

        /*
         * GROUP FACES
         * This method of grouping is useful when you don't need to create a person group: it automatically groups similar
         * faces, whereas the person group method lets you define the grouping yourself.
         * A single "messyGroup" array contains the face IDs for which no similar faces were found.
         */
        public static async Task Group(IFaceClient client, string url, string recognition_model)
        {
            Console.WriteLine("========GROUP FACES========");
            Console.WriteLine();

            // Create list of image names
            List <string> imageFileNames = new List <string>
            {
                "Family1-Dad1.jpg",
                "Family1-Dad2.jpg",
                "Family3-Lady1.jpg",
                "Family1-Daughter1.jpg",
                "Family1-Daughter2.jpg",
                "Family1-Daughter3.jpg"
            };
            // Create empty dictionary to store the groups
            Dictionary <string, string> faces = new Dictionary <string, string>();
            List <Guid?> faceIds = new List <Guid?>();

            // First, detect the faces in your images
            foreach (var imageFileName in imageFileNames)
            {
                // Detect faces from image url.
                IList <DetectedFace> detectedFaces = await DetectFaceRecognize(client, $"{url}{imageFileName}", recognition_model);

                // Add detected faceId to faceIds and faces.
                faceIds.Add(detectedFaces[0].FaceId.Value);
                faces.Add(detectedFaces[0].FaceId.ToString(), imageFileName);
            }
            Console.WriteLine();
            // Group the faces. Grouping result is a group collection, each group contains similar faces.
            var groupResult = await client.Face.GroupAsync(faceIds);

            // Each face group contains faces that are similar to the other members of that group.
            for (int i = 0; i < groupResult.Groups.Count; i++)
            {
                Console.Write($"Found face group {i + 1}: ");
                foreach (var faceId in groupResult.Groups[i])
                {
                    Console.Write($"{faces[faceId.ToString()]} ");
                }
                Console.WriteLine(".");
            }

            // MessyGroup contains all faces that are not similar to any other face, i.e. the faces that could not be grouped.
            if (groupResult.MessyGroup.Count > 0)
            {
                Console.Write("Found messy face group: ");
                foreach (var faceId in groupResult.MessyGroup)
                {
                    Console.Write($"{faces[faceId.ToString()]} ");
                }
                Console.WriteLine(".");
            }
            Console.WriteLine();
        }
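For context, a minimal sketch of invoking Group from an entry point. The Authenticate helper, ENDPOINT, and SUBSCRIPTION_KEY are assumed to be defined as in the other examples in this collection, and the base image URL shown is the one used by the official Azure Face samples; substitute your own values.

        static async Task Main(string[] args)
        {
            // Assumed base URL hosting the sample images referenced in Group above.
            const string IMAGE_BASE_URL = "https://csdx.blob.core.windows.net/resources/Face/Images/";

            IFaceClient client = Authenticate(ENDPOINT, SUBSCRIPTION_KEY);
            await Group(client, IMAGE_BASE_URL, RecognitionModel.Recognition01);
        }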
Example 2
        /// <summary>
        /// Detects faces for recognition from a local image file.
        /// </summary>
        /// <param name="faceClient">The face client.</param>
        /// <param name="url">Path to the local image file.</param>
        /// <param name="recognition_model">The recognition model to use.</param>
        /// <returns>The list of detected faces.</returns>
        private static async Task <List <DetectedFace> > DetectFaceRecognize(IFaceClient faceClient, string url, string recognition_model)
        {
            using Stream fileStream = File.OpenRead(url);
            // Detect faces from the local image stream. The caller supplies the recognition model;
            // detection model 2 is used because no attributes are retrieved.
            IList <DetectedFace> detectedFaces = await faceClient.Face.DetectWithStreamAsync(fileStream, recognitionModel : recognition_model, detectionModel : DetectionModel.Detection02);

            Console.WriteLine($"{detectedFaces.Count} face(s) detected from image `{Path.GetFileName(url)}`");
            return(detectedFaces.ToList());
        }
        /// <summary>
        /// Emotion analysis.
        /// </summary>
        /// <param name="client">Face service instance</param>
        /// <param name="url">URL of the image</param>
        /// <param name="recognitionModel">Recognition model</param>
        /// <returns></returns>
        private static async Task <IList <DetectedFace> > DetectFaceExtract(IFaceClient client, string url, string recognitionModel)
        {
            IList <DetectedFace> detectedFaces = await client.Face.DetectWithUrlAsync($"{url}",
                                                                                      returnFaceAttributes : new List <FaceAttributeType> {
                FaceAttributeType.Emotion
            },
                                                                                      recognitionModel : recognitionModel);

            return(detectedFaces);
        }
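A hedged usage sketch (not part of the original example) showing how the emotion scores requested by DetectFaceExtract above can be read back; each score is a confidence between 0 and 1.

        private static void PrintEmotions(IList <DetectedFace> detectedFaces)
        {
            foreach (DetectedFace face in detectedFaces)
            {
                Emotion emotion = face.FaceAttributes.Emotion;
                Console.WriteLine($"Happiness: {emotion.Happiness}, Sadness: {emotion.Sadness}, Neutral: {emotion.Neutral}");
            }
        }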
 public AzurePersonGroupRepository(ILogger <AzurePersonGroupRepository> logger)
 {
     _logger                = logger;
     SUBSCRIPTION_KEY       = Environment.GetEnvironmentVariable("AZURE_FACE_SUBSCRIPTION_KEY");
     ENDPOINT               = Environment.GetEnvironmentVariable("AZURE_FACE_ENDPOINT");
     RECOGNITION_MODEL1     = RecognitionModel.Recognition01;
     _personGroupId         = "myroomates";
     _client                = Authenticate(ENDPOINT, SUBSCRIPTION_KEY);
     _confidenceCoefficient = 0.5;
 }
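The Authenticate helper called by this constructor (and by several other examples in this collection) is not shown. A minimal sketch, assuming it follows the official quickstart pattern:

 private static IFaceClient Authenticate(string endpoint, string key)
 {
     // Build a FaceClient from the subscription key and point it at the resource endpoint.
     return new FaceClient(new ApiKeyServiceClientCredentials(key)) { Endpoint = endpoint };
 }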
 public FaceRecognitionService(IConfiguration config)
 {
     this.IMAGE_BASE_URL   = config.GetValue <string>("BlobStorage:StorageUrl");
     this.SUBSCRIPTION_KEY = config.GetValue <string>("FaceRecognition:Key");
     this.ENDPOINT         = config.GetValue <string>("FaceRecognition:Endpoint");
     this.client           = new FaceClient(new ApiKeyServiceClientCredentials(SUBSCRIPTION_KEY))
     {
         Endpoint = ENDPOINT
     };
 }
Example 6
        private static async Task <List <DetectedFace> > DetectFaceRecognize(IFaceClient faceClient, Stream file,
                                                                             string recognitionModel)
        {
            var detectedFaces = await faceClient.Face.DetectWithStreamAsync(
                file,
                recognitionModel : recognitionModel,
                detectionModel : DetectionModel.Detection02);

            return(detectedFaces.ToList());
        }
Example 7
        public virtual IEnumerable trainFaceRecongnition(PXAdapter adapter)
        {
            Save.Press();

            PXLongOperation.StartOperation(this, delegate()
            {
                IFaceClient client = Trainer.Authenticate(MasterView.Current.FaceApiEndpoint, MasterView.Current.FaceApiSubscriptionKey);
                var trainer        = new Trainer(client, MasterView.Current.FaceApiGroupID);
                bool uploaded      = false;
                foreach (EPEmployee emp in PXSelect <EPEmployee> .Select(this))
                {
                    bool deleted  = false;
                    Person person = null;
                    Guid[] files  = PXNoteAttribute.GetFileNotes(this.Caches[typeof(EPEmployee)], emp);

                    foreach (Guid fileID in files)
                    {
                        var fm            = new PX.SM.UploadFileMaintenance();
                        PX.SM.FileInfo fi = fm.GetFile(fileID);
                        string ext        = System.IO.Path.GetExtension(fi.Name).ToLower();
                        PXTrace.WriteWarning(emp.AcctCD.TrimEnd() + " " + ext);
                        if (ext.IsIn(".png", ".jpg", ".bmp"))
                        {
                            if (!deleted)
                            {
                                var utilities = new Utilities(client, MasterView.Current.FaceApiGroupID);
                                utilities.DeleteEmployee(emp.AcctCD.TrimEnd());
                                deleted = true;
                            }

                            if (person == null)
                            {
                                person = trainer.GetEmployee_CreateIfNonExistent(emp.AcctCD.TrimEnd());
                            }


                            using (var stream = new System.IO.MemoryStream(fi.BinData))
                            {
                                PXTrace.WriteWarning(emp.AcctCD.TrimEnd() + " " + fi.Name + " " + stream.Length);
                                trainer.UploadTrainingImageStream(person, stream);
                                Thread.Sleep(1000);
                                uploaded = true;
                            }
                        }
                    }
                }
                if (uploaded)
                {
                    trainer.InvokeTraining();
                }
            });


            return(adapter.Get());
        }
Example 8
            //Person group creation for one Client
            static async Task CreateAndTrainWorkersPersonGroup(IFaceClient faceClient, List <WorkersForProcessing> listOfWorkersForProcessing, string personGroupId)
            {
                // Create a person group.
                await CreatePersonGroup(faceClient, personGroupId);

                // The person group person creation.
                await CreatePersonsWithFacesInPersonGroup(listOfWorkersForProcessing, faceClient, personGroupId);

                // Train Person group
                await TrainPersonGroup(faceClient, personGroupId);
            }
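The three helpers called above are not shown. A sketch of what TrainPersonGroup might look like, assuming it wraps TrainAsync and polls the training status until training finishes:

            static async Task TrainPersonGroup(IFaceClient faceClient, string personGroupId)
            {
                await faceClient.PersonGroup.TrainAsync(personGroupId);

                // Poll until training leaves the queued/running states.
                TrainingStatus trainingStatus;
                do
                {
                    await Task.Delay(1000);
                    trainingStatus = await faceClient.PersonGroup.GetTrainingStatusAsync(personGroupId);
                } while (trainingStatus.Status == TrainingStatusType.Running || trainingStatus.Status == TrainingStatusType.Nonstarted);
            }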
Example 9
        /*
         * Delete person group
         * After this entire sample is executed, delete the person group in your Azure account,
         * otherwise you cannot recreate one with the same name (if running sample repeatedly).
         */
        public static async Task DeletePersonGroup(IFaceClient client, string personGroupId)
        {
            Console.WriteLine("Delete started... ");

            await client.PersonGroup.DeleteAsync(personGroupId);

            Console.WriteLine($"Deleted the person group {personGroupId}.");
            Console.WriteLine();
        }
Example 10
        // Detects the faces in a photo
        private static async Task <List <DetectedFace> > DetectFaceRecognize(IFaceClient faceClient, string url, string recognition_model)
        {
            // Detect faces from a local image file. The caller supplies the recognition model;
            // detection model 2 is used because no attributes are retrieved.

            using FileStream stream = new FileStream(url, FileMode.Open);
            IList <DetectedFace> detectedFaces = await faceClient.Face.DetectWithStreamAsync(stream, recognitionModel : recognition_model, detectionModel : DetectionModel.Detection02);

            Console.WriteLine($"{detectedFaces.Count} face(s) detected from image `{url}`");
            return(detectedFaces.ToList());
        }
Example 11
        public Tracker()
        {
            //TODO: store these somewhere
            var key      = "";
            var endpoint = "https://southcentralus.api.cognitive.microsoft.com";

            faceClient = new FaceClient(
                new ApiKeyServiceClientCredentials(key),
                new System.Net.Http.DelegatingHandler[] { });
            faceClient.Endpoint = endpoint;
        }
Example 12
        public FaceComparisonService(IOptions <AzureCognitiveServicesConfig> config, ILogger <FormRecognizerService> logger)
        {
            if (config == null)
            {
                throw new ArgumentNullException(nameof(config));
            }

            _logger = logger ?? throw new ArgumentNullException(nameof(logger));

            _client = Authenticate(config.Value.Endpoint, config.Value.SubscriptionKey);
        }
Example 13
        public PhotoController(IOptions <AppSettings> appSettings)
        {
            this._appSettings = appSettings;

            _faceClient = new FaceClient(
                new ApiKeyServiceClientCredentials(appSettings.Value.FaceKey),
                new System.Net.Http.DelegatingHandler[] { }
                );
            _faceClient.Endpoint = appSettings.Value.FaceEndPoint;
            _customerGroupId     = appSettings.Value.FaceGroupID;
        }
Example 14
        public static async Task CreateSampleProjectAsync(IFaceClient client, string url, string personGroupId)
        {
            Dictionary <string, string[]> personDictionary =
                new Dictionary <string, string[]>
            {
                { "Family1-Dad", new[] { "Family1-Dad1.jpg", "Family1-Dad2.jpg" } },
                { "Family1-Mom", new[] { "Family1-Mom1.jpg", "Family1-Mom2.jpg" } },
                { "Family1-Son", new[] { "Family1-Son1.jpg", "Family1-Son2.jpg" } },
                { "Family1-Daughter", new[] { "Family1-Daughter1.jpg", "Family1-Daughter2.jpg" } },
                { "Family2-Lady", new[] { "Family2-Lady1.jpg", "Family2-Lady2.jpg" } },
                { "Family2-Man", new[] { "Family2-Man1.jpg", "Family2-Man2.jpg" } }
            };

            // string personGroupId = Guid.NewGuid().ToString();
            //  sourcePersonGroup = personGroupId; // This is solely for the snapshot operations example
            Console.WriteLine($"Create a person group ({personGroupId}).");
            await client.PersonGroup.CreateAsync(personGroupId, personGroupId, recognitionModel : PersonGroupSample.RECOGNITION_MODEL1);

            // The similar faces will be grouped into a single person group person.
            foreach (var groupedFace in personDictionary.Keys)
            {
                // Limit TPS
                await Task.Delay(250);

                Person person = await client.PersonGroupPerson.CreateAsync(personGroupId : personGroupId, name : groupedFace);

                Console.WriteLine($"Create a person group person '{groupedFace}'.");

                // Add face to the person group person.
                foreach (var similarImage in personDictionary[groupedFace])
                {
                    Console.WriteLine($"Add face to the person group person({groupedFace}) from image `{similarImage}`");
                    PersistedFace face = await client.PersonGroupPerson.AddFaceFromUrlAsync(personGroupId, person.PersonId,
                                                                                            $"{url}{similarImage}", similarImage);
                }
            }
            Console.WriteLine();
            Console.WriteLine($"Train person group {personGroupId}.");
            await client.PersonGroup.TrainAsync(personGroupId);

            // Wait until the training is completed.
            while (true)
            {
                await Task.Delay(1000);

                var trainingStatus = await client.PersonGroup.GetTrainingStatusAsync(personGroupId);

                Console.WriteLine($"Training status: {trainingStatus.Status}.");
                if (trainingStatus.Status == TrainingStatusType.Succeeded)
                {
                    break;
                }
            }
        }
Example 15
        public FaceCrop()
        {
            /* This class isn't terribly well organized and probably needs to be broken down into
             * additional helper classes. I just needed something that would crop a few thousand
             * campers' faces really quickly and organize them accordingly.
             * --Mark
             */

            var settings = SettingsComponent.GetComponent <MicrosoftFaceSettings>();

            faceClient = Authenticate(settings.GetAttributeValue("Endpoint"), settings.GetAttributeValue("SubscriptionKey"));
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="CognitiveService"/> class.
        /// </summary>
        /// <param name="faceClient">The faceClient<see cref="IFaceClient"/>.</param>
        /// <param name="azureBlobService">The azureBlobService<see cref="IAzureBlobService"/>.</param>
        /// <param name="tableStorage">The tableStorage<see cref="ITableStorage"/>.</param>
        /// <param name="studentService">The studentService<see cref="IStudentService"/>.</param>
        public CognitiveService(IFaceClient faceClient, IAzureBlobService azureBlobService, ITableStorage tableStorage, IStudentService studentService)
        {
            _faceClient       = faceClient;
            _azureBlobService = azureBlobService;
            _tableStorage     = tableStorage;
            _studentService   = studentService;

            if (Uri.IsWellFormedUriString(SettingConfigurations.CognitiveServiceEndPoint, UriKind.Absolute))
            {
                faceClient.Endpoint = SettingConfigurations.CognitiveServiceEndPoint;
            }
        }
        public HodorTabViewModel()
        {
            var subscriptionKey = ConfigurationManager.GetAzureSubscriptionKey();
            var apiUri          = ConfigurationManager.GetAzureApiUri();

            _faceClient           = new FaceClient(new ApiKeyServiceClientCredentials(subscriptionKey), new System.Net.Http.DelegatingHandler[] { });
            _faceClient.Endpoint  = apiUri;
            _personGroupViewModel = new PersonGroupViewModel(_faceClient, () => { return(ImagePath); });

            Messenger.Default?.Register <FaceDetectedMessage>(this, FaceDetectedMessageHandler);
            Messenger.Default?.Register <CapturingStartedMessage>(this, CapturingStartedMessageHandler);
        }
Example 18
        public Form1()
        {
            InitializeComponent();
            btnAnalysis.Enabled = false;
            _driver             = new Driver(Constants.subscriptionKey, Constants.uriBase);
            _face      = new Face(_driver);
            faceClient = new FaceClient(
                new ApiKeyServiceClientCredentials(Constants.subscriptionKey),
                new System.Net.Http.DelegatingHandler[] { });

            faceClient.BaseUri = new Uri(Constants.uriBaseFrom);
        }
        /*
         * END - FIND SIMILAR
         */

        /*
         * VERIFY
         * The Verify operation takes a face ID from DetectedFace or PersistedFace and either another face ID
         * or a Person object and determines whether they belong to the same person. If you pass in a Person object,
         * you can optionally pass in a PersonGroup to which that Person belongs to improve performance.
         */
        public static async Task Verify(IFaceClient client, string url, string recognitionModel02)
        {
            Console.WriteLine("========VERIFY========");
            Console.WriteLine();

            List <string> targetImageFileNames = new List <string> {
                "Family1-Dad1.jpg", "Family1-Dad2.jpg"
            };
            string sourceImageFileName1 = "Family1-Dad3.jpg";
            string sourceImageFileName2 = "Family1-Son1.jpg";


            List <Guid> targetFaceIds = new List <Guid>();

            foreach (var imageFileName in targetImageFileNames)
            {
                // Detect faces from target image url.
                List <DetectedFace> detectedFaces = await DetectFaceRecognize(client, $"{url}{imageFileName}", recognitionModel02);

                targetFaceIds.Add(detectedFaces[0].FaceId.Value);
                Console.WriteLine($"{detectedFaces.Count} faces detected from image `{imageFileName}`.");
            }

            // Detect faces from source image file 1.
            List <DetectedFace> detectedFaces1 = await DetectFaceRecognize(client, $"{url}{sourceImageFileName1}", recognitionModel02);

            Console.WriteLine($"{detectedFaces1.Count} faces detected from image `{sourceImageFileName1}`.");
            Guid sourceFaceId1 = detectedFaces1[0].FaceId.Value;

            // Detect faces from source image file 2.
            List <DetectedFace> detectedFaces2 = await DetectFaceRecognize(client, $"{url}{sourceImageFileName2}", recognitionModel02);

            Console.WriteLine($"{detectedFaces2.Count} faces detected from image `{sourceImageFileName2}`.");
            Guid sourceFaceId2 = detectedFaces2[0].FaceId.Value;

            // Verification example for faces of the same person.
            VerifyResult verifyResult1 = await client.Face.VerifyFaceToFaceAsync(sourceFaceId1, targetFaceIds[0]);

            Console.WriteLine(
                verifyResult1.IsIdentical
                                        ? $"Faces from {sourceImageFileName1} & {targetImageFileNames[0]} are of the same (Positive) person, similarity confidence: {verifyResult1.Confidence}."
                                        : $"Faces from {sourceImageFileName1} & {targetImageFileNames[0]} are of different (Negative) persons, similarity confidence: {verifyResult1.Confidence}.");

            // Verification example for faces of different persons.
            VerifyResult verifyResult2 = await client.Face.VerifyFaceToFaceAsync(sourceFaceId2, targetFaceIds[0]);

            Console.WriteLine(
                verifyResult2.IsIdentical
                                        ? $"Faces from {sourceImageFileName2} & {targetImageFileNames[0]} are of the same (Negative) person, similarity confidence: {verifyResult2.Confidence}."
                                        : $"Faces from {sourceImageFileName2} & {targetImageFileNames[0]} are of different (Positive) persons, similarity confidence: {verifyResult2.Confidence}.");

            Console.WriteLine();
        }
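The comment above also mentions verifying a face against a Person. A hedged sketch of that variant (faceId, personId, and personGroupId are assumed to come from prior detection and a trained person group):

        public static async Task VerifyFaceToPerson(IFaceClient client, Guid faceId, Guid personId, string personGroupId)
        {
            // Passing the person group the Person belongs to can improve performance.
            VerifyResult result = await client.Face.VerifyFaceToPersonAsync(faceId, personId, personGroupId);

            Console.WriteLine(result.IsIdentical
                              ? $"The face belongs to the person, similarity confidence: {result.Confidence}."
                              : $"The face does not belong to the person, similarity confidence: {result.Confidence}.");
        }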
        public CandidatesController(ApplicationDbContext context, IConfiguration configuration)
        {
            string key      = configuration["Face:SubscriptionKey"];
            string endpoint = configuration["Face:Endpoint"];

            _context    = context;
            _faceClient = new FaceClient(
                new ApiKeyServiceClientCredentials(key),
                new System.Net.Http.DelegatingHandler[] { }
                );
            _faceClient.Endpoint = endpoint;
        }
        public void FaceSnapshotTestFaceList()
        {
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                HttpMockServer.Initialize(this.GetType(), "FaceSnapshotTestFaceList");

                var sourceFaceListId = "source-face-list-id";
                var name             = $"name{sourceFaceListId}";
                var userdata         = $"userdata{sourceFaceListId}";

                IFaceClient client = GetFaceClient(HttpMockServer.CreateInstance());
                client.FaceList.CreateAsync(sourceFaceListId, name, userdata, recognitionModel: recognitionModel).Wait();

                using (FileStream stream = new FileStream(Path.Combine("TestImages", "Satya4.jpg"), FileMode.Open))
                {
                    var persistedFace = client.FaceList.AddFaceFromStreamAsync(sourceFaceListId, stream, detectionModel: detectionModel).Result;
                    Assert.NotNull(persistedFace);
                }

                var objectType       = SnapshotObjectType.FaceList;
                var objectId         = sourceFaceListId;
                var snapshotUserData = "User provided data for the snapshot.";

                var takeSnapshotResult = client.Snapshot.TakeAsync(objectType, objectId, ApplyScope, snapshotUserData).Result;
                Assert.NotNull(takeSnapshotResult.OperationLocation);

                var operationStatus = GetOperationResult(client, takeSnapshotResult.OperationLocation);
                Assert.NotNull(operationStatus);
                Assert.Equal(OperationStatusType.Succeeded, operationStatus.Status);

                var resourceId = Guid.Parse(operationStatus.ResourceLocation.Split('/').Last());

                var targetFacelistId    = "target-face-list-id";
                var applyMode           = SnapshotApplyMode.CreateNew;
                var applySnapshotResult = client.Snapshot.ApplyAsync(resourceId, targetFacelistId, applyMode).Result;
                Assert.NotNull(applySnapshotResult.OperationLocation);

                operationStatus = GetOperationResult(client, applySnapshotResult.OperationLocation);
                Assert.NotNull(operationStatus);
                Assert.Equal(OperationStatusType.Succeeded, operationStatus.Status);

                var targetObjectId = operationStatus.ResourceLocation.Split('/').Last();
                Assert.Equal(targetFacelistId, targetObjectId);

                var targetFacelist = client.FaceList.GetAsync(targetFacelistId).Result;
                Assert.NotNull(targetFacelist);
                Assert.True(targetFacelist.PersistedFaces.Count == 1);

                client.FaceList.DeleteAsync(sourceFaceListId).Wait();
                client.FaceList.DeleteAsync(targetFacelistId).Wait();
                client.Snapshot.DeleteAsync(resourceId).Wait();
            }
        }
Example 22
 public MyFace()
 {
     using (StreamReader r = new StreamReader(configPath))
     {
         string jsonString = r.ReadToEnd();
         Config config     = JsonConvert.DeserializeObject <Config>(jsonString);
         faceClient = new FaceClient(
             new ApiKeyServiceClientCredentials(config.SubscriptionKey),
             new System.Net.Http.DelegatingHandler[] { });
         faceClient.Endpoint = config.FaceEndpoint;
     }
 }
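The Config type deserialized above is application-defined and not shown. A minimal sketch matching the two properties the constructor reads:

 public class Config
 {
     public string SubscriptionKey { get; set; }
     public string FaceEndpoint { get; set; }
 }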
Example 23
        /// <summary>
        /// Identification against the person group.
        /// </summary>
        private static async Task IdentifyInPersonGroup(IFaceClient client, string personGroupId)
        {
            using (var fileStream = new FileStream("data\\PersonGroup\\Daughter\\Daughter1.jpg", FileMode.Open, FileAccess.Read))
            {
                var detectedFaces = await client.Face.DetectWithStreamAsync(fileStream, recognitionModel : recognitionModel);

                var result = await client.Face.IdentifyAsync(detectedFaces.Select(face => face.FaceId).Where(faceId => faceId != null).Select(faceId => faceId.Value).ToList(), personGroupId);

                Console.WriteLine("Test identify against PersonGroup");
                Console.WriteLine(JsonConvert.SerializeObject(result));
                Console.WriteLine();
            }
        }
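A hedged sketch (not in the original) of reading the IdentifyAsync result instead of dumping raw JSON; each IdentifyResult carries the candidate persons for one detected face:

        private static void PrintIdentifyResults(IList <IdentifyResult> results)
        {
            foreach (IdentifyResult identifyResult in results)
            {
                foreach (IdentifyCandidate candidate in identifyResult.Candidates)
                {
                    Console.WriteLine($"Face {identifyResult.FaceId} matched person {candidate.PersonId} with confidence {candidate.Confidence}.");
                }
            }
        }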
        private static async Task <List <DetectedFace> > DetectFaceRecognize(IFaceClient faceClient, Stream img, string RECOGNITION_MODEL1)
        {
            try
            {
                IList <DetectedFace> detectedFaces = await faceClient.Face.DetectWithStreamAsync(img, recognitionModel : RECOGNITION_MODEL1);

                return(detectedFaces.ToList());
            }
            catch (Exception)
            {
                // NOTE: swallowing every exception and returning null hides detection errors from the caller.
                return(null);
            }
        }
        public async Task <int> DetectFaceExtract(IFaceClient client, string url, string recognitionModel)
        {
            IList <DetectedFace> detectedFaces;

            detectedFaces = await client.Face.DetectWithUrlAsync($"{url}{string.Format("{0}.{1}", imageName.ImageName, imageName.ImageExtension)}",
                                                                 returnFaceAttributes : new List <FaceAttributeType> {
                FaceAttributeType.Accessories, FaceAttributeType.Age
            },
                                                                 recognitionModel : recognitionModel);

            Console.WriteLine($"{detectedFaces.Count} face(s) detected from image `{imageName}`.");
            return((int)detectedFaces[0].FaceAttributes.Age);
        }
Example 26
        public async Task <IActionResult> GetFaceDetails([FromForm] IFormFile file)
        {
            if (file == null)
            {
                return(BadRequest());
            }
            string      SUBSCRIPTION_KEY = _config.GetValue <string>("Keys:SUBSCRIPTION_KEY");
            string      ENDPOINT         = _config.GetValue <string>("Keys:ENDPOINT");
            IFaceClient client           = Authenticate(ENDPOINT, SUBSCRIPTION_KEY);
            FaceImage   dataToReturn     = await DetectFaceExtract(client, file, RECOGNITION_MODEL);

            return(Ok(dataToReturn));
        }
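DetectFaceExtract, FaceImage, and RECOGNITION_MODEL here are application-defined and not shown. A sketch of the detection call such a helper would presumably wrap for an uploaded IFormFile:

        private static async Task <IList <DetectedFace> > DetectFromUpload(IFaceClient client, IFormFile file, string recognitionModel)
        {
            // Open the uploaded file as a stream and request a couple of attributes as an illustration.
            using Stream stream = file.OpenReadStream();
            return await client.Face.DetectWithStreamAsync(stream,
                                                           returnFaceAttributes : new List <FaceAttributeType> {
                FaceAttributeType.Age, FaceAttributeType.Emotion
            },
                                                           recognitionModel : recognitionModel);
        }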
Example 27
        static async Task Main(string[] args)
        {
            Console.WriteLine("Running");

            var configCognitiveSubscriptionKey = Environment.GetEnvironmentVariable("AZURE_COGNITIVE_SUBSCRIPTION_KEY");
            var configCognitiveEndpoint        = Environment.GetEnvironmentVariable("AZURE_COGNITIVE_ENDPOINT");
            var faceBaseImageUrl = Environment.GetEnvironmentVariable("AZURE_COGNITIVE_IMAGE_BASE_URL");

            IFaceClient client = Authenticate(configCognitiveEndpoint, configCognitiveSubscriptionKey);

            //await DetectFaceExtract(client, faceBaseImageUrl, RECOGNITION_MODEL);
            await FindSimilar(client, faceBaseImageUrl, RECOGNITION_MODEL);
        }
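The FindSimilar method invoked above is not shown. A hedged sketch of its core call, assuming the source and target face IDs have already been collected via detection:

        private static async Task FindSimilarCore(IFaceClient client, Guid sourceFaceId, IList <Guid?> targetFaceIds)
        {
            // Compare one detected face against a set of previously detected face IDs.
            IList <SimilarFace> similarFaces = await client.Face.FindSimilarAsync(sourceFaceId, faceIds : targetFaceIds);

            foreach (SimilarFace similarFace in similarFaces)
            {
                Console.WriteLine($"Similar face: {similarFace.FaceId}, confidence: {similarFace.Confidence}.");
            }
        }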
Example 28
        public FaceService(IMapper mapper, IImagesRepository tRepository, IConfiguration configuration, IHttpContextAccessor contextAccessor)
            : base(mapper, tRepository, contextAccessor)
        {
            var faceApiConfig = configuration.GetSection("FaceApi");

            FaceClient = new FaceClient(
                new ApiKeyServiceClientCredentials(faceApiConfig["faceKey"]),

                new DelegatingHandler[] { })
            {
                Endpoint = faceApiConfig["faceEndpoint"]
            };
        }
Example 29
        private async Task <DetectedFace> DetectFaceExtract(IFaceClient client, string url, string recognitionModel)
        {
            var face = await client.Face.DetectWithUrlAsync(url,
                                                            returnFaceAttributes : new List <FaceAttributeType> {
                FaceAttributeType.Accessories, FaceAttributeType.Age,
                FaceAttributeType.Blur, FaceAttributeType.Emotion, FaceAttributeType.Exposure, FaceAttributeType.FacialHair,
                FaceAttributeType.Gender, FaceAttributeType.Glasses, FaceAttributeType.Hair, FaceAttributeType.HeadPose,
                FaceAttributeType.Makeup, FaceAttributeType.Noise, FaceAttributeType.Occlusion, FaceAttributeType.Smile
            },
                                                            recognitionModel : recognitionModel);

            return(face.FirstOrDefault());
        }
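A hedged usage sketch (not in the original) of reading a few of the attributes requested above from the returned face:

        private static void PrintFaceAttributes(DetectedFace face)
        {
            if (face?.FaceAttributes == null)
            {
                return;
            }
            Console.WriteLine($"Age: {face.FaceAttributes.Age}");
            Console.WriteLine($"Glasses: {face.FaceAttributes.Glasses}");
            Console.WriteLine($"Smile: {face.FaceAttributes.Smile}");
        }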
Example 30
        /*
         * Take a snapshot of a person group
         * This sample uses a pre-existing person group and copies it from one Azure region to another
         * (for example, from the EastUS region to the WestUS region). The same process can be used for face lists.
         * NOTE: the copy of the person group in the target region gets a new person group ID, so it is no longer
         * associated with the source person group.
         */
        public static async Task Snapshot(IFaceClient clientSource, IFaceClient clientTarget, string personGroupId, Guid azureSubscriptionId)
        {
            Console.WriteLine("========Sample of creating a snapshot========");
            // Take a snapshot for the person group that was previously created in your source region.
            var takeSnapshotResult = await clientSource.Snapshot.TakeAsync(SnapshotObjectType.PersonGroup, personGroupId, new[] { azureSubscriptionId });

            // Get operation id from response for tracking the progress of snapshot taking.
            var operationId = Guid.Parse(takeSnapshotResult.OperationLocation.Split('/')[2]);

            Console.WriteLine($"Taking snapshot(operation ID: {operationId})... Started");

            // Wait for taking the snapshot to complete.
            OperationStatus operationStatus = null;

            do
            {
                Thread.Sleep(TimeSpan.FromMilliseconds(1000));
                // Get the status of the operation.
                operationStatus = await clientSource.Snapshot.GetOperationStatusAsync(operationId);

                Console.WriteLine($"Operation Status: {operationStatus.Status}");
            }while (operationStatus.Status != OperationStatusType.Succeeded && operationStatus.Status != OperationStatusType.Failed);
            // Confirm the location of the resource where the snapshot is taken and its snapshot ID
            var snapshotId = Guid.Parse(operationStatus.ResourceLocation.Split('/')[2]);

            Console.WriteLine($"Source region snapshot ID: {snapshotId}");
            Console.WriteLine($"Taking snapshot of person group: {personGroupId}... Done\n");

            // Apply the snapshot in target region, with a new ID.
            var newPersonGroupId = Guid.NewGuid().ToString();

            targetPersonGroup = newPersonGroupId;
            var applySnapshotResult = await clientTarget.Snapshot.ApplyAsync(snapshotId, newPersonGroupId);

            // Get operation id from response for tracking the progress of snapshot applying.
            var applyOperationId = Guid.Parse(applySnapshotResult.OperationLocation.Split('/')[2]);

            Console.WriteLine($"Applying snapshot(operation ID: {applyOperationId})... Started");
            // Wait for applying operation to complete
            do
            {
                Thread.Sleep(TimeSpan.FromMilliseconds(1000));
                // Get the status of the operation.
                operationStatus = await clientSource.Snapshot.GetOperationStatusAsync(applyOperationId);

                Console.WriteLine($"Operation Status: {operationStatus.Status}");
            }while (operationStatus.Status != OperationStatusType.Succeeded && operationStatus.Status != OperationStatusType.Failed);
            // Confirm location of the target resource location, with its ID.
            Console.WriteLine($"Person group in new region: {newPersonGroupId}");
            Console.WriteLine("Applying snapshot... Done\n");
        }