private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            if (!CameraList.HasItems)
            {
                MessageArea.Text = "No cameras found; cannot start processing";
                return;
            }

            // Clean leading/trailing spaces in API keys.
            Properties.Settings.Default.FaceAPIKey   = Properties.Settings.Default.FaceAPIKey.Trim();
            Properties.Settings.Default.VisionAPIKey = Properties.Settings.Default.VisionAPIKey.Trim();

            // Create API clients.
            _faceClient = new FaceAPI.FaceClient(new FaceAPI.ApiKeyServiceClientCredentials(Properties.Settings.Default.FaceAPIKey))
            {
                Endpoint = Properties.Settings.Default.FaceAPIHost
            };
            _visionClient = new VisionAPI.ComputerVisionClient(new VisionAPI.ApiKeyServiceClientCredentials(Properties.Settings.Default.VisionAPIKey))
            {
                Endpoint = Properties.Settings.Default.VisionAPIHost
            };

            // How often to analyze.
            _grabber.TriggerAnalysisOnInterval(Properties.Settings.Default.AnalysisInterval);

            // Reset message.
            MessageArea.Text = "";

            // Record the start time, for the auto-stop feature.
            _startTime = DateTime.Now;

            await _grabber.StartProcessingCameraAsync(CameraList.SelectedIndex);
        }
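The start time recorded above feeds an auto-stop elsewhere in the app. A minimal sketch of such a check, assuming a 5-minute limit and a FrameGrabber-style StopProcessingAsync method on _grabber (both are assumptions, not the sample's actual code):

        // Hedged sketch: one way _startTime can drive an auto-stop. The
        // 5-minute limit is an assumption; only _startTime, _grabber, and
        // MessageArea come from the handler above.
        private async Task AutoStopIfExpiredAsync()
        {
            if (DateTime.Now - _startTime > TimeSpan.FromMinutes(5))
            {
                await _grabber.StopProcessingAsync();
                MessageArea.Text = "Processing stopped automatically after 5 minutes.";
            }
        }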
Example #2
        public ComputerVisionClient(IComputerVisionConfiguration config)
        {
            _config = config ?? throw new ArgumentNullException(nameof(config));

            _client = new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ComputerVisionClient(
                new ApiKeyServiceClientCredentials(_config.SubscriptionKey),
                new System.Net.Http.DelegatingHandler[] { })
            {
                Endpoint = _config.ApiUrl
            };
        }
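A minimal usage sketch for this wrapper, assuming a trivial IComputerVisionConfiguration implementation; the StaticVisionConfiguration class and the placeholder key and URL are hypothetical, inferred from the SubscriptionKey and ApiUrl members used above:

        // Hypothetical configuration type for illustration only; its members
        // are inferred from the _config usages in the constructor above.
        public sealed class StaticVisionConfiguration : IComputerVisionConfiguration
        {
            public string SubscriptionKey { get; set; }
            public string ApiUrl { get; set; }
        }

        // Construct the wrapper with placeholder values (not real credentials).
        var client = new ComputerVisionClient(new StaticVisionConfiguration
        {
            SubscriptionKey = "<your-computer-vision-key>",
            ApiUrl = "https://<your-resource-name>.cognitiveservices.azure.com"
        });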
Example #3
        private async Task ProcessImage(SoftwareBitmap image)
        {
            try
            {
                Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ComputerVisionClient visionClient = new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ComputerVisionClient(
                    new ApiKeyServiceClientCredentials(settings.VisionKey),
                    new System.Net.Http.DelegatingHandler[] { });

                Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient faceClient = new Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient(
                    new ApiKeyServiceClientCredentials(settings.FaceKey),
                    new System.Net.Http.DelegatingHandler[] { });

                visionClient.Endpoint = $"https://{settings.VisionRegion}.api.cognitive.microsoft.com";
                faceClient.Endpoint   = $"https://{settings.FaceRegion}.api.cognitive.microsoft.com";

                List<VisualFeatureTypes> features = new List<VisualFeatureTypes>()
                {
                    VisualFeatureTypes.Categories, VisualFeatureTypes.Description,
                    VisualFeatureTypes.Tags, VisualFeatureTypes.Faces
                };
                // The list of Face attributes to return.
                IList<FaceAttributeType> faceAttributes = new FaceAttributeType[]
                {
                    FaceAttributeType.Gender, FaceAttributeType.Age,
                    FaceAttributeType.Smile, FaceAttributeType.Emotion,
                    FaceAttributeType.Glasses, FaceAttributeType.Hair
                };

                try
                {
                    // Throttle full image analysis to at most once per second.
                    if (DateTime.Now.Subtract(imageAnalysisLastDate).TotalSeconds > 1)
                    {
                        using (var ms = new InMemoryRandomAccessStream())
                        {
                            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);

                            encoder.SetSoftwareBitmap(image);
                            await encoder.FlushAsync();

                            // Rewind before reading; FlushAsync leaves the position at the
                            // end, so the API would otherwise receive an empty stream.
                            ms.Seek(0);
                            var analysis = await visionClient.AnalyzeImageInStreamAsync(ms.AsStream(), features);

                            UpdateWithAnalysis(analysis);
                        }
                        imageAnalysisLastDate = DateTime.Now;
                    }


                    using (var ms = new InMemoryRandomAccessStream())
                    {
                        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);

                        encoder.SetSoftwareBitmap(image);
                        await encoder.FlushAsync();

                        // Rewind before reading, for the same reason as above.
                        ms.Seek(0);
                        var analysisFace = await faceClient.Face.DetectWithStreamWithHttpMessagesAsync(ms.AsStream(), returnFaceAttributes: faceAttributes);

                        UpdateFaces(analysisFace, image.PixelHeight, image.PixelWidth);
                    }
                }
                catch (Exception)
                {
                    // Swallow per-frame API errors so one failed call doesn't stop the camera loop.
                }
            }
            catch (Exception)
            {
                // Swallow encoding errors for the same reason; dropping a frame is acceptable here.
            }
        }
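Both calls above re-encode the same frame to JPEG. A small helper sketch that factors out the duplicated encoding; the EncodeToJpegStreamAsync name is mine, not the sample's, and it assumes the same UWP imaging types used above:

        // Hypothetical helper: encode a SoftwareBitmap to JPEG once and return
        // a .NET stream rewound to the start, ready to hand to an SDK call.
        private static async Task<Stream> EncodeToJpegStreamAsync(SoftwareBitmap image)
        {
            var ms = new InMemoryRandomAccessStream();
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);
            encoder.SetSoftwareBitmap(image);
            await encoder.FlushAsync();

            ms.Seek(0); // rewind so the caller reads from the first byte
            return ms.AsStream(); // disposing this stream disposes the underlying buffer
        }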
Example #4
        private async Task ProcessImage(SoftwareBitmap image)
        {
            try
            {
                Func<Task<Stream>> imageStreamCallback;

                using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
                {
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);

                    encoder.SetSoftwareBitmap(image);
                    await encoder.FlushAsync();

                    // Copy the encoded JPEG bytes out of the stream so each API call
                    // can replay the image from a fresh MemoryStream.
                    using (var reader = new DataReader(stream.GetInputStreamAt(0)))
                    {
                        var bytes = new byte[stream.Size];
                        await reader.LoadAsync((uint)stream.Size);

                        reader.ReadBytes(bytes);
                        imageStreamCallback = () => Task.FromResult<Stream>(new MemoryStream(bytes));
                    }
                }


                Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ComputerVisionClient visionClient = new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ComputerVisionClient(
                    new ApiKeyServiceClientCredentials(settings.ComputerVisionKey),
                    new System.Net.Http.DelegatingHandler[] { });

                // Create a Custom Vision prediction client, passing in the prediction key.
                CustomVisionPredictionClient customVisionClient = new CustomVisionPredictionClient()
                {
                    ApiKey   = settings.CustomVisionKey,
                    Endpoint = $"https://{settings.CustomVisionRegion}.api.cognitive.microsoft.com"
                };

                Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient faceClient = new Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient(
                    new ApiKeyServiceClientCredentials(settings.FaceKey),
                    new System.Net.Http.DelegatingHandler[] { });


                visionClient.Endpoint = settings.ComputerVisionEndpoint;
                faceClient.Endpoint   = settings.FaceEndpoint;

                List<VisualFeatureTypes> features = new List<VisualFeatureTypes>()
                {
                    VisualFeatureTypes.Categories, VisualFeatureTypes.Description,
                    VisualFeatureTypes.Tags, VisualFeatureTypes.Faces, VisualFeatureTypes.Brands
                };
                // The list of Face attributes to return.
                IList<FaceAttributeType> faceAttributes = new FaceAttributeType[]
                {
                    FaceAttributeType.Gender, FaceAttributeType.Age,
                    FaceAttributeType.Smile, FaceAttributeType.Emotion,
                    FaceAttributeType.Glasses, FaceAttributeType.Hair
                };

                try
                {
                    // Throttle full analysis to once per second and guard against overlapping runs.
                    if (!imageAnalysisRunning && DateTime.Now.Subtract(imageAnalysisLastDate).TotalMilliseconds > 1000)
                    {
                        imageAnalysisRunning = true;

                        _ = Task.Run(async () =>
                        {
                            ImageAnalysis analysis     = await visionClient.AnalyzeImageInStreamAsync(await imageStreamCallback(), features);
                            ImagePrediction analysisCV = null;

                            try
                            {
                                analysisCV = await customVisionClient.DetectImageWithNoStoreAsync(new Guid(settings.CustomVisionProjectId), settings.CustomVisionIterationName, await imageStreamCallback());
                            }
                            catch (Exception)
                            {
                                // Custom Vision is optional here; on failure, fall back to
                                // the Computer Vision results alone.
                            }


                            UpdateWithAnalysis(analysis, analysisCV);

                            imageAnalysisLastDate = DateTime.Now;
                            imageAnalysisRunning  = false;
                        });
                    }

                    var analysisFace = await faceClient.Face.DetectWithStreamWithHttpMessagesAsync(await imageStreamCallback(), returnFaceId: true, returnFaceAttributes: faceAttributes);

                    imageWidth  = image.PixelWidth;
                    imageHeight = image.PixelHeight;
                    facesControl.UpdateEvent(new CognitiveEvent()
                    {
                        Faces = analysisFace.Body, ImageWidth = image.PixelWidth, ImageHeight = image.PixelHeight
                    });

                    // Identify people only when faces were found and identification is enabled.
                    if (analysisFace.Body.Count() > 0 && settings.DoFaceDetection)
                    {
                        var groups = await faceClient.PersonGroup.ListWithHttpMessagesAsync();

                        var group = groups.Body.FirstOrDefault(x => x.Name == settings.GroupName);
                        if (group != null)
                        {
                            var results = await faceClient.Face.IdentifyWithHttpMessagesAsync(analysisFace.Body.Select(x => x.FaceId.Value).ToArray(), group.PersonGroupId);

                            foreach (var identifyResult in results.Body)
                            {
                                var cand = identifyResult.Candidates.FirstOrDefault(x => x.Confidence > settings.FaceThreshold / 100d);
                                if (cand == null)
                                {
                                    Console.WriteLine("No one identified");
                                }
                                else
                                {
                                    // Look up the person record for the first candidate above the threshold.
                                    var candidateId = cand.PersonId;
                                    var person      = await faceClient.PersonGroupPerson.GetWithHttpMessagesAsync(group.PersonGroupId, candidateId);

                                    tagsControl.UpdateEvent(new CognitiveEvent()
                                    {
                                        IdentifiedPerson = person.Body, IdentifiedPersonPrediction = cand.Confidence
                                    });
                                    Console.WriteLine("Identified as {0}", person.Body.Name);
                                }
                            }
                        }
                    }
                }
                catch (Exception)
                {
                    // Swallow per-frame API errors so a single failed call doesn't stop processing.
                }
            }
            catch (Exception)
            {
                // Swallow encoding errors too; dropping a frame beats crashing the loop.
            }
        }
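The imageStreamCallback pattern above is worth keeping: each SDK call consumes its stream, so a factory that replays the captured bytes lets one frame feed Computer Vision, Custom Vision, and Face independently. A standalone sketch of the same idea; the ByteReplayingSource name is mine, not the sample's:

        // Hypothetical helper illustrating the replayable-stream pattern used
        // by imageStreamCallback: capture the encoded bytes once, then hand
        // each caller its own fresh stream positioned at the start.
        public sealed class ByteReplayingSource
        {
            private readonly byte[] _bytes;

            public ByteReplayingSource(byte[] encodedImage) => _bytes = encodedImage;

            // Each call returns an independent stream, so concurrent SDK calls
            // never fight over a single stream's read position.
            public Stream OpenRead() => new MemoryStream(_bytes, writable: false);
        }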