Example #1
        private async void PerformFaceAnalysis(StorageFile file)
        {
            var imageInfo = await FileHelper.GetImageInfoForRendering(file.Path);
            NewImageSizeWidth = 300;
            NewImageSizeHeight = NewImageSizeWidth * imageInfo.Item2 / imageInfo.Item1; // keep the aspect ratio (Item1 = width, Item2 = height)

            var newSourceFile = await FileHelper.CreateCopyOfSelectedImage(file);
            var uriSource = new Uri(newSourceFile.Path);
            SelectedFileBitmapImage = new BitmapImage(uriSource);


            // start face api detection
            var faceApi = new FaceApiHelper();
            DetectedFaces = await faceApi.StartFaceDetection(newSourceFile.Path, newSourceFile, imageInfo, "4c138b4d82b947beb2e2926c92d1e514");

            // draw a semi-transparent rectangle over each detected face
            var color = Color.FromArgb(125, 255, 0, 0);
            var bg = new SolidColorBrush(color);

            DetectedFacesCanvas = new ObservableCollection<Canvas>();
            foreach (var detectedFace in DetectedFaces)
            {
                var margin = new Thickness(detectedFace.RectLeft, detectedFace.RectTop, 0, 0);
                var canvas = new Canvas()
                {
                    Background = bg,
                    HorizontalAlignment = HorizontalAlignment.Left,
                    VerticalAlignment = VerticalAlignment.Top,
                    Height = detectedFace.RectHeight,
                    Width = detectedFace.RectWidth,
                    Margin = margin
                };
                DetectedFacesCanvas.Add(canvas);
            }
        }
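FaceApiHelper.StartFaceDetection is not shown in this example. The following is a minimal sketch of what such an overload could look like, assuming the older Microsoft.ProjectOxford.Face client, that imageInfo is a Tuple<int, int> of the original pixel width and height, and a hypothetical DetectedFaceViewModel carrying the Rect* properties consumed by the canvas loop above:

using System;
using System.Collections.ObjectModel;
using System.IO;
using System.Threading.Tasks;
using Windows.Storage;
using Microsoft.ProjectOxford.Face;

// Hypothetical model exposing the RectLeft/RectTop/RectWidth/RectHeight
// properties that the rectangle-drawing loop above binds to.
public class DetectedFaceViewModel
{
    public double RectLeft { get; set; }
    public double RectTop { get; set; }
    public double RectWidth { get; set; }
    public double RectHeight { get; set; }
}

public class FaceApiHelper
{
    // Sketch only: detect faces and scale their rectangles from the original
    // image size (imageInfo.Item1 x imageInfo.Item2) down to the rendered width.
    public async Task<ObservableCollection<DetectedFaceViewModel>> StartFaceDetection(
        string imagePath, StorageFile imageFile, Tuple<int, int> imageInfo, string subscriptionKey)
    {
        // imagePath is kept only to mirror the call above; the StorageFile is what gets read.
        var detectedFaces = new ObservableCollection<DetectedFaceViewModel>();
        var faceClient = new FaceServiceClient(subscriptionKey);

        const double renderedWidth = 300.0;              // assumption: matches NewImageSizeWidth above
        double scale = renderedWidth / imageInfo.Item1;  // Item1 = original width, Item2 = original height

        using (var stream = await imageFile.OpenStreamForReadAsync())
        {
            var faces = await faceClient.DetectAsync(stream);
            foreach (var face in faces)
            {
                detectedFaces.Add(new DetectedFaceViewModel
                {
                    RectLeft   = face.FaceRectangle.Left * scale,
                    RectTop    = face.FaceRectangle.Top * scale,
                    RectWidth  = face.FaceRectangle.Width * scale,
                    RectHeight = face.FaceRectangle.Height * scale
                });
            }
        }

        return detectedFaces;
    }
}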
Example #2
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            _state = StreamingState.Idle;
            if (_faceTracker == null)
            {
                _faceTracker = await FaceTracker.CreateAsync();
            }

            if (_faceApiHelper == null)
            {
                try
                {
                    _faceApiHelper  = new FaceApiHelper();
                    _eventHubHelper = new EventHubHelper();
                    // not needed
                    //await _faceApiHelper.CheckGroupExistAsync();
                }
                catch (Microsoft.ProjectOxford.Face.FaceAPIException faceEx)
                {
                    ShowErrorHelper.ShowDialog(faceEx.ErrorMessage, faceEx.ErrorCode);
                }
                catch (Microsoft.Azure.EventHubs.EventHubsException eventhubEx)
                {
                    ShowErrorHelper.ShowDialog(eventhubEx.Message);
                }
                catch (Exception ex)
                {
                    ShowErrorHelper.ShowDialog(ex.Message);
                }
            }
        }
Example #3
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            PaintingCanvas.Background = null;
            _state = StreamingState.Idle;

            if (_faceDetector == null)
            {
                _faceDetector = await FaceDetector.CreateAsync();
            }

            if (_faceApiHelper == null)
            {
                try
                {
                    _faceApiHelper = new FaceApiHelper();
                    await _faceApiHelper.CheckGroupExistAsync();
                }
                catch (FaceAPIException faceEx)
                {
                    ShowAlertHelper.ShowDialog(faceEx.ErrorMessage, faceEx.ErrorCode);
                }
                catch (Exception ex)
                {
                    ShowAlertHelper.ShowDialog(ex.Message);
                }
            }
        }
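Neither OnNavigatedTo shows how the created tracker or detector is fed with camera frames. Below is a minimal sketch of a single frame-processing step, assuming a MediaCapture preview is already running elsewhere on the page; FaceTracker works on NV12 frames, so the preview frame is requested in that format:

using System.Collections.Generic;
using System.Threading.Tasks;
using Windows.Graphics.Imaging;
using Windows.Media;
using Windows.Media.Capture;
using Windows.Media.FaceAnalysis;

// Sketch only: grab one preview frame and run it through the FaceTracker
// created in OnNavigatedTo. The MediaCapture instance and the preview
// dimensions are assumed to be set up elsewhere on the page.
private async Task<IList<DetectedFace>> ProcessCurrentFrameAsync(
    MediaCapture mediaCapture, FaceTracker faceTracker, int previewWidth, int previewHeight)
{
    using (var videoFrame = new VideoFrame(BitmapPixelFormat.Nv12, previewWidth, previewHeight))
    {
        // Copy the current preview frame into the NV12 buffer, then let the tracker analyze it.
        await mediaCapture.GetPreviewFrameAsync(videoFrame);
        return await faceTracker.ProcessNextFrameAsync(videoFrame);
    }
}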
Example #4
        public FaceControllerTest()
        {
            var client = new HttpClient();

            var jsonFile = Path.Combine(Directory.GetCurrentDirectory(), "..\\..\\..\\appsettings.json");
            var builder  = new ConfigurationBuilder().AddJsonFile(jsonFile);
            var helper   = new FaceApiHelper(client, null, builder.Build());

            _controller = new FaceController(helper);
        }
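The actions of FaceController are not shown here, so any test body is an assumption; a hypothetical xUnit test built on the constructor above might look like this (the Detect action name and its url parameter are made up for illustration):

using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Xunit;

[Fact]
public async Task Detect_ReturnsOk_ForValidImageUrl()
{
    // Hypothetical action name and parameter; adjust to the controller's real surface.
    var result = await _controller.Detect("https://example.com/sample-face.jpg");

    Assert.IsType<OkObjectResult>(result);
}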
Example #5
    // Use this for initialization
    void Start()
    {
        _faceApiHelper = new FaceApiHelper(this, Location, SubscriptionKey);


        //option 1
        //var imageFilePath = Application.dataPath + @"\Images\Cookie.jpg";
        //_faceApiHelper.Detect(imageFilePath, Callback);


        //webcam
        _webCamTexture = new WebCamTexture();
        Preview.material.mainTexture = _webCamTexture;
        _webCamTexture.Play();
    }
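Start() only wires up the webcam preview; nothing is sent to the helper yet. A minimal sketch of a periodic capture step, assuming Detect takes a file path plus the same Callback referenced in the commented-out option 1 (for example, scheduled from Start() with InvokeRepeating(nameof(CaptureAndDetect), 3f, 3f)):

    // Sketch only: copy the current webcam frame to a temporary JPG and hand it to the helper.
    private void CaptureAndDetect()
    {
        if (!_webCamTexture.isPlaying)
        {
            return;
        }

        // Copy the current webcam frame into a Texture2D and encode it as JPG.
        var frame = new Texture2D(_webCamTexture.width, _webCamTexture.height);
        frame.SetPixels(_webCamTexture.GetPixels());
        frame.Apply();

        var filePath = System.IO.Path.Combine(Application.temporaryCachePath, "webcam-frame.jpg");
        System.IO.File.WriteAllBytes(filePath, frame.EncodeToJPG());
        Destroy(frame);

        // Assumption: same signature as the commented-out option 1 in Start().
        _faceApiHelper.Detect(filePath, Callback);
    }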
Example #6
        public static async Task<IdentifiedFace> CheckGroupAsync(FaceServiceClient faceClient, Stream stream, string personGroupId, string groupImagesFolder)
        {
            try
            {
                var response = await FaceApiHelper.IdentifyPersonAsync(faceClient, stream, personGroupId);

                if (response?.Candidates == null || response.Candidates.Length == 0)
                {
                    return null;
                }

                // Due to legal limitations, the Face API does not support image retrieval in any circumstance at the moment.
                // You need to store the images and maintain the relationship between face IDs and images yourself.
                var personsFolder = await PicturesHelper.GetPersonFolderAsync(groupImagesFolder);

                var dataSet = await faceClient.ListPersonsAsync(personGroupId);

                var matches =
                    from c in response.Candidates
                    join p in dataSet on c.PersonId equals p.PersonId into ps
                    from p in ps.DefaultIfEmpty()
                    select new IdentifiedFace
                    {
                        Confidence = c.Confidence,
                        PersonName = p == null ? "(No matching face)" : p.Name,
                        FaceId     = c.PersonId
                    };

                var match = matches.OrderByDescending(m => m.Confidence).FirstOrDefault();


                if (match == null)
                {
                    return null;
                }

                var matchFile = await personsFolder.GetFileAsync($"{match.PersonName}.{Constants.LocalPersonFileExtension}");

                IRandomAccessStream photoStream = await matchFile.OpenReadAsync();

                match.FaceStream = photoStream.CloneStream().AsStream();
                return match;
            }
            catch (Exception)
            {
                return null;
            }
        }
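FaceApiHelper.IdentifyPersonAsync is not shown above. A minimal sketch, assuming the Microsoft.ProjectOxford.Face client: detect the first face in the stream, identify it against the person group, and return the IdentifyResult whose Candidates are consumed by CheckGroupAsync:

using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.ProjectOxford.Face;
using Microsoft.ProjectOxford.Face.Contract;

public static class FaceApiHelper
{
    // Sketch only: one detect call followed by one identify call.
    public static async Task<IdentifyResult> IdentifyPersonAsync(
        FaceServiceClient faceClient, Stream stream, string personGroupId)
    {
        var faces = await faceClient.DetectAsync(stream);
        if (faces == null || faces.Length == 0)
        {
            return null;
        }

        // Identify only the first detected face against the trained person group.
        var results = await faceClient.IdentifyAsync(personGroupId, new[] { faces[0].FaceId });
        return results.FirstOrDefault();
    }
}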
Example #7
        public async Task<IActionResult> GetThumbNails()
        {
            try
            {
                if (FaceApiHelper.imageUrl != "")
                {
                    List<FaceApiResponse.FaceInfo> faces = await FaceApiHelper.MakeRequest(faceApiConfig);

                    return new ObjectResult(faces);
                }
                else
                {
                    return BadRequest("No image url");
                }
            }
            catch (Exception ex)
            {
                return BadRequest(ex.Message);
            }
        }
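FaceApiHelper.MakeRequest and its response types are not part of this example. A minimal sketch against the Face API's REST detect endpoint, assuming a hypothetical FaceApiConfig with an Endpoint and a SubscriptionKey and a FaceInfo shape that mirrors the returned faceRectangle:

using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using Newtonsoft.Json;

// Hypothetical configuration object; property names are assumptions.
public class FaceApiConfig
{
    public string Endpoint { get; set; }          // e.g. https://westeurope.api.cognitive.microsoft.com
    public string SubscriptionKey { get; set; }
}

public class FaceApiResponse
{
    public class FaceRectangle
    {
        public int Top { get; set; }
        public int Left { get; set; }
        public int Width { get; set; }
        public int Height { get; set; }
    }

    public class FaceInfo
    {
        public string FaceId { get; set; }
        public FaceRectangle FaceRectangle { get; set; }
    }
}

public static class FaceApiHelper
{
    public static string imageUrl = "";
    private static readonly HttpClient _client = new HttpClient();

    // Sketch only: post the stored image URL to the detect endpoint and
    // deserialize the returned face rectangles.
    public static async Task<List<FaceApiResponse.FaceInfo>> MakeRequest(FaceApiConfig config)
    {
        var requestUri = $"{config.Endpoint}/face/v1.0/detect";
        using (var request = new HttpRequestMessage(HttpMethod.Post, requestUri))
        {
            request.Headers.Add("Ocp-Apim-Subscription-Key", config.SubscriptionKey);
            request.Content = new StringContent(
                JsonConvert.SerializeObject(new { url = imageUrl }),
                Encoding.UTF8,
                "application/json");

            var response = await _client.SendAsync(request);
            response.EnsureSuccessStatusCode();

            var json = await response.Content.ReadAsStringAsync();
            return JsonConvert.DeserializeObject<List<FaceApiResponse.FaceInfo>>(json);
        }
    }
}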
Example #8
        private async void ButtonGetData_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker dialog
            var dlg = new Microsoft.Win32.OpenFileDialog
            {
                DefaultExt = ".jpg",
                Filter     = "Image files (*.jpg)|*.jpg"
            };
            var result = dlg.ShowDialog();

            if (!result.HasValue || !result.Value)
            {
                return;
            }
            SelectedFile = dlg.FileName;
            var faceApi    = new FaceApiHelper();
            var returnData = await faceApi.StartFaceDetection(SelectedFile, "4c138b4d82b947beb2e2926c92d1e514");

            DetectedFaces = returnData.Item1;
            FacesRect     = returnData.Item2;
        }
Example #9
        private async void ButtonGetData_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker dialog
            var dlg = new Microsoft.Win32.OpenFileDialog
            {
                DefaultExt = ".jpg",
                Filter     = "Image files (*.jpg)|*.jpg"
            };
            var result = dlg.ShowDialog();

            if (!result.HasValue || !result.Value)
            {
                return;
            }
            SelectedFile = dlg.FileName;
            var faceApi    = new FaceApiHelper();
            var returnData = await faceApi.StartFaceDetection(SelectedFile, Properties.Settings.Default.FaceApiKey, Properties.Settings.Default.EmotionsApiKey);

            DetectedFaces = returnData.Item1;
            FacesRect     = returnData.Item2;

            StatusInformation = $"{DetectedFaces.Count} faces detected.";
        }
Example #10
 // inject services through controller's constructor
 public FaceController(FaceApiHelper faceApiHelper)
 {
     _faceApiHelper = faceApiHelper;
 }
Example #11
 // inject services through controller's constructor
 public EmployeeProfileController(IHostingEnvironment env, EmployeeProfileDbContext context, FaceApiHelper faceApiHelper)
 {
     _env           = env;
     _context       = context;
     _faceApiHelper = faceApiHelper;
 }
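For the constructor injection in Examples #10 and #11 to work, FaceApiHelper has to be registered with the DI container. A minimal sketch of the registration in Startup.ConfigureServices (the scoped lifetime is an assumption; a singleton would also do if the helper is stateless):

using Microsoft.Extensions.DependencyInjection;

public void ConfigureServices(IServiceCollection services)
{
    services.AddMvc();

    // Register the helper so FaceController / EmployeeProfileController can receive it.
    services.AddScoped<FaceApiHelper>();
}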