private static async Task<IList<DetectedFace>> DetectFaceAsync(IRandomAccessStream randomStream)
{
    if (randomStream == null)
    {
        throw new ArgumentNullException(nameof(randomStream));
    }

    var decoder = await BitmapDecoder.CreateAsync(randomStream);
    var transform = new BitmapTransform();
    const float sourceImageHeightLimit = 1280;

    if (decoder.PixelHeight > sourceImageHeightLimit)
    {
        var scalingFactor = sourceImageHeightLimit / decoder.PixelHeight;
        transform.ScaledWidth = (uint)Math.Floor(decoder.PixelWidth * scalingFactor);
        transform.ScaledHeight = (uint)Math.Floor(decoder.PixelHeight * scalingFactor);
    }

    var sourceBitmap = await decoder.GetSoftwareBitmapAsync(decoder.BitmapPixelFormat, BitmapAlphaMode.Premultiplied, transform, ExifOrientationMode.IgnoreExifOrientation, ColorManagementMode.DoNotColorManage);

    const BitmapPixelFormat faceDetectionPixelFormat = BitmapPixelFormat.Gray8;
    var convertedBitmap = sourceBitmap.BitmapPixelFormat != faceDetectionPixelFormat
        ? SoftwareBitmap.Convert(sourceBitmap, faceDetectionPixelFormat)
        : sourceBitmap;

    var faceDetector = await FaceDetector.CreateAsync();
    var detectedFaces = await faceDetector.DetectFacesAsync(convertedBitmap);
    return detectedFaces;
}
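// A minimal usage sketch for the helper above, assuming the caller has a StorageFile
// (for example from a FileOpenPicker); CountFacesInFileAsync is a hypothetical wrapper
// added here for illustration only.
private static async Task<int> CountFacesInFileAsync(StorageFile photoFile)
{
    using (IRandomAccessStream stream = await photoFile.OpenAsync(FileAccessMode.Read))
    {
        IList<DetectedFace> faces = await DetectFaceAsync(stream);
        return faces.Count;
    }
}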
public static void ImageToFace()
{
    //Console.WriteLine("Detecting face..");
    Task.Run(async () =>
    {
        var faceDetector = await FaceDetector.CreateAsync();
        var screenBitmap = GetBitmapFromScreen();
        var softwareBitmap = await GetSoftwareBitmapFromBitmap(screenBitmap);

        if (!FaceDetector.IsBitmapPixelFormatSupported(softwareBitmap.BitmapPixelFormat))
        {
            //Console.WriteLine("Converting to supported bitmap pixel format..");
            //Console.WriteLine("srcBitmap Width={0}, Height={1}", screenBitmap.Width, screenBitmap.Height);
            //Console.WriteLine("dstBitmap Width={0}, Height={1}", softwareBitmap.PixelWidth, softwareBitmap.PixelHeight);
            softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, FaceDetector.GetSupportedBitmapPixelFormats().First());
            //Console.WriteLine("Converted successfully");
        }

        //Console.WriteLine(screenBitmap.PixelFormat);
        //Console.WriteLine(softwareBitmap.BitmapPixelFormat);
        screenBitmap = await GetBitmapFromSoftwareBitmap(softwareBitmap);
        //Console.WriteLine(screenBitmap.PixelFormat);
        //Console.WriteLine(softwareBitmap.BitmapPixelFormat);

        using (var g = Graphics.FromImage(screenBitmap))
        {
            var detectedFaces = await faceDetector.DetectFacesAsync(softwareBitmap);
            //Console.WriteLine("Detected faces: {0}", detectedFaces.Count);
            foreach (var detectedFace in detectedFaces)
            {
                var facebox = detectedFace.FaceBox;
                g.DrawRectangle(Pens.Red, new Rectangle((int)facebox.X, (int)facebox.Y, (int)facebox.Width, (int)facebox.Height));
                //Console.WriteLine("Face at X={0}, Y={1}, Width={2}, Height={3}", facebox.X, facebox.Y, facebox.Width, facebox.Height);
            }
        }
        //screenBitmap.Save("screenbitmap" + DateTime.Now.Ticks + ".png", ImageFormat.Png);
    }).Wait();
}
/// <summary>
/// Performs face detection.
/// </summary>
public override void DetectFace()
{
    AppCallbacks.Instance.InvokeOnUIThread(async () =>
    {
        var properties = _capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
        if (properties == null)
        {
            return;
        }

        // Caution: FaceDetector only accepts a limited set of pixel formats, such as BitmapPixelFormat.Gray8.
        var videoFrame = new VideoFrame(BitmapPixelFormat.Gray8, (int)properties.Width, (int)properties.Height);
        this.FrameSizeWidth = (int)properties.Width;
        this.FrameSizeHeight = (int)properties.Height;

        var previewFrame = await _capture.GetPreviewFrameAsync(videoFrame);
        var detector = await FaceDetector.CreateAsync();
        var detectFaces = await detector.DetectFacesAsync(previewFrame.SoftwareBitmap);
        var faceInformations = detectFaces.Select(x => new FaceInformation
        {
            X = x.FaceBox.X,
            Y = x.FaceBox.Y,
            Width = x.FaceBox.Width,
            Height = x.FaceBox.Height
        }).ToList();

        AppCallbacks.Instance.InvokeOnAppThread(() => { OnDetected(faceInformations); }, false);

        videoFrame.Dispose();
        previewFrame.Dispose();
    }, true);
}
public async Task<RecogniseResult> Recognise(Stream fileStream)
{
    var randomAccessStream = fileStream.AsRandomAccessStream();
    var bitmapDecoder = await BitmapDecoder.CreateAsync(randomAccessStream);
    var rawBitmap = await bitmapDecoder.GetSoftwareBitmapAsync();

    var supportedBitmapFormats = FaceDetector.GetSupportedBitmapPixelFormats();
    var supportedFormatBitmap = SoftwareBitmap.Convert(rawBitmap, supportedBitmapFormats.First());

    var faceDetector = await FaceDetector.CreateAsync();
    var faces = await faceDetector.DetectFacesAsync(supportedFormatBitmap);

    var result = new RecogniseResult();
    if (faces.Any())
    {
        result.Faces = faces.Count();
        var memoryStream = new InMemoryRandomAccessStream();
        var bitmapEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, memoryStream);
        bitmapEncoder.SetSoftwareBitmap(rawBitmap);
        bitmapEncoder.BitmapTransform.Bounds = faces.First().FaceBox;
        await bitmapEncoder.FlushAsync();
        result.FirstFace = memoryStream.AsStream();
    }
    return result;
}
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    PaintingCanvas.Background = null;
    _state = StreamingState.Idle;

    if (_faceDetector == null)
    {
        _faceDetector = await FaceDetector.CreateAsync();
    }

    if (_faceApiHelper == null)
    {
        try
        {
            _faceApiHelper = new FaceApiHelper();
            await _faceApiHelper.CheckGroupExistAsync();
        }
        catch (FaceAPIException faceEx)
        {
            ShowAlertHelper.ShowDialog(faceEx.ErrorMessage, faceEx.ErrorCode);
        }
        catch (Exception ex)
        {
            ShowAlertHelper.ShowDialog(ex.Message);
        }
    }
}
public async Task<IList<DetectedFace>> GetFaces()
{
    // Use FaceDetector.GetSupportedBitmapPixelFormats and IsBitmapPixelFormatSupported to
    // dynamically determine supported formats.
    const BitmapPixelFormat faceDetectionPixelFormat = BitmapPixelFormat.Gray8;

    SoftwareBitmap convertedBitmap;
    if (image.BitmapPixelFormat != faceDetectionPixelFormat)
    {
        convertedBitmap = SoftwareBitmap.Convert(image, faceDetectionPixelFormat);
    }
    else
    {
        convertedBitmap = image;
    }

    using (convertedBitmap)
    {
        FaceDetector faceDetector = await FaceDetector.CreateAsync();
        IList<DetectedFace> detectedFaces = await faceDetector.DetectFacesAsync(convertedBitmap);
        return detectedFaces;
    }
}
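// The comment in GetFaces suggests negotiating the pixel format dynamically rather than
// hard-coding Gray8. A minimal sketch of that approach, assuming System.Linq is available;
// EnsureSupportedFormat is a hypothetical helper name.
private static SoftwareBitmap EnsureSupportedFormat(SoftwareBitmap source)
{
    if (FaceDetector.IsBitmapPixelFormatSupported(source.BitmapPixelFormat))
    {
        return source;
    }
    // Fall back to the first pixel format the detector reports as supported.
    return SoftwareBitmap.Convert(source, FaceDetector.GetSupportedBitmapPixelFormats().First());
}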
private async Task InitializeFaceDetection()
{
    if (FaceDetector.IsSupported)
    {
        if (_faceDetector == null)
        {
            _faceDetector = await FaceDetector.CreateAsync();
            _faceDectorSupportedPixelFormat = FaceDetector.GetSupportedBitmapPixelFormats().FirstOrDefault();
        }
    }
    else
    {
        Debug.WriteLine("Face detection is not supported");
    }

    if (FaceTracker.IsSupported)
    {
        if (_faceTracker == null)
        {
            _faceTracker = await FaceTracker.CreateAsync();
            _faceTrackerSupportedPixelFormat = FaceTracker.GetSupportedBitmapPixelFormats().FirstOrDefault();
        }
    }
    else
    {
        Debug.WriteLine("Face tracking is not supported");
    }
}
public LocalFaceTracker(int nIters, float confidenceThreshold)
{
    this.confidenceThreshold = confidenceThreshold;
    this.nIters = nIters;
    // Constructors cannot be async, so block synchronously on detector creation here.
    faceDetector = FaceDetector.CreateAsync().AsTask().Result;
    faceAligner = new FaceAligner(Application.dataPath + "/StreamingAssets/LocalFaceTracker/", nIters);
}
private async Task InitializeFaceDetection()
{
    if (FaceDetector.IsSupported)
    {
        if (faceDetector == null)
        {
            faceDetector = await FaceDetector.CreateAsync();
            faceDetectorSupportedPixelFormat = FaceDetector.GetSupportedBitmapPixelFormats().FirstOrDefault();
        }
    }
    else
    {
        Debug.WriteLine("Warning. FaceDetector is not supported on this device");
    }

    if (FaceTracker.IsSupported)
    {
        if (faceTracker == null)
        {
            faceTracker = await FaceTracker.CreateAsync();
            faceTrackerSupportedPixelFormat = FaceTracker.GetSupportedBitmapPixelFormats().FirstOrDefault();
        }
    }
    else
    {
        Debug.WriteLine("Warning. FaceTracking is not supported on this device");
    }
}
protected async override void OnNavigatedTo(NavigationEventArgs e)
{
    // Load the model.
    var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///emotion_ferplus.onnx"));
    model = await emotion_ferplusModel.CreateFromStreamAsync(file);

    labels = new List<string>() { "Neutral", "Happiness", "Surprise", "Sadness", "Anger", "Disgust", "Fear", "Contempt" };
    currentEmotionIndex = 1; // Happiness
    EmotionText.Text = $"Show {labels[currentEmotionIndex]} to Snooze";

    faceDetector = await FaceDetector.CreateAsync();

    timer = new DispatcherTimer();
    timer.Interval = TimeSpan.FromMilliseconds(300);
    timer.Tick += Timer_Tick;
    timer.Start();

    await camera.StartAsync();
    camera.CameraHelper.FrameArrived += Preview_FrameArrived;
}
/// <summary>
/// Asynchronously initializes the webcam feed
/// </summary>
public async Task InitializeCameraAsync()
{
    if (MediaCapture == null)
    {
        var cameraDevice = await FindCameraDevice();
        if (cameraDevice == null)
        {
            // No camera found; report the error and break out of initialization.
            Debug.WriteLine("No camera found!");
            Initialized = false;
            return;
        }

        // Create MediaCapture initialization settings with the found webcam device.
        var settings = new MediaCaptureInitializationSettings { VideoDeviceId = cameraDevice.Id };

        MediaCapture = new MediaCapture();
        await MediaCapture.InitializeAsync(settings);
        faceDetector = await FaceDetector.CreateAsync();
        Initialized = true;
    }
}
async Task ProcessVideoFrame(SoftwareBitmap bitmap)
{
    if (this.faceDetector == null)
    {
        this.faceDetector = await FaceDetector.CreateAsync();
    }

    var results = await this.faceDetector.DetectFacesAsync(bitmap);
    var faceFound = results?.Count > 0;

    if (faceFound)
    {
        cameraControl.faceProcessingPaused = true;
        var user = await this.cameraControl.Snap();
        userName = await IdentifyUser(user);
        if (userName == "Lilian")
        {
            await GetEventsGmail();
        }
        await Task.Delay(5000);
        cameraControl.faceProcessingPaused = false;
    }
    else
    {
        userName = "";
        eventsList_txt.Text = "";
        IdentityTextBlock.Text = "";
    }
}
public static async Task<IEnumerable<DetectedFace>> DetectFacesAsync(SoftwareBitmap convertedBitmap, SoftwareBitmap sourceBitmap)
{
    var faceDetector = await FaceDetector.CreateAsync();
    return await faceDetector.DetectFacesAsync(convertedBitmap);
    //ShowDetectedFaces(sourceBitmap, detectedFaces);
}
/// <summary>
/// Responds when we navigate to this page.
/// </summary>
/// <param name="e">Event data</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    // The 'await' operator can only be used from within an async method, but class constructors
    // cannot be labeled as async, and so we'll initialize FaceDetector here.
    if (this.faceDetector == null)
    {
        this.faceDetector = await FaceDetector.CreateAsync();
    }
}
public async Task AttachAsync(ICameraService camera)
{
    _camera = camera;
    _faceDetector = await FaceDetector.CreateAsync();

    var timerInterval = TimeSpan.FromMilliseconds(300);
    _frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(ProcessCurrentVideoFrame, timerInterval);
}
private async Task InitializeAsync()
{
    Message = "Initializing..";
    faceDetector = await FaceDetector.CreateAsync();
    await mediaCapture.InitializeAsync();
    CaptureElement.Source = mediaCapture;
    await LoadDataAsync();
}
public static async Task<IList<DetectedFace>> FaceDetectAsync(this SoftwareBitmap source)
{
    var dest = source;
    var detector = await FaceDetector.CreateAsync();
    if (!FaceDetector.IsBitmapPixelFormatSupported(dest.BitmapPixelFormat))
    {
        dest = SoftwareBitmap.Convert(dest, BitmapPixelFormat.Gray8);
    }
    return await detector.DetectFacesAsync(dest);
}
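// Usage sketch for the extension method above; `frame` stands in for any SoftwareBitmap,
// e.g. one obtained from a BitmapDecoder or a camera preview frame.
IList<DetectedFace> faces = await frame.FaceDetectAsync();
foreach (DetectedFace face in faces)
{
    Debug.WriteLine($"Face at ({face.FaceBox.X}, {face.FaceBox.Y}), size {face.FaceBox.Width}x{face.FaceBox.Height}");
}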
public async Task<bool> InitializeAsync()
{
    bool modelLoaded = await LoadModelAsync();
    if (modelLoaded)
    {
        _faceDetector = await FaceDetector.CreateAsync();
        CameraService.Current.SoftwareBitmapFrameCaptured += Current_SoftwareBitmapFrameCaptured;
        _isInitialized = true;
    }
    return modelLoaded;
}
public async Task InitializeAsync()
{
    Message = "Initializing";
    devInfoCollection = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

    // Set up the SignalR client and subscribe to events.
    hubConnection = new HubConnection(Settings.HubUrl);
    hubProxy = hubConnection.CreateHubProxy(Settings.HubName);
    hubProxy.On("BroadcastStartQuestion", async () =>
    {
        await CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(
            CoreDispatcherPriority.Normal, async () => { await StartQuestionAsync(); });
    });
    hubProxy.On("BroadcastStopQuestion", async () =>
    {
        await CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(
            CoreDispatcherPriority.Normal, async () => { await StopQuestionAsync(); });
    });
    hubProxy.On("BroadcastClear", async () =>
    {
        await CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(
            CoreDispatcherPriority.Normal, async () => { await ClearInputAsync(); });
    });
    await hubConnection.Start();

    // Initialize the speech recognizer and subscribe to its events.
    speechRecognizer = new SpeechRecognizer(new Windows.Globalization.Language(Settings.SpeechLanguage));
    speechRecognizer.Timeouts.BabbleTimeout = TimeSpan.FromSeconds(25);
    speechRecognizer.Timeouts.InitialSilenceTimeout = TimeSpan.FromSeconds(50);
    speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromMilliseconds(50);
    speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
    speechRecognizer.HypothesisGenerated += SpeechRecognizer_HypothesisGenerated;
    speechRecognizer.ContinuousRecognitionSession.Completed += ContinuousRecognitionSession_Completed;
    await speechRecognizer.CompileConstraintsAsync();

    // Initialize video and start the preview.
    await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings()
    {
        VideoDeviceId = devInfoCollection.Last().Id
    });
    CaptureElement.Source = mediaCapture;
    await mediaCapture.StartPreviewAsync();

    faceDetector = await FaceDetector.CreateAsync();
    Identify();
    GetEmotion();
}
private async Task<string> DetectEmotionWithWinML()
{
    var videoFrame = lastFrame;
    if (faceDetector == null)
    {
        faceDetector = await FaceDetector.CreateAsync();
    }

    var detectedFaces = await faceDetector.DetectFacesAsync(videoFrame.SoftwareBitmap);
    if (detectedFaces != null && detectedFaces.Any())
    {
        // Crop to the largest detected face before running the emotion model.
        var face = detectedFaces.OrderByDescending(s => s.FaceBox.Height * s.FaceBox.Width).First();
        using (var randomAccessStream = new InMemoryRandomAccessStream())
        {
            var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, randomAccessStream);
            var softwareBitmap = SoftwareBitmap.Convert(videoFrame.SoftwareBitmap, BitmapPixelFormat.Rgba16);
            Debug.WriteLine(softwareBitmap.BitmapPixelFormat);
            encoder.SetSoftwareBitmap(softwareBitmap);
            encoder.BitmapTransform.Bounds = new BitmapBounds
            {
                X = face.FaceBox.X,
                Y = face.FaceBox.Y,
                Width = face.FaceBox.Width,
                Height = face.FaceBox.Height
            };
            await encoder.FlushAsync();

            var decoder = await BitmapDecoder.CreateAsync(randomAccessStream);
            var croppedImage = await decoder.GetSoftwareBitmapAsync(softwareBitmap.BitmapPixelFormat, softwareBitmap.BitmapAlphaMode);
            videoFrame = VideoFrame.CreateWithSoftwareBitmap(croppedImage);
        }
    }

    var input = ImageFeatureValue.CreateFromVideoFrame(videoFrame);
    var emotion = await model.EvaluateAsync(new FER_Emotion_RecognitionInput() { Input3 = input });
    var list = new List<float>(emotion.Plus692_Output_0.GetAsVectorView());
    var index = list.IndexOf(list.Max());
    var label = labels[index];
    return label;
}
public async Task RegisterFace(Uri uri, string name)
{
    if (faceDetector == null)
    {
        faceDetector = await FaceDetector.CreateAsync();
    }

    var stream = await http.GetStreamAsync(uri);
    var memStream = new MemoryStream();
    await stream.CopyToAsync(memStream);
    memStream.Position = 0;

    var decoder = await BitmapDecoder.CreateAsync(memStream.AsRandomAccessStream());
    var bitmap = await decoder.GetSoftwareBitmapAsync();

    /*
    var faceBounds = await FindFace(bitmap);
    if (!faceBounds.HasValue)
    {
        System.Diagnostics.Debug.WriteLine("More than or less than one face found in training image");
        return;
    }
    */

    // https://forums.xamarin.com/discussion/63447/cannot-crop-or-resize-images-on-windows-universal-uwp
    var dstStream = new InMemoryRandomAccessStream();
    //var encoder = await BitmapEncoder.CreateForTranscodingAsync(dstStream, decoder);
    var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, dstStream);
    //encoder.BitmapTransform.Bounds = faceBounds.Value;
    encoder.SetSoftwareBitmap(bitmap);
    await encoder.FlushAsync();
    dstStream.Seek(0);

    var imgBuffer = new byte[dstStream.Size];
    await dstStream.ReadAsync(imgBuffer.AsBuffer(), (uint)dstStream.Size, InputStreamOptions.None);

    /*
    // Uncomment to dump files to disk for debugging.
    var file = await Windows.Storage.ApplicationData.Current.LocalFolder.CreateFileAsync($"test-{name}.jpg", Windows.Storage.CreationCollisionOption.ReplaceExisting);
    using (var outStream = await file.OpenStreamForWriteAsync())
    {
        await outStream.WriteAsync(imgBuffer, 0, imgBuffer.Length);
    }
    */

    server.SaveFace(imgBuffer, name, true, null);
}
private static async Task<UnityContainer> InitializeDiContainer()
{
    UnityContainer container = new UnityContainer();
    container.RegisterType<IConfiguration, StaticConfiguration>();
    container.RegisterType<IDeviceConfiguration, DeviceConfiguration>();
    container.RegisterInstance(typeof(EasClientDeviceInformation), new EasClientDeviceInformation());
    container.RegisterType<IImagePersiter, AzureImagePersister>();
    container.RegisterType<IImageFilter, LocalFaceDetector>();
    container.RegisterInstance(typeof(FaceDetector), FaceDetector.IsSupported ? await FaceDetector.CreateAsync() : null);
    return container;
}
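// Usage sketch: resolving a registered service from the container built above. Note that the
// FaceDetector registration may hold null on devices where FaceDetector.IsSupported is false,
// so consumers should be prepared for that. Resolve<T> is the standard Unity container API.
var container = await InitializeDiContainer();
var imageFilter = container.Resolve<IImageFilter>();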
public async Task<IList<DetectedFace>> DetectFaces(Stream fileStream)
{
    var stream = fileStream.AsRandomAccessStream();
    var bitmapDecoder = await BitmapDecoder.CreateAsync(stream);
    using SoftwareBitmap bitmap = await bitmapDecoder.GetSoftwareBitmapAsync();

    var bmp = FaceDetector.IsBitmapPixelFormatSupported(bitmap.BitmapPixelFormat)
        ? bitmap
        : SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Gray8);

    var faceDetector = await FaceDetector.CreateAsync();
    var detectedFaces = await faceDetector.DetectFacesAsync(bmp);
    return detectedFaces;
}
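// Usage sketch (illustrative file path and calling context assumed): any System.IO stream works,
// since AsRandomAccessStream bridges it to the WinRT decoder.
using (FileStream fs = File.OpenRead("portrait.jpg"))
{
    IList<DetectedFace> detected = await DetectFaces(fs);
    Console.WriteLine($"{detected.Count} face(s) found.");
}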
async Task ProcessVideoFrame(SoftwareBitmap bitmap)
{
    if (this.faceDetector == null)
    {
        this.faceDetector = await FaceDetector.CreateAsync();
    }

    var results = await this.faceDetector.DetectFacesAsync(bitmap);
    // The null-conditional operator yields false here when results is null.
    var showVideo = results?.Count > 0;

    this.myCameraControl.ShowCamera(true);
    if (showVideo)
    {
        this.myCameraControl.HighlightFace(results[0].FaceBox);
    }
}
private async void DetectFaces()
{
    if (file != null)
    {
        // Open the image file and decode the bitmap into memory.
        // We'll need two bitmap copies: one for the FaceDetector and another to display.
        using (IRandomAccessStream fileStream = await file.OpenAsync(FileAccessMode.Read))
        {
            BitmapDecoder decoder = await BitmapDecoder.CreateAsync(fileStream);
            BitmapTransform transform = this.ComputeScalingTransformForSourceImage(decoder);
            using (SoftwareBitmap originalBitmap = await decoder.GetSoftwareBitmapAsync(decoder.BitmapPixelFormat, BitmapAlphaMode.Ignore, transform, ExifOrientationMode.IgnoreExifOrientation, ColorManagementMode.DoNotColorManage))
            {
                // We need to convert the image into a format that's compatible with FaceDetector.
                // Gray8 should be a good type, but verify it against FaceDetector's supported formats.
                const BitmapPixelFormat InputPixelFormat = BitmapPixelFormat.Gray8;
                if (FaceDetector.IsBitmapPixelFormatSupported(InputPixelFormat))
                {
                    using (detectorInput = SoftwareBitmap.Convert(originalBitmap, InputPixelFormat))
                    {
                        // Create a WriteableBitmap for our visualization display; copy the original bitmap pixels into its buffer.
                        displaySource = new WriteableBitmap(originalBitmap.PixelWidth, originalBitmap.PixelHeight);
                        originalBitmap.CopyToBuffer(displaySource.PixelBuffer);

                        NotifyUser("Detecting...", NotifyType.StatusMessage);

                        // Initialize our FaceDetector and execute it against our input image.
                        // NOTE: FaceDetector initialization can take a long time, and in most cases
                        // you should create a member variable and reuse the object.
                        // However, for simplicity in this scenario we instantiate a new instance each time.
                        FaceDetector detector = await FaceDetector.CreateAsync();
                        faces = await detector.DetectFacesAsync(detectorInput);

                        // Create our display using the available image and face results.
                        DrawDetectedFaces(displaySource, faces);
                    }
                }
                else
                {
                    NotifyUser("PixelFormat '" + InputPixelFormat.ToString() + "' is not supported by FaceDetector", NotifyType.ErrorMessage);
                }
            }
        }
    }
}
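// ComputeScalingTransformForSourceImage is referenced above but not shown; a minimal sketch,
// assuming the same height-capping strategy other samples in this section use (scale the image
// down if it is taller than 1280 pixels, preserving the aspect ratio):
private BitmapTransform ComputeScalingTransformForSourceImage(BitmapDecoder sourceDecoder)
{
    var transform = new BitmapTransform();
    const float sourceImageHeightLimit = 1280;
    if (sourceDecoder.PixelHeight > sourceImageHeightLimit)
    {
        float scalingFactor = sourceImageHeightLimit / sourceDecoder.PixelHeight;
        transform.ScaledWidth = (uint)Math.Floor(sourceDecoder.PixelWidth * scalingFactor);
        transform.ScaledHeight = (uint)Math.Floor(sourceDecoder.PixelHeight * scalingFactor);
    }
    return transform;
}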
async Task OnProcessFrameAsync(SoftwareBitmap bitmap)
{
    if (this.faceDetector == null)
    {
        this.faceDetector = await FaceDetector.CreateAsync();
    }

    var faces = await this.faceDetector.DetectFacesAsync(bitmap);
    this.cameraDisplay.ShowCamera(faces.Count > 0);

    if (faces.Count > 0)
    {
        foreach (var face in faces)
        {
            this.cameraDisplay.HighlightFace(face.FaceBox);
        }
    }
}
/// <summary>
/// Initializes a new MediaCapture instance and starts streaming the preview to the CamPreview UI element.
/// </summary>
/// <returns>Async Task object returning true if initialization and streaming were successful and false if an exception occurred.</returns>
private async Task<bool> StartWebcamStreamingAsync()
{
    bool successful = false;

    this.faceDetector = await FaceDetector.CreateAsync();

    try
    {
        this.mediaCapture = new MediaCapture();

        // For this scenario, we only need Video (not microphone) so specify this in the initializer.
        // NOTE: the appxmanifest only declares "webcam" under capabilities, and if this is changed to include
        // microphone (default constructor) you must add "microphone" to the manifest or initialization will fail.
        MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
        settings.StreamingCaptureMode = StreamingCaptureMode.Video;
        await this.mediaCapture.InitializeAsync(settings);
        this.mediaCapture.CameraStreamStateChanged += this.MediaCapture_CameraStreamStateChanged;

        // Cache the media properties as we'll need them later.
        var deviceController = this.mediaCapture.VideoDeviceController;
        this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

        // Immediately start streaming to our CaptureElement UI.
        // NOTE: CaptureElement's Source must be set before streaming is started.
        this.CamPreview.Source = this.mediaCapture;
        await this.mediaCapture.StartPreviewAsync();

        successful = true;
    }
    catch (System.UnauthorizedAccessException)
    {
        // Thrown when the user has disabled their webcam; provide a descriptive message to inform the user of this fact.
        this.rootPage.NotifyUser("Webcam is disabled or access to the webcam is disabled for this app.\nEnsure Privacy Settings allow webcam usage.", NotifyType.ErrorMessage);
    }
    catch (Exception ex)
    {
        this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
    }

    return successful;
}
public async Task<int> DetectFacesAsync(byte[] photoByteArray)
{
    BitmapDecoder decoder = await BitmapDecoder.CreateAsync(photoByteArray.ToRandomAccessMemory());

    BitmapTransform transform = new BitmapTransform();
    const float sourceImageHeightLimit = 1280;
    if (decoder.PixelHeight > sourceImageHeightLimit)
    {
        float scalingFactor = (float)sourceImageHeightLimit / (float)decoder.PixelHeight;
        transform.ScaledWidth = (uint)Math.Floor(decoder.PixelWidth * scalingFactor);
        transform.ScaledHeight = (uint)Math.Floor(decoder.PixelHeight * scalingFactor);
    }

    SoftwareBitmap sourceBitmap = await decoder.GetSoftwareBitmapAsync(decoder.BitmapPixelFormat, BitmapAlphaMode.Premultiplied, transform, ExifOrientationMode.IgnoreExifOrientation, ColorManagementMode.DoNotColorManage);

    SoftwareBitmap convertedBitmap = sourceBitmap;
    if (sourceBitmap.BitmapPixelFormat != faceDetectionPixelFormat)
    {
        convertedBitmap = SoftwareBitmap.Convert(sourceBitmap, faceDetectionPixelFormat);
    }

    FaceDetector detector = await FaceDetector.CreateAsync();
    IList<DetectedFace> faces = await detector.DetectFacesAsync(convertedBitmap);

    /*
    ICollection<System.Drawing.Rectangle> rectangles = new List<System.Drawing.Rectangle>();
    foreach (DetectedFace face in faces)
        rectangles.Add(new System.Drawing.Rectangle(Convert.ToInt32(face.FaceBox.X), Convert.ToInt32(face.FaceBox.Y), Convert.ToInt32(face.FaceBox.Width), Convert.ToInt32(face.FaceBox.Height)));
    */

    sourceBitmap.Dispose();
    convertedBitmap.Dispose();

    return faces.Count;
}
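// ToRandomAccessMemory is a project-local extension not shown here; a plausible minimal sketch,
// assuming System.Runtime.InteropServices.WindowsRuntime for AsBuffer(). Blocking on the write
// keeps the synchronous signature the call site above expects.
public static IRandomAccessStream ToRandomAccessMemory(this byte[] bytes)
{
    var stream = new InMemoryRandomAccessStream();
    stream.WriteAsync(bytes.AsBuffer()).AsTask().GetAwaiter().GetResult();
    stream.Seek(0);
    return stream;
}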
/// <summary>
/// Creates and initializes a FaceSentimentAnalyzerSkill instance
/// </summary>
/// <param name="descriptor"></param>
/// <param name="device"></param>
/// <returns></returns>
internal static IAsyncOperation<FaceSentimentAnalyzerSkill> CreateAsync(
    ISkillDescriptor descriptor,
    ISkillExecutionDevice device)
{
    return AsyncInfo.Run(async (token) =>
    {
        // Create instance
        var skillInstance = new FaceSentimentAnalyzerSkill(descriptor, device);

        // Instantiate the FaceDetector
        skillInstance.m_faceDetector = await FaceDetector.CreateAsync();

        // Load the WinML model
        var modelFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///Contoso.FaceSentimentAnalyzer/{FaceSentimentAnalyzerConst.WINML_MODEL_FILENAME}"));
        var winmlModel = LearningModel.LoadFromFilePath(modelFile.Path);

        // Create the WinML session
        skillInstance.m_winmlSession = new LearningModelSession(winmlModel, GetWinMLDevice(device));

        return skillInstance;
    });
}
public async Task<Rect?> ProcessCameraFrameAsync(SoftwareBitmap bitmap)
{
    if (this.faceDetector == null)
    {
        this.faceDetector = await FaceDetector.CreateAsync();
    }

    var result = await this.faceDetector.DetectFacesAsync(bitmap);
    this.photoControl.Switch(result?.Count > 0);

    Rect? returnValue = null;
    if (result?.Count > 0)
    {
        // Normalize the first face box to the frame dimensions (values in [0,1]).
        returnValue = new Rect(
            (double)result[0].FaceBox.X / bitmap.PixelWidth,
            (double)result[0].FaceBox.Y / bitmap.PixelHeight,
            (double)result[0].FaceBox.Width / bitmap.PixelWidth,
            (double)result[0].FaceBox.Height / bitmap.PixelHeight);
    }
    return returnValue;
}
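// Usage sketch: because ProcessCameraFrameAsync returns a face box normalized to [0,1], an
// overlay rectangle is recovered by multiplying back by the display surface's dimensions
// (`overlayWidth` and `overlayHeight` are hypothetical values supplied by the UI layer).
Rect? normalized = await ProcessCameraFrameAsync(bitmap);
if (normalized.HasValue)
{
    Rect r = normalized.Value;
    var overlayRect = new Rect(r.X * overlayWidth, r.Y * overlayHeight,
                               r.Width * overlayWidth, r.Height * overlayHeight);
}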