// Initializes the proxy: creates the FaceTracker, wires up the visualization canvas and
// capture objects, and starts the periodic frame-processing timer.
// NOTE(review): blocking on .Result here (constructors cannot be async) can deadlock when
// constructed on a thread with a captured synchronization context — consider an async
// factory method instead.
public FaceTrackerProxy(Canvas canvas, MainPage page, CaptureElement capture, MediaCapture mediacapture)
{
    if (this.faceTracker == null)
    {
        this.faceTracker = FaceTracker.CreateAsync().AsTask().Result;
    }

    rootPage = page;
    VisualizationCanvas = canvas;
    this.VisualizationCanvas.Children.Clear();

    mediaCapture = mediacapture;

    // Cache the preview stream's encoding properties for later frame processing.
    var deviceController = mediaCapture.VideoDeviceController;
    this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
    currentState = ScenarioState.Streaming;

    // Ensure the Semaphore is in the signalled state.
    this.frameProcessingSemaphore.Release();

    // Use a 200 millisecond interval for our timer, i.e. 5 frames per second.
    // (An earlier revision used 66 ms / 15 fps; the comment had gone stale.)
    TimeSpan timerInterval = TimeSpan.FromMilliseconds(200);
    this.frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
}
/// <summary>
/// Creates the face tracker and the video processor, then starts the background
/// frame-processing loop.
/// </summary>
/// <returns>Always 0; the value is unused but kept for interface compatibility.</returns>
public async Task <int> InitFacialeRecon()
{
    this.faceTracker = await FaceTracker.CreateAsync();
    Debug.WriteLine("Face tracker initialized!");

    this.videoProcessor = await VideoProcessor.CreateAsync();

    // Fire-and-forget background processing loop; the discard makes the intent explicit.
    // NOTE(review): exceptions thrown inside ProcessingWork() are unobserved — consider
    // logging inside ProcessingWork or keeping the Task and observing it on shutdown.
    _ = Task.Run(() => ProcessingWork());

    return 0;
}
/// <summary>
/// Asynchronously creates a <see cref="FaceTrackerProcessor"/> bound to the given
/// video processor (the FaceTracker itself can only be created asynchronously).
/// </summary>
/// <param name="processor">Video processor that will feed frames to the tracker.</param>
public static async Task <FaceTrackerProcessor> CreateAsync(VideoProcessor processor)
{
    // Fixed log typo ("FaceTRackerProcessor") and modifier order (static async).
    Debug.WriteLine("FaceTrackerProcessor.CreateAsync() called !");
    FaceTracker tracker = await FaceTracker.CreateAsync();
    return new FaceTrackerProcessor(tracker, processor);
}
/// <summary>
/// Page Loaded handler: initializes the face tracker, wires up the speech engine
/// events, and starts continuous recognition. async void is acceptable here only
/// because this is a top-level event handler.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Routed event data.</param>
async void OnLoaded(object sender, RoutedEventArgs e)
{
    _messageLabel.Text = "Hello";

    _faceTracker = await FaceTracker.CreateAsync();

    _speechEngine.PhraseRecognized += OnspeechEnginePhraseRecognized;
    _speechEngine.StateChanged += OnSpeechEngineStateChanged;
    await _speechEngine.StartContinuousRecognitionAsync();
}
/// <summary>
/// Lazily creates the FaceTracker when the page is navigated to; class
/// constructors cannot be async, so initialization happens here.
/// </summary>
/// <param name="e">Navigation event data.</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    // Preserve the base Page navigation behavior (was previously skipped).
    base.OnNavigatedTo(e);

    if (this.faceTracker == null)
    {
        this.faceTracker = await FaceTracker.CreateAsync();
    }
}
/// <summary>
/// On navigation: resets streaming state, lazily creates the FaceTracker, and
/// initializes the Face API / Event Hub helpers, surfacing any failure in a dialog.
/// </summary>
/// <param name="e">Navigation event data.</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    _state = StreamingState.Idle;

    if (_faceTracker == null)
    {
        _faceTracker = await FaceTracker.CreateAsync();
    }

    if (_faceApiHelper == null)
    {
        try
        {
            _faceApiHelper = new FaceApiHelper();
            _eventHubHelper = new EventHubHelper();
            // Not needed (original comment: "用不到"):
            //await _faceApiHelper.CheckGroupExistAsync();
        }
        catch (Microsoft.ProjectOxford.Face.FaceAPIException faceEx)
        {
            // Face API failures carry a service error code worth showing.
            ShowErrorHelper.ShowDialog(faceEx.ErrorMessage, faceEx.ErrorCode);
        }
        catch (Microsoft.Azure.EventHubs.EventHubsException eventhubEx)
        {
            ShowErrorHelper.ShowDialog(eventhubEx.Message);
        }
        catch (Exception ex)
        {
            ShowErrorHelper.ShowDialog(ex.Message);
        }
    }
}
/// <summary>
/// Lazily creates the FaceTracker. Returns a <see cref="Task"/> instead of the
/// original async void so callers can await completion and observe exceptions.
/// </summary>
private async Task facetrack()
{
    if (this.faceTracker == null)
    {
        this.faceTracker = await FaceTracker.CreateAsync();
    }
}
/// <summary>
/// Starts (or restarts) the webcam preview stream, optionally configuring the
/// highest resolution for real-time processing, and — when face tracking or a
/// frame processor is enabled — starts a ~15 fps frame-processing timer.
/// </summary>
/// <param name="isForRealTimeProcessing">Forwarded to SetVideoEncodingToHighestResolution.</param>
public async Task StartStreamAsync(bool isForRealTimeProcessing = false)
{
    try
    {
        // (Re)create the capture manager when it is missing or no longer streaming.
        if (captureManager == null ||
            captureManager.CameraStreamState == CameraStreamState.Shutdown ||
            captureManager.CameraStreamState == CameraStreamState.NotStreaming)
        {
            if (captureManager != null)
            {
                captureManager.Dispose();
            }

            captureManager = new MediaCapture();

            MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();

            // Prefer the camera chosen in settings; otherwise fall back to the default device.
            var allCameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
            var selectedCamera = allCameras.FirstOrDefault(c => c.Name == SettingsHelper.Instance.CameraName);
            if (selectedCamera != null)
            {
                settings.VideoDeviceId = selectedCamera.Id;
            }

            await captureManager.InitializeAsync(settings);
            await SetVideoEncodingToHighestResolution(isForRealTimeProcessing);

            this.webCamCaptureElement.Source = captureManager;
        }

        if (captureManager.CameraStreamState == CameraStreamState.NotStreaming)
        {
            if (PerformFaceTracking || CameraFrameProcessor != null)
            {
                if (this.faceTracker == null)
                {
                    this.faceTracker = await FaceTracker.CreateAsync();
                }

                // Cancel any previous timer before starting a new one, and release the
                // semaphore so the next frame callback is not left blocked.
                if (this.frameProcessingTimer != null)
                {
                    this.frameProcessingTimer.Cancel();
                    frameProcessingSemaphore.Release();
                }
                TimeSpan timerInterval = TimeSpan.FromMilliseconds(66); //15fps
                this.frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
            }

            // Cache the preview stream's encoding properties for frame processing.
            this.videoProperties = this.captureManager.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

            await captureManager.StartPreviewAsync();

            this.cameraControlSymbol.Symbol = Symbol.Camera;
            this.webCamCaptureElement.Visibility = Visibility.Visible;
        }
    }
    catch (Exception ex)
    {
        await Util.GenericApiCallExceptionHandler(ex, "Error starting the camera.");
    }
}
/// <summary>
/// Lazily creates the FaceDetector and FaceTracker (when the platform supports
/// them) and caches each engine's preferred bitmap pixel format for frame conversion.
/// </summary>
private async Task InitializeFaceDetection()
{
    if (FaceDetector.IsSupported)
    {
        if (_faceDetector == null)
        {
            _faceDetector = await FaceDetector.CreateAsync();
            _faceDectorSupportedPixelFormat = FaceDetector.GetSupportedBitmapPixelFormats().FirstOrDefault();
        }
    }
    else
    {
        Debug.WriteLine("Face detection is not supported");
    }

    if (FaceTracker.IsSupported)
    {
        if (_faceTracker == null)
        {
            _faceTracker = await FaceTracker.CreateAsync();
            _faceTrackerSupportedPixelFormat = FaceTracker.GetSupportedBitmapPixelFormats().FirstOrDefault();
        }
    }
    else
    {
        // Fixed log typo: "suppoted" -> "supported".
        Debug.WriteLine("Face tracking is not supported");
    }
}
/// <summary>
/// Sets up the on-device face detection and face tracking engines, caching the
/// pixel format each engine prefers. Logs a warning when an engine is unavailable.
/// </summary>
private async Task InitializeFaceDetection()
{
    if (!FaceDetector.IsSupported)
    {
        Debug.WriteLine("Warning. FaceDetector is not supported on this device");
    }
    else if (faceDetector == null)
    {
        faceDetector = await FaceDetector.CreateAsync();
        faceDetectorSupportedPixelFormat = FaceDetector.GetSupportedBitmapPixelFormats().FirstOrDefault();
    }

    if (!FaceTracker.IsSupported)
    {
        Debug.WriteLine("Warning. FaceTracking is not supported on this device");
    }
    else if (faceTracker == null)
    {
        faceTracker = await FaceTracker.CreateAsync();
        faceTrackerSupportedPixelFormat = FaceTracker.GetSupportedBitmapPixelFormats().FirstOrDefault();
    }
}
/// <summary>
/// Binds this processor to a camera, builds the FaceTracker, and starts a
/// 300 ms polling timer that drives frame processing.
/// </summary>
/// <param name="camera">Camera service supplying frames.</param>
public async Task AttachAsync(ICameraService camera)
{
    _camera = camera;
    _faceTracker = await FaceTracker.CreateAsync();

    // Create the timer first, then arm it with Change, so the field is
    // assigned before the first callback can fire.
    _frameProcessingTimer = new Timer(state => ProcessCurrentVideoFrame());
    _frameProcessingTimer.Change(0, 300);
}
// Initial camera capture: initializes the MediaCapture device, then lazily
// creates the FaceTracker (constructors cannot await, so creation happens here).
private async void InitializeCapture()
{
    await mediaCapture.InitializeAsync();

    if (this.faceTracker is null)
    {
        this.faceTracker = await FaceTracker.CreateAsync();
    }
}
/// <summary>
/// Responds when we navigate to this page.
/// </summary>
/// <param name="e">Event data</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    // Class constructors cannot be async, so the FaceTracker is created lazily
    // here, the first time this page is navigated to.
    if (this.faceTracker != null)
    {
        return;
    }

    this.faceTracker = await FaceTracker.CreateAsync();
}
/// <summary>
/// Page Loaded handler: creates the FaceTracker on first load, then
/// initializes the camera.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Routed event data.</param>
private async void MainPage_Loaded(object sender, RoutedEventArgs e)
{
    if (faceTracker is null)
    {
        faceTracker = await FaceTracker.CreateAsync();
    }

    // Initialize the camera.
    await InitCameraAsync();
}
//</SnippetClassVariables3>
/// <summary>
/// Creates the FaceTracker and starts a periodic thread-pool timer that invokes
/// ProcessCurrentVideoFrame roughly 15 times per second.
/// NOTE(review): async void — any exception from CreateAsync is unobservable here.
/// </summary>
public async void TrackFaces()
{
    //<SnippetTrackingInit>
    this.faceTracker = await FaceTracker.CreateAsync();
    TimeSpan timerInterval = TimeSpan.FromMilliseconds(66); // 15 fps
    this.frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
    //</SnippetTrackingInit>
}
/// <summary>
/// Async factory: builds a FaceMatrix over the given capture stream, starts
/// its recognition loop, and returns it.
/// </summary>
/// <param name="mediaCapture">Capture stream to analyze.</param>
/// <param name="rowsCount">Number of matrix rows.</param>
/// <param name="columnsCount">Number of matrix columns.</param>
public static async Task <FaceMatrix> CreateAsync(MediaCapture mediaCapture, int rowsCount, int columnsCount)
{
    FaceTracker tracker = await FaceTracker.CreateAsync();

    var matrix = new FaceMatrix(tracker, mediaCapture, rowsCount, columnsCount);
    matrix.StartRecognitionLoop();

    return matrix;
}
/// <summary>
/// On navigation: ensures the FaceTracker exists (created lazily because
/// constructors cannot be async) and then starts the webcam preview stream.
/// </summary>
/// <param name="e">Navigation event data.</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    base.OnNavigatedTo(e);

    if (this.faceTracker is null)
    {
        this.faceTracker = await FaceTracker.CreateAsync();
    }

    await StartWebcamStreaming();
}
/// <summary>
/// Triggered every time the page is navigated to.
/// Refreshes the whitelisted visitors when the Face API has already been
/// initialized, then lazily creates the FaceTracker.
/// </summary>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    if (this.initializedFaceApi)
    {
        UpdateWhitelistedVisitors();
    }

    if (this.faceTracker != null)
    {
        return; // tracker was already created on a previous navigation
    }

    this.faceTracker = await FaceTracker.CreateAsync();
}
/// <summary>
/// Lazily creates the FaceTracker and starts the periodic frame-processing timer
/// at the configured frame rate.
/// </summary>
private async void InitializeFaceTracking()
{
    if (faceTracker == null)
    {
        faceTracker = await FaceTracker.CreateAsync();

        // Use floating-point division: if framesPerSecond is integral,
        // 1000 / framesPerSecond truncates (e.g. 1000 / 15 == 66, not 66.67),
        // slightly skewing the effective frame rate.
        var timerInterval = TimeSpan.FromMilliseconds(1000.0 / framesPerSecond); // gets us seconds/frame
        frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(ProcessVideoFrame), timerInterval);

        UpdateStatus("Face detection initiated...");
    }
}
/// <summary>
/// Page Loaded handler: loads the model, ensures the FaceTracker exists,
/// then initializes the camera.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Routed event data.</param>
private async void Page_Loaded(object sender, RoutedEventArgs e)
{
    // Load the model.
    await LoadModelAsysnc();

    // Create the FaceTracker object on first load.
    if (faceTracker is null)
    {
        faceTracker = await FaceTracker.CreateAsync();
    }

    // Initialize the camera.
    await InitCameraAsync();
}
/// <summary>
/// Responds when we navigate to this page.
/// Loads device settings, optionally verbalises system information, sets up
/// speech recognition when running in interactive (QnA) mode, and finally
/// creates the FaceTracker and moves the detection state machine to Startup.
/// </summary>
/// <param name="e">Event data</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    //get device settings here
    await UpdateDeviceSettings();

    this.vp = await Infrastructure.VoicePackageService.VoicePlayerFactory();

    // VerbaliseSystemInformation
    if (Settings.GetBool(DeviceSettingKeys.VerbaliseSystemInformationOnBootKey))
    {
        LogStatusMessage($"The IP address is: {GetLocalIp()}", StatusSeverity.Info, true);
        LogStatusMessage($"The exhibit is {Settings.GetString(DeviceSettingKeys.DeviceExhibitKey)}", StatusSeverity.Info, true);
        LogStatusMessage($"The device label is {Settings.GetString(DeviceSettingKeys.DeviceLabelKey)}", StatusSeverity.Info, true);
    }

    // Only check microphone enabled and create speech objects if we're running in interactive (QnA) mode
    if (Settings.GetBool(DeviceSettingKeys.InteractiveKey))
    {
        // Prompt for permission to access the microphone. This request will only happen
        // once, it will not re-prompt if the user rejects the permission.
        if (!await AudioCapturePermissions.RequestMicrophonePermission())
        {
            Say(AppSettings.GetString("MicrophonePrivacyDeclined"));
        }
        else
        {
            try
            {
                Debug.WriteLine($"Initialising speech recognizer");

                //This can fail randomly
                SpeechRecognizer = new SpeechRecognizer();
                SpeechRecognizer.Timeouts.InitialSilenceTimeout = TimeSpan.FromMilliseconds(NumberMilliSecsForSpeechRecognitionTimeout);
                await SpeechRecognizer.CompileConstraintsAsync();

                Debug.WriteLine($"Speech recognizer initialised");
            }
            catch (Exception exp)
            {
                Say($"There was an error initialising the speech recognizer: {exp.Message}");
            }
        }
    }

    // Lazily create the FaceTracker (constructors cannot be async) and kick off detection.
    if (faceTracker == null)
    {
        faceTracker = await FaceTracker.CreateAsync();
        ChangeDetectionState(DetectionStates.Startup);
    }
}
/// <summary>
/// Async factory: creates a FaceTracker constrained to faces between
/// 32x32 and 1024x1024 pixels and wraps it in a FaceTrackerProcessor.
/// </summary>
/// <param name="videoFrameProcessor">Processor that supplies frames to the tracker.</param>
public static async Task <FaceTrackerProcessor> CreateAsync(VideoFrameProcessor videoFrameProcessor)
{
    FaceTracker tracker = await FaceTracker.CreateAsync();

    // Bound the detectable face size to skip implausibly small or large candidates.
    tracker.MinDetectableFaceSize = new BitmapSize { Width = 32, Height = 32 };
    tracker.MaxDetectableFaceSize = new BitmapSize { Width = 1024, Height = 1024 };

    return new FaceTrackerProcessor(tracker, videoFrameProcessor);
}
/// <summary>
/// Starts face detection: lazily creates the FaceTracker and (re)starts the
/// periodic frame-processing timer at ~15 fps.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Routed event data.</param>
private async void DetectButton_Click(object sender, RoutedEventArgs e)
{
    if (mediaCapture == null)
    {
        return;
    }

    if (this.faceTracker == null)
    {
        this.faceTracker = await FaceTracker.CreateAsync();
    }

    // Cancel any timer left over from a previous click; the original code
    // leaked one additional periodic timer per click, all of them invoking
    // ProcessCurrentVideoFrame concurrently.
    if (this.frameProcessingTimer != null)
    {
        this.frameProcessingTimer.Cancel();
    }

    TimeSpan timerInterval = TimeSpan.FromMilliseconds(66); // 15 fps
    this.frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
}
/// <summary>
/// Creates the FaceTracker object which we will use for face detection and tracking.
/// Initializes a new MediaCapture instance and starts the Preview streaming to the CamPreview UI element.
/// </summary>
/// <returns>Async Task object returning true if initialization and streaming were successful and false if an exception occurred.</returns>
private async Task <bool> StartWebcamStreamingAsync()
{
    bool successful = false;

    // NOTE(review): created outside the try — a failure here escapes instead of returning false.
    faceTracker = await FaceTracker.CreateAsync();

    try
    {
        this.mediaCapture = new MediaCapture();

        // For this scenario, we only need Video (not microphone) so specify this in the initializer.
        // NOTE: the appxmanifest only declares "webcam" under capabilities and if this is changed to include
        // microphone (default constructor) you must add "microphone" to the manifest or initialization will fail.
        MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
        settings.StreamingCaptureMode = StreamingCaptureMode.Video;
        await this.mediaCapture.InitializeAsync(settings);
        this.mediaCapture.Failed += this.MediaCapture_CameraStreamFailed;

        // Cache the media properties as we'll need them later.
        var deviceController = this.mediaCapture.VideoDeviceController;
        this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

        // Immediately start streaming to our CaptureElement UI.
        // NOTE: CaptureElement's Source must be set before streaming is started.
        this.CamPreview.Source = this.mediaCapture;
        await this.mediaCapture.StartPreviewAsync();

        // Run the timer at 66ms, which is approximately 15 frames per second.
        TimeSpan timerInterval = TimeSpan.FromMilliseconds(66);
        this.frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(ProcessCurrentVideoFrame, timerInterval);

        successful = true;
    }
    catch (System.UnauthorizedAccessException)
    {
        // If the user has disabled their webcam this exception is thrown; provide a descriptive message to inform the user of this fact.
        this.rootPage.NotifyUser("Webcam is disabled or access to the webcam is disabled for this app.\nEnsure Privacy Settings allow webcam usage.", NotifyType.ErrorMessage);
    }
    catch (Exception ex)
    {
        this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
    }

    return(successful);
}
/// <summary>
/// Initializes the camera (preferring a device whose name contains
/// <paramref name="cameraName"/>), selects a low-bandwidth preview format,
/// starts the preview feed, creates the FaceTracker, and loads the known-person
/// map from the Face API.
/// </summary>
/// <param name="cameraName">Substring used to select the capture device; falls back to the first camera.</param>
/// <exception cref="InvalidOperationException">
/// No video capture device is present, or no 10 fps / 720p preview format is available.
/// </exception>
public async Task Initialize(string cameraName = "LifeCam")
{
    // select the camera (use invariant casing on BOTH sides — the original mixed
    // ToLowerInvariant with culture-sensitive ToLower)
    var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    var device = devices.FirstOrDefault(d => d.Name.ToLowerInvariant().Contains(cameraName.ToLowerInvariant()))
                 ?? devices.FirstOrDefault();
    if (device == null)
    {
        // Previously this fell through to a NullReferenceException on device.Id.
        throw new InvalidOperationException("No video capture devices were found.");
    }

    var settings = new MediaCaptureInitializationSettings() { VideoDeviceId = device.Id };

    // initialize the camera
    mediaCapture = new MediaCapture();
    await mediaCapture.InitializeAsync(settings);

    // select a lower framerate and resolution to reduce USB bandwidth
    var props = mediaCapture
                .VideoDeviceController
                .GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview)
                .Cast <VideoEncodingProperties>()
                .FirstOrDefault(p => p.FrameRate.Numerator == 10 && p.Height == 720);
    if (props == null)
    {
        // First() would have thrown a bare InvalidOperationException with no context.
        throw new InvalidOperationException("The camera does not offer a 10 fps, 720p preview stream.");
    }
    await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, props);

    // start the preview feed (a CaptureElement is required to sync the feed)
    captureElement = new CaptureElement() { Source = mediaCapture };
    await mediaCapture.StartPreviewAsync();

    // get the video properties
    var previewProperties = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
    ImageHeight = (int)previewProperties.Height;
    ImageWidth = (int)previewProperties.Width;

    // initialize face tracking
    faceTracker = await FaceTracker.CreateAsync();

    // Get the known persons
    var persons = await faceClient.GetPersonsAsync(personGroupId);
    personMap = persons.ToDictionary(p => p.PersonId, p => p.Name);
}
/// <summary>
/// Initializes the camera and starts the preview, then sets up local UI face
/// tracking (~15 fps) and a remote emotion-analysis timer (every 2 seconds).
/// </summary>
private async Task StartPreviewAsync()
{
    try
    {
        _mediaCapture = new MediaCapture();
        await _mediaCapture.InitializeAsync();

        // Keep the display active and pin the orientation while previewing.
        _displayRequest.RequestActive();
        DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;
    }
    catch (UnauthorizedAccessException)
    {
        // This will be thrown if the user denied access to the camera in privacy settings
        //("The app was denied access to the camera");
        return;
    }

    try
    {
        PreviewControl.Source = _mediaCapture;
        await _mediaCapture.StartPreviewAsync();
        _isPreviewing = true;
    }
    catch (System.IO.FileLoadException)
    {
        // Another app holds exclusive control of the capture device; subscribe so we
        // can react when that status changes.
        _mediaCapture.CaptureDeviceExclusiveControlStatusChanged += _mediaCapture_CaptureDeviceExclusiveControlStatusChanged;
    }

    //initialize UI facetracking
    this.faceTracker = await FaceTracker.CreateAsync();
    TimeSpan UIUpdateTimer = TimeSpan.FromMilliseconds(66); // 15 fps
    this.frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(ProcessCurrentVideoFrame), UIUpdateTimer);

    //initialize Remote emotion Detection
    TimeSpan EmotionUpdateTimer = TimeSpan.FromMilliseconds(2000); // every 2 seconds
    this.EmotionProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(AnalyzeEmotion), EmotionUpdateTimer);
}
/// <summary>
/// Initializes face tracking: creates the FaceTracker, caches the preview stream
/// properties, then starts the ~15 fps processing timer and runs one immediate pass.
/// </summary>
private async Task InitFacialeRecon()
{
    // Creates the Face tracker object
    this.faceTracker = await FaceTracker.CreateAsync();
    Debug.WriteLine("Face tracker initializating");

    // Gets the video properties BEFORE starting the timer. The original code
    // started the periodic timer first, so ProcessCurrentVideoFrame could run
    // while this.videoProperties was still null.
    var deviceController = this._mediaCapture.VideoDeviceController;
    this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

    // Set the frame rate
    TimeSpan timerInterval = TimeSpan.FromMilliseconds(66); // 15 fps
    this.frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
    Debug.WriteLine("Face tracker initializated !");

    // Run one pass immediately instead of waiting for the first timer tick.
    ProcessCurrentVideoFrame(frameProcessingTimer);
}
/// <summary>
/// Sets up face tracking end to end: creates the tracker, initializes a
/// video-only MediaCapture, starts the preview, and begins the ~15 fps
/// frame-processing timer.
/// </summary>
private async void InitializeFacialRecognition()
{
    if (m_faceTracker == null)
    {
        m_faceTracker = await FaceTracker.CreateAsync();
    }

    m_mediaCapture = new MediaCapture();
    var captureSettings = new MediaCaptureInitializationSettings
    {
        StreamingCaptureMode = StreamingCaptureMode.Video
    };
    await m_mediaCapture.InitializeAsync(captureSettings);

    // Cache the preview stream's encoding properties for frame processing.
    VideoDeviceController controller = m_mediaCapture.VideoDeviceController;
    m_videoProperties = controller.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

    await m_mediaCapture.StartPreviewAsync();

    TimeSpan interval = TimeSpan.FromMilliseconds(66);
    m_frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(ProcessCurrentVideoFrame), interval);
}
/// <summary>
/// Creates the FaceTracker and starts webcam preview streaming.
/// </summary>
/// <returns>true when streaming started; false on permission denial or any other failure.</returns>
private async Task <bool> StartWebcamStreamingAsync()
{
    try
    {
        // Moved inside the try: the original created the tracker before the try,
        // so a failure there escaped this method instead of returning false.
        _faceTracker = await FaceTracker.CreateAsync();

        _mediaCapture = new MediaCapture();
        await _mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings
        {
            StreamingCaptureMode = StreamingCaptureMode.Video
        });
        _mediaCapture.Failed += (s, a) => AbandonStreaming();

        // Cache the preview stream's encoding properties for frame processing.
        var deviceController = _mediaCapture.VideoDeviceController;
        _videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

        // CaptureElement's Source must be set before streaming starts.
        CameraPreview.Source = _mediaCapture;
        await _mediaCapture.StartPreviewAsync();

        var timerInterval = TimeSpan.FromMilliseconds(66); // 66ms, aprox 15 fps
        _frameProcessingTimer = ThreadPoolTimer.CreatePeriodicTimer(ProcessCurrentVideoFrame, timerInterval);

        return true;
    }
    catch (UnauthorizedAccessException)
    {
        NavigateToPermissionsPage();
        return false;
    }
    catch (Exception exception)
    {
        await DisplayMessage($"Error al iniciar el stream de la cámara: {exception.Message}");
        return false;
    }
}
/// <summary>
/// Responds when we navigate to this page.
/// Restores the saved Face API key/endpoint, lazily creates the FaceTracker and
/// FaceClient, resolves the capture folder, and starts webcam streaming.
/// </summary>
/// <param name="e">Event data</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    // Restore previously saved API settings into the text boxes.
    if (ApplicationData.Current.LocalSettings.Values.ContainsKey("ApiKey"))
    {
        ApiKey.Text = ApplicationData.Current.LocalSettings.Values["ApiKey"].ToString();
    }
    if (ApplicationData.Current.LocalSettings.Values.ContainsKey("ApiEndPoint"))
    {
        ApiEndPoint.Text = ApplicationData.Current.LocalSettings.Values["ApiEndPoint"].ToString();
    }

    if (this.faceTracker == null)
    {
        this.faceTracker = await FaceTracker.CreateAsync();
    }

    // Create the cloud Face API client using the key/endpoint currently in the UI.
    if (this.faceClient == null)
    {
        this.faceClient = new FaceClient(
            new ApiKeyServiceClientCredentials(ApiKey.Text),
            new System.Net.Http.DelegatingHandler[] { })
        {
            Endpoint = ApiEndPoint.Text
        };
    }

    if (captureFolder == null)
    {
        var picturesLibrary = await StorageLibrary.GetLibraryAsync(KnownLibraryId.Pictures);
        // Fall back to the local app storage if the Pictures Library is not available
        captureFolder = picturesLibrary.SaveFolder ?? ApplicationData.Current.LocalFolder;
    }

    await this.StartWebcamStreaming();
}