Example #1
        /// <summary>
        /// Entry point of the program.
        /// </summary>
        /// <param name="args">Command line arguments (unused).</param>
        static void Main(string[] args)
        {
            Console.WriteLine("Face Sentiment Analyzer .NetCore 3.0 Console App: Please face your camera");

            Task.Run(async () =>
            {
                try
                {
                    var skillDescriptor = new FaceSentimentAnalyzerDescriptor();
                    m_skill = await skillDescriptor.CreateSkillAsync() as FaceSentimentAnalyzerSkill;
                    var skillDevice = m_skill.Device;
                    Console.WriteLine("Running skill on: " + skillDevice.ExecutionDeviceKind + ": " + skillDevice.Name);

                    m_binding = await m_skill.CreateSkillBindingAsync() as FaceSentimentAnalyzerBinding;
                    await StartMediaCaptureAsync();
                }
                catch (Exception e)
                {
                    Console.WriteLine("Error:: " + e.Message.ToString() + e.TargetSite.ToString() + e.Source.ToString() + e.StackTrace.ToString());
                    Environment.Exit(e.HResult);
                }
            }).Wait();

            Console.WriteLine("\nPress Any key to stop\n");

            var key = Console.ReadKey();
        }
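
Example #1 awaits StartMediaCaptureAsync(), which is not shown in the snippet. Below is a minimal sketch of what that method and its frame callback could look like, assuming the console app drives the default camera through MediaCapture and a MediaFrameReader (Windows.Media.Capture, Windows.Media.Capture.Frames, plus System.Linq for First()) and that the binding exposes SetInputImageAsync, IsFaceFound and PredominantSentiment as in the FaceSentimentAnalyzer sample. The m_mediaCapture, m_frameReader and m_evaluationLock fields are illustrative names, not part of the original snippet.

        // Sketch only: camera pipeline feeding frames to the skill (field names are assumptions).
        private static MediaCapture m_mediaCapture;
        private static MediaFrameReader m_frameReader;
        private static readonly SemaphoreSlim m_evaluationLock = new SemaphoreSlim(1);

        private static async Task StartMediaCaptureAsync()
        {
            // Open the default camera
            m_mediaCapture = new MediaCapture();
            await m_mediaCapture.InitializeAsync();

            // Use the first available frame source and start reading frames from it
            var frameSource = m_mediaCapture.FrameSources.Values.First();
            m_frameReader = await m_mediaCapture.CreateFrameReaderAsync(frameSource);
            m_frameReader.FrameArrived += FrameReader_FrameArrived;
            await m_frameReader.StartAsync();
        }

        private static async void FrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            // Drop this frame if a previous evaluation is still running
            if (!m_evaluationLock.Wait(0))
            {
                return;
            }

            try
            {
                using (var frameReference = sender.TryAcquireLatestFrame())
                {
                    var frame = frameReference?.VideoMediaFrame?.GetVideoFrame();
                    if (frame == null)
                    {
                        return;
                    }

                    // Bind the frame, run the skill, then read the result from the binding
                    await m_binding.SetInputImageAsync(frame);
                    await m_skill.EvaluateAsync(m_binding);
                    if (m_binding.IsFaceFound)
                    {
                        Console.WriteLine("Sentiment: " + m_binding.PredominantSentiment);
                    }
                }
            }
            finally
            {
                m_evaluationLock.Release();
            }
        }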
Example #2
        private async void UIButtonFilePick_Click(object sender, RoutedEventArgs e)
        {
            // Stop Camera preview
            UICameraPreview.Stop();
            if (UICameraPreview.CameraHelper != null)
            {
                await UICameraPreview.CameraHelper.CleanUpAsync();
            }
            UICameraPreview.Visibility = Visibility.Collapsed;
            UIImageViewer.Visibility   = Visibility.Visible;

            // Disable subsequent trigger of this event callback
            UICameraToggle.IsEnabled   = false;
            UIButtonFilePick.IsEnabled = false;

            await m_lock.WaitAsync();

            try
            {
                // Initialize skill with the selected supported device
                m_skill = await m_skillDescriptor.CreateSkillAsync(m_availableExecutionDevices[UISkillExecutionDevices.SelectedIndex]) as FaceSentimentAnalyzerSkill;

                // Instantiate a binding object that will hold the skill's input and output resource
                m_binding = await m_skill.CreateSkillBindingAsync() as FaceSentimentAnalyzerBinding;

                var frame = await LoadVideoFrameFromFilePickedAsync();

                if (frame != null)
                {
                    await m_bitmapSource.SetBitmapAsync(frame.SoftwareBitmap);

                    UIImageViewer.Source = m_bitmapSource;

                    UIImageViewer_SizeChanged(null, null);

                    await RunSkillAsync(frame);
                }

                m_skill   = null;
                m_binding = null;

                m_currentFrameSourceToggled = FrameSourceToggledType.ImageFile;
            }
            catch (Exception ex)
            {
                await new MessageDialog(ex.Message).ShowAsync();
                m_currentFrameSourceToggled = FrameSourceToggledType.None;
            }
            finally
            {
                m_lock.Release();

                // Enable subsequent trigger of this event callback
                UIButtonFilePick.IsEnabled = true;
                UICameraToggle.IsEnabled   = true;
            }
        }
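
Example #2 relies on LoadVideoFrameFromFilePickedAsync() and RunSkillAsync(), neither of which is part of the snippet (RunSkillAsync is sketched after Example #3). Below is a minimal sketch of LoadVideoFrameFromFilePickedAsync, assuming it lets the user pick an image with FileOpenPicker and decodes it into a premultiplied Bgra8 VideoFrame so the same SoftwareBitmap can be shown through SoftwareBitmapSource (Windows.Storage, Windows.Storage.Pickers, Windows.Graphics.Imaging, Windows.Media). Only the method name comes from the original code; everything inside it is illustrative.

        // Sketch only: pick an image file, decode it and wrap it in a VideoFrame for the skill.
        private async Task<VideoFrame> LoadVideoFrameFromFilePickedAsync()
        {
            var picker = new FileOpenPicker
            {
                SuggestedStartLocation = PickerLocationId.PicturesLibrary
            };
            picker.FileTypeFilter.Add(".jpg");
            picker.FileTypeFilter.Add(".jpeg");
            picker.FileTypeFilter.Add(".png");
            picker.FileTypeFilter.Add(".bmp");

            StorageFile file = await picker.PickSingleFileAsync();
            if (file == null)
            {
                // User cancelled the picker
                return null;
            }

            using (var stream = await file.OpenAsync(FileAccessMode.Read))
            {
                // Decode to a premultiplied Bgra8 SoftwareBitmap so it can also be displayed
                var decoder = await BitmapDecoder.CreateAsync(stream);
                var bitmap = await decoder.GetSoftwareBitmapAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                return VideoFrame.CreateWithSoftwareBitmap(bitmap);
            }
        }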
Example #3
        private async void UICameraToggle_Click(object sender, RoutedEventArgs e)
        {
            await m_lock.WaitAsync();

            try
            {
                UICameraPreview.Stop();
                if (UICameraPreview.CameraHelper != null)
                {
                    await UICameraPreview.CameraHelper.CleanUpAsync();
                }
                m_isCameraFrameDimensionInitialized = false;

                // Initialize skill with the selected supported device
                m_skill = await m_skillDescriptor.CreateSkillAsync(m_availableExecutionDevices[UISkillExecutionDevices.SelectedIndex]) as FaceSentimentAnalyzerSkill;

                // Instantiate a binding object that will hold the skill's input and output resource
                m_binding = await m_skill.CreateSkillBindingAsync() as FaceSentimentAnalyzerBinding;

                // Initialize the CameraPreview control, register frame arrived event callback
                UIImageViewer.Visibility   = Visibility.Collapsed;
                UICameraPreview.Visibility = Visibility.Visible;
                await UICameraPreview.StartAsync();

                UICameraPreview.CameraHelper.FrameArrived += CameraHelper_FrameArrived;
                m_currentFrameSourceToggled = FrameSourceToggledType.Camera;
            }
            catch (Exception ex)
            {
                await new MessageDialog(ex.Message).ShowAsync();
                m_currentFrameSourceToggled = FrameSourceToggledType.None;
            }
            finally
            {
                m_lock.Release();
            }
        }
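
Example #3 registers CameraHelper_FrameArrived, and Example #2 calls RunSkillAsync(), but neither is shown. Below is a minimal sketch of both, assuming FrameEventArgs comes from the Windows Community Toolkit CameraHelper, that the binding exposes SetInputImageAsync, IsFaceFound and PredominantSentiment as in the FaceSentimentAnalyzer sample, and that a hypothetical UIResultText TextBlock exists for displaying the result; treat those names as assumptions.

        // Sketch only: evaluate each camera frame, skipping frames while the skill is busy.
        private async void CameraHelper_FrameArrived(object sender, FrameEventArgs e)
        {
            // Drop this frame if the previous evaluation has not finished yet
            if (!m_lock.Wait(0))
            {
                return;
            }

            try
            {
                VideoFrame frame = e.VideoFrame;
                if (frame != null && m_skill != null && m_binding != null)
                {
                    await RunSkillAsync(frame);
                }
            }
            finally
            {
                m_lock.Release();
            }
        }

        // Sketch only: bind the frame, run the skill and surface the result in the UI.
        private async Task RunSkillAsync(VideoFrame frame)
        {
            await m_binding.SetInputImageAsync(frame);
            await m_skill.EvaluateAsync(m_binding);

            // UIResultText is a hypothetical TextBlock used here just to show the output
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                UIResultText.Text = m_binding.IsFaceFound
                    ? "Sentiment: " + m_binding.PredominantSentiment
                    : "No face detected";
            });
        }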