/// <summary>
/// Triggered after the page has loaded
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void Page_Loaded(object sender, RoutedEventArgs e)
{
    // Initialize helper class used to render the skill results on screen
    m_faceSentimentRenderer = new FaceSentimentRenderer(UICanvasOverlay, UISentiment);

    try
    {
        // Instantiate skill descriptor to display details about the skill and populate UI
        m_skillDescriptor = new FaceSentimentAnalyzerDescriptor();
        m_availableExecutionDevices = await m_skillDescriptor.GetSupportedExecutionDevicesAsync();

        // Show skill description members in UI
        UISkillName.Text = m_skillDescriptor.Name;
        UISkillDescription.Text = SkillHelper.SkillHelperMethods.GetSkillDescriptorString(m_skillDescriptor);

        // Render each input feature description, separated by a divider between entries
        UISkillInputDescription.Text += string.Join(
            "\n----\n",
            m_skillDescriptor.InputFeatureDescriptors.Select(
                (featureDesc) => SkillHelper.SkillHelperMethods.GetSkillFeatureDescriptorString(featureDesc)));

        // Render each output feature description the same way
        UISkillOutputDescription.Text += string.Join(
            "\n----\n",
            m_skillDescriptor.OutputFeatureDescriptors.Select(
                (featureDesc) => SkillHelper.SkillHelperMethods.GetSkillFeatureDescriptorString(featureDesc)));

        if (m_availableExecutionDevices.Count == 0)
        {
            UISkillOutputDetails.Text = "No execution devices available, this skill cannot run on this device";
        }
        else
        {
            // Display available execution devices
            UISkillExecutionDevices.ItemsSource = m_availableExecutionDevices.Select((device) => device.Name);
            UISkillExecutionDevices.SelectedIndex = 0;

            // Allow user to interact with the app
            UIButtonFilePick.IsEnabled = true;
            UICameraToggle.IsEnabled = true;
            UIButtonFilePick.Focus(FocusState.Keyboard);
        }
    }
    catch (Exception ex)
    {
        await new MessageDialog(ex.Message).ShowAsync();
    }

    // Register callback for if camera preview encounters an issue
    UICameraPreview.PreviewFailed += UICameraPreview_PreviewFailed;
}
/// <summary>
/// Entry point of program. Creates the face sentiment skill and its binding,
/// starts media capture, then waits for a key press before exiting.
/// </summary>
/// <param name="args"></param>
/// <returns></returns>
static void Main(string[] args)
{
    Console.WriteLine("Face Sentiment Analyzer .NetCore 3.0 Console App: Please face your camera");

    // Host the async initialization in a sync Main; .Wait() blocks until it completes.
    Task.Run(async () =>
    {
        try
        {
            // Create the skill from its descriptor and report which device it executes on
            var sceneClassifierSkilldesc = new FaceSentimentAnalyzerDescriptor();
            m_skill = await sceneClassifierSkilldesc.CreateSkillAsync() as FaceSentimentAnalyzerSkill;
            var skillDevice = m_skill.Device;
            Console.WriteLine("Running Skill on : " + skillDevice.ExecutionDeviceKind.ToString() + ": " + skillDevice.Name);

            m_binding = await m_skill.CreateSkillBindingAsync() as FaceSentimentAnalyzerBinding;
            await StartMediaCaptureAsync();
        }
        catch (Exception e)
        {
            // Exception.TargetSite/Source/StackTrace can be null; plain string
            // concatenation treats a null operand as empty instead of throwing
            // (the previous explicit .ToString() calls could NRE inside this catch).
            Console.WriteLine("Error:: " + e.Message + e.TargetSite + e.Source + e.StackTrace);
            Environment.Exit(e.HResult);
        }
    }).Wait();

    Console.WriteLine("\nPress Any key to stop\n");
    var key = Console.ReadKey();
}
/// <summary>
/// Triggered after the page has loaded
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void Page_Loaded(object sender, RoutedEventArgs e)
{
    // Initialize helper class used to render the skill results on screen
    m_faceSentimentRenderer = new FaceSentimentRenderer(UICanvasOverlay, UISentiment);

    try
    {
        // Instantiate skill descriptor to display details about the skill and populate UI
        m_skillDescriptor = new FaceSentimentAnalyzerDescriptor();
        m_availableExecutionDevices = await m_skillDescriptor.GetSupportedExecutionDevicesAsync();

        // Show skill description members in UI
        UISkillName.Text = m_skillDescriptor.Name;

        // BUGFIX: "published by" previously displayed Version.Author a second time;
        // it now correctly shows Version.Publisher.
        UISkillDescription.Text = $"{m_skillDescriptor.Description}"
            + $"\n\tauthored by: {m_skillDescriptor.Version.Author}"
            + $"\n\tpublished by: {m_skillDescriptor.Version.Publisher}"
            + $"\n\tversion: {m_skillDescriptor.Version.Major}.{m_skillDescriptor.Version.Minor}"
            + $"\n\tunique ID: {m_skillDescriptor.Id}";

        // Describe the single image input feature the skill expects
        var inputDesc = m_skillDescriptor.InputFeatureDescriptors[0] as SkillFeatureImageDescriptor;
        UISkillInputDescription.Text = $"\tName: {inputDesc.Name}"
            + $"\n\tDescription: {inputDesc.Description}"
            + $"\n\tType: {inputDesc.FeatureKind}"
            + $"\n\tWidth: {inputDesc.Width}"
            + $"\n\tHeight: {inputDesc.Height}"
            + $"\n\tSupportedBitmapPixelFormat: {inputDesc.SupportedBitmapPixelFormat}"
            + $"\n\tSupportedBitmapAlphaMode: {inputDesc.SupportedBitmapAlphaMode}";

        // Describe the two tensor output features the skill produces
        var outputDesc1 = m_skillDescriptor.OutputFeatureDescriptors[0] as SkillFeatureTensorDescriptor;
        UISkillOutputDescription1.Text = $"\tName: {outputDesc1.Name}, Description: {outputDesc1.Description} \n\tType: {outputDesc1.FeatureKind} of {outputDesc1.ElementKind} with shape [{outputDesc1.Shape.Select(i => i.ToString()).Aggregate((a, b) => a + ", " + b)}]";

        var outputDesc2 = m_skillDescriptor.OutputFeatureDescriptors[1] as SkillFeatureTensorDescriptor;
        UISkillOutputDescription2.Text = $"\tName: {outputDesc2.Name} \n\tDescription: {outputDesc2.Description} \n\tType: {outputDesc2.FeatureKind} of {outputDesc2.ElementKind} with shape [{outputDesc2.Shape.Select(i => i.ToString()).Aggregate((a, b) => a + ", " + b)}]";

        if (m_availableExecutionDevices.Count == 0)
        {
            UISkillOutputDetails.Text = "No execution devices available, this skill cannot run on this device";
        }
        else
        {
            // Display available execution devices
            UISkillExecutionDevices.ItemsSource = m_availableExecutionDevices.Select((device) => device.Name);
            UISkillExecutionDevices.SelectedIndex = 0;

            // Allow user to interact with the app
            UIButtonFilePick.IsEnabled = true;
            UICameraToggle.IsEnabled = true;
            UIButtonFilePick.Focus(FocusState.Keyboard);
        }
    }
    catch (Exception ex)
    {
        await new MessageDialog(ex.Message).ShowAsync();
    }

    // Register callback for if camera preview encounters an issue
    UICameraPreview.PreviewFailed += UICameraPreview_PreviewFailed;
}