        /// <summary>
        /// Extract the current input feature values from the binding
        /// </summary>
        /// <param name="binding">The skill binding to read the input features from</param>
        private QuadDetectorBindingInputFeatureValues ExtractBindingValues(ISkillBinding binding)
        {
            QuadDetectorBindingInputFeatureValues result = new QuadDetectorBindingInputFeatureValues();

            result.SubMarginPercentage = (binding["SubRectangleMargin"].FeatureValue as SkillFeatureTensorIntValue).GetAsVectorView()[0];
            result.MaxDetectedQuads    = (binding["MaxDetectedQuads"].FeatureValue as SkillFeatureTensorIntValue).GetAsVectorView()[0];
            result.NumberOfEdgePixels  = (binding["NumberOfEdgePixels"].FeatureValue as SkillFeatureTensorIntValue).GetAsVectorView()[0];

            var baseQuadFeature      = binding["BaseQuad"].FeatureValue;
            var baseQuadFeatureValue = (baseQuadFeature as SkillFeatureTensorFloatValue).GetAsVectorView();

            // The tensor stores the corners as interleaved (x, y) pairs; convert them to Points
            for (int i = 0; i < baseQuadFeatureValue.Count; i += 2)
            {
                result.BaseQuad[i / 2] = new Point(baseQuadFeatureValue[i], baseQuadFeatureValue[i + 1]);
            }

            result.UseCenterPoint = (binding["UseCenterPoint"].FeatureValue as SkillFeatureTensorBooleanValue).GetAsVectorView()[0];

            var centerPointFeature = binding["CenterPoint"].FeatureValue;
            var centerPointTensor  = (centerPointFeature as SkillFeatureTensorFloatValue).GetAsVectorView();

            if (centerPointTensor.Count >= 2) // need both X and Y
            {
                result.CenterPointCoordinates.X = centerPointTensor[0];
                result.CenterPointCoordinates.Y = centerPointTensor[1];
            }

            return result;
        }
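The QuadDetectorBindingInputFeatureValues holder populated above is not shown in this listing. Below is a minimal sketch inferred from the member accesses; the field names match the usage, but the types and layout are assumptions, not the sample's actual definition.

using Windows.Foundation; // Point

// Assumed shape of the holder populated by ExtractBindingValues above
internal class QuadDetectorBindingInputFeatureValues
{
    public int SubMarginPercentage;          // read from the "SubRectangleMargin" tensor
    public int MaxDetectedQuads;             // read from the "MaxDetectedQuads" tensor
    public int NumberOfEdgePixels;           // read from the "NumberOfEdgePixels" tensor
    public Point[] BaseQuad = new Point[4];  // 8 interleaved floats -> 4 corner points
    public bool UseCenterPoint;
    public Point CenterPointCoordinates;     // struct field, so .X/.Y can be assigned directly
}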
Example #2
 /// <summary>
 /// SkillControl factory to instantiate known derivatives
 /// </summary>
 /// <param name="binding">The skill binding that selects which control derivative to create</param>
 /// <returns>A SkillControl derivative matching the binding type</returns>
 public static SkillControl CreateControl(ISkillBinding binding)
 {
     if (binding is QuadDetectorBinding)
     {
         return new QuadDetectorControl(binding);
     }
     else if (binding is LiveQuadDetectorBinding)
     {
         return new LiveQuadDetectorControl(binding);
     }
     else if (binding is ImageRectifierBinding)
     {
         return new ImageRectifierControl(binding);
     }
     else if (binding is ImageCleanerBinding)
     {
         return new ImageCleanerControl(binding);
     }
     else if (binding is CurvedEdgesDetectorBinding)
     {
         return new CurvedEdgesDetectorControl(binding);
     }
     else if (binding is QuadEdgesDetectorBinding)
     {
         return new QuadEdgesDetectorControl(binding);
     }
     else
     {
         throw new ArgumentException("Unexpected skill binding type specified");
     }
 }
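On C# 8 or later, the same type dispatch can be written as a pattern-matching switch expression. A behavior-equivalent sketch; the final arm preserves the original ArgumentException for unknown binding types:

public static SkillControl CreateControl(ISkillBinding binding)
{
    // Dispatch on the concrete binding type
    return binding switch
    {
        QuadDetectorBinding _ => new QuadDetectorControl(binding),
        LiveQuadDetectorBinding _ => new LiveQuadDetectorControl(binding),
        ImageRectifierBinding _ => new ImageRectifierControl(binding),
        ImageCleanerBinding _ => new ImageCleanerControl(binding),
        CurvedEdgesDetectorBinding _ => new CurvedEdgesDetectorControl(binding),
        QuadEdgesDetectorBinding _ => new QuadEdgesDetectorControl(binding),
        _ => throw new ArgumentException("Unexpected skill binding type specified")
    };
}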
Example #3
        /// <summary>
        /// Evaluate the specified ISkillBinding and update the specified SkillControl with the outcome
        /// </summary>
        /// <param name="binding">The skill binding to evaluate</param>
        /// <param name="control">The control that displays the evaluation time</param>
        /// <returns>A task that completes when evaluation has finished</returns>
        private async Task EvaluateBindingAsync(ISkillBinding binding, SkillControl control)
        {
            // Take a lock for using a skill if one is available, or wait if not
            m_evaluationLock.Wait();

            try
            {
                var baseTime = (float)m_perfWatch.ElapsedTicks / Stopwatch.Frequency * 1000f;

                // Evaluate binding
                await m_currentSkillWrapper.Skill.EvaluateAsync(binding);

                // Record evaluation time for display
                control.EvalTime = (float)m_perfWatch.ElapsedTicks / Stopwatch.Frequency * 1000f - baseTime;

                m_evaluationLock.Release();
            }
            catch (Exception ex)
            {
                NotifyUser(ex.Message, NotifyType.ErrorMessage);

                m_bindingLock.Release();

                m_evaluationLock.Release();
                return;
            }

            m_bindingLock.Release();
        }
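The two locks and the stopwatch used here are fields of the hosting page that this listing does not include. A sketch of plausible declarations, assuming SemaphoreSlim instances with an initial count of 1; the names come from the code above, the initialization details are assumptions:

using System.Diagnostics;
using System.Threading;

// Assumed field declarations on the hosting page class
private readonly SemaphoreSlim m_evaluationLock = new SemaphoreSlim(1); // serializes skill evaluations
private readonly SemaphoreSlim m_bindingLock = new SemaphoreSlim(1);    // guards binding mutation during evaluation
private readonly Stopwatch m_perfWatch = Stopwatch.StartNew();          // time base for the EvalTime display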
Example #4
        /// <summary>
        /// Extract the current input feature values from the binding
        /// </summary>
        /// <param name="binding">The skill binding to read the input features from</param>
        private QuadEdgesDetectorBindingInputFeatureValues ExtractBindingValues(ISkillBinding binding)
        {
            QuadEdgesDetectorBindingInputFeatureValues result = new QuadEdgesDetectorBindingInputFeatureValues();

            result.MaxQuadEdges = (binding["MaxDetectedEdges"].FeatureValue as SkillFeatureTensorIntValue).GetAsVectorView()[0];

            return result;
        }
Example #5
        /// <summary>
        /// Extract the current input feature values from the binding
        /// </summary>
        /// <param name="binding">The skill binding to read the input features from</param>
        private LiveQuadDetectorBindingInputFeatureValues ExtractBindingValues(ISkillBinding binding)
        {
            LiveQuadDetectorBindingInputFeatureValues result = new LiveQuadDetectorBindingInputFeatureValues();

            result.Reset = (binding["Reset"].FeatureValue as SkillFeatureTensorBooleanValue).GetAsVectorView()[0];

            return result;
        }
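The single-element tensor reads repeated across these ExtractBindingValues overloads all follow one pattern. Hypothetical helpers that would factor it out; these are not part of the original sample:

// Hypothetical helpers for reading one-element tensor features from a binding
private static int GetIntScalar(ISkillBinding binding, string featureName) =>
    (binding[featureName].FeatureValue as SkillFeatureTensorIntValue).GetAsVectorView()[0];

private static bool GetBoolScalar(ISkillBinding binding, string featureName) =>
    (binding[featureName].FeatureValue as SkillFeatureTensorBooleanValue).GetAsVectorView()[0];

// Example usage, equivalent to the line above:
// result.Reset = GetBoolScalar(binding, "Reset");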
Example #6
        /// <summary>
        /// ImageCleanerControl constructor
        /// </summary>
        /// <param name="binding">The initial skill binding to display</param>
        public ImageCleanerControl(ISkillBinding binding) : base(binding)
        {
            // Update the view of the initial binding values
            m_ImageCleanerBindingFeatureValues = ExtractBindingValues(binding);
            m_interactiveControl = new ImageCleanerInteractiveControl();
            m_interactiveControl.ImageCleaningKindChanged += ImageCleanerInteractiveControl_ImageCleaningKindChanged;

            Children.Add(m_interactiveControl);
            m_interactiveControl.UpdateSelectedImageCleaningKind(m_ImageCleanerBindingFeatureValues.InputImageType);
        }
Example #7
        /// <summary>
        /// Extract the current input feature values from the binding
        /// </summary>
        /// <param name="binding">The skill binding to read the input features from</param>
        private ImageCleanerBindingInputFeatureValues ExtractBindingValues(ISkillBinding binding)
        {
            ImageCleanerBindingInputFeatureValues result = new ImageCleanerBindingInputFeatureValues();

            var inputImageType             = binding["InputImageType"].FeatureValue;
            var inputImageTypeFeatureValue = (inputImageType as SkillFeatureTensorStringValue).GetAsVectorView();

            result.InputImageType = (ImageCleaningKind)Enum.GetNames(typeof(ImageCleaningKind)).ToList().IndexOf(inputImageTypeFeatureValue[0]);

            return result;
        }
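The Enum.GetNames(...).IndexOf lookup above maps the tensor string to an enum value by name position, which only works if ImageCleaningKind uses default sequential values. Under that same assumption, Enum.TryParse expresses the intent more directly and also guards against an unmatched string; a sketch:

// Equivalent mapping via Enum.TryParse; leaves the default kind in place on a mismatch
// instead of producing the invalid (ImageCleaningKind)(-1) that IndexOf would yield
if (Enum.TryParse(inputImageTypeFeatureValue[0], out ImageCleaningKind parsedKind))
{
    result.InputImageType = parsedKind;
}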
Example #8
        /// <summary>
        /// SkillControl class constructor
        /// </summary>
        public SkillControl(ISkillBinding binding)
        {
            Orientation    = Orientation.Horizontal;
            m_skillBinding = binding;
            m_imageGrid.Children.Add(m_image);
            m_imageGrid.Children.Add(m_progressRing);

            Children.Add(new StackPanel()
            {
                Children = { m_imageGrid, m_runButton, m_perfTextBlock }
            });
            m_runButton.Click += RunButton_Click;
        }
Example #9
        /// <summary>
        /// Run inference on the bound input image and set the transformed result on the output feature
        /// </summary>
        /// <param name="binding">The skill binding holding the input and output image features</param>
        /// <returns>An IAsyncAction that completes when evaluation has finished</returns>
        public IAsyncAction EvaluateAsync(ISkillBinding binding)
        {
            NeuralStyleTransformerBinding bindingObj = binding as NeuralStyleTransformerBinding;

            if (bindingObj == null)
            {
                throw new ArgumentException("Invalid ISkillBinding parameter: This skill handles evaluation of NeuralStyleTransformerBinding instances only");
            }

            return AsyncInfo.Run(async (token) =>
            {
                // Retrieve input frame from the binding object
                VideoFrame inputFrame = (binding[NeuralStyleTransformerConst.SKILL_INPUTNAME_IMAGE].FeatureValue as SkillFeatureImageValue).VideoFrame;
                SoftwareBitmap softwareBitmapInput = inputFrame.SoftwareBitmap;

                // Retrieve a SoftwareBitmap version of the input frame, copying from its Direct3D surface if needed
                if (softwareBitmapInput == null)
                {
                    if (inputFrame.Direct3DSurface == null)
                    {
                        throw new ArgumentNullException("An invalid input frame has been bound");
                    }
                    softwareBitmapInput = await SoftwareBitmap.CreateCopyFromSurfaceAsync(inputFrame.Direct3DSurface);
                }

                // Retrieve output image from model
                var transformedImage = binding[NeuralStyleTransformerConst.SKILL_OUTPUTNAME_IMAGE];

                // Bind the WinML input frame
                bindingObj.m_winmlBinding.Bind(
                    NeuralStyleTransformerConst.WINML_MODEL_INPUTNAME, // WinML feature name
                    inputFrame);
                ImageFeatureValue outputImageFeatureValue = ImageFeatureValue.CreateFromVideoFrame(_outputFrame);
                bindingObj.m_winmlBinding.Bind(NeuralStyleTransformerConst.WINML_MODEL_OUTPUTNAME, outputImageFeatureValue);
                // Run WinML evaluation
                var winMLEvaluationResult = await m_winmlSession.EvaluateAsync(bindingObj.m_winmlBinding, "0");
                // Parse result
                IReadOnlyDictionary<string, object> outputs = winMLEvaluationResult.Outputs;
                foreach (var output in outputs)
                {
                    Debug.WriteLine($"{output.Key} : {output.Value} -> {output.Value.GetType()}");
                }

                // Set the model output as the skill output
                await transformedImage.SetFeatureValueAsync(_outputFrame);
            });
        }
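This method relies on skill-level fields that the listing does not show: a WinML session and a preallocated output frame. A sketch of plausible declarations; the pixel format and dimensions are assumptions, not values taken from the sample:

using Windows.AI.MachineLearning;
using Windows.Graphics.Imaging;
using Windows.Media;

// Assumed fields initialized when the skill is created
private LearningModelSession m_winmlSession;  // session over the loaded style-transfer model
private VideoFrame _outputFrame =             // reusable output target bound to the model output
    new VideoFrame(BitmapPixelFormat.Bgra8, 720, 720); // size assumed; should match the model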
Example #10
        /// <summary>
        /// LiveQuadDetectorControl constructor
        /// </summary>
        /// <param name="binding">The initial skill binding to display</param>
        public LiveQuadDetectorControl(ISkillBinding binding) : base(binding)
        {
            // Update the view of the initial binding values
            m_liveQuadDetectorBindingFeatureValues = ExtractBindingValues(binding);
            m_interactiveControl = new LiveQuadDetectorSkillInteractiveControl();
            m_interactiveControl.ResetCheckedUnchecked += LiveQuadDetectorSkillInteractiveControl_ResetCheckedUnchecked;

            m_imageGrid.Children.Add(m_canvas);
            m_image.SetValue(Canvas.ZIndexProperty, -1);
            m_image.SizeChanged += Image_SizeChanged;

            // Add Quad results control
            m_quadsResultRenderer           = new QuadSetRenderer(ref m_canvas, 1);
            m_quadsResultRenderer.IsVisible = false;

            Children.Add(m_interactiveControl);
            m_interactiveControl.UpdateDisplayedInputValues(m_liveQuadDetectorBindingFeatureValues);
        }
Example #11
        /// <summary>
        /// Extract the current input feature values from the binding
        /// </summary>
        /// <param name="binding">The skill binding to read the input features from</param>
        private ImageRectifierBindingInputFeatureValues ExtractBindingValues(ISkillBinding binding)
        {
            ImageRectifierBindingInputFeatureValues result = new ImageRectifierBindingInputFeatureValues();

            var inputQuadFeature      = binding["InputQuad"].FeatureValue;
            var inputQuadFeatureValue = (inputQuadFeature as SkillFeatureTensorFloatValue).GetAsVectorView();

            for (int i = 0; i < inputQuadFeatureValue.Count; i += 2)
            {
                result.InputQuad[i / 2] = new Point(inputQuadFeatureValue[i], inputQuadFeatureValue[i + 1]);
            }
            var interpolationType             = binding["InterpolationType"].FeatureValue;
            var interpolationTypeFeatureValue = (interpolationType as SkillFeatureTensorStringValue).GetAsVectorView();

            result.InterpolationType = (ImageRectifierInterpolationKind)Enum.GetNames(typeof(ImageRectifierInterpolationKind)).ToList().IndexOf(interpolationTypeFeatureValue[0]);

            return result;
        }
Example #12
        /// <summary>
        /// QuadEdgesDetectorControl constructor
        /// </summary>
        /// <param name="binding">The initial skill binding to display</param>
        public QuadEdgesDetectorControl(ISkillBinding binding) : base(binding)
        {
            // Update the view of the initial binding values
            m_quadEdgesDetectorBindingFeatureValues = ExtractBindingValues(binding);
            m_interactiveControl = new QuadEdgesDetectorSkillInteractiveControl();
            m_interactiveControl.MaxQuadEdgesValueChanged += QuadEdgesDetectorSkillInteractiveControl_MaxQuadEdgesValueChanged;

            m_imageGrid.Children.Add(m_canvas);
            m_image.SetValue(Canvas.ZIndexProperty, -1);
            m_image.SizeChanged += Image_SizeChanged;

            // Add line results controls
            m_verticalLinesResultRenderer             = new LineSetRenderer(ref m_canvas, Colors.Blue);
            m_verticalLinesResultRenderer.IsVisible   = false;
            m_horizontalLinesResultRenderer           = new LineSetRenderer(ref m_canvas, Colors.Orange);
            m_horizontalLinesResultRenderer.IsVisible = false;

            Children.Add(m_interactiveControl);
            m_interactiveControl.UpdateDisplayedInputValues(m_quadEdgesDetectorBindingFeatureValues);
        }
Example #13
        /// <summary>
        /// ImageRectifierControl constructor
        /// </summary>
        /// <param name="binding">The initial skill binding to display</param>
        public ImageRectifierControl(ISkillBinding binding) : base(binding)
        {
            // Update the view of the initial binding values
            m_ImageRectifierBindingFeatureValues = ExtractBindingValues(binding);
            m_interactiveControl = new ImageRectifierInteractiveControl();
            m_interactiveControl.InterpolationTypeChanged += ImageRectifierInteractiveControl_InterpolationTypeChanged;

            m_imageGrid.Children.Add(m_canvas);
            m_image.SetValue(Canvas.ZIndexProperty, -1);
            m_image.SizeChanged += Image_SizeChanged;

            // Add InputQuad control
            m_inputQuadRenderer = new InteractiveQuadRenderer(ref m_canvas);
            m_inputQuadRenderer.CornersChanged         += InteractiveQuadRenderer_CornersChanged;
            m_inputQuadRenderer.CornersChangeCompleted += InteractiveQuadRenderer_CornersChangeCompleted;
            m_inputQuadRenderer.Update(m_ImageRectifierBindingFeatureValues.InputQuad);

            Children.Add(m_interactiveControl);
            m_interactiveControl.UpdateDisplayedInputValues(m_ImageRectifierBindingFeatureValues);
            m_interactiveControl.UpdateInterpolationType(m_ImageRectifierBindingFeatureValues.InterpolationType);
        }
Example #14
        /// <summary>
        /// QuadDetectorControl constructor
        /// </summary>
        /// <param name="binding">The initial skill binding to display</param>
        public QuadDetectorControl(ISkillBinding binding) : base(binding)
        {
            // Update the view of the initial binding values
            m_quadDetectorBindingFeatureValues = ExtractBindingValues(binding);
            m_interactiveControl = new QuadDetectorSkillInteractiveControl();

            m_imageGrid.Children.Add(m_canvas);
            m_image.SetValue(Canvas.ZIndexProperty, -1);
            m_image.SizeChanged += Image_SizeChanged;

            // Add SubRectangleMargin control
            foreach (var margin in m_margins)
            {
                m_canvas.Children.Add(margin);
            }
            m_interactiveControl.SubMargingValueChanged            += QuadDetectorSkillInteractiveControl_SubMargingValueChanged;
            m_interactiveControl.MaxQuadValueChanged               += QuadDetectorSkillInteractiveControl_MaxQuadValueChanged;
            m_interactiveControl.NumberOfPixelsPerEdgeValueChanged += QuadDetectorSkillInteractiveControl_NumberOfPixelsPerEdgeValueChanged;

            // Add Quad results control
            m_quadsResultRenderer           = new QuadSetRenderer(ref m_canvas, 10);
            m_quadsResultRenderer.IsVisible = false;

            // Add BaseQuad control
            m_baseQuadRenderer                                    = new InteractiveQuadRenderer(ref m_canvas);
            m_baseQuadRenderer.IsVisible                          = false;
            m_baseQuadRenderer.CornersChanged                    += InteractiveQuadRenderer_CornersChanged;
            m_baseQuadRenderer.CornersChangeCompleted            += InteractiveQuadRenderer_CornersChangeCompleted;
            m_interactiveControl.SpecifyBaseQuadCheckedUnchecked += QuadDetectorSkillInteractiveControl_SpecifyBaseQuadCheckedUnchecked;

            // Add CenterPoint control
            m_centerPointControl.ManipulationDelta     += QuadRendererCenterPoint_ManipulationDelta;
            m_centerPointControl.ManipulationCompleted += QuadRendererCenterPoint_ManipulationCompleted;
            Mouse.SetCursor(m_centerPointControl, Windows.UI.Core.CoreCursorType.Pin);
            m_canvas.Children.Add(m_centerPointControl);
            m_interactiveControl.CenterPointCheckedUnchecked += QuadDetectorSkillInteractiveControl_CenterPointCheckedUnchecked;

            Children.Add(m_interactiveControl);
            m_interactiveControl.UpdateDisplayedInputValues(m_quadDetectorBindingFeatureValues);
        }
Example #15
        /// <summary>
        /// Triggered when the run button of the currently selected skill is clicked
        /// </summary>
        /// <param name="binding">The skill binding associated with the run button</param>
        private async void SkillControl_RunButtonClicked(ISkillBinding binding)
        {
            // Disable UI
            UISkillTabs.IsEnabled      = false;
            UIButtonFilePick.IsEnabled = false;
            NotifyUser("", NotifyType.ClearMessage);

            // Evaluate binding and wait for it to complete
            await EvaluateBindingAsync(m_currentSkillWrapper.Binding, m_currentSkillControl);

            // Display image and results
            await UIResultPanel.Dispatcher.RunAsync(
                CoreDispatcherPriority.Normal,
                async() =>
            {
                await m_currentSkillControl.UpdateSkillControlValuesAsync(m_currentSkillWrapper.Binding);
            });

            // Enable UI
            UISkillTabs.IsEnabled      = true;
            UIButtonFilePick.IsEnabled = true;
        }
Example #16
        /// <summary>
        /// CurvedEdgesDetectorControl constructor
        /// </summary>
        /// <param name="binding">The initial skill binding to display</param>
        public CurvedEdgesDetectorControl(ISkillBinding binding) : base(binding)
        {
            // Instantiate the interactive control for this skill
            m_interactiveControl = new CurvedEdgesDetectorSkillInteractiveControl();

            m_imageGrid.Children.Add(m_canvas);
            m_image.SetValue(Canvas.ZIndexProperty, -1);
            m_image.SizeChanged += Image_SizeChanged;

            // Add line result control
            m_linesResultRenderer = new PolylineRenderer(ref m_canvas);

            m_linesResultRenderer.IsVisible = false;

            Children.Add(m_interactiveControl);
            m_interactiveControl.UpdateBaseQuadCorners(m_inputQuadCorners);

            // Add control to manipulate InputQuad
            m_inputQuadRenderer           = new InteractiveQuadRenderer(ref m_canvas);
            m_inputQuadRenderer.IsVisible = true;
            m_inputQuadRenderer.Update(m_inputQuadCorners);
            m_inputQuadRenderer.CornersChanged         += InteractiveQuadRenderer_CornersChanged;
            m_inputQuadRenderer.CornersChangeCompleted += InteractiveQuadRenderer_CornersChangeCompleted;
        }
Example #17
        /// <summary>
        /// Runs the skill against a binding object, executing the skill logic on the associated input features and populating the output ones.
        /// This skill proceeds in 2 steps:
        /// 1) Run FaceDetector against the image and populate the face bound feature in the binding object
        /// 2) If a face was detected, proceed with sentiment analysis of that portion of the image using Windows ML, then update the score
        /// of each possible sentiment returned as a result
        /// </summary>
        /// <param name="binding">The skill binding holding the input image and output features</param>
        /// <returns>An IAsyncAction that completes when evaluation has finished</returns>
        public IAsyncAction EvaluateAsync(ISkillBinding binding)
        {
            FaceSentimentAnalyzerBinding bindingObj = binding as FaceSentimentAnalyzerBinding;

            if (bindingObj == null)
            {
                throw new ArgumentException("Invalid ISkillBinding parameter: This skill handles evaluation of FaceSentimentAnalyzerBinding instances only");
            }

            return AsyncInfo.Run(async (token) =>
            {
                // Retrieve input frame from the binding object
                VideoFrame inputFrame = (binding[FaceSentimentAnalyzerConst.SKILL_INPUTNAME_IMAGE].FeatureValue as SkillFeatureImageValue).VideoFrame;
                SoftwareBitmap softwareBitmapInput = inputFrame.SoftwareBitmap;

                // Retrieve a SoftwareBitmap to run face detection
                if (softwareBitmapInput == null)
                {
                    if (inputFrame.Direct3DSurface == null)
                    {
                        throw new ArgumentNullException("An invalid input frame has been bound");
                    }
                    softwareBitmapInput = await SoftwareBitmap.CreateCopyFromSurfaceAsync(inputFrame.Direct3DSurface);
                }

                // Run face detection and retrieve face detection result
                var faceDetectionResult = await m_faceDetector.DetectFacesAsync(softwareBitmapInput);

                // Retrieve face rectangle feature from the binding object
                var faceRectangleFeature = binding[FaceSentimentAnalyzerConst.SKILL_OUTPUTNAME_FACERECTANGLE];

                // Retrieve face sentiment scores feature from the binding object
                var faceSentimentScores = binding[FaceSentimentAnalyzerConst.SKILL_OUTPUTNAME_FACESENTIMENTSCORES];

                // If a face is found, update face rectangle feature
                if (faceDetectionResult.Count > 0)
                {
                    // Retrieve the face bound and enlarge it by half the face width on each side while clamping to the frame dimensions
                    BitmapBounds faceBound = faceDetectionResult[0].FaceBox;
                    var additionalOffset = faceBound.Width / 2;
                    faceBound.X = Math.Max(0, faceBound.X - additionalOffset);
                    faceBound.Y = Math.Max(0, faceBound.Y - additionalOffset);
                    faceBound.Width = (uint)Math.Min(faceBound.Width + 2 * additionalOffset, softwareBitmapInput.PixelWidth - faceBound.X);
                    faceBound.Height = (uint)Math.Min(faceBound.Height + 2 * additionalOffset, softwareBitmapInput.PixelHeight - faceBound.Y);

                    // Set the face rectangle SkillFeatureValue in the skill binding object
                    // note that values are in normalized coordinates between [0, 1] for ease of use
                    await faceRectangleFeature.SetFeatureValueAsync(
                        new List<float>()
                    {
                        (float)faceBound.X / softwareBitmapInput.PixelWidth,                      // left
                        (float)faceBound.Y / softwareBitmapInput.PixelHeight,                     // top
                        (float)(faceBound.X + faceBound.Width) / softwareBitmapInput.PixelWidth,  // right
                        (float)(faceBound.Y + faceBound.Height) / softwareBitmapInput.PixelHeight // bottom
                    });

                    // Bind the WinML input frame with the adequate face bounds specified as metadata
                    bindingObj.m_winmlBinding.Bind(
                        FaceSentimentAnalyzerConst.WINML_MODEL_INPUTNAME, // WinML feature name
                        inputFrame,                                       // VideoFrame
                        new PropertySet()                                 // VideoFrame bounds
                    {
                        { "BitmapBounds",
                          PropertyValue.CreateUInt32Array(new uint[] { faceBound.X, faceBound.Y, faceBound.Width, faceBound.Height }) }
                    });

                    // Run WinML evaluation
                    var winMLEvaluationResult = await m_winmlSession.EvaluateAsync(bindingObj.m_winmlBinding, "");
                    var winMLModelResult = (winMLEvaluationResult.Outputs[FaceSentimentAnalyzerConst.WINML_MODEL_OUTPUTNAME] as TensorFloat).GetAsVectorView();
                    var predictionScores = SoftMax(winMLModelResult);

                    // Set the SkillFeatureValue in the skill binding object related to the face sentiment scores for each possible SentimentType
                    // note that we SoftMax the output of WinML to give a score normalized between [0, 1] for ease of use
                    await faceSentimentScores.SetFeatureValueAsync(predictionScores);
                }
                else // if no face found, reset output SkillFeatureValues with 0s
                {
                    await faceRectangleFeature.SetFeatureValueAsync(FaceSentimentAnalyzerConst.ZeroFaceRectangleCoordinates);
                    await faceSentimentScores.SetFeatureValueAsync(FaceSentimentAnalyzerConst.ZeroFaceSentimentScores);
                }
            });
        }
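The SoftMax helper referenced above is not included in the listing. A minimal sketch consistent with how it is used here, with the usual max-subtraction added for numerical stability; the sample's actual implementation may differ:

using System;
using System.Collections.Generic;
using System.Linq;

private static List<float> SoftMax(IReadOnlyList<float> inputs)
{
    // Shift by the max so Math.Exp cannot overflow, then normalize so the scores sum to 1
    float max = inputs.Max();
    var exps = inputs.Select(v => Math.Exp(v - max)).ToList();
    double sum = exps.Sum();
    return exps.Select(e => (float)(e / sum)).ToList();
}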