Ejemplo n.º 1
0
    // Runs the Tiny YOLO v2 model on one video frame and returns the detected
    // bounding boxes after confidence filtering and non-max suppression.
    public async Task <IList <YoloBoundingBox> > AnalyzeImage(VideoFrame videoFrame)
    {
        // This appears to be the right way to handle background tasks.
        // We return to the main thread as fast as we can, and wait for the next call to the Update()
        // to advance our processing
#if SDK_1809
        // 1809 SDK: the model binds an ImageFeatureValue wrapper around the frame.
        TinyYoloV2O12Input input = new TinyYoloV2O12Input {
            image = ImageFeatureValue.CreateFromVideoFrame(videoFrame)
        };
#else
        // Older SDK: the model binds the VideoFrame directly.
        TinyYoloV2O1ModelInput input = new TinyYoloV2O1ModelInput {
            image = videoFrame
        };
#endif
        // Frame dimensions are needed to scale the parsed boxes.
        var dims   = GetDimensionsFromVideoFrame(videoFrame);
        int width  = dims.Item1;
        int height = dims.Item2;

        var predictions = await model.EvaluateAsync(input).ConfigureAwait(false);

#if SDK_1809
        var boxes = parser.ParseOutputs(predictions.grid.GetAsVectorView().ToArray(), width, height, DetectionThreshold);
#else
        var boxes = parser.ParseOutputs(predictions.grid.ToArray(), width, height, DetectionThreshold);
#endif
        // Defensive second confidence filter (ParseOutputs already receives the
        // same threshold above).
        boxes = boxes.Where(b => b.Confidence >= DetectionThreshold).ToList();

        // Suppress overlapping detections of the same object (non-max suppression).
        // NOTE(review): the original comment here said "normalize coordinates",
        // which does not match the call below.
        boxes = parser.NonMaxSuppress(boxes);
        return(boxes.ToList());
    }
Ejemplo n.º 2
0
        /// <summary>
        /// Evaluates one video frame with the classification model and shows the
        /// highest-scoring prediction in StatusBlock; re-enables ButtonRun when done.
        /// </summary>
        /// <param name="frame">Frame to classify; ignored when null.</param>
        private async Task EvaluateVideoFrameAsync(VideoFrame frame)
        {
            if (frame != null)
            {
                try
                {
                    _stopwatch.Restart();
                    Input inputData = new Input()
                    {
                        data = ImageFeatureValue.CreateFromVideoFrame(frame)
                    };
                    var results = await _model.EvaluateAsync(inputData);

                    var loss   = results.loss.ToList();
                    var labels = results.classLabel;

                    // Fixed: the original called loss.FirstOrDefault().ToList(),
                    // which throws a NullReferenceException when the model returns
                    // no loss entries. Guard before dereferencing.
                    var scores = loss.FirstOrDefault();
                    string message;
                    if (scores != null && scores.Count > 0)
                    {
                        // Highest-scoring (label, probability) pair wins.
                        var possibleCat = scores.OrderByDescending(x => x.Value).First();
                        message = $"Predictions: {possibleCat.Key} - {(possibleCat.Value * 100.0f).ToString("#0.00") + "%"}";
                    }
                    else
                    {
                        message = "Predictions: none";
                    }
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => StatusBlock.Text = message);
                }
                catch (Exception ex)
                {
                    Debug.WriteLine($"error: {ex.Message}");
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => StatusBlock.Text = $"error: {ex.Message}");
                }

                // Always re-enable the button, even after a failure.
                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => ButtonRun.IsEnabled = true);
            }
        }
Ejemplo n.º 3
0
        // Recognize-button handler: renders the InkCanvas strokes into a frame,
        // evaluates the ink-shapes model, and shows "<label> (<percentage>)".
        private async void recognizeButton_Click(object sender, RoutedEventArgs e)
        {
            // Capture the handwriting and bind it as model input.
            VideoFrame vf = await helper.GetHandWrittenImage(inkGrid);
            inkshapesInput.data = ImageFeatureValue.CreateFromVideoFrame(vf);

            // Run the model.
            inkshapesOutput = await modelGen.EvaluateAsync(inkshapesInput);

            var guessedTag = inkshapesOutput.classLabel.GetAsVectorView().First();
            var loss       = inkshapesOutput?.loss?.FirstOrDefault();

            float guessedPercentage = 0f;
            if (loss != null)
            {
                // Highest score among all candidate labels (0 when the map is empty).
                var topEntry = loss.OrderByDescending(pair => pair.Value).FirstOrDefault();
                guessedPercentage = topEntry.Value;
            }

            // Display the result.
            numberLabel.Text = $"{guessedTag} ({guessedPercentage.ToString("P")})";
        }
Ejemplo n.º 4
0
        // Camera frame handler: converts the frame to BGRA8, evaluates the model,
        // parses the output grid into boxes, and updates the fps text + overlays.
        private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
        {
            if (e?.VideoFrame?.SoftwareBitmap == null)
            {
                return;
            }

            // Model input requires BGRA8 premultiplied pixels; convert before binding.
            SoftwareBitmap softwareBitmap = SoftwareBitmap.Convert(e.VideoFrame.SoftwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
            VideoFrame     inputFrame     = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);

            _input.image = ImageFeatureValue.CreateFromVideoFrame(inputFrame);

            // Evaluate the model (timed for the fps readout below)
            _stopwatch = Stopwatch.StartNew();
            _output    = await _model.EvaluateAsync(_input);

            _stopwatch.Stop();

            IReadOnlyList <float> VectorImage = _output.grid.GetAsVectorView();

            float[] ImageAry = VectorImage.ToArray();

            // Convert the raw output grid into bounding boxes.
            _boxes = _parser.ParseOutputs(ImageAry);

            // UI updates must run on the dispatcher thread.
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() =>
            {
                TextBlockInformation.Text = $"{1000f / _stopwatch.ElapsedMilliseconds,4:f1} fps on Width {_canvasActualWidth} x Height {_canvasActualHeight}";
                DrawOverlays(e.VideoFrame);
            });

            //Debug.WriteLine(ImageList.ToString());
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Runs the SmartInk model on the given bitmap and returns the
        /// label-to-score map from the model's "loss" output.
        /// </summary>
        /// <param name="bitmap">Bitmap to classify.</param>
        /// <returns>Dictionary mapping each class label to its score.</returns>
        public async Task <IDictionary <string, float> > EvaluateAsync(SoftwareBitmap bitmap)
        {
            var videoFrame        = VideoFrame.CreateWithSoftwareBitmap(bitmap);
            var imageFeatureValue = ImageFeatureValue.CreateFromVideoFrame(videoFrame);
            var input             = new SmartInkModelInput()
            {
                data = imageFeatureValue
            };
            var output = new SmartInkModelOutput();

            // Bind the input and both output features before evaluation.
            _binding.Bind("data", input.data);
            _binding.Bind("classLabel", output.ClassLabel);
            _binding.Bind("loss", output.Loss);

            LearningModelEvaluationResult result = await _session.EvaluateAsync(_binding, "0");

            // NOTE(review): these 'as' casts yield null if the model's output
            // shapes ever change, and the indexing below would then throw —
            // confirm the model contract.
            output.ClassLabel = result.Outputs["classLabel"] as TensorString;
            output.Loss = result.Outputs["loss"] as IList <IDictionary <string, float> >;

            // Copy the first loss dictionary into a concrete Dictionary in one
            // step (replaces the original manual key-by-key copy loop).
            return new Dictionary <string, float>(output.Loss[0]);
        }
Ejemplo n.º 6
0
        // Runs the MNIST handwriting model on the frame and appends a result row
        // containing the predicted digit and the score for each digit 0-9.
        public async Task RecognizeAsync(VideoFrame image)
        {
            // Ensure the model is loaded before evaluating.
            await InitializeModelAsync();

            var output = await Model.EvaluateAsync(new HandwriteInput
            {
                Input3 = ImageFeatureValue.CreateFromVideoFrame(image),
            });

            // Ten scores, one per digit; the predicted digit is the argmax index.
            var scores = output.Plus214_Output_0.GetAsVectorView().ToList();
            var answer = scores.IndexOf(scores.Max());

            Results.Add(new RecognizedResult
            {
                Result    = answer.ToString(),
                Zero      = scores[0],
                One       = scores[1],
                Two       = scores[2],
                Three     = scores[3],
                Four      = scores[4],
                Five      = scores[5],
                Six       = scores[6],
                Seven     = scores[7],
                Eight     = scores[8],
                Nine      = scores[9],
                Timestamp = DateTimeOffset.Now,
            });
        }
Ejemplo n.º 7
0
        /// <summary>
        /// Per-frame handler: runs YOLOv2 on the incoming camera frame, parses the
        /// output grid into boxes, and updates the fps readout and overlays.
        /// </summary>
        private async void CameraHelper_FrameArrived(object sender, Microsoft.Toolkit.Uwp.Helpers.FrameEventArgs e)
        {
            if (e?.VideoFrame?.SoftwareBitmap == null)
            {
                return;
            }
            _stopwatch = Stopwatch.StartNew();
            ImageFeatureValue _image = ImageFeatureValue.CreateFromVideoFrame(e.VideoFrame);

            var input = new Yolov2Input
            {
                image = _image
            };
            // Fixed: the original blocked with .GetAwaiter().GetResult() inside an
            // async handler, which stalls the frame pump and risks deadlocking the
            // UI thread. Await the evaluation instead.
            var output = await _model.EvaluateAsync(input);

            _stopwatch.Stop();

            IReadOnlyList <float> vectorImage = output.grid.GetAsVectorView();

            // (Removed the unused imageList/maxIndex locals from the original.)
            _boxes = _parser.ParseOutputs(vectorImage.ToArray());

            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                // Fixed: the original interpolated the whole _cameraModel object
                // after "Height" instead of its Height property.
                TextBlockInformation.Text = $"{1000f / _stopwatch.ElapsedMilliseconds,4:f1} fps on Width {_cameraModel.Width} x Height {_cameraModel.Height}";
                DrawOverlays(e.VideoFrame);
            });
        }
Ejemplo n.º 8
0
        // Classifies a frame with the plane-detection model and reports a
        // yes/no verdict with the confidence percentage in txtStatus.
        private async Task EvaluateVideoFrameAsync(VideoFrame frame)
        {
            if (frame == null)
            {
                return;
            }

            try
            {
                var inputData = new PlanesInput
                {
                    data = ImageFeatureValue.CreateFromVideoFrame(frame)
                };
                var results = await planeModel.EvaluateAsync(inputData);

                var loss   = results.loss.ToList();
                var labels = results.classLabel;

                // Probability assigned to the "plane" class.
                float value   = loss.FirstOrDefault()["plane"];
                var   lossStr = (value * 100.0f).ToString("#0.00") + "%";

                // Anything above 75% confidence counts as a plane.
                bool isPlane = value > 0.75;

                txtStatus.Text = isPlane
                    ? $"Yes, it's a plane! Confidence: {lossStr}"
                    : $"No, it isn't a plane, I'm sorry. Confidence: {lossStr}";
            }
            catch (Exception ex)
            {
                Debug.WriteLine($"error: {ex.Message}");
                txtStatus.Text = $"error: {ex.Message}";
            }
        }
Ejemplo n.º 9
0
        // Loads the selected image file, shows a preview in UIPreviewImage, and
        // binds the image as the model's input. Best-effort: failures are logged
        // instead of propagating.
        private async Task imageBind()
        {
            UIPreviewImage.Source = null;

            try
            {
                SoftwareBitmap softwareBitmap;
                using (IRandomAccessStream stream = await selectedStorageFile.OpenAsync(FileAccessMode.Read))
                {
                    // Create the decoder from the stream
                    BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

                    // Get the SoftwareBitmap representation of the file in BGRA8 format
                    softwareBitmap = await decoder.GetSoftwareBitmapAsync();

                    softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                }
                // Display the image
                SoftwareBitmapSource imageSource = new SoftwareBitmapSource();
                await imageSource.SetBitmapAsync(softwareBitmap);

                UIPreviewImage.Source = imageSource;

                // Encapsulate the image within a VideoFrame to be bound and evaluated
                VideoFrame inputImage = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);

                // bind the input image
                ImageFeatureValue imageTensor = ImageFeatureValue.CreateFromVideoFrame(inputImage);
                input.data = imageTensor;
            }
            catch (Exception e)
            {
                // Fixed: the original empty catch silently swallowed every failure.
                // Keep the best-effort contract but record what went wrong.
                Debug.WriteLine($"imageBind failed: {e.Message}");
            }
        }
        // Fires when a stroke is completed on the InkCanvas: lazily loads the
        // MNIST model, evaluates the handwriting, and lists digits by score.
        private async void InkCanvas_StrokeCollected(object sender, InkCanvasStrokeCollectedEventArgs e)
        {
            // Load Mnist.onnx on first use (lazy initialization).
            if (_model == null)
            {
                var mnistModelFilePath = Path.Combine(
                    Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location),
                    "Mnist.onnx");
                var modelFile = await Windows.Storage.StorageFile.GetFileFromPathAsync(mnistModelFilePath);

                _model = await MnistModel.CreateFromStreamAsync(modelFile);
            }

            // Convert the handwritten content to an image and evaluate it with the model.
            var visual = await Helper.GetHandWrittenImageAsync(inkCanvas);

            var result = await _model.EvaluateAsync(new MnistInput
            {
                Input3 = ImageFeatureValue.CreateFromVideoFrame(visual),
            });

            // Parse the evaluation result and display the recognized digits,
            // ordered from highest to lowest score.
            var orderedScores = result.Plus214_Output_0.GetAsVectorView()
                                .Select((score, index) => (score, index))
                                .OrderByDescending(x => x.score);

            _results.Clear();
            foreach (var text in orderedScores.Select(x => $"{x.index}(score: {x.score})"))
            {
                _results.Add(text);
            }
        }
Ejemplo n.º 11
0
        // Evaluates an incoming MNIST frame, publishes the recognized digit via
        // NumberText, and logs the top label candidates with their confidences.
        private async void Controller_InputReady(object sender, VideoFrame args)
        {
            try
            {
                LogService.Write($"Evaluating model...");

                // Dispose the frame as soon as evaluation completes.
                using (args)
                {
                    var output = await _model.EvaluateAsync(new mnistInput
                    {
                        Input3 = ImageFeatureValue.CreateFromVideoFrame(args)
                    });

                    var scores = output.Plus214_Output_0.GetAsVectorView().ToList();

                    // The predicted digit is the index of the maximum score.
                    NumberText = scores.IndexOf(scores.Max()).ToString();

                    // Build a per-candidate confidence report for the log.
                    var topIndices = MLHelper.GetTopLabelIndices(scores);
                    var report     = Environment.NewLine;
                    foreach (var candidate in topIndices)
                    {
                        report += $"{candidate.LabelIndex}, Confidence: {candidate.Confidence}" + Environment.NewLine;
                    }
                    LogService.Write(report);
                }
            }
            catch (Exception ex)
            {
                // The WinML APIs are still in preview, so throw a visible exception so users can file a bug
                AppService.DisplayDialog(ex.GetType().Name, ex.Message);
                LogService.WriteException(ex);
            }
        }
Ejemplo n.º 12
0
        /// <summary>
        /// Lazily loads the ONNX model named by <paramref name="_modelFileName"/>
        /// from the app package, binds <paramref name="_inputFrame"/> as input, and
        /// evaluates the model once. On failure the cached model is reset so the
        /// next call reloads it.
        /// </summary>
        private async Task LoadAndEvaluateModelAsync(VideoFrame _inputFrame, string _modelFileName)
        {
            // (Removed the original's unused _outputFrame local.)
            LearningModelBinding _binding = null;
            LearningModelSession _session;

            try
            {
                // Load and create the model only once; reuse it on later calls.
                if (_model == null)
                {
                    var modelFile =
                        await StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///{_modelFileName}"));

                    _model = await LearningModel.LoadFromStorageFileAsync(modelFile);
                }

                // Create the evaluation session with the model
                _session = new LearningModelSession(_model);

                // Get input and output features of the model
                var inputFeatures  = _model.InputFeatures.ToList();
                var outputFeatures = _model.OutputFeatures.ToList();

                // Create binding and then bind input/output features
                _binding = new LearningModelBinding(_session);

                // First tensor-kind feature on each side is assumed to be the one
                // we want — NOTE(review): confirm for multi-input models.
                _inputImageDescriptor =
                    inputFeatures.FirstOrDefault(feature => feature.Kind == LearningModelFeatureKind.Tensor) as TensorFeatureDescriptor;

                _outputTensorDescriptor =
                    outputFeatures.FirstOrDefault(feature => feature.Kind == LearningModelFeatureKind.Tensor) as TensorFeatureDescriptor;

                TensorFloat       outputTensor = TensorFloat.Create(_outputTensorDescriptor.Shape);
                ImageFeatureValue imageTensor  = ImageFeatureValue.CreateFromVideoFrame(_inputFrame);

                // Bind inputs + outputs
                _binding.Bind(_inputImageDescriptor.Name, imageTensor);
                _binding.Bind(_outputTensorDescriptor.Name, outputTensor);

                // Evaluate and get the results
                var results = await _session.EvaluateAsync(_binding, "test");

                Debug.WriteLine("ResultsEvaluated: " + results.ToString());

                // Copy the output tensor into a managed list (replaces the
                // original manual element-copy loop). NOTE(review): the list is
                // never consumed — presumably a placeholder for downstream
                // processing; confirm before removing.
                var resultsList = outputTensor.GetAsVectorView().ToList();
            }
            catch (Exception ex)
            {
                Debug.WriteLine($"error: {ex.Message}");
                // Force a reload on the next call after any failure.
                _model = null;
            }
        }
Ejemplo n.º 13
0
        /// <summary>
        /// Detect objects from the given image.
        /// The input image must be 416x416.
        /// </summary>
        public async Task <IList <PredictionModel> > PredictImageAsync(VideoFrame image)
        {
            // Bind the frame to the model's "data" input and evaluate.
            this.binding.Bind("data", ImageFeatureValue.CreateFromVideoFrame(image));
            var result = await this.session.EvaluateAsync(this.binding, "");

            // Post-process the raw output tensor into prediction models.
            var rawOutput = result.Outputs["model_outputs0"] as TensorFloat;
            return Postprocess(rawOutput);
        }
Ejemplo n.º 14
0
        static async Task Main(string[] args)
        {
            var rootDir         = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
            var squeezeNetModel = SqueezeNetModel.CreateFromFilePath(Path.Combine(rootDir, "squeezenet1.0-9.onnx"));

            // Load labels from JSON
            var labels = new List <string>();

            foreach (var kvp in JsonSerializer.Deserialize <Dictionary <string, string> >(File.ReadAllText(Path.Combine(rootDir, "Labels.json"))))
            {
                labels.Add(kvp.Value);
            }

            if (args.Length < 1)
            {
                return;
            }

            var filePath = args[0];

            // Open image file
            SqueezeNetOutput output;

            using (var fileStream = File.OpenRead(filePath))
            {
                // Convert from FileStream to ImageFeatureValue
                var decoder = await BitmapDecoder.CreateAsync(fileStream.AsRandomAccessStream());

                using var softwareBitmap = await decoder.GetSoftwareBitmapAsync();

                using var inputImage = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
                var imageTensor = ImageFeatureValue.CreateFromVideoFrame(inputImage);

                output = await squeezeNetModel.EvaluateAsync(new SqueezeNetInput
                {
                    data_0 = imageTensor
                });
            }

            // Get result, which is a list of floats with all the probabilities for all 1000 classes of SqueezeNet
            var resultTensor = output.softmaxout_1;
            var resultVector = resultTensor.GetAsVectorView();

            // Order the 1000 results with their indexes to know which class is the highest ranked one
            List <(int index, float p)> results = new List <(int, float)>();

            for (int i = 0; i < resultVector.Count; i++)
            {
                results.Add((index: i, p: resultVector.ElementAt(i)));
            }
            results.Sort((a, b) => a.p switch
            {
                var p when p <b.p => 1,
                              var p when p> b.p => - 1,
                _ => 0
            });
Ejemplo n.º 15
0
        /// <summary>
        /// Lazily loads the GoogLeNet Places model, evaluates the frame, and shows
        /// the top scene label plus per-label probabilities in the UI.
        /// </summary>
        private async Task EvaluteImageAsync(VideoFrame videoFrame)
        {
            // Start time is used to report evaluation duration in the UI.
            var startTime = DateTime.Now;

            // Load the model on first use.
            if (model == null)
            {
                var modelFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Model/GoogLeNetPlaces.onnx"));

                if (modelFile != null)
                {
                    model = new GoogLeNetPlacesModel();
                    await MLHelper.CreateModelAsync(modelFile, model);
                }
            }
            var input = new GoogLeNetPlacesInput()
            {
                sceneImage = ImageFeatureValue.CreateFromVideoFrame(videoFrame)
            };

            try
            {
                var res = await model.EvaluateAsync(input) as GoogLeNetPlacesOutput;

                if (res != null)
                {
                    // Convert the label->probability map into percentage results,
                    // sorted highest first.
                    var results = new List <LabelResult>();
                    if (res.sceneLabelProbs != null)
                    {
                        var dict = res.sceneLabelProbs.FirstOrDefault();
                        foreach (var kv in dict)
                        {
                            results.Add(new LabelResult
                            {
                                Label  = kv.Key,
                                Result = (float)Math.Round(kv.Value * 100, 2)
                            });
                        }
                        results.Sort((p1, p2) =>
                        {
                            return(p2.Result.CompareTo(p1.Result));
                        });
                    }
                    // UI updates must run on the dispatcher thread.
                    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.High, () =>
                    {
                        var places                   = res.sceneLabel.GetAsVectorView().ToArray();
                        outputText.Text              = places.FirstOrDefault();
                        resultList.ItemsSource       = results;
                        previewControl.EvalutionTime = (DateTime.Now - startTime).TotalSeconds.ToString();
                    });
                }
            }
            catch (Exception ex)
            {
                await AlertHelper.ShowMessageAsync(ex.ToString());
            }
        }
Ejemplo n.º 16
0
        /// <summary>
        /// Detect objects from the given image.
        /// The input image must be 416x416.
        /// </summary>
        public async Task <IList <PredictionModel> > PredictImageAsync(VideoFrame image)
        {
            // Create a fresh binding for this evaluation.
            var binding = new LearningModelBinding(session);
            binding.Bind("data", ImageFeatureValue.CreateFromVideoFrame(image));

            var result = await session.EvaluateAsync(binding, "0");
            return Postprocess(result.Outputs["model_outputs0"] as TensorFloat);
        }
        /// <summary>
        /// Evaluates the multi-object detection model on a pre-tensorized image
        /// and wraps the "grid" output in a typed result object.
        /// </summary>
        public async Task <MultiObjectDetectionModelv8Output> EvaluateAsync(ImageFeatureValue image)
        {
            binding.Bind("image", image);
            var result = await session.EvaluateAsync(binding, "0");

            // Package the raw grid tensor into the strongly-typed output.
            return new MultiObjectDetectionModelv8Output
            {
                Grid = result.Outputs["grid"] as TensorFloat
            };
        }
Ejemplo n.º 18
0
        /// <summary>
        /// Synchronously loads the image at _imagePath and converts it into an
        /// ImageFeatureValue (BGRA8, premultiplied alpha) ready for model binding.
        /// </summary>
        private static ImageFeatureValue LoadImageFile()
        {
            // Open and decode the file (async WinRT calls unwrapped via AsyncHelper).
            StorageFile         imageFile = AsyncHelper(StorageFile.GetFileFromPathAsync(_imagePath));
            IRandomAccessStream stream    = AsyncHelper(imageFile.OpenReadAsync());
            BitmapDecoder       decoder   = AsyncHelper(BitmapDecoder.CreateAsync(stream));

            // Normalize the pixel format before wrapping in a VideoFrame.
            SoftwareBitmap bitmap = AsyncHelper(decoder.GetSoftwareBitmapAsync());
            bitmap = SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

            VideoFrame frame = VideoFrame.CreateWithSoftwareBitmap(bitmap);
            return ImageFeatureValue.CreateFromVideoFrame(frame);
        }
Ejemplo n.º 19
0
        /// <summary>
        /// Detect objects from the given image.
        /// The input image must be 416x416.
        /// </summary>
        public async Task <IList <PredictionModel> > PredictImageAsync(VideoFrame image)
        {
            var output   = new modelOutput();
            var bindings = new LearningModelBinding(Session);

            // Bind both the input image and the pre-allocated output tensor.
            bindings.Bind("data", ImageFeatureValue.CreateFromVideoFrame(image));
            bindings.Bind("model_outputs0", output.Model_outputs0);

            var result = await Session.EvaluateAsync(bindings, "0");

            // The bound output tensor now holds the raw predictions.
            return Postprocess(output.Model_outputs0);
        }
Ejemplo n.º 20
0
        // Recognize-button handler: captures the InkCanvas contents as a frame,
        // evaluates the model, and shows the first predicted class label.
        private async void recognizeButton_Click(object sender, RoutedEventArgs e)
        {
            // Bind model input with contents from the InkCanvas.
            VideoFrame handwriting = await helper.GetHandWrittenImage(inkGrid);
            ModelInput.data = ImageFeatureValue.CreateFromVideoFrame(handwriting);

            // Evaluate the model and display the result.
            ModelOutput = await ModelGen.EvaluateAsync(ModelInput);
            numberLabel.Text = ModelOutput.classLabel.GetAsVectorView()[0];
        }
Ejemplo n.º 21
0
        // Runs a face-identity ONNX model over the frame and returns the recognized
        // identity text (empty string when no frame is supplied or on any failure).
        public async Task <string> ObtenerIdentidadOnnX(VideoFrame videoFrame, LearningModelSession _session)
        {
            identidadEncontradaTexto = string.Empty;
            if (videoFrame != null)
            {
                try
                {
                    LearningModelBinding binding     = new LearningModelBinding(_session);
                    ImageFeatureValue    imageTensor = ImageFeatureValue.CreateFromVideoFrame(videoFrame);
                    binding.Bind("data", imageTensor);
                    int ticks = Environment.TickCount;

                    // Process the frame with the model
                    var results = await _session.EvaluateAsync(binding, $"Run { ++_runCount } ");

                    ticks = Environment.TickCount - ticks;
                    var          label            = results.Outputs["classLabel"] as TensorString;
                    var          resultVector     = label.GetAsVectorView();
                    // Top-3 accumulators — currently unused (see NOTE below).
                    List <float> topProbabilities = new List <float>()
                    {
                        0.0f, 0.0f, 0.0f
                    };
                    List <int> topProbabilityLabelIndexes = new List <int>()
                    {
                        0, 0, 0
                    };
                    // SqueezeNet returns a list of 1000 options, with probabilities for each, loop through all
                    // NOTE(review): the inner j-loop assigns the same value three times per i,
                    // so the net effect is simply keeping the LAST element of resultVector.
                    // The commented-out block below suggests a top-3 probability ranking was
                    // intended — confirm the intent and restore it if so.
                    for (int i = 0; i < resultVector.Count(); i++)
                    {
                        // is it one of the top 3?
                        for (int j = 0; j < 3; j++)
                        {
                            identidadEncontradaTexto = resultVector[i].ToString();

                            //if (resultVector[i] > topProbabilities[j])
                            //{
                            //    topProbabilityLabelIndexes[j] = i;
                            //    topProbabilities[j] = resultVector[i];
                            //    break;
                            //}
                        }
                    }
                }

                catch (Exception ex)
                {
                    // NOTE(review): the exception is swallowed and an empty string
                    // returned — consider at least logging ex before shipping.
                    identidadEncontradaTexto = "";
                }
            }
            return(identidadEncontradaTexto);
        }
Ejemplo n.º 22
0
        // VSTO add-in startup: pastes the sample image into a RichTextBox, loads
        // the ONNX model, evaluates the image synchronously, and shows the results
        // in a modal form.
        private void ThisAddIn_Startup(object sender, System.EventArgs e)
        {
            richTextBox1 = new RichTextBox();
            richTextBox1.Dock = DockStyle.Fill;

            richTextBox1.SelectionFont = new Font("Verdana", 12, FontStyle.Bold);
            richTextBox1.SelectionColor = Color.Red;

            // Show the image being classified by pasting it from the clipboard.
            Clipboard.SetImage(Image.FromFile(_imagePath));
            richTextBox1.Paste();

            // Load and create the model 
            outToLog($"Loading modelfile '{_modelPath}' on the '{_deviceName}' device");

            // TickCount deltas give a rough load/run timing for the log.
            int ticks = Environment.TickCount;
            _model = LearningModel.LoadFromFilePath(_modelPath);
            ticks = Environment.TickCount - ticks;
            outToLog($"model file loaded in { ticks } ticks");

            // Create the evaluation session with the model and device
            _session = new LearningModelSession(_model);

            outToLog("Getting color management mode...");
            ColorManagementMode colorManagementMode = GetColorManagementMode();

            outToLog("Loading the image...");
            ImageFeatureValue imageTensor = LoadImageFile(colorManagementMode);

            // create a binding object from the session
            outToLog("Binding...");
            LearningModelBinding binding = new LearningModelBinding(_session);
            // Bind to the model's first declared input feature.
            binding.Bind(_model.InputFeatures.ElementAt(0).Name, imageTensor);

            outToLog("Running the model...");
            ticks = Environment.TickCount;
            var results = _session.Evaluate(binding, "RunId");
            ticks = Environment.TickCount - ticks;
            outToLog($"model run took { ticks } ticks");

            // retrieve results from evaluation (first declared output feature)
            var resultTensor = results.Outputs[_model.OutputFeatures.ElementAt(0).Name] as TensorFloat;
            var resultVector = resultTensor.GetAsVectorView();

            PrintResults(resultVector);

            // Host the RichTextBox in a modal dialog.
            Form form1 = new Form();
            form1.Size = new Size(800, 800);
            form1.Controls.Add(richTextBox1);
            //form1.Show();
            form1.ShowDialog();
        }
Ejemplo n.º 23
0
        /// <summary>
        /// Loads an image from disk, lazily initializes the SqueezeNet model from
        /// <paramref name="modelPath"/>, evaluates it, and dispatches the top
        /// (probability, label) pair via the Result event.
        /// </summary>
        private async Task EvaluateImageAsync(string imagePath, string modelPath)
        {
            var selectedStorageFile = await StorageFile.GetFileFromPathAsync(imagePath);

            SoftwareBitmap softwareBitmap;

            using (var stream = await selectedStorageFile.OpenAsync(FileAccessMode.Read)) {
                // Create the decoder from the stream
                var decoder = await BitmapDecoder.CreateAsync(stream);

                // Get the SoftwareBitmap representation of the file in BGRA8 format
                softwareBitmap = await decoder.GetSoftwareBitmapAsync();

                softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8,
                                                        BitmapAlphaMode.Premultiplied);
            }

            // Encapsulate the image within a VideoFrame to be bound and evaluated
            var inputImage = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);

            // Lazily build the model/session/binding on first use.
            // (Fixed: removed the original's second "if (_model == null) return;"
            // check — it was dead code, _model is always non-null at that point.)
            if (_model == null)
            {
                var modelFile = await StorageFile.GetFileFromPathAsync(modelPath);

                _model = new SqueezeNetModel {
                    LearningModel = await LearningModel.LoadFromStorageFileAsync(modelFile)
                };
                _model.Session = new LearningModelSession(_model.LearningModel,
                                                          new LearningModelDevice(LearningModelDeviceKind.Default));
                _model.Binding = new LearningModelBinding(_model.Session);
            }

            var input = new SqueezeNetInput {
                Image = ImageFeatureValue.CreateFromVideoFrame(inputImage)
            };

            try {
                var output = (SqueezeNetOutput)await _model.EvaluateAsync(input);

                // Report the first (label, probability) entry.
                var(label, probability) = output.classLabelProbs.FirstOrDefault();
                DispatchEvent(Result, probability + ", " + label);
            }
            catch (Exception ex) {
                Trace(ex.Message, ex.StackTrace);
            }
        }
        // Rasterizes the InkCanvas strokes, rescales the image to 227x227, runs
        // the custom gesture model, and shows the predicted label in a dialog.
        private async void Button_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
        {
            CanvasDevice       device       = CanvasDevice.GetSharedDevice();
            CanvasRenderTarget renderTarget = new CanvasRenderTarget(device, (int)inkDataCanvas.ActualWidth, (int)inkDataCanvas.ActualHeight, 96);

            // Draw the strokes onto a black background.
            using (var ds = renderTarget.CreateDrawingSession())
            {
                ds.Clear(Colors.Black);
                ds.DrawInk(inkDataCanvas.InkPresenter.StrokeContainer.GetStrokes());
            }

            using (var ms = new InMemoryRandomAccessStream())
            {
                // Save the rendering as JPEG into the in-memory stream.
                await renderTarget.SaveAsync(ms, CanvasBitmapFileFormat.Jpeg, 1);

                await ms.FlushAsync();

                var decoder = await BitmapDecoder.CreateAsync(ms);

                var img = await decoder.GetSoftwareBitmapAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore);

                // Re-encode at 227x227 — the size the gesture model expects —
                // reusing the same in-memory stream for the scaled image.
                var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);

                encoder.BitmapTransform.ScaledHeight = 227;
                encoder.BitmapTransform.ScaledWidth  = 227;
                encoder.SetSoftwareBitmap(img);
                await encoder.FlushAsync();

                // Decode the scaled image back into a BGRA8 bitmap for binding.
                decoder = await BitmapDecoder.CreateAsync(ms);

                img = await decoder.GetSoftwareBitmapAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore);

                img = SoftwareBitmap.Convert(img, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore);
                var model = await CustomGestureModel.CreateFromStreamAsync(await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///CustomGesture.onnx")));

                var output = await model.EvaluateAsync(new CustomGestureInput
                {
                    data = ImageFeatureValue.CreateFromVideoFrame(VideoFrame.CreateWithSoftwareBitmap(img))
                });

                if (output != null)
                {
                    // Show the first predicted label on the UI thread.
                    var res   = output.classLabel.GetAsVectorView().ToList();
                    var label = res.FirstOrDefault();
                    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                    {
                        await new MessageDialog(label).ShowAsync();
                    });
                }
            }
        }
Ejemplo n.º 25
0
        /// <summary>
        /// Detects the largest face in the most recent camera frame, crops to it,
        /// and runs the FER emotion-recognition model on the crop.
        /// </summary>
        /// <returns>The label (from <c>labels</c>) with the highest model score.</returns>
        private async Task <string> DetectEmotionWithWinML()
        {
            var videoFrame = lastFrame;

            // Lazily create the face detector on first use.
            if (faceDetector == null)
            {
                faceDetector = await FaceDetector.CreateAsync();
            }

            var detectedFaces = await faceDetector.DetectFacesAsync(videoFrame.SoftwareBitmap);

            if (detectedFaces != null && detectedFaces.Any())
            {
                // Use the largest detected face (by bounding-box area).
                var face = detectedFaces.OrderByDescending(s => s.FaceBox.Height * s.FaceBox.Width).First();
                using (var randomAccessStream = new InMemoryRandomAccessStream())
                {
                    var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, randomAccessStream);

                    // NOTE(review): converting to Rgba16 before encoding — presumably what
                    // the FaceDetector/encoder round-trip requires here; confirm the FER
                    // model's expected pixel format before changing.
                    var softwareBitmap = SoftwareBitmap.Convert(videoFrame.SoftwareBitmap, BitmapPixelFormat.Rgba16);
                    Debug.WriteLine(softwareBitmap.BitmapPixelFormat);
                    encoder.SetSoftwareBitmap(softwareBitmap);
                    // Crop to the face via the encoder's bounds transform.
                    encoder.BitmapTransform.Bounds = new BitmapBounds
                    {
                        X      = face.FaceBox.X,
                        Y      = face.FaceBox.Y,
                        Width  = face.FaceBox.Width,
                        Height = face.FaceBox.Height
                    };

                    await encoder.FlushAsync();

                    // Decode the cropped image back out of the in-memory stream.
                    var decoder = await BitmapDecoder.CreateAsync(randomAccessStream);

                    var croppedImage = await decoder.GetSoftwareBitmapAsync(softwareBitmap.BitmapPixelFormat, softwareBitmap.BitmapAlphaMode);

                    // From here on, evaluate the face crop instead of the full frame.
                    videoFrame = VideoFrame.CreateWithSoftwareBitmap(croppedImage);
                }
            }

            // Run the emotion model (falls back to the uncropped frame if no face was found).
            var input   = ImageFeatureValue.CreateFromVideoFrame(videoFrame);
            var emotion = await model.EvaluateAsync(new FER_Emotion_RecognitionInput()
            {
                Input3 = input
            });

            // Pick the label whose output score is highest.
            var list  = new List <float>(emotion.Plus692_Output_0.GetAsVectorView());
            var index = list.IndexOf(list.Max());
            var label = labels[index];

            return(label);
        }
Ejemplo n.º 26
0
        /// <summary>
        /// Downloads the image at the URL in <c>tbUrl</c>, runs the bear
        /// classification model on it, and displays the ranked results and a
        /// preview of the image.
        /// </summary>
        /// <param name="sender">The clicked button (unused).</param>
        /// <param name="e">Routed event args (unused).</param>
        private async void BtRecognize_Click(object sender, RoutedEventArgs e)
        {
            tbResult.Text     = string.Empty;
            imgPreview.Source = null;

            try
            {
                // Download the image. The original leaked the HttpClient, response
                // and stream; dispose them deterministically here. (A shared static
                // HttpClient would be better still, but that needs a field outside
                // this handler.)
                Uri uri = new Uri(tbUrl.Text);
                using (var httpClient = new HttpClient())
                using (var response = await httpClient.GetAsync(uri))
                using (var stream = (await response.Content.ReadAsStreamAsync()).AsRandomAccessStream())
                {
                    var decoder = await BitmapDecoder.CreateAsync(stream);

                    var softwareBitmap = await decoder.GetSoftwareBitmapAsync();

                    // Build the input format the model expects.
                    VideoFrame        videoFrame        = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
                    ImageFeatureValue imageFeatureValue = ImageFeatureValue.CreateFromVideoFrame(videoFrame);

                    // Inference.
                    var output = await classificationModel.EvaluateAsync(new bearInput()
                    {
                        data = imageFeatureValue
                    });

                    // Parse the results: top label first, then every class sorted by score.
                    var resultDescend = output.loss[0].OrderByDescending(p => p.Value);
                    var name          = output.classLabel.GetAsVectorView()[0];

                    var stringResult = new StringBuilder();
                    stringResult.AppendLine(name);
                    stringResult.AppendLine();
                    foreach (var kvp in resultDescend)
                    {
                        stringResult.AppendLine(string.Format("{0}\t: {1}%", kvp.Key, kvp.Value));
                    }

                    tbResult.Text = stringResult.ToString();

                    // Show the downloaded image in the preview control (BGRA8
                    // premultiplied is what SoftwareBitmapSource requires).
                    var sbs          = new SoftwareBitmapSource();
                    var imagePreview = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                    await sbs.SetBitmapAsync(imagePreview);

                    imgPreview.Source = sbs;
                }
            }
            catch (Exception ex)
            {
                MessageDialog a = new MessageDialog(String.Format("读取图片错误:\n{0}", ex.ToString()));
                await a.ShowAsync();
            }
        }
Ejemplo n.º 27
0
        /// <summary>
        /// Runs the classification model on one camera frame and, when a class
        /// scores above 0.7, posts an alert entry (image + label + confidence)
        /// to the UI list and plays a sound.
        /// </summary>
        /// <param name="inputImage">Frame to classify; its SoftwareBitmap is used for the alert thumbnail.</param>
        private async void EvaluateVideoFrameAsync(VideoFrame inputImage)
        {
            elaborate = true;
            try
            {
                // Center-crop to the 227x227 input the model expects.
                ModelInput.data = ImageFeatureValue.CreateFromVideoFrame(await CenterCropImageAsync(inputImage, 227, 227));

                // Evaluate the model.
                ModelOutput = await ModelGen.EvaluateAsync(ModelInput);

                // If no results, bail out. The finally block still clears the busy
                // flag — the original returned here leaving `elaborate` stuck true.
                if (ModelOutput.Loss == null || ModelOutput.Loss.Count == 0)
                {
                    return;
                }
                var loss = ModelOutput.Loss.ToList()[0];

                // Highest-scoring class.
                var maxValue = loss.Values.Max();

                if (maxValue > 0.7)
                {
                    lastAlert = DateTime.Now;

                    var pos   = loss.Values.ToList().IndexOf(maxValue);
                    var label = loss.Keys.ToList().ElementAt(pos);

                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal,
                                              async () =>
                    {
                        // Current frame as the alert thumbnail. Awaited (the
                        // original fired-and-forgot SetBitmapAsync) so the bitmap
                        // is fully set before the entry is displayed.
                        var source = new SoftwareBitmapSource();
                        await source.SetBitmapAsync(inputImage.SoftwareBitmap);

                        // Create the alarm entry.
                        var lossStr = new ResultModel
                        {
                            Name    = label,
                            Percent = maxValue * 100.0f,
                            Image   = source
                        };

                        resultsList.Insert(0, lossStr);
                        PlaySound();
                    });
                }
            }
            finally
            {
                // Always release the busy flag, even on early return or exception.
                elaborate = false;
            }
        }
Ejemplo n.º 28
0
        /// <summary>
        /// Lets the user pick a photo, runs the celebrity-recognition model on
        /// it, and displays the predicted name plus the processing time.
        /// </summary>
        /// <param name="sender">The clicked button (unused).</param>
        /// <param name="e">Routed event args (unused).</param>
        private async void Button_Click(object sender, RoutedEventArgs e)
        {
            var openPicker = new Windows.Storage.Pickers.FileOpenPicker();

            openPicker.ViewMode = Windows.Storage.Pickers.PickerViewMode.Thumbnail;
            openPicker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.PicturesLibrary;
            openPicker.FileTypeFilter.Add(".jpg");
            openPicker.FileTypeFilter.Add(".jpeg");
            openPicker.FileTypeFilter.Add(".png");

            Windows.Storage.StorageFile pickedFile = await openPicker.PickSingleFileAsync();

            // Guard: the user dismissed the picker without choosing a file.
            if (pickedFile == null)
            {
                txtFilePath.Text = "Operation cancelled.";
                return;
            }

            var timer = Stopwatch.StartNew();
            txtFilePath.Text = pickedFile.Path;

            // Decode the picked file into a BGRA8 premultiplied bitmap.
            SoftwareBitmap photo;
            using (var fileStream = await pickedFile.OpenAsync(FileAccessMode.Read))
            {
                var frameDecoder = await BitmapDecoder.CreateAsync(fileStream);

                photo = await frameDecoder.GetSoftwareBitmapAsync();
                photo = SoftwareBitmap.Convert(photo, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
            }

            // Show the chosen photo in the preview control.
            var previewSource = new SoftwareBitmapSource();
            await previewSource.SetBitmapAsync(photo);

            imgCeleb.Source = previewSource;

            // Crop to the region the model expects, then run inference.
            VideoFrame frame = VideoFrame.CreateWithSoftwareBitmap(photo);
            frame = await GetCropedImage(frame);

            celebInput.data = ImageFeatureValue.CreateFromVideoFrame(frame);
            celebOutput     = await modelGen.EvaluateAsync(celebInput);

            // First entry of the classLabel vector is the predicted name.
            txtcelebName.Text = celebOutput.classLabel.GetAsVectorView()[0];
            timer.Stop();
            txtProcTime.Text = $"{timer.Elapsed}";
            Debug.WriteLine($"process time = {timer.Elapsed}");
        }
Ejemplo n.º 29
0
        /// <summary>
        /// Decodes an image stream, normalizes it to BGRA8 premultiplied, and
        /// wraps it as an ImageFeatureValue ready for WinML model binding.
        /// </summary>
        /// <param name="stream">Random-access stream containing an encoded image.</param>
        /// <returns>The decoded image as a model input feature.</returns>
        public static async Task <ImageFeatureValue> GetAsImageFeatureValue(this IRandomAccessStream stream)
        {
            // Decode the stream contents.
            BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

            // Normalize to the BGRA8/premultiplied layout used for model input.
            SoftwareBitmap bitmap = SoftwareBitmap.Convert(
                await decoder.GetSoftwareBitmapAsync(),
                BitmapPixelFormat.Bgra8,
                BitmapAlphaMode.Premultiplied);

            // Wrap in a VideoFrame and expose it as an image feature value.
            return ImageFeatureValue.CreateFromVideoFrame(VideoFrame.CreateWithSoftwareBitmap(bitmap));
        }
Ejemplo n.º 30
0
        /// <summary>
        /// Classifies a photo as fries / not-fries using the local ONNX model.
        /// </summary>
        /// <param name="photo">Stream containing an encoded image.</param>
        /// <returns>The tag parsed from the model's top class label.</returns>
        public async Task <FriesOrNotFriesTag> DetectAsync(Stream photo)
        {
            await InitializeModelAsync();

            // Decode the incoming stream into a SoftwareBitmap for the model.
            var bitmapDecoder = await BitmapDecoder.CreateAsync(photo.AsRandomAccessStream());
            var bitmap = await bitmapDecoder.GetSoftwareBitmapAsync();

            // Bind the frame and evaluate.
            var input = new FriesOrNotFriesInput
            {
                data = ImageFeatureValue.CreateFromVideoFrame(VideoFrame.CreateWithSoftwareBitmap(bitmap)),
            };
            var output = await _model.EvaluateAsync(input);

            // The first class label is the predicted tag name.
            var label = output.classLabel.GetAsVectorView().FirstOrDefault();

            return Enum.Parse <FriesOrNotFriesTag>(label);
        }