Example #1
        public async Task <Inceptionv3_convertedOutput> Evaluate(StorageFile file)
        {
            Inceptionv3_convertedInput tensorInput = new Inceptionv3_convertedInput();

            byte[] image = await ResizedImage(file, INPUT_WIDTH, INPUT_HEIGHT);


            List <float> input = new List <float>();
            List <float> R     = new List <float>();
            List <float> G     = new List <float>();
            List <float> B     = new List <float>();

            for (int j = 0; j < INPUT_HEIGHT; j++)
            {
                for (int i = 0; i < INPUT_WIDTH; i++)
                {
                    var pixel = GetPixel(image, i, j, INPUT_WIDTH, INPUT_HEIGHT);
                    R.Add(pixel.R / 255f);
                    G.Add(pixel.G / 255f);
                    B.Add(pixel.B / 255f);
                }
            }
            // Channel-planar layout: all R values, then all G values, then all B values
            input.AddRange(R);
            input.AddRange(G);
            input.AddRange(B);
            tensorInput.input_1_0 = TensorFloat.CreateFromArray(new long[] { 1, 256, 256, 3 }, input.ToArray());
            return(await Model.EvaluateAsync(tensorInput));
        }
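This example relies on ResizedImage and GetPixel helpers that are not shown. A minimal sketch of GetPixel, assuming ResizedImage returns raw 32-bit BGRA pixel data (the helper name and parameters come from the call sites above; everything else is an assumption):

        private static Windows.UI.Color GetPixel(byte[] bgra, int x, int y, int width, int height)
        {
            // 4 bytes per pixel, stored B, G, R, A
            int offset = ((y * width) + x) * 4;

            return Windows.UI.Color.FromArgb(
                bgra[offset + 3],   // A
                bgra[offset + 2],   // R
                bgra[offset + 1],   // G
                bgra[offset]);      // B
        }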
Example #2
        private unsafe LearningModelBinding EvaluateContrastAndBrightnessSession(object input, object output)
        {
            var slope      = Math.Tan(ContrastMaxSlider.Value * Math.PI / 2);
            var yintercept = -255 * (ContrastMinSlider.Value * 2 - 1);

            if (yintercept < 0)
            {
                // it was the x-intercept
                yintercept = slope * yintercept;
            }

            var binding = new LearningModelBinding(contrastEffectSession_);

            binding.Bind("Input", input);
            binding.Bind("Slope", TensorFloat.CreateFromArray(new long[] { 1 }, new float[] { (float)slope }));
            binding.Bind("YIntercept", TensorFloat.CreateFromArray(new long[] { 1 }, new float[] { (float)yintercept }));

            var outputBindProperties = new PropertySet();

            outputBindProperties.Add("DisableTensorCpuSync", PropertyValue.CreateBoolean(true));
            binding.Bind("Output", output, outputBindProperties);

            EvaluateInternal(contrastEffectSession_, binding);

            return(binding);
        }
Example #3
        private async void RecogNumberFromInk()
        {
            // Load the model from a file
            var modelFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/mnist.onnx"));

            var model = await mnistModel.CreateFromStreamAsync(modelFile);

            // Prepare the input
            var inputArray = await GetInputDataFromInk();

            var inputTensor = TensorFloat.CreateFromArray(new List <long> {
                784
            }, inputArray);
            var modelInput = new mnistInput {
                port = inputTensor
            };

            // Run inference
            var result = await model.EvaluateAsync(modelInput);

            // Get the score for each digit
            var scoreList = result.dense3port.GetAsVectorView().ToList();

            // Pick the highest score from the output
            var max = scoreList.IndexOf(scoreList.Max());

            // Show the result in the control
            lbResult.Text = max.ToString();
        }
Example #4
        /// <summary>
        /// PreProcessing.
        /// Converts image data in SoftwareBitmap to TensorFloat.
        /// </summary>
        public static TensorFloat SoftwareBitmapToTensorFloat(SoftwareBitmap image)
        {
            int width  = image.PixelWidth;
            int height = image.PixelHeight;

            using (BitmapBuffer buffer = image.LockBuffer(BitmapBufferAccessMode.Read))
            {
                using (var reference = buffer.CreateReference())
                {
                    // Implementation Reference:
                    // https://github.com/Microsoft/Windows-Machine-Learning/issues/22
                    unsafe
                    {
                        ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacity);

                        long[]  shape      = { 1, 3, height, width };
                        float[] pCPUTensor = new float[3 * width * height];
                        // Assumes a 4-byte-per-pixel (e.g. BGRA8) bitmap; copy into channel-planar (CHW) order
                        for (int i = 0; i < capacity; i += 4)
                        {
                            int pixelInd = i / 4;
                            pCPUTensor[pixelInd] = (float)dataInBytes[i];
                            pCPUTensor[(height * width) + pixelInd]     = (float)dataInBytes[i + 1];
                            pCPUTensor[(height * width * 2) + pixelInd] = (float)dataInBytes[i + 2];
                        }

                        float[] processedTensor = NormalizeFloatArray(pCPUTensor);

                        TensorFloat tensorFloats = TensorFloat.CreateFromArray(shape, processedTensor);
                        return(tensorFloats);
                    }
                }
            }
        }
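The unsafe GetBuffer call above requires the standard IMemoryBufferByteAccess COM interop declaration (from System.Runtime.InteropServices, as referenced in the linked Windows-Machine-Learning issue), and the project must allow unsafe code. NormalizeFloatArray is project-specific and not shown here.

        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            // Exposes the raw byte buffer behind an IMemoryBufferReference
            void GetBuffer(out byte *buffer, out uint capacity);
        }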
Example #5
        public static async Task <TensorFloat> GetAsTensorFloat(this IRandomAccessStream stream,
                                                                int imageSize, float[] mean = null, float[] std = null)
        {
            BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

            var softwareBitmap = await decoder.GetSoftwareBitmapAsync();

            softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Premultiplied);
            WriteableBitmap innerBitmap = new WriteableBitmap(softwareBitmap.PixelWidth, softwareBitmap.PixelHeight);

            softwareBitmap.CopyToBuffer(innerBitmap.PixelBuffer);

            int channelSize = imageSize * imageSize;

            using (var context = innerBitmap.GetBitmapContext())
            {
                int[] src = context.Pixels;

                var normalized = new float[imageSize * imageSize * 3];

                for (var x = 0; x < imageSize; x++)
                {
                    for (var y = 0; y < imageSize; y++)
                    {
                        var   color = innerBitmap.GetPixel(y, x);
                        float r, g, b;

                        r = color.R;
                        g = color.G;
                        b = color.B;

                        if (mean != null && std != null)
                        {
                            r /= 255f;
                            g /= 255f;
                            b /= 255f;

                            r = (r - mean[0]) / std[0];
                            g = (g - mean[1]) / std[1];
                            b = (b - mean[2]) / std[2];
                        }

                        var indexChannelR = (x * imageSize) + y;
                        var indexChannelG = indexChannelR + channelSize;
                        var indexChannelB = indexChannelG + channelSize;

                        normalized[indexChannelR] = r;
                        normalized[indexChannelG] = g;
                        normalized[indexChannelB] = b;
                    }
                }
                return(TensorFloat.CreateFromArray(new List <long>()
                {
                    1, 3, imageSize, imageSize
                }, normalized));
            }
        }
Example #6
        public IEnumerable <float> EstimateDepth(float[] inTensor)
        {
            var task = Task.Run(async() =>
            {
                return(await onnxModel.EvaluateAsync(
                           TensorFloat.CreateFromArray(new long[] { 1, 3, InputHeight, InputWidth }, inTensor)));
            });

            return(task.Result.GetAsVectorView());
        }
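Blocking on task.Result as above can deadlock when called from a UI thread. An async variant of the same call (a sketch, assuming the same onnxModel wrapper and input shape as the example):

        public async Task <IEnumerable <float> > EstimateDepthAsync(float[] inTensor)
        {
            // Same evaluation as above, but awaited instead of blocked on
            var output = await onnxModel.EvaluateAsync(
                             TensorFloat.CreateFromArray(new long[] { 1, 3, InputHeight, InputWidth }, inTensor));

            return output.GetAsVectorView();
        }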
Example #7
        private async void PredictHangul(VideoFrame inputimage)
        {
            Stopwatch sw = new Stopwatch();

            sw.Start();

            // Convert to Gray8
            SoftwareBitmap bitgray8 = SoftwareBitmap.Convert(inputimage.SoftwareBitmap, BitmapPixelFormat.Gray8);
            var            buff     = new byte[64 * 64];

            bitgray8.CopyToBuffer(buff.AsBuffer());
            var fbuff = new float[4096];

            for (int i = 0; i < 4096; i++)
            {
                fbuff[i] = (float)buff[i] / 255;
            }

            long[] shape = { 1, 4096 };
            charInput.input00 = TensorFloat.CreateFromArray(shape, fbuff);

            var dummy = new float[1];

            long[] dummy_shape = { };
            charInput.keep_prob = TensorFloat.CreateFromArray(dummy_shape, dummy);

            //Evaluate the model
            charOuput = await charModel.EvaluateAsync(charInput);

            //Convert output to datatype
            IReadOnlyList <float> VectorImage = charOuput.output00.GetAsVectorView();
            IList <float>         ImageList   = VectorImage.ToList();

            //Display top results
            var topPred = ImageList.Select((value, index) => new { index, value })
                          .ToDictionary(pair => pair.index, pair => pair.value)
                          .OrderByDescending(key => key.Value)
                          .ToArray();

            string topLabeltxt = "";

            for (int i = 1; i < 6; i++)
            {
                var item = topPred[i];
                Debug.WriteLine($"{item.Key}, {item.Value}, {charLabel[item.Key]}");
                topLabeltxt += $"{charLabel[item.Key]} ";
            }

            numberLabel.Text = charLabel[topPred[0].Key];
            topLabel.Text    = topLabeltxt;

            Debug.WriteLine($"process time = {sw.Elapsed}");
        }
Example #8
        private static TensorFloat Normalize(byte[] src, System.Numerics.Vector3 mean, System.Numerics.Vector3 std, uint width, uint height)
        {
            var normalized = new float[src.Length / 4 * 3];

            for (int i = 0; i < src.Length / 4; i++)
            {
                normalized[i * 3 + 0] = ((src[4 * i] / 255f) - mean.X) / std.X;
                normalized[i * 3 + 1] = ((src[4 * i + 1] / 255f) - mean.Y) / std.Y;
                normalized[i * 3 + 2] = ((src[4 * i + 2] / 255f) - mean.Z) / std.Z;
            }
            var shape = new List <long> {
                3, width, height
            };

            return(TensorFloat.CreateFromArray(shape, normalized));
        }
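A possible caller for Normalize, given a BGRA8 SoftwareBitmap named bitmap (the ImageNet mean/std values are an assumption; use whatever statistics your model was trained with):

        byte[] pixels = new byte[4 * bitmap.PixelWidth * bitmap.PixelHeight];
        bitmap.CopyToBuffer(pixels.AsBuffer());

        TensorFloat input = Normalize(
            pixels,
            new System.Numerics.Vector3(0.485f, 0.456f, 0.406f),   // mean
            new System.Numerics.Vector3(0.229f, 0.224f, 0.225f),   // std
            (uint)bitmap.PixelWidth,
            (uint)bitmap.PixelHeight);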
Example #9
        internal String Evaluate()
        {
            // input tensor shape is [1x4]
            long[] shape = new long[2];
            shape[0] = 1;
            shape[1] = 4;

            // set up the input tensor
            float[] input_data = new float[4];
            input_data[0] = _sepal_length;
            input_data[1] = _sepal_width;
            input_data[2] = _petal_length;
            input_data[3] = _petal_width;
            TensorFloat tensor_float = TensorFloat.CreateFromArray(shape, input_data);

            // bind the tensor to "input"
            var binding = new LearningModelBinding(_session);

            binding.Bind("input", tensor_float);

            // evaluate
            var results = _session.Evaluate(binding, "");

            // get the results
            TensorFloat prediction      = (TensorFloat)results.Outputs.First().Value;
            var         prediction_data = prediction.GetAsVectorView();

            // find the highest predicted value
            int   max_index = 0;
            float max_value = 0;

            for (int i = 0; i < prediction_data.Count; i++)
            {
                var val = prediction_data.ElementAt(i);
                if (val > max_value)
                {
                    max_value = val;
                    max_index = i;
                }
            }

            // return the label corresponding to the highest predicted value
            return(_labels.ElementAt(max_index));
        }
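The fields used above (_session, _labels and the four feature values) are set up elsewhere. A minimal sketch of that setup using the WinML API (the file name and label strings are assumptions):

        private void LoadModel()
        {
            // Load the ONNX model and create the session used by Evaluate()
            var model = LearningModel.LoadFromFilePath(
                Path.Combine(AppContext.BaseDirectory, "iris.onnx"));

            _session = new LearningModelSession(model);
            _labels  = new List <string> { "Iris-setosa", "Iris-versicolor", "Iris-virginica" };
        }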
Example #10
        public override List <ONNXTensor> Run(IEnumerable <string> outputs, Dictionary <string, ONNXTensor> feedDict)
        {
            var binding = new LearningModelBinding(sess);

            foreach (var item in feedDict)
            {
                object tensor;
                if (!IsFP16)
                {
                    tensor = TensorFloat.CreateFromArray(item.Value.Shape, item.Value.Buffer);
                    if (IsGPU)
                    {
                        //TODO: Move SoftwareTensor to DX12Tensor
                        tensor = MoveToGPU((TensorFloat)tensor);
                    }
                }
                else
                {
                    tensor = TensorFloat16Bit.CreateFromArray(item.Value.Shape, item.Value.Buffer);
                }
                binding.Bind(item.Key, tensor);
            }

            var result = sess.Evaluate(binding, $"eval{++evalCount}");

            var ret = new List <ONNXTensor>();

            foreach (var item in outputs)
            {
                var tensor = result.Outputs[item] as TensorFloat;
                var vector = tensor.GetAsVectorView().ToArray();
                ret.Add(new ONNXTensor()
                {
                    Buffer = vector, Shape = tensor.Shape.ToArray()
                });
            }

            return(ret);
        }
Example #11
        private async void Button_Click(object sender, RoutedEventArgs e)
        {
            var shape      = new long[] { 1, 1 };
            var modelInput = new taxiFarePredInput()
            {
                PassengerCount = TensorFloat.CreateFromArray(shape, new float[] { 1f }),
                TripTime       = TensorFloat.CreateFromArray(shape, new float[] { 1140f }),
                TripDistance   = TensorFloat.CreateFromArray(shape, new float[] { 3.75f }),
                FareAmount     = TensorFloat.CreateFromArray(shape, new float[] { 0f }),
            };


            var modelFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/taxiFarePred.onnx"));

            var session = await taxiFarePredModel.CreateFromStreamAsync(modelFile);

            var modelOutput = await session.EvaluateAsync(modelInput);

            var score = modelOutput.Score0.GetAsVectorView();

            btn1.Content = score[0];
        }
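taxiFarePredInput and taxiFarePredModel are mlgen-style wrapper classes generated from taxiFarePred.onnx. A rough sketch of the input wrapper the example assumes (the actual generated members may differ):

        // Hypothetical shape of the generated input wrapper; not the actual generated file
        public sealed class taxiFarePredInput
        {
            public TensorFloat PassengerCount;
            public TensorFloat TripTime;
            public TensorFloat TripDistance;
            public TensorFloat FareAmount;
        }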
Example #12
        private async Task <List <float> > ArcFace(SoftwareBitmap softwareBitmap)
        {
            // Encapsulate the image within a VideoFrame to be bound and evaluated
            VideoFrame inputImage = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);

            int height = inputImage.SoftwareBitmap.PixelHeight;
            int width  = inputImage.SoftwareBitmap.PixelWidth;

            float[] data = new float[1 * 3 * ARC_FACE_INPUT * ARC_FACE_INPUT];

            byte[] imageBytes = new byte[4 * height * width];
            inputImage.SoftwareBitmap.CopyToBuffer(imageBytes.AsBuffer());

            int id = 0;

            // Walk the BGRA8 buffer one pixel (4 bytes) at a time; assumes the bitmap is ARC_FACE_INPUT x ARC_FACE_INPUT
            for (int i = 0; i < imageBytes.Length; i += 4)
            {
                float blue  = (float)imageBytes[i];
                float green = (float)imageBytes[i + 1];
                float red   = (float)imageBytes[i + 2];

                data[id++] = blue;
                data[id++] = green;
                data[id++] = red;
            }

            _arcFaceInput.data = TensorFloat.CreateFromArray(new List <long> {
                1, 3, ARC_FACE_INPUT, ARC_FACE_INPUT
            }, data);

            // Process the frame with the model
            _arcFaceOutput = await _arcFaceModel.EvaluateAsync(_arcFaceInput);

            IReadOnlyList <float> vectorImage = _arcFaceOutput.fc1.GetAsVectorView();

            return(vectorImage.ToList());
        }
Example #13
        static async Task <int> Main(string[] args)
        {
            try
            {
                //
                // Parse options
                //

                Options = new AppOptions();
                Options.Parse(args);

                if (Options.ShowList)
                {
                }
                if (Options.Exit)
                {
                    return(-1);
                }
                if (string.IsNullOrEmpty(Options.FileName))
                {
                    throw new ApplicationException("Please use --file to specify which file to use");
                }


                //
                // Init module client
                //

                if (Options.UseEdge)
                {
                    Log.WriteLine($"{AppOptions.AppName} module starting.");
                    await BlockTimer("Initializing Azure IoT Edge", async() => await InitEdge());
                }

                cts = new CancellationTokenSource();
                AssemblyLoadContext.Default.Unloading += (ctx) => cts.Cancel();
                Console.CancelKeyPress += (sender, cpe) => cts.Cancel();


                //
                // Load model
                //

                MLModel model = null;
                Console.WriteLine($"Loading model from: '{Options.ModelPath}', Exists: '{File.Exists(Options.ModelPath)}'");
                await BlockTimer($"Loading modelfile '{Options.ModelPath}' on the {(Options.UseGpu ? "GPU" : "CPU")}",
                                 async() =>
                {
                    var d    = Directory.GetCurrentDirectory();
                    var path = d + "\\" + Options.ModelPath;

                    StorageFile modelFile = await AsAsync(StorageFile.GetFileFromPathAsync(path));
                    model = await MLModel.CreateFromStreamAsync(modelFile);
                });


                do
                {
                    //
                    // Open file
                    //
                    var rows = new List <DataRow>();
                    try
                    {
                        using (var fs = new StreamReader(Options.FileName))
                        {
                            // Load all records from the file into a List<DataRow>
                            rows = new CsvHelper.CsvReader(fs).GetRecords <DataRow>().ToList();
                            Console.WriteLine($"Loaded {rows.Count} row(s)");
                        }
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e);
                    }
                    Console.WriteLine(rows);


                    //
                    // Main loop
                    //

                    foreach (var row in rows)
                    {
                        //
                        // Evaluate model
                        //

                        var inputShape = new long[2] {
                            1, 4
                        };
                        var inputFeatures = new float[4] {
                            row.Temperature, row.Pressure, row.Humidity, row.ExternalTemperature
                        };

                        MLModelVariable result    = null;
                        var             evalticks = await BlockTimer("Running the model",
                                                                     async() =>
                        {
                            result = await model.EvaluateAsync(new MLModelVariable()
                            {
                                Variable = TensorFloat.CreateFromArray(inputShape, inputFeatures)
                            });
                        });

                        //
                        // Print results
                        //

                        var message = new MessageBody
                        {
                            result = result.Variable.GetAsVectorView().First()
                        };
                        message.metrics.evaltimeinms = evalticks;
                        var json = JsonConvert.SerializeObject(message);
                        Log.WriteLineRaw($"Recognized {json}");

                        //
                        // Send results to Edge
                        //

                        if (Options.UseEdge)
                        {
                            var eventMessage = new Message(Encoding.UTF8.GetBytes(json));
                            await ioTHubModuleClient.SendEventAsync("resultsOutput", eventMessage);

                            // Let's not totally spam Edge :)
                            await Task.Delay(500);
                        }


                        Console.WriteLine("Waiting 1 second...");
                        Thread.Sleep(1000);
                    }
                }while (Options.RunForever && !cts.Token.IsCancellationRequested);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
                return(-1);
            }

            return(0);
        }
Example #14
        static SoftwareBitmap GetMelspectrogramFromSignal(
            IEnumerable <float> rawSignal,
            int batchSize    = 1,
            int windowSize   = 256,
            int dftSize      = 256,
            int hopSize      = 3,
            int nMelBins     = 1024,
            int samplingRate = 8192,
            int amplitude    = 5000
            )
        {
            float[] signal = rawSignal.ToArray();

            //Scale the signal by a given amplitude
            for (int i = 0; i < signal.Length; i++)
            {
                signal[i] = signal[i] * amplitude;
            }

            int signalSize      = signal.Length;
            var nDFT            = 1 + (signalSize - dftSize) / hopSize;
            var onesidedDftSize = (dftSize >> 1) + 1;

            long[] signalShape         = { batchSize, signalSize };
            long[] melSpectrogramShape = { batchSize, 1, nDFT, nMelBins };

            var builder = LearningModelBuilder.Create(13)
                          .Inputs.Add(LearningModelBuilder.CreateTensorFeatureDescriptor("Input.TimeSignal", TensorKind.Float, signalShape))
                          .Outputs.Add(LearningModelBuilder.CreateTensorFeatureDescriptor("Output.MelSpectrogram", TensorKind.Float, melSpectrogramShape))
                          .Operators.Add(new Operator("HannWindow", MicrosoftExperimentalDomain)
                                         .SetConstant("size", TensorInt64Bit.CreateFromArray(new List <long>(), new long[] { windowSize }))
                                         .SetOutput("output", "hann_window"))
                          .Operators.Add(new Operator("STFT", MicrosoftExperimentalDomain)
                                         .SetName("STFT_NAMED_NODE")
                                         .SetInput("signal", "Input.TimeSignal")
                                         .SetInput("window", "hann_window")
                                         .SetConstant("frame_length", TensorInt64Bit.CreateFromArray(new List <long>(), new long[] { dftSize }))
                                         .SetConstant("frame_step", TensorInt64Bit.CreateFromArray(new List <long>(), new long[] { hopSize }))
                                         .SetOutput("output", "stft_output"))
                          .Operators.Add(new Operator("ReduceSumSquare")
                                         .SetInput("data", "stft_output")
                                         .SetAttribute("axes", TensorInt64Bit.CreateFromArray(new List <long>()
            {
                1
            }, new long[] { 3 }))
                                         .SetAttribute("keepdims", TensorInt64Bit.CreateFromArray(new List <long>(), new long[] { 0 }))
                                         .SetOutput("reduced", "magnitude_squared"))
                          .Operators.Add(new Operator("Div")
                                         .SetInput("A", "magnitude_squared")
                                         .SetConstant("B", TensorFloat.CreateFromArray(new List <long>(), new float[] { dftSize }))
                                         .SetOutput("C", "power_frames"))
                          .Operators.Add(new Operator("MelWeightMatrix", MicrosoftExperimentalDomain)
                                         .SetConstant("num_mel_bins", TensorInt64Bit.CreateFromArray(new List <long>(), new long[] { nMelBins }))
                                         .SetConstant("dft_length", TensorInt64Bit.CreateFromArray(new List <long>(), new long[] { dftSize }))
                                         .SetConstant("sample_rate", TensorInt64Bit.CreateFromArray(new List <long>(), new long[] { samplingRate }))
                                         .SetConstant("lower_edge_hertz", TensorFloat.CreateFromArray(new List <long>(), new float[] { 0 }))
                                         .SetConstant("upper_edge_hertz", TensorFloat.CreateFromArray(new List <long>(), new float[] { (float)(samplingRate / 2.0) }))
                                         .SetOutput("output", "mel_weight_matrix"))
                          .Operators.Add(new Operator("Reshape")
                                         .SetInput("data", "power_frames")
                                         .SetConstant("shape", TensorInt64Bit.CreateFromArray(new List <long>()
            {
                2
            }, new long[] { batchSize * nDFT, onesidedDftSize }))
                                         .SetOutput("reshaped", "reshaped_output"))
                          .Operators.Add(new Operator("MatMul")
                                         .SetInput("A", "reshaped_output")
                                         .SetInput("B", "mel_weight_matrix")
                                         .SetOutput("Y", "mel_spectrogram"))
                          .Operators.Add(new Operator("Reshape")
                                         .SetInput("data", "mel_spectrogram")
                                         .SetConstant("shape", TensorInt64Bit.CreateFromArray(new List <long>()
            {
                4
            }, melSpectrogramShape))
                                         .SetOutput("reshaped", "Output.MelSpectrogram"));

            var model = builder.CreateModel();

            LearningModelSession session = new LearningModelSession(model);
            LearningModelBinding binding = new LearningModelBinding(session);

            // Bind input
            binding.Bind("Input.TimeSignal", TensorFloat.CreateFromArray(signalShape, signal));

            // Bind output
            var outputImage = new VideoFrame(
                BitmapPixelFormat.Bgra8,
                nMelBins,
                nDFT);

            binding.Bind("Output.MelSpectrogram", outputImage);

            // Evaluate
            var sw     = Stopwatch.StartNew();
            var result = session.Evaluate(binding, "");

            sw.Stop();
            Console.WriteLine("Evaluate Took: %f\n", sw.ElapsedMilliseconds);

            return(outputImage.SoftwareBitmap);
        }
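One way to persist the spectrogram returned above as an image (an illustrative sketch, not part of the original sample):

        static async Task SaveSpectrogramAsync(SoftwareBitmap bitmap, StorageFile file)
        {
            using (var stream = await file.OpenAsync(FileAccessMode.ReadWrite))
            {
                // PNG-encode the (BGRA8-converted) bitmap into the target file
                var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, stream);
                encoder.SetSoftwareBitmap(
                    SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied));
                await encoder.FlushAsync();
            }
        }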
Example #15
        public TensorFloat grid; // shape(-1,125,13,13)

        public TinyYoloV1_2Output()
        {
            var tensorFloat = TensorFloat.CreateFromArray(new long[] { -1, 125, 13, 13 }, new float[21125]);

            grid = tensorFloat;
        }
Example #16
        /// <summary>
        /// Timer event
        /// </summary>
        /// <param name="sender"></param>
        private async void TimerCapFlame(object sender)
        {
            // Prevent concurrent execution from multiple threads
            if (!semaphore.Wait(0))
            {
                return;
            }
            else if (this.ModelGen == null)
            {
                semaphore.Release();
                return;
            }

            try
            {
                // The AI model's input must be 224x224 resolution, BGRA8
                BitmapPixelFormat InputPixelFormat = BitmapPixelFormat.Bgra8;
                using (VideoFrame previewFrame = new VideoFrame(InputPixelFormat, 640, 480, BitmapAlphaMode.Ignore))
                {
                    // Capture a preview frame
                    await this.mediaCapture.GetPreviewFrameAsync(previewFrame);

                    if (previewFrame != null)                           // Frame was captured successfully
                    {
                        // Create an instance of the model input class
                        var modelInput = new Input();

                        // Create a SoftwareBitmap (224x224)
                        SoftwareBitmap bitmapBuffer = new SoftwareBitmap(BitmapPixelFormat.Bgra8, 224, 224, BitmapAlphaMode.Ignore);

                        // Create a VideoFrame backed by the SoftwareBitmap
                        VideoFrame buffer = VideoFrame.CreateWithSoftwareBitmap(bitmapBuffer);

                        // Copy the captured frame into the created VideoFrame
                        await previewFrame.CopyToAsync(buffer);

                        // Get the SoftwareBitmap (now resized to 224x224)
                        SoftwareBitmap resizedBitmap = buffer.SoftwareBitmap;

                        // Convert to a WriteableBitmap
                        WriteableBitmap innerBitmap = null;
                        byte[]          buf         = null;
                        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() => {
                            innerBitmap = new WriteableBitmap(resizedBitmap.PixelWidth, resizedBitmap.PixelHeight);

                            resizedBitmap.CopyToBuffer(innerBitmap.PixelBuffer);
                            buf = new byte[innerBitmap.PixelBuffer.Length];
                            innerBitmap.PixelBuffer.CopyTo(buf);
                        });

                        // Copy to the buffer
                        //innerBitmap.PixelBuffer.CopyTo(buf);
                        SoftwareBitmap sb = SoftwareBitmap.CreateCopyFromBuffer(buf.AsBuffer(), BitmapPixelFormat.Bgra8, 224, 224, BitmapAlphaMode.Ignore);

                        // Display the captured image in the control
                        await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                        {
                            var src = new SoftwareBitmapSource();
                            //await src.SetBitmapAsync(previewFrame.SoftwareBitmap);
                            await src.SetBitmapAsync(sb);
                            Image_CapImg.Source = src;
                        });

                        // Remove the alpha channel and reshape the array
                        byte[] buf2 = ConvertImageaArray(buf);


                        // Normalize and convert to a float array
                        float[] inData = NormalizeImage(buf2);

                        // Create the input tensor (Windows.AI.MachineLearning.TensorFloat)
                        TensorFloat tf =
                            TensorFloat.CreateFromArray(new long[] { 1, 3, 224, 224 }, inData);

                        // Set the data on the model input
                        modelInput.data = tf;

                        // Passing the data to the AI model returns the predicted values
                        //ModelOutput = await ModelGen.EvaluateAsync(modelInput);
                        var output = await ModelGen.EvaluateAsync(modelInput);

                        // Show the result on the UI thread
                        await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                        {
                            // Show the prediction result
                            //string label = outputData.classLabel[0];
                            var result_vec = output.mobilenetv20_output_flatten0_reshape0.GetAsVectorView();
                            var list       = result_vec.ToArray <float>();
                            var max1st     = list.Max();
                            var index1st   = Array.IndexOf(list, max1st);                               // Index of the highest probability

                            string ans = classList.Classes[index1st].ToString();
                            //result = result + "Class: " + label + ", Prob: " + ModelOutput.prob_1[label];

                            // Display the result
                            this.Text_Result_1st.Text = ans + ":" + max1st.ToString("0.0");
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine("周期処理で例外発生");
                Debug.WriteLine(ex.ToString());
            }
            finally
            {
                semaphore.Release();
            }
        }