Code Example #1
File: MistySkill.cs  Project: cbattlegear/MistyMiner
        private async Task<OnnxModelOutput> EvaluateAsyncHelper(OnnxModelInput input)
        {
            // Bind the image to the model's "image_tensor" input and run inference
            binding.Bind("image_tensor", input.data);
            var result = await session.EvaluateAsync(binding, string.Empty);

            // Copy the object-detection outputs (boxes, classes, scores) into the output wrapper
            OnnxModelOutput output = new OnnxModelOutput();

            output.detected_boxes   = result.Outputs["detected_boxes"] as TensorFloat;
            output.detected_classes = result.Outputs["detected_classes"] as TensorInt64Bit;
            output.detected_scores  = result.Outputs["detected_scores"] as TensorFloat;

            return output;
        }
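
For reference, the helper above relies on a LearningModelSession and LearningModelBinding created when the skill starts, plus small OnnxModelInput / OnnxModelOutput wrapper classes that are not shown in this listing. The following is a minimal sketch of that setup using the standard Windows.AI.MachineLearning API; the field names, the Assets/model.onnx path, and the LoadModelAsync helper are assumptions for illustration, not code taken from the MistyMiner repository.

using System;
using System.Threading.Tasks;
using Windows.AI.MachineLearning;
using Windows.Storage;

// Hypothetical wrapper types mirroring the fields used by EvaluateAsyncHelper.
public sealed class OnnxModelInput
{
    // The image bound to "image_tensor" (e.g. an ImageFeatureValue created from a VideoFrame).
    public ImageFeatureValue data;
}

public sealed class OnnxModelOutput
{
    public TensorFloat    detected_boxes;   // normalized box coordinates per detection
    public TensorInt64Bit detected_classes; // class index per detection
    public TensorFloat    detected_scores;  // confidence score per detection
}

// Assumed one-time setup for the session/binding fields the helper uses.
private LearningModel model;
private LearningModelSession session;
private LearningModelBinding binding;

private async Task LoadModelAsync()
{
    // Load the exported Custom Vision ONNX model packaged with the skill (path is an assumption).
    StorageFile modelFile = await StorageFile.GetFileFromApplicationUriAsync(
        new Uri("ms-appx:///Assets/model.onnx"));
    model = await LearningModel.LoadFromStorageFileAsync(modelFile);

    session = new LearningModelSession(model);
    binding = new LearningModelBinding(session);
}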
Code Example #2
File: MistySkill.cs  Project: cbattlegear/MistyMiner
        async void RunCustomVision()
        {
            try
            {
                _misty.SkillLogger.Log("Taking picture to analyze");
                _misty.SendDebugMessage("Taking picture to analyze", null);
                ITakePictureResponse takePictureResponse = await _misty.TakePictureAsync("oretest.jpg", false, true, true, 640, 480);

                _misty.SendDebugMessage("Picture taken", null);
                SoftwareBitmap softwareBitmap;
                using (IRandomAccessStream stream = new MemoryStream((byte[])takePictureResponse.Data.Image).AsRandomAccessStream())
                {
                    stream.Seek(0);
                    // Create the decoder from the stream
                    BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

                    // Get the SoftwareBitmap representation of the file in BGRA8 format
                    softwareBitmap = await decoder.GetSoftwareBitmapAsync();

                    softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                }

                // Encapsulate the image in the WinML image type (VideoFrame) to be bound and evaluated
                VideoFrame inputImage = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
                _misty.SendDebugMessage("Picture processed, sending to model", null);

                // Evaluate the image
                OnnxModelOutput output = await EvaluateVideoFrameAsync(inputImage);

                _misty.SendDebugMessage("Model finished eval", null);

                await _misty.DisplayImageAsync("e_DefaultContent.jpg", 100);

                if (output == null)
                {
                    _misty.SendDebugMessage("Model output empty", null);
                    _misty.ChangeLED(0, 0, 0, OnResponse);
                    alreadyRunning = false;
                }
                else
                {
                    // Only index into the output vectors once we know at least one detection exists
                    int    vectorCount  = output.detected_classes.GetAsVectorView().Count;
                    double initialScore = vectorCount > 0 ? output.detected_scores.GetAsVectorView()[0] : 0;
                    long   initialClass = vectorCount > 0 ? output.detected_classes.GetAsVectorView()[0] : -1;

                    if (vectorCount == 0 || initialScore < 0.25)
                    {
                        _misty.ChangeLED(0, 0, 0, OnResponse);
                        alreadyRunning = false;
                    }
                    else if (initialClass == 1 && initialScore >= 0.25)
                    {
                        _misty.ChangeLED(255, 0, 0, OnResponse);
                        _misty.RunSkill("e1fcbf5b-9163-4d09-8707-bffd00ddcd5d", null, null);
                        alreadyRunning = false;
                    }
                    else if (initialClass == 0 && initialScore >= 0.25)
                    {
                        _misty.ChangeLED(0, 0, 255, OnResponse);
                        //Say found Ore
                        //_misty.RunSkill("a61832ab-6bc1-4f1a-9de1-0d1dc8bf3ff0", null, null);

                        var data = new StringContent("{ \"text\":\"Ore Found!\",\"pitch\":0,\"speechRate\":0,\"voice\":null,\"flush\":false,\"utteranceId\":null }", Encoding.UTF8, "application/json");
                        HttpResponseMessage result = await client.PostAsync("http://127.0.0.1/api/tts/speak?text=Ore Found!&pitch=0&speechRate=0&flush=false", data);

                        // Take the horizontal center of the detected box (average of its left/right edges,
                        // as fractions of the frame), offset it from the frame center, and scale by ~25 degrees
                        // (roughly the rotation from the edge to the center of the camera's field of view).
                        // Adding that to the robot's current yaw gives the heading that points at the ore.
                        double calcTrajectory = yaw.getYaw() + (25 * (((output.detected_boxes.GetAsVectorView()[0] + output.detected_boxes.GetAsVectorView()[2]) / 2) - 0.5) * -1);

                        await _misty.SendDebugMessageAsync("Trajectory: " + calcTrajectory);

                        // Start turning in place toward the calculated heading
                        if (calcTrajectory > yaw.getYaw())
                        {
                            await _misty.DriveAsync(0, 5);
                        }
                        else
                        {
                            await _misty.DriveAsync(0, -5);
                        }


                        //data = new StringContent("{ \"heading\":" + calcTrajectory.ToString() + ",\"radius\":0,\"timeMs\":3000,\"reverse\":false }", Encoding.UTF8, "application/json");
                        //result = await client.PostAsync("http://127.0.0.1/api/drive/arc", data);

                        // Ask the yaw tracker to raise YawReached once the robot reaches the calculated heading
                        yaw.setTargetYaw(calcTrajectory);
                        yaw.YawReached += HandleYawReached;

                        // Same idea vertically: average the box's top/bottom edges and scale by ~80 degrees
                        // to pitch the head so the ore is centered in the camera frame
                        calcTrajectory = _currentHeadPitch + (80 * (((output.detected_boxes.GetAsVectorView()[1] + output.detected_boxes.GetAsVectorView()[3]) / 2) - 0.5));

                        await _misty.MoveHeadAsync(calcTrajectory, 0, 0, 100, AngularUnit.Degrees);

                        //_misty.DriveArc(calcTrajectory, 0.2, 2000, false, null);


                        //357.47 deg 50% at 2sec = 341.88 16 degree 342.46
                    }
                }
            }
            catch (Exception ex)
            {
                alreadyRunning = false;
                _misty.SendDebugMessage($"error: {ex.Message}", null);
                _misty.SendDebugMessage("Picture processing failed", null);
            }
        }
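
The yaw object and the HandleYawReached handler used above are not part of this listing. Purely as an illustration of how the turn could be completed, here is a hypothetical sketch: a tracker that records the latest IMU yaw reading and raises YawReached once it is within a small tolerance of the target set by setTargetYaw, plus a handler that then stops the rotation. None of this is taken from the MistyMiner repository; the 2-degree tolerance and the Update method are assumptions.

using System;

// Hypothetical yaw tracker, for illustration only; the real MistyMiner implementation may differ.
public class YawTracker
{
    private double currentYaw;
    private double targetYaw;
    private bool   targetSet;

    public event EventHandler YawReached;

    public double getYaw() => currentYaw;

    public void setTargetYaw(double target)
    {
        targetYaw = target;
        targetSet = true;
    }

    // Call this from the robot's IMU event callback with the latest yaw reading (degrees).
    public void Update(double yawDegrees)
    {
        currentYaw = yawDegrees;
        if (targetSet && Math.Abs(currentYaw - targetYaw) < 2.0)
        {
            targetSet = false;
            YawReached?.Invoke(this, EventArgs.Empty);
        }
    }
}

// Corresponding handler sketch: stop the in-place turn once the target heading is reached.
private void HandleYawReached(object sender, EventArgs e)
{
    yaw.YawReached -= HandleYawReached;   // detach so handlers do not stack across runs
    _ = _misty.DriveAsync(0, 0);          // zero velocities to stop the rotation
    alreadyRunning = false;
}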