/// <summary>
/// Process Face Recognition events.
/// Should only be called if FaceRec is on.
/// Takes a picture named after the recognized label; for an unknown person the image is sent
/// to Azure Cognitive Services for a spoken description, otherwise the person is greeted by name.
/// Re-registers itself after a cooldown because the event was registered with keepAlive = false.
/// </summary>
/// <param name="faceRecEvent">The face recognition event payload from the robot.</param>
private async void ProcessFaceRecognitionEvent(IFaceRecognitionEvent faceRecEvent)
{
	try
	{
		// Invariant lowering: the comparison below is against the fixed token "unknown person",
		// so the result must not depend on the current culture (e.g. Turkish 'I').
		string label = faceRecEvent.Label.ToLowerInvariant();
		ITakePictureResponse takePictureResponse = await _misty.TakePictureAsync(label, false, true, true, null, null);

		if (label == "unknown person")
		{
			string description = null;
			// Guard against a failed capture before dereferencing the image payload.
			if (takePictureResponse?.Data?.Image != null)
			{
				// Dispose the stream once the Azure analysis call has consumed it.
				using (Stream stream = new MemoryStream((byte[])takePictureResponse.Data.Image))
				{
					description = await _azureCognitive.AnalyzeImage(stream);
				}
			}
			BroadcastDetails($"I see a person, {(string.IsNullOrWhiteSpace(description) ? "but I cannot describe them." : description)}", _defaultVoice);
		}
		else
		{
			BroadcastDetails($"Hello, {label}!", _defaultVoice);
		}

		//Wait a bit so we don't flood with face events, how about 10 seconds?
		_misty.Wait(10000);
		// keepAlive = false above, so the registration is one-shot and must be renewed here.
		_misty.RegisterFaceRecognitionEvent(ProcessFaceRecognitionEvent, 100, false, null, null, null);
		_assetWrapper.ShowSystemImage(SystemImage.Amazement);
	}
	catch (Exception ex)
	{
		_misty.SkillLogger.Log("Failed to process the face recognition event.", ex);
	}
}
/// <summary>
/// Takes a 640x480 picture on the robot and saves it as a PNG file named
/// <paramref name="fileName"/> under c:\Data\Misty\SDK\Images on the local device.
/// Failures are logged, not rethrown.
/// </summary>
/// <param name="fileName">Name of the file to create in the Images folder.</param>
public async Task TakePictureAsync(string fileName)
{
	try
	{
		// Use the caller-supplied name for the onboard capture instead of the old
		// throwaway placeholder "sadf", so the robot-side image is identifiable too.
		ITakePictureResponse response = await _misty.TakePictureAsync(fileName, false, false, true, 640, 480);
		if (response.Status == MistyRobotics.Common.Types.ResponseStatus.Success)
		{
			StorageFolder sdkFolder = await StorageFolder.GetFolderFromPathAsync(@"c:\Data\Misty\SDK");

			// OpenIfExists replaces the racy TryGetItem / Create / Get sequence with one atomic call.
			StorageFolder folder = await sdkFolder.CreateFolderAsync("Images", CreationCollisionOption.OpenIfExists);

			IBuffer buff = WindowsRuntimeBufferExtensions.AsBuffer(response.Data.Image.ToArray());
			// Dispose the in-memory stream once the decoder is done with it.
			using (InMemoryRandomAccessStream ms = new InMemoryRandomAccessStream())
			{
				await ms.WriteAsync(buff);
				BitmapDecoder decoder = await BitmapDecoder.CreateAsync(ms);
				SoftwareBitmap softwareBitmap = await decoder.GetSoftwareBitmapAsync();

				// ReplaceExisting so a repeated capture with the same name overwrites instead of throwing.
				StorageFile file = await folder.CreateFileAsync(fileName, CreationCollisionOption.ReplaceExisting);
				using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.ReadWrite))
				{
					BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, stream);
					encoder.SetSoftwareBitmap(softwareBitmap);
					await encoder.FlushAsync();
				}
			}
		}
	}
	catch (Exception ex)
	{
		LogMessage("Failed to save picture: " + ex.Message);
	}
}
/// <summary>
/// Takes a picture, runs it through the ONNX custom-vision model, and reacts to the
/// top detection: class 1 => red LED and launch another skill; class 0 (ore) => blue LED,
/// announce "Ore Found!" via local TTS, then steer the body yaw and head pitch toward the
/// center of the detected bounding box. LED is cleared when nothing confident is detected.
/// Fire-and-forget entry point; all failures are reported via debug messages.
/// </summary>
async void RunCustomVision()
{
	try
	{
		_misty.SkillLogger.Log("Taking picture to analyze");
		_misty.SendDebugMessage("Taking picture to analyze", null);
		ITakePictureResponse takePictureResponse = await _misty.TakePictureAsync("oretest.jpg", false, true, true, 640, 480);
		_misty.SendDebugMessage("Picture taken", null);

		SoftwareBitmap softwareBitmap;
		using (IRandomAccessStream stream = new MemoryStream((byte[])takePictureResponse.Data.Image).AsRandomAccessStream())
		{
			stream.Seek(0);
			// Create the decoder from the stream
			BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);
			// Get the SoftwareBitmap representation of the file in BGRA8 format
			softwareBitmap = await decoder.GetSoftwareBitmapAsync();
			softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
		}

		// Encapsulate the image in the WinML image type (VideoFrame) to be bound and evaluated
		VideoFrame inputImage = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
		_misty.SendDebugMessage("Picture processed, sending to model", null);

		// Evaluate the image
		OnnxModelOutput output = await EvaluateVideoFrameAsync(inputImage);
		_misty.SendDebugMessage("Model finished eval", null);
		await _misty.DisplayImageAsync("e_DefaultContent.jpg", 100);

		if (output == null)
		{
			_misty.SendDebugMessage("Model output empty", null);
			_misty.ChangeLED(0, 0, 0, OnResponse);
			alreadyRunning = false;
		}
		else
		{
			// Materialize each vector view once instead of re-fetching it on every access.
			var classes = output.detected_classes.GetAsVectorView();
			var scores = output.detected_scores.GetAsVectorView();
			var boxes = output.detected_boxes.GetAsVectorView();

			// Check the count BEFORE indexing [0]: the original indexed first, so an empty
			// detection threw instead of taking this intended LED-off path.
			if (classes.Count == 0 || scores[0] < 0.25)
			{
				_misty.ChangeLED(0, 0, 0, OnResponse);
				alreadyRunning = false;
			}
			else if (classes[0] == 1 && scores[0] >= 0.25)
			{
				_misty.ChangeLED(255, 0, 0, OnResponse);
				_misty.RunSkill("e1fcbf5b-9163-4d09-8707-bffd00ddcd5d", null, null);
				alreadyRunning = false;
			}
			else if (classes[0] == 0 && scores[0] >= 0.25)
			{
				_misty.ChangeLED(0, 0, 255, OnResponse);
				//Say found Ore
				//_misty.RunSkill("a61832ab-6bc1-4f1a-9de1-0d1dc8bf3ff0", null, null);
				var data = new StringContent("{ \"text\":\"Ore Found!\",\"pitch\":0,\"speechRate\":0,\"voice\":null,\"flush\":false,\"utteranceId\":null }", Encoding.UTF8, "application/json");
				HttpResponseMessage result = await client.PostAsync("http://127.0.0.1/api/tts/speak?text=Ore Found!&pitch=0&speechRate=0&flush=false", data);

				//Take the current yaw of the robot and then add the box X axis percentage
				//The 25 multiplier is approximately how many degrees you have to rotate to go
				//from edge to center of the camera (comment previously said 20 — the code uses 25).
				double calcTrajectory = yaw.getYaw() + (25 * (((boxes[0] + boxes[2]) / 2) - 0.5) * -1);
				await _misty.SendDebugMessageAsync("Trajectory: " + calcTrajectory);

				// Nudge the drive in the direction of the target heading.
				if (calcTrajectory > yaw.getYaw())
				{
					await _misty.DriveAsync(0, 5);
				}
				else
				{
					await _misty.DriveAsync(0, -5);
				}
				//data = new StringContent("{ \"heading\":" + calcTrajectory.ToString() + ",\"radius\":0,\"timeMs\":3000,\"reverse\":false }", Encoding.UTF8, "application/json");
				//result = await client.PostAsync("http://127.0.0.1/api/drive/arc", data);
				yaw.setTargetYaw(calcTrajectory);

				// Unsubscribe first so repeated runs don't stack duplicate handlers (event leak
				// in the original: += on every detection, so HandleYawReached fired N times).
				yaw.YawReached -= HandleYawReached;
				yaw.YawReached += HandleYawReached;

				// Pitch the head toward the vertical center of the detected box.
				// NOTE(review): alreadyRunning is intentionally left true on this path —
				// presumably reset by HandleYawReached; confirm against the rest of the skill.
				calcTrajectory = _currentHeadPitch + (80 * (((boxes[1] + boxes[3]) / 2) - 0.5));
				await _misty.MoveHeadAsync(calcTrajectory, 0, 0, 100, AngularUnit.Degrees);
				//_misty.DriveArc(calcTrajectory, 0.2, 2000, false, null);
				//357.47 deg 50% at 2sec = 341.88 16 degree 342.46
			}
		}
	}
	catch (Exception ex)
	{
		alreadyRunning = false;
		_misty.SendDebugMessage($"error: {ex.Message}", null);
		_misty.SendDebugMessage("Picture processing failed", null);
	}
}