/// <summary>
/// Wakes the robot up, moves it to the initial standing posture, and then rests it.
/// </summary>
/// <param name="session">Connected NAOqi session used to resolve services.</param>
public static void Execute(QiSession session)
{
    var motionService = session.GetService("ALMotion");
    var postureService = session.GetService("ALRobotPosture");

    motionService["wakeUp"].Call();
    postureService["goToPosture"].Call("StandInit", 0.5f);
    motionService["rest"].Call();
}
/// <summary>
/// Resolves a NAOqi service proxy by name, caching it for subsequent calls.
/// Returns a no-op null-object proxy when the session is disconnected or the
/// service lookup fails, so callers never need to null-check.
/// </summary>
/// <param name="name">NAOqi service name (e.g. "ALMotion").</param>
/// <returns>A cached or freshly created proxy, or a <c>NullQiServiceProxy</c> on failure.</returns>
public IQiServiceProxy GetService(string name)
{
    if (!IsConnected)
    {
        return new NullQiServiceProxy();
    }

    // Single-lookup cache hit (the original did ContainsKey + indexer, a double lookup).
    if (Services.TryGetValue(name, out var cached))
    {
        return cached;
    }

    try
    {
        var serviceModel = _session.GetService(name);
        var service = new QiServiceViewModel(this, serviceModel, name);
        _services[name] = service;
        // Return the instance we just created instead of re-reading the dictionary.
        return service;
    }
    catch (InvalidOperationException)
    {
        // The underlying session throws InvalidOperationException when the service
        // cannot be resolved; fall back to the null object (resolves the old TODO:
        // "what happens when this fails?" — this is what happens).
        return new NullQiServiceProxy();
    }
}
/// <summary>
/// Subscribes to the robot's camera, waits for a frame to be buffered,
/// grabs a single image, and returns its raw pixel bytes.
/// </summary>
/// <param name="session">Connected NAOqi session.</param>
/// <returns>Raw image bytes as produced by <c>getImageRemote</c> (element 6 of the payload).</returns>
static byte[] DownloadRawImage(QiSession session)
{
    var videoDevice = session.GetService("ALVideoDevice");

    // The magic numbers below follow the ALVideoDevice::subscribeCamera parameters,
    // documented at:
    // http://doc.aldebaran.com/2-1/naoqi/vision/alvideodevice-api.html?highlight=alvideodevice#ALVideoDeviceProxy::subscribeCamera__ssCR.iCR.iCR.iCR.iCR
    string subscriberId = (string)videoDevice["subscribeCamera"].Call(
        "mytestimage",
        0,   // camera: 0 = front, 1 = bottom, 2 = depth
        1,   // resolution: 1 = 320x240
        11,  // color space: 11 = 24bit RGB (YUV is reportedly faster; keep in mind)
        5    // FPS: roughly 1 to 30
        );

    // Give the device time to fill its frame buffer before reading.
    Task.Delay(500).Wait();

    try
    {
        return videoDevice["getImageRemote"].Call(subscriberId)[6].ToBytes();
    }
    finally
    {
        // Always release the camera subscription, even if the read throws.
        videoDevice["unsubscribe"].Call(subscriberId);
    }
}
/// <summary>
/// Reads inertial (IMU) sensor values from ALMemory, then subscribes to the
/// sonar service and reads the left/right ultrasound distances.
/// </summary>
/// <param name="session">Connected NAOqi session.</param>
public static void Execute(QiSession session)
{
    var memoryService = session.GetService("ALMemory");
    var sonarService = session.GetService("ALSonar");
    //NOTE: the Python sample also covers FSR (force sensors), but it works the
    //same way as below, so it is omitted here.

    Console.WriteLine("First: read IMU values\n");

    var imuKeys = new Dictionary<string, string>()
    {
        { "GyrX", "Device/SubDeviceList/InertialSensor/GyrX/Sensor/Value" },
        { "GyrY", "Device/SubDeviceList/InertialSensor/GyrY/Sensor/Value" },
        { "AccX", "Device/SubDeviceList/InertialSensor/AccX/Sensor/Value" },
        { "AccY", "Device/SubDeviceList/InertialSensor/AccY/Sensor/Value" },
        { "AccZ", "Device/SubDeviceList/InertialSensor/AccZ/Sensor/Value" },
        { "TorsoAngleX", "Device/SubDeviceList/InertialSensor/AngleX/Sensor/Value" },
        { "TorsoAngleY", "Device/SubDeviceList/InertialSensor/AngleY/Sensor/Value" }
    };

    foreach (var pair in imuKeys)
    {
        string line = $"key={pair.Key}, value={memoryService["getData"].Call(pair.Value).Dump()}";
        // Strip newlines so the dynamic value dump stays on one readable line.
        Console.WriteLine(line.Replace("\n", ""));
    }

    Console.WriteLine("Second: read sonar values\n");

    sonarService["subscribe"].Call("MySampleApplication");

    Console.WriteLine(
        "Left: {0}",
        memoryService["getData"].Call("Device/SubDeviceList/US/Left/Sensor/Value").Dump()
        );
    Console.WriteLine(
        "Right: {0}",
        memoryService["getData"].Call("Device/SubDeviceList/US/Right/Sensor/Value").Dump()
        );

    sonarService["unsubscribe"].Call("MySampleApplication");
}
/// <summary>
/// Stiffens the whole body, prints the motion summary, waits briefly,
/// and then releases the motors.
/// </summary>
/// <param name="session">Connected NAOqi session.</param>
public static void Execute(QiSession session)
{
    var motionService = session.GetService("ALMotion");

    motionService["stiffnessInterpolation"].Call("Body", 1.0f, 1.0f);

    string summary = (string)motionService["getSummary"].Call();
    Console.WriteLine(summary);

    // The original sample waits here until the state settles (reason unclear — kept as-is).
    Task.Delay(2000).Wait();

    motionService["rest"].Call();
}
/// <summary>
/// Logs dialog activity to the console: subscribes to human speech
/// ("Dialog/LastInput") and robot speech ("ALTextToSpeech/CurrentSentence")
/// events until the user presses ENTER, then disconnects both signals.
/// </summary>
/// <param name="session">Connected NAOqi session.</param>
public static void Execute(QiSession session)
{
    var memory = session.GetService("ALMemory");

    // Human speech events.
    var humanSubscriber = memory["subscriber"].CallObject("Dialog/LastInput");
    ulong humanSignalId = humanSubscriber.ConnectSignal("signal", qv =>
    {
        bool isText = qv.Count > 0 && qv[0].ContentValueKind == QiValueKind.QiString;
        if (!isText)
        {
            Console.WriteLine("Human: Received unexpected data");
            Console.WriteLine(qv.Dump());
            return;
        }
        Console.WriteLine($"Human: {qv[0].ToString()}");
    });

    // Robot speech events (skips blank sentences, which the TTS key emits).
    var robotSubscriber = memory["subscriber"].CallObject("ALTextToSpeech/CurrentSentence");
    ulong robotSignalId = robotSubscriber.ConnectSignal("signal", qv =>
    {
        bool isText = qv.Count > 0 && qv[0].ContentValueKind == QiValueKind.QiString;
        if (!isText)
        {
            Console.WriteLine("Robot: Received unexpected data");
            Console.WriteLine(qv.Dump());
            return;
        }
        string sentence = qv[0].ToString();
        if (!string.IsNullOrWhiteSpace(sentence))
        {
            Console.WriteLine($"Robot: {sentence}");
        }
    });

    Console.WriteLine("Press ENTER to quit logging results.");
    Console.ReadLine();

    humanSubscriber.DisconnectSignal(humanSignalId).Wait();
    robotSubscriber.DisconnectSignal(robotSignalId).Wait();
}
/// <summary>
/// Records a five-second Motion JPEG (.avi) clip with the robot's camera
/// and saves it on the robot's file system.
/// </summary>
/// <param name="session">Connected NAOqi session.</param>
public static void Execute(QiSession session)
{
    var videoRecorder = session.GetService("ALVideoRecorder");
    const string savePath = "/home/nao/recordings/cameras";

    // Configure a Motion JPEG .avi recording.
    videoRecorder["setResolution"].Call(1);
    videoRecorder["setFramerate"].Call(10);
    videoRecorder["setVideoFormat"].Call("MJPG");

    videoRecorder["startRecording"].Call(savePath, "myvideo");
    Task.Delay(5000).Wait();
    videoRecorder["stopRecording"].Call();

    Console.WriteLine($"If test succeeded, video was saved in {savePath} on the robot");
}
/// <summary>
/// Handles Xbox controller input for two robots: the left stick drives, the bumpers
/// rotate, B moves the head / pings rotation, and X switches the connection between
/// two known IP addresses. The Pepper session (_session) takes priority; otherwise
/// the second robot's session (session_robot) is used.
/// BUGFIX: the original was missing ';' on all four IP reassignments and did not compile.
/// </summary>
/// <param name="eventData">Controller state for this update tick.</param>
public override void OnXboxInputUpdate(XboxControllerEventData eventData)
{
    if (string.IsNullOrEmpty(GamePadName))
    {
        Debug.LogFormat("Joystick {0} with id: \"{1}\" Connected", eventData.GamePadName, eventData.SourceId);
    }

    base.OnXboxInputUpdate(eventData);

    /*get information from xbox controller*/
    if (_session.IsConnected)
    {
        // Left stick: translate. The vertical axis is inverted so pushing forward moves forward.
        if (eventData.XboxLeftStickHorizontalAxis != 0 || eventData.XboxLeftStickVerticalAxis != 0)
        {
            var motion = _session.GetService("ALMotion");
            motion["moveTo"].Call(
                eventData.XboxLeftStickHorizontalAxis * move_scalefactor,
                eventData.XboxLeftStickVerticalAxis * (-1) * move_scalefactor,
                0f);
        }

        // Bumpers: rotate in place (left = positive, right = negative).
        if (eventData.XboxLeftBumper_Pressed)
        {
            var motion = _session.GetService("ALMotion");
            motion["moveTo"].Call(0f, 0f, rotation_scalefactor);
        }
        else if (eventData.XboxRightBumper_Pressed)
        {
            var motion = _session.GetService("ALMotion");
            motion["moveTo"].Call(0f, 0f, (-1) * rotation_scalefactor);
        }

        // B: turn the head, debounced by timeBetweenbuttonpressed.
        if (eventData.XboxB_Pressed)
        {
            if (Time.time - first_buttonpressed > timeBetweenbuttonpressed)
            {
                var motion = _session.GetService("ALMotion");
                motion["setAngles"].Call("HeadYaw", angle, 0f);
            }
            first_buttonpressed = Time.time;
        }

        // X: reconnect to the other Pepper — toggle between the two known addresses.
        if (eventData.XboxX_Pressed)
        {
            if (Time.time - first_buttonpressed > timeBetweenbuttonpressed)
            {
                _session.Close();
                _session.Destroy();
                // BUGFIX: the original assignment here lacked a ';'. The toggle is also
                // collapsed into one ternary instead of two duplicated branches.
                pepperIP = (pepperIP == "192.168.10.51") ? "192.168.10.48" : "192.168.10.51";
                _session = QiSession.Create(tcpPrefix + pepperIP + portSuffix);
            }
            first_buttonpressed = Time.time;
        }
    }
    //another robot (estimated)
    else if (session_robot.isConnected)
    {
        if (eventData.XboxLeftStickHorizontalAxis != 0 || eventData.XboxLeftStickVerticalAxis != 0)
        {
            CallRobotsAPI_move(
                eventData.XboxLeftStickHorizontalAxis * move_scalefactor_robot,
                eventData.XboxLeftStickVerticalAxis * (-1) * move_scalefactor_robot,
                0f);
        }

        if (eventData.XboxLeftBumper_Pressed)
        {
            CallRobotsAPI_rotate(0f, 0f, rotation_scalefactor);
        }
        else if (eventData.XboxRightBumper_Pressed)
        {
            CallRobotsAPI_rotate(0f, 0f, (-1) * rotation_scalefactor);
        }

        // B: debounced zero-rotation call (presumably a "stop"/ping — TODO confirm
        // against CallRobotsAPI_rotate's implementation).
        if (eventData.XboxB_Pressed)
        {
            if (Time.time - first_buttonpressed > timeBetweenbuttonpressed)
            {
                CallRobotsAPI_rotate(0f, 0f, 0f);
            }
            first_buttonpressed = Time.time;
        }

        // X: reconnect to the other robot — same toggle pattern as the Pepper branch.
        if (eventData.XboxX_Pressed)
        {
            if (Time.time - first_buttonpressed > timeBetweenbuttonpressed)
            {
                session_robot.Close();
                session_robot.Destroy();
                // BUGFIX: the original assignment here lacked a ';'.
                robotIP = (robotIP == "192.168.10.53") ? "192.168.10.54" : "192.168.10.53";
                session_robot = RobotSession.Create(tcpPrefix + robotIP + portSuffix);
            }
            first_buttonpressed = Time.time;
        }
    }
}
/// <summary>
/// Bidirectional audio sample: streams the local microphone to the robot's speakers
/// while simultaneously receiving the robot's microphone stream and playing it on
/// the local default output device. Runs until the user presses ENTER, then tears
/// everything down.
/// </summary>
/// <param name="session">Connected NAOqi session.</param>
public static void Execute(QiSession session)
{
    string serviceName = "CSharpSoundDownloaderSpare";

    var audioDevice = session.GetService("ALAudioDevice");
    var waveIn = new WaveInEvent();

    #region 1/4: set up sending local microphone audio to the robot

    // Lower the output sampling rate from the default (48kHz) to 16kHz.
    // Note: only 16000, 22050, 44100, or 48000 are accepted.
    audioDevice["setParameter"].Call("outputSampleRate", 16000);

    // Controls how often the DataAvailable event fires and thus the buffer length.
    // The buffer must never exceed 16384 bytes (see the official documentation).
    waveIn.BufferMilliseconds = 200;

    // Capture format: the frequency must match the value set above.
    waveIn.WaveFormat = new WaveFormat(16000, 16, 2);

    int count = 0;
    waveIn.DataAvailable += (_, e) =>
    {
        // Drop oversized buffers instead of violating the 16384-byte limit.
        if (e.BytesRecorded > 16384) { return; }

        byte[] bufferToSend = new byte[e.BytesRecorded];
        Array.Copy(e.Buffer, bufferToSend, e.BytesRecorded);
        // Fire-and-forget; the original stored Post's id in a local that was never used.
        audioDevice["sendRemoteBufferToOutput"].Post(bufferToSend.Length / 4, bufferToSend);
        Console.WriteLine($"received data, {count}");
        count++;
    };

    #endregion

    #region 2/4: receive robot audio — prepare the local playback device

    var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    var wavProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));
    var wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 200);
    wavPlayer.Init(new VolumeWaveProvider16(wavProvider));
    wavPlayer.Play();

    #endregion

    #region 3/4: receive robot audio — register as a microphone subscriber

    var objBuilder = QiObjectBuilder.Create();
    // Advertising a processRemote callback is how ALAudioDevice expects
    // remote audio consumers to receive buffers.
    objBuilder.AdvertiseMethod(
        "processRemote::v(iimm)",
        (sig, arg) =>
        {
            // The payload layout can be inspected by calling arg.Dump();
            // element 3 carries the raw sample bytes.
            byte[] raw = arg[3].ToBytes();
            wavProvider.AddSamples(raw, 0, raw.Length);
            return QiValue.Void;
        });

    // Publish the callback service so the robot can reach it.
    session.Listen("tcp://0.0.0.0:0").Wait();
    ulong registeredId = session.RegisterService(serviceName, objBuilder.BuildObject()).GetUInt64(0UL);

    #endregion

    #region 4/4: tune parameters and start the actual input/output

    // Magic numbers explained at http://www.baku-dreameater.net/archives/2411
    audioDevice["setClientPreferences"].Call(serviceName, 16000, 3, 0);

    // Start both directions.
    audioDevice["subscribe"].Call(serviceName);
    waveIn.StartRecording();

    #endregion

    Console.WriteLine("Press ENTER to quit..");
    Console.ReadLine();

    // Tear down: robot-side subscription and service first, then local devices.
    audioDevice["unsubscribe"].Call(serviceName);
    session.UnregisterService((uint)registeredId);
    wavPlayer.Stop();
    wavPlayer.Dispose();
    waveIn.StopRecording();
    waveIn.Dispose();
}