/// <summary>
/// Subscribes to the robot's camera, grabs a single raw frame, and returns its pixel bytes.
/// The subscription is always released, even if the image fetch throws.
/// </summary>
/// <param name="session">Connected qi session used to reach ALVideoDevice.</param>
/// <param name="cameraIndex">Camera to use: 0 = front, 1 = bottom, 2 = depth.</param>
/// <param name="resolution">Resolution code; 1 = 320x240 (see NAOqi docs for other codes).</param>
/// <param name="colorSpace">Color space code; 11 = 24bit RGB. YUV variants are reportedly faster.</param>
/// <param name="fps">Requested frame rate, roughly 1 to 30.</param>
/// <returns>Raw image buffer (element 6 of the getImageRemote result).</returns>
static byte[] DownloadRawImage(
    QiSession session,
    int cameraIndex = 0,
    int resolution = 1,
    int colorSpace = 11,
    int fps = 5)
{
    var vd = session.GetService("ALVideoDevice");

    //The parameter meanings are documented at ALVideoDevice::subscribeCamera:
    //http://doc.aldebaran.com/2-1/naoqi/vision/alvideodevice-api.html?highlight=alvideodevice#ALVideoDeviceProxy::subscribeCamera__ssCR.iCR.iCR.iCR.iCR
    string idName = (string)vd["subscribeCamera"].Call("mytestimage",
        cameraIndex,
        resolution,
        colorSpace,
        fps
        );

    //Wait so that image data has reliably accumulated in the device buffer.
    Task.Delay(500).Wait();

    try
    {
        //Index 6 of the getImageRemote result holds the raw pixel payload.
        return vd["getImageRemote"].Call(idName)[6].ToBytes();
    }
    finally
    {
        //Always release the camera subscription, even on failure.
        vd["unsubscribe"].Call(idName);
    }
}
/// <summary>
/// Wakes the robot up, moves it to the "StandInit" posture, then puts it back to rest.
/// </summary>
/// <param name="session">Connected qi session used to reach ALMotion and ALRobotPosture.</param>
public static void Execute(QiSession session)
{
    var motionService = session.GetService("ALMotion");
    var postureService = session.GetService("ALRobotPosture");

    motionService["wakeUp"].Call();
    //0.5f is the relative speed of the posture transition.
    postureService["goToPosture"].Call("StandInit", 0.5f);
    motionService["rest"].Call();
}
/// <summary>
/// Stiffens the whole body, prints the motion summary, waits briefly, then rests the robot.
/// </summary>
/// <param name="session">Connected qi session used to reach ALMotion.</param>
public static void Execute(QiSession session)
{
    var motionService = session.GetService("ALMotion");

    motionService["stiffnessInterpolation"].Call("Body", 1.0f, 1.0f);
    Console.WriteLine((string)motionService["getSummary"].Call());

    //Original note: for some unknown reason we apparently wait here until the state settles.
    Task.Delay(2000).Wait();

    motionService["rest"].Call();
}
/// <summary>
/// Logs the dialog between a human and the robot to the console by subscribing to the
/// ALMemory events for the last human input and the robot's current TTS sentence.
/// Runs until ENTER is pressed, then disconnects both signal handlers.
/// </summary>
/// <param name="session">Connected qi session used to reach ALMemory.</param>
public static void Execute(QiSession session)
{
    var memory = session.GetService("ALMemory");

    //What the human said
    //(equivalent older call style: memory.CallObject("subscriber", new QiString("Dialog/LastInput")))
    var humanSpeechSubscriber = memory["subscriber"].CallObject("Dialog/LastInput");
    ulong humanSpeechSignalId = humanSpeechSubscriber.ConnectSignal("signal", qv =>
    {
        if (qv.Count > 0 && qv[0].ContentValueKind == QiValueKind.QiString)
        {
            Console.WriteLine($"Human: {qv[0].ToString()}");
        }
        else
        {
            Console.WriteLine("Human: Received unexpected data");
            Console.WriteLine(qv.Dump());
        }
    });

    //What the robot said
    var robotSpeechSubscriber = memory["subscriber"].CallObject("ALTextToSpeech/CurrentSentence");
    ulong robotSpeechSignalId = robotSpeechSubscriber.ConnectSignal("signal", qv =>
    {
        if (qv.Count > 0 && qv[0].ContentValueKind == QiValueKind.QiString)
        {
            string sentence = qv[0].ToString();
            //The TTS event also fires with blank text (e.g. when a sentence finishes); skip those.
            if (!string.IsNullOrWhiteSpace(sentence))
            {
                Console.WriteLine($"Robot: {sentence}");
            }
        }
        else
        {
            Console.WriteLine("Robot: Received unexpected data");
            Console.WriteLine(qv.Dump());
        }
    });

    Console.WriteLine("Press ENTER to quit logging results.");
    Console.ReadLine();

    humanSpeechSubscriber.DisconnectSignal(humanSpeechSignalId).Wait();
    robotSpeechSubscriber.DisconnectSignal(robotSpeechSignalId).Wait();
}
/// <summary>
/// Records a five-second video clip on the robot and saves it to the robot's file system.
/// </summary>
/// <param name="session">Connected qi session used to reach ALVideoRecorder.</param>
public static void Execute(QiSession session)
{
    var videoRecorder = session.GetService("ALVideoRecorder");
    string savePath = "/home/nao/recordings/cameras";

    //Record as Motion JPEG inside an .avi container.
    videoRecorder["setResolution"].Call(1);
    videoRecorder["setFramerate"].Call(10);
    videoRecorder["setVideoFormat"].Call("MJPG");

    videoRecorder["startRecording"].Call(savePath, "myvideo");
    Task.Delay(5000).Wait();
    videoRecorder["stopRecording"].Call();

    Console.WriteLine($"If test succeeded, video was saved in {savePath} on the robot");
}
/// <summary>
/// Downloads a single raw frame from the robot's front camera and saves it
/// to the working directory as "result.png".
/// </summary>
/// <param name="session">Connected qi session passed through to <see cref="DownloadRawImage"/>.</param>
public static void Execute(QiSession session)
{
    byte[] rawImageData = DownloadRawImage(session);
    Console.WriteLine($"raw data length = {rawImageData.Length}");

    try
    {
        //Dimensions must match the subscription used in DownloadRawImage: 320x240, 24bit RGB.
        const int width = 320;
        const int height = 240;
        const int channel = 3;

        var image = GetImageFromRawData(rawImageData, width, height, channel);
        image.Save("result.png", ImageFormat.Png);
        Console.WriteLine("Front camera's image was saved to working directory with name 'result.png'");
    }
    catch (InvalidOperationException ex)
    {
        Console.WriteLine("Failed to get image: " + ex.Message);
    }
}
/// <summary>
/// Reads and prints the robot's inertial sensor (IMU) values from ALMemory,
/// then subscribes to the sonar and prints the left/right ultrasound distances.
/// </summary>
/// <param name="session">Connected qi session used to reach ALMemory and ALSonar.</param>
public static void Execute(QiSession session)
{
    var memory = session.GetService("ALMemory");
    var sonar = session.GetService("ALSonar");
    //NOTE: the Python sample also covers FSR (force sensors), but it works much the same, so omitted.

    Console.WriteLine("First: read IMU values\n");
    var imuKeys = new Dictionary<string, string>()
    {
        { "GyrX", "Device/SubDeviceList/InertialSensor/GyrX/Sensor/Value" },
        { "GyrY", "Device/SubDeviceList/InertialSensor/GyrY/Sensor/Value" },
        { "AccX", "Device/SubDeviceList/InertialSensor/AccX/Sensor/Value" },
        { "AccY", "Device/SubDeviceList/InertialSensor/AccY/Sensor/Value" },
        { "AccZ", "Device/SubDeviceList/InertialSensor/AccZ/Sensor/Value" },
        { "TorsoAngleX", "Device/SubDeviceList/InertialSensor/AngleX/Sensor/Value" },
        { "TorsoAngleY", "Device/SubDeviceList/InertialSensor/AngleY/Sensor/Value" }
    };

    foreach (var pair in imuKeys)
    {
        string line = $"key={pair.Key}, value={memory["getData"].Call(pair.Value).Dump()}";
        //Dump() output may contain line breaks, which makes the log hard to read; strip them.
        Console.WriteLine(line.Replace("\n", ""));
    }

    Console.WriteLine("Second: read sonar values\n");
    sonar["subscribe"].Call("MySampleApplication");

    Console.WriteLine(
        "Left: {0}",
        memory["getData"].Call("Device/SubDeviceList/US/Left/Sensor/Value").Dump()
        );
    Console.WriteLine(
        "Right: {0}",
        memory["getData"].Call("Device/SubDeviceList/US/Right/Sensor/Value").Dump()
        );

    sonar["unsubscribe"].Call("MySampleApplication");
}
/// <summary>
/// Streams audio both ways between this PC and the robot: the local microphone is
/// forwarded to the robot's speakers via ALAudioDevice, and the robot's microphones
/// are played back locally through WASAPI. Runs until ENTER is pressed, then tears
/// everything down (unsubscribe, unregister service, dispose audio objects).
/// </summary>
/// <param name="session">Connected qi session used to reach ALAudioDevice and to host the callback service.</param>
public static void Execute(QiSession session)
{
    string serviceName = "CSharpSoundDownloaderSpare";
    var audioDevice = session.GetService("ALAudioDevice");
    var waveIn = new WaveInEvent();

    #region 1/4: set up sending local microphone audio to the robot

    //Lower the output sampling rate from the default (48kHz) to 16kHz.
    //NOTE: only 16000, 22050, 44100, or 48000 can be selected.
    audioDevice["setParameter"].Call("outputSampleRate", 16000);

    //Controls how often the DataAvailable event below fires, and therefore the buffer length.
    //NOTE: the buffer length must not exceed 16384
    //(see the official documentation for details).
    waveIn.BufferMilliseconds = 200;

    //Microphone capture format: the frequency must match the value set above.
    waveIn.WaveFormat = new WaveFormat(16000, 16, 2);

    int count = 0;
    waveIn.DataAvailable += (_, e) =>
    {
        //ALAudioDevice rejects buffers above 16384 bytes, so oversized chunks are dropped.
        if (e.BytesRecorded > 16384) return;

        byte[] bufferToSend = new byte[e.BytesRecorded];
        Array.Copy(e.Buffer, bufferToSend, e.BytesRecorded);
        //Fire-and-forget send; the Post result id is intentionally not awaited or stored.
        audioDevice["sendRemoteBufferToOutput"].Post(bufferToSend.Length / 4, bufferToSend);

        Console.WriteLine($"received data, {count}");
        count++;
    };
    #endregion

    #region 2/4: receive audio from the robot — prepare the local playback device
    var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    var wavProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));
    var wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 200);
    wavPlayer.Init(new VolumeWaveProvider16(wavProvider));
    wavPlayer.Play();
    #endregion

    #region 3/4: receive audio from the robot — enter the robot's microphone-monitoring mode
    var objBuilder = QiObjectBuilder.Create();
    //Registering a processRemote callback satisfies the contract ALAudioDevice expects
    //from a subscriber service.
    objBuilder.AdvertiseMethod(
        "processRemote::v(iimm)",
        (sig, arg) =>
        {
            //arg[3] carries the raw PCM payload; the full structure can be inspected with arg.Dump().
            byte[] raw = arg[3].ToBytes();
            wavProvider.AddSamples(raw, 0, raw.Length);
            return QiValue.Void;
        });

    //Publish the callback-receiving service defined above.
    session.Listen("tcp://0.0.0.0:0").Wait();
    ulong registeredId = session.RegisterService(serviceName, objBuilder.BuildObject()).GetUInt64(0UL);
    #endregion

    #region 4/4: adjust settings and start the actual input/output
    //Magic numbers explained here: http://www.baku-dreameater.net/archives/2411
    audioDevice["setClientPreferences"].Call(serviceName, 16000, 3, 0);

    //Start both directions.
    audioDevice["subscribe"].Call(serviceName);
    waveIn.StartRecording();
    #endregion

    Console.WriteLine("Press ENTER to quit..");
    Console.ReadLine();

    //Teardown: stop robot-side streaming first, then local devices.
    audioDevice["unsubscribe"].Call(serviceName);
    session.UnregisterService((uint)registeredId);

    wavPlayer.Stop();
    wavPlayer.Dispose();
    waveIn.StopRecording();
    waveIn.Dispose();
}