static void Main(string[] args)
{
    // Make native libraries in the "dlls" folder (relative to the entry assembly)
    // resolvable at runtime, in addition to the working directory.
    PathModifier.AddEnvironmentPath("dlls", PathModifyMode.RelativeToEntryAssembly);

    // Address of the HelloWorld target machine as IP and port (the port is usually 9559).
    const string address = "tcp://127.0.0.1:9559";

    var session = QiSession.Create(address);
    Console.WriteLine($"Connected? {session.IsConnected}");
    if (!session.IsConnected)
    {
        Console.WriteLine("end program because there is no connection");
        return;
    }

    // Text-to-speech is one of the most basic modules available.
    var tts = session.GetService("ALTextToSpeech");
    // Invoke the "say" function with a string argument.
    tts["say"].Call("this is test");

    session.Close();
    session.Destroy();
}
static byte[] DownloadRawImage(QiSession session)
{
    var videoDevice = session.GetService("ALVideoDevice");

    // The magic numbers below are all documented under ALVideoDevice::subscribeCamera:
    // http://doc.aldebaran.com/2-1/naoqi/vision/alvideodevice-api.html?highlight=alvideodevice#ALVideoDeviceProxy::subscribeCamera__ssCR.iCR.iCR.iCR.iCR
    string subscriberId = (string)videoDevice["subscribeCamera"].Call(
        "mytestimage",
        0,  // camera: 0 = front, 1 = bottom, 2 = depth
        1,  // resolution: 1 = 320x240
        11, // color space: 11 = 24bit RGB (YUV is reportedly faster, so beware)
        5   // FPS: roughly 1 to 30
        );

    // Make sure the image data buffer has had time to fill up.
    Task.Delay(500).Wait();

    try
    {
        return videoDevice["getImageRemote"].Call(subscriberId)[6].ToBytes();
    }
    finally
    {
        // Always release the camera subscription, even when the download throws.
        videoDevice["unsubscribe"].Call(subscriberId);
    }
}
/// <summary>Tries to connect to the specified address.</summary>
/// <param name="address">Connection target (e.g. "127.0.0.1", "tcp://pepper.local", "tcp://192.168.3.3:9559")</param>
/// <returns>A result whose success flag is true when the connection was established.</returns>
public async Task <TryConnectResult> TryConnectAsync(string address)
{
    // Guard against use before initialization.
    if (!IsInitialized)
    {
        Initialize();
    }

    // Normalize the address and bail out early when it is not usable.
    string url = GetNormalizedUrl(address);
    if (string.IsNullOrEmpty(url))
    {
        return new TryConnectResult(false, null);
    }

    // QiSession.Create is the heavy part, so run it on the thread pool.
    return await Task.Run(() =>
    {
        var session = QiSession.Create(url);
        return session.IsConnected
            ? new TryConnectResult(true, new QiSessionViewModel(session))
            : new TryConnectResult(false, null);
    });
}
void OnDestroy()
{
    // Release the session when this component is torn down.
    // FIX: the original only cleared _session when it was still connected,
    // leaving a stale reference behind for a disconnected session;
    // always null the field out after closing.
    if (_session != null)
    {
        if (_session.IsConnected)
        {
            _session.Close();
        }
        _session = null;
    }
}
public static void Execute(QiSession session)
{
    // Wake the robot, stand it up into its initial posture, then let it rest again.
    var motionService = session.GetService("ALMotion");
    var postureService = session.GetService("ALRobotPosture");

    motionService["wakeUp"].Call();
    postureService["goToPosture"].Call("StandInit", 0.5f);
    motionService["rest"].Call();
}
void Start()
{
    // A libqi-based session is also available: new QiSessionFactory().CreateLibqiSession();
    _session = new QiSessionFactory().CreateSocketIoSession();

    // Fall back to the AudioSource on this GameObject when none was assigned in the inspector.
    if (audioSourceToPlayPepperSound == null)
    {
        audioSourceToPlayPepperSound = GetComponent<AudioSource>();
    }
}
public static void Execute(QiSession session)
{
    var motion = session.GetService("ALMotion");

    // Stiffen the whole body, then print the motion summary.
    motion["stiffnessInterpolation"].Call("Body", 1.0f, 1.0f);
    Console.WriteLine((string)motion["getSummary"].Call());

    // For some unknown reason the state apparently needs a moment to settle.
    Task.Delay(2000).Wait();

    motion["rest"].Call();
}
static void Main(string[] args)
{
    // Make native libraries in the "dlls" folder resolvable at runtime,
    // in addition to the working directory.
    PathModifier.AddEnvironmentPaths(Path.Combine(Environment.CurrentDirectory, "dlls"));

    // Address of the target machine as IP and port (the port is usually 9559).
    string address = "tcp://xxx.xxx.xxx.xxx:9559";

    var session = QiSession.Create(address);
    // FIX: guard against a failed connection. The original carried on regardless
    // and crashed on the first service call when the robot was unreachable.
    if (!session.IsConnected)
    {
        Console.WriteLine("end program because there is no connection");
        return;
    }

    // Typical annotated text to pass to ALAnimatedSpeech::say.
    var text = new QiString(
        "^start(animation/Stand/Gestures/Hey_1) " +
        "Hello, this is a typical sample sentence for animated say module, to check some autonomous motion suited for the conversation " +
        "^wait(animation/Stand/Gestures/Hey_1)"
        );

    // Single-pair dictionary for the second argument of AnimatedSay::say,
    // representing the body-language (motion) mode.
    var config = QiMap <QiString, QiString> .Create(new[]
    {
        new KeyValuePair <QiString, QiString>(
            new QiString("bodyLanguageMode"),
            new QiString("contextual")
            )
    });

    var animatedSayArgs = QiTuple.Create(text, config);
    Debug.WriteLine(animatedSayArgs.Signature);
    Debug.WriteLine(animatedSayArgs.Dump());

    var animatedSpeech = session.GetService("ALAnimatedSpeech");

    //1. The Call method cannot handle QiMap-typed signatures (a flaw in the .NET wrapper),
    //   so the low-level CallDirect is used instead.
    //2. "(sm)" is a qi Framework type string: the arguments form a tuple (the surrounding
    //   parentheses) of a string ("s") and a dynamic value ("m"). See the official
    //   type/signature table: http://doc.aldebaran.com/libqi/api/cpp/type/signature.html
    //3. CallDirect returns asynchronously (QiFuture); GetValue() blocks explicitly to make
    //   the call synchronous (keep the future and call GetValue() later for async use).
    var res = animatedSpeech
        .CallDirect("say::(sm)", animatedSayArgs.QiValue)
        .GetValue();

    // Confirm that a Void value is returned.
    Debug.WriteLine(res.ContentValueKind);
}
private void InitializeServices(QiSession session)
{
    // Create the typed service wrappers from the connected session.
    _tts = ALTextToSpeech.CreateService(session);
    _motion = ALMotion.CreateService(session);
    _memory = ALMemory.CreateService(session);
    _leds = ALLeds.CreateService(session);

    // ALAudioDevice may be missing in some environments (e.g. a simulated robot),
    // so treat its absence as non-fatal and just log it.
    try
    {
        _audioDevice = ALAudioDevice.CreateService(session);
    }
    catch (InvalidOperationException)
    {
        // FIX: corrected the grammar of the log message ("Are you use" -> "Are you using").
        Debug.Log("Failed to get ALAudioDevice: Service is not on the robot. Are you using a simulator?");
    }
}
public static void Execute(QiSession session)
{
    var mem = session.GetService("ALMemory");

    // Human speech: subscribe to the last dialog input.
    // (equivalent low-level form: mem.CallObject("subscriber", new QiString("Dialog/LastInput")))
    var subscriberHumanSpeech = mem["subscriber"].CallObject("Dialog/LastInput");
    ulong idHumanSpeech = subscriberHumanSpeech.ConnectSignal("signal", qv =>
    {
        bool hasText = qv.Count > 0 && qv[0].ContentValueKind == QiValueKind.QiString;
        if (!hasText)
        {
            Console.WriteLine("Human: Received unexpected data");
            Console.WriteLine(qv.Dump());
            return;
        }
        Console.WriteLine($"Human: {qv[0].ToString()}");
    });

    // Robot speech: subscribe to the sentence TTS is currently saying.
    var subscriberRobotSpeech = mem["subscriber"].CallObject("ALTextToSpeech/CurrentSentence");
    ulong idRobotSpeech = subscriberRobotSpeech.ConnectSignal("signal", qv =>
    {
        bool hasText = qv.Count > 0 && qv[0].ContentValueKind == QiValueKind.QiString;
        if (!hasText)
        {
            Console.WriteLine("Robot: Received unexpected data");
            Console.WriteLine(qv.Dump());
            return;
        }
        // Skip empty / whitespace-only sentences.
        string sentence = qv[0].ToString();
        if (!string.IsNullOrWhiteSpace(sentence))
        {
            Console.WriteLine($"Robot: {sentence}");
        }
    });

    Console.WriteLine("Press ENTER to quit logging results.");
    Console.ReadLine();

    // Detach both signal handlers before leaving.
    subscriberHumanSpeech.DisconnectSignal(idHumanSpeech).Wait();
    subscriberRobotSpeech.DisconnectSignal(idRobotSpeech).Wait();
}
public static void Execute(QiSession session)
{
    // Destination directory on the robot's file system.
    const string savePath = "/home/nao/recordings/cameras";

    var recorder = session.GetService("ALVideoRecorder");

    // Configure a Motion JPEG avi recording.
    recorder["setResolution"].Call(1);
    recorder["setFramerate"].Call(10);
    recorder["setVideoFormat"].Call("MJPG");

    // Record for five seconds.
    recorder["startRecording"].Call(savePath, "myvideo");
    Task.Delay(5000).Wait();
    recorder["stopRecording"].Call();

    Console.WriteLine($"If test succeeded, video was saved in {savePath} on the robot");
}
void Start()
{
    initialPosition = transform.position;

    if (!string.IsNullOrEmpty(pepperIP))
    {
        // Pepper
        _session = QiSession.Create(tcpPrefix + pepperIP + portSuffix);
        if (!_session.IsConnected)
        {
            Debug.Log("Failed to establish connection");
            return;
        }
    }
    else if (!string.IsNullOrEmpty(robotIP))
    {
        // another robot (estimated)
        session_robot = RobotSession.Create(tcpPrefix + robotIP + portSuffix);
        // FIX: the original checked _session.IsConnected here, but _session is never
        // created in this branch (null reference); check the robot session instead.
        if (!session_robot.isConnected)
        {
            Debug.Log("Failed to establish connection");
            return;
        }
    }
}
public static void Execute(QiSession session)
{
    byte[] rawImageData = DownloadRawImage(session);
    Console.WriteLine($"raw data length = {rawImageData.Length}");

    // 320x240, 3 channels — matches the subscribe parameters used by DownloadRawImage.
    const int width = 320;
    const int height = 240;
    const int channel = 3;

    try
    {
        var img = GetImageFromRawData(rawImageData, width, height, channel);
        img.Save("result.png", ImageFormat.Png);
        Console.WriteLine("Front camera's image was saved to working directory with name 'result.png'");
    }
    catch (InvalidOperationException ex)
    {
        Console.WriteLine("Failed to get image: " + ex.Message);
    }
}
public static void Execute(QiSession session)
{
    var mem = session.GetService("ALMemory");
    var sonar = session.GetService("ALSonar");

    // NOTE: the Python sample also covers FSR (force sensors); omitted here since
    // reading them works essentially the same way.
    Console.WriteLine("First: read IMU values\n");

    var imuKeys = new Dictionary <string, string>()
    {
        { "GyrX", "Device/SubDeviceList/InertialSensor/GyrX/Sensor/Value" },
        { "GyrY", "Device/SubDeviceList/InertialSensor/GyrY/Sensor/Value" },
        { "AccX", "Device/SubDeviceList/InertialSensor/AccX/Sensor/Value" },
        { "AccY", "Device/SubDeviceList/InertialSensor/AccY/Sensor/Value" },
        { "AccZ", "Device/SubDeviceList/InertialSensor/AccZ/Sensor/Value" },
        { "TorsoAngleX", "Device/SubDeviceList/InertialSensor/AngleX/Sensor/Value" },
        { "TorsoAngleY", "Device/SubDeviceList/InertialSensor/AngleY/Sensor/Value" }
    };

    foreach (var pair in imuKeys)
    {
        // Strip newlines from the dumped value so each sensor fits on one line
        // (the raw Dump output is hard to read otherwise).
        string line = $"key={pair.Key}, value={mem["getData"].Call(pair.Value).Dump()}";
        Console.WriteLine(line.Replace("\n", ""));
    }

    Console.WriteLine("Second: read sonar values\n");

    // Subscribe before reading the ultrasound values, unsubscribe afterwards.
    sonar["subscribe"].Call("MySampleApplication");

    Console.WriteLine(
        "Left: {0}",
        mem["getData"].Call("Device/SubDeviceList/US/Left/Sensor/Value").Dump()
        );
    Console.WriteLine(
        "Right: {0}",
        mem["getData"].Call("Device/SubDeviceList/US/Right/Sensor/Value").Dump()
        );

    sonar["unsubscribe"].Call("MySampleApplication");
}
static void Main(string[] args)
{
    // Make native libraries in the "dlls" folder resolvable at runtime.
    PathModifier.AddEnvironmentPath("dlls", PathModifyMode.RelativeToEntryAssembly);

    string address = "tcp://127.0.0.1:9559";
    var session = QiSession.Create(address);

    // Annotated text for ALAnimatedSpeech::say: a gesture wrapped around the sentence.
    string text =
        "^start(animation/Stand/Gestures/Hey_1) " +
        "Hello, this is a typical sample sentence for animated say module, to check some autonomous motion suited for the conversation " +
        "^wait(animation/Stand/Gestures/Hey_1)";

    // Single-pair dictionary for the second argument of AnimatedSay::say (motion mode).
    var config = QiMap.Create(new[]
    {
        new KeyValuePair <QiString, QiString>("bodyLanguageMode", "contextual")
    });

    var animatedSpeech = session.GetService("ALAnimatedSpeech");
    var res = animatedSpeech["say"].Call(text, config);
    Console.WriteLine(res.ContentValueKind);
}
public override void OnXboxInputUpdate(XboxControllerEventData eventData)
{
    if (string.IsNullOrEmpty(GamePadName))
    {
        Debug.LogFormat("Joystick {0} with id: \"{1}\" Connected", eventData.GamePadName, eventData.SourceId);
    }
    base.OnXboxInputUpdate(eventData);

    /*get information from xbox controller*/
    if (_session.IsConnected)
    {
        // Left stick: translate (vertical axis inverted so pushing up moves forward).
        if (eventData.XboxLeftStickHorizontalAxis != 0 || eventData.XboxLeftStickVerticalAxis != 0)
        {
            var motion = _session.GetService("ALMotion");
            motion["moveTo"].Call(
                eventData.XboxLeftStickHorizontalAxis * move_scalefactor,
                eventData.XboxLeftStickVerticalAxis * (-1) * move_scalefactor,
                0f);
        }

        // Bumpers: rotate in place (left = positive, right = negative).
        if (eventData.XboxLeftBumper_Pressed)
        {
            var motion = _session.GetService("ALMotion");
            motion["moveTo"].Call(0f, 0f, rotation_scalefactor);
        }
        else if (eventData.XboxRightBumper_Pressed)
        {
            var motion = _session.GetService("ALMotion");
            motion["moveTo"].Call(0f, 0f, (-1) * rotation_scalefactor);
        }

        // B button: turn the head (debounced by timeBetweenbuttonpressed).
        if (eventData.XboxB_Pressed)
        {
            if (Time.time - first_buttonpressed > timeBetweenbuttonpressed)
            {
                var motion = _session.GetService("ALMotion");
                motion["setAngles"].Call("HeadYaw", angle, 0f);
            }
            first_buttonpressed = Time.time;
        }

        // X button: toggle between the two known Pepper addresses (debounced).
        if (eventData.XboxX_Pressed)
        {
            if (Time.time - first_buttonpressed > timeBetweenbuttonpressed)
            {
                _session.Close();
                _session.Destroy();
                // FIX: the original was missing the ';' after each IP assignment
                // (a compile error); the toggle logic itself is kept as-is.
                pepperIP = (pepperIP == "192.168.10.51") ? "192.168.10.48" : "192.168.10.51";
                _session = QiSession.Create(tcpPrefix + pepperIP + portSuffix);
            }
            first_buttonpressed = Time.time;
        }
    }
    //another robot (estimated)
    else if (session_robot.isConnected)
    {
        if (eventData.XboxLeftStickHorizontalAxis != 0 || eventData.XboxLeftStickVerticalAxis != 0)
        {
            CallRobotsAPI_move(
                eventData.XboxLeftStickHorizontalAxis * move_scalefactor_robot,
                eventData.XboxLeftStickVerticalAxis * (-1) * move_scalefactor_robot,
                0f);
        }

        if (eventData.XboxLeftBumper_Pressed)
        {
            CallRobotsAPI_rotate(0f, 0f, rotation_scalefactor);
        }
        else if (eventData.XboxRightBumper_Pressed)
        {
            CallRobotsAPI_rotate(0f, 0f, (-1) * rotation_scalefactor);
        }

        // B button: stop rotation (debounced).
        if (eventData.XboxB_Pressed)
        {
            if (Time.time - first_buttonpressed > timeBetweenbuttonpressed)
            {
                CallRobotsAPI_rotate(0f, 0f, 0f);
            }
            first_buttonpressed = Time.time;
        }

        // X button: toggle between the two known robot addresses (debounced).
        if (eventData.XboxX_Pressed)
        {
            if (Time.time - first_buttonpressed > timeBetweenbuttonpressed)
            {
                session_robot.Close();
                session_robot.Destroy();
                // FIX: missing ';' after each IP assignment here too in the original.
                robotIP = (robotIP == "192.168.10.53") ? "192.168.10.54" : "192.168.10.53";
                session_robot = RobotSession.Create(tcpPrefix + robotIP + portSuffix);
            }
            first_buttonpressed = Time.time;
        }
    }
}
// Destroys the native session object behind the given QiSession.
internal static void Destroy(QiSession session) => qi_session_destroy(session.Handle);
// True when the native session reports a live connection
// (the native return value is converted to bool).
internal static bool IsConnected(QiSession session) => Convert.ToBoolean(qi_session_is_connected(session.Handle));
// Starts listening on the given address; standAlone selects the standalone
// variant of the native listen call. The pending operation is wrapped in a QiFuture.
internal static QiFuture Listen(QiSession session, string address, bool standAlone = false) => standAlone ? new QiFuture(qi_session_listen_standalone(session.Handle, address)) : new QiFuture(qi_session_listen(session.Handle, address));
// Returns the session's endpoints from the native layer, wrapped as a QiValue.
internal static QiValue GetEndpoints(QiSession session) => new QiValue(qi_session_endpoints(session.Handle));
// Requests the native session to close; the operation is returned as a QiFuture.
internal static QiFuture Close(QiSession session) => new QiFuture(qi_session_close(session.Handle));
// Gets the session's URL, marshaling the native ANSI string to a managed string.
internal static string GetUrl(QiSession session) => Marshal.PtrToStringAnsi(qi_session_url(session.Handle));
// Registers obj as a service under the given name; the native call is returned as a QiFuture.
internal static QiFuture RegisterService(QiSession session, string name, QiObject obj) => new QiFuture(qi_session_register_service(session.Handle, name, obj.Handle));
// Requests the list of available services; the native call is returned as a QiFuture.
internal static QiFuture GetServices(QiSession session) => new QiFuture(qi_session_get_services(session.Handle));
// Requests the service with the given name; the native call is returned as a QiFuture.
internal static QiFuture GetService(QiSession session, string name) => new QiFuture(qi_session_get_service(session.Handle, name));
// Starts connecting the session to the given address; returned as a QiFuture.
internal static QiFuture Connect(QiSession session, string address) => new QiFuture(qi_session_connect(session.Handle, address));
// Sets the session identity; key/crt presumably point to a private key and
// certificate (TLS identity) — TODO confirm against the libqi C API docs.
// Returns the native status code.
internal static int SetIdentity(QiSession session, string key, string crt) => qi_session_set_identity(session.Handle, key, crt);
// Unregisters the service with the given index; the native call is returned as a QiFuture.
internal static QiFuture UnregisterService(QiSession session, uint idx) => new QiFuture(qi_session_unregister_service(session.Handle, idx));
// Wraps a QiSession for display, caching its URL ("Unknown" when the session is null).
public QiSessionViewModel(QiSession session)
{
    _session = session;
    Url = session?.GetUrl() ?? "Unknown";
}
// Bidirectional audio bridge: streams PC microphone input to the robot's speaker
// and plays the robot's microphone input on the PC, until ENTER is pressed.
public static void Execute(QiSession session)
{
    string serviceName = "CSharpSoundDownloaderSpare";
    var audioDevice = session.GetService("ALAudioDevice");
    var waveIn = new WaveInEvent();

    #region 1/4: ロボットへ音を投げる方の仕込み
    // --- Part 1/4: set up sending PC microphone audio to the robot ---
    // Lower the output sample rate from the default (48kHz) to 16kHz.
    // Note: only 16000, 22050, 44100 and 48000 are selectable.
    audioDevice["setParameter"].Call("outputSampleRate", 16000);
    // Affects how often the DataAvailable event below fires, i.e. the buffer length.
    // Note: the buffer must not exceed 16384 bytes (see the official documentation).
    waveIn.BufferMilliseconds = 200;
    // Microphone capture format: the frequency must match the value set above.
    waveIn.WaveFormat = new WaveFormat(16000, 16, 2);
    int count = 0;
    waveIn.DataAvailable += (_, e) =>
    {
        // Drop oversized chunks instead of violating the 16384-byte limit.
        if (e.BytesRecorded > 16384) { return; }
        byte[] bufferToSend = new byte[e.BytesRecorded];
        Array.Copy(e.Buffer, bufferToSend, e.BytesRecorded);
        // Fire-and-forget Post; length / 4 presumably converts bytes to frames
        // (16bit x 2ch = 4 bytes per frame) — TODO confirm against ALAudioDevice docs.
        int p = audioDevice["sendRemoteBufferToOutput"].Post(bufferToSend.Length / 4, bufferToSend);
        Console.WriteLine($"received data, {count}");
        count++;
    };
    #endregion

    #region 2/4 ロボットから音を拾う, 再生デバイス準備
    // --- Part 2/4: prepare the PC playback device for audio picked up from the robot ---
    var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    // 16kHz, 16bit, mono buffer fed by the processRemote callback below.
    var wavProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));
    var wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 200);
    wavPlayer.Init(new VolumeWaveProvider16(wavProvider));
    wavPlayer.Play();
    #endregion

    #region 3/4 ロボットから音を拾う, ロボットのマイク監視モードに入る
    // --- Part 3/4: enter robot-microphone monitoring mode ---
    var objBuilder = QiObjectBuilder.Create();
    // Registering the processRemote callback satisfies the ALAudioDevice contract.
    objBuilder.AdvertiseMethod(
        "processRemote::v(iimm)",
        (sig, arg) =>
        {
            // Handle the incoming buffer here.
            //Console.WriteLine("Received Buffer!");
            //Console.WriteLine(arg.Dump());
            // The data layout can be inspected to some extent via the dump above.
            byte[] raw = arg[3].ToBytes();
            wavProvider.AddSamples(raw, 0, raw.Length);
            return(QiValue.Void);
        });
    // Publish the callback service so the robot can reach it.
    session.Listen("tcp://0.0.0.0:0").Wait();
    ulong registeredId = session.RegisterService(serviceName, objBuilder.BuildObject()).GetUInt64(0UL);
    #endregion

    #region 4/4 設定を調整して実際に入出力を行う
    // --- Part 4/4: adjust settings and actually start input/output ---
    // Magic numbers; for details see http://www.baku-dreameater.net/archives/2411
    audioDevice["setClientPreferences"].Call(serviceName, 16000, 3, 0);
    // Start.
    audioDevice["subscribe"].Call(serviceName);
    waveIn.StartRecording();
    #endregion

    Console.WriteLine("Press ENTER to quit..");
    Console.ReadLine();

    // Tear everything down in reverse order.
    audioDevice["unsubscribe"].Call(serviceName);
    session.UnregisterService((uint)registeredId);
    wavPlayer.Stop();
    wavPlayer.Dispose();
    waveIn.StopRecording();
    waveIn.Dispose();
}