Example #1
        static void _main()
        {
            BlackCore.basic.cParams args = bcore.app.args;

            client = new System.Net.Sockets.TcpClient();

            // List the available capture devices so the user can pick one.
            int wavInDevices = WaveIn.DeviceCount;
            int selWav = 0;
            for (int wavDevice = 0; wavDevice < wavInDevices; wavDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(wavDevice);
                Console.WriteLine("Device {0}: {1}, {2} channels", wavDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            Console.Write("Select device: ");
            selWav = int.Parse(Console.ReadLine());
            Console.WriteLine("Selected device is " + selWav.ToString());

            // Open an SSH session on the remote host and start a netcat listener
            // that pipes the incoming raw audio into pacat for playback.
            sshClient = new SshClient(args["host"], args["user"], args["pass"]);
            sshClient.Connect();

            if (sshClient.IsConnected)
            {
                shell = sshClient.CreateShellStream("xterm", 50, 50, 640, 480, 17640);
                Console.WriteLine("Open listening socket...");
                shell.WriteLine("nc -l " + args["port"] + "|pacat --playback");
                System.Threading.Thread.Sleep(2000);

                Console.WriteLine("Try to connect...");
                client.Connect(args["host"], int.Parse(args["port"]));
                if (!client.Connected) return;
                upStream = client.GetStream();

                //====================

                // Capture from the selected device; the DataAvailable handler
                // forwards each buffer over the TCP connection.
                WaveInEvent wavInStream = new WaveInEvent();
                wavInStream.DataAvailable += new EventHandler<WaveInEventArgs>(wavInStream_DataAvailable);
                wavInStream.DeviceNumber = selWav;
                wavInStream.WaveFormat = new WaveFormat(44100, 16, 2);
                wavInStream.StartRecording();
                Console.WriteLine("Working.....");

                Console.ReadKey();

                // Stop capturing before tearing down the connections so the
                // DataAvailable handler does not write to a closed stream.
                wavInStream.StopRecording();
                wavInStream.Dispose();
                wavInStream = null;
                sshClient.Disconnect();
                client.Close();
            }
        }
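The `wavInStream_DataAvailable` handler is not included in Example #1 (`client`, `sshClient`, `shell`, and `upStream` appear to be fields of the enclosing class). A minimal sketch, assuming the handler simply forwards each captured buffer into the TCP stream `upStream`, could look like this:

        static void wavInStream_DataAvailable(object sender, WaveInEventArgs e)
        {
            // Forward the raw PCM block captured by NAudio to the remote
            // nc | pacat pipeline over the already-open TCP connection.
            if (upStream != null && upStream.CanWrite)
            {
                upStream.Write(e.Buffer, 0, e.BytesRecorded);
            }
        }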
Example #2
        public void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
        {
            if (WaveSource != null)
            {
                WaveSource.Dispose();
                WaveSource = null;
            }

            if (WaveFile != null)
            {
                WaveFile.Dispose();
                WaveFile = null;
            }
        }
Example #3
        private static void MicAud_RecordingStopped(object sender, NAudio.Wave.StoppedEventArgs e)
        {
            if (micAud != null)
            {
                micAud.Dispose();
                micAud = null;
            }

            if (wfw != null)
            {
                wfw.Dispose();
                wfw = null;
            }
        }
Example #4
        public static void Execute(QiSession session)
        {
            string serviceName = "CSharpSoundDownloaderSpare";
            var audioDevice = session.GetService("ALAudioDevice");

            var waveIn = new WaveInEvent();

            #region 1/4: Set up sending audio to the robot
            // Lower the output sampling rate from the default (48 kHz) to 16 kHz.
            // Note that only 16000, 22050, 44100, or 48000 can be selected.
            audioDevice["setParameter"].Call("outputSampleRate", 16000);

            // Affects how often the DataAvailable event below fires, and thus the buffer length.
            // Note that the buffer length must not exceed 16384
            // (see the official documentation for details).
            waveIn.BufferMilliseconds = 200;
            // Microphone capture format: match the sample rate to the value set above.
            waveIn.WaveFormat = new WaveFormat(16000, 16, 2);

            int count = 0;
            waveIn.DataAvailable += (_, e) =>
            {
                if (e.BytesRecorded > 16384) return;

                byte[] bufferToSend = new byte[e.BytesRecorded];
                Array.Copy(e.Buffer, bufferToSend, e.BytesRecorded);

                int p = audioDevice["sendRemoteBufferToOutput"].Post(bufferToSend.Length / 4, bufferToSend);
                Console.WriteLine($"received data, {count}");
                count++;
            };
            #endregion

            #region 2/4: Pick up audio from the robot, prepare the playback device
            var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            var wavProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));

            var wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 200);
            wavPlayer.Init(new VolumeWaveProvider16(wavProvider));
            wavPlayer.Play();
            #endregion

            #region 3/4: Pick up audio from the robot, enter its microphone monitoring mode
            var objBuilder = QiObjectBuilder.Create();
            // Registering the processRemote callback satisfies the interface expected by ALAudioDevice.
            objBuilder.AdvertiseMethod(
                "processRemote::v(iimm)",
                (sig, arg) =>
                {
                    // Process the data here
                    //Console.WriteLine("Received Buffer!");
                    //Console.WriteLine(arg.Dump());

                    // The content of the data can be inspected to some extent by enabling the dump above.
                    byte[] raw = arg[3].ToBytes();
                    wavProvider.AddSamples(raw, 0, raw.Length);

                    return QiValue.Void;
                });

            // Register the service that exposes the callback above.
            session.Listen("tcp://0.0.0.0:0").Wait();
            ulong registeredId = session.RegisterService(serviceName, objBuilder.BuildObject()).GetUInt64(0UL);

            #endregion

            #region 4/4: Adjust the settings and start actual input/output
            // Magic numbers; see http://www.baku-dreameater.net/archives/2411 for details.
            audioDevice["setClientPreferences"].Call(serviceName, 16000, 3, 0);

            // Start
            audioDevice["subscribe"].Call(serviceName);
            waveIn.StartRecording();
            #endregion

            Console.WriteLine("Press ENTER to quit..");
            Console.ReadLine();

            audioDevice["unsubscribe"].Call(serviceName);
            session.UnregisterService((uint)registeredId);
            wavPlayer.Stop();
            wavPlayer.Dispose();

            waveIn.StopRecording();
            waveIn.Dispose();
        }
Example #5
        //Microphone.
        //http://mark-dot-net.blogspot.com/2011/04/how-to-use-wavefilewriter.html
        public void recordInput()
        {
            Console.WriteLine("Now recording...");
            waveSource = new WaveInEvent();
            waveSource.WaveFormat = new WaveFormat(16000, 1);

            waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
            waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);
            //string tempFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString() + ".wav");
            string tempFile = Path.Combine(@"C:\Users\Nick\Desktop", "test.wav");
            waveFile = new WaveFileWriter(tempFile, waveSource.WaveFormat);
            waveSource.StartRecording();

            Thread.Sleep(3000);
            waveSource.StopRecording();
            waveSource.Dispose();
            waveSource = null;
            waveFile.Close();

            Console.WriteLine("Finished record");
        }
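Example #5 wires up a `waveSource_DataAvailable` handler that is not shown. Following the WaveFileWriter pattern from the blog post linked above, a minimal sketch (assuming `waveFile` is the `WaveFileWriter` created in `recordInput`) could be:

        void waveSource_DataAvailable(object sender, WaveInEventArgs e)
        {
            // Append the captured PCM block to the WAV file and flush so the
            // header stays up to date while recording.
            if (waveFile != null)
            {
                waveFile.Write(e.Buffer, 0, e.BytesRecorded);
                waveFile.Flush();
            }
        }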