Example #1
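A minimal stop method: it halts the NAudio capture on WaveSource, logs the file being written, and bumps a counter. WaveSource, Filename, and count are presumably fields of the surrounding class.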
 public void stop()
 {
     Console.WriteLine($"Stopping recording. ({Path.GetFileName(Filename)})");
     //Console.ReadLine();
     WaveSource.StopRecording();
     count++;
 }
Example #2
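Cleanly ends a streaming speech-recognition session: the NAudio capture is stopped, further audio writes are blocked under the lock, and the gRPC streaming call is completed.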
        /// <summary>
        /// Stops the speech-recognition capture: halts the NAudio device,
        /// blocks further audio writes, and completes the streaming call.
        /// </summary>
        /// <returns>A task that completes once the stream has been closed.</returns>
        public async Task StopSpeechRecognition()
        {
            if (null == waveIn)
            {
                return;
            }

            waveIn.StopRecording();

            lock (writeLock) writeMore = false;
            await streamingCall.WriteCompleteAsync();
        }
Example #3
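Streams live microphone audio over TCP to a remote host, where an SSH-launched nc | pacat pipeline plays it back; recording stops once a key is pressed. bcore, client, sshClient, shell, upStream, and the wavInStream_DataAvailable handler are defined elsewhere in the program.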
        static void _main()
        {
            BlackCore.basic.cParams args = bcore.app.args;

            client = new System.Net.Sockets.TcpClient();

            // List the available capture devices and let the user pick one.
            int wavInDevices = WaveIn.DeviceCount;
            int selWav = 0;
            for (int wavDevice = 0; wavDevice < wavInDevices; wavDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(wavDevice);
                Console.WriteLine("Device {0}: {1}, {2} channels", wavDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            Console.Write("Select device: ");
            selWav = int.Parse(Console.ReadLine());
            Console.WriteLine("Selected device is " + selWav.ToString());

            sshClient = new SshClient(args["host"], args["user"], args["pass"]);
            sshClient.Connect();

            if (sshClient.IsConnected)
            {
                // Start a netcat listener piped into pacat on the remote host, then connect to it.
                shell = sshClient.CreateShellStream("xterm", 50, 50, 640, 480, 17640);
                Console.WriteLine("Open listening socket...");
                shell.WriteLine("nc -l " + args["port"] + "|pacat --playback");
                System.Threading.Thread.Sleep(2000);

                Console.WriteLine("Try to connect...");
                client.Connect(args["host"], int.Parse(args["port"]));
                if (!client.Connected) return;
                upStream = client.GetStream();

                //====================

                WaveInEvent wavInStream = new WaveInEvent();
                wavInStream.DataAvailable += new EventHandler<WaveInEventArgs>(wavInStream_DataAvailable);
                wavInStream.DeviceNumber = selWav;
                wavInStream.WaveFormat = new WaveFormat(44100, 16, 2);
                wavInStream.StartRecording();
                Console.WriteLine("Working.....");

                Console.ReadKey();
                sshClient.Disconnect();
                client.Close();
                wavInStream.StopRecording();
                wavInStream.Dispose();
                wavInStream = null;
            }
        }
Example #4
Records microphone and loopback (speaker) audio to two WAV files at the same time, using NAudio for the microphone and CSCore's WasapiLoopbackCapture for the speakers; it waits until stopRec is set from another thread, then stops both captures.
        public static void RecThread()
        {
            micAud = new NAudio.Wave.WaveInEvent();

            using (spkAud = new CSCore.SoundIn.WasapiLoopbackCapture())
            {
                spkAud.Initialize();

                // Match the microphone format to the loopback device's sample rate and channel count.
                micAud.WaveFormat        = new NAudio.Wave.WaveFormat(spkAud.WaveFormat.SampleRate, spkAud.WaveFormat.Channels);
                micAud.DataAvailable    += MicAud_DataAvailable;
                micAud.RecordingStopped += MicAud_RecordingStopped;
                // Alternative: write mic data inline: micAud.DataAvailable += (s, capData) => wfw.Write(capData.Buffer, 0, capData.BytesRecorded);
                wfw = new WaveFileWriter(_micLoc, micAud.WaveFormat);
                micAud.StartRecording();

                using (var w = new WaveWriter(_spkLoc, spkAud.WaveFormat))
                {
                    spkAud.DataAvailable += (s, capData) => w.Write(capData.Data, capData.Offset, capData.ByteCount);
                    spkAud.Start();

                    // Wait for another thread to set stopRec; sleep briefly instead of spinning at 100% CPU.
                    while (!stopRec)
                    {
                        System.Threading.Thread.Sleep(50);
                    }

                    spkAud.Stop();
                    micAud.StopRecording();
                }
            }
        }
Example #5
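Bridges audio between a PC and a robot through the libqi ALAudioDevice service: microphone buffers are pushed to the robot's speakers, while a registered processRemote callback plays the robot's microphone audio back locally. The comments below are translated from the original Japanese.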
        public static void Execute(QiSession session)
        {
            string serviceName = "CSharpSoundDownloaderSpare";
            var audioDevice = session.GetService("ALAudioDevice");

            var waveIn = new WaveInEvent();

            #region 1/4: Set up sending microphone audio to the robot
            //Lower the output sample rate from the default (48kHz) to 16kHz.
            //Note that only 16000, 22050, 44100, or 48000 can be selected.
            audioDevice["setParameter"].Call("outputSampleRate", 16000);

            //Affects how often the DataAvailable event below fires, i.e. the buffer length.
            //Note that the buffer length must not exceed 16384
            //(see the official documentation for details).
            waveIn.BufferMilliseconds = 200;
            //Microphone capture format: the sample rate must match the value set above.
            waveIn.WaveFormat = new WaveFormat(16000, 16, 2);

            int count = 0;
            waveIn.DataAvailable += (_, e) =>
            {
                if (e.BytesRecorded > 16384) return;

                byte[] bufferToSend = new byte[e.BytesRecorded];
                Array.Copy(e.Buffer, bufferToSend, e.BytesRecorded);

                int p = audioDevice["sendRemoteBufferToOutput"].Post(bufferToSend.Length / 4, bufferToSend);
                Console.WriteLine($"received data, {count}");
                count++;
            };
            #endregion

            #region 2/4: Receive audio from the robot - prepare the playback device
            var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            var wavProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));

            var wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 200);
            wavPlayer.Init(new VolumeWaveProvider16(wavProvider));
            wavPlayer.Play();
            #endregion

            #region 3/4: Receive audio from the robot - enter the robot's microphone monitoring mode
            var objBuilder = QiObjectBuilder.Create();
            //Registering the processRemote callback satisfies the interface ALAudioDevice expects.
            objBuilder.AdvertiseMethod(
                "processRemote::v(iimm)",
                (sig, arg) =>
                {
                    //Process the incoming audio here.
                    //Console.WriteLine("Received Buffer!");
                    //Console.WriteLine(arg.Dump());

                    //The shape of the data can be inspected to some extent with the dump above.
                    byte[] raw = arg[3].ToBytes();
                    wavProvider.AddSamples(raw, 0, raw.Length);

                    return QiValue.Void;
                });

            //Register the service that exposes the callback above.
            session.Listen("tcp://0.0.0.0:0").Wait();
            ulong registeredId = session.RegisterService(serviceName, objBuilder.BuildObject()).GetUInt64(0UL);

            #endregion

            #region 4/4: Adjust settings and start the actual input/output
            //Magic numbers here; see http://www.baku-dreameater.net/archives/2411 for details.
            audioDevice["setClientPreferences"].Call(serviceName, 16000, 3, 0);

            //Start
            audioDevice["subscribe"].Call(serviceName);
            waveIn.StartRecording();
            #endregion

            Console.WriteLine("Press ENTER to quit..");
            Console.ReadLine();

            audioDevice["unsubscribe"].Call(serviceName);
            session.UnregisterService((uint)registeredId);
            wavPlayer.Stop();
            wavPlayer.Dispose();

            waveIn.StopRecording();
            waveIn.Dispose();
        }
Example #6
Streams microphone audio to the Google Cloud Speech-to-Text API for a fixed number of seconds, printing interim results as they arrive.
        static async Task<object> StreamingMicrophoneRecognizeAsync(int seconds = 60, string languageCode = "en-US")
        {
            var speech        = SpeechClient.Create();
            var streamingCall = speech.StreamingRecognize();
            await streamingCall.WriteAsync(
                new StreamingRecognizeRequest()
            {
                StreamingConfig = new StreamingRecognitionConfig()
                {
                    Config = new RecognitionConfig()
                    {
                        Encoding        = RecognitionConfig.Types.AudioEncoding.Linear16,
                        SampleRateHertz = 44100,
                        LanguageCode    = languageCode
                    },
                    InterimResults = true,
                }
            });

            Task printResponses = Task.Run(async () =>
            {
                var responseStream = streamingCall.GetResponseStream();
                while (await responseStream.MoveNextAsync())
                {
                    StreamingRecognizeResponse response = responseStream.Current;
                    Console.WriteLine(response.Results[0].Alternatives[0].Transcript); // Print the most probable result (assumes each response carries at least one result).
                }
            });

            object writeLock = new object();
            bool   writeMore = true;
            var    waveIn    = new NAudio.Wave.WaveInEvent();

            waveIn.DeviceNumber   = 0;
            waveIn.WaveFormat     = new NAudio.Wave.WaveFormat(44100, 1); // 44100Hz Mono.
            waveIn.DataAvailable += (object sender, NAudio.Wave.WaveInEventArgs args) =>
            {
                lock (writeLock)
                {
                    if (!writeMore)
                    {
                        return;
                    }

                    streamingCall.WriteAsync(
                        new StreamingRecognizeRequest()
                    {
                        AudioContent = Google.Protobuf.ByteString.CopyFrom(args.Buffer, 0, args.BytesRecorded)
                    }).Wait();
                }
            };

            waveIn.StartRecording();
            Console.WriteLine("Speek now.");
            await Task.Delay(TimeSpan.FromSeconds(seconds));

            waveIn.StopRecording();
            lock (writeLock)
            {
                writeMore = false;
            }

            await streamingCall.WriteCompleteAsync();

            await printResponses;

            return 0;
        }
Example #7
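A WPF variant of the streaming recognition pattern, with Turkish UI strings (translated in the comments): it records until the timeout elapses or is cancelled, then routes the transcript to an answer or translation handler depending on the selected mode.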
        async Task<object> StreamingMicRecognizeAsync(int seconds)
        {
            object writeLock = new object();
            bool   writeMore = true;

            if (tamam) // tamam ("done"): set elsewhere once recognition has finished
            {
                return 0;
            }

            if (NAudio.Wave.WaveIn.DeviceCount < 1)
            {
                metin.Content = "Mikrofon Yok!"; // "No microphone!"
                return -1;
            }
            var speech        = SpeechClient.Create();
            var streamingCall = speech.StreamingRecognize();

            await streamingCall.WriteAsync(
                new StreamingRecognizeRequest()
            {
                StreamingConfig = new StreamingRecognitionConfig()
                {
                    Config = new RecognitionConfig()
                    {
                        Encoding =
                            RecognitionConfig.Types.AudioEncoding.Linear16,
                        SampleRateHertz = 16000,
                        LanguageCode    = "tr",
                    },
                    InterimResults = true,
                }
            });

            Task printResponses = Task.Run(async () =>
            {
                while (await streamingCall.ResponseStream.MoveNext(
                           default(System.Threading.CancellationToken)))
                {
                    foreach (var result in streamingCall.ResponseStream
                             .Current.Results)
                    {
                        foreach (var alternative in result.Alternatives)
                        {
                            if (!tamam)
                            {
                                yazi = alternative.Transcript; // yazi holds the latest recognized text
                                timer.Start();
                            }
                        }
                    }
                }
            });

            var waveIn = new NAudio.Wave.WaveInEvent();

            waveIn.DeviceNumber   = 0;
            waveIn.WaveFormat     = new NAudio.Wave.WaveFormat(16000, 1);
            waveIn.DataAvailable +=
                (object sender, NAudio.Wave.WaveInEventArgs args) =>
            {
                lock (writeLock)
                {
                    if (!writeMore)
                    {
                        return;
                    }
                    streamingCall.WriteAsync(
                        new StreamingRecognizeRequest()
                    {
                        AudioContent = Google.Protobuf.ByteString
                                       .CopyFrom(args.Buffer, 0, args.BytesRecorded)
                    }).Wait();
                }
            };

            waveIn.StartRecording();
            metin.Content        = "Şimdi Konuşabilirsiniz"; // "You can speak now"
            kulak.Visibility     = Visibility.Visible;
            acikAgiz.IsEnabled   = false;
            kapaliAgiz.IsEnabled = false;
            try
            {
                await Task.Delay(TimeSpan.FromSeconds(seconds), cancellationTokenSource.Token);
            }
            catch (TaskCanceledException ex)
            {
                Console.WriteLine(ex.Message);
            }
            finally
            {
                cancellationTokenSource.Dispose();
            }

            acikAgiz.IsEnabled   = true;
            kapaliAgiz.IsEnabled = true;
            kulak.Visibility     = Visibility.Hidden;
            waveIn.StopRecording();

            lock (writeLock) writeMore = false;

            if (genelMod.IsChecked == true)   // general (answer) mode
            {
                cevapla(yazi);                // answer the recognized text
            }
            if (ceviriMod.IsChecked == true)  // translation mode
            {
                cevir(yazi);                  // translate the recognized text
            }

            await streamingCall.WriteCompleteAsync();

            await printResponses;

            metin.Content = yazi;

            return 0;
        }
Example #8
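Records about three seconds of microphone audio to a WAV file with WaveFileWriter, following the linked blog post; the DataAvailable and RecordingStopped handlers are defined elsewhere in the class.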
        //Microphone.
        //http://mark-dot-net.blogspot.com/2011/04/how-to-use-wavefilewriter.html
        public void recordInput()
        {
            Console.WriteLine("Now recording...");
            waveSource = new WaveInEvent();
            waveSource.WaveFormat = new WaveFormat(16000, 1);

            waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
            waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);
            //string tempFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString() + ".wav");
            string tempFile = Path.Combine(@"C:\Users\Nick\Desktop", "test.wav");
            waveFile = new WaveFileWriter(tempFile, waveSource.WaveFormat);
            waveSource.StartRecording();

            Thread.Sleep(3000); // record for roughly three seconds
            waveSource.StopRecording();
            waveSource.Dispose();
            waveSource = null;
            waveFile.Close();

            Console.WriteLine("Finished recording");
        }
Example #9
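The simplest case: stop recording on the wvin capture device and log a confirmation.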
 public void Stop()
 {
     wvin.StopRecording();
     Console.WriteLine("Recording stopped.");
 }