Example #1
 public void StartRec()
 {
     // Create the frame list before starting, so DataAvailable never sees it null.
     bufl = new List<byte[]>();

     WaveSourceStream = new NAudio.Wave.WaveInEvent();
     WaveSourceStream.DeviceNumber = 0;
     WaveSourceStream.WaveFormat = new WaveFormat(16000, 1);
     WaveSourceStream.DataAvailable += sourceStream_DataAvailable;
     WaveSourceStream.StartRecording();
 }
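
The DataAvailable handler and the stop path are not part of the snippet. A minimal sketch, assuming bufl is meant to collect the raw PCM frames (the StopRec name is hypothetical):

 private void sourceStream_DataAvailable(object sender, NAudio.Wave.WaveInEventArgs e)
 {
     // Copy the frame out: NAudio reuses e.Buffer between callbacks.
     var frame = new byte[e.BytesRecorded];
     Buffer.BlockCopy(e.Buffer, 0, frame, 0, e.BytesRecorded);
     bufl.Add(frame);
 }

 public void StopRec() // hypothetical name
 {
     WaveSourceStream.StopRecording();
     WaveSourceStream.Dispose();
 }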
Example #2
        public void iniciarCaptura()
        {
            try
            {
                /*WaveInCapabilities capabilities;

                for (int numberDevice = 0; numberDevice < WaveIn.DeviceCount; numberDevice++)
                {
                    capabilities = WaveIn.GetCapabilities(numberDevice);
                    Console.WriteLine("Producto->" + capabilities.ProductName.ToUpper().Trim());
                    if (capabilities.ProductName.ToUpper().Trim().Contains("BLUETOOTH"))
                    {
                        deviceBluetooth = numberDevice;
                        break;
                    }
                }*/

                // Dns.GetHostAddresses("") returns the local machine's addresses;
                // the loop keeps the last IPv4 address found.
                foreach (IPAddress ip in System.Net.Dns.GetHostAddresses(""))
                {
                    if (Regex.IsMatch(ip.ToString(), @"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}"))
                    {
                        ipLocal = ip.ToString();
                    }
                }

                wi = new WaveInEvent();
                wi.BufferMilliseconds = 1000;
                wi.DeviceNumber = deviceBluetooth;
                wi.WaveFormat = new WaveFormat(44100, 2);
                wi.DataAvailable += new EventHandler<WaveInEventArgs>(wi_DataAvailable);
                wi.StartRecording();

                /*wo = new WaveOutEvent();
                bwp = new BufferedWaveProvider(wi.WaveFormat);
                bwp.DiscardOnBufferOverflow = true;
                wo.Init(bwp);
                wo.Play();*/

                tempFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString() + ".wav");
                writer = new WaveFileWriter(tempFile, wi.WaveFormat);

                hilo = new Thread(new ThreadStart(iniciarStreaming));
                hilo.Start();
            }
            catch (Exception ex)
            {
                logger.WriteToEventLog("ERROR: " + ex.Message +
                                        Environment.NewLine +
                                        "STACK TRACE: " + ex.StackTrace,
                                        "Servicio de captura de audio [iniciarCaptura]",
                                        EventLogEntryType.Error,
                                        "LogSucursalAudio");
                logger.WriteToErrorLog("ERROR: " + ex.Message,
                                        ex.StackTrace,
                                        "capturaAudio.cs");
                Console.WriteLine("Error [iniciarCaptura]->" + ex.Message);
            }
        }
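
wi_DataAvailable and iniciarStreaming are not included in the snippet. Given the WaveFileWriter, the handler presumably appends each captured block to the temp WAV file; a sketch of just the handler, under that assumption:

        private void wi_DataAvailable(object sender, WaveInEventArgs e)
        {
            writer.Write(e.Buffer, 0, e.BytesRecorded);
            writer.Flush(); // keep the temp file current for the streaming thread
        }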
Example #3
        public MicrophoneRecorder(IMumbleProtocol protocol)
        {
            _protocol = protocol;
            var sourceStream = new WaveInEvent
            {
                WaveFormat = new WaveFormat(48000, 1)
            };
            sourceStream.DataAvailable += VoiceDataAvailable;

            sourceStream.StartRecording();
        }
Example #4
        /// <summary>
        /// Starts recording.
        /// </summary>
        public void StartRecord(string audioFileName)
        {
            waveIn = new WaveInEvent();
            waveIn.DeviceNumber = AudioController.getInstance().GetDefaultInputDeviceNumber();
            waveIn.WaveFormat = new WaveFormat(44100, 2);
            waveIn.DataAvailable += OnDataAvailable;
            writer = new WaveFileWriter(audioFileName, waveIn.WaveFormat);
            isRecording = true;

            waveIn.StartRecording();
        }
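
The OnDataAvailable handler and the matching teardown are not shown. A minimal sketch of the usual NAudio pattern (the StopRecord name and the cleanup order are assumptions):

        private void OnDataAvailable(object sender, WaveInEventArgs e)
        {
            if (isRecording)
                writer.Write(e.Buffer, 0, e.BytesRecorded); // append the raw PCM block to the WAV file
        }

        public void StopRecord()
        {
            isRecording = false;
            waveIn.StopRecording();
            waveIn.Dispose();
            writer.Dispose(); // finalizes the WAV header lengths
        }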
Example #5
 //------------------------------------------------------------------------------------------------------------------------
 public void Start()
 {
     audioFormat = new AudioFormat(8000, 16, 2);
     IsActive = true;
     waveSource = new WaveInEvent();
     //wave format
     waveSource.WaveFormat = new WaveFormat(audioFormat.samplerate, audioFormat.bitsperchannel, audioFormat.channels);
     //register event cbs
     waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
     waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);
     //start record from mic
     waveSource.StartRecording();
 }
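
The two registered callbacks are not part of the snippet. A minimal sketch of typical handler shapes (the bodies are assumptions):

 private void waveSource_DataAvailable(object sender, WaveInEventArgs e)
 {
     // e.Buffer holds e.BytesRecorded bytes of raw PCM in the format configured above.
 }

 private void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
 {
     waveSource.Dispose();
     IsActive = false;
 }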
Example #6
        static void _main()
        {
            BlackCore.basic.cParams args = bcore.app.args;

            client = new System.Net.Sockets.TcpClient();

            int wavInDevices = WaveIn.DeviceCount;
            int selWav = 0;
            for (int wavDevice = 0; wavDevice < wavInDevices; wavDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(wavDevice);
                Console.WriteLine("Device {0}: {1}, {2} channels", wavDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            Console.Write("Select device: ");
            selWav = int.Parse(Console.ReadLine());
            Console.WriteLine("Selected device is " + selWav.ToString());

            sshClient = new SshClient(args["host"], args["user"], args["pass"]);
            sshClient.Connect();

            if (sshClient.IsConnected)
            {
                shell = sshClient.CreateShellStream("xterm", 50, 50, 640, 480, 17640);
                Console.WriteLine("Open listening socket...");
                shell.WriteLine("nc -l " + args["port"] + "|pacat --playback");
                System.Threading.Thread.Sleep(2000);

                Console.WriteLine("Try to connect...");
                client.Connect(args["host"], int.Parse(args["port"]));
                if (!client.Connected) return;
                upStream = client.GetStream();

                //====================

                WaveInEvent wavInStream = new WaveInEvent();
                wavInStream.DataAvailable += new EventHandler<WaveInEventArgs>(wavInStream_DataAvailable);
                wavInStream.DeviceNumber = selWav;
                wavInStream.WaveFormat = new WaveFormat(44100, 16, 2);
                wavInStream.StartRecording();
                Console.WriteLine("Working.....");

                Console.ReadKey();
                // Stop capturing before tearing the connections down, so the
                // DataAvailable handler never writes to a closed stream.
                wavInStream.StopRecording();
                wavInStream.Dispose();
                wavInStream = null;
                sshClient.Disconnect();
                client.Close();
            }
        }
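
wavInStream_DataAvailable is not shown; presumably it forwards each captured block over the TCP stream into the remote "nc | pacat" pipeline. A sketch under exactly that assumption:

        static void wavInStream_DataAvailable(object sender, WaveInEventArgs e)
        {
            // 44.1 kHz / 16-bit / stereo matches pacat's default sample spec.
            upStream.Write(e.Buffer, 0, e.BytesRecorded);
        }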
Example #7
    public Form1()
    {
        InitializeComponent();

        var waveIn = new NAudio.Wave.WaveInEvent
        {
            DeviceNumber       = 0, // customize this to select your microphone device
            WaveFormat         = new NAudio.Wave.WaveFormat(rate: 1000, bits: 16, channels: 1),
            BufferMilliseconds = 10
        };

        waveIn.DataAvailable += WaveIn_DataAvailable;
        waveIn.StartRecording();
    }
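
WaveIn_DataAvailable is left out. With 16-bit mono input, a handler typically converts the byte buffer to Int16 samples before plotting or analysis; a minimal sketch (the conversion is an assumption about what the handler does):

    private void WaveIn_DataAvailable(object sender, NAudio.Wave.WaveInEventArgs e)
    {
        // Each 16-bit sample arrives as two little-endian bytes.
        int sampleCount = e.BytesRecorded / 2;
        var samples = new short[sampleCount];
        for (int i = 0; i < sampleCount; i++)
            samples[i] = BitConverter.ToInt16(e.Buffer, i * 2);
        // ... plot or analyze samples here ...
    }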
Example #8
        public void Record()
        {
            _recording = true;

            if (sourceStream != null)
                sourceStream.Dispose();
            sourceStream = new WaveInEvent
            {
                WaveFormat = new WaveFormat(48000, 16, 1)
            };
            sourceStream.BufferMilliseconds = 5;
            sourceStream.DeviceNumber = SelectedDevice;
            sourceStream.NumberOfBuffers = 3;
            sourceStream.DataAvailable += VoiceDataAvailable;

            sourceStream.StartRecording();
        }
Example #9
        // Sets up the stream for recording/streaming: whenever data is available,
        // the stream raises DataAvailable, which invokes the supplied dataAvailable
        // handler with the captured bytes.
        public void StartRecording(int index, EventHandler<NAudio.Wave.WaveInEventArgs> dataAvailable)
        {
            if (dataAvailable == null)
                return;

            //setup the input stream. we get the device number from the selected index, setup the format for reading
            sourceStream = new NAudio.Wave.WaveInEvent();//NAudio.Wave.WaveIn();
            sourceStream.DeviceNumber = index;
            sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(index).Channels);
            waveFormat = sourceStream.WaveFormat;

            //setup the callbacks when there is data or the recording stopped(suddenly disconnection = no recording = the function)
            sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(dataAvailable);
            sourceStream.RecordingStopped += new EventHandler<NAudio.Wave.StoppedEventArgs>(StopRecording);

            sourceStream.StartRecording();
        }
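
Because StopRecording is subscribed to RecordingStopped, it has to match that event's signature. A minimal sketch (the body is an assumption):

        private void StopRecording(object sender, NAudio.Wave.StoppedEventArgs e)
        {
            // Fires both on an explicit stop and when the device disconnects.
            sourceStream.Dispose();
            sourceStream = null;
        }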
Example #10
    public override void Setup() {
      base.Setup();

      int waveInDevices = WaveIn.DeviceCount;
      for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++) {
        WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
        Host.Log(this, "Device " + waveInDevice + ": " + deviceInfo.ProductName + ", " + deviceInfo.Channels + " channels");
      }

      waveIn = new WaveInEvent();
      waveIn.DeviceNumber = ConfigManager.GetInstance().Find("microphone.device", 0);
      waveIn.WaveFormat = new WaveFormat(16000, 2);
      waveIn.DataAvailable += waveIn_DataAvailable;

      buffer = new StreamBuffer();
      waveIn.StartRecording();

      double confidence = ConfigManager.GetInstance().Find("microphone.confidence", 0.6);
      AddOnManager.GetInstance().AddAudioSource("Microphone", buffer, "Microphone", null, confidence);
    }
Example #11
        public void record()
        {
            Console.WriteLine();
            Console.WriteLine("Recording on Device  # 0 ");

            WaveSource = new WaveInEvent();
            WaveSource.DeviceNumber = ActiveDevice;
            WaveSource.WaveFormat   = new WaveFormat(44100, 1);

            WaveSource.DataAvailable    += new EventHandler <WaveInEventArgs>(waveSource_DataAvailable);
            WaveSource.RecordingStopped += new EventHandler <StoppedEventArgs>(waveSource_RecordingStopped);

            // Milliseconds since the Unix epoch (DateTime.MinValue is year 1; +1969 years = 1970).
            long milliseconds = (long)Math.Round(DateTime.Now.Subtract(DateTime.MinValue.AddYears(1969)).TotalMilliseconds);

            Filename = Path.Combine(samplePath, $"{sampleCount}_AudioSample_{milliseconds}.wav");
            sampleCount++;
            WaveFile = new WaveFileWriter(Filename, WaveSource.WaveFormat);

            WaveSource.StartRecording();
        }
Example #12
        public void Start(IPEndPoint RemoteUdpPoint)
        {
            RemoteServer = RemoteUdpPoint;

            provider = new BufferedWaveProvider(format);

            if (waveIn == null)
            {
                waveIn = new WaveInEvent();
                waveIn.WaveFormat = format;
                waveIn.BufferMilliseconds = 500;
                waveIn.DataAvailable += waveIn_DataAvailable;
                waveIn.StartRecording();
            }
            if (waveOut == null)
            {
                waveOut = new WaveOut();
                waveOut.DesiredLatency = 500;
                waveOut.Init(provider);
                waveOut.Play();
            }
        }
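
waveIn_DataAvailable is not shown; given the RemoteServer endpoint, it presumably sends each captured block to the peer as a datagram. A sketch assuming a UdpClient field named udpClient (hypothetical):

        private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
        {
            // Ship each 500 ms capture block to the remote peer as one datagram.
            udpClient.Send(e.Buffer, e.BytesRecorded, RemoteServer);
        }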
Example #13
        /// <summary>
        /// Starts recording and keeps the streaming recognition session alive,
        /// restarting the request roughly every 55 seconds.
        /// </summary>
        /// <returns>A task representing the recognition loop.</returns>
        public async Task BeginSpeechRecognition()
        {
            if (null == waveIn)
            {
                return;
            }

            waveIn.StartRecording();

            await Task.Run(() =>
            {
                var startTicks = DateTime.Now;

                do
                {
                    System.Threading.Thread.Sleep(250); // check the timer periodically instead of spinning
                    var currentTime   = DateTime.Now;
                    TimeSpan interval = currentTime - startTicks;

                    if (interval.TotalSeconds >= 55)
                    {
                        lock (writeLock)
                        {
                            streamingCall.WriteCompleteAsync();
#pragma warning disable CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
                            ConfigureSpeechRequest(RecognitionConfig.Types.AudioEncoding.Linear16, 16000, "en", false);
                            ProcessResponses();
#pragma warning restore CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
                        }

                        startTicks = DateTime.Now;
                    }
                } while (true); // no exit condition: runs for the lifetime of the process
            });

            return;
        }
Example #14
        static void Main(string[] args)
        {
            StatsdClient.Metrics.Configure(new MetricsConfig { StatsdServerName = "127.0.0.1" });

            int waveInDevices = WaveIn.DeviceCount;
            for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
                Console.WriteLine("Device {0}: {1}, {2} channels", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            Console.WriteLine();
            Console.Write("Select Device: ");

            int device = Int32.Parse(Console.ReadLine());

            waveIn = new WaveInEvent();
            waveIn.DeviceNumber = device;
            waveIn.DataAvailable += waveIn_DataAvailable;
            waveIn.WaveFormat = new WaveFormat(200, 2);
            waveIn.StartRecording();

            while (true) Thread.Sleep(100);
        }
Example #15
        public static void RecThread()
        {
            micAud = new NAudio.Wave.WaveInEvent();
            //micAud.WaveFormat = new NAudio.Wave.WaveFormat(44100, 1);
            //micAud.DataAvailable += MicAud_DataAvailable;
            //micAud.RecordingStopped += MicAud_RecordingStopped;
            //// micAud.DataAvailable += (s, capData) => wfw.Write(capData.Buffer, 0, capData.BytesRecorded);
            //wfw = new WaveFileWriter(_micLoc, micAud.WaveFormat);
            //micAud.StartRecording();

            using (spkAud = new CSCore.SoundIn.WasapiLoopbackCapture())
            {
                spkAud.Initialize();

                micAud.WaveFormat        = new NAudio.Wave.WaveFormat(spkAud.WaveFormat.SampleRate, spkAud.WaveFormat.Channels);
                micAud.DataAvailable    += MicAud_DataAvailable;
                micAud.RecordingStopped += MicAud_RecordingStopped;
                // micAud.DataAvailable += (s, capData) => wfw.Write(capData.Buffer, 0, capData.BytesRecorded);
                wfw = new WaveFileWriter(_micLoc, micAud.WaveFormat);
                micAud.StartRecording();

                using (var w = new WaveWriter(_spkLoc, spkAud.WaveFormat))
                {
                    spkAud.DataAvailable += (s, capData) => w.Write(capData.Data, capData.Offset, capData.ByteCount);
                    spkAud.Start();

                    while (!stopRec)
                    {
                        System.Threading.Thread.Sleep(10); // wait for the stop flag without burning a core
                    }

                    spkAud.Stop();
                    micAud.StopRecording();
                }
            }
        }
Example #16
        static async Task<object> StreamingMicrophoneRecognizeAsync(int seconds = 60, string languageCode = "en-US")
        {
            var speech        = SpeechClient.Create();
            var streamingCall = speech.StreamingRecognize();
            await streamingCall.WriteAsync(
                new StreamingRecognizeRequest()
            {
                StreamingConfig = new StreamingRecognitionConfig()
                {
                    Config = new RecognitionConfig()
                    {
                        Encoding        = RecognitionConfig.Types.AudioEncoding.Linear16,
                        SampleRateHertz = 44100,
                        LanguageCode    = languageCode
                    },
                    InterimResults = true,
                }
            });

            Task printResponses = Task.Run(async () =>
            {
                var responseStream = streamingCall.GetResponseStream();
                while (await responseStream.MoveNextAsync())
                {
                    StreamingRecognizeResponse response = responseStream.Current;
                    Console.WriteLine(response.Results[0].Alternatives[0].Transcript); // Print most probable result.
                }
            });

            object writeLock = new object();
            bool   writeMore = true;
            var    waveIn    = new NAudio.Wave.WaveInEvent();

            waveIn.DeviceNumber   = 0;
            waveIn.WaveFormat     = new NAudio.Wave.WaveFormat(44100, 1); // 44100Hz Mono.
            waveIn.DataAvailable += (object sender, NAudio.Wave.WaveInEventArgs args) =>
            {
                lock (writeLock)
                {
                    if (!writeMore)
                    {
                        return;
                    }

                    streamingCall.WriteAsync(
                        new StreamingRecognizeRequest()
                    {
                        AudioContent = Google.Protobuf.ByteString.CopyFrom(args.Buffer, 0, args.BytesRecorded)
                    }).Wait();
                }
            };

            waveIn.StartRecording();
            Console.WriteLine("Speek now.");
            await Task.Delay(TimeSpan.FromSeconds(seconds));

            waveIn.StopRecording();
            lock (writeLock)
            {
                writeMore = false;
            }

            await streamingCall.WriteCompleteAsync();

            await printResponses;

            return(0);
        }
Example #17
 public void Start()
 {
     Console.WriteLine($"Starting recording...");
     wvin.StartRecording();
 }
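
wvin is created and configured elsewhere in that class. A minimal sketch of what the setup might look like (the Setup method name, device number, and format are all assumptions):

 private NAudio.Wave.WaveInEvent wvin;

 public void Setup()
 {
     wvin = new NAudio.Wave.WaveInEvent
     {
         DeviceNumber = 0,                                     // assumed: default capture device
         WaveFormat = new NAudio.Wave.WaveFormat(44100, 16, 1) // assumed: 44.1 kHz, 16-bit mono
     };
     wvin.DataAvailable += (s, e) => { /* consume e.Buffer here */ };
 }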
Example #18
        public MicManager()
        {
            pitchTracker.SampleRate = 16000.0;
            pitchTracker.PitchDetected += pitchTracker_PitchDetected;

            waveInEvent = new WaveInEvent();
            waveInEvent.DataAvailable += WaveOnDataAvailable;
            waveInEvent.WaveFormat = new NAudio.Wave.WaveFormat(16000, 1); // 16kHz mono
            waveInEvent.StartRecording();
        }
Example #19
        /// <summary>
        /// Start audio source.
        /// </summary>
        /// 
        /// <remarks>Starts the audio source and returns execution to the caller. The audio
        /// source object creates a background thread and notifies about new frames via
        /// the <see cref="DataAvailable"/> event.</remarks>
        /// 
        /// <exception cref="ArgumentException">Audio source is not specified.</exception>
        /// 
        public void Start()
        {
            if (string.IsNullOrEmpty(_source))
                throw new ArgumentException("Audio source is not specified.");

            if (_started) return;

            // check source
            lock (_lock)
            {
                if (_started)
                    return;

                int i = 0, selind = -1;
                for (var n = 0; n < WaveIn.DeviceCount; n++)
                {
                    if (WaveIn.GetCapabilities(n).ProductName == _source)
                        selind = i;
                    i++;
                }
                if (selind == -1)
                {
                    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
                    return;
                }

                _started = true;
                _waveIn = new WaveInEvent
                          {
                              BufferMilliseconds = 200,
                              DeviceNumber = selind,
                              WaveFormat = RecordingFormat
                          };
                _waveIn.DataAvailable += WaveInDataAvailable;
                _waveIn.RecordingStopped += WaveInRecordingStopped;

                _waveProvider = new WaveInProvider(_waveIn);
                _sampleChannel = new SampleChannel(_waveProvider);
                _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                _waveIn.StartRecording();
            }
        }
Example #20
        /// <summary>
        /// Start audio source.
        /// </summary>
        /// 
        /// <remarks>Starts the audio source and returns execution to the caller. The audio
        /// source object creates a background thread and notifies about new frames via
        /// the <see cref="DataAvailable"/> event.</remarks>
        /// 
        /// <exception cref="ArgumentException">Audio source is not specified.</exception>
        /// 
        public void Start()
        {
            if (!IsRunning)
            {
                // check source

                int i = 0, selind = -1;
                for (int n = 0; n < WaveIn.DeviceCount; n++)
                {
                    if (WaveIn.GetCapabilities(n).ProductName == _source)
                        selind = i;
                    i++;
                }
                if (selind == -1)
                {
                    //device no longer connected or not configured
                    if (i > 0)
                        selind = 0;
                    else
                    {
                        //if (AudioSourceError != null)
                        //    AudioSourceError(this, new AudioSourceErrorEventArgs("not connected"));
                        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
                        return;    
                    }
                    
                }

                _waveIn = new WaveInEvent { BufferMilliseconds = 200, DeviceNumber = selind, WaveFormat = RecordingFormat };
                _waveIn.DataAvailable += WaveInDataAvailable;
                _waveIn.RecordingStopped += WaveInRecordingStopped;

                _waveProvider = new WaveInProvider(_waveIn);
                _sampleChannel = new SampleChannel(_waveProvider);
                
                if (LevelChanged != null)
                {
                    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                }
                _waveIn.StartRecording();

            }
        }
Example #21
        private void sendSong(int deviceNumber)
        {
            //if (sourceList.SelectedItems.Count == 0) return;
            // set up the recorder
            recorder = new WaveInEvent();
            recorder.DataAvailable += SendDataAvaible;
            recorder.DeviceNumber = deviceNumber;
            recorder.WaveFormat = new WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

            // set up our signal chain
            bufferedWaveProvider = new BufferedWaveProvider(recorder.WaveFormat);

            recorder.StartRecording();
        }
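
SendDataAvaible (spelling as in the snippet) is not shown; the BufferedWaveProvider suggests it queues the captured bytes for a downstream consumer. A sketch under that assumption:

        private void SendDataAvaible(object sender, WaveInEventArgs e)
        {
            // Queue the captured block; playback or network code drains the provider.
            bufferedWaveProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
        }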
Example #22
        // Engine initialization: return true on success, false on failure.
        static bool Initialize()
        {
            WaveInEvent WavEvent = new WaveInEvent();
            WavEvent.DeviceNumber = 0;

            WavEvent.DataAvailable += new EventHandler<WaveInEventArgs>(InputDevice_DataAvailable);
            WavEvent.WaveFormat = WavFormat;
            WavEvent.StartRecording();

            Console.WriteLine("SYS_LANG?");
            lang = Console.ReadLine().ToLower();

            Console.WriteLine("GSR_THRESHOLD?");
            if(!float.TryParse(Console.ReadLine(),out threshold)){
                threshold = 0.1f;
            }

            if (lang == "sys_lang")
            {
                lang = "ja";
            }

            return true;
        }
Example #23
        //Microphone.
        //http://mark-dot-net.blogspot.com/2011/04/how-to-use-wavefilewriter.html
        public void recordInput()
        {
            Console.WriteLine("Now recording...");
            waveSource = new WaveInEvent();
            waveSource.WaveFormat = new WaveFormat(16000, 1);

            waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
            waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);
            //string tempFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString() + ".wav");
            string tempFile = Path.Combine(@"C:\Users\Nick\Desktop",  "test.wav");
            waveFile = new WaveFileWriter(tempFile, waveSource.WaveFormat);
            waveSource.StartRecording();

            Thread.Sleep(3000);
            waveSource.StopRecording();
            waveSource.Dispose();
            waveSource = null;
            waveFile.Close();

            Console.WriteLine("Finished record");
        }
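
The two handlers wired up above are omitted. Following the linked blog post's pattern, DataAvailable appends to the WaveFileWriter, and the stop handler can stay empty here because recordInput() disposes everything inline; a sketch:

        private void waveSource_DataAvailable(object sender, WaveInEventArgs e)
        {
            waveFile.Write(e.Buffer, 0, e.BytesRecorded);
        }

        private void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
        {
            // Nothing to do: recordInput() already disposes the source and closes the file.
        }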
Example #24
        private void record_Click(object sender, EventArgs e)
        {
            // Guard before indexing: bail out when nothing is selected.
            if (sourceList.SelectedItems.Count == 0) return;
            int deviceNumber = sourceList.SelectedItems[0].Index;
            // set up the recorder
            recorder = new WaveInEvent();
            //recorder.DataAvailable += RecorderOnDataAvailable;
            recorder.DataAvailable += SendDataAvaible;
            recorder.DeviceNumber = deviceNumber;
            recorder.WaveFormat = new WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

            // set up our signal chain
            bufferedWaveProvider = new BufferedWaveProvider(recorder.WaveFormat);
            //writer = new WaveFileWriter("temp.wav", bufferedWaveProvider.WaveFormat);
            //savingWaveProvider = new LoopBack(bufferedWaveProvider, "temp.wav");

            recorder.StartRecording();
        }
Example #25
        public static void Execute(QiSession session)
        {
            string serviceName = "CSharpSoundDownloaderSpare";
            var audioDevice = session.GetService("ALAudioDevice");

            var waveIn = new WaveInEvent();

            #region 1/4: set up the path that sends audio to the robot
            // Lower the output sample rate from the default (48 kHz) to 16 kHz.
            // Note: only 16000, 22050, 44100, or 48000 can be selected.
            audioDevice["setParameter"].Call("outputSampleRate", 16000);

            // This affects how often the DataAvailable event below fires and the buffer length.
            // Note: the buffer length must not exceed 16384
            // (see the official documentation for details).
            waveIn.BufferMilliseconds = 200;
            // Microphone capture format: match the sample rate set above.
            waveIn.WaveFormat = new WaveFormat(16000, 16, 2);

            int count = 0;
            waveIn.DataAvailable += (_, e) =>
            {
                if (e.BytesRecorded > 16384) return;

                byte[] bufferToSend = new byte[e.BytesRecorded];
                Array.Copy(e.Buffer, bufferToSend, e.BytesRecorded);

                int p = audioDevice["sendRemoteBufferToOutput"].Post(bufferToSend.Length / 4, bufferToSend);
                Console.WriteLine($"received data, {count}");
                count++;
            };
            #endregion

            #region 2/4: pick up audio from the robot, prepare the playback device
            var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            var wavProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));

            var wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 200);
            wavPlayer.Init(new VolumeWaveProvider16(wavProvider));
            wavPlayer.Play();
            #endregion

            #region 3/4: pick up audio from the robot, enter its microphone-monitoring mode
            var objBuilder = QiObjectBuilder.Create();
            // Registering the processRemote callback satisfies the ALAudioDevice-side contract.
            objBuilder.AdvertiseMethod(
                "processRemote::v(iimm)",
                (sig, arg) =>
                {
                    // Process the incoming audio here.
                    //Console.WriteLine("Received Buffer!");
                    //Console.WriteLine(arg.Dump());

                    // The data layout can be inspected by enabling the dumps above.
                    byte[] raw = arg[3].ToBytes();
                    wavProvider.AddSamples(raw, 0, raw.Length);

                    return QiValue.Void;
                });

            // Register the service that exposes the callback above.
            session.Listen("tcp://0.0.0.0:0").Wait();
            ulong registeredId = session.RegisterService(serviceName, objBuilder.BuildObject()).GetUInt64(0UL);

            #endregion

            #region 4/4: adjust settings and actually run the I/O
            // Magic numbers; see http://www.baku-dreameater.net/archives/2411 for details.
            audioDevice["setClientPreferences"].Call(serviceName, 16000, 3, 0);

            // Start
            audioDevice["subscribe"].Call(serviceName);
            waveIn.StartRecording();
            #endregion

            Console.WriteLine("Press ENTER to quit..");
            Console.ReadLine();

            audioDevice["unsubscribe"].Call(serviceName);
            session.UnregisterService((uint)registeredId);
            wavPlayer.Stop();
            wavPlayer.Dispose();

            waveIn.StopRecording();
            waveIn.Dispose();
        }
Example #26
        async Task<object> StreamingMicRecognizeAsync(int seconds)
        {
            object writeLock = new object();
            bool   writeMore = true;

            if (tamam)
            {
                return(0);
            }


            if (NAudio.Wave.WaveIn.DeviceCount < 1)
            {
                metin.Content = "Mikrofon Yok!";
                return(-1);
            }
            var speech        = SpeechClient.Create();
            var streamingCall = speech.StreamingRecognize();

            await streamingCall.WriteAsync(
                new StreamingRecognizeRequest()
            {
                StreamingConfig = new StreamingRecognitionConfig()
                {
                    Config = new RecognitionConfig()
                    {
                        Encoding =
                            RecognitionConfig.Types.AudioEncoding.Linear16,
                        SampleRateHertz = 16000,
                        LanguageCode    = "tr",
                    },
                    InterimResults = true,
                }
            });

            Task printResponses = Task.Run(async () =>
            {
                while (await streamingCall.ResponseStream.MoveNext(
                           default(System.Threading.CancellationToken)))
                {
                    foreach (var result in streamingCall.ResponseStream
                             .Current.Results)
                    {
                        foreach (var alternative in result.Alternatives)
                        {
                            if (!tamam)
                            {
                                yazi = alternative.Transcript;
                                timer.Start();
                            }
                        }
                    }
                }
            });



            var waveIn = new NAudio.Wave.WaveInEvent();

            waveIn.DeviceNumber   = 0;
            waveIn.WaveFormat     = new NAudio.Wave.WaveFormat(16000, 1);
            waveIn.DataAvailable +=
                (object sender, NAudio.Wave.WaveInEventArgs args) =>
            {
                lock (writeLock)
                {
                    if (!writeMore)
                    {
                        return;
                    }
                    streamingCall.WriteAsync(
                        new StreamingRecognizeRequest()
                    {
                        AudioContent = Google.Protobuf.ByteString
                                       .CopyFrom(args.Buffer, 0, args.BytesRecorded)
                    }).Wait();
                }
            };


            waveIn.StartRecording();
            metin.Content        = "Şimdi Konuşabilirsiniz";
            kulak.Visibility     = Visibility.Visible;
            acikAgiz.IsEnabled   = false;
            kapaliAgiz.IsEnabled = false;
            try
            {
                await Task.Delay(TimeSpan.FromSeconds(seconds), cancellationTokenSource.Token);
            }
            catch (TaskCanceledException ex)
            {
                Console.WriteLine(ex.Message);
            }
            finally
            {
                cancellationTokenSource.Dispose();
            }

            acikAgiz.IsEnabled   = true;
            kapaliAgiz.IsEnabled = true;
            kulak.Visibility     = Visibility.Hidden;
            waveIn.StopRecording();

            lock (writeLock) writeMore = false;


            if (genelMod.IsChecked == true)
            {
                cevapla(yazi);
            }
            if (ceviriMod.IsChecked == true)
            {
                cevir(yazi);
            }

            await streamingCall.WriteCompleteAsync();

            await printResponses;

            metin.Content = yazi;

            return(0);
        }
Example #27
        /// <summary>
        /// Open socket for voice chat
        /// </summary>
        /// <param name="address"></param>
        private void StartVoiceChat(string address)
        {
            var splittedAddress = address.Split(':');
            var ip = splittedAddress[0];
            var port = splittedAddress[1];

            BindSocket();

            remoteEndPoint = new IPEndPoint(IPAddress.Parse(ip),Int32.Parse(port));

            sourceStream = new WaveInEvent
            {
                DeviceNumber = InputAudioDevice,
                WaveFormat = new WaveFormat(8000, 16, WaveIn.GetCapabilities(0).Channels)
            };

            udpConnectionActive = true;

            sourceStream.DataAvailable += sourceStream_DataAvailable;
            sourceStream.StartRecording();

            udpReceiveThread = new Thread(ReceiveUdpData);
            udpReceiveThread.Start();
        }
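
sourceStream_DataAvailable is not shown; given BindSocket() and remoteEndPoint, it presumably pushes each captured block to the peer over UDP. A sketch assuming a Socket field named socket (hypothetical):

        private void sourceStream_DataAvailable(object sender, WaveInEventArgs e)
        {
            if (!udpConnectionActive) return;
            socket.SendTo(e.Buffer, 0, e.BytesRecorded, SocketFlags.None, remoteEndPoint);
        }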
Example #28
        private void ChangeAudioSource(CaptureDevice selectedAudioDevice)
        {
            if (selectedAudioDevice == null)
            {
                return;
            }

            StopAudio();

            this.bufferedWaveProvider = new BufferedWaveProvider(waveFormat);

            var player = new WaveOut();
            player.Init(bufferedWaveProvider);
            player.Play();

            audioSource = new WaveInEvent
            {
                WaveFormat = waveFormat,
                DeviceNumber = int.Parse(selectedAudioDevice.Id)
            };
            audioSource.DataAvailable += AudioAvailable;
            audioSource.StartRecording();
        }
Example #29
        public virtual void Start(int input, int output, ToxAvCodecSettings settings, string videoDevice = "")
        {
            toxav.PrepareTransmission(callIndex, true);

            WaveFormat outFormat = new WaveFormat((int)settings.AudioSampleRate, (int)settings.AudioChannels);
            wave_provider = new BufferedWaveProvider(outFormat);
            wave_provider.DiscardOnBufferOverflow = true;

            filterAudio = new FilterAudio((int)settings.AudioSampleRate);
            filterAudio.EchoFilterEnabled = false;

            if (WaveIn.DeviceCount > 0)
            {
                wave_source = new WaveInEvent();

                if (input != -1)
                    wave_source.DeviceNumber = input;

                WaveFormat inFormat = new WaveFormat((int)ToxAv.DefaultCodecSettings.AudioSampleRate, 1);

                wave_source.WaveFormat = inFormat;
                wave_source.DataAvailable += wave_source_DataAvailable;
                wave_source.RecordingStopped += wave_source_RecordingStopped;
                wave_source.BufferMilliseconds = ToxAv.DefaultCodecSettings.AudioFrameDuration;
                wave_source.StartRecording();
            }

            if (WaveOut.DeviceCount > 0)
            {
                wave_out = new WaveOut();

                if (output != -1)
                    wave_out.DeviceNumber = output;

                wave_out.Init(wave_provider);
                wave_out.Play();
            }
        }