Example #1
File: Form1.cs Project: syanick/Delay
        private void btnDump_Click(object sender, EventArgs e)
        {
            input.StopRecording();
            recording = false;

            int tempbufferbytes;

            if (curdelay > dumpMs && dumps > 1)
            {
                // Keep everything except one dump's worth of the target delay. Multiply
                // before dividing by 1000 so sub-second targetMs values are not truncated
                // by integer division.
                tempbufferbytes = buffer.BufferedBytes - (int)((long)waveformat.AverageBytesPerSecond * targetMs / 1000 / dumps);
                var tempbuffer = new byte[buffer.BufferedBytes];

                tempbufferbytes = buffer.Read(tempbuffer, 0, tempbufferbytes);

                buffer.ClearBuffer();

                buffer.AddSamples(tempbuffer, 0, tempbufferbytes);
            }
            else
            {
                buffer.ClearBuffer();
            }


            curdelay = (int)buffer.BufferedDuration.TotalMilliseconds;
            if (targetRampedUp && curdelay < targetMs)
            {
                rampingup = true;
            }
            else
            {
                rampingdown = true;
            }
        }
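This handler assumes `input` is an NAudio capture device and `buffer` an NAudio BufferedWaveProvider whose BufferedBytes/BufferedDuration the dump logic inspects. The wiring between the two is not shown in this file; a minimal sketch of what it presumably looks like:

        // Assumed wiring (not shown in the snippet): captured audio is appended to
        // the BufferedWaveProvider that the dump logic above trims and measures.
        input.DataAvailable += (s, a) => buffer.AddSamples(a.Buffer, 0, a.BytesRecorded);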
Example #2
        private void button1_Click(object sender, EventArgs e)
        {
            textBox1.Text = "";



            var outputFolder = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.MyMusic), "NAudio");

            Directory.CreateDirectory(outputFolder);
            string fileName       = KeyGeneretor.GetUniqueKey(8);
            var    outputFilePath = Path.Combine(outputFolder, fileName + ".wav");


            var waveIn = new WaveInEvent();

            // Record in the format the writer is created with; without this the WAV
            // header (16 kHz, 16-bit, mono) would not match the captured bytes, since
            // WaveInEvent defaults to 8 kHz.
            var format = new WaveFormat(16000, 16, 1);
            waveIn.WaveFormat = format;

            WaveFileWriter writer = new WaveFileWriter(outputFilePath, format);

            bool closing = false;

            button1.Enabled = false;
            button2.Enabled = true;

            /*WaveFileReader reader = new NAudio.Wave.WaveFileReader(outputFilePath);
             * WaveFormat newFormat = new WaveFormat(8000, 16, 1);
             * WaveFormatConversionStream str = new WaveFormatConversionStream(newFormat, reader);*/


            waveIn.DataAvailable += (s, a) =>
            {
                writer.Write(a.Buffer, 0, a.BytesRecorded);
                if (writer.Position > waveIn.WaveFormat.AverageBytesPerSecond * 30)
                {
                    waveIn.StopRecording();
                }
            };


            // NOTE: these subscriptions are added on every click of button1; in real
            // code, wire them up once (e.g. in the constructor) to avoid stacking handlers.
            button2.Click += (s, a) =>
                             waveIn.StopRecording();

            FormClosing += (s, a) => { closing = true; waveIn.StopRecording(); };


            waveIn.RecordingStopped += (s, a) =>
            {
                writer?.Dispose();
                writer          = null;
                button1.Enabled = true;
                button2.Enabled = true;

                if (closing)
                {
                    waveIn.Dispose();
                }
            };

            // Start only after all handlers are attached so no buffers are missed.
            waveIn.StartRecording();
        }
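StopRecording only requests a stop: RecordingStopped is raised later from the capture thread, which is why disposal lives in that handler. If calling code needs to know when the file is fully written, one option (a sketch, not part of the original form) is to bridge the event into an awaitable task:

        // Hypothetical bridge: await the returned task to know RecordingStopped has fired.
        static Task WaitForStopAsync(WaveInEvent waveIn)
        {
            var tcs = new TaskCompletionSource<object>();
            waveIn.RecordingStopped += (s, a) => tcs.TrySetResult(null);
            return tcs.Task;
        }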
Example #3
        private static void Timer1_Tick()
        {
            Console.WriteLine("Stop recording");
            //Stop recording
            waveIn.StopRecording();
            //Turn off events, will get re-enabled once another audio peak gets detected
            //Console.Beep();
            //Thread.Sleep(1000);
            timer1Enabled = false;
            timer1?.Dispose();
            timer1 = null;
            //Console.WriteLine("Timer stopped");
            //audioRecorder.SampleAggregator.Reset();
            //Console.Beep(800, 800);

            //Call the async google voice stream method with our saved audio buffer
            Task me = StreamBufferToGooglesAsync();

            try
            {
                //bool complete = me.Wait(5000);
                // Console.WriteLine((!complete ? "Not" : "") + "Complete");
                me.Wait();
            }
            catch
            {
                // Swallow recognition failures so monitoring can restart below.
            }
            Console.WriteLine("Listening - Say Robco, followed by a command.");
            //Console.Beep();
            audioRecorder.SampleAggregator.Reset();
            audioRecorder.BeginMonitoring(0);
        }
Example #4
        private void checkBoxChanged(object sender, EventArgs e)
        {
            CheckBox ch = (CheckBox)sender;

            if (ch == MicEnabled_checkbox)
            {
                DebugLine.setVisible("Microphone: ", ch.Checked);
                Settings.Default.MicShow = ch.Checked;
            }
            else if (ch == volume_checkbox)
            {
                DebugLine.setVisible("Volume: ", ch.Checked);
                Settings.Default.VolumeShow = ch.Checked;
                if (ch.Checked)
                {
                    WaveEvent.StartRecording();
                    TimerIndex.Start();
                }
                else
                {
                    TimerIndex.Stop();
                    WaveEvent.StopRecording();
                }
            }
            else if (ch == CPULoad_check)
            {
                DebugLine.setVisible("CPU Load: ", ch.Checked);
                Settings.Default.CPUVisible = ch.Checked;
            }
            else if (ch == MemLoad_check)
            {
                DebugLine.setVisible("Mem: ", ch.Checked);
                Settings.Default.MemVisible = ch.Checked;
            }
            else if (ch == cbBlockKey)
            {
                Settings.Default.BlockKey = ch.Checked;
            }
            else if (ch == cbAlwaysAsAdmin)
            {
                Settings.Default.AlwaysRunAsAdmin = ch.Checked;
            }
            else if (ch == cbEnableOverlay)
            {
                Settings.Default.OverlayEnabled = btnRunHookProcess.Enabled = ch.Checked;
            }
            else if (ch == cbDragBoxMode)
            {
                DebugLine.DragMode = ch.Checked;
                if (!ch.Checked)
                {
                    lPosition.Text = "Position: " + Settings.Default.WindowPosition.X.ToString() + ", " + Settings.Default.WindowPosition.Y.ToString();
                }
            }
            else if (ch == cbAutorun)
            {
                Autorun = cbAutorun.Checked;
            }
            Settings.Default.Save();
        }
Example #5
        public void testFunc()
        {
            // Define Output Path and Create a new WaveInEvent
            var outputFilePath = @"C:\Users\Joe\source\repos\Monotone\Monotone\bin\x86\Debug x86\Samples\";
            var filename       = "sample";
            var index          = 0;
            var waveInEvent    = new WaveInEvent();

            #region NAudio WaveInEvent Event-Based Handling
            waveInEvent.DataAvailable += (s, a) =>
            {
                fileWriter.Write(a.Buffer, 0, a.BytesRecorded);

                // Force Stop Recording after 30 seconds
                if (fileWriter.Position > waveInEvent.WaveFormat.AverageBytesPerSecond * 30)
                {
                    waveInEvent.StopRecording();
                }
            };
            #endregion

            Console.WriteLine("This file will make new samples at each keypress.");
            Console.WriteLine("To actually end the program, you must press the \"q\" key\n");
            Console.WriteLine("Press any key to begin.");

            Console.ReadKey();

            Console.WriteLine("-------------------------------------\n\n");

            while (true)
            {
                fileWriter = new WaveFileWriter(outputFilePath + $"{filename}_{index}.wav", waveInEvent.WaveFormat);
                waveInEvent.StartRecording();

                Console.WriteLine($"Recording {filename}_{index}.wav ... ");
                Console.WriteLine("Press Any Key continue to next file");

                var answr = Console.ReadKey();


                waveInEvent.StopRecording();
                fileWriter?.Dispose();
                fileWriter = null;

                index++;

                if (answr.Key == ConsoleKey.Q)
                {
                    break;
                }
            }

            // Stop Recording and dispose object

            waveInEvent.Dispose();

            Console.WriteLine("\n\nRecording Stopped.");
        }
Example #6
 private void TestWave_DataAvailable(object sender, WaveInEventArgs e)
 {
     saver.Write(e.Buffer, 0, e.BytesRecorded);
     if (saver.Position > saveWave.WaveFormat.AverageBytesPerSecond * 30)
     {
         saveWave.StopRecording();
     }
 }
Example #7
 private void OnRecorderDataAvailable(object sender, WaveInEventArgs args)
 {
     writer.Write(args.Buffer, 0, args.BytesRecorded);
     if (writer.Position > waveIn.WaveFormat.AverageBytesPerSecond * 30)
     {
         waveIn.StopRecording();
     }
 }
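Examples #2, #5, #6 and #7 all use the same 30-second cutoff: WaveFileWriter.Position counts bytes written, and dividing by WaveFormat.AverageBytesPerSecond converts bytes to seconds. The pattern can be factored into a helper; the AddStopAfter name below is invented for illustration:

        // Hypothetical helper: stop capture once roughly `seconds` of audio is on disk.
        // Position is in bytes, so Position / AverageBytesPerSecond ~ elapsed seconds.
        static void AddStopAfter(WaveInEvent waveIn, WaveFileWriter writer, int seconds)
        {
            waveIn.DataAvailable += (s, a) =>
            {
                writer.Write(a.Buffer, 0, a.BytesRecorded);
                if (writer.Position > (long)waveIn.WaveFormat.AverageBytesPerSecond * seconds)
                {
                    waveIn.StopRecording();
                }
            };
        }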
Example #8
        public void CloseRecording()
        {
            if (captureDevice != null)
            {
                captureDevice.StopRecording();
            }


            recordedStream.Position = 0;
            RaiseCaptureStopped();
        }
Example #9
        public void StopRecordingAudio(ref MemoryStream stream)
        {
            if (stream == null)
            {
                throw new ArgumentNullException(nameof(stream));
            }

            recordingStreams.Remove(stream);
            if (recordingStreams.Count == 0)
            {
                inputDevice.StopRecording();
            }
        }
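The matching start method is not shown. Inferred from the fields used above (a guess, not the project's actual code), capture is reference-counted: the device starts for the first consumer and, as StopRecordingAudio shows, stops only when the last one is removed:

        // Hypothetical counterpart: start the shared device only for the first consumer.
        public void StartRecordingAudio(MemoryStream stream)
        {
            if (stream == null)
            {
                throw new ArgumentNullException(nameof(stream));
            }

            recordingStreams.Add(stream);
            if (recordingStreams.Count == 1)
            {
                inputDevice.StartRecording();
            }
        }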
Example #10
        //Public: public methods

        /// <summary>
        /// Microphone sensitivity test.
        /// </summary>
        public void SensitivityTest()
        {
            _waveIn.DataAvailable += Sensitivity_Tick;
            MicrophoneSens         = 0;

            _waveIn.StartRecording();

            Thread.Sleep(Core.TIME_TO_NOISETEST);

            _waveIn.StopRecording();

            _waveIn.DataAvailable -= Sensitivity_Tick;
        }
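Sensitivity_Tick is not shown. A minimal sketch, assuming 16-bit PCM capture and that MicrophoneSens holds the peak amplitude observed during the test window:

        // Hypothetical handler: track the loudest 16-bit sample seen while the test runs.
        private void Sensitivity_Tick(object sender, WaveInEventArgs e)
        {
            for (int i = 0; i + 1 < e.BytesRecorded; i += 2)
            {
                int sample = Math.Abs((int)BitConverter.ToInt16(e.Buffer, i));
                if (sample > MicrophoneSens)
                {
                    MicrophoneSens = sample;
                }
            }
        }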
Example #11
 public void Dispose()
 {
     if (IsPaused) Resume();
     stopThread.Set();
     screenThread.Join();
     if (audioSource != null)
     {
         audioSource.StopRecording();
         audioSource.DataAvailable -= AudioDataAvailable;
     }
     // Close writer: the remaining data is written to a file and file is closed
     writer.Close();
     stopThread.Close();
 }
Example #12
        private void stopWatch_EventArrived(object sender, EventArrivedEventArgs e)
        {
            try
            {
                Process[] proc = Process.GetProcessesByName("Slack");
                if (proc.Length < 8)
                {
                    if (counter == 3)
                    {
                        notifyIcon1.Visible = true;
                        notifyIcon1.ShowBalloonTip(1000, "Slack Recorder", "Call recorded", ToolTipIcon.Info);
                        notifyIcon1.Visible = false;

                        if (waveIn != null)
                        {
                            try
                            {
                                waveIn.StopRecording();
                                CaptureInstance.StopRecording();

                                saveDate = DateTime.Now.ToString("dd.MM.yyyy");
                                saveTime = DateTime.Now.ToString("HH.mm.ss");

                                MixTwoSamples();

                                ConvertToMP3(saveDirectory.SelectedPath + "\\" + "result.wav", saveDirectory.SelectedPath + "\\" + saveDate + "_" + saveTime + ".mp3", 128);

                                DeleteTempFiles();

                                InsertIntoDatabase(saveDate, saveTime, saveDirectory.SelectedPath);
                            }
                            catch (NullReferenceException)
                            {
                                waveIn.StopRecording();
                                CaptureInstance.StopRecording();
                            }
                        }
                        counter = 0;
                    }
                    counter++;
                }
            }
            catch (NullReferenceException)
            {
                //MessageBox.Show("Slack call servers are down, please try later");
            }
        }
Example #13
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (waveIn != null)
                {
                    waveIn.StopRecording();
                    waveIn.DataAvailable    -= WaveIn_DataAvailable;
                    waveIn.RecordingStopped -= WaveIn_RecordingStopped;
                    waveIn.Dispose();
                }

                dummy.Dispose();
            }
        }
Example #14
 public void Stop()
 {
     if (recordingState == RecordingState.Recording)
     {
         recordingState = RecordingState.RequestedStop;
         waveIn.StopRecording();
     }
     if (recordingState == RecordingState.Monitoring)
     {
         recordingState = RecordingState.Stopped;
         waveIn.StopRecording();
         waveIn.Dispose();
         waveIn = null;
     }
 }
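RecordingState is not defined in this snippet; the values referenced imply an enum along these lines (inferred from the names alone):

 // Hypothetical enum inferred from the states referenced in Stop().
 enum RecordingState
 {
     Stopped,
     Monitoring,
     Recording,
     RequestedStop
 }

The two-phase shape matters: while merely monitoring, the device can be stopped and disposed immediately, but during an active recording the code only requests a stop and lets the RecordingStopped handler finish the teardown.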
Example #15
        public void testFunc()
        {
            // Define Output Path and Create a new WaveInEvent
            var outputFilePath = @"C:\Users\Joe\source\repos\Monotone\Monotone\bin\x64\Debug\audio.wav";
            var waveInEvent    = new WaveInEvent();

            // Prepare the fileWriter
            fileWriter = new WaveFileWriter(outputFilePath, waveInEvent.WaveFormat);

            #region NAudio WaveInEvent Event-Based Handling
            waveInEvent.DataAvailable += (s, a) =>
            {
                fileWriter.Write(a.Buffer, 0, a.BytesRecorded);

                // Force Stop Recording after 30 seconds
                if (fileWriter.Position > waveInEvent.WaveFormat.AverageBytesPerSecond * 30)
                {
                    waveInEvent.StopRecording();
                }
            };

            waveInEvent.RecordingStopped += (s, a) =>
            {
                Console.WriteLine($"Sampling Rate: {waveInEvent.WaveFormat.SampleRate}");
                Console.WriteLine($"Bits Per Sample: {waveInEvent.WaveFormat.BitsPerSample}");
                Console.WriteLine($"Channels: {waveInEvent.WaveFormat.Channels}");
                Console.WriteLine($"Encoding: {waveInEvent.WaveFormat.Encoding}");

                fileWriter?.Dispose();
                fileWriter = null;
            };
            #endregion

            // Start recording only after the handlers are attached,
            // so no DataAvailable callbacks are missed.
            waveInEvent.StartRecording();

            Console.WriteLine("Begin Recording... ");
            Console.WriteLine("Press Any Key to Stop Recording.");

            Console.ReadKey();

            // Stop Recording and dispose object
            waveInEvent.StopRecording();
            waveInEvent.Dispose();

            Console.WriteLine("Recording Stopped.");
        }
Example #16
 public void Stop()
 {
     if (wi != null)
     {
         wi.StopRecording();
     }
 }
Example #17
        public Recorder()
        {
            waveIn = new WaveInEvent();

            writer  = null;
            closing = false;

            waveIn.DataAvailable += (s, a) =>
            {
                writer.Write(a.Buffer, 0, a.BytesRecorded);
                if (writer.Position > waveIn.WaveFormat.AverageBytesPerSecond * 30)
                {
                    waveIn.StopRecording();
                }
            };

            waveIn.RecordingStopped += (s, a) =>
            {
                writer?.Dispose();
                writer = null;
                if (closing)
                {
                    waveIn.Dispose();
                }
            };
        }
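Nothing in the constructor creates the writer or starts the device, so the class presumably exposes start/stop methods elsewhere. A hypothetical completion, using only the fields visible above:

        // Hypothetical: create the writer for a target file, then start capturing.
        public void Start(string outputFilePath)
        {
            writer = new WaveFileWriter(outputFilePath, waveIn.WaveFormat);
            waveIn.StartRecording();
        }

        // Hypothetical: request a stop; the RecordingStopped handler above disposes the writer.
        public void Stop()
        {
            waveIn.StopRecording();
        }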
Example #18
        /// <summary>
        /// Closes the session.
        /// </summary>
        /// <param name="reason">Reason for the closure.</param>
        public override void Close(string reason)
        {
            if (!_isClosed)
            {
                _isClosed = true;

                base.OnRtpPacketReceived -= RtpPacketReceived;

                _waveOutEvent?.Stop();

                if (_waveInEvent != null)
                {
                    _waveInEvent.DataAvailable -= LocalAudioSampleAvailable;
                    _waveInEvent.StopRecording();
                }

                _audioStreamTimer?.Dispose();

                if (_testPatternVideoSource != null)
                {
                    _testPatternVideoSource.SampleReady -= LocalVideoSampleAvailable;
                    _testPatternVideoSource.Stop();
                    _testPatternVideoSource.Dispose();
                }

                // The VPX encoder is a memory hog.
                _vpxDecoder.Dispose();
                _imgConverter.Dispose();

                _vpxEncoder?.Dispose();
                _imgEncConverter?.Dispose();

                base.Close(reason);
            }
        }
Example #19
        static void Main(string[] args)
        {
            string s = "";

            while (s != "q")
            {
                Thread thread = new Thread(delegate()
                {
                    init();
                });

                thread.Start();
                Console.ReadLine();

                s_WaveIn.StopRecording();
                writer.Close();

                convert();

                System.Threading.Thread.Sleep(500);


                Console.WriteLine(Send("test.flac"));
                s = Console.ReadLine();
            }
        }
Example #20
        public void Reset()
        {
            MyWaveIn.StopRecording();
            StopReplay();

            _buffer      = new byte[(int)(MyWaveIn.WaveFormat.AverageBytesPerSecond * RecordTime)];
            _isFull      = false;
            _isRecording = false;
            _pos         = 0;
            MyWaveIn.Dispose();
            MyWaveIn = new WaveInEvent();
            MyWaveIn.DataAvailable    += DataAvailable;
            MyWaveIn.RecordingStopped += Stopped;
            MyWaveIn.DeviceNumber      = _deviceIndex;
        }
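The DataAvailable and Stopped handlers are not shown. Given the _buffer, _pos and _isFull fields, the capture handler is presumably a circular-buffer write, so the buffer always holds the most recent RecordTime seconds; a hypothetical sketch:

        // Hypothetical ring-buffer capture: wrap around once _buffer fills.
        private void DataAvailable(object sender, WaveInEventArgs e)
        {
            for (int i = 0; i < e.BytesRecorded; i++)
            {
                _buffer[_pos] = e.Buffer[i];
                _pos++;
                if (_pos >= _buffer.Length)
                {
                    _pos    = 0;
                    _isFull = true;
                }
            }
        }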
Example #21
 public void Dispose()
 {
     mIsDisposing = true;
     mTimer.Dispose();
     mWaveIn.StopRecording();
     mWaveIn.Dispose();
 }
Example #22
        static async Task <object> StreamingMicRecognizeAsync(string locale, int seconds, int waveInDeviceNumber = 0)
        {
            waveIn.DeviceNumber = waveInDeviceNumber;
            waveIn.WaveFormat   = waveFormat;

            waveIn.StartRecording();

            Console.WriteLine(String.Format("Recording has been started on {0}",
                                            WaveIn.GetCapabilities(waveInDeviceNumber).ProductName), Color.Lime);

            var loadDataTasks = new Task[]
            {
                Task.Run(async () => await Loop(locale, seconds)),
                Task.Run(async () => await ndiRenderer.Run())
            };

            try
            {
                await Task.WhenAll(loadDataTasks);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }

            waveIn.StopRecording();
            Console.WriteLine("exited gracefully");

            return 0;
        }
Example #23
        public void UpdateDeviceList()
        {
            var enumerator     = new MMDeviceEnumerator();
            var wasapi         = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Communications);
            var default_device = wasapi.FriendlyName;

            deviceName = default_device;

            waveIn.DeviceNumber = -1;
            for (int n = -1; n < WaveIn.DeviceCount; n++)
            {
                var caps = WaveIn.GetCapabilities(n);
                // caps.ProductName is truncated to 31 characters by the WinMM API,
                // so match it with Contains against the full WASAPI friendly name.
                if (default_device.Contains(caps.ProductName) && waveIn.DeviceNumber != n)
                {
                    if (recStarted)
                    {
                        waveIn.StopRecording();
                    }
                    waveIn.DeviceNumber = n;
                    if (recStarted)
                    {
                        Thread.Sleep(100);
                        waveIn.StartRecording();
                    }
                    break;
                }
            }
        }
Example #24
        /// <summary>
        /// Stop audio source.
        /// </summary>
        ///
        /// <remarks><para>Stops audio source.</para>
        /// </remarks>
        ///
        public void Stop()
        {
            if (_sampleChannel != null)
            {
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
            }

            if (_waveIn != null)
            {
                // signal to stop
                _waveIn.DataAvailable -= WaveInDataAvailable;
                _waveIn.StopRecording();
                _waveIn.RecordingStopped -= WaveInRecordingStopped;

                if (WaveOutProvider != null)
                {
                    if (WaveOutProvider.BufferedBytes > 0)
                    {
                        WaveOutProvider.ClearBuffer();
                    }
                    WaveOutProvider = null;
                }

                _waveIn.Dispose();
                _waveIn = null;
            }
        }
Example #25
 public void Disable()
 {
     try
     {
         if (_waveIn != null)
         {
             try
             {
                 _waveIn.StopRecording();
                 logger.Info("关闭语音接收。");
             }
             catch (Exception exc)
             {
                 logger.Info("关闭语音接收出现异常。Exception:{0}", exc.Message);
             }
         }
         if (client != null)
         {
             logger.Info("关闭语音接收Socket套接字。");
             client.Shutdown(SocketShutdown.Both);
             client.Close();
             client = null;
         }
     }
     catch (Exception ex)
     {
         logger.Error("Disable 异常:{0}", ex.Message);
     }
 }
Example #26
        public async Task SpeechToText()
        {
            //var authData = GenerateAuthData(path, AccessKey, SecretKey, ContractId);
            //var authString = GenerateAuthString(authData);
            //var uri = $"{endpoint}?auth={authString}";

            var jwtToken = await HttpUtils.GenerateJwtDataAsync(AccessKey, SecretKey, 3 * 60 /* 3 minutes */, jwtEndpoint);

            var uri = $"{sttEndpoint}?token=Bearer {jwtToken}";

            var waveIn = new WaveInEvent();

            // Use the default recording device.
            waveIn.DeviceNumber = 0;
            // Capture at a 16000 Hz sample rate, 16 bits per sample, 1 channel.
            waveIn.WaveFormat     = new WaveFormat(16000, 16, 1);
            waveIn.DataAvailable += (object sender, WaveInEventArgs e) =>
            {
                // Wrap only the bytes actually recorded; e.Buffer may be larger
                // than the valid data for this callback.
                var inputMemStream  = new MemoryStream(e.Buffer, 0, e.BytesRecorded);
                var rawWaveStream   = new RawSourceWaveStream(inputMemStream, waveIn.WaveFormat);
                var outputMemStream = new MemoryStream();
                WaveFileWriter.WriteWavFileToStream(outputMemStream, rawWaveStream);
                audioBuffers.Enqueue(outputMemStream.ToArray());
            };
            waveIn.RecordingStopped += (object sender, StoppedEventArgs e) =>
            {
                clientAsyncTokenSource.Cancel();
            };

            var client = new ClientWebSocket();

            await client.ConnectAsync(new Uri(uri), CancellationToken.None);

            // Recognize Japanese speech.
            _ = await SetLanguageAsync(client, "ja");

            _ = await SetSamplingRateAsync(client, 16000);

            try
            {
                waveIn.StartRecording();
                Console.WriteLine("(音声認識:認識中です。)");

                var sendLoop = this.InitSendLoop(client);
                var readLoop = this.InitReadLoop(client);
                Console.Read();

                waveIn.StopRecording();
                Console.WriteLine("(音声認識:完了しました。)");

                await sendLoop;
                await readLoop;

                await client.CloseAsync(WebSocketCloseStatus.NormalClosure, "OK", CancellationToken.None);
            }
            catch (OperationCanceledException)
            {
                Console.WriteLine("(音声認識:サーバとの通信を止めました。)");
            }
        }
Example #27
        private static void Main(string[] args)
        {
            var elapsed = new TimeSpan(0, 0, 60);
            var id      = Guid.NewGuid();
            var capture = new WaveInEvent
            {
                DeviceNumber       = 0,
                BufferMilliseconds = 1000,
                WaveFormat         = new WaveFormat(44100, 2),
            };
            var writer = new WaveFileWriter($"record-{id}.wav", capture.WaveFormat);

            capture.DataAvailable += (s, a) =>
            {
                writer.Write(a.Buffer, 0, a.BytesRecorded);
            };

            var stopped = new ManualResetEventSlim(false);

            capture.RecordingStopped += (s, a) =>
            {
                writer.Dispose();
                writer = null;
                capture.Dispose();
                stopped.Set();
            };

            _ = Task.Run(async () => await ExecFfm($"-y -f vfwcap -r 25 -t {elapsed:g} -i 0 front-{id}.mp4"));
            _ = Task.Run(async () => await ExecFfm($"-y -f gdigrab -framerate 25 -t {elapsed:g} -i desktop desk-{id}.mkv"));

            capture.StartRecording();

            Thread.Sleep(elapsed);
            Console.WriteLine("Done!");
            capture.StopRecording();

            // RecordingStopped is raised asynchronously on the capture thread; wait for
            // it so the writer is flushed and disposed before the process exits.
            stopped.Wait(TimeSpan.FromSeconds(5));

            Environment.Exit(Environment.ExitCode);
        }
Example #28
        static Task recordAsync(int deviceNum, CancellationToken cancellationToken)
        {
            // Task.Run returns an already-started task and unwraps the async lambda;
            // the original cold `new Task(async () => ...)` completed at the first
            // await and needed a separate Start() call.
            return Task.Run(async () =>
            {
                WaveFileWriter waveFile;
                WaveInEvent waveSource = new WaveInEvent();
                waveSource.DeviceNumber = deviceNum;
                waveSource.WaveFormat = new WaveFormat(44100, 1);

                string tempFile = ($@"{_saveDirectory}mic_recording_{deviceNum}.wav");
                waveFile = new WaveFileWriter(tempFile, waveSource.WaveFormat);

                waveSource.DataAvailable += (sender, e) =>
                {
                    waveFile.Write(e.Buffer, 0, e.BytesRecorded);
                };

                waveSource.StartRecording();

                while (!cancellationToken.IsCancellationRequested)
                {
                    await Task.Delay(100);
                }

                waveSource.StopRecording();
                waveFile.Dispose();
            });
        }
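With the hot task returned above, a call site no longer calls Start(); it just awaits the task and cancels the token to end the recording. Hypothetical usage from inside an async method:

            // Hypothetical call site: record from device 0 for ten seconds, then stop.
            using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10)))
            {
                await recordAsync(0, cts.Token);
            }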
Example #29
        public Task RecognizeStreaming(WaveInEvent waveIn, CancellationToken ct)
        {
            if (waveIn.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
            {
                throw new InvalidCastException("Format not supported for Streaming recognize");
            }

            waveIn.DataAvailable += WaveIn_DataAvailable;


            // Task.Run (unlike Task.Factory.StartNew) unwraps the async lambda, so
            // recognizeTask completes when the loop actually exits.
            Task recognizeTask = Task.Run(async () =>
            {
                try
                {
                    for (; !ct.IsCancellationRequested;)
                    {
                        await StreamingMicRecognizeAsync(waveIn.WaveFormat.SampleRate,
                                                         SAFE_STREAM_TIMEOUT,
                                                         ct);
                    }

                    waveIn.DataAvailable -= WaveIn_DataAvailable;
                    waveIn.StopRecording();
                    audioBuffer.Clear(); // Clear temp buffer
                }
                catch (Exception ex)
                {
                    LogHelper.WriteLog(this, ex.ToString());
                }
            }, ct);

            waveIn.StartRecording();
            return(recognizeTask);
        }
Example #30
        private void button1_Click(object sender, EventArgs e)
        {
            if (inputDevices == 0)
            {
                return;
            }

            button3.Enabled    = false;
            PlayButton.Enabled = false;
            stopButton.Enabled = false;
            button1.Enabled    = false;
            button2.Enabled    = true;

            saveWave = new WaveInEvent();
            saveWave.DeviceNumber = 0;
            saveWave.WaveFormat   = new WaveFormat(sampleRate, bitDepth, channels);
            saver = new WaveFileWriter(outPutFilePath, saveWave.WaveFormat);

            // Attach the handler before starting so no captured buffers are dropped.
            saveWave.DataAvailable += (s, a) => {
                saver.Write(a.Buffer, 0, a.BytesRecorded);
                if (saver.Position > saveWave.WaveFormat.AverageBytesPerSecond * 30)
                {
                    saveWave.StopRecording();
                }
            };

            saveWave.StartRecording();
        }
Example #31
        public float[] ReadMonoSamplesFromMicrophone(int sampleRate, int secondsToRecord)
        {
            var producer = new BlockingCollection<float[]>();
            var waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 1);
            float[] samples;
            using (var waveIn = new WaveInEvent { WaveFormat = waveFormat })
            {
                waveIn.DataAvailable += (sender, e) =>
                    {
                        var chunk = GetFloatSamplesFromByte(e.BytesRecorded, e.Buffer);
                        producer.Add(chunk);
                    };

                waveIn.RecordingStopped += (sender, args) => producer.CompleteAdding();

                waveIn.StartRecording();

                samples = samplesAggregator.ReadSamplesFromSource(new BlockingQueueSamplesProvider(producer), secondsToRecord, sampleRate);

                waveIn.StopRecording();
            }

            return samples;
        }
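GetFloatSamplesFromByte is not shown; since the capture format is IEEE float, it is presumably a straight reinterpretation of the raw bytes (a sketch under that assumption):

        // Hypothetical helper: reinterpret raw IEEE-float capture bytes as floats.
        private static float[] GetFloatSamplesFromByte(int bytesRecorded, byte[] buffer)
        {
            var samples = new float[bytesRecorded / 4]; // 4 bytes per 32-bit float sample
            Buffer.BlockCopy(buffer, 0, samples, 0, bytesRecorded);
            return samples;
        }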