Code Example #1
 public override void Dispose()
 {
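     // Lock on the instance and stop the current capture, if any, before disposing;
     // locking on 'this' works, though a private lock object is usually preferred.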
     lock (this)
     {
         currentCapture?.StopRecording();
     }
 }
Code Example #2
        public void UpdateDevices(MMDevice input, MMDevice output)
        {
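            // Tear down any existing capture/playback, then rebuild the chain for the
            // newly selected devices: WasapiCapture -> BufferedWaveProvider -> mixer ->
            // resampler -> WasapiOut, and finally restart recording and playback.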
            outp?.Stop();
            outp?.Dispose();

            inp?.StopRecording();
            inp?.Dispose();

            inp = new WasapiCapture(input, true, 5);
            inp.DataAvailable += OnCapture;

            buffer = new BufferedWaveProvider(inp.WaveFormat);

            mixer = new MixingWaveProvider32();
            mixer.AddInputStream(buffer);

            if (resampler == null)
            {
                resampler = new AudioResampler(mixer);
            }
            else
            {
                resampler.Update(mixer);
            }

            outp = new WasapiOut(output, AudioClientShareMode.Shared, true, 5);
            outp.Init(resampler);

            inp.StartRecording();
            outp.Play();
        }
Code Example #3
        public void StopForwarding()
        {
            _recorder?.StopRecording();
            _player?.Stop();

            _recorder?.Dispose();
            _player?.Dispose();
        }
Code Example #4
File: AudioClientTests.cs Project: jnm2/NAudio
 public void CanCaptureDefaultDeviceInDefaultFormatUsingWasapiCapture()
 {
     using (var wasapiClient = new WasapiCapture())
     {
         wasapiClient.StartRecording();
         Thread.Sleep(1000);
         wasapiClient.StopRecording();
     }
 }
Code Example #5
File: AudioClientTests.cs Project: jnm2/NAudio
 public void CanReuseWasapiCapture()
 {
     using (var wasapiClient = new WasapiCapture())
     {
         wasapiClient.StartRecording();
         Thread.Sleep(1000);
         wasapiClient.StopRecording();
         Thread.Sleep(1000);
         wasapiClient.StartRecording();
     }
 }
Code Example #6
 public void stopRecording()
 {
     micCapture.StopRecording();
     speakCapture.StopRecording();
     micWriter.Close();
     micWriter.Dispose();
     speakWriter.Close();
     speakWriter.Dispose();
     soundOut.Stop();
     soundOut.Dispose();
     window.unlock();
 }
Code Example #7
File: AudioClientTests.cs Project: zp9611/NAudio
 public void CanReuseWasapiCapture()
 {
     using (var wasapiClient = new WasapiCapture())
     {
         wasapiClient.StartRecording();
         Thread.Sleep(1000);
         wasapiClient.StopRecording();
         Thread.Sleep(1000);
         wasapiClient.StartRecording();
         Console.WriteLine("Disposing");
     }
 }
Code Example #8
        public void StopEncoding()
        {
            lock (lockObj)
            {
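                // Release the audio pipeline in order under the lock: capture first,
                // then resampler, outputs, mixer inputs, codecs and the UDP voice
                // handler, nulling each field so the objects can be recreated later.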
                _textToSpeech?.Dispose();
                _textToSpeech = null;

                _wasapiCapture?.StopRecording();
                _wasapiCapture?.Dispose();
                _wasapiCapture = null;

                _resampler?.Dispose(true);
                _resampler = null;

                _waveOut?.Stop();
                _waveOut?.Dispose();
                _waveOut = null;

                _micWaveOut?.Stop();
                _micWaveOut?.Dispose();
                _micWaveOut = null;

                _volumeSampleProvider = null;
                _clientAudioMixer?.RemoveAllMixerInputs();
                _clientAudioMixer = null;

                _clientsBufferedAudio.Clear();

                _encoder?.Dispose();
                _encoder = null;

                if (_udpVoiceHandler != null)
                {
                    _udpVoiceHandler.RequestStop();
                    _udpVoiceHandler = null;
                }

                _speex?.Dispose();
                _speex = null;

                SpeakerMax = -100;
                MicMax     = -100;

                _effectsOutputBuffer = null;

                foreach (var guid in _subs)
                {
                    MessageHub.Instance.UnSubscribe(guid);
                }
                _subs.Clear();
            }
        }
Code Example #9
 private void HangupButton_Click(object sender, RoutedEventArgs e)
 {
     modem.Hangup();
     capture.StopRecording();
     waveOut.Stop();
     capture.Dispose();
     waveOut.Dispose();
     capture = null;
     waveOut = null;
     HangupButton.IsEnabled = false;
     CallButton.IsEnabled   = true;
     AnswerButton.IsEnabled = true;
 }
Code Example #10
 private void StopRecording()
 {
     StartButton.IsEnabled = true;
     StopButton.IsEnabled  = false;
     LoadButton.IsEnabled  = true;
     if (Capture != null)
     {
         Capture.DataAvailable    -= ReceiveWave;
         Capture.RecordingStopped -= RecordingStopped;
         Capture.StopRecording();
         Capture = null;
     }
 }
Code Example #11
        private void MainWindow_Closing(object sender, CancelEventArgs e)
        {
            m_AudioCapture?.StopRecording();
            m_AudioCapture?.Dispose();
            m_WaveFileWriter.Close();
            DisposeAudioFrameReader();
            DisposeColorFrameReader();
            CloseKinectSensor();
            CloseVideoRecorder();


            CombineAndSaveAv();
        }
Code Example #12
        public void StopEncoding()
        {
            lock (lockObj)
            {
                _wasapiCapture?.StopRecording();
                _wasapiCapture?.Dispose();
                _wasapiCapture = null;

                _resampler?.Dispose(true);
                _resampler = null;

                //Debug Wav
                // _afterFileWriter?.Close();
                // _afterFileWriter?.Dispose();
                // _beforeWaveFile?.Close();
                // _beforeWaveFile?.Dispose();

                _waveOut?.Stop();
                _waveOut?.Dispose();
                _waveOut = null;

                _micWaveOut?.Stop();
                _micWaveOut?.Dispose();
                _micWaveOut = null;

                _volumeSampleProvider = null;
                _clientAudioMixer?.RemoveAllMixerInputs();
                _clientAudioMixer = null;

                _clientsBufferedAudio.Clear();

                _encoder?.Dispose();
                _encoder = null;

                if (_udpVoiceHandler != null)
                {
                    _udpVoiceHandler.RequestStop();
                    _udpVoiceHandler = null;
                }

                _speex?.Dispose();
                _speex = null;

                SpeakerMax = -100;
                MicMax     = -100;

                _effectsOutputBuffer = null;

                MessageHub.Instance.ClearSubscriptions();
            }
        }
Code Example #13
        public void StopRecording()
        {
            if (!IsRecording)
            {
                return;
            }

            Call.Duration = DateTime.Now.Subtract(Call.Started);

            log.Debug($"Finishing recording, call duration = {Call.Duration}");

            micSource.StopRecording();
            spkSource.StopRecording();
        }
Code Example #14
File: LSMixer.cs Project: Wessie/Loopstream
 public void Dispose(ref string tex)
 {
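     // Stop both captures and dispose the output and resamplers, logging each step;
     // "tex" records which component is currently being torn down, presumably so the
     // caller can report where a failure occurred.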
     Logger.mix.a("dispose recCap"); tex = "recCap"; if (recCap != null)
     {
         recCap.StopRecording();
     }
     Logger.mix.a("dispose micCap"); tex = "micCap"; if (micCap != null)
     {
         micCap.StopRecording();
     }
     Logger.mix.a("dispose mixOut"); tex = "mixOut"; if (mixOut != null)
     {
         mixOut.Dispose();
     }
     Logger.mix.a("dispose recRe"); tex = "recRe"; if (recRe != null)
     {
         recRe.Dispose();
     }
     Logger.mix.a("dispose micRe"); tex = "micRe"; if (micRe != null)
     {
         micRe.Dispose();
     }
     Logger.mix.a("disposed");
 }
Code Example #15
File: AudioPlayer.cs Project: Hywel-Stoakes/MPAi-2
        /// <summary>
        /// If a file is being recorded, stop recording and tidy up the stream.
        /// </summary>
        private void StopRecording()
        {
            if (onDataAvailableSubscribed)
            {
                waveIn.DataAvailable     -= OnDataAvailable;
                onDataAvailableSubscribed = false;
            }

            recordButton.Text = recordText;
            if (waveIn != null)
            {
                waveIn.StopRecording();
            }
            FinalizeWaveFile(writer);
        }
Code Example #16
File: Program.cs Project: KrzysztofMajor/Recorder
        static void Main(string[] args)
        {
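            // Record the default capture device to a WAV file on the desktop for about
            // 10 seconds (a timer triggers StopRecording), then re-encode the result to
            // MP3 with LameMP3FileWriter once RecordingStopped fires.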
            var now         = DateTime.Now;
            var machineName = Environment.MachineName.ToLower(CultureInfo.CurrentCulture);

            var outputFolder = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), "NAudio");

            Directory.CreateDirectory(outputFolder);
            var outputFilepath = Path.Combine(outputFolder, "output.wav");
            var mp3Filepath    = Path.Combine(outputFolder, $"{machineName}{now:ddMMyyyyHHmmss}.mp3");

            var waveIn = new WasapiCapture();
            var writer = new WaveFileWriter(outputFilepath, waveIn.WaveFormat);

            waveIn.StartRecording();

            var tm = new System.Timers.Timer(10 * 1000);

            tm.Elapsed += (sender, eventArgs) => waveIn.StopRecording();
            tm.Start();

            waveIn.DataAvailable += (sender, eventArgs) =>
            {
                Console.Write(".");
                writer.Write(eventArgs.Buffer, 0, eventArgs.BytesRecorded);
            };

            var e = new ManualResetEvent(false);

            waveIn.RecordingStopped += (sender, eventArgs) =>
            {
                writer.Dispose();
                waveIn.Dispose();

                Console.WriteLine("writing mp3");
                using (var reader = new AudioFileReader(outputFilepath))
                    using (var mp3Writer = new LameMP3FileWriter(mp3Filepath, reader.WaveFormat, 128))
                        reader.CopyTo(mp3Writer);

                Console.WriteLine("writing done");
                e.Set();
            };

            e.WaitOne();
        }
Code Example #17
File: Recorder.cs Project: cheanizer/wslinphone
        public Recorder(string prefix = "")
        {
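            // Capture the default microphone and the default render device (loopback)
            // to separate writers; each capture stops itself after 30 minutes of audio,
            // and combineAll() is invoked from each RecordingStopped handler once the
            // corresponding done flag is set.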
            Prefix = prefix;
            initRecording();

            outputFolder = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), "recording");
            Console.WriteLine(outputFolder);
            Directory.CreateDirectory(outputFolder);
            Name         = getSessionName();
            Location     = outputFolder;
            eventInput   = new WasapiCapture();
            writerInput  = null;
            writerOutput = null;
            eventOutput  = new WasapiLoopbackCapture();

            eventInput.DataAvailable += (s, a) => {
                writerInput.Write(a.Buffer, 0, a.BytesRecorded);
                if (writerInput.Position > eventInput.WaveFormat.AverageBytesPerSecond * (60 * 30))
                {
                    eventInput.StopRecording();
                }
            };
            eventInput.RecordingStopped += (s, a) =>
            {
                writerInput?.Dispose();
                writerInput = null;
                inputDone   = true;
                combineAll();
            };

            eventOutput.DataAvailable += (s, a) => {
                writerOutput.Write(a.Buffer, 0, a.BytesRecorded);
                if (writerOutput.Position > eventOutput.WaveFormat.AverageBytesPerSecond * (60 * 30))
                {
                    eventOutput.StopRecording();
                }
            };

            eventOutput.RecordingStopped += (s, a) => {
                writerOutput?.Dispose();
                writerOutput = null;
                outputDone   = true;
                combineAll();
            };
        }
Code Example #18
File: Form1.cs Project: my04337/sound_utils
        private void button1_Click(object sender, EventArgs e)
        {
            var waveIn = new WasapiCapture();
            var writer = new WaveFileWriter("a.wav", waveIn.WaveFormat);

            waveIn.DataAvailable += (s, a) =>
            {
                writer.Write(a.Buffer, 0, a.BytesRecorded);
                if (writer.Position > waveIn.WaveFormat.AverageBytesPerSecond * 3)
                {
                    waveIn.StopRecording();
                }
            };
            waveIn.RecordingStopped += (s, a) =>
            {
                writer?.Dispose();
                writer = null;
            };
            waveIn.StartRecording();
        }
Code Example #19
        static void Main(string[] args)
        {
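            // Convert an MP3 to WAV, then record the default capture device to
            // output.wav until Enter is pressed, logging the buffer size on every
            // DataAvailable callback.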
            var reader = new Mp3FileReader(@"C:\Users\Null\Desktop\Disconnected - Pegboard Nerds.mp3");

            WaveFileWriter.CreateWaveFile(@"C:\Users\Null\Desktop\Disconnected - Pegboard Nerds.wav", reader);
            //// WaveFileReader waveFileReader = new WaveFileReader("");
            //WaveOut wout = new WaveOut();

            //wout.Init(reader);
            //wout.Play();
            //Console.ReadLine();

            //new StreamMediaFoundationReader();
            var cap = new WasapiCapture();
            //var cap = new WasapiLoopbackCapture();
            //var cap = new WaveInEvent();
            WaveFileWriter writer = new WaveFileWriter("output.wav", cap.WaveFormat);

            cap.DataAvailable += (s, a) =>
            {
                // Rough peak level: the maximum of one float sample taken every
                // 32 bytes (computed here but not otherwise used).
                float waveHeight = Enumerable
                                   .Range(0, a.BytesRecorded / 32)
                                   .Select(i => BitConverter.ToSingle(a.Buffer, i * 32))
                                   .Aggregate((v1, v2) => v1 > v2 ? v1 : v2);
                writer.Write(a.Buffer, 0, a.BytesRecorded);
                Console.WriteLine($"BufferSize:{a.Buffer.Length}, Recorded:{a.BytesRecorded}");
            };
            cap.StartRecording();
            Console.WriteLine("录制已开始, 按Enter结束");
            ConsoleKey key;

            do
            {
                key = Console.ReadKey(true).Key;
            }while (key != ConsoleKey.Enter);
            cap.StopRecording();
            writer.Close();
            Console.WriteLine("录制结束");
            Console.ReadLine();
        }
Code Example #20
        private Task RecordAsync(string deviceId, CancellationToken cancellationToken)
        {
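            // Returns a cold Task (created with new Task(...) and never started here);
            // the caller is expected to start it. Recording continues until the
            // supplied CancellationToken requests a stop.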
            return(new Task(() =>
            {
                MMDevice device = new MMDeviceEnumerator().GetDevice(deviceId);
                WasapiCapture capture = new WasapiCapture(device);
                capture.ShareMode = ShareModeIndex == 0 ? AudioClientShareMode.Shared : AudioClientShareMode.Exclusive;
                string filename = String.Format("ChatteringFools {0} {1:yyyy-MM-dd HH-mm-ss}.wav", device.FriendlyName, DateTime.Now);
                WaveFileWriter writer = new WaveFileWriter(Path.Combine(RecordingsViewModel.OutputFolder,
                                                                        filename),
                                                           capture.WaveFormat);
                capture.StartRecording();

                capture.RecordingStopped += delegate
                {
                    writer.Dispose();
                    writer = null;
                    Application.Current.Dispatcher.Invoke(new Action(() =>
                    {
                        RecordingsViewModel.Recordings.Add(filename);
                        RecordingsViewModel.SelectedRecording = filename;
                    }));
                    capture.Dispose();
                    capture = null;
                    RecordCommand.IsEnabled = true;
                    StopCommand.IsEnabled = false;
                };

                capture.DataAvailable += new EventHandler <WaveInEventArgs>(delegate(object o, WaveInEventArgs waveInEventArgs)
                {
                    writer.Write(waveInEventArgs.Buffer, 0, waveInEventArgs.BytesRecorded);
                });

                cancellationToken.Register(() =>
                {
                    capture.StopRecording();
                });
            }));
        }
Code Example #21
File: wPlay.cs Project: def-valerius/ARWMIO
        public void wasapiStop()
        {
            if (wCapture != null)
            {
                wCapture.StopRecording();
                wCapture.Dispose();
                wCapture = null;
            }

            if (wOut != null)
            {
                wOut.Stop();
                wOut.Dispose();
                wOut = null;
            }

            if (buffWaveProv != null)
            {
                buffWaveProv.ClearBuffer(); // ?
                buffWaveProv = null;
            }
        }
Code Example #22
        private void AudioInput_SelectedIndexChanged(object sender, EventArgs e)
        {
            // START CAPTURING SOUND FROM SELECTED DEVICE
            MMDevice inputDevice = inputs[audioInputSelector.SelectedIndex];

            StoreSetting("input_device", audioInputSelector.SelectedIndex.ToString());
            Logger.WriteLine("Set inputDevice to: " + audioOutputSelector.SelectedIndex.ToString());


            if (input != null)
            {
                input.StopRecording();
            }

            input = new WasapiCapture(inputDevice, true, inputLatency);

            inputBitsPrSample = input.WaveFormat.BitsPerSample;
            inputSampleRate   = input.WaveFormat.SampleRate;
            inputChannels     = input.WaveFormat.Channels;
            inputFormat       = WaveFormat.CreateIeeeFloatWaveFormat(inputSampleRate, inputChannels);

            inputBuffer           = new BufferedWaveProvider(inputFormat);
            inputBuffer.ReadFully = true;
            inputBuffer.DiscardOnBufferOverflow = true;

            inputResampler = new WdlResamplingSampleProvider(inputBuffer.ToSampleProvider(), internalFormatStereo.SampleRate);

            SetInputMode(inputMode);

            Logger.WriteLine("SET INPUT FORMAT: "
                             + "Sample Rate: " + inputSampleRate
                             + ", BitsPrSasmple: " + inputBitsPrSample
                             + ", Channels: " + inputChannels);

            input.DataAvailable += waveIn_DataAvailable;
            input.StartRecording();
        }
Code Example #23
        public void StopEncoding()
        {
            lock (lockob)
            {
                _wasapiCapture?.StopRecording();
                _wasapiCapture?.Dispose();
                _wasapiCapture = null;

                _resampler?.Dispose(true);
                _resampler = null;

                _waveOut?.Dispose();
                _waveOut = null;

                _playBuffer?.ClearBuffer();
                _playBuffer = null;

                _encoder?.Dispose();
                _encoder = null;

                _decoder?.Dispose();
                _decoder = null;

                _playBuffer?.ClearBuffer();
                _playBuffer = null;

                _speex?.Dispose();
                _speex = null;

                _waveFile?.Flush();
                _waveFile?.Dispose();
                _waveFile = null;

                SpeakerMax = -100;
                MicMax     = -100;
            }
        }
Code Example #24
        public void Shutdown()
        {
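            // Ask the real microphone capture and the virtual-mic playback to stop,
            // then poll for up to 10 seconds until both report a stopped state.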
            // stop recording
            if (realMic != null)
            {
                realMic.StopRecording();
            }

            // stop playback
            if (virtualMic != null)
            {
                virtualMic.Stop();
            }

            var realMicStopped    = false;
            var virtualMicStopped = false;
            var stoppingStarted   = DateTime.Now;

            while ((!realMicStopped || !virtualMicStopped) && DateTime.Now - stoppingStarted < TimeSpan.FromSeconds(10))
            {
                realMicStopped    = realMic == null || realMic.CaptureState == CaptureState.Stopped;
                virtualMicStopped = virtualMic == null || virtualMic.PlaybackState == PlaybackState.Stopped;
            }
        }
Code Example #25
        protected override void Dispose(bool managed)
        {
            if (_disposed)
            {
                return;
            }

            if (managed)
            {
                _cancelled = true;

                //_worker.CancelAsync();

                //_worker?.Join(500);
                _worker.Stop();
                if (_capture?.CaptureState == CaptureState.Capturing)
                {
                    _capture?.StopRecording();
                }

                base.Dispose(managed);
                GC.SuppressFinalize(this);
            }
        }
Code Example #26
 private void Stop()
 {
     _capture?.StopRecording();
     Peak = 0;
 }
Code Example #27
 //stop the output when the stop button is hit
 private void OnStopClicked(object sender, RoutedEventArgs e)
 {
     wasapiOut.Stop();
     microphone.StopRecording();
 }
Code Example #28
        public static async Task RecognitionWithPushAudioStreamAsync()
        {
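            // Bridge WASAPI capture into the Speech SDK: the DataAvailable handler
            // below converts each buffer (assumed to contain 32-bit float samples)
            // to 16-bit PCM and writes it to the push stream feeding the recognizer.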
            var capture = new WasapiCapture();
            // Creates an instance of a speech config with specified subscription key and service region.
            // Replace with your own subscription key and service region (e.g., "westus").            
            var config = SpeechConfig.FromSubscription("your key", "your region");

            var stopRecognition = new TaskCompletionSource<int>();

            // Create a push stream
            using (var pushStream = AudioInputStream.CreatePushStream())
            {
                using (var audioInput = AudioConfig.FromStreamInput(pushStream))
                {
                    // Creates a speech recognizer using audio stream input.
                    using (var recognizer = new SpeechRecognizer(config, audioInput))
                    {
                        Console.WriteLine("Say something...");

                        // Subscribes to events.
                        recognizer.Recognizing += (s, e) =>
                        {
                            Console.WriteLine($"RECOGNIZING: Text={e.Result.Text}");
                        };

                        recognizer.Recognized += (s, e) =>
                        {
                            if (e.Result.Reason == ResultReason.RecognizedSpeech)
                            {
                                Console.WriteLine($"RECOGNIZED: Text={e.Result.Text}");
                                stopRecognition.TrySetResult(0);
                            }
                            else if (e.Result.Reason == ResultReason.NoMatch)
                            {
                                Console.WriteLine($"NOMATCH: Speech could not be recognized.");
                            }
                        };

                        recognizer.Canceled += (s, e) =>
                        {
                            Console.WriteLine($"CANCELED: Reason={e.Reason}");

                            if (e.Reason == CancellationReason.Error)
                            {
                                Console.WriteLine($"CANCELED: ErrorCode={e.ErrorCode}");
                                Console.WriteLine($"CANCELED: ErrorDetails={e.ErrorDetails}");
                                Console.WriteLine($"CANCELED: Did you update the subscription info?");
                            }

                            stopRecognition.TrySetResult(0);
                        };

                        recognizer.SessionStarted += (s, e) =>
                        {
                            Console.WriteLine("\nSession started event.");
                        };

                        recognizer.SessionStopped += (s, e) =>
                        {
                            Console.WriteLine("\nSession stopped event.");
                            Console.WriteLine("\nStop recognition.");
                            stopRecognition.TrySetResult(0);
                        };

                        capture.DataAvailable += (s, e) =>
                        {
                            if (e.BytesRecorded != 0)
                            {
                                var floatArray = new float[e.BytesRecorded / 4];
                                Buffer.BlockCopy(e.Buffer, 0, floatArray, 0, e.BytesRecorded);

                                byte[] ba = ConvertFloatArrayToInt16ByteArray(floatArray);
                                pushStream.Write(ba); // try to push buffer here
                            }
                        };

                        // Starts continuous recognition. Uses StopContinuousRecognitionAsync() to stop recognition.
                        await recognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);

                        capture.StartRecording();

                        // Waits for completion.
                        // Use Task.WaitAny to keep the task rooted.
                        Task.WaitAny(new[] { stopRecognition.Task });

                        // Stops recognition.
                        await recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);
                        capture.StopRecording();
                    }
                }
            }
        }
Code Example #29
File: Form1.cs Project: jmasterx/TextToSpeech
        private void BtnStop_Click(object sender, EventArgs e)
        {
            btnStop.Enabled = false;

            capture.StopRecording();
        }
Code Example #30
 public void StopRecording()
 {
     InfoMessage?.Invoke(this, "Stopping recording...");
     _audioCapture?.StopRecording();
 }