Example #1
        public void UpdateDevices(MMDevice input, MMDevice output)
        {
            // Tear down the previous playback and capture devices before switching.
            outp?.Stop();
            outp?.Dispose();

            inp?.StopRecording();
            inp?.Dispose();

            // Capture from the new input device (event-driven, 5 ms buffer).
            inp = new WasapiCapture(input, true, 5);
            inp.DataAvailable += OnCapture;

            // Captured audio is queued here (filled by the OnCapture handler) and pulled by the mixer.
            buffer = new BufferedWaveProvider(inp.WaveFormat);

            mixer = new MixingWaveProvider32();
            mixer.AddInputStream(buffer);

            // Reuse the existing resampler if there is one; otherwise create it.
            if (resampler == null)
            {
                resampler = new AudioResampler(mixer);
            }
            else
            {
                resampler.Update(mixer);
            }

            // Play the resampled mix on the new output device.
            outp = new WasapiOut(output, AudioClientShareMode.Shared, true, 5);
            outp.Init(resampler);

            inp.StartRecording();
            outp.Play();
        }
Example #2
        private void btStop_Click(object sender, EventArgs e)
        {
            if (sourceStream != null)
            {
                sourceStream.StopRecording();
                sourceStream.Dispose();
                sourceStream = null;
            }

            if (sourceStream1 != null)
            {
                sourceStream1.StopRecording();
                sourceStream1.Dispose();
                sourceStream1 = null;
            }
            if (this.waveWriter == null)
            {
                return;
            }
            this.waveWriter.Dispose();
            waveWriter2.Dispose();
            this.waveWriter        = null;
            waveWriter2            = null;
            this.sbtRecord.Enabled = false;
            this.sbtStop.Enabled   = false;
            sbtPlay.Enabled        = true;
            sbtPlay.Focus();

            //mix();
        }
        /* TODO
         * //If the audio from the computer is silent for about 10 seconds, either NAudio or WasapiCapture
         * //stops providing audio samples. This fixes that problem by:
         * //	Detect if there is more than <varNameHere=5s> of silence in the clip.
         * //	If so, check if the length of the current snippet is more than <varNameHere=0.2s> shorter
         * //	than the time since the last snippet.
         * //	If so, find the longest stretch of silence within the current audio data snippet (this assumes
         * //	that there is only ever one timeout within an audio data snippet, i.e. snippets are shorter than 20s).
         * //	Then, insert the missing duration of silence into that stretch of silence (in the middle to be safe).
         * //	(A rough sketch of this follows below.)
         * private void FillInUncapturedSilence(WaveInEventArgs e)
         * {
         *      var realtimeSampleDuration = _sampleDurationStopwatch.ElapsedMilliseconds;
         *      _sampleDurationStopwatch.Stop();
         *
         *      e.
         *
         *      _sampleDurationStopwatch.Restart();
         * }
         */
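
        // A rough sketch of the TODO above. The field names _wasapiIn (capture device),
        // waveWriter (NAudio WaveFileWriter) and the 200 ms tolerance constant are
        // assumptions, not part of the original code. It also simplifies the plan:
        // instead of locating the longest quiet stretch and splitting the snippet,
        // it appends the missing silence at the end, which is enough to keep the
        // written file in step with wall-clock time.
        private const int SilenceGapToleranceMs = 200; // <varNameHere=0.2s>

        private void FillInUncapturedSilence(WaveInEventArgs e)
        {
            var realtimeMs = _sampleDurationStopwatch.ElapsedMilliseconds;
            _sampleDurationStopwatch.Restart();

            var format = _wasapiIn.WaveFormat;

            // Duration actually covered by the bytes captured in this snippet.
            var capturedMs = (long)(e.BytesRecorded * 1000.0 / format.AverageBytesPerSecond);

            var missingMs = realtimeMs - capturedMs;
            if (missingMs <= SilenceGapToleranceMs)
            {
                return; // capture kept up with real time; nothing to patch
            }

            // Zero bytes are silence for 16-bit PCM and IEEE-float formats.
            // Round down to whole frames so the stream stays block-aligned.
            var missingBytes = (int)(missingMs * format.AverageBytesPerSecond / 1000);
            missingBytes -= missingBytes % format.BlockAlign;
            if (missingBytes > 0)
            {
                waveWriter.Write(new byte[missingBytes], 0, missingBytes);
            }
        }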

        private void RecordingStopped(object sender, StoppedEventArgs e)
        {
            _wasapiIn.Dispose();
            _wavOut.Dispose();

            File.Move(this.FullOutputPath, Path.ChangeExtension(this.FullOutputPath, ".wav"));
            this.CurrentExtension = ".wav";

            HasStopped = true;
            Stopped?.Invoke(this, EventArgs.Empty);
        }
Example #4
 public void Dispose()
 {
     if (_waveIn != null)
     {
         _waveIn.StopRecording();
         _waveIn.Dispose();
         _waveIn = null;
     }
     if (_writer != null)
     {
         _writer.Close();
         _writer = null;
     }
 }
        public void Dispose()
        {
            PSE.Enable                 = false;
            PSE.VolumeRequest         -= PSE_VolumeRequest;
            PSE.ExtractedDegreeOfRisk -= PSE_ExtractedDegreeOfRisk;
            PSEFadeTimer.Stop();
            PSEFadeTimer.Dispose();
            PSE.Dispose();
            if (Mixer != null)
            {
                Mixer.RemoveAllMixerInputs();
            }
            if (WasapiCapture != null)
            {
                WasapiCapture.Dispose();
            }
            if (BufferedWaveProvider != null)
            {
                BufferedWaveProvider.ClearBuffer();
            }

            Mixer                = null;
            WasapiCapture        = null;
            BufferedWaveProvider = null;
        }
Example #6
        private void StopRecording()
        {
            m_capture.Stop();
            m_capture.Dispose();

            m_ww.Dispose();
        }
Example #7
        /// Stops recording.
        private void stopRecording(bool dispose = false)
        {
            if (mCapture == null)
            {
                return;
            }

            // Stop the capture device
            if (mCapture.CaptureState != CaptureState.Stopped)
            {
                mCapture.StopRecording();

                // Flush the recorded data to disk
                mWriter.Dispose();
                mWriter = null;
            }

            // Other post-recording cleanup
            mRecordingFormat = null;

            // Update the UI
            startRecordingButton.Enabled    = true;
            stopRecordingButton.Enabled     = false;
            inputDeviceListSelector.Enabled = true;
            inputChannelSelector.Enabled    = true;

            // Dispose of the device if requested
            if (dispose)
            {
                mCapture.Dispose();
                mCapture = null;
            }
        }
 public void Dispose()
 {
     _microphoneBuffer?.Dispose();
     _soundIn?.Dispose();
     _soundInSource?.Dispose();
     _convertedSource?.Dispose();
 }
Example #9
    public bool stopRecording()
    {
        bool isStopped = false;

        if (System.Web.HttpContext.Current.Session[ww] != null && System.Web.HttpContext.Current.Session[wc] != null)
        {
            capture = (WasapiCapture)System.Web.HttpContext.Current.Session[wc];
            w       = (WaveWriter)System.Web.HttpContext.Current.Session[ww];
            //stop recording
            capture.Stop();
            w.Dispose();
            w = null;
            capture.Dispose();
            capture = null;

            System.Web.HttpContext.Current.Session[ww] = null;
            System.Web.HttpContext.Current.Session[wc] = null;
            //Label1.Text = "Stopped";
        }
        else
        {
            isStopped = true;
        }

        return isStopped;
    }
Example #10
        private void Stop()
        {
            if (!recording)
            {
                return;
            }

            outputCapture.Stop();
            inputCapture.Stop();

            // Ask the external process to quit ('q'), give it a second, then close it.
            process.StandardInput.Write('q');
            process.WaitForExit(1000);
            process.Close();

            groupBox1.Enabled = true;
            groupBox2.Enabled = true;
            groupBox3.Enabled = true;
            groupBox4.Enabled = true;

            if (outputWaveWriter != null)
            {
                outputWaveWriter.Dispose();
            }

            if (inputWaveWriter != null)
            {
                inputWaveWriter.Dispose();
            }

            outputCapture.Dispose();
            inputCapture.Dispose();

            recording = false;
        }
Example #11
        public void Stop()
        {
            //SoundSpectrum.Stop();

            if (_soundIn != null)
            {
                _soundIn.Stop();
                _soundIn.Dispose();
                _soundIn = null;
            }
            if (_source != null)
            {
                _source.Dispose();
                _source = null;
            }
        }
Example #12
        public void stopRecording()
        {
            isRecording = false;
            micCapture.Stop();
            speakCapture.Stop();
            micWriter.Dispose();
            speakWriter.Dispose();
            micCapture.Dispose();
            speakCapture.Dispose();
            soundout.Stop();
            soundout.Dispose();

            string micSize = "-", speakSize = "-";

            if (File.Exists(micFileName))
            {
                FileInfo f      = new FileInfo(micFileName);
                int      mbytes = (int)(f.Length / 1024 / 1024);
                micSize = mbytes.ToString();
            }
            else
            {
                MessageBox.Show("No file with name\n   " + micFileName + "\nexists.\n\nMicrophone may not have been recorded.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            if (File.Exists(speakFileName))
            {
                FileInfo f      = new FileInfo(speakFileName);
                int      mbytes = (int)(f.Length / 1024 / 1024);
                speakSize = mbytes.ToString();
            }

            window.updateInfo(micFileName, micSize, speakFileName, speakSize);

            window.UnlockUI();
        }
Example #13
        private void StopCSCore()
        {
            if (_soundOut != null)
            {
                _soundOut.Stop();
                _soundOut.Dispose();
                _soundOut = null;
            }
            if (_soundIn != null)
            {
                _soundIn.Stop();
                _soundIn.Dispose();
                _soundIn = null;
            }
            if (_source != null)
            {
                _source.Dispose();
                _source = null;
            }

            _lineSpectrum = null;
        }
Example #14
        private void bgInputListener_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
        {
            Trace.TraceInformation("Background Microphone Listener closing. State: {0}", state.ToString());
            state = States.Stopping;
            _soundCapture?.Stop();
            _soundCapture?.Dispose();

            _isSoundAlertPlaying = false;
            if (_soundOut.PlaybackState != PlaybackState.Stopped)
            {
                _soundOut.Stop();
            }
            _soundOut.Dispose();
            state = States.Stopped;
            Close();
        }
Example #15
        public void SoundInToSoundOutTest_Wasapi()
        {
            for (int i = 0; i < 10; i++)
            {
                var waveIn = new WasapiCapture();
                waveIn.Initialize();
                waveIn.Start();

                var waveInToSource = new SoundInSource(waveIn)
                {
                    FillWithZeros = true
                };

                var soundOut = new WasapiOut();
                soundOut.Initialize(waveInToSource);
                soundOut.Play();

                Thread.Sleep(2000);

                Assert.AreEqual(PlaybackState.Playing, soundOut.PlaybackState);

                soundOut.Dispose();
                waveIn.Dispose();
            }
        }
Example #16
        public void Cleanup()
        {
            channelMemoryA?.Clear();
            channelMemoryB?.Clear();

            channelWriterA?.Dispose();
            channelWriterB?.Dispose();

            channelStreamA?.Dispose();
            channelStreamB?.Dispose();

            channelCapture?.Dispose();

            fileTimer?.Stop();
            fileTimer?.Dispose();
        }
Example #17
        public void Dispose()
        {
            if (_waveWriter != null)
            {
                _waveWriter.Dispose();
                _waveWriter = null;
            }

            if (_notificationSource != null)
            {
                _notificationSource.Dispose();
                _notificationSource = null;
            }

            if (_waveSource != null)
            {
                _waveSource.Dispose();
                _waveSource = null;
            }

            if (_soundInSource != null)
            {
                _soundInSource.Dispose();
                _soundInSource = null;
            }

            if (_capture != null)
            {
                _capture.Dispose();
                _capture = null;
            }
        }
Example #18
        private void OnRecordingStopped(object sender, StoppedEventArgs err)
        {
            if (err.Exception != null)
            {
                InfoMessage?.Invoke(this, $"Error: {err.Exception.Message}");
            }

            ffmpegProcess?.StandardOutput.Close();
            ffmpegProcess?.StandardInput.Close();
            ffmpegProcess?.Kill();

            _audioCapture.RecordingStopped -= OnRecordingStopped;
            _audioCapture.DataAvailable    -= OnDataAvailable;

            _audioCapture.Dispose();
            _audioCapture            = null;
            _threadSafeBoolBackValue = 0;

            Task.Run(() => { _transportService.SendFinalData(); }).Wait();
            Task.Run(() => { _transportService.CloseConnection(); }).Wait();
            InfoMessage?.Invoke(this, "Recording stopped");
            RecordLevel?.Invoke(this, 0.0F);
            RecordStopped?.Invoke(this, EventArgs.Empty);
        }
Example #19
 public void Dispose()
 {
     //Don't dispose capture device
     _wasapiCapture?.Stop();
     _captureSource?.Dispose();
     _wasapiCapture?.Dispose();
     _opusEncoder?.Dispose();
 }
Example #20
 private void DisposeAudioDevice()
 {
     _audioIn.Dispose();
     IsListening = false;
     OnAudioEventAvailable(new AudioEventArgs {
         State = AudioRecordState.ListeningStopped, Information = "Stopped Listening."
     });
 }
Example #21
 private void RecorderOnDataEnds(object sender, StoppedEventArgs stoppedEventArgs)
 {
     if (realMic != null)
     {
         realMic.Dispose();
         realMic = null;
     }
 }
        public void StopForwarding()
        {
            _recorder?.StopRecording();
            _player?.Stop();

            _recorder?.Dispose();
            _player?.Dispose();
        }
Example #23
 public void StopListen()
 {
     if (capture.RecordingState == RecordingState.Recording)
     {
         capture.Stop();
     }
     capture.Dispose();
 }
Example #24
 /// <summary>
 /// Stops the audio capture and disposes of the resources used by the recorder.
 /// </summary>
 public void Dispose()
 {
     Interlocked.Exchange(ref _isRunning, 0);
     _capture.Stop();
     _convertedSource.Dispose();
     _soundInSource.Dispose();
     _capture.Dispose();
 }
Example #25
 public void Free()
 {
     if (initialized)
     {
         capture.Stop();
         capture.Dispose();
     }
 }
Example #26
 public void Dispose()
 {
     if (_dummyCapture != null)
     {
         _dummyCapture.Dispose();
         _dummyCapture = null;
     }
 }
Example #27
 void OnApplicationQuit()
 {
     if (enabled)
     {
         capture.Stop();
         capture.Dispose();
     }
 }
 private static void CSCore_Cleanup()
 {
     if (capture != null)
     {
         capture.Dispose();
         capture = null;
     }
     UpdateStatusMessage.ShowStatusMessage(2, "Capture Destroyed");
 }
 //BLARG 01.14.2020: Added the rest of the disposables since we'll be calling this method a lot more
 public void Dispose()
 {
     //_capture?.Stop(); //Don't need this, Dispose() takes care of it
     _capture?.Dispose();
     _soundInSource?.Dispose();
     _source?.Dispose();
     _stream?.Dispose();
     _audioEndpointVolume?.Dispose();
 }
Example #30
        private void EnableCaptureEndpoint()
        {
            if (_dummyCapture != null)
            {
                _dummyCapture.Dispose();
                _dummyCapture = null;
            }

            if (Endpoint != null && Endpoint.DataFlow == DataFlow.Capture)
            {
                _dummyCapture = new WasapiCapture(true, AudioClientShareMode.Shared, 250)
                {
                    Device = Endpoint
                };
                _dummyCapture.Initialize();
                _dummyCapture.Start();
            }
        }