Code example #1
File: AudioClientTests.cs Project: jnm2/NAudio
 public void CanReuseWasapiCapture()
 {
     using (var wasapiClient = new WasapiCapture())
     {
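         // Start, stop, then start again: verifies the same WasapiCapture instance can be reused after StopRecording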
         wasapiClient.StartRecording();
         Thread.Sleep(1000);
         wasapiClient.StopRecording();
         Thread.Sleep(1000);
         wasapiClient.StartRecording();
     }
 }
Code example #2
File: AudioClientTests.cs Project: zp9611/NAudio
 public void CanReuseWasapiCapture()
 {
     using (var wasapiClient = new WasapiCapture())
     {
         wasapiClient.StartRecording();
         Thread.Sleep(1000);
         wasapiClient.StopRecording();
         Thread.Sleep(1000);
         wasapiClient.StartRecording();
         Console.WriteLine("Disposing");
     }
 }
Code example #3
 private void Record()
 {
     try
     {
         _capture = new WasapiCapture(SelectedDevice)
         {
             ShareMode  = ShareModeIndex == 0 ? AudioClientShareMode.Shared : AudioClientShareMode.Exclusive,
             WaveFormat = SampleTypeIndex == 0
                 ? WaveFormat.CreateIeeeFloatWaveFormat(SampleRate, ChannelCount)
                 : new WaveFormat(SampleRate, BitDepth, ChannelCount)
         };
         _currentFileName = $"Burp_N0_{DateTime.Now:yyyy_dd_MM_HH_mm_ss}.wav";
         RecordLevel      = SelectedDevice.AudioEndpointVolume.MasterVolumeLevelScalar;
         // Subscribe before starting so the first DataAvailable callbacks are not missed
         _capture.DataAvailable    += CaptureOnDataAvailable;
         _capture.RecordingStopped += OnRecordingStopped;
         _capture.StartRecording();
         RecordCommand.IsEnabled    = false;
         StopCommand.IsEnabled      = true;
         Message = "Recording...";
     }
     catch (Exception e)
     {
         MessageBox.Show(e.Message);
     }
 }
Code example #4
File: AudioPlayer.cs Project: Hywel-Stoakes/MPAi-2
        /// <summary>
        /// Sets up the audio device, and the file to record into, adds listeners to the events, starts recording, and toggles the buttons.
        /// </summary>
        private void record()
        {
            try
            {
                var device = (MMDevice)AudioInputDeviceComboBox.SelectedItem;
                if (device != null)
                {
                    recordButton.Text          = stopText;
                    recordingProgressBar.Value = 0;

                    device.AudioEndpointVolume.Mute = false;
                    // Use wasapi by default
                    waveIn = new WasapiCapture(device);
                    waveIn.DataAvailable     += OnDataAvailable;
                    onDataAvailableSubscribed = true;
                    waveIn.RecordingStopped  += OnRecordingStopped;
                    writer = new WaveFileWriter(audioFilePath, waveIn.WaveFormat);
                    waveIn.StartRecording();
                    SetControlStates(true);
                }
                else
                {
                    recordButton.Text = recordText;
                    MPAiMessageBoxFactory.Show(noAudioDeviceText, warningText, MPAiMessageBoxButtons.OK);
                }
            }
            catch (Exception exp)
            {
#if DEBUG
                MPAiMessageBoxFactory.Show(exp.Message, warningText, MPAiMessageBoxButtons.OK);
#endif
            }
        }
Code example #5
        //Safe to use Stopwatch (which is from System.Diagnostics) in production code:
        //https://stackoverflow.com/questions/2805362/can-stopwatch-be-used-in-production-code

        public AudioRecorder(string outputFolder, string recordingName)
        {
            this.OutputFolder     = outputFolder;
            this.RecordingName    = recordingName;
            this.OutputPath       = Path.Combine(this.OutputFolder, recordingName);
            this.CurrentExtension = ".wav-recording";

            //From original project:
            //	//Get Device
            //	MMDeviceEnumerator enumerator = new MMDeviceEnumerator();
            //	MMDevice defaultAudioDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            _wasapiIn = new WasapiLoopbackCapture();             //TODO: Check if WasapiCapture should/can be used instead
            _wasapiIn.DataAvailable    += this.AudioDataAvailable;
            _wasapiIn.RecordingStopped += this.RecordingStopped;

            this.WaveFormat = _wasapiIn.WaveFormat;

            _wavOut = new WaveFileWriter(
                this.FullOutputPath,
                this.WaveFormat
                );

            _wasapiIn.StartRecording();

            _sampleDurationStopwatch = Stopwatch.StartNew();
        }
Code example #6
        public void UpdateDevices(MMDevice input, MMDevice output)
        {
            outp?.Stop();
            outp?.Dispose();

            inp?.StopRecording();
            inp?.Dispose();

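            // Event-driven capture (useEventSync = true) with a 5 ms buffer keeps pass-through latency low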
            inp = new WasapiCapture(input, true, 5);
            inp.DataAvailable += OnCapture;

            buffer = new BufferedWaveProvider(inp.WaveFormat);

            mixer = new MixingWaveProvider32();
            mixer.AddInputStream(buffer);

            if (resampler == null)
            {
                resampler = new AudioResampler(mixer);
            }
            else
            {
                resampler.Update(mixer);
            }

            outp = new WasapiOut(output, AudioClientShareMode.Shared, true, 5);
            outp.Init(resampler);

            inp.StartRecording();
            outp.Play();
        }
Code example #7
File: Form1.cs Project: my04337/sound_utils
        /// Starts recording
        private void startRecording()
        {
            // Prepare for recording
            if (mCapture == null)
            {
                MessageBox.Show(this, "No recording device has been set", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
            if (mChannelInfo == null)
            {
                MessageBox.Show(this, "No recording channel has been set", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
            var f = mCapture.WaveFormat;

            mRecordingFormat = WaveFormat.CreateCustomFormat(
                f.Encoding,
                f.SampleRate,
                mChannelInfo.Channels, //f.Channels,
                (int)((float)f.AverageBytesPerSecond / f.Channels * mChannelInfo.Channels),
                (int)((float)f.BlockAlign / f.Channels * mChannelInfo.Channels),
                f.BitsPerSample
                );
            mWriter = new WaveFileWriter("a.wav", mRecordingFormat);

            // Update UI
            startRecordingButton.Enabled    = false;
            stopRecordingButton.Enabled     = true;
            inputDeviceListSelector.Enabled = false;
            inputChannelSelector.Enabled    = false;

            // Start recording
            mCapture.StartRecording();
        }
Code example #8
        public void StartRecord()
        {
            try
            {
                _audioCapture = new WasapiLoopbackCapture();
                ffmpegProcess = new Process();
                ffmpegProcess.StartInfo.FileName               = "ffmpeg.exe";
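                // Note: the input arguments below assume the loopback mix is 44.1 kHz stereo 32-bit float; check _audioCapture.WaveFormat if it differs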
                ffmpegProcess.StartInfo.Arguments              = "-f f32le -ac 2 -ar 44100 -i - -ar 8000 -ac 1 -f s16le -";
                ffmpegProcess.StartInfo.RedirectStandardInput  = true;
                ffmpegProcess.StartInfo.RedirectStandardOutput = true;
                ffmpegProcess.StartInfo.UseShellExecute        = false;
                ffmpegProcess.StartInfo.CreateNoWindow         = true;
                ffmpegProcess.Start();


                _audioCapture.RecordingStopped += OnRecordingStopped;
                _audioCapture.DataAvailable    += OnDataAvailable;

                InfoMessage?.Invoke(this, "Recording...");

                _audioCapture.StartRecording();
            }
            catch (Exception e)
            {
                InfoMessage?.Invoke(this, $"Error: {e.Message}");
            }
        }
Code example #9
        /// <summary>
        /// Start and stop recording
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void StartStopBtn_Click(object sender, EventArgs e)
        {
            if (ViewModule.IsRecording)
            {
                capture.StopRecording();
                // Note: DataAvailable can still fire briefly after StopRecording;
                // disposing the writer in the RecordingStopped handler would be safer
                if (writer != null)
                {
                    writer.Close();
                }
                if (IsSaveFile.Checked)
                {
                    FileNameContent.Text += "(Saved)";
                }
            }
            else
            {
                if (bufferedGraphics == null)
                {
                    bufferedGraphics = BufferedGraphicsManager.Current.Allocate(DrawPanel.CreateGraphics(), DrawPanel.ClientRectangle);
                }
                if (IsSaveFile.Checked)
                {
                    string filename = Path.GetFileName(Path.GetRandomFileName()) + ".wav";
                    writer = new WaveFileWriter(filename, capture.WaveFormat);
                    FileNameContent.Text = filename;
                }
                capture.StartRecording();
            }

            ViewModule.IsRecording ^= true;
        }
Code example #10
        /// <summary>
        /// Records the audio for the specified number of buffer fills.
        /// </summary>
        public async Task <Double[]> Record(Int32 bufferFills)
        {
            if (_wasapiCapture != null)
            {
                _wasapiCapture.StartRecording();

                _measured  = -1;
                _measuring = true;

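                // Poll until the DataAvailable handler (not shown) has collected the requested number of buffers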
                while (_measured < bufferFills)
                {
                    await Task.Delay(50);
                }

                _measuring = false;
                _wasapiCapture.StopRecording();
                var lengths = _measurements.Take(bufferFills).Select(a => a.Length).ToArray();
                var result  = new Double[lengths.Sum() / 4];
                var k       = 0;

                for (var j = 0; j < bufferFills; j++)
                {
                    for (var i = 0; i < lengths[j]; i += 4, k++)
                    {
                        var value = BitConverter.ToSingle(_measurements[j], i);
                        result[k] = value;
                    }
                }

                return(result);
            }

            return(new Double[0]);
        }
Code example #11
        public override void SetProperties(UserControl[] _controls)
        {
            MMDeviceEnumerator e = new MMDeviceEnumerator();
            var ep = e.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active);

            base.SetProperties(_controls);
            var ctrl = _controls.OfType <PropertiesControls.SchedulerControl>().First();
            var mmi  = _controls.OfType <PropertiesControls.ComboBoxControl>().First();

            AudioDevice = (string)mmi.Value;

            _cancelled = true;
            var device = ep.Where(x => x.FriendlyName == AudioDevice).FirstOrDefault();

            if (device != null)
            {
                _cancelled          = false;
                _capture            = new WasapiCapture(device);
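                // 8-bit PCM is an unusual capture format; many WASAPI devices reject it in shared mode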
                _capture.WaveFormat = new WaveFormat(44100, 8, 2);

                //lc.WaveFormat = new NAudio.Wave.WaveFormat(44100, 8, 2);
                _capture.DataAvailable += Lc_DataAvailable;
                _capture.StartRecording();
            }


            _events.Clear();
            foreach (var item in ctrl.Events.OrderBy(x => x.A.Ticks))
            {
                _events.Add(item);
            }
        }
Code example #12
 public Task Start()
 {
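     // Tasks.Taskify appears to be a project helper that runs the delegate and returns the resulting Task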
     return(Tasks.Taskify(() =>
     {
         StopRequested = false;
         Capture.StartRecording();
     }));
 }
Code example #13
    static ShazamResult CaptureAndTag()
    {
        var analysis = new Analysis();
        var finder   = new LandmarkFinder(analysis);

        using (var capture = new WasapiCapture()) {
            var captureBuf = new BufferedWaveProvider(capture.WaveFormat)
            {
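                // return only what is buffered from Read instead of padding with silence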
                ReadFully = false
            };

            capture.DataAvailable += (s, e) => {
                captureBuf.AddSamples(e.Buffer, 0, e.BytesRecorded);
            };

            capture.StartRecording();

            using (var resampler = new MediaFoundationResampler(captureBuf, new WaveFormat(Analysis.SAMPLE_RATE, 16, 1))) {
                var sampleProvider = resampler.ToSampleProvider();
                var retryMs        = 3000;
                var tagId          = Guid.NewGuid().ToString();

                while (true)
                {
                    while (captureBuf.BufferedDuration.TotalSeconds < 1)
                    {
                        Thread.Sleep(100);
                    }

                    analysis.ReadChunk(sampleProvider);

                    if (analysis.StripeCount > 2 * LandmarkFinder.RADIUS_TIME)
                    {
                        finder.Find(analysis.StripeCount - LandmarkFinder.RADIUS_TIME - 1);
                    }

                    if (analysis.ProcessedMs >= retryMs)
                    {
                        //new Painter(analysis, finder).Paint("c:/temp/spectro.png");
                        //new Synthback(analysis, finder).Synth("c:/temp/synthback.raw");

                        var sigBytes = Sig.Write(Analysis.SAMPLE_RATE, analysis.ProcessedSamples, finder);
                        var result   = ShazamApi.SendRequest(tagId, analysis.ProcessedMs, sigBytes).GetAwaiter().GetResult();
                        if (result.Success)
                        {
                            return(result);
                        }

                        retryMs = result.RetryMs;
                        if (retryMs == 0)
                        {
                            return(result);
                        }
                    }
                }
            }
        }
    }
Code example #14
        public void SetDevice(MMDevice Device)
        {
            CaptureDevice = Device;
            Meter         = Device.AudioMeterInformation;

            WasapiCapture = new WasapiCapture(Device);
            WasapiCapture.DataAvailable += WasapiCapture_DataAvailable;
            WasapiCapture.StartRecording();
        }
Code example #15
File: LSMixer.cs Project: Wessie/Loopstream
        public void FadeVolume(Slider slider, float vol, double seconds)
        {
            Logger.mix.a("fadeVol " + slider + " to " + vol + " over " + seconds);
            bool micOn  = slider == Slider.Mic && micVol.GetVolume() < 0.1 && vol > 0.1;
            bool micOff = slider == Slider.Mic && micVol.GetVolume() > 0.1 && vol < 0.1;

            if (micOn || micOff)
            {
                if (micOn)
                {
                    killmic.Stop();
                    if (!micVol.OK())
                    {
                        micAdd();
                        if (micVol.OK())
                        {
                            Logger.mix.a("mic.startRec");
                            micCap.StartRecording();
                        }
                    }
                }
                else if (settings.killmic)
                {
                    killmic.Stop();
                    killmic.Interval = (int)(seconds * 1000) + 250;
                    killmic.Start();
                }
                try
                {
                    LSSettings.LSParams[] encs = { settings.mp3, settings.ogg };
                    foreach (LSSettings.LSParams enc in encs)
                    {
                        if (enc.enabled && !string.IsNullOrWhiteSpace(enc.i.filename))
                        {
                            System.IO.File.AppendAllText(
                                enc.i.filename + ".txt",
                                enc.i.timestamp() + " " + (micOn ? "@" : "-") + "\r\n",
                                Encoding.UTF8);
                        }
                    }
                }
                catch { }
            }
            if (slider == Slider.Music)
            {
                recVol.SetVolume(vol, seconds);
            }
            if (slider == Slider.Mic)
            {
                micVol.SetVolume(vol, seconds);
            }
            if (slider == Slider.Out)
            {
                outVol.SetVolume(vol, seconds);
            }
            //Console.WriteLine("VOLFADE " + vol);
        }
Code example #16
File: AudioClientTests.cs Project: zp9611/NAudio
 public void CanCaptureDefaultDeviceInDefaultFormatUsingWasapiCapture()
 {
     using (var wasapiClient = new WasapiCapture())
     {
         wasapiClient.StartRecording();
         Thread.Sleep(1000);
         wasapiClient.StopRecording();
     }
 }
Code example #17
        public static CaptureSession FromAudioInput(MMDevice mmDevice)
        {
            var recorder = new WasapiCapture(mmDevice);

            var session = new CaptureSession(mmDevice, recorder);

            recorder.DataAvailable += session.DataAvailableEvent;
            recorder.StartRecording();
            return(session);
        }
Code example #18
 private void StartRecording()
 {
     StartButton.IsEnabled = false;
     StopButton.IsEnabled  = true;
     LoadButton.IsEnabled  = false;
     Capture                   = new WasapiCapture((MMDevice)DeviceBox.SelectedItem);
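     // Requests 44.1 kHz 16-bit stereo; whether shared-mode capture honors this depends on the device and NAudio version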
     Capture.WaveFormat        = new WaveFormat(44100, 16, 2);
     Capture.DataAvailable    += ReceiveWave;
     Capture.RecordingStopped += RecordingStopped;
     Capture.StartRecording();
 }
Code example #19
        public void StartForwarding()
        {
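            // Pipe the default capture device straight to the default render device through a buffer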
            _recorder                = new WasapiCapture();
            _bufferedWaveProvider    = new BufferedWaveProvider(_recorder.WaveFormat);
            _recorder.DataAvailable += OnRecorderDataAvailable;

            _player = new WasapiOut();
            _player.Init(_bufferedWaveProvider);

            _player.Play();
            _recorder.StartRecording();
        }
Code example #20
        private void InitialiseRecording()
        {
            var enumerator = new MMDeviceEnumerator();

            selectedRecordingDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Communications);
            microphone = new WasapiCapture(selectedRecordingDevice);
            //microphone.ShareMode
            selectedRecordingDevice.AudioEndpointVolume.MasterVolumeLevelScalar = 1;
            // Subscribe before starting so early DataAvailable callbacks are not missed
            microphone.RecordingStopped += OnRecordingStopped;
            microphone.DataAvailable    += CaptureOnDataAvailable;
            microphone.StartRecording();
        }
Code example #21
File: NAudioHandler.cs Project: viper3400/Cobaka
 public void StartListen()
 {
     _audioIn = new WasapiCapture();
     _audioIn.DataAvailable    += AudioInDataAvailable;
     _audioIn.RecordingStopped += AudioInRecordingStopped;
     PeakValue = 0;
     _audioIn.StartRecording();
     IsListening = true;
     _isStopAndDisposeRequested = false;
     OnAudioEventAvailable(new AudioEventArgs {
         State = AudioRecordState.ListeningStarted, Information = "Listening started."
     });
 }
Code example #22
        public virtual void PauseRecording()
        {
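            // IWaveIn has no native pause; "pausing" stops the capture and "resuming" starts it again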
            if (IsPaused)
            {
                _captureStream.StartRecording();
            }
            else
            {
                _captureStream.StopRecording();
            }

            IsPaused = !IsPaused;
        }
Code example #23
        private void InitMicCapture(string guid, IPAddress ipAddress, int port, InputDeviceManager inputManager)
        {
            if (_audioInputSingleton.MicrophoneAvailable)
            {
                try
                {
                    var device = (MMDevice)_audioInputSingleton.SelectedAudioInput.Value;

                    if (device == null)
                    {
                        device = WasapiCapture.GetDefaultCaptureDevice();
                    }

                    device.AudioEndpointVolume.Mute = false;

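                    // second argument enables event-driven (useEventSync) capture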
                    _wasapiCapture                   = new WasapiCapture(device, true);
                    _wasapiCapture.ShareMode         = AudioClientShareMode.Shared;
                    _wasapiCapture.DataAvailable    += WasapiCaptureOnDataAvailable;
                    _wasapiCapture.RecordingStopped += WasapiCaptureOnRecordingStopped;

                    _udpVoiceHandler =
                        new UdpVoiceHandler(guid, ipAddress, port, this, inputManager);
                    var voiceSenderThread = new Thread(_udpVoiceHandler.Listen);

                    voiceSenderThread.Start();

                    _wasapiCapture.StartRecording();

                    _subs.Add(MessageHub.Instance.Subscribe <SRClient>(RemoveClientBuffer));
                }
                catch (Exception ex)
                {
                    Logger.Error(ex, "Error starting audio Input - Quitting! " + ex.Message);

                    ShowInputError("Problem initialising Audio Input!");

                    Environment.Exit(1);
                }
            }
            else
            {
                //no mic....
                _udpVoiceHandler =
                    new UdpVoiceHandler(guid, ipAddress, port, this, inputManager);
                _subs.Add(MessageHub.Instance.Subscribe <SRClient>(RemoveClientBuffer));
                var voiceSenderThread = new Thread(_udpVoiceHandler.Listen);
                voiceSenderThread.Start();
            }
        }
Code example #24
File: Recorder.cs Project: cheanizer/wslinphone
 public void startRecordInput()
 {
     recording.StartAt = DateTime.Now;
     if (!string.IsNullOrEmpty(Prefix))
     {
         Filename = Prefix + "_" + Name + inputPrefix + ".wav";
     }
     else
     {
         Filename = Name + inputPrefix + ".wav";
     }
     inputFilePath = Path.Combine(outputFolder, Filename);
     writerInput   = new WaveFileWriter(inputFilePath, eventInput.WaveFormat);
     eventInput.StartRecording();
 }
Code example #25
        private void AnswerButton_Click(object sender, RoutedEventArgs e)
        {
            modem   = new Modem();
            capture = GetCaptureDevice();
            waveOut = new WaveOutEvent();
            var playProvider = modem.Answer(capture);

            modem.ByteReceived += modem_ByteReceived;
            waveOut.Init(playProvider);
            waveOut.Play();
            capture.StartRecording();
            HangupButton.IsEnabled = true;
            CallButton.IsEnabled   = false;
            AnswerButton.IsEnabled = false;
        }
Code example #26
 // Start capture
 public void Activate()
 {
     _fullBuffer = false;
     _index      = 0;
     _capture.StartRecording();
     if (_renderFix != null)
     {
         // Audio event does not fire if no audio is being played
         // so play silence into output devices.
         if (_renderFix.PlaybackState != PlaybackState.Paused)
         {
             _renderFix.Init(new SilenceProvider(_capture.WaveFormat));
         }
         _renderFix.Play();
     }
 }
Code example #27
        public void Initialize()
        {
            _endpoint = _naudioDeviceEnumerationService.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            // Don't initialize if there is no available device.
            if (_endpoint == null)
            {
                return;
            }

            _capture = _useCustomWasapiCapture
                ? CustomWasapiLoopbackCapture.CreateCustomWasapiLoopbackCapture(_endpoint, false, _logger)
                : new WasapiLoopbackCapture();
            _capture.RecordingStopped += CaptureOnRecordingStopped;

            if (_capture.WaveFormat.Channels != _endpoint.AudioClient.MixFormat.Channels)
            {
                // Log this to see how it behaves in other setups; it's unclear whether this can happen, and if it does it could lead to exceptions
                _logger?.Verbose($"AudioEndPoint Waveformat has {_endpoint.AudioClient.MixFormat.Channels} channels but WasapiCapture was created for {_capture.WaveFormat.Channels} channels");
            }

            // Handle single-channel by passing the same data for left and right
            if (_capture.WaveFormat.Channels == 1)
            {
                _capture.DataAvailable += ProcessMonoData;
            }
            else if (_capture.WaveFormat.Channels == 4)
            {
                _capture.DataAvailable += ProcessQuadraphonicData;
            }
            // Handle 5.1 by averaging out the extra channels
            else if (_capture.WaveFormat.Channels == 6)
            {
                _capture.DataAvailable += Process51Data;
            }
            // Handle 7.1 by averaging out the extra channels
            else if (_capture.WaveFormat.Channels == 8)
            {
                _capture.DataAvailable += Process71Data;
            }
            // Anything else is limited to two channels
            else
            {
                _capture.DataAvailable += ProcessStereoData;
            }
            _capture.StartRecording();
        }
Code example #28
File: Program.cs Project: KrzysztofMajor/Recorder
        static void Main(string[] args)
        {
            var now         = DateTime.Now;
            var machineName = Environment.MachineName.ToLower(CultureInfo.CurrentCulture);

            var outputFolder = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), "NAudio");

            Directory.CreateDirectory(outputFolder);
            var outputFilepath = Path.Combine(outputFolder, "output.wav");
            var mp3Filepath    = Path.Combine(outputFolder, $"{machineName}{now:ddMMyyyyHHmmss}.mp3");

            var waveIn = new WasapiCapture {
            };
            var writer = new WaveFileWriter(outputFilepath, waveIn.WaveFormat);

            waveIn.DataAvailable += (sender, eventArgs) =>
            {
                Console.Write(".");
                writer.Write(eventArgs.Buffer, 0, eventArgs.BytesRecorded);
            };

            // Subscribe before starting so the first buffers are not lost
            waveIn.StartRecording();

            // Stop automatically after 10 seconds (fire once only)
            var tm = new System.Timers.Timer(10 * 1000);
            tm.AutoReset = false;
            tm.Elapsed  += (sender, eventArgs) => waveIn.StopRecording();
            tm.Start();

            var e = new ManualResetEvent(false);

            waveIn.RecordingStopped += (sender, eventArgs) =>
            {
                writer.Dispose();
                waveIn.Dispose();

                Console.WriteLine("writing mp3");
                using (var reader = new AudioFileReader(outputFilepath))
                    using (var mp3Writer = new LameMP3FileWriter(mp3Filepath, reader.WaveFormat, 128))
                        reader.CopyTo(mp3Writer);

                Console.WriteLine("writing done");
                e.Set();
            };

            e.WaitOne();
        }
Code example #29
        private void btRecord_Click(object sender, EventArgs e)
        {
            if (txtTenFile.Text == string.Empty)
            {
                MessageBox.Show("Please type the name first!");
                txtTenFile.Focus();
                return;
            }

            DisposWave();
            if (!Directory.Exists(txtPathSave.Text))
            {
                Directory.CreateDirectory(txtPathSave.Text);
            }
            sbtStop.Enabled = true;
            sbtStop.Focus();
            //sourceStream.DataAvailable += this.SourceStreamDataAvailable;

            //if ((int)cbMic.SelectedValue == -1) //Speaker

            sourceStream1 = new WasapiLoopbackCapture();
            sourceStream1.DataAvailable += SourceStreamDataAvailable;
            waveWriter = new WaveFileWriter(txtPathSave.Text + @"\" + txtTenFile.Text + ".wav", sourceStream1.WaveFormat);


            var enumerator     = new MMDeviceEnumerator();
            var captureDevices = enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active).ToArray();
            var defaultDevice  = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
            var device         = captureDevices.FirstOrDefault(c => c.ID == defaultDevice.ID);

            sourceStream = new WasapiCapture(device);

            sourceStream.DataAvailable += this.SourceStreamDataAvailable2;

            waveWriter2 = new WaveFileWriter(txtPathSave.Text + @"\" + txtTenFile.Text + "mic.wav", sourceStream.WaveFormat);



            sourceStream1.StartRecording();
            sourceStream.StartRecording();
            currentrecord = txtPathSave.Text + @"\" + txtTenFile.Text + ".wav";

            // MessageBox.Show(cbMic.SelectedIndex.ToString());
            this.sbtRecord.Enabled = false;
            this.sbtStop.Enabled   = true;
        }
Code example #30
        public void Start(VoiceNextConnection voiceStream)
        {
            try
            {
                _capture.StartRecording();
                _localOutput?.Play();

                _playing = true;

                Task.Run(async() => await SendToStream(voiceStream));
            }
            catch (Exception e)
            {
                _log.Error(e, $"An error occurred in the stream task: {e.Message}");
                Stop();
            }
        }