Example #1
        public MainWindow()
        {
            InitializeComponent();

            waveOut = new WasapiOut();
            capture = new WasapiCapture();
            cache   = new CacheAndSave(capture.WaveFormat);

            // WaveFormat includes information such as encoding, channel count, sample rate, average byte rate, etc.
            Box.Items.Add(string.Format("Encoding: {0}; Channels: {1}, Sampling rate: {2}, BytesRate: {3}",
                                        capture.WaveFormat.Encoding, capture.WaveFormat.Channels, capture.WaveFormat.SampleRate, capture.WaveFormat.AverageBytesPerSecond));

            // PCM => integer samples (byte width varies) with a range (MIN_VAL, MAX_VAL) that depends on the sample size
            // IeeeFloat => float samples (4 bytes) with range (-1.0, 1.0)
            if (capture.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
            {
                throw new FormatException("Format is not IeeeFloat!!");
            }

            // handler that listens to incoming data and puts it into the cache
            capture.DataAvailable += (o, args) => cache.Append(args.Buffer, args.BytesRecorded);

            // Sync input and output WaveFormat;
            waveOut.Init(cache);

            Box.Items.Add("READY:");
        }
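A note on the format check above: because the capture is guaranteed to be IeeeFloat, every 4 bytes of the DataAvailable buffer form one 32-bit float sample. A minimal sketch of converting a buffer into samples (a hypothetical helper, not part of the original snippet):

        // Hypothetical helper: interpret an IeeeFloat capture buffer as 32-bit float samples.
        private static float[] ToFloatSamples(byte[] buffer, int bytesRecorded)
        {
            var samples = new float[bytesRecorded / 4];
            for (int i = 0; i < samples.Length; i++)
            {
                // Each sample is 4 bytes wide and lies in the range -1.0 .. 1.0.
                samples[i] = BitConverter.ToSingle(buffer, i * 4);
            }
            return samples;
        }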
Example #2
        private void buttonStartRecording_Click(object sender, EventArgs e)
        {
            if (waveIn == null)
            {
                if(outputFilename == null)
                {
                    buttonSelectOutputFile_Click(sender, e);
                }
                if(outputFilename == null)
                {
                    return;
                }
                if (radioButtonWaveIn.Checked)
                {
                    waveIn = new WaveIn();
                    waveIn.WaveFormat = new WaveFormat(8000, 1);
                }
                else
                {
                    waveIn = new WasapiCapture((MMDevice)comboDevices.SelectedItem);
                    // go with the default format as WASAPI doesn't support SRC
                }

                writer = new WaveFileWriter(outputFilename, waveIn.WaveFormat);

                waveIn.DataAvailable += new EventHandler<WaveInEventArgs>(waveIn_DataAvailable);
                waveIn.RecordingStopped += new EventHandler(waveIn_RecordingStopped);
                waveIn.StartRecording();
                buttonStartRecording.Enabled = false;
            }
        }
Example #3
        /// <summary>
        /// Cleanup.
        /// </summary>
        private void CleanUp()
        {
            // If recording.
            Stop();

            // Cleanup.
            if (_waveIn != null)
            {
                _waveIn.Dispose();
            }

            if (_writer != null)
            {
                _writer.Dispose();
            }

            // If the stream was created internally.
            if (_internalStream)
            {
                if (_audioStream != null)
                {
                    _audioStream.Dispose();
                }
            }

            // If the stream was created internally.
            if (_internalStream)
            {
                _audioStream = null;
            }

            _writer = null;
            _waveIn = null;
        }
Example #4
 /// <summary>
 /// Create an input
 /// </summary>
 /// <param name="ID">The ID of the input to be created</param>
 public Input(string ID)
 {
     // Set the device ID
     deviceID = ID;
     // Get Device from specified ID
     MMDeviceEnumerator devices = new MMDeviceEnumerator();
     device = devices.GetDevice(ID);
     // Set wave in to WASAPI capture of the specified device
     waveIn = new WasapiCapture(device);
     // Set the number of bytes used by each sample
     sampleByteSize = waveIn.WaveFormat.BitsPerSample / 8;
     // Add event handler to retrieve samples from the device
     waveIn.DataAvailable += waveIn_DataAvailable;
     // Create buffered wave provider
     bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);
     bufferedWaveProvider.DiscardOnBufferOverflow = true;
     // Create sample channel
     sampleChannel = new SampleChannel(bufferedWaveProvider);
     // Attach the pre-volume metering handler
     sampleChannel.PreVolumeMeter += sampleProvider_StreamVolume;
     // Start recording
     try
     {
         waveIn.StartRecording();
     }
     catch
     {
         throw new ArgumentException("This input device is not supported.");
     }
 }
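The two handlers wired up in this constructor are not shown; a minimal sketch of what they might look like, assuming the bufferedWaveProvider field above is the data sink (hypothetical bodies):

 // Hypothetical handlers assumed by the constructor above.
 private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
 {
     // Feed the captured bytes into the buffered provider that backs the sample channel.
     bufferedWaveProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
 }

 private void sampleProvider_StreamVolume(object sender, StreamVolumeEventArgs e)
 {
     // MaxSampleValues holds the peak sample value per channel since the last event,
     // e.g. to drive a level meter.
     float peak = e.MaxSampleValues[0];
 }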
Example #5
        private void buttonStartRecording_Click(object sender, EventArgs e)
        {
            if (waveIn == null)
            {
                if (outputFilename == null)
                {
                    buttonSelectOutputFile_Click(sender, e);
                }
                if (outputFilename == null)
                {
                    return;
                }
                if (radioButtonWaveIn.Checked)
                {
                    waveIn            = new WaveIn();
                    waveIn.WaveFormat = new WaveFormat(8000, 1);
                }
                else
                {
                    waveIn = new WasapiCapture((MMDevice)comboDevices.SelectedItem);
                    // go with the default format as WASAPI doesn't support SRC
                }

                writer = new WaveFileWriter(outputFilename, waveIn.WaveFormat);

                waveIn.DataAvailable    += new EventHandler <WaveInEventArgs>(waveIn_DataAvailable);
                waveIn.RecordingStopped += new EventHandler(waveIn_RecordingStopped);
                waveIn.StartRecording();
                buttonStartRecording.Enabled = false;
            }
        }
Example #6
 private void CreateWaveInDevice()
 {
     waveIn                   = new WaveIn();
     waveIn.WaveFormat        = new WaveFormat(44100, 16, 1);
     waveIn.DataAvailable    += OnDataAvailable;
     waveIn.RecordingStopped += OnRecordingStopped;
 }
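The OnDataAvailable and OnRecordingStopped handlers are not part of this snippet; a typical sketch, assuming a WaveFileWriter field named writer (as in the other examples) and a RecordingStopped event of type EventHandler<StoppedEventArgs>:

 // Sketch of typical handlers for the device created above.
 private void OnDataAvailable(object sender, WaveInEventArgs e)
 {
     // Append the captured bytes to the open WAV file.
     writer.Write(e.Buffer, 0, e.BytesRecorded);
 }

 private void OnRecordingStopped(object sender, StoppedEventArgs e)
 {
     // Release the capture device and finalize the WAV header.
     waveIn.Dispose();
     waveIn = null;
     writer.Dispose();
     writer = null;
 }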
Example #7
        static void Main(string[] args)
        {
            outputFilename = String.Format("CaptingAudio_{0:yyy-MM-dd HH-mm-ss}.mp3", DateTime.Now);
            string outputFolder;
            Console.WriteLine("This program records the computer's audio output (loopback recording).");
            Console.WriteLine("Save : " + Path.GetTempPath() + "\\CaptingAudio");
            Console.WriteLine("0: StartRecording, 1: StopRecord");
            outputFolder = Path.Combine(Path.GetTempPath(), "CaptingAudio");
            Directory.CreateDirectory(outputFolder);
            if (int.Parse(Console.ReadLine()) == 0)
            {
                if (waveIn == null)
                {
                    waveIn = new WasapiLoopbackCapture();
                    writer = new LameMP3FileWriter(Path.Combine(outputFolder, outputFilename), waveIn.WaveFormat, 128);

                    waveIn.DataAvailable += OnDataAvailable;
                    waveIn.RecordingStopped += OnRecordingStopped;
                    waveIn.StartRecording();
                }
            }
            if (int.Parse(Console.ReadLine()) == 1)
            {
                StopRecording();
            }
        }
Example #8
        private static LiveAudioDtmfAnalyzer InitLiveAudioAnalyzer(IWaveIn waveIn)
        {
            var analyzer = new LiveAudioDtmfAnalyzer(waveIn);

            analyzer.DtmfToneStarted += start => Console.WriteLine($"{start.DtmfTone.Key}");
            return(analyzer);
        }
Example #9
 private void CreateWaveInDevice()
 {
     if (radioButtonWaveIn.Checked)
     {
         waveIn = new WaveIn();
         waveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWaveInEvent.Checked)
     {
         waveIn = new WaveInEvent();
         waveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWasapi.Checked)
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         var device = (MMDevice) comboWasapiDevices.SelectedItem;
         waveIn = new WasapiCapture(device);
     }
     else
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         waveIn = new WasapiLoopbackCapture();
     }
     waveIn.DataAvailable += OnDataAvailable;
     waveIn.RecordingStopped += OnRecordingStopped;
 }
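Because WASAPI capture always records in the device's mix format, any required sample-rate conversion has to happen after the fact. One option is to convert the recorded file in a second pass; a sketch with hypothetical file names, assuming NAudio's MediaFoundationResampler is available:

 // Sketch: convert a WASAPI-format recording to 16 kHz 16-bit mono afterwards.
 private static void ResampleRecording()
 {
     MediaFoundationApi.Startup(); // initialize Media Foundation before using the resampler
     using (var reader = new WaveFileReader("recorded.wav"))        // hypothetical input file
     using (var resampler = new MediaFoundationResampler(reader, new WaveFormat(16000, 16, 1)))
     {
         WaveFileWriter.CreateWaveFile("resampled.wav", resampler); // hypothetical output file
     }
 }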
Example #10
        public MainForm()
        {
            InitializeComponent();

            // TODO: add coreProps.json file selection
            var corePropsPath = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData) + @"\SteelSeries\SteelSeries Engine 3\coreProps.json";
            using (StreamReader file = File.OpenText(corePropsPath)) {
                JsonSerializer serializer = new JsonSerializer();
                Dictionary<string, string> coreProps = JsonConvert.DeserializeObject<Dictionary<string, string>>(file.ReadToEnd());
                sseUri = new Uri("http://" + coreProps["address"] + "/game_event");
            }

            binner = new FrequencyBinner(bins);

            fftResults = new float[fftSize];

            sampleAggregator = new SampleAggregator(fftSize);
            
            // TODO: add device selection
            waveIn = new WasapiLoopbackCapture();
            waveIn.RecordingStopped += (s, e) => {
                _recording = false;
            };
            waveIn.DataAvailable += OnDataAvailable;
        }
        private void OnButtonStartRecordingClick(object sender, EventArgs e)
        {
            if (writer != null)
            {
                writer.Close();
                writer.Dispose();
                writer = null;
            }

            DisableButtons();
            buttonStopRecording.Enabled = true;

            captureDevice = new WaveIn();

            captureDevice.DataAvailable    += OnDataAvailable;
            captureDevice.RecordingStopped += OnRecordingStopped;

            //if(MainForm.savingWaveProvider != null)
            //{
            //    MainForm.savingWaveProvider.ClearWriterFileHandles();
            //}

            // Forcibly turn on the microphone (some programs (Skype) turn it off).
            inputDevice.AudioEndpointVolume.Mute = false;

            outputFilename = $"{recordingNameText.Text}.wav";
            writer         = new WaveFileWriter(Path.Combine(outputFolder, outputFilename), captureDevice.WaveFormat);
            captureDevice.StartRecording();
            SetControlStates(true);
        }
Example #12
        //Once we stop recording, clean up and store recording end date/time
        //INPUT: sender and StoppedEventArgs
        //OUTPUT:
        public void OnRecordingStopped(object sender, StoppedEventArgs e)
        {
            functionResult result = new functionResult();

            try
            {
                //Clean up everything used within the recording
                if (waveIn != null)
                {
                    waveIn.Dispose();
                    waveIn = null;
                }

                if (writer != null)
                {
                    writer.Close();
                    writer = null;
                }

                //Store the date/time the recording ended to be used for calculating the sample length
                recordingEnded = DateTime.Now;
            }
            catch (Exception ex)
            {
                result.Result  = false;
                result.Message = "ERROR - OnRecordingStopped - " + ex.ToString();
            }
        }
Example #13
 private void CreateWaveInDevice()
 {
     if (radioButtonWaveIn.Checked)
     {
         waveIn            = new WaveIn();
         waveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWaveInEvent.Checked)
     {
         waveIn            = new WaveInEvent();
         waveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWasapi.Checked)
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         var device = (MMDevice)comboWasapiDevices.SelectedItem;
         waveIn = new WasapiCapture(device);
     }
     else
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         waveIn = new WasapiLoopbackCapture();
     }
     waveIn.DataAvailable    += OnDataAvailable;
     waveIn.RecordingStopped += OnRecordingStopped;
 }
Example #14
        static void Main(string[] args)
        {
            outputFilename = String.Format("CaptingAudio_{0:yyy-MM-dd HH-mm-ss}.mp3", DateTime.Now);
            string outputFolder;

            Console.WriteLine("This program records the computer's audio output (loopback recording).");
            Console.WriteLine("Save : " + Path.GetTempPath() + "\\CaptingAudio");
            Console.WriteLine("0: StartRecording, 1: StopRecord");
            outputFolder = Path.Combine(Path.GetTempPath(), "CaptingAudio");
            Directory.CreateDirectory(outputFolder);
            if (int.Parse(Console.ReadLine()) == 0)
            {
                if (waveIn == null)
                {
                    waveIn = new WasapiLoopbackCapture();
                    writer = new LameMP3FileWriter(Path.Combine(outputFolder, outputFilename), waveIn.WaveFormat, 128);

                    waveIn.DataAvailable    += OnDataAvailable;
                    waveIn.RecordingStopped += OnRecordingStopped;
                    waveIn.StartRecording();
                }
            }
            if (int.Parse(Console.ReadLine()) == 1)
            {
                StopRecording();
            }
        }
Example #15
        private void OnButtonStartRecordingClick(object sender, EventArgs e)
        {
            if (waveIn == null)
            {
                outputFilename = String.Format("NAudioDemo {0:yyy-MM-dd HH-mm-ss}.wav", DateTime.Now);
                if (radioButtonWaveIn.Checked)
                {
                    waveIn            = new WaveIn();
                    waveIn.WaveFormat = new WaveFormat(8000, 1);
                }
                else if (radioButtonWaveInEvent.Checked)
                {
                    waveIn            = new WaveInEvent();
                    waveIn.WaveFormat = new WaveFormat(8000, 1);
                }
                else if (radioButtonWasapi.Checked)
                {
                    // can't set WaveFormat as WASAPI doesn't support SRC
                    var device = (MMDevice)comboWasapiDevices.SelectedItem;
                    waveIn = new WasapiCapture(device);
                }
                else
                {
                    // can't set WaveFormat as WASAPI doesn't support SRC
                    waveIn = new WasapiLoopbackCapture();
                }

                writer = new WaveFileWriter(Path.Combine(outputFolder, outputFilename), waveIn.WaveFormat);

                waveIn.DataAvailable    += OnDataAvailable;
                waveIn.RecordingStopped += OnRecordingStopped;
                waveIn.StartRecording();
                buttonStartRecording.Enabled = false;
            }
        }
Example #16
        public SoundCapture(int sampleSize, int buffermilliseconds)
        {
            _waveIn = new WaveInEvent
            {
                BufferMilliseconds = buffermilliseconds,
                WaveFormat         = new WaveFormat(sampleSize, 16, 1),
            };

            var dataAvailableObservable = Observable.FromEvent <EventHandler <WaveInEventArgs>, WaveInEventArgs>(
                h => ((sender, eventArgs) => h(eventArgs))
                , h => _waveIn.DataAvailable += h
                , h => _waveIn.DataAvailable -= h);

            var recordingStopObservable = Observable.FromEvent <EventHandler <StoppedEventArgs>, StoppedEventArgs>(
                h => ((sender, eventArgs) => h(eventArgs))
                , h => _waveIn.RecordingStopped += h, h => _waveIn.RecordingStopped -= h
                );

            dataAvailableObservable.Select(x =>
            {
                var buffer = new byte[x.BytesRecorded];
                Buffer.BlockCopy(x.Buffer, 0, buffer, 0, buffer.Length);
                return(buffer);
            }).Subscribe(_avaibleSubject)
            .AddTo(_compositeDisposable);

            recordingStopObservable
            .Subscribe(x => _avaibleSubject.OnCompleted())
            .AddTo(_compositeDisposable);

            _compositeDisposable.Add(_waveIn);
            _compositeDisposable.Add(_avaibleSubject);
        }
Example #17
        public MainForm()
        {
            InitializeComponent();

            // TODO: add coreProps.json file selection
            var corePropsPath = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData) + @"\SteelSeries\SteelSeries Engine 3\coreProps.json";

            using (StreamReader file = File.OpenText(corePropsPath)) {
                JsonSerializer serializer             = new JsonSerializer();
                Dictionary <string, string> coreProps = JsonConvert.DeserializeObject <Dictionary <string, string> >(file.ReadToEnd());
                sseUri = new Uri("http://" + coreProps["address"] + "/game_event");
            }

            binner = new FrequencyBinner(bins);

            fftResults = new float[fftSize];

            sampleAggregator = new SampleAggregator(fftSize);

            // TODO: add device selection
            waveIn = new WasapiLoopbackCapture();
            waveIn.RecordingStopped += (s, e) => {
                _recording = false;
            };
            waveIn.DataAvailable += OnDataAvailable;
        }
Example #18
 /// <summary>
 /// Stop recording
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 private void OnRecordingStopped(object sender, StoppedEventArgs e)
 {
     if (this.InvokeRequired)
     {
         this.BeginInvoke(new EventHandler <StoppedEventArgs>(OnRecordingStopped), sender, e);
     }
     else
     {
          if (waveIn != null) // close the recording object
         {
             waveIn.Dispose();
             waveIn = null;
         }
          if (writer != null) // close the file writer
         {
             writer.Close();
             writer = null;
         }
         if (e.Exception != null)
         {
              MessageBox.Show(String.Format("A problem occurred: {0}",
                                           e.Exception.Message));
         }
     }
 }
Example #19
        /// <summary>Creates a new instance of LiveAudioDtmfAnalyzer.</summary>
        /// <param name="waveIn">The audio data source.</param>
        /// <param name="forceMono">Indicates whether the audio data should be converted to mono
        /// first. Default is true.</param>
        public LiveAudioDtmfAnalyzer(IWaveIn waveIn, bool forceMono = true)
        {
            this.waveIn = waveIn;
            var config = new DetectorConfig();

            dtmfAudio = DtmfAudio.CreateFrom(new StreamingSampleSource(config, Buffer(waveIn), forceMono), config);
        }
Example #20
 public void close()
 {
     if (!_online)
     {
         return;
     }
     // Close Device
     if (openDev.audioDeviceType == AudioDeviceType.Windows)
     {
         try
         {
             waveIn.StopRecording();
             waveIn.Dispose();
         }
          catch
          {
              // ignore errors while stopping the device
          }
         waveIn  = null;
         openDev = null;
     }
     else     // WASAPI Device
     {
         try
         {
             WASAPIwaveIn.StopRecording();
             WASAPIwaveIn.Dispose();
         }
          catch
          {
              // ignore errors while stopping the device
          }
         WASAPIwaveIn = null;
         openDev      = null;
     }
     _online = false;
 }
 /// <summary>
 ///
 /// </summary>
 /// <param name="owner">The audio device must be initialized on the UI thread; a reference to a UI control is required.</param>
 /// <param name="config"></param>
 /// <param name="recog_grammar"></param>
 /// <param name="recog_params"></param>
 public LocalAudioIsrEngine(IWin32Window owner, IsrConfig config, string recog_grammar, string recog_params = null)
     : base(config, recog_grammar, recog_params ?? "sub=iat,ssm=1,auf=audio/L16;rate=16000,aue=speex,ent=sms16k,rst=plain")
 {
     this.owner     = owner;
     wis            = new WaveIn(owner.Handle);
     wis.WaveFormat = new WaveFormat(16000, 16, 1);
 }
Example #22
        private void MainForm_Load(object sender, EventArgs e)
        {
            //Add fft event handler
            sampleAggregator.FftCalculated += new EventHandler <FftEventArgs>(FftCalculated);
            sampleAggregator.PerformFFT     = true;

            //Setting up the chart1
            chart1.ChartAreas[0].AxisY.Minimum               = 0;
            chart1.ChartAreas[0].AxisY.Maximum               = 0.01;
            chart1.ChartAreas[0].AxisX.LineWidth             = 0;
            chart1.ChartAreas[0].AxisY.LineWidth             = 0;
            chart1.ChartAreas[0].AxisX.LabelStyle.Enabled    = false;
            chart1.ChartAreas[0].AxisY.LabelStyle.Enabled    = false;
            chart1.ChartAreas[0].AxisX.MajorGrid.Enabled     = false;
            chart1.ChartAreas[0].AxisY.MajorGrid.Enabled     = false;
            chart1.ChartAreas[0].AxisX.MinorGrid.Enabled     = false;
            chart1.ChartAreas[0].AxisY.MinorGrid.Enabled     = false;
            chart1.ChartAreas[0].AxisX.MajorTickMark.Enabled = false;
            chart1.ChartAreas[0].AxisY.MajorTickMark.Enabled = false;
            chart1.ChartAreas[0].AxisX.MinorTickMark.Enabled = false;
            chart1.ChartAreas[0].AxisY.MinorTickMark.Enabled = false;
            chart1.Series[0].IsVisibleInLegend               = false;
            chart1.Series[0].ChartType     = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.Column;
            chart1.Series[0]["PointWidth"] = "1";

            //Set waveIn to WasapiLoopbackCapture to capture the system Audio
            waveIn = new WasapiLoopbackCapture();

            waveIn.DataAvailable += OnDataAvailable;

            waveIn.StartRecording();
        }
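The OnDataAvailable handler used above is not shown. WasapiLoopbackCapture delivers 32-bit IEEE float samples, so a minimal sketch (assuming the sampleAggregator exposes an Add(float) method, as in the NAudio FFT demos) could look like this:

        // Sketch: push every captured float sample into the aggregator driving the FFT.
        private void OnDataAvailable(object sender, WaveInEventArgs e)
        {
            for (int i = 0; i < e.BytesRecorded; i += 4)
            {
                sampleAggregator.Add(BitConverter.ToSingle(e.Buffer, i)); // channels are interleaved
            }
        }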
Example #23
 public void Stop()
 {
     _waveIn.StopRecording();
     _waveIn.Dispose();
     _waveIn = null;
     //_buffer = null;
 }
Example #24
        protected NAudioProvider(IWaveIn WaveIn)
        {
            this.WaveIn = WaveIn;

            NAudioWaveFormat = WaveIn.WaveFormat;
            WaveFormat       = WaveIn.WaveFormat.ToCaptura();
        }
Example #25
 /// <summary>
 /// Configures this player.
 /// </summary>
 public override void Configure()
 {
     // Config values
     if (this.Arguments.HasSignalDuration <= 0)
     {
         this.Arguments.HasSignalDuration = DEFAULT_HAS_SIGNAL_PERIOD;
     }
     if (this.Arguments.NoSignalDuration <= 0)
     {
         this.Arguments.NoSignalDuration = DEFAULT_NO_SIGNAL_PERIOD;
     }
     if (this.Arguments.BufferDuration <= 0)
     {
         this.Arguments.BufferDuration = DEFAULT_WAVEIN_BUFFER_TIME;
     }
     if (this.Arguments.SignalThreshold <= 0)
     {
         this.Arguments.SignalThreshold = DEFAULT_SIGNAL_THRESHOLD;
     }
     // Configure the Wave input
     this.waveIn = this.Arguments.Source;
     this.waveIn.DataAvailable    += this.waveIn_DataAvailable;
     this.waveIn.RecordingStopped += this.waveIn_RecordingStopped;
     // create wave provider
     this.waveProvider = new BufferedWaveProvider(this.waveIn.WaveFormat)
     {
         DiscardOnBufferOverflow = true,
         BufferDuration          = TimeSpan.FromMilliseconds(this.Arguments.BufferDuration)
     };
 }
Example #26
        private void Btn_start_Click(object sender, EventArgs e)
        {
            config.rows   = (int)num_rows.Value;
            config.cols   = (int)num_cols.Value;
            serial_buffer = new byte[config.cols];

            sampleAggregator = new SampleAggregator(config.fftLen);
            sampleAggregator.FftCalculated += new EventHandler <FftEventArgs>(FftCalculated);
            sampleAggregator.PerformFFT     = true;

            if (_serialPort == null)
            {
                _serialPort = new SerialPort(sel_serial.SelectedItem.ToString(), 19200, Parity.None, 8, StopBits.One);
            }

            if (waveIn != null)
            {
                waveIn.StopRecording();
            }

            if (device != null)
            {
                waveIn = new WasapiLoopbackCapture(device);
            }

            waveIn.DataAvailable += OnDataAvailable;
            waveIn.StartRecording();
        }
Example #27
        public void BeginCapture(Callback fftCallback, int deviceIndex)
        {
            if (!isRecording)
            {
                MMDevice device = GetActiveDevices()[deviceIndex];

                if (device.DataFlow == DataFlow.Render)
                {
                    waveIn = new WasapiLoopbackCapture(device);
                }
                else
                {
                    waveIn = new WasapiCapture(device);
                }

                if (waveIn.WaveFormat.SampleRate != 44100)
                {
                    MessageBox.Show("Device: " + device.DeviceFriendlyName + "\n" + "has its sample rate set to: " + waveIn.WaveFormat.SampleRate.ToString() + " Hz.\n" + "Please set it to 44100 Hz.");
                    StopRecording();
                }
                else
                {
                    waveIn.DataAvailable    += OnDataAvailable;
                    waveIn.RecordingStopped += WaveIn_RecordingStopped;

                    waveIn.StartRecording();
                    isRecording = true;
                }
            }
        }
Example #28
 /// <summary>Disconnect from the audio input device</summary>
 public void Stop()
 {
     if (this.IsRunning)
     {
         if (waveIn == null)
         {
             return;
         }
         try
         {
             waveIn.StopRecording();
             waveIn.Dispose();
         }
         catch (Exception ex)
         {
             Console.WriteLine(" -> audio.stop.error");
             Console.WriteLine(ex.ToString());
         }
         finally
         {
             waveIn = null;
         }
     }
     this.IsRunning = false;
 }
 /// <summary>
 /// Creates a new WaveInProvider
 /// n.b. Should make sure the WaveFormat is set correctly on IWaveIn before calling
 /// </summary>
 /// <param name="waveIn">The source of wave data</param>
 public JoeWaveInProvider(IWaveIn waveIn)
 {
     this.waveIn           = waveIn;
     waveIn.DataAvailable += waveIn_DataAvailable;
     bufferedWaveProvider  = new BufferedWaveProvider(this.WaveFormat);
     bufferedWaveProvider.BufferDuration = new TimeSpan(0, 0, 1);
 }
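JoeWaveInProvider presumably implements IWaveProvider; a sketch of the members the constructor above relies on, mirroring NAudio's own WaveInProvider (hypothetical bodies):

 // Hypothetical members assumed by the constructor above.
 public WaveFormat WaveFormat
 {
     get { return waveIn.WaveFormat; }
 }

 private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
 {
     // Buffer incoming data so it can be pulled back out via Read.
     bufferedWaveProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
 }

 public int Read(byte[] buffer, int offset, int count)
 {
     return bufferedWaveProvider.Read(buffer, offset, count);
 }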
Example #30
        public SoundSpectrum()
        {
            this.DoubleBuffered = true;
            maxbricks           = 40;
            // delta = new float[zero_sectors];
            y               = new float[fftLength];
            last_y          = new float[fftLength];
            catch_harmonics = new Timer()
            {
                Interval = 50
            };
            slowly_change = new Timer()
            {
                Interval = 5
            };
            slowly_change.Tick             += Slowly_change_Tick;
            catch_harmonics.Tick           += timer1_Tick;
            sampleAggregator.FftCalculated += new EventHandler <FftEventArgs>(FftCalculated);
            sampleAggregator.PerformFFT     = true;
            waveIn = new WasapiLoopbackCapture();
            waveIn.DataAvailable += OnDataAvailable;
            setdelta              = new bool[fftLength];


            this.DoubleBuffered = true;
        }
        public AudioLogic(IServiceProvider serviceProvider, LaserAnimationStatus laserAnimationStatus, AudioSettings settings)
        {
            _serviceProvider      = serviceProvider;
            _laserAnimationStatus = laserAnimationStatus;

            AudioCalibrationValue = settings.AudioCalibrationValue;

            SetTimer();
            sampleAggregator.FftCalculated += FftCalculated;
            sampleAggregator.PerformFFT     = true;
            waveIn = new WasapiLoopbackCapture();
            waveIn.DataAvailable += OnDataAvailable;

            try
            {
                var patterns = AppDomain.CurrentDomain.GetAssemblies().SelectMany(e => e.GetTypes())
                               .Where(x => typeof(ILaserPattern).IsAssignableFrom(x) && !x.IsInterface);

                foreach (var pattern in patterns)
                {
                    _patterns.Add((ILaserPattern)ActivatorUtilities.CreateInstance(_serviceProvider, pattern));
                }
            }

            catch (Exception) { /* catch windows forms not found exception */ }
        }
Example #32
 void OnRecordingStopped(object sender, EventArgs e)
 {
     IsCapturing = false;
     CloseRecording();
     captureDevice.Dispose();
     captureDevice = null;
 }
Example #33
 public void Setup(Device device)
 {
     _waveFile   = null;
     _waveSource = WaveInDeviceFactory.GetWaveInDevice(device);
     _waveSource.DataAvailable    += waveSource_DataAvailable;
     _waveSource.RecordingStopped += waveSource_RecordingStopped;
 }
Example #34
        private void recordButton_Click(object sender, EventArgs e)
        {
            try
            {
                var device = (MMDevice)audioDeviceComboBox.SelectedItem;
                if (device != null)
                {
                    device.AudioEndpointVolume.Mute = false;
                    //use wasapi by default
                    waveIn = new WasapiCapture(device);
                    waveIn.DataAvailable    += OnDataAvailable;
                    waveIn.RecordingStopped += OnRecordingStopped;

                    tempFilename = String.Format("{0}-{1:yyy-MM-dd-HH-mm-ss}.wav", MainForm.self.AllUsers.getCurrentUser().getName(), DateTime.Now);
                    //initially, outputname is the same as tempfilename
                    outputFileName = tempFilename;
                    writer         = new WaveFileWriter(Path.Combine(tempFolder, tempFilename), waveIn.WaveFormat);
                    waveIn.StartRecording();
                    SetControlStates(true);
                }
            }
            catch (Exception exp)
            {
#if DEBUG
                MessageBox.Show(exp.Message, "Warning", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
#endif
            }
        }
Example #35
        /// <summary>
        /// Dispose(bool disposing) executes in two distinct scenarios.
        /// If disposing equals true, the method has been called directly
        /// or indirectly by a user's code. Managed and unmanaged resources
        /// can be disposed.
        /// If disposing equals false, the method has been called by the
        /// runtime from inside the finalizer and you should not reference
        /// other objects. Only unmanaged resources can be disposed.
        /// </summary>
        protected virtual void Dispose(bool disposing)
        {
            // Check to see if Dispose has already been called.
            if (!this._disposed)
            {
                // Note disposing has been done.
                _disposed = true;

                // If disposing equals true, dispose all managed
                // and unmanaged resources.
                if (disposing)
                {
                    CleanUp();
                }

                // If the stream was created internally.
                if (_internalStream)
                {
                    _audioStream = null;
                }

                _writer = null;
                _waveIn = null;
            }
        }
 public void Init(string path)
 {
     durationStopwatch = new Stopwatch();
     waveIn = new WasapiLoopbackCapture();
     wri = new LameMP3FileWriter(@path + ".mp3", waveIn.WaveFormat, 32);
     waveOut = new WaveOut();
     waveOut.Init(new SilenceGenerator());
 }
Example #37
 private void EnsureDeviceIsCreated()
 {
     if (captureDevice == null)
     {
         captureDevice = new WaveIn();
         captureDevice.RecordingStopped += OnRecordingStopped;
         captureDevice.DataAvailable += OnDataAvailable;
     }
 }
Example #38
 private void Cleanup()
 {
     if (waveIn != null)
     {
         waveIn.Dispose();
         waveIn = null;
     }
     FinalizeWaveFile();
 }
Example #39
        public SoundCardRecorder(MMDevice device, string filePath, string song)
        {
            Device = device;
            FilePath = filePath;
            Song = song;

            _waveIn = new WasapiCapture(Device);
            _writer = new WaveFileWriter(FilePath, _waveIn.WaveFormat);
            _waveIn.DataAvailable += OnDataAvailable;
        }
Example #40
 private void ButtonStartCapture(object sender, RoutedEventArgs e)
 {
     mute(true);
     udpSender = new UdpClient();
     waveIn = new WasapiLoopbackCapture();       //assigning waveIn
     waveIn.DataAvailable += OnDataAvailable;
     waveIn.RecordingStopped += OnRecordingStopped;
     waveIn.StartRecording();
     udpSender.Connect(ReceiverAudioAddress);
 }
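The OnDataAvailable handler is not shown; a minimal sketch forwards each captured buffer over the UdpClient (note that in the snippet above Connect is called after StartRecording, so early buffers could arrive before the socket is connected, and large loopback buffers may need to be split into smaller datagrams):

 // Sketch: send the recorded bytes to the endpoint the UdpClient was connected to above.
 private void OnDataAvailable(object sender, WaveInEventArgs e)
 {
     udpSender.Send(e.Buffer, e.BytesRecorded);
 }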
Example #41
        public void Run()
        {
            var filename = "test.wav";
            waveIn = new WasapiLoopbackCapture();
            waveIn.DataAvailable += OnDataAvailable;
            //waveIn.RecordingStopped += waveIn_RecordingStopped;

            _writer = new WaveFileWriter(filename, waveIn.WaveFormat);

            waveIn.StartRecording();
        }
Example #42
 private void Record()
 {
     if (recorder == null)
     {
         recorder = new WasapiCaptureRT();
         recorder.RecordingStopped += RecorderOnRecordingStopped;
         recorder.DataAvailable += RecorderOnDataAvailable;
     }
     recorder.StartRecording();
     RecordCommand.IsEnabled = false;
     StopRecordingCommand.IsEnabled = true;
 }
Example #43
 private void Cleanup()
 {
     if (waveIn != null) // working around problem with double raising of RecordingStopped
     {
         waveIn.Dispose();
         waveIn = null;
     }
     if (writer != null)
     {
         writer.Close();
         writer = null;
     }
 }
Example #44
 public _soundReceive()
 {
     waveOut = new WaveOut();
     waveIn = new WasapiLoopbackCapture();                           //this is only to get proper format of recording
     try
     {
         Thrd = new Thread(this.Receiver);
         Thrd.IsBackground = true;
         Thrd.Name = "SoundReceive";
         Thrd.Start();
     }
     catch { };
 }
Example #45
        public void StartCapture(Stream stream)
        {
            // can't set WaveFormat as WASAPI doesn't support SRC
            waveIn = new WasapiLoopbackCapture();

            outputStream = stream;

            //Console.WriteLine(waveIn.WaveFormat);

            waveIn.DataAvailable += OnDataAvailable;
            waveIn.RecordingStopped += OnRecordingStopped;
            waveIn.StartRecording();
        }
Example #46
 public void Dispose()
 {
     if (_waveIn != null)
     {
         _waveIn.StopRecording();
         _waveIn.Dispose();
         _waveIn = null;
     }
     if (_writer != null)
     {
         _writer.Close();
         _writer = null;
     }
 }
Example #47
        private void OnButtonStartRecordingClick(object sender, EventArgs e)
        {
            if (radioButtonWaveIn.Checked)
                Cleanup(); // WaveIn is still unreliable in some circumstances when being reused

            if (waveIn == null)
            {
                waveIn = CreateWaveInDevice();
            }
            // Forcibly turn on the microphone (some programs (Skype) turn it off).
            var device = (MMDevice)comboWasapiDevices.SelectedItem;
            device.AudioEndpointVolume.Mute = false;

            outputFilename = String.Format("NAudioDemo {0:yyy-MM-dd HH-mm-ss}.wav", DateTime.Now);
            writer = new WaveFileWriter(Path.Combine(outputFolder, outputFilename), waveIn.WaveFormat);
            waveIn.StartRecording();
            SetControlStates(true);
        }
Example #48
        public MemoryStream GetWavSample(int seconds, string filepath)
        {
            var stream = new MemoryStream();
            _fileName = filepath;
            _waveIn = new WasapiLoopbackCapture();
            _writer = new WaveFileWriter(stream, _waveIn.WaveFormat);
            _waveIn.DataAvailable += OnDataAvailable;
            _waveIn.RecordingStopped += OnRecordingStopped;
            _waveIn.StartRecording();
            _isRecording = true;
            System.Threading.Thread.Sleep(seconds * 1000);
            _waveIn.StopRecording();

            if (filepath != "")
                using (var file = new FileStream(filepath, FileMode.Create, FileAccess.Write))
                    stream.WriteTo(file);

            return stream;
        }
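A hypothetical usage sketch (the containing class name is not shown above and is assumed here):

        // Capture five seconds of system audio, save it to disk, and inspect the in-memory copy.
        var sampler = new LoopbackSampler();                          // hypothetical class name
        using (MemoryStream wav = sampler.GetWavSample(5, "sample.wav"))
        {
            Console.WriteLine("Captured " + wav.Length + " bytes of WAV data.");
        }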
Example #49
        private void buttonStartRecording_Click(object sender, EventArgs e)
        {
            if (waveIn == null)
            {
                if(outputFilename == null)
                {
                    buttonSelectOutputFile_Click(sender, e);
                }
                if(outputFilename == null)
                {
                    return;
                }
                if (radioButtonWaveIn.Checked)
                {
                    waveIn = new WaveIn();
                    waveIn.WaveFormat = new WaveFormat(8000, 1);
                }
                else if (radioButtonWaveInEvent.Checked)
                {
                    waveIn = new WaveInEvent();
                    waveIn.WaveFormat = new WaveFormat(8000, 1);
                }
                else if (radioButtonWasapi.Checked)
                {
                    // can't set WaveFormat as WASAPI doesn't support SRC
                    var device = (MMDevice)comboWasapiDevices.SelectedItem;
                    waveIn = new WasapiCapture(device);
                }
                else
                {
                    // can't set WaveFormat as WASAPI doesn't support SRC
                    waveIn = new WasapiLoopbackCapture();
                }
                
                writer = new WaveFileWriter(outputFilename, waveIn.WaveFormat);

                waveIn.DataAvailable += new EventHandler<WaveInEventArgs>(waveIn_DataAvailable);
                waveIn.RecordingStopped += waveIn_RecordingStopped;
                waveIn.StartRecording();
                buttonStartRecording.Enabled = false;
            }
        }
Example #50
		/// <summary>
		/// Start recording
		/// </summary>
		private void btnStart_Click(object sender, EventArgs e) {
			if (waveIn == null) {
				// Start new recording
				waveIn = new WaveIn();
				waveIn.WaveFormat = new WaveFormat();
				waveIn.DataAvailable += OnDataAvailable;
				writer = new WaveFileWriter(txtFile.Text, waveIn.WaveFormat);
				waveControl1.CreateBuffer(waveIn.WaveFormat);
			}
			recording = true;
			waveIn.StartRecording();
			// Disable controls
			cbSource.Enabled = false;
			txtFile.Enabled = false;
			btnBrowse.Enabled = false;
			btnStart.Enabled = false;
			// Enable pause & stop
			btnPause.Enabled = true;
			btnStop.Enabled = true;
		}
Example #51
 void waveIn_RecordingStopped(object sender, EventArgs e)
 {
     if (this.InvokeRequired)
     {
         this.BeginInvoke(new EventHandler(waveIn_RecordingStopped), sender, e);
     }
     else
     {
         waveIn.Dispose();
         waveIn = null;
         writer.Close();
         writer = null;
         buttonStartRecording.Enabled = true;
         progressBar1.Value = 0;
         if (checkBoxAutoPlay.Checked)
         {
             Process.Start(outputFilename);
         }
     }
 }
Example #52
        //Constructor
        public Recorder()
        {
            this._Ready         = false;
            this._Recording     = false;
            this._Debug         = false;
            this.RecordSilence  = false;
            this.Paused         = false;

            this._Hertz     = 0; //auto
            this._BitRate   = 128;
          
            this._MP3Writer = null;
            this._WAVWriter = null;
            this._Capturer  = null;

            this._RecordingFormat = OutputFormat.WAV;

            this._DestinationFileName = String.Empty;
            this._DestinationRoot     = String.Empty;
        }
Example #53
  // Release the memory used by the Wave object
 private void WaveIn_RecordingStopped(object sender, StoppedEventArgs e)
 {
     waveIn.Dispose();
     waveIn = null;
 }
        private async Task TryStartRecording()
        {
            if (_recorder == null)
            {
                var microphoneIsAvailabe = await IsMicrophoneAvailable();
                if (microphoneIsAvailabe)
                {
                    _sendBuffer = new List<short>();

                    _recorder = new WasapiCaptureRT
                    {
                        WaveFormat = new WaveFormat(_samplingRate, 16, 1)
                    };
                    _recorder.DataAvailable += DataAvailableHandler;

                    _recorder.StartRecording();

                    await _dispatcher.RunAsync(CoreDispatcherPriority.Normal,
                        () => { IsMuted = false; });
                }
                else
                {
                    await _dispatcher.RunAsync(CoreDispatcherPriority.Normal,
                        () => { IsMuted = true; });
                }
            }
        }
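The DataAvailableHandler wired up above is not shown; since the recorder is configured for 16-bit mono PCM, a minimal sketch converts each pair of bytes to a short and queues it in _sendBuffer (hypothetical body):

        // Sketch: convert the 16-bit PCM buffer to samples and queue them for sending.
        private void DataAvailableHandler(object sender, WaveInEventArgs e)
        {
            for (int i = 0; i < e.BytesRecorded; i += 2)
            {
                _sendBuffer.Add(BitConverter.ToInt16(e.Buffer, i));
            }
        }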
Example #55
 public void Dispose()
 {
     if (captureDevice != null)
     {
         captureDevice.Dispose();
         captureDevice = null;
     }
 }
        private void StopRecording()
        {
            if (_recorder == null)
                return;

            _recorder.StopRecording();
            _recorder.Dispose();
            _recorder = null;
        }
        private void NStartRecordAudio(string fileName, int rate, int bits, int channel, bool isBackground)
        {
            try
            {
                // prepare wave header and wav output file
                if (isBackground)
                {
                    _waveInStream = new WaveInEvent();
                }
                else
                {
                    _waveInStream = new WaveIn();
                }

                _waveInStream.WaveFormat = new WaveFormat(rate, bits, channel);
                _waveFileWriter = new WaveFileWriter(fileName, _waveInStream.WaveFormat);

                _waveInStream.DataAvailable += WaveInStreamOnDataAvailable;
                //_waveInStream.RecordingStopped += WaveInStreamOnRecordingStopped;

                // start recording here
                _waveInStream.StartRecording();
            }
            catch (Exception e)
            {
                ErrorDialogWrapper.ShowDialog("Error during recording", "Audio record cannot be started.", e);
                throw;
            }
        }
Example #58
        // Handle the connect/disconnect button click
        private void connectBtn_Click(object sender, EventArgs e)
        {
            if (!connected)
            {
                try
                {
                    connectedPort.Open();
                }
                catch (System.IO.IOException)
                {
                    MessageBox.Show("Serial port " + connectedPort.PortName + " is not available.", "Error",
                        MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }

                if (connectedPort.IsOpen)
                {
                    connected = true;

                    waveIn = new WasapiLoopbackCapture();

                    waveIn.DataAvailable += WaveIn_DataAvailable;
                    waveIn.RecordingStopped += WaveIn_RecordingStopped;

                    waveIn.StartRecording();
                }
            }
            else
            {
                waveIn.StopRecording();

                if (connectedPort.IsOpen)
                {
                    connectedPort.Write("set rgb 0 0 0\n\r");
                    connectedPort.Close();
                }

                connected = false;
            }
        }
Example #59
 void OnRecordingStopped(object sender, EventArgs e)
 {
     IsCapturing = false;
     CloseRecording();
     captureDevice.Dispose();
     captureDevice = null;
 }
        private void NCleanup()
        {
            try
            {
                _currentLength = 0;

                if (_waveInStream != null)
                {
                    _waveInStream.Dispose();
                    _waveInStream = null;
                }

                if (_waveFileWriter != null)
                {
                    try
                    {
                        _waveFileWriter.Dispose();
                        _waveFileWriter = null;
                    }
                    catch (Exception e)
                    {
                        ErrorDialogWrapper.ShowDialog("Error when stopping", "File writing stops with error.", e);
                        // eat exception locally
                    }
                }
            }
            catch (Exception e)
            {
                ErrorDialogWrapper.ShowDialog("Error when resource releasing",
                                              "Resources cannot be released successfully.", e);
                throw;
            }
        }