Example #1
 // Button handler: records audio from the microphone and routes it to the output device (live playback)
 private void ListenToThisDevice_Click(object sender, EventArgs e)
 {
     if (sourceList.SelectedItems.Count == 0) return;
     int deviceNumber = sourceList.SelectedItems[0].Index;
     _sourceStream = new WaveIn();
     _sourceStream.DeviceNumber = deviceNumber;
     _sourceStream.WaveFormat = new WaveFormat(44100, WaveIn.GetCapabilities(deviceNumber).Channels);
     WaveInProvider waveIn = new WaveInProvider(_sourceStream);
     _waveOut = new DirectSoundOut();
     _waveOut.Init(waveIn);
     _sourceStream.StartRecording();
     _waveOut.Play();
 }
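
The click handler above starts monitoring but never tears it down. A minimal sketch of the matching stop path, assuming the same _sourceStream and _waveOut fields (the handler name StopListening_Click is only illustrative):

 // Stops the monitoring started in ListenToThisDevice_Click and releases both devices.
 private void StopListening_Click(object sender, EventArgs e)
 {
     _sourceStream?.StopRecording();
     _sourceStream?.Dispose();
     _sourceStream = null;

     _waveOut?.Stop();
     _waveOut?.Dispose();
     _waveOut = null;
 }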
Example #2
        // Starts live monitoring ("ses dinleme") of the selected audio device and shows the buffer count in use.
        public void editorSesDinlemeBaslat()
        {
            int deviceNumber = sesCihazComboBox.SelectedIndex;

            sourceStream = new NAudio.Wave.WaveIn();
            sourceStream.DeviceNumber = deviceNumber;
            sourceStream.WaveFormat   = new NAudio.Wave.WaveFormat(48000, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

            NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);
            waveOut = new NAudio.Wave.DirectSoundOut();
            waveOut.Init(waveIn);

            sourceStream.StartRecording();
            waveOut.Play();
            onbellekSayiLabel.Text = "Kullanılacak Önbellek Sayısı: " + sourceStream.NumberOfBuffers; // "Number of buffers to be used: ..."
        }
Example #3
        private void button1_Click(object sender, EventArgs e)
        {
            /*
             * Plays my own voice back from the default microphone
             */
            int deviceNumber = 0;

            source = new NAudio.Wave.WaveIn();
            source.DeviceNumber = deviceNumber;
            source.WaveFormat   = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

            NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(source);

            waveout = new NAudio.Wave.DirectSoundOut();
            waveout.Init(waveIn);

            source.StartRecording();
            waveout.Play();
        }
Example #4
        private static void Main()
        {
            var kernel = new NinjectKernelHandler().Kernel;
            // Resolve the loopback capture from the Ninject container and start recording what the system is playing.
            var waveIn = kernel.Get<WasapiLoopbackCapture>();
            waveIn.StartRecording();

            WriteCaptureSettings(waveIn.WaveFormat);

            // Wrap the capture device so it can be consumed as an IWaveProvider by the pitch-detection stage.
            var streamRaw = new WaveInProvider(waveIn);

            var streamPitch = new PitchGeneratorProvider(streamRaw, new FftPitchDetector(), new FftPitchDetector(), new FloatDataStereoSplitter(), new PitchResultSummaryWriter());
            var compositeLightSectionBuilder = kernel.Get<FrameBuilder>();
            var notificationClient = kernel.Get<NotificationClientBase>();
            var streamScene = new SceneGenerator(streamPitch, compositeLightSectionBuilder, notificationClient);

            var task = new ChromesthesiaTask(streamScene);

            task.Run();
        }
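
The Main() above depends on project-specific types (PitchGeneratorProvider, FrameBuilder, SceneGenerator, ChromesthesiaTask); only WasapiLoopbackCapture and WaveInProvider are NAudio types. For orientation, a stripped-down sketch of the same capture step using NAudio alone, assuming using System; and using NAudio.Wave;:

        // Minimal loopback capture without the DI container or the custom pipeline stages.
        private static void Main()
        {
            using (var capture = new WasapiLoopbackCapture())
            {
                // The loopback device records whatever is currently rendered to the default output,
                // typically as 32-bit float samples in the device mix format.
                Console.WriteLine("Capturing {0} Hz, {1} channel(s)",
                    capture.WaveFormat.SampleRate, capture.WaveFormat.Channels);

                capture.DataAvailable += (s, e) =>
                {
                    // e.Buffer holds e.BytesRecorded valid bytes; a real pipeline would hand them on here.
                };

                capture.StartRecording();
                Console.ReadLine();   // keep capturing until Enter is pressed
                capture.StopRecording();
            }
        }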
Example #5
        private void button2_Click(object sender, EventArgs e)
        {
            if (SourceList.SelectedItems.Count == 0)
            {
                return;
            }

            int deviceNumber = SourceList.SelectedItems[0].Index;

            sourceStream = new NAudio.Wave.WaveIn();
            sourceStream.DeviceNumber = deviceNumber;
            sourceStream.WaveFormat   = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

            NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);

            waveOut = new NAudio.Wave.DirectSoundOut();
            waveOut.Init(waveIn);

            sourceStream.StartRecording();
            waveOut.Play();
        }
Example #6
        /// <summary>
        /// Start audio source.
        /// </summary>
        /// 
        /// <remarks>Starts the audio source and returns execution to the caller. The audio source
        /// object creates a background thread and notifies about new frames with the
        /// help of the <see cref="DataAvailable"/> event.</remarks>
        /// 
        /// <exception cref="ArgumentException">audio source is not specified.</exception>
        /// 
        public void Start()
        {
            if (!IsRunning)
            {
                // check source

                int i = 0, selind = -1;
                for (int n = 0; n < WaveIn.DeviceCount; n++)
                {
                    if (WaveIn.GetCapabilities(n).ProductName == _source)
                        selind = i;
                    i++;
                }
                if (selind == -1)
                {
                    //device no longer connected or not configured
                    if (i > 0)
                        selind = 0;
                    else
                    {
                        //if (AudioSourceError != null)
                        //    AudioSourceError(this, new AudioSourceErrorEventArgs("not connected"));
                        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
                        return;
                    }
                }

                _waveIn = new WaveInEvent { BufferMilliseconds = 200, DeviceNumber = selind, WaveFormat = RecordingFormat };
                _waveIn.DataAvailable += WaveInDataAvailable;
                _waveIn.RecordingStopped += WaveInRecordingStopped;

                _waveProvider = new WaveInProvider(_waveIn);
                _sampleChannel = new SampleChannel(_waveProvider);
                
                if (LevelChanged != null)
                {
                    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                }
                _waveIn.StartRecording();

            }
        }
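
The Start() method above wires DataAvailable, RecordingStopped and the volume meter to handlers that are not part of the snippet (the same pattern reappears in examples #7 and #10). For reference, a standalone sketch of the underlying WaveInEvent pattern that writes the captured buffers to a WAV file, assuming using NAudio.Wave; (device number, format and file name are placeholders):

        // Self-contained capture sketch: every DataAvailable callback delivers one buffer of raw PCM bytes.
        private static void RecordToFile()
        {
            var waveIn = new WaveInEvent
            {
                BufferMilliseconds = 200,
                DeviceNumber = 0,
                WaveFormat = new WaveFormat(44100, 1)
            };
            var writer = new WaveFileWriter("capture.wav", waveIn.WaveFormat);

            waveIn.DataAvailable += (s, e) =>
            {
                // Only the first e.BytesRecorded bytes of e.Buffer are valid for this callback.
                writer.Write(e.Buffer, 0, e.BytesRecorded);
            };
            waveIn.RecordingStopped += (s, e) =>
            {
                writer.Dispose();
                waveIn.Dispose();
            };

            waveIn.StartRecording();
            // Calling waveIn.StopRecording() later flushes and closes the file via RecordingStopped.
        }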
Example #7
        /// <summary>
        /// Start audio source.
        /// </summary>
        /// 
        /// <remarks>Starts the audio source and returns execution to the caller. The audio source
        /// object creates a background thread and notifies about new frames with the
        /// help of the <see cref="DataAvailable"/> event.</remarks>
        /// 
        /// <exception cref="ArgumentException">audio source is not specified.</exception>
        /// 
        public void Start()
        {
            if (string.IsNullOrEmpty(_source))
                throw new ArgumentException("Audio source is not specified.");

            if (_started) return;

            // check source
            lock (_lock)
            {
                if (_started)
                    return;

                int i = 0, selind = -1;
                for (var n = 0; n < WaveIn.DeviceCount; n++)
                {
                    if (WaveIn.GetCapabilities(n).ProductName == _source)
                        selind = i;
                    i++;
                }
                if (selind == -1)
                {
                    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
                    return;
                }

                _started = true;
                _waveIn = new WaveInEvent
                          {
                              BufferMilliseconds = 200,
                              DeviceNumber = selind,
                              WaveFormat = RecordingFormat
                          };
                _waveIn.DataAvailable += WaveInDataAvailable;
                _waveIn.RecordingStopped += WaveInRecordingStopped;

                _waveProvider = new WaveInProvider(_waveIn);
                _sampleChannel = new SampleChannel(_waveProvider);
                _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                _waveIn.StartRecording();
            }
        }
Example #8
        public void Enable()
        {
            _processing = true;
            _sampleRate = Micobject.settings.samples;
            _bitsPerSample = Micobject.settings.bits;
            _channels = Micobject.settings.channels;

            RecordingFormat = new WaveFormat(_sampleRate, _bitsPerSample, _channels);

            //local device
            int i = 0, selind = -1;
            for (int n = 0; n < WaveIn.DeviceCount; n++)
            {
                if (WaveIn.GetCapabilities(n).ProductName == Micobject.settings.sourcename)
                    selind = i;
                i++;
            }
            if (selind == -1)
            {
                //device no longer connected
                Micobject.settings.active = false;
                NoSource = true;
                _processing = false;
                return;
            }

            _waveIn = new WaveIn { BufferMilliseconds = 40, DeviceNumber = selind, WaveFormat = RecordingFormat };
            _waveIn.DataAvailable += WaveInDataAvailable;
            _waveIn.RecordingStopped += WaveInRecordingStopped;

            _waveProvider = new WaveInProvider(_waveIn);
            _sampleChannel = new SampleChannel(_waveProvider);

            _meteringProvider = new MeteringSampleProvider(_sampleChannel);
            _meteringProvider.StreamVolume += _meteringProvider_StreamVolume;

            try
            {
                _waveIn.StartRecording();
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex);
                MessageBox.Show(LocRM.GetString("AudioMonitoringError") + ": " + ex.Message, LocRM.GetString("Error"));
                _processing = false;
                return;
            }

            NoSource = false;
            Micobject.settings.active = true;

            MainForm.NeedsSync = true;
            Invalidate();
            _processing = false;
        }
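
Enable() above attaches a MeteringSampleProvider, but the StreamVolume handler is not part of the snippet. A plausible sketch of such a handler, assuming using System; and using NAudio.Wave.SampleProviders; (namespaces can differ slightly between NAudio versions, and the _audioLevel field is only illustrative):

        // Illustrative level field; a real monitor would drive a VU meter or sound-detection logic.
        private int _audioLevel;

        // StreamVolume fires once per block read; MaxSampleValues holds one peak per channel in the 0.0 - 1.0 range.
        private void _meteringProvider_StreamVolume(object sender, StreamVolumeEventArgs e)
        {
            float peak = 0f;
            foreach (float channelPeak in e.MaxSampleValues)
                peak = Math.Max(peak, channelPeak);

            _audioLevel = (int)(peak * 100);
        }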
Example #9
 /// <summary>
 /// Routes the captured bytes directly to the output device
 /// </summary>
 public static void BindingTranslation()
 {
     StopTranslation();
     var provider = new WaveInProvider(WaveIn);
     WaveOut.Init(provider);
     WaveIn.StartRecording();
     WaveOut.Play();
     Support.Logger.Info("Self-translation created");
 }
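
BindingTranslation() calls StopTranslation() before rewiring, but that method lies outside the snippet. A minimal sketch of what it could look like, assuming the same static WaveIn and WaveOut members and the same logger:

 /// <summary>
 /// Plausible counterpart to BindingTranslation(): stops the capture and the playback it was feeding.
 /// </summary>
 public static void StopTranslation()
 {
     WaveIn?.StopRecording();
     WaveOut?.Stop();
     Support.Logger.Info("Self-translation stopped");
 }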
Example #10
        /// <summary>
        /// Start audio source.
        /// </summary>
        /// 
        /// <remarks>Starts the audio source and returns execution to the caller. The audio source
        /// object creates a background thread and notifies about new frames with the
        /// help of the <see cref="DataAvailable"/> event.</remarks>
        /// 
        /// <exception cref="ArgumentException">audio source is not specified.</exception>
        /// 
        public void Start()
        {
            if (!IsRunning)
            {
                // check source
                if (string.IsNullOrEmpty(_source))
                    throw new ArgumentException("Audio source is not specified.");

                int i = 0, selind = -1;
                for (int n = 0; n < WaveIn.DeviceCount; n++)
                {
                    if (WaveIn.GetCapabilities(n).ProductName == _source)
                        selind = i;
                    i++;
                }
                if (selind == -1)
                {
                    //device no longer connected
                    if (AudioSourceError != null)
                        AudioSourceError(this, new AudioSourceErrorEventArgs("not connected"));
                    return;
                }

                _waveIn = new WaveIn {BufferMilliseconds = 200, DeviceNumber = selind, WaveFormat = RecordingFormat};
                _waveIn.DataAvailable += WaveInDataAvailable;
                _waveIn.RecordingStopped += WaveInRecordingStopped;

                _waveProvider = new WaveInProvider(_waveIn);
                _sampleChannel = new SampleChannel(_waveProvider);
                _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                _waveIn.StartRecording();

            }
        }
Example #11
        // NAudio.Wave.WaveStream stream = null;
        private void button2_Click(object sender, EventArgs e)
        {
            if (listView1.SelectedItems.Count == 0) return;

            int deviceNumber = listView1.SelectedItems[0].Index;

            //waveOut = new NAudio.Wave.WaveOut();
            sourceStream = new NAudio.Wave.WaveIn();
            sourceStream.DeviceNumber = deviceNumber;
            sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(48000, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

            sourceStream.DataAvailable += new EventHandler<WaveInEventArgs>(sourceStream_DataAvailable);
            //waveWriter = new NAudio.Wave.WaveFileWriter(save.FileName, sourceStream.WaveFormat);

            sourceStream.BufferMilliseconds = 100;
            //wavebuffer = new NAudio.Wave.WaveBuffer();
            //bwp = new NAudio.Wave.BufferedWaveProvider(sourceStream.WaveFormat);
            //bwp.DiscardOnBufferOverflow = true;

            NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);
            waveOut = new NAudio.Wave.DirectSoundOut();
            waveOut.Init(waveIn);

            sourceStream.StartRecording();
            //waveOut.Init(bwp);
            waveOut.Play();
            //sourceStream.StopRecording();
            //Start(sender, e);
            timer1.Enabled = true;

            ++count;
        }
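
The commented-out lines in this handler hint at an alternative wiring that pushes the captured bytes into a BufferedWaveProvider instead of wrapping the device in a WaveInProvider. A sketch of that variant under the same field names (StartBufferedMonitoring is only an illustrative name):

        // Buffered variant: DataAvailable feeds a BufferedWaveProvider and DirectSoundOut drains it.
        private NAudio.Wave.BufferedWaveProvider bwp;

        private void StartBufferedMonitoring(int deviceNumber)
        {
            sourceStream = new NAudio.Wave.WaveIn();
            sourceStream.DeviceNumber = deviceNumber;
            sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(48000, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
            sourceStream.BufferMilliseconds = 100;

            bwp = new NAudio.Wave.BufferedWaveProvider(sourceStream.WaveFormat);
            bwp.DiscardOnBufferOverflow = true; // drop audio instead of throwing if playback falls behind

            sourceStream.DataAvailable += (s, e) => bwp.AddSamples(e.Buffer, 0, e.BytesRecorded);

            waveOut = new NAudio.Wave.DirectSoundOut();
            waveOut.Init(bwp);

            sourceStream.StartRecording();
            waveOut.Play();
        }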
Example #12
        public MainWindow(string[] args)
        {
            InitializeComponent();

            sliderMinFreq.Maximum = sliderMaxFreq.Value = sliderMaxFreq.Maximum = SampleRate;

            if (args.Length >= 1)
            {
                int.TryParse(args[0], out _inputDeviceNo);
            }
            if (args.Length >= 2)
            {
                int.TryParse(args[1], out _outputDeviceNo);
            }

            /* Input waveform */
            IWaveProvider inputWaveform;

            if (args.Length >= 3) /* TODO: button for this */
            {
                _waveInFileStream = new WaveFileReader(args[2]);
                if (_waveInFileStream.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
                {
                    _waveInFileStream = WaveFormatConversionStream.CreatePcmStream(_waveInFileStream);
                    _waveInFileStream = new BlockAlignReductionStream(_waveInFileStream);
                }

                if (_waveInFileStream.WaveFormat.BitsPerSample != 16)
                {
                    var format = new WaveFormat(_waveInFileStream.WaveFormat.SampleRate, 16, _waveInFileStream.WaveFormat.Channels);
                    _waveInFileStream = new WaveFormatConversionStream(format, _waveInFileStream);
                }
                inputWaveform = new LoopingStream(new WaveChannel32(_waveInFileStream));
            }
            else
            {
                _waveInDevice = new WaveIn
                {
                    DeviceNumber = _inputDeviceNo,
                    WaveFormat = new WaveFormat(SampleRate, 16, 1)
                };
                _waveInDevice.StartRecording();

                inputWaveform = new WaveInProvider(_waveInDevice);
            }

            /* Input processing pipeline */
            ISampleProvider sampleChannel = new SampleChannel(inputWaveform); //TODO: when we're no longer using the recorder, this can stay a wave provider, I think
            _bandPassProvider = new BandPass2(sampleChannel);
            NotifyingSampleProvider sampleStream = new NotifyingSampleProvider(_bandPassProvider);
            _polygonSpectrumControl.bp2 = _bandPassProvider;

            /* Output */
            _waveOutDevice = new WaveOut
            {
                DeviceNumber = _outputDeviceNo,
                Volume = 1
            };
            _waveOutDevice.Init(new SampleToWaveProvider(sampleStream));
            _waveOutDevice.Play();

            /* UI Events */
            sliderMinFreq.ValueChanged += SliderMinFreqValueChanged;
            sliderMaxFreq.ValueChanged += SliderMaxFreqValueChanged;
            textBoxPistonDiameter.TextChanged += textBoxPistonDiameter_TextChanged;
            textBoxEpsilon.TextChanged += textBoxEpsilon_TextChanged;

            UpdateShit();
        }
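
BandPass2 and LoopingStream above are project-specific classes; only the NAudio types are standard. To show how a custom stage slots into the ISampleProvider chain, here is a minimal illustrative stage (GainStage is not part of the project) that just scales samples, assuming a using NAudio.Wave; directive; a real filter such as the band-pass would do its DSP inside Read in the same way:

    // Minimal custom ISampleProvider: pulls floats from the upstream stage and applies a gain.
    public class GainStage : ISampleProvider
    {
        private readonly ISampleProvider _source;

        public GainStage(ISampleProvider source, float gain)
        {
            _source = source;
            Gain = gain;
        }

        public float Gain { get; set; }

        // The stage does not change the format, so it exposes the upstream format unchanged.
        public WaveFormat WaveFormat => _source.WaveFormat;

        public int Read(float[] buffer, int offset, int count)
        {
            int read = _source.Read(buffer, offset, count);
            for (int i = 0; i < read; i++)
                buffer[offset + i] *= Gain;
            return read;
        }
    }

Chained the same way as BandPass2, it would sit between the SampleChannel and the NotifyingSampleProvider, e.g. new NotifyingSampleProvider(new GainStage(sampleChannel, 0.5f)).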