Example No. 1
        public virtual void Initialize()
        {
            _wasapiCapture = new WasapiCapture
            {
                Device = _captureDevice
            };
            _wasapiCapture.Initialize();

            var soundInSource = new SoundInSource(_wasapiCapture);

            if (_triggerSingleBlockRead)
            {
                var notificationStream =
                    new SingleBlockNotificationStream(soundInSource.ChangeSampleRate(48000).ToMono().ToSampleSource());
                notificationStream.SingleBlockRead += NotificationStreamOnSingleBlockRead;
                _captureSource = notificationStream.ToWaveSource(16);
            }
            else
            {
                _captureSource = soundInSource
                                 .ChangeSampleRate(48000)
                                 .ToMono()
                                 .ToSampleSource()
                                 .ToWaveSource(16);
            }

            soundInSource.DataAvailable += SoundInSourceOnDataAvailable;
            _wasapiCapture.Start();
        }
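Example No. 1 wires up SoundInSourceOnDataAvailable without showing it. A minimal sketch of such a handler, assuming the converted _captureSource is simply drained; the ProcessAudio callback is a hypothetical placeholder, not part of the original:
        private void SoundInSourceOnDataAvailable(object sender, DataAvailableEventArgs e)
        {
            // Drain the converted source; reading here is what pulls data
            // through the conversion chain set up in Initialize().
            byte[] buffer = new byte[_captureSource.WaveFormat.BytesPerSecond / 2];
            int read;
            while ((read = _captureSource.Read(buffer, 0, buffer.Length)) > 0)
            {
                ProcessAudio(buffer, read); // hypothetical consumer of the 16-bit PCM data
            }
        }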
Example No. 2
        private void GetCapture(bool isController, string deviceId = null)
        {
            if (isController)
            {
                return;
            }

            // Tear down any previous capture before opening a new one.
            if (_soundCapture != null)
            {
                _soundCapture.Stop();
                _soundCapture.Dispose();
            }

            using (MMDeviceEnumerator enumerator = new MMDeviceEnumerator())
            {
                // Use the requested device, or fall back to the default capture endpoint.
                MMDevice device;
                if (deviceId != null)
                {
                    device = enumerator.GetDevice(deviceId);
                }
                else
                {
                    device = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, _currentCaptureRole);
                }

                _meter        = AudioMeterInformation.FromDevice(device);
                _soundCapture = new WasapiCapture(true, AudioClientShareMode.Shared, 250)
                {
                    Device = device
                };
                _soundCapture.Initialize();
                _soundCapture.Start();
            }
        }
Example No. 3
        public void SoundInToSoundOutTest_Wasapi()
        {
            for (int i = 0; i < 10; i++)
            {
                var waveIn = new WasapiCapture();
                waveIn.Initialize();
                waveIn.Start();

                var waveInToSource = new SoundInSource(waveIn)
                {
                    FillWithZeros = true
                };

                var soundOut = new WasapiOut();
                soundOut.Initialize(waveInToSource);
                soundOut.Play();

                Thread.Sleep(2000);

                Assert.AreEqual(PlaybackState.Playing, soundOut.PlaybackState);

                soundOut.Dispose();
                waveIn.Dispose();
            }
        }
Example No. 4
        // Adapted from the sample code in the CSCore GitHub repository.
        public void Initialize(FFTSize _size = FFTSize._4096)
        {
            size     = _size;
            _soundIn = new WasapiLoopbackCapture();

            _soundIn.Initialize();
            var soundInSource = new SoundInSource(_soundIn);
            var source        = soundInSource.ToSampleSource();

            _fft = new FftProvider(source.WaveFormat.Channels, (FftSize)size);

            // Feed every sample block into the FFT provider as it is read.
            var notificationStream = new SingleBlockNotificationStream(source);
            notificationStream.SingleBlockRead += (s, a) => _fft.Add(a.Left, a.Right);

            _source = notificationStream.ToWaveSource(16);

            // Drain _source so that SingleBlockRead keeps firing.
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                }
            };
            _soundIn.Start();
        }
Example No. 5
    // Use this for initialization
    void Start()
    {
        dataSize     = GetComponent<AudioCapture>().numBars;
        meshFilter   = gameObject.AddComponent<MeshFilter>();
        meshRenderer = gameObject.AddComponent<MeshRenderer>();
        mesh         = meshFilter.mesh;

        // Each data point is represented by one vertex, trailing down the
        // z axis by one unit per FFT calculation (2048 rows).
        dataQueue = new List<float>();

        // An extra row of vertices for the first row; initialize all to zero.
        verts = new Vector3[dataSize * (dataRowCount + 1)];
        for (int i = 0; i < verts.Length; i++)
        {
            verts[i] = Vector3.zero;
        }
        uvs                   = new Vector2[dataSize * (dataRowCount + 1)];
        tris                  = new int[(dataSize - 1) * (dataSize - 1) * 2 * 3 * 2];
        backgroundMat         = Resources.Load("Material/FourierMaterial") as Material;
        meshRenderer.material = backgroundMat;
        gaussianKernel        = Convoluter.GetGaussian(gaussianKernelSize, gaussianKernelStdDev, gaussianKernelAlpha);

        aCapture = gameObject.GetComponent<AudioCapture>();
        capture.Initialize();
        // Get our capture as a source.
        IWaveSource source = new SoundInSource(capture);

        frameCounter = 0;
    }
Example No. 6
        private void OpenDefault()
        {
            Stop();

            //open the default device
            _soundIn = new WasapiLoopbackCapture();
            //Our loopback capture opens the default render device by default so the following is not needed
            //_soundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };


            //play the audio
            _soundIn.Start();

            timer.Start();
        }
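Several examples below (Nos. 6, 9, 10, 14, 24) call a SetupSampleSource method that is not shown. A sketch of what it typically looks like, modeled on the CSCore WinformsVisualization sample; the BasicSpectrumProvider wiring is an assumption based on that sample, not taken from the snippets here:
        private void SetupSampleSource(ISampleSource aSampleSource)
        {
            const FftSize fftSize = FftSize.Fft4096;

            // The spectrum provider performs the FFT that feeds the visualization.
            var spectrumProvider = new BasicSpectrumProvider(
                aSampleSource.WaveFormat.Channels,
                aSampleSource.WaveFormat.SampleRate,
                fftSize);

            // The notification stream hands every sample block to the provider.
            var notificationSource = new SingleBlockNotificationStream(aSampleSource);
            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

            _source = notificationSource.ToWaveSource(16);
        }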
Example No. 7
        public void CSCoreAudioRecording()
        {
            using (capture = new WasapiLoopbackCapture())
            {
                aTimer          = new Timer();
                aTimer.Elapsed += new ElapsedEventHandler(OnTimedEvent); // stop recording once the time is up
                aTimer.Interval = 8000;                                  // recording duration in milliseconds

                // initialize the recording device
                capture.Initialize();

                using (writer = new WaveWriter("dump.wav", capture.WaveFormat))
                {
                    capture.DataAvailable += (s, e) =>
                    {
                        //save the recorded audio
                        writer.Write(e.Data, e.Offset, e.ByteCount);
                    };
                    //start recording
                    capture.Start();
                    aTimer.Enabled = true;
                    Console.WriteLine("Recording started.");
                    Console.ReadKey();
                }
                }
            }
        }
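The timer above subscribes OnTimedEvent, which is not included. A plausible sketch, assuming aTimer and capture are the fields used in CSCoreAudioRecording: stopping the capture ends the recording, and the using blocks dispose the writer and device once Console.ReadKey() returns:
        private void OnTimedEvent(object source, ElapsedEventArgs e)
        {
            // Hypothetical handler: disable the timer and stop capturing
            // once the recording interval has elapsed.
            aTimer.Enabled = false;
            capture.Stop();
            Console.WriteLine("Recording finished.");
        }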
Example No. 8
        private void button_Click(object sender, RoutedEventArgs e)
        {
            MMDevice dev = (MMDevice)comboBox.SelectedItem;

            if (mmdevicesOut.Contains(dev))
            {
                capture = new WasapiLoopbackCapture();
            }
            else
            {
                capture = new WasapiCapture();
            }
            capture.Device = dev;

            capture.Initialize();

            w = new WasapiOut();

            w.Device = (MMDevice)comboBox_Copy.SelectedItem;

            w.Initialize(new SoundInSource(capture)
            {
                FillWithZeros = true
            });

            capture.Start();
            w.Play();
        }
Example No. 9
        public Visualization()
        {
            new Thread(() =>
            {
                Stop();

                //open the default device
                _soundIn = new WasapiLoopbackCapture();
                _soundIn.Initialize();

                var soundInSource = new SoundInSource(_soundIn);

                SetupSampleSource(soundInSource.ToSampleSource());

                // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
                byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
                soundInSource.DataAvailable += (s, aEvent) =>
                {
                    int read;
                    while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        ;
                    }
                };

                _soundIn.Start(); //play the audio

                _Timer.Elapsed += new ElapsedEventHandler(GenerateEvent);
                _Timer.Start();
            }).Start();
        }
Example No. 10
        /// <summary>
        /// Initializes the visualizer and audio capture.
        /// </summary>
        protected override void Initialize()
        {
            IsMouseVisible = true;

            graphics.HardwareModeSwitch = false;

            graphics.PreferredBackBufferWidth = ViewportWidth;
            graphics.PreferredBackBufferHeight = ViewportHeight;
            //graphics.IsFullScreen = true;
            graphics.ApplyChanges();

            viewportAdapter = new BoxingViewportAdapter(Window, GraphicsDevice, ViewportWidth, ViewportHeight);
            Camera = new Camera2D(viewportAdapter);

            soundIn = new WasapiLoopbackCapture();
            //soundIn = new WasapiCapture();
            soundIn.Initialize();

            SoundInSource inSource = new SoundInSource(soundIn);
            ISampleSource sampleSource = inSource.ToSampleSource();

            SetupSampleSource(sampleSource);

            byte[] buffer = new byte[source.WaveFormat.BytesPerSecond / 2];

            inSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = source.Read(buffer, 0, buffer.Length)) > 0) ;
            };

            soundIn.Start();

            base.Initialize();
        }
Example No. 11
        private void StartCapture()
        {
            if (SelectedDevice == null)
            {
                return;
            }

            if (CaptureMode == "Capture")
            {
                _soundIn = new WasapiCapture();
            }
            else
            {
                // otherwise capture what the default render device is playing
                _soundIn = new WasapiLoopbackCapture();
            }

            _soundIn.Device = SelectedDevice;
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _finalSource = singleBlockNotificationStream.ToWaveSource();
            _writer      = new WaveWriter("tmp.wav", _finalSource.WaveFormat);

            byte[] buffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _finalSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            //singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead; // visualization

            _soundIn.Start();
        }
Example No. 12
            public SoundCapture()
            {
                // This uses the wasapi api to get any sound data played by the computer
                capture = new WasapiLoopbackCapture();

                capture.Initialize();

                // Get our capture as a source
                IWaveSource source = new SoundInSource(capture);


                // From https://github.com/filoe/cscore/blob/master/Samples/WinformsVisualization/Form1.cs

                // This is the typical size, you can change this for higher detail as needed
                fftSize = FftSize.Fft4096;

                // Actual fft data
                fftBuffer = new float[(int)fftSize];


                // Tells us when data is available to send to our spectrum
                var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());

                notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

                // Reading from finalSource is what pulls data through the chain,
                // so SingleBlockRead actually fires.
                finalSource = notificationSource.ToWaveSource();

                capture.DataAvailable += Capture_DataAvailable;
                capture.Start();
            }
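Example No. 12 relies on two handlers that are not included. A sketch of both, following the WinformsVisualization sample it credits; the spectrumProvider field is an assumption (Example No. 29 shows the full setup including it):
            private void Capture_DataAvailable(object sender, DataAvailableEventArgs e)
            {
                // Drain finalSource so the notification stream keeps firing.
                byte[] buffer = new byte[finalSource.WaveFormat.BytesPerSecond / 2];
                while (finalSource.Read(buffer, 0, buffer.Length) > 0)
                {
                }
            }

            private void NotificationSource_SingleBlockRead(object sender, SingleBlockReadEventArgs e)
            {
                // Feed each sample block to the spectrum provider (hypothetical field).
                spectrumProvider.Add(e.Left, e.Right);
            }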
Example No. 13
        public AudioCapture(int sampleRate, int sampleSize)
        {
            this.sampleRate = sampleRate;
            this.sampleSize = sampleSize;

            if (sampleSize <= 0)
            {
                throw new ArgumentException("Sample size must be > 0, instead it is " + sampleSize);
            }

            resSamples = new float[this.sampleSize];
            var enumerator = new MMDeviceEnumerator();

            // Capture audio from the default input device (DataFlow.Capture) in shared mode.
            capture        = new WasapiCapture(false, AudioClientShareMode.Shared);
            capture.Device = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Multimedia);
            capture.Initialize();
            capture.DataAvailable += Capture_DataAvailable;

            IWaveSource source = new SoundInSource(capture);


            dataSource             = new PureDataSource(new WaveFormat(sampleRate, 8, 1), source.ToSampleSource());
            dataSource.OnDataRead += DataSource_OnDataRead;

            finalSource = dataSource.ToWaveSource();

            capture.Start();
        }
Example No. 14
        /// <summary>
        /// Begin the audio input
        /// </summary>
        public static void InitAudioSource(MMDevice device)
        {
            Stop();

            // open the given device for loopback capture
            m_SoundIn = new WasapiLoopbackCapture();

            m_SoundIn.Device = device;
            m_SoundIn.Initialize();

            var           soundInSource = new SoundInSource(m_SoundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            SetupSampleSource(source);

            byte[] buffer = new byte[m_Source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = m_Source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            m_SoundIn.Start();

            MainWindow.StartTimer();
        }
Example No. 15
        public void Initialize()
        {
            if (Initialized)
            {
                return;
            }

            // get default device.
            var deviceCapture = deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eCapture, Role);
            var deviceRender  = deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eRender, Role);

            if (deviceCapture == null || deviceRender == null)
            {
                OnStateChanged?.Invoke(EMicState.InitializeFailed);
                return;
            }

            capture = new WasapiCapture(deviceCapture);                   // prepare the capture device
            render  = new WasapiRender(deviceRender, ShareMode, true, 0); // prepare the render device

            capture.Initialize();
            render.Initialize(capture.WaveProvider);

            capture.StoppedEvent += OnCaptureStopped;
            render.StoppedEvent  += OnCaptureStopped;

            Debug.WriteLine(string.Format("capture format:{0}", capture.WaveFormat));
            Debug.WriteLine(string.Format("render  format:{0}", render.WaveFormat));

            deviceEnumerator.OnDefaultDeviceChanged += DeviceChanged;

            Initialized = true;
            OnStateChanged?.Invoke(EMicState.Initialized);
        }
Example No. 16
        private void button_start_Click(object sender, EventArgs e)
        {
            wavein        = new WasapiCapture(false, AudioClientShareMode.Exclusive, 5);
            wavein.Device = inputDevices[comboBox_mic.SelectedIndex];
            wavein.Initialize();
            wavein.Start();

            source = new SoundInSource(wavein)
            {
                FillWithZeros = true
            };

            // add my special effects to the chain
            efxProcs             = new EfxProcs(source.ToSampleSource().ToMono());
            efxProcs.gain        = linearGain; // keep track of this changing value
            efxProcs.pitchFactor = pitchShift; // keep track of pitch

            waveout        = new WasapiOut(false, AudioClientShareMode.Exclusive, 5);
            waveout.Device = outputDevices[comboBox_speaker.SelectedIndex];
            waveout.Initialize(efxProcs.ToWaveSource());
            waveout.Play();

            timer1.Enabled = true;
        }
Example No. 17
    void Start()
    {
        fftData = new float[fftSize];

        persistentSamples = new FixedQueue<float>[PersSampleUpperIndex - PersSampleLowerIndex];
        smoothedSamples   = new float[persistentSamples.Length];
        for (int i = 0; i < persistentSamples.Length; i++)
        {
            persistentSamples[i] = new FixedQueue<float>(PersistenSampleLength);
        }

        line        = GetComponent<LineRenderer>();
        leftChannel = new float[TotalSamples];

        capture = new WasapiLoopbackCapture();
        capture.Initialize();
        var soundInSource = new SoundInSource(capture);
        var source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out pitchShifter);

        fft1 = new FftTransform(source.WaveFormat.Channels, fftSize);
        fft2 = new FftProvider(source.WaveFormat.Channels, FftSize.Fft2048);

        stream = new SingleBlockNotificationStream(pitchShifter);
        stream.SingleBlockRead += SingleBlockRead;

        waveSource = stream.ToWaveSource(16);
        buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

        // Subscribe via the SoundInSource only; subscribing to
        // capture.DataAvailable as well would invoke the handler twice.
        soundInSource.DataAvailable += DataAvailable;
        capture.Start();
    }
Example No. 18
        public void StartRecordingSetDevice(MMDevice recordingDevice)
        {
            if (recordingDevice == null)
            {
                MessageBox.Show(Properties.Strings.MessageBox_NoRecordingDevices);
                Console.WriteLine("No devices found.");
                return;
            }

            soundIn = new CSCore.SoundIn.WasapiLoopbackCapture
            {
                Device = recordingDevice
            };

            soundIn.Initialize();
            soundInSource = new SoundInSource(soundIn)
            {
                FillWithZeros = false
            };
            convertedSource              = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16);
            convertedSource              = convertedSource.ToStereo();
            soundInSource.DataAvailable += OnDataAvailable;
            soundIn.Start();

            var format = convertedSource.WaveFormat;

            waveFormat = NAudio.Wave.WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, format.SampleRate, format.Channels, format.BytesPerSecond, format.BlockAlign, format.BitsPerSample);
        }
Example No. 19
        static void Test()
        {
            var soundIn = new WasapiCapture();

            soundIn.Initialize();
            var soundInSource = new RealTimeSoundInSource(soundIn);

            var soundOut = new WasapiOut();

            soundOut.Initialize(soundInSource);
            soundIn.Start();
            soundOut.Play();

            // Restart playback whenever the output reports that it stopped.
            soundOut.Stopped += (s, e) =>
            {
                Console.WriteLine("I'm dead but not dead, P.P.A.P");
                Task.Run(() =>
                {
                    soundOut.Play();
                });
            };

            while (true)
            {
                Console.ReadLine();
            }
        }
Example No. 20
        protected override void OnInitializeDevice()
        {
            base.OnInitializeDevice();

            _capture = CreateCapture();
            _capture.DataAvailable += OnCaptureDataAvailable;
            _capture.Initialize();
            _capture.Start();
        }
Example No. 21
 private void comboBox_mic_SelectedIndexChanged(object sender, EventArgs e)
 {
     if (comboBox_mic.SelectedIndex != 0)
     {
         wavein.Device = inputDevices[comboBox_mic.SelectedIndex];
         wavein.Initialize();
         wavein.Start();
     }
 }
Example No. 22
        public Class1()
        {
            //create a new soundIn instance
            var soundIn = new WasapiCapture();

            //optional: set some properties
            //soundIn.Device = ...
            //...

            soundIn.Device = new DeviceService().InputDevices().First();

            //initialize the soundIn instance
            soundIn.Initialize();

            //create a SoundInSource around the soundIn instance
            //this SoundInSource will provide the data captured by the soundIn instance
            SoundInSource soundInSource = new SoundInSource(soundIn)
            {
                FillWithZeros = false
            };

            //create a source, that converts the data provided by the
            //soundInSource to any other format
            //in this case the "Fluent"-extension methods are being used
            IWaveSource convertedSource = soundInSource
                                          .ToStereo()             //2 channels (for example)
                                          .ChangeSampleRate(8000) // 8kHz sample rate
                                          .ToSampleSource()
                                          .ToWaveSource(16);      //16 bit pcm

            //register an event handler for the DataAvailable event of
            //the soundInSource
            //Important: use the DataAvailable event of the SoundInSource
            //If you use the DataAvailable event of the ISoundIn itself,
            //the data recorded by that event might not be available at the
            //soundInSource yet
            soundInSource.DataAvailable += (s, e) =>
            {
                //read data from the convertedSource
                //important: don't use the e.Data here
                //the e.Data contains the raw data provided by the
                //soundInSource which won't have your target format
                byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                int    read;

                //keep reading as long as we still get some data
                //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //your logic follows here
                    //for example: stream.Write(buffer, 0, read);
                }
            };

            //we've set everything we need -> start capturing data
            soundIn.Start();
        }
Example No. 23
        static void Main(string[] args)
        {
            MMDevice dev = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            capture        = new WasapiLoopbackCapture();
            capture.Device = dev;
            capture.Initialize();

            SoundInSource soundInSource = new SoundInSource(capture);

            nStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
            final   = nStream.ToWaveSource();
            nStream.SingleBlockRead     += NStream_SingleBlockRead;
            soundInSource.DataAvailable += encode;
            trashBuf = new byte[final.WaveFormat.BytesPerSecond / 2];

            Console.WriteLine($"sample rate:{capture.WaveFormat.SampleRate}");
            Console.WriteLine($"bits per sample:{capture.WaveFormat.BitsPerSample }");
            Console.WriteLine($"channels:{capture.WaveFormat.Channels }");
            Console.WriteLine($"bytes per sample:{capture.WaveFormat.BytesPerSample }");
            Console.WriteLine($"bytes per second:{capture.WaveFormat.BytesPerSecond }");
            Console.WriteLine($"AudioEncoding:{capture.WaveFormat.WaveFormatTag  }");


            EncodingContext context = FrameEncoder.GetDefaultsContext();

            context.Channels        = 6;
            context.SampleRate      = capture.WaveFormat.SampleRate;
            context.AudioCodingMode = AudioCodingMode.Front3Rear2;
            context.HasLfe          = true;
            context.SampleFormat    = A52SampleFormat.Float;
            enc = new FrameEncoderFloat(ref context);

            //_writer = new WaveWriter("test.ac3", final.WaveFormat);


            capture.Start();

            wBuffSrc = new WriteableBufferingSource(new WaveFormat(capture.WaveFormat.SampleRate, capture.WaveFormat.BitsPerSample, capture.WaveFormat.Channels, AudioEncoding.WAVE_FORMAT_DOLBY_AC3_SPDIF), (int)capture.WaveFormat.MillisecondsToBytes(20));

            w = new WasapiOut2(false, AudioClientShareMode.Shared, 20);

            w.Device = MMDeviceEnumerator.EnumerateDevices(DataFlow.Render, DeviceState.Active).Where(x => x.FriendlyName.Contains("Digital")).Single();
            AudioClient a = AudioClient.FromMMDevice(w.Device);

            w.Initialize(wBuffSrc);
            w.Play();


            Task.Run(async () => await encoderThread());

            Console.ReadLine();

            System.Environment.Exit(0);
        }
Example No. 24
        private void Form1_Load(object sender, EventArgs e)
        {
            Stop();
            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            _soundIn.Start();
            try
            {
                MSI = new Lighting(Allowed);
            }
            catch (MSIRGB.Lighting.Exception ex)
            {
                if (ex.GetErrorCode() == Lighting.ErrorCode.DriverLoadFailed)
                {
                    MessageBox.Show("Please run the program as administrator.");
                    Close();
                }
                else if (ex.GetErrorCode() == Lighting.ErrorCode.MotherboardModelNotSupported)
                {
                    if (MessageBox.Show("Your motherboard is not supported, but it may still work. The application will run at your own risk.", "MSI Magic Light", MessageBoxButtons.OKCancel) == DialogResult.OK)
                    {
                        Allowed = true;
                    }
                    else
                    {
                        Close();
                    }
                }
                else if (ex.GetErrorCode() == Lighting.ErrorCode.MotherboardVendorNotSupported)
                {
                    MessageBox.Show("Your motherboard is not supported at all. We are sorry :(");
                    Close();
                }
                else
                {
                    MessageBox.Show("Unknown error. Please report it on GitHub.");
                    Close();
                }
            }
        }
Example No. 25
        public static async Task RecordSample()
        {
            //create a new soundIn instance
            var soundIn = new WasapiCapture();

            //optional: set some properties
            //soundIn.Device = ...
            //...
            soundIn.Device = new DeviceService().InputDevices().First();
            soundIn.Initialize();

            var waveWriter = new WaveWriter(@"C:\Users\Cedric Lampron\Desktop\Test Record\dump.wav", soundIn.WaveFormat);

            await Task.Run(() =>
            {
                //create a SoundInSource around the soundIn instance
                //this SoundInSource will provide the data captured by the soundIn instance
                var soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };

                //create a source, that converts the data provided by the
                //soundInSource to any other format
                //in this case the "Fluent"-extension methods are being used
                IWaveSource convertedSource = soundInSource
                                              .ToStereo()             //2 channels (for example)
                                              .ChangeSampleRate(8000) // 8kHz sample rate
                                              .ToSampleSource()
                                              .ToWaveSource(16);      //16 bit pcm

                //register an event handler for the DataAvailable event of
                //the soundInSource
                //Important: use the DataAvailable event of the SoundInSource
                //If you use the DataAvailable event of the ISoundIn itself,
                //the data recorded by that event might not be available at the
                //soundInSource yet
                soundInSource.DataAvailable += (s, e) =>
                {
                    waveWriter.Write(e.Data, e.Offset, e.ByteCount);
                };

                //we've set everything we need -> start capturing data
                soundIn.Start();
            });

            await Task.Delay(5000);

            soundIn.Stop();
            waveWriter.Dispose();
            waveWriter = null;
            soundIn.Dispose();
            soundIn = null;
        }
Example No. 26
        private void StartRec(string path)
        {
            form2.Opacity = .1;

            //form2.Hide();

            if (_isRecording == false)
            {
                this.SetScreenArea();

                this.SetVisible(true);

                this._frameCount        = 0;
                this.fps                = 10;
                this.tb_SaveFolder.Text = path;
                fullName                = string.Format(@"{0}\{1}", path, DateTime.Now.ToString("MM_dd_HH_mm_ss"));
                if (checkBox2.Checked)
                {
                    fullName += "_News";
                }
                // Save File option
                _writer.Open(
                    fullName + ".avi",
                    this._width,
                    this._height,
                    fps,
                    (VideoCodec)cb_VideoCodec.SelectedValue,
                    (int)(BitRate)this.cb_BitRate.SelectedValue);

                // Start main work
                this.StartRecord();
                this._isRecording = true;
                if (checkBox1.Checked)
                {
                    // start audio capture alongside the video recording
                    capture = new CSCore.SoundIn.WasapiLoopbackCapture();
                    capture.Initialize();
                    w = new WaveWriter(fullName + ".wav", capture.WaveFormat);
                    //setup an eventhandler to receive the recorded data
                    capture.DataAvailable += (s, capData) =>
                    {
                        //save the recorded audio
                        w.Write(capData.Data, capData.Offset, capData.ByteCount);
                    };
                    //start recording
                    capture.Start();
                }
            }
        }
Example No. 27
        /// <summary>
        /// Start recording on the device in the parameter.
        /// </summary>
        /// <param name="recordingDevice">the device to start recording</param>
        /// <returns>true if the recording is started, or false</returns>
        public bool StartRecordingSetDevice(MMDevice recordingDevice)
        {
            if (recordingDevice == null)
            {
                logger.Log(Properties.Strings.MessageBox_NoRecordingDevices);
                return false;
            }

            try
            {
                soundIn = new CSCore.SoundIn.WasapiLoopbackCapture
                {
                    Device = recordingDevice
                };

                soundIn.Initialize();
                soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };
                convertedSource              = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16);
                convertedSource              = convertedSource.ToStereo();
                soundInSource.DataAvailable += OnDataAvailable;
                soundIn.Stopped             += OnRecordingStopped;
                soundIn.Start();

                var format = convertedSource.WaveFormat;
                waveFormat     = NAudio.Wave.WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, format.SampleRate, format.Channels, format.BytesPerSecond, format.BlockAlign, format.BitsPerSample);
                isRecording    = true;
                bufferCaptured = new BufferBlock()
                {
                    Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
                };
                bufferSend = new BufferBlock()
                {
                    Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
                };

                eventThread = new Thread(EventThread)
                {
                    Name         = "Loopback Event Thread",
                    IsBackground = true
                };
                eventThread.Start(new WeakReference <LoopbackRecorder>(this));

                return true;
            }
            catch (Exception ex)
            {
                logger.Log(ex, "Error initializing the recording device:");
            }

            return false;
        }
Example No. 28
 private void startRecording(string name)
 {
     capture = new WasapiLoopbackCapture();
     capture.Initialize();
     //var writer = MediaFoundationEncoder.CreateMP3Encoder(capture.WaveFormat, name + ".mp3", 320000);
     writer = new WaveWriter("./recorded/" + name + ".wav", capture.WaveFormat);
     capture.DataAvailable += (s, capData) =>
     {
         writer.Write(capData.Data, capData.Offset, capData.ByteCount);
     };
     capture.Start();
 }
Example No. 29
    void Awake()
    {
        barData = new float[numBars];
        // This uses the wasapi api to get any sound data played by the computer
        switch (audioType)
        {
        case AudioSourceType.Microphone: capture = new WasapiCapture();
            break;

        case AudioSourceType.Speakers: capture = new WasapiLoopbackCapture();
            break;
        }

        capture.Initialize();

        // Get our capture as a source
        IWaveSource source = new SoundInSource(capture);


        // From https://github.com/filoe/cscore/blob/master/Samples/WinformsVisualization/Form1.cs

        // This is the typical size, you can change this for higher detail as needed
        fftSize = FftSize.Fft4096;

        // Actual fft data
        fftBuffer = new float[(int)fftSize];

        // These are the actual classes that give you spectrum data
        // The specific vars of lineSpectrum are changed below in the editor so most of these aren't that important here
        spectrumProvider = new BasicSpectrumProvider(capture.WaveFormat.Channels,
                                                     capture.WaveFormat.SampleRate, fftSize);

        lineSpectrum = new LineSpectrum(fftSize)
        {
            SpectrumProvider = spectrumProvider,
            UseAverage       = isAverage,
            BarCount         = numBars,
            BarSpacing       = 2,
            IsXLogScale      = false,
            ScalingStrategy  = ScalingStrategy.Linear
        };

        // Tells us when data is available to send to our spectrum
        var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());

        notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

        // Reading from finalSource is what pulls data through the chain,
        // so SingleBlockRead actually fires.
        finalSource = notificationSource.ToWaveSource();

        capture.DataAvailable += Capture_DataAvailable;
        capture.Start();
    }
Example No. 30
        public static void StartRecording(string fileName, int bitRate = 192000)
        {
            capture = new WasapiLoopbackCapture();

            capture.Initialize();

            wasapiCaptureSource = new SoundInSource(capture);
            stereoSource        = wasapiCaptureSource.ToStereo();

            switch (System.IO.Path.GetExtension(fileName))
            {
            case ".mp3":
                encoderWriter = MediaFoundationEncoder.CreateMP3Encoder(stereoSource.WaveFormat, fileName, bitRate);
                writerType    = WriterType.EncoderWriter;
                break;

            case ".wma":
                encoderWriter = MediaFoundationEncoder.CreateWMAEncoder(stereoSource.WaveFormat, fileName, bitRate);
                writerType    = WriterType.EncoderWriter;
                break;

            case ".aac":
                encoderWriter = MediaFoundationEncoder.CreateAACEncoder(stereoSource.WaveFormat, fileName, bitRate);
                writerType    = WriterType.EncoderWriter;
                break;

            case ".wav":
                waveWriter = new WaveWriter(fileName, capture.WaveFormat);
                writerType = WriterType.WaveWriter;
                break;
            }

            switch (writerType)
            {
            case WriterType.EncoderWriter:
                capture.DataAvailable += (s, e) =>
                {
                    encoderWriter.Write(e.Data, e.Offset, e.ByteCount);
                };
                break;

            case WriterType.WaveWriter:
                capture.DataAvailable += (s, e) =>
                {
                    waveWriter.Write(e.Data, e.Offset, e.ByteCount);
                };
                break;
            }

            // Start recording
            capture.Start();
        }
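StartRecording above only covers setup. A matching teardown sketch, assuming the fields shown in Example No. 30; the null-conditional disposals account for only one writer being created per recording:
        public static void StopRecording()
        {
            // Stop the loopback capture first so no more DataAvailable events fire.
            capture.Stop();
            capture.Dispose();

            // Dispose whichever writer StartRecording created.
            encoderWriter?.Dispose();
            waveWriter?.Dispose();
        }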