Audio capture using WASAPI. See http://msdn.microsoft.com/en-us/library/dd370800%28VS.85%29.aspx
Inheritance: IWaveIn

Example #1
 /// <summary>
 /// Creates a new stereo waveprovider based on a quad input
 /// </summary>
 /// <param name="sourceProvider">Quad 32 bit PCM input</param>
 public QuadToStereoStream32(WasapiCapture sourceProvider)
 {
     //if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
     //{
     //    throw new ArgumentException("Source must be PCM");
     //}
     if (sourceProvider.WaveFormat.Channels != 4)
     {
         throw new ArgumentException("Source must be quad");
     }
     if (sourceProvider.WaveFormat.BitsPerSample != 32)
     {
         throw new ArgumentException("Source must be 32 bit");
     }
     this.FSource = sourceProvider;
     FOutputFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 32, 2);
 }
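The conversion logic of the original class is not shown above. Below is a minimal sketch (not from the original source) of how a quad-to-stereo downmix of a raw 32-bit capture buffer could look; the helper name ConvertQuadToStereo and the policy of simply keeping channels 1 and 2 are assumptions.

 // Sketch only: keeps the first two channels of every 4-channel, 32-bit frame.
 private static byte[] ConvertQuadToStereo(byte[] quadBuffer, int bytesRecorded)
 {
     const int bytesPerSample = 4;                   // 32-bit samples
     const int quadFrameSize = 4 * bytesPerSample;   // 16 bytes per quad frame
     const int stereoFrameSize = 2 * bytesPerSample; // 8 bytes per stereo frame

     int frames = bytesRecorded / quadFrameSize;
     var stereoBuffer = new byte[frames * stereoFrameSize];
     for (int frame = 0; frame < frames; frame++)
     {
         Buffer.BlockCopy(quadBuffer, frame * quadFrameSize,
                          stereoBuffer, frame * stereoFrameSize, stereoFrameSize);
     }
     return stereoBuffer;
 }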
Example #2
 public void CanCaptureDefaultDeviceInDefaultFormatUsingWasapiCapture()
 {
     using (var wasapiClient = new WasapiCapture())
     {
         wasapiClient.StartRecording();
         Thread.Sleep(1000);
         wasapiClient.StopRecording();
     }
 }
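The test above only checks that starting and stopping does not throw; nothing is subscribed to DataAvailable, so no audio is actually consumed. A sketch of the same test extended to count captured bytes (the test name and the final write are assumptions):

 public void CanCaptureSomeBytesFromDefaultDevice()
 {
     long bytesCaptured = 0;
     using (var wasapiClient = new WasapiCapture())
     {
         wasapiClient.DataAvailable += (s, e) => bytesCaptured += e.BytesRecorded;
         wasapiClient.StartRecording();
         Thread.Sleep(1000);
         wasapiClient.StopRecording();
     }
     // with an active default capture device, some data should have arrived
     Console.WriteLine("Captured {0} bytes", bytesCaptured);
 }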
Example #3
        private WasapiCapture GetCaptureDevice()
        {
            var device  = lineInBox.SelectedItem as MMDevice;
            var capture = new NAudio.CoreAudioApi.WasapiCapture(device);

            capture.ShareMode  = AudioClientShareMode.Shared;
            capture.WaveFormat = new WaveFormat();
            return capture;
        }
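Note that new WaveFormat() defaults to 44.1 kHz, 16-bit, stereo, which the endpoint may reject when StartRecording is called. A sketch of probing the endpoint first via AudioClient.IsFormatSupported (the helper name EndpointSupportsFormat is assumed):

        // Sketch: check whether the endpoint accepts a format before assigning it.
        private static bool EndpointSupportsFormat(MMDevice device, WaveFormat format, AudioClientShareMode mode)
        {
            WaveFormatExtensible closestMatch;
            return device.AudioClient.IsFormatSupported(mode, format, out closestMatch);
        }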
Example #4

    public RealTimeSoundData() {

      var enumerator = new MMDeviceEnumerator();
      var captureDevices = enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active).ToArray();
      var defaultDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
      device = captureDevices.FirstOrDefault(c => c.ID == defaultDevice.ID);
      capture = new WasapiCapture(device);
      context = SynchronizationContext.Current;
      capture.DataAvailable += Capture_DataAvailable;
    }
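The Capture_DataAvailable handler referenced here is not part of the snippet. One possible sketch, which copies the buffer and posts it back through the captured SynchronizationContext (the OnSamples callback is hypothetical):

      private void Capture_DataAvailable(object sender, WaveInEventArgs e)
      {
          // copy the data out, since NAudio reuses e.Buffer for later callbacks
          var samples = new byte[e.BytesRecorded];
          Buffer.BlockCopy(e.Buffer, 0, samples, 0, e.BytesRecorded);

          // marshal back to the thread that created this instance
          // (context can be null when constructed off the UI thread)
          if (context != null)
              context.Post(_ => OnSamples(samples), null);
          else
              OnSamples(samples);
      }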
Example #5
 public void Start()
 {
     if (_isStarting) return;
     _isStarting = true;
     ResetSampleAggregator();
     _waveIn = new WasapiCapture(Device);
     _waveIn.DataAvailable += OnDataAvailable;
     //_waveIn.RecordingStopped += new EventHandler<StoppedEventArgs>(OnRecordingStopped)
     _waveIn.StartRecording();
 }
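A matching Stop is not shown in this example; a sketch, assuming _waveIn and _isStarting are the only fields involved:

 public void Stop()
 {
     if (_waveIn == null) return;
     _waveIn.DataAvailable -= OnDataAvailable;
     _waveIn.StopRecording();
     _waveIn.Dispose();
     _waveIn = null;
     _isStarting = false;
 }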
Example #6
        public AudioRecorder(ISettings settings, MMDevice device, Song song)
        {
            this._settings = settings;

            this.Song = song;

            this._capture = new WasapiCapture(device);
            this._capture.DataAvailable += this.CaptureOnDataAvailable;

            this._fileName = Path.GetTempFileName();
            this._writer = new WaveFileWriter(this._fileName, this._capture.WaveFormat);
        }
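The CaptureOnDataAvailable handler and any stop logic are not included above. A sketch of both, assuming the recording simply streams into the temp-file WaveFileWriter created in the constructor:

        private void CaptureOnDataAvailable(object sender, WaveInEventArgs e)
        {
            // append each captured block to the temp file
            this._writer.Write(e.Buffer, 0, e.BytesRecorded);
        }

        public void Stop()
        {
            this._capture.StopRecording();
            this._writer.Dispose();
            this._capture.Dispose();
        }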
Example #7
        /// <summary>
        /// Creates a new microphone.
        /// </summary>
        public Microphone()
        {
            _measurements = new List<Byte[]>();

            try
            {
                _wasapiCapture = new WasapiCapture();
                _wasapiCapture.DataAvailable += OnDataAvailable;
            }
            catch
            { }

            _measuring = false;
        }
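The OnDataAvailable handler is not shown. A sketch that buffers a copy of each block while _measuring is set (the field usage is inferred from the constructor above):

        private void OnDataAvailable(object sender, WaveInEventArgs e)
        {
            if (!_measuring) return;

            // store a copy; NAudio reuses e.Buffer for subsequent callbacks
            var block = new byte[e.BytesRecorded];
            Buffer.BlockCopy(e.Buffer, 0, block, 0, e.BytesRecorded);
            _measurements.Add(block);
        }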
Example #8

 private void Record()
 {
     try
     {
         capture = new WasapiCapture(SelectedDevice);
         capture.ShareMode = ShareModeIndex == 0 ? AudioClientShareMode.Shared : AudioClientShareMode.Exclusive;
         capture.WaveFormat =
             SampleTypeIndex == 0 ? WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channelCount) :
             new WaveFormat(sampleRate, bitDepth, channelCount);
         currentFileName = String.Format("NAudioDemo {0:yyyy-MM-dd HH-mm-ss}.wav", DateTime.Now);
         RecordLevel = SelectedDevice.AudioEndpointVolume.MasterVolumeLevelScalar;
         // subscribe before starting so the first buffers are not missed
         capture.RecordingStopped += OnRecordingStopped;
         capture.DataAvailable += CaptureOnDataAvailable;
         capture.StartRecording();
         RecordCommand.IsEnabled = false;
         StopCommand.IsEnabled = true;
         Message = "Recording...";
     }
     catch (Exception e)
     {
         MessageBox.Show(e.Message);
     }
 }
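The CaptureOnDataAvailable handler used here is not part of the snippet (the matching OnRecordingStopped appears in Example #23). A sketch, assuming a writer field of type WaveFileWriter that is created lazily from currentFileName:

 private void CaptureOnDataAvailable(object sender, WaveInEventArgs e)
 {
     if (writer == null)
     {
         writer = new WaveFileWriter(currentFileName, capture.WaveFormat);
     }
     writer.Write(e.Buffer, 0, e.BytesRecorded);
 }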
Example #9
        static void WaveInExample()
        {
            var waveIn = new WasapiCapture();
            waveIn.WaveFormat = new WaveFormat(44100, 8, 2);

            Console.WriteLine("Device format: " + waveIn.WaveFormat.ToString());
            FDecoder = new LTCSharp.Decoder(waveIn.WaveFormat.SampleRate, 25, 32);
            waveIn.DataAvailable += waveIn_DataAvailable;
            waveIn.StartRecording();

            Stopwatch timer = new Stopwatch();
            timer.Start();
            while (true) //timer.Elapsed < new TimeSpan(0, 0, 60))
            {
                lock (FDecoder)
                {
                    if (FDecoder.GetQueueLength() > 0)
                    {
                        try
                        {
                            var frame = FDecoder.Read();
                            var timecode = frame.getTimecode();
                            Console.WriteLine(timecode.ToString());
                        }
                        catch (Exception e)
                        {
                            Console.Write(e);
                        }
                        continue;
                    }
                }
                // sleep outside the lock so waveIn_DataAvailable can keep feeding the decoder
                Thread.Sleep(10);
            }
            timer.Stop();
        }
Example #10
 public WasapiCapture() : this(WasapiCapture.GetDefaultCaptureDevice())
 {
 }
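The parameterless constructor above chains to GetDefaultCaptureDevice; in current NAudio versions that helper is roughly equivalent to asking the device enumerator for the default console capture endpoint:

 public static MMDevice GetDefaultCaptureDevice()
 {
     var devices = new MMDeviceEnumerator();
     return devices.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
 }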
Example #11
        /// <summary>
        /// Adds a device to the monitoring list
        /// </summary>
        /// <param name="device">Device to add</param>
        public void Add(MMDevice device)
        {
            if (Devices.Contains(device)) return;

            if (device.DataFlow == DataFlow.Capture)
            {
                WasapiCapture deviceCapture = new WasapiCapture(device);
                deviceCapture.StartRecording();
                deviceCaptures.Add(device, deviceCapture);
            }

            timer.Enabled = true;

            Devices.Add(device);
        }
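Only Add is shown; a sketch of the matching Remove, assuming deviceCaptures is a Dictionary<MMDevice, WasapiCapture> as implied by the Add call above:

        /// <summary>
        /// Removes a device from the monitoring list (sketch; counterpart to Add)
        /// </summary>
        /// <param name="device">Device to remove</param>
        public void Remove(MMDevice device)
        {
            if (!Devices.Contains(device)) return;

            WasapiCapture deviceCapture;
            if (deviceCaptures.TryGetValue(device, out deviceCapture))
            {
                deviceCapture.StopRecording();
                deviceCapture.Dispose();
                deviceCaptures.Remove(device);
            }

            Devices.Remove(device);
            timer.Enabled = Devices.Count > 0;
        }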
Example #12
 private void GetDefaultRecordingFormat(MMDevice value)
 {
     using (var c = new WasapiCapture(value))
     {
         SampleTypeIndex = c.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat ? 0 : 1;
         SampleRate = c.WaveFormat.SampleRate;
         BitDepth = c.WaveFormat.BitsPerSample;
         ChannelCount = c.WaveFormat.Channels;
         Message = "";
     }
 }
Example #13
        /*
         * Retrieves the available microphone input devices
         */
        private void CheckDevice()
        {
            DevicesListBox.ItemsSource = CaptureDevices;

            using (var c = new WasapiCapture((MMDevice)CaptureDevices.First()))
            {
                SampleTypeIndex = c.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat ? 0 : 1;
                SampleRate = c.WaveFormat.SampleRate;
                BitDepth = c.WaveFormat.BitsPerSample;
                ChannelCount = c.WaveFormat.Channels;
            }
        }
Example #14
        private void StartRecording(bool shouldTryUseExclusive = true)
        {
            if (waveIn != null)
                StopRecording();
            if (SelectedInputSource != null)
            {
                this.codec = SelectedCodec.Codec;

                var deviceFormat = WaveFormat.CreateIeeeFloatWaveFormat(codec.RecordFormat.SampleRate, codec.RecordFormat.Channels);
                bool canUseExclusive = false;

                if (SelectedInputSource.Provider == DeviceProvider.Wasapi)
                {
                    var mmdevice = SelectedInputSource.MMDevice;

                    WaveFormatExtensible bestMatch;
                    canUseExclusive = mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Exclusive, deviceFormat, out bestMatch);
                    if (canUseExclusive && shouldTryUseExclusive)
                    {
                        if (bestMatch != null)
                            deviceFormat = bestMatch;
                    }
                    else
                    {
                        mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Shared, deviceFormat, out bestMatch);
                        if (bestMatch != null)
                            deviceFormat = bestMatch;
                    }

                    if (deviceFormat.Encoding != WaveFormatEncoding.IeeeFloat && deviceFormat.BitsPerSample != 16)
                    {
                        deviceFormat = mmdevice.AudioClient.MixFormat;

                        if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Exclusive, codec.RecordFormat))
                        {
                            canUseExclusive = true;
                            deviceFormat = codec.RecordFormat;
                        }
                        else if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Shared, codec.RecordFormat))
                        {
                            canUseExclusive = false;
                            deviceFormat = codec.RecordFormat;
                        }
                        else
                        {
                            WaveFormat newFormat;
                            WaveFormat altWaveFormat = new WaveFormat(deviceFormat.SampleRate, 16, deviceFormat.Channels);
                            WaveFormat altFloatFormat = WaveFormat.CreateIeeeFloatWaveFormat(mmdevice.AudioClient.MixFormat.SampleRate, mmdevice.AudioClient.MixFormat.Channels);

                            if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Exclusive, altFloatFormat))
                            {
                                canUseExclusive = true;
                                newFormat = altFloatFormat;
                            }
                            else if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Exclusive, altWaveFormat))
                            {
                                canUseExclusive = true;
                                newFormat = altWaveFormat;
                            }
                            else if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Shared, altFloatFormat))
                            {
                                canUseExclusive = false;
                                newFormat = altFloatFormat;
                            }
                            else if (mmdevice.AudioClient.IsFormatSupported(AudioClientShareMode.Shared, altWaveFormat))
                            {
                                canUseExclusive = false;
                                newFormat = altWaveFormat;
                            }
                            else
                                throw new Exception("Device does not support 16bit PCM, or device is in use");

                            deviceFormat = newFormat;

                            Console.WriteLine("Initializing Wasapi\n  Device: {0}\n  Format: {1}\n  Mode: {2}\n  Resampling: {3}",
                                mmdevice.FriendlyName,
                                deviceFormat,
                                canUseExclusive ? "Exclusive" : "Shared",
                                deviceFormat.Equals(codec.RecordFormat) ? "NO" : "YES");
                        }
                    }

                    AudioClientShareMode shareMode;
                    if (canUseExclusive && shouldTryUseExclusive)
                        shareMode = AudioClientShareMode.Exclusive;
                    else
                        shareMode = AudioClientShareMode.Shared;

                    Guid audioSessionGuid = Guid.NewGuid();
                    try
                    {
                        mmdevice.AudioClient.Reset();
                    }
                    catch { }

                    BufferTargetMs = Math.Max(BufferTargetMs, mmdevice.MinBufferDurationMs);
                    var w = new WasapiCapture(mmdevice, BufferTargetMs);
                    w.RecordingStopped += wasapi_RecordingStopped;
                    waveIn = w;
                    waveIn.WaveFormat = deviceFormat;
                    w.ShareMode = shareMode;

                    LevelManager = new AudioInLevelManager(w, mmdevice);
                }
                else
                {
                    Console.WriteLine("Initializing WaveIn{0}. Buffer:{1}ms Device:{2} Format:{3}", UseWaveEvent ? "Event" : "", BufferTargetMs, SelectedInputSource.WavDeviceNumber, deviceFormat);
                    if (UseWaveEvent)
                    {
                        var w = new WaveInEvent();
                        w.BufferMilliseconds = BufferTargetMs;
                        w.DeviceNumber = SelectedInputSource.WavDeviceNumber;
                        LevelManager = new AudioInLevelManager(w);
                        waveIn = w;
                    }
                    else
                    {
                        var w = new WaveIn();
                        w.BufferMilliseconds = BufferTargetMs;
                        w.DeviceNumber = SelectedInputSource.WavDeviceNumber;
                        LevelManager = new AudioInLevelManager(w);
                        waveIn = w;
                    }
                    waveIn.WaveFormat = deviceFormat;
                    canUseExclusive = false;
                }

                waveIn.DataAvailable += waveIn_DataAvailable;
                waveIn.RecordingStopped += waveIn_RecordingStopped;

                try
                {
                    waveIn.StartRecording();
                    ControlsEnabled = false;
                }
                catch (NAudio.MmException ex)
                {
                    Console.WriteLine("Audio Error: Couldn't open recording device\n{0}", ex.Message);
                    waveIn = null;
                    IsRecording = false;
                }
                catch (ArgumentException ex)
                {
                    Console.WriteLine("Couldn't start recording: {0}", ex.Message);
                    IsRecording = false;
                    return;
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Couldn't start recording: {0}", ex);
                    IsRecording = false;
                    return;
                }
            }
            else
                IsRecording = false;
        }
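StartRecording above first calls StopRecording when a device is already open; that counterpart is not included. A sketch of what it might look like, using only the fields visible in this example:

        private void StopRecording()
        {
            if (waveIn == null) return;
            waveIn.StopRecording();      // RecordingStopped handlers fire after this
            waveIn.DataAvailable -= waveIn_DataAvailable;
            waveIn.Dispose();
            waveIn = null;
            ControlsEnabled = true;
        }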
Example #15
        public static void Record()
        {
            Init();
            _filename = Core.UserDir + "\\" + Path.GetRandomFileName().Replace(".", "") + ".wav";

            if (_recorddevice.DataFlow == DataFlow.Render)
            {
                cap = new WasapiLoopbackCapture(_recorddevice);
                writer = new WaveFileWriter(_filename, cap.WaveFormat);
                cap.RecordingStopped += new EventHandler<StoppedEventArgs>(cap_RecordingStopped);
                cap.StartRecording();
                cap.DataAvailable += new EventHandler<WaveInEventArgs>(cap_DataAvailable);
                _running = true;
            }
            else
            {
                cap = new WasapiCapture(_recorddevice);
                writer = new WaveFileWriter(_filename, cap.WaveFormat);
                cap.RecordingStopped += new EventHandler<StoppedEventArgs>(cap_RecordingStopped);
                cap.StartRecording();
                cap.DataAvailable += new EventHandler<WaveInEventArgs>(cap_DataAvailable);
                _running = true;
            }
        }
Example #16
 public static string Stop()
 {
     Out.WriteLine("SoundCapture stopped, aborting and flushing...");
     cap.StopRecording();
     cap.Dispose();
     cap = null;
     writer.Close();
     writer = null;
     _running = false;
     return _filename;
 }
Example #17
 private IWaveIn CreateWaveInDevice()
 {
     IWaveIn newWaveIn;
     if (radioButtonWaveIn.Checked)
     {
         newWaveIn = new WaveIn();
         newWaveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWaveInEvent.Checked)
     {
         newWaveIn = new WaveInEvent();
         newWaveIn.WaveFormat = new WaveFormat(8000, 1);
     }
     else if (radioButtonWasapi.Checked)
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         var device = (MMDevice) comboWasapiDevices.SelectedItem;
         newWaveIn = new WasapiCapture(device);
     }
     else
     {
         // can't set WaveFormat as WASAPI doesn't support SRC
         newWaveIn = new WasapiLoopbackCapture();
     }
     newWaveIn.DataAvailable += OnDataAvailable;
     newWaveIn.RecordingStopped += OnRecordingStopped;
     return newWaveIn;
 }
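Typical usage of the factory above (sketch; waveIn, writer and outputFilename are fields assumed to exist on the same form):

     waveIn = CreateWaveInDevice();
     // the writer uses whatever format the selected device produced
     writer = new WaveFileWriter(outputFilename, waveIn.WaveFormat);
     waveIn.StartRecording();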
Example #18
 /// <summary>
 /// Returns a list of all the currently available devices
 /// </summary>
 public static List<Device> GetDevices()
 {
     // Create the list to return
     List<Device> devices = new List<Device>();
     // Create the device enumerator
     MMDeviceEnumerator enumerator = new MMDeviceEnumerator();
     // Enumerate all the plugged in input devices and add them to the list
     foreach (MMDevice endPoint in enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active))
     {
         IWaveIn captureDevice = new WasapiCapture(endPoint);
         Device device = new Device(endPoint.FriendlyName, endPoint.ID, captureDevice.WaveFormat.SampleRate, captureDevice.WaveFormat.Channels);
         devices.Add(device);
     }
     // Enumerate all the output loopback devices and add them to the list
     foreach (MMDevice endPoint in enumerator.EnumerateAudioEndPoints(DataFlow.Render, DeviceState.Active))
     {
         IWaveIn captureDevice = new WasapiCapture(endPoint);
         Device device = new Device("Loopback: " + endPoint.FriendlyName, endPoint.ID, captureDevice.WaveFormat.SampleRate, captureDevice.WaveFormat.Channels);
         devices.Add(device);
     }
     // Return the list
     return devices;
 }
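Each WasapiCapture in GetDevices is created only to read its WaveFormat and is never disposed, which keeps the underlying audio client alive until finalization. A sketch of the capture loop with a using block (same logic otherwise):

     foreach (MMDevice endPoint in enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active))
     {
         using (IWaveIn captureDevice = new WasapiCapture(endPoint))
         {
             devices.Add(new Device(endPoint.FriendlyName, endPoint.ID,
                 captureDevice.WaveFormat.SampleRate, captureDevice.WaveFormat.Channels));
         }
     }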
Example #19
 public void CanReuseWasapiCapture()
 {
     using (var wasapiClient = new WasapiCapture())
     {
         wasapiClient.StartRecording();
         Thread.Sleep(1000);
         wasapiClient.StopRecording();
         Thread.Sleep(1000);
         wasapiClient.StartRecording();
     }
 } 
Example #20
        private void setDevice(MMDevice dev)
        {
            if (FIsRecording)
                return;

            FDevice = dev;
            closeDevice();

            if (FPinInDevice[0] == null)
            {
                FPinOutStatus[0] = "No device selected";
                return;
            }

            FWaveIn = new WasapiCapture(FPinInDevice[0]);

            //if (FWaveIn.WaveFormat.Channels > 2)
            //{
            //    FWaveQuadToStereo = new QuadToStereoStream32(FWaveIn);
            //    FDownsampleQuad = true;
            //}

            FWaveIn.StartRecording();
            handleData = new EventHandler<WaveInEventArgs>(waveInStream_DataAvailable);
            FWaveIn.DataAvailable += handleData;

            FPinOutStatus[0] = "Device opened";
        }
Example #21
            public DecodeInstance(MMDevice device, uint channels, uint channel, double framerate)
            {
                if (device == null)
                    throw (new Exception("No device selected"));

                FChannel = (int) channel;
                FCapture = new WasapiCapture(device);
                FCapture.WaveFormat = new WaveFormat(44100, 8, (int) channels);
                channels = (uint) FCapture.WaveFormat.Channels;

                if (channel >= channels)
                {
                    throw (new Exception("Capture channel index out of range"));
                }

                FDecoder = new LTCSharp.Decoder(FCapture.WaveFormat.SampleRate, (int) framerate, 32);

                FCapture.DataAvailable += FCapture_DataAvailable;
                FCapture.StartRecording();
            }
Example #22
 public AudioInLevelManager(WasapiCapture wasapiCapture, MMDevice mmdevice)
     : base(mmdevice, DeviceType.In)
 {
     LoadSettings();
     this.wasapiInDevice = wasapiCapture;
 }
Example #23
 void OnRecordingStopped(object sender, StoppedEventArgs e)
 {
     writer.Dispose();
     writer = null;
     RecordingsViewModel.Recordings.Add(currentFileName);
     RecordingsViewModel.SelectedRecording = currentFileName;
     if (e.Exception == null)
         Message = "Recording Stopped";
     else
         Message = "Recording Error: " + e.Exception.Message;
     capture.Dispose();
     capture = null;
     RecordCommand.IsEnabled = true;
     StopCommand.IsEnabled = false;
 }
Example #24
 private void closeDevice()
 {
     if (FWaveIn != null)
     {
         FWaveIn.StopRecording();
         FWaveIn.Dispose();
         FWaveIn = null;
     }
 }