Code example #1
File: AudioCapture.cs Project: romanchom/Luna
        public AudioCapture(int samplingRate, int channelCount, int sampleCount)
        {
            this.channelCount = channelCount;

            channels = new CircularBuffer<float>[channelCount];
            for (int c = 0; c < channelCount; ++c)
            {
                channels[c] = new CircularBuffer<float>(sampleCount);
            }

            var enumerator    = new MMDeviceEnumerator();
            var captureDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            audioClient = captureDevice.AudioClient;

            long requestedDuration = (long)((double)sampleCount * 20000000 / samplingRate);               // number of 100ns intervals

            audioClient.Initialize(AudioClientShareMode.Shared,
                                   AudioClientStreamFlags.Loopback,
                                   requestedDuration,
                                   0,
                                   WaveFormat.CreateIeeeFloatWaveFormat(samplingRate, channelCount),
                                   Guid.Empty);

            audioCapClient = audioClient.AudioCaptureClient;
            audioClient.Start();
        }
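A hypothetical usage sketch (not part of the romanchom/Luna project): constructing the object is enough to start filling the per-channel circular buffers, since the constructor initializes and starts the audio client itself.

        // Hypothetical usage, assuming the rest of the AudioCapture class from this
        // project: capture the default render device's loopback stream at 48 kHz,
        // 2 channels, keeping 4800 samples (~100 ms) per channel.
        var capture = new AudioCapture(48000, 2, 4800);

Because DataFlow.Render is combined with AudioClientStreamFlags.Loopback, this captures what the device is playing back rather than a microphone input.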
Code example #2
        private void InitializeInternal()
        {
            var defaultFormat = _waveFormat;

            _audioClient = AudioClient.FromMMDevice(Device);

            /*if (_shareMode == AudioClientShareMode.Exclusive)
             * {
             *  _waveFormat = _waveFormat ?? _audioClient.MixFormat;
             * }
             * else
             * {
             *  _waveFormat = _waveFormat ?? _audioClient.MixFormat;
             * }*/
            _waveFormat = _waveFormat ?? _audioClient.MixFormat;

            _waveFormat = SetupWaveFormat(_waveFormat, _audioClient);

            if (!_eventSync)
            {
                _audioClient.Initialize(_shareMode, AudioClientStreamFlags.None | GetStreamFlags(), _latency * ReftimesPerMillisecond, 0, _waveFormat, Guid.Empty);
            }
            else
            {
                if (_shareMode == AudioClientShareMode.Exclusive)
                {
                    try
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), _latency * ReftimesPerMillisecond, _latency * ReftimesPerMillisecond, _waveFormat, Guid.Empty);
                    }
                    catch (CoreAudioAPIException e)
                    {
                        if (e.ErrorCode == unchecked ((int)0x88890019)) //AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                        {
                            int bufferSize = _audioClient.BufferSize;
                            _audioClient.Dispose();
                            long hnsRequestedDuration = (long)(((double)ReftimesPerMillisecond * 1000 / _waveFormat.SampleRate * bufferSize) + 0.5);
                            _audioClient = AudioClient.FromMMDevice(Device);
                            if (defaultFormat == null)
                            {
                                _waveFormat = _audioClient.MixFormat;
                            }
                            _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), hnsRequestedDuration, hnsRequestedDuration, _waveFormat, Guid.Empty);
                        }
                    }
                }
                else
                {
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), 0, 0, _waveFormat, Guid.Empty);
                    _latency = (int)(_audioClient.StreamLatency / ReftimesPerMillisecond);
                }

                _eventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                _audioClient.SetEventHandle(_eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }

            _audioCaptureClient = AudioCaptureClient.FromAudioClient(_audioClient);
        }
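The exclusive-mode branch above recovers from AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED (0x88890019) by re-creating the client and re-requesting a duration that matches the buffer size the driver actually allocated. A minimal sketch of the constant and the realignment formula this relies on, assuming ReftimesPerMillisecond follows the usual WASAPI 100-ns convention (the definition is not shown in the snippet):

        // WASAPI reference time is counted in 100-ns units, so 1 ms = 10,000 units.
        private const long ReftimesPerMillisecond = 10000;

        // Convert the allocated buffer size (in frames) back into a 100-ns duration
        // at the current sample rate, as done in the catch block above.
        private static long AlignedDuration(int bufferSizeFrames, int sampleRate)
        {
            return (long)(10000.0 * 1000 / sampleRate * bufferSizeFrames + 0.5);
        }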
Code example #3
 public void CanCreateCaptureClientExclusive()
 {
     using (var audioClient = Utils.CreateDefaultCaptureClient())
     {
         audioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.None, 1000, 0, audioClient.MixFormat, Guid.Empty);
         using (var captureClient = AudioCaptureClient.FromAudioClient(audioClient))
         {
             Assert.IsNotNull(captureClient);
         }
     }
 }
Code example #4
        private void selectDeviceImpl(string devId)
        {
            releaseDevice();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return(di.DeviceId == devId); });

            if (_capDevice == null)
            {
#warning Exception handling needed
            }
            _capDeviceId = _capDevice.Id;

            // Decide on an initialization method suited to the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            if (_capDevice.DataFlow == EDataFlow.eRender)
            {
                streamFlags = AudioClientStreamFlags.Loopback |
                              AudioClientStreamFlags.EventCallback; // in practice this event never fires
            }
            // Format
            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;
                _pitchAnalyzer.SampleFrequency = (double)(_capFormat.nSamplesPerSec);

                // Initialize
                _audioClient.Initialize(AudioClientShareMode.Shared,
                                        streamFlags, 300 /*ms*/ * 10000, 0, _capFormat, Guid.Empty);
                _capClient = _audioClient.AudioCaptureClient;

                // Raise the event
                DeviceSelectedEventHandler del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
#warning Error handling needed
                _audioClient = null;
                _capClient   = null;
            }
        }
Code example #5
        private void StartRecording()
        {
            if (IsRunning)
            {
                return;
            }

            InitializeCaptureDevice();

            Debug.WriteLine("[capture]Task starting...");
            this.stop   = false;
            captureTask = Task.Run(() => {
                Exception exception = null;
                var client          = audioClient;

                try
                {
                    int bufferFrameCount = client.BufferSize;
                    // Calculate the actual duration of the allocated buffer.
                    long actualDuration = (long)((double)REFTIMES_PER_SEC *
                                                 bufferFrameCount / WaveFormat.SampleRate);
                    int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);

                    Debug.WriteLine(string.Format("num Buffer Frames: {0}", client.BufferSize));
                    Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));


                    AudioCaptureClient capture = client.AudioCaptureClient;
                    client.Start();

                    while (!this.stop)
                    {
                        //Task.Delay(sleepMilliseconds);        // wait
                        frameEventWaitHandle.WaitOne(sleepMilliseconds);    // wait
                        ReadNextPacket(capture);
                    }
                }
                catch (Exception e)
                {
                    exception = e;
                }
                finally
                {
                    client.Stop();
                    client.Reset();
                    Debug.WriteLine("[capture]Task stop detected.");
                    RaiseRecordingStopped(exception);
                }
            });
            Debug.WriteLine("[capture]Task started");
        }
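To put numbers on the polling interval: with REFTIMES_PER_SEC = 10,000,000 (100-ns units per second), a 960-frame buffer at 48 kHz gives an actualDuration of 200,000 units (20 ms), so the loop wakes roughly every 10 ms. A small sketch of that calculation, using the constants the snippet assumes but does not show:

        // Assumed WASAPI reference-time constants (100-ns units).
        private const long REFTIMES_PER_SEC = 10000000;
        private const long REFTIMES_PER_MILLISEC = 10000;

        // Example: 960 frames at 48 kHz -> 200,000 units (20 ms) -> sleep ~10 ms,
        // i.e. poll the capture buffer at about half its duration.
        private static int HalfBufferSleepMs(int bufferFrameCount, int sampleRate)
        {
            long actualDuration = (long)((double)REFTIMES_PER_SEC * bufferFrameCount / sampleRate);
            return (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);
        }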
Code example #6
        private void ReadNextPacket(AudioCaptureClient capture)
        {
            IntPtr buffer;
            int    framesAvailable;
            AudioClientBufferFlags flags;
            int packetSize         = capture.GetNextPacketSize();
            int recordBufferOffset = 0;

            //Debug.WriteLine(string.Format("packet size: {0} samples", packetSize / 4));

            while (packetSize != 0)
            {
                buffer = capture.GetBuffer(out framesAvailable, out flags);

                int bytesAvailable = framesAvailable * bytesPerFrame;

                // apparently it is sometimes possible to read more frames than we were expecting?
                // fix suggested by Michael Feld:
                int spaceRemaining = Math.Max(0, recordBuffer.Length - recordBufferOffset);
                if (spaceRemaining < bytesAvailable && recordBufferOffset > 0)
                {
                    if (DataAvailable != null)
                    {
                        DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset));
                    }
                    recordBufferOffset = 0;
                }

                // if not silence...
                if ((flags & AudioClientBufferFlags.Silent) != AudioClientBufferFlags.Silent)
                {
                    Marshal.Copy(buffer, recordBuffer, recordBufferOffset, bytesAvailable);
                }
                else
                {
                    Array.Clear(recordBuffer, recordBufferOffset, bytesAvailable);
                }
                recordBufferOffset += bytesAvailable;
                capture.ReleaseBuffer(framesAvailable);
                packetSize = capture.GetNextPacketSize();
            }
            if (DataAvailable != null)
            {
                DataAvailable(this, new WaveInEventArgs(recordBuffer, recordBufferOffset));
            }
        }
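A hypothetical consumer of the DataAvailable event raised above, assuming the surrounding class behaves like NAudio's WasapiCapture (a WaveFormat property and an EventHandler<WaveInEventArgs> event); the names capture and loopback.wav are illustrative only:

        // Append every delivered packet to a WAV file (NAudio.Wave.WaveFileWriter).
        var writer = new WaveFileWriter("loopback.wav", capture.WaveFormat);
        capture.DataAvailable += (s, e) => writer.Write(e.Buffer, 0, e.BytesRecorded);
        // Dispose the writer when recording stops so the WAV header is finalized.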
Code example #7
        private void DoRecording(AudioClient client)
        {
            Debug.WriteLine(String.Format("Client buffer frame count: {0}", client.BufferSize));
            int bufferFrameCount = client.BufferSize;

            // Calculate the actual duration of the allocated buffer.
            long actualDuration = (long)((double)REFTIMES_PER_SEC *
                                         bufferFrameCount / waveFormat.SampleRate);
            int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);
            int waitMilliseconds  = (int)(3 * actualDuration / REFTIMES_PER_MILLISEC);

            AudioCaptureClient capture = client.AudioCaptureClient;

            client.Start();

            if (isUsingEventSync)
            {
                Debug.WriteLine(string.Format("wait: {0} ms", waitMilliseconds));
            }
            else
            {
                Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));
            }

            while (!this.requestStop)
            {
                bool readBuffer = true;
                if (isUsingEventSync)
                {
                    readBuffer = frameEventWaitHandle.WaitOne(waitMilliseconds, false);
                }
                else
                {
                    Thread.Sleep(sleepMilliseconds);
                }

                // If still playing and notification is ok
                if (!this.requestStop && readBuffer)
                {
                    ReadNextPacket(capture);
                }
            }
        }
Code example #8
File: WasapiCapture.cs Project: aljordan/NAJAudio
        private void DoRecording(AudioClient client)
        {
            Debug.WriteLine(String.Format("Client buffer frame count: {0}", client.BufferSize));
            int bufferFrameCount = client.BufferSize;

            // Calculate the actual duration of the allocated buffer.
            long actualDuration = (long)((double)REFTIMES_PER_SEC *
                                         bufferFrameCount / WaveFormat.SampleRate);
            int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);

            AudioCaptureClient capture = client.AudioCaptureClient;

            client.Start();
            Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));
            while (!this.requestStop)
            {
                Thread.Sleep(sleepMilliseconds);
                ReadNextPacket(capture);
            }
        }
Code example #9
File: WasapiCapture.cs Project: zhang19/cscore
        private void UninitializeAudioClients()
        {
            if (_audioClient != null)
            {
                _audioClient.Dispose();
                _audioClient = null;
            }
            if (_audioCaptureClient != null)
            {
                _audioCaptureClient.Dispose();
                _audioCaptureClient = null;
            }
            if (_eventWaitHandle != null)
            {
                _eventWaitHandle.Close();
                _eventWaitHandle = null;
            }

            _isInitialized = false;
        }
Code example #10
File: WasapiCapture.cs Project: zhang19/cscore
        private void ReadData(byte[] buffer, AudioCaptureClient captureClient, uint frameSize)
        {
            int nextPacketSize = captureClient.GetNextPacketSize();
            int read           = 0;
            int offset         = 0;

            while (nextPacketSize != 0)
            {
                int framesAvailable;
                AudioClientBufferFlags flags;

                IntPtr nativeBuffer = captureClient.GetBuffer(out framesAvailable, out flags);

                int bytesAvailable = (int)(framesAvailable * frameSize);
                int bytesToCopy    = Math.Min(bytesAvailable, buffer.Length);

                if (Math.Max(buffer.Length - read, 0) < bytesAvailable && read > 0)
                {
                    RaiseDataAvailable(buffer, 0, read);
                    read = offset = 0;
                }

                if ((flags & AudioClientBufferFlags.Silent) == AudioClientBufferFlags.Silent)
                {
                    Array.Clear(buffer, offset, bytesToCopy);
                }
                else
                {
                    Marshal.Copy(nativeBuffer, buffer, offset, bytesToCopy);
                }

                read   += bytesToCopy;
                offset += bytesToCopy;

                captureClient.ReleaseBuffer(framesAvailable);
                nextPacketSize = captureClient.GetNextPacketSize();
            }

            RaiseDataAvailable(buffer, 0, read);
        }
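The frameSize parameter above is the size of one audio frame in bytes. A plausible caller-side setup, assuming frameSize comes from the negotiated format's BlockAlign (channels × bytes per sample); the caller is not shown in the snippet, so this is a sketch only:

        // Hypothetical caller: size the managed buffer from the client's frame count
        // and the bytes-per-frame of the negotiated format.
        uint frameSize = (uint)_waveFormat.BlockAlign;
        var buffer = new byte[_audioClient.BufferSize * frameSize];
        ReadData(buffer, _audioCaptureClient, frameSize);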
Code example #11
 private void releaseDeviceImpl()
 {
     if (_capDevice != null)
     {
         if (_capturing)
         {
             stopCaptureImpl();
         }
         _capDevice.Dispose();
     }
     _capDevice = null;
     if (_capClient != null)
     {
         _capClient.Dispose();
     }
     _capClient = null;
     if (_audioClient != null)
     {
         _audioClient.Dispose();
     }
     _audioClient = null;
 }
Code example #12
File: WasapiCaptureRT.cs Project: zhouyong69/NAudio
        private void DoRecording()
        {
            Debug.WriteLine("Recording buffer size: " + audioClient.BufferSize);

            var buf = new Byte[audioClient.BufferSize * bytesPerFrame];

            int bufLength     = 0;
            int minPacketSize = waveFormat.AverageBytesPerSecond / 100; //100ms

            try
            {
                AudioCaptureClient capture = audioClient.AudioCaptureClient;
                audioClient.Start();

                int packetSize = capture.GetNextPacketSize();

                while (captureState == WasapiCaptureState.Recording)
                {
                    IntPtr pData                   = IntPtr.Zero;
                    int    numFramesToRead         = 0;
                    AudioClientBufferFlags dwFlags = 0;

                    if (packetSize == 0)
                    {
                        if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
                        {
                            throw new Exception("Capture event timeout");
                        }
                    }

                    pData = capture.GetBuffer(out numFramesToRead, out dwFlags);

                    if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
                    {
                        pData = IntPtr.Zero;
                    }

                    if (numFramesToRead == 0)
                    {
                        continue;
                    }

                    int capturedBytes = numFramesToRead * bytesPerFrame;

                    if (pData == IntPtr.Zero)
                    {
                        Array.Clear(buf, bufLength, capturedBytes);
                    }
                    else
                    {
                        Marshal.Copy(pData, buf, bufLength, capturedBytes);
                    }

                    bufLength += capturedBytes;

                    capture.ReleaseBuffer(numFramesToRead);

                    if (bufLength >= minPacketSize)
                    {
                        if (DataAvailable != null)
                        {
                            DataAvailable(this, new WaveInEventArgs(buf, bufLength));
                        }
                        bufLength = 0;
                    }

                    packetSize = capture.GetNextPacketSize();
                }
            }
            catch (Exception ex)
            {
                RaiseRecordingStopped(ex);
                Debug.WriteLine("stop wasapi");
            }
            finally
            {
                RaiseRecordingStopped(null);

                audioClient.Stop();
            }
            Debug.WriteLine("stop wasapi");
        }
Code example #13
        private void selectDeviceImpl(string devId)
        {
            if (_capDevice != null && _capDevice.Id == devId)
            {
                return;
            }

            releaseDeviceImpl();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return(di.DeviceId == devId); });

            if (_capDevice == null)
            {
#warning Exception handling needed
                _audioClient = null;
                _capClient   = null;
                return;
            }
            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Shared;

            // Decide on an initialization method suited to the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            switch (shareMode)
            {
            case AudioClientShareMode.Shared:
                switch (_capDevice.DataFlow)
                {
                case EDataFlow.eCapture:
                    streamFlags = 0;
                    break;

                case EDataFlow.eRender:
                    streamFlags = AudioClientStreamFlags.Loopback;
                    break;
                }
                break;

            case AudioClientShareMode.Exclusive:
                streamFlags = AudioClientStreamFlags.NoPersist;
                break;
            }

            // Format
            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            // Volume
            _masterVolume   = 0;
            _channelVolumes = new double[_capDevice.AudioMeterInformation.PeakValues.Count];
            var h = VolumeChanged;
            if (h != null)
            {
                h(this, new VolumeChangedEventArgs(_capDeviceId, _masterVolume, _channelVolumes));
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;

                _capFormat.wFormatTag     = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels      = 1;
                _capFormat.nSamplesPerSec = 44100;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat      = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign         = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec     = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize

                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                    {
                        _capFormat = tmpFmt;
                    }
                    _audioClient.Initialize(shareMode,
                                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    if ((uint)ex.ErrorCode == 0x88890019)
                    {
                        uint bufSize = _audioClient.BufferSize;
                        tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                        _audioClient.Initialize(shareMode,
                                                streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                    }
                }
                clearBuffer();

                _capClient = _audioClient.AudioCaptureClient;

                // Raise the event
                var del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient   = null;
                throw;
            }
        }
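The hand-built WAVEFORMATEXTENSIBLE above must keep its derived fields consistent with the requested format; for the 16-bit, mono, 44.1 kHz request this works out as follows (a worked example, not code from the project):

        // Derived fields for 16-bit, 1-channel, 44,100 Hz PCM:
        // nBlockAlign     = wBitsPerSample / 8 * nChannels = 2 bytes per frame
        // nAvgBytesPerSec = nSamplesPerSec * nBlockAlign   = 88,200 bytes per second
        ushort nBlockAlign = (ushort)(16 / 8 * 1);   // 2
        uint nAvgBytesPerSec = 44100u * nBlockAlign; // 88,200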
Code example #14
        public void SelectDevice(string devId)
        {
            _capDevice = _devices.GetDevice(devId.Trim());

            if (_capDevice == null)
            {
                _audioClient = null;
                _capClient   = null;
                return;
            }

            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode   shareMode   = AudioClientShareMode.Exclusive;
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;

            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;

                _capFormat.wFormatTag     = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels      = 2;
                _capFormat.nSamplesPerSec = 16000;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat      = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign         = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec     = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize
                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                    {
                        _capFormat = tmpFmt;
                    }
                    _audioClient.Initialize(shareMode, streamFlags,
                                            tmp2, tmp2,
                                            _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    try
                    {
                        AudioClientError error = (AudioClientError)ex.ErrorCode;
                        switch (error)
                        {
                        case AudioClientError.BufferSizeNotAligned:
                            uint bufSize = _audioClient.BufferSize;
                            tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                            _audioClient.Initialize(shareMode,
                                                    streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                            break;

                        case AudioClientError.UnsupportedFormat:

                            break;
                        }
                    }
                    catch (InvalidCastException)
                    {
                    }
                }

                _capClient = _audioClient.AudioCaptureClient;
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient   = null;
                throw;
            }
        }
Code example #15
        private void DoRecording(AudioClient client)
        {
            Debug.WriteLine(client.BufferSize);

            var buf = new Byte[client.BufferSize * bytesPerFrame];

            int bufLength     = 0;
            int minPacketSize = waveFormat.AverageBytesPerSecond / 100; //100ms

            IntPtr hEvent = NativeMethods.CreateEventEx(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);

            client.SetEventHandle(hEvent);

            try
            {
                AudioCaptureClient capture = client.AudioCaptureClient;
                client.Start();

                int packetSize = capture.GetNextPacketSize();

                while (!this.stop)
                {
                    IntPtr pData                   = IntPtr.Zero;
                    int    numFramesToRead         = 0;
                    AudioClientBufferFlags dwFlags = 0;

                    if (packetSize == 0)
                    {
                        if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
                        {
                            throw new Exception("Capture event timeout");
                        }
                    }

                    pData = capture.GetBuffer(out numFramesToRead, out dwFlags);

                    if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
                    {
                        pData = IntPtr.Zero;
                    }

                    if (numFramesToRead == 0)
                    {
                        continue;
                    }

                    int capturedBytes = numFramesToRead * bytesPerFrame;

                    System.Runtime.InteropServices.Marshal.Copy(pData, buf, bufLength, capturedBytes);
                    bufLength += capturedBytes;

                    capture.ReleaseBuffer(numFramesToRead);

                    if (bufLength >= minPacketSize)
                    {
                        if (DataAvailable != null)
                        {
                            DataAvailable(this, new WaveInEventArgs(buf, bufLength));
                        }
                        bufLength = 0;
                    }

                    packetSize = capture.GetNextPacketSize();
                }
            }
            catch (Exception ex)
            {
                RaiseRecordingStopped(ex);
                Debug.WriteLine("stop wasapi");
            }
            finally
            {
                RaiseRecordingStopped(null);

                NativeMethods.CloseHandle(hEvent);
                client.Stop();
                client.Dispose();
            }
        }
Code example #16
 private void releaseDeviceImpl()
 {
     if (_capDevice != null)
     {
         if (_capturing) stopCaptureImpl();
         _capDevice.Dispose();
     }
     _capDevice = null;
     if (_capClient != null) _capClient.Dispose();
     _capClient = null;
     if (_audioClient != null) _audioClient.Dispose();
     _audioClient = null;
 }
Code example #17
        public void SelectDevice(string devId)
        {
            _capDevice = _devices.GetDevice(devId.Trim());

            if (_capDevice == null)
            {
                _audioClient = null;
                _capClient = null;
                return;
            }

            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Exclusive;
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;

            if (_audioClient != null)
                _capDevice.ReleaseAudioClient();

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat = _audioClient.MixFormat;

                _capFormat.wFormatTag = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels = 2;
                _capFormat.nSamplesPerSec = 16000;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize
                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                        _capFormat = tmpFmt;
                    _audioClient.Initialize(shareMode, streamFlags,
                        tmp2, tmp2,
                        _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    try
                    {
                        AudioClientError error = (AudioClientError)ex.ErrorCode;
                        switch (error)
                        {
                            case AudioClientError.BufferSizeNotAligned:
                                uint bufSize = _audioClient.BufferSize;
                                tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                                _audioClient.Initialize(shareMode,
                                    streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                                break;

                            case AudioClientError.UnsupportedFormat:

                                break;
                        }
                    }
                    catch (InvalidCastException)
                    {

                    }
                }

                _capClient = _audioClient.AudioCaptureClient;

            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient = null;
                throw;
            }
        }
Code example #18
File: WasapiCapture.cs Project: dmitten/HoloToolkit
        /// <summary>
        /// This is the loop that does all of the hard work, each packet is recorded here and then dispatched.
        /// </summary>
        /// <param name="client"></param>
        private void DoRecording(AudioClient2 audioClient)
        {
            this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Information, 0x01,
                                          "WasapiCapture::DoRecording => has a buffer size of " + audioClient.BufferSize);

            int sampleBufferByteSize = audioClient.BufferSize * bytesPerFrame;

            byte[] sampleBuffer = new byte[sampleBufferByteSize];

            IntPtr audioSamplesReadyEventHandle = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);

            audioClient.SetEventHandle(audioSamplesReadyEventHandle);

            try
            {
                AudioCaptureClient captureClient = audioClient.AudioCaptureClient;
                audioClient.Start();


                while (!this.pendingStopRequest)
                {
                    NativeMethods.WaitForSingleObjectEx(audioSamplesReadyEventHandle, 1000, true);
                    int packetSize = captureClient.GetNextPacketSize();

                    if (packetSize > 0)
                    {
                        int numFramesToRead            = 0;
                        int numBytesInSampleBuffer     = 0;
                        AudioClientBufferFlags dwFlags = 0;
                        IntPtr micDataIn;

                        micDataIn = captureClient.GetBuffer(out numFramesToRead, out dwFlags);

                        int capturedBytes = numFramesToRead * bytesPerFrame;

                        if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
                        {
                            int maxBytes = Math.Min(capturedBytes, sampleBufferByteSize);
                            while (maxBytes-- > 0)
                            {
                                sampleBuffer[numBytesInSampleBuffer++] = 0;
                            }
                        }
                        else
                        {
                            System.Runtime.InteropServices.Marshal.Copy(micDataIn, sampleBuffer, 0, capturedBytes);
                            numBytesInSampleBuffer = capturedBytes;
                        }

                        captureClient.ReleaseBuffer(numFramesToRead);

                        if (DataAvailableEventHandler != null)
                        {
                            if (this.waveFormat.Channels == 2)
                            {
                                // convert stereo to mono inline!
                                ConvertStereoToMono(sampleBuffer, numBytesInSampleBuffer);
                                numBytesInSampleBuffer /= 2;
                            }
                            else if (this.waveFormat.Channels == 6)
                            {
                                // convert 6 to mono inline!
                                Convert6ToMono(sampleBuffer, numBytesInSampleBuffer, Channel2 | Channel3, 2);
                                numBytesInSampleBuffer /= 6;
                            }

                            DataAvailableEventHandler(this, new WaveInEventArgs(sampleBuffer, numBytesInSampleBuffer));
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01, "WasapiCapture::DoRecording => Exception: " + ex.ToString());
            }
            finally
            {
                NativeMethods.CloseHandle(audioSamplesReadyEventHandle);
                audioClient.Stop();
                audioClient.Dispose();
                this.pendingStopRequest = false;
            }
        }
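ConvertStereoToMono and Convert6ToMono are project-specific helpers that are not shown here. For illustration only, an in-place stereo-to-mono average over interleaved 16-bit PCM could look like the sketch below; the sample format is an assumption, and this is not the HoloToolkit implementation:

        // Illustrative only: average left/right 16-bit samples in place, so the
        // first half of the buffer holds the mono data (hence the /= 2 above).
        private static void MixStereoToMono16(byte[] buffer, int byteCount)
        {
            int outIndex = 0;
            for (int i = 0; i + 3 < byteCount; i += 4)
            {
                short left = BitConverter.ToInt16(buffer, i);
                short right = BitConverter.ToInt16(buffer, i + 2);
                short mono = (short)((left + right) / 2);
                buffer[outIndex++] = (byte)(mono & 0xFF);
                buffer[outIndex++] = (byte)((mono >> 8) & 0xFF);
            }
        }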
Code example #19
        private void selectDeviceImpl(string devId)
        {
            if (_capDevice != null && _capDevice.Id == devId)
            {
                return;
            }

            releaseDeviceImpl();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return di.DeviceId == devId; });
            if (_capDevice == null)
            {
            #warning Exception handling needed
                _audioClient = null;
                _capClient = null;
                return;
            }
            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Shared;

            // Decide on an initialization method suited to the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            switch (shareMode)
            {
                case AudioClientShareMode.Shared:
                    switch (_capDevice.DataFlow)
                    {
                        case EDataFlow.eCapture:
                            streamFlags = 0;
                            break;
                        case EDataFlow.eRender:
                            streamFlags = AudioClientStreamFlags.Loopback;
                            break;
                    }
                    break;
                case AudioClientShareMode.Exclusive:
                    streamFlags = AudioClientStreamFlags.NoPersist;
                    break;
            }

            // Format
            if (_audioClient != null) _capDevice.ReleaseAudioClient();

            // Volume
            _masterVolume = 0;
            _channelVolumes = new double[_capDevice.AudioMeterInformation.PeakValues.Count];
            var h = VolumeChanged;
            if (h != null)
            {
                h(this, new VolumeChangedEventArgs(_capDeviceId, _masterVolume, _channelVolumes));
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat = _audioClient.MixFormat;

                _capFormat.wFormatTag = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels = 1;
                _capFormat.nSamplesPerSec = 44100;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize

                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt)) _capFormat = tmpFmt;
                    _audioClient.Initialize(shareMode,
                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    if ((uint)ex.ErrorCode == 0x88890019)
                    {
                        uint bufSize = _audioClient.BufferSize;
                        tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                        _audioClient.Initialize(shareMode,
                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                    }
                }
                clearBuffer();

                _capClient = _audioClient.AudioCaptureClient;

                // Raise the event
                var del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient = null;
                throw;
            }
        }