Initialize() public method

Initialize the Audio Client
public Initialize ( AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long bufferDuration, long periodicity, WaveFormat waveFormat, System.Guid audioSessionGuid ) : void
shareMode AudioClientShareMode Share mode (shared or exclusive)
streamFlags AudioClientStreamFlags Stream flags
bufferDuration long Buffer duration, in 100-nanosecond units
periodicity long Periodicity (device period), in 100-nanosecond units; 0 in shared mode
waveFormat NAudio.Wave.WaveFormat Wave format
audioSessionGuid System.Guid Audio session GUID (Guid.Empty to start a new session)
return void
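
The snippet below is a minimal usage sketch of this method, not taken from the examples on this page: it opens the default render endpoint in shared mode via NAudio's MMDeviceEnumerator and requests a 100 ms buffer. The samplingRate, channelCount and bufferMilliseconds values, and the InitializeSharedRenderClient method name, are illustrative assumptions.

        // using System; using NAudio.CoreAudioApi; using NAudio.Wave;
        private void InitializeSharedRenderClient()
        {
            const long ReftimesPerMillisec = 10000;    // REFERENCE_TIME is in 100-ns units
            int samplingRate = 44100;                  // illustrative values
            int channelCount = 2;
            int bufferMilliseconds = 100;

            var enumerator = new MMDeviceEnumerator();
            var device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            var audioClient = device.AudioClient;

            var format = WaveFormat.CreateIeeeFloatWaveFormat(samplingRate, channelCount);
            if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, format))
            {
                format = audioClient.MixFormat;        // fall back to the engine's mix format
            }

            audioClient.Initialize(AudioClientShareMode.Shared,
                                   AudioClientStreamFlags.None,
                                   ReftimesPerMillisec * bufferMilliseconds, // bufferDuration
                                   0,                                        // periodicity (0 lets the engine pick the period)
                                   format,
                                   Guid.Empty);                              // audioSessionGuid
        }
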
Code Example #1
File: SoundScript.cs  Project: romanchom/Luna
        private void InitializeAudioClient()
        {
            var enumerator = new MMDeviceEnumerator();
            var captureDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            audioClient = captureDevice.AudioClient;

            int recordBufferLength = samplingRate; // one second of frames (samplingRate frames per channel)
            recordBuffer = new float[recordBufferLength * channelCount];

            long requestedDuration = 10000 * period * 2; // two periods, expressed in 100-ns units (10,000 per millisecond)

            audioClient.Initialize(AudioClientShareMode.Shared,
                AudioClientStreamFlags.Loopback,
                requestedDuration,
                0,
                WaveFormat.CreateIeeeFloatWaveFormat(samplingRate, channelCount),
                Guid.Empty);

            capClient = audioClient.AudioCaptureClient;
            audioClient.Start();
        }
Code Example #2
        private void InitializeCaptureDevice(IAudioClient audioClientInterface)
        {
            var audioClient = new AudioClient(audioClientInterface);
            if (waveFormat == null)
            {
                this.waveFormat = audioClient.MixFormat;
            }

            long requestedDuration = REFTIMES_PER_MILLISEC * 100; // 100 ms buffer, in 100-ns units

            
            if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, WaveFormat))
            {
                throw new ArgumentException("Unsupported Wave Format");
            }
            
            var streamFlags = GetAudioClientStreamFlags();

            audioClient.Initialize(AudioClientShareMode.Shared,
                streamFlags,
                requestedDuration,
                0,
                this.waveFormat,
                Guid.Empty);

            int bufferFrameCount = audioClient.BufferSize;
            this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
            this.recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
            Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));

            // Get back the effective latency from AudioClient
            latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
        }
Code Example #3
        private void InitializeCaptureDevice()
        {
            if (initialized)
            {
                return;
            }

            if (isMixed)
            {
                long requestedDuration = ReftimesPerMillisec * audioBufferMillisecondsLength;

                if (!audioClientList[0].IsFormatSupported(ShareMode, waveFormatList[0]) ||
                    !audioClientList[1].IsFormatSupported(ShareMode, waveFormatList[1]))
                {
                    throw new ArgumentException("Unsupported Wave Format");
                }

                var streamFlags = GetAudioClientStreamFlags();

                // Standard (non event-driven) initialization for both clients
                audioClientList[0].Initialize(ShareMode,
                                              streamFlags,
                                              requestedDuration,
                                              0,
                                              waveFormatList[0],
                                              Guid.Empty);

                audioClientList[1].Initialize(ShareMode,
                                              AudioClientStreamFlags.None,
                                              requestedDuration,
                                              0,
                                              waveFormatList[1],
                                              Guid.Empty);

                int bufferFrameCount = audioClientList[0].BufferSize;
                bytesPerFrame = waveFormatList[0].Channels * waveFormatList[0].BitsPerSample / 8;
                int bufferFrameCount2 = audioClientList[1].BufferSize;
                bytesPerFrame2 = waveFormatList[1].Channels * waveFormatList[1].BitsPerSample / 8;

                //recordBuffer = new byte[bufferFrameCount * bytesPerFrame + bufferFrameCount2 * bytesPerFrame2];
                recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];
                recordBuffer2 = new byte[bufferFrameCount2 * bytesPerFrame2];
            }
            else
            {
                long requestedDuration = ReftimesPerMillisec * audioBufferMillisecondsLength;

                if (!audioClient.IsFormatSupported(ShareMode, waveFormat))
                {
                    throw new ArgumentException("Unsupported Wave Format");
                }

                var streamFlags = GetAudioClientStreamFlags();

                // When using EventSync, the setup depends on the share mode
                if (isUsingEventSync)
                {
                    // Init Shared or Exclusive
                    if (ShareMode == AudioClientShareMode.Shared)
                    {
                        // With EventCallback and Shared, periodicity must be 0 (the engine determines the period)
                        audioClient.Initialize(ShareMode, AudioClientStreamFlags.EventCallback | streamFlags, requestedDuration, 0,
                                               waveFormat, Guid.Empty);
                    }
                    else
                    {
                        // With EventCallback and Exclusive, buffer duration and periodicity must be equal
                        audioClient.Initialize(ShareMode, AudioClientStreamFlags.EventCallback | streamFlags, requestedDuration, requestedDuration,
                                               waveFormat, Guid.Empty);
                    }

                    // Create the Wait Event Handle
                    frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                    audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
                }
                else
                {
                    // Normal setup for either share mode
                    audioClient.Initialize(ShareMode,
                                           streamFlags,
                                           requestedDuration,
                                           0,
                                           waveFormat,
                                           Guid.Empty);
                }

                int bufferFrameCount = audioClient.BufferSize;
                bytesPerFrame = waveFormat.Channels * waveFormat.BitsPerSample / 8;
                recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];
            }

            //Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));

            initialized = true;
        }