Code example #1
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Initialize(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            var approFormat = audioClient.CheckSupportFormat(shareMode, outputFormat);        // Check whether the requested format is supported.

            if (approFormat != null)
            {
                // The requested format is not supported as-is; verify again with the closest-match format suggested by the system.
                if (!audioClient.IsFormatSupported(shareMode, approFormat))
                {
                    throw new ArgumentException("Unsupported Wave Format");
                }
                outputFormat = approFormat;
            }
            //audioClient.Initialize(shareMode, EAudioClientStreamFlags.None, 1000000, 0, outputFormat, Guid.Empty);

            //ResamplerStream = new ResamplerDmoStream(waveProvider, outputFormat);
            //this.sourceProvider = ResamplerStream;
            //this.sourceProvider = waveProvider;
            this.sourceProvider = new ResampleWaveProvider(waveProvider, outputFormat);

            // If using EventSync, the setup depends on the share mode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == EAudioClientShareMode.Shared)
                {
                    // With EventCallBack and Shared, both latencies must be set to 0
                    audioClient.Initialize(shareMode, EAudioClientStreamFlags.EventCallback, 0, 0, outputFormat, Guid.Empty);
                    // Get back the effective latency from AudioClient
                    latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
                }
                else
                {
                    // With EventCallBack and Exclusive, both latencies must be equal
                    audioClient.Initialize(shareMode, EAudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes, outputFormat, Guid.Empty);
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle);
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, EAudioClientStreamFlags.None, latencyRefTimes, 0, outputFormat, Guid.Empty);
            }

            Debug.WriteLine(string.Format("RenderAudioClient: {0}", audioClient.ToString()));
            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
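
After Initialize returns, playback is normally driven by a loop that waits on frameEventWaitHandle and feeds sourceProvider into the render buffer. The following is only a rough sketch of such a loop; Start, Stop, CurrentPadding, GetBuffer, ReleaseBuffer, EAudioClientBufferFlags and the 'playing' flag are assumed, NAudio-style names and may not match the actual wrapper classes.

        // Sketch of an event-driven render loop (assumed member names, see above).
        // Marshal requires: using System.Runtime.InteropServices;
        private void PlayLoop()
        {
            int bufferFrameCount = audioClient.BufferSize;
            int frameBytes       = outputFormat.Channels * outputFormat.BitsPerSample / 8;
            var readBuffer       = new byte[bufferFrameCount * frameBytes];

            audioClient.Start();
            while (playing)                                                   // 'playing' is a hypothetical flag
            {
                // Wake up when the device signals that buffer space is available.
                frameEventWaitHandle.WaitOne(3 * latencyMilliseconds);

                int framesAvailable = bufferFrameCount - audioClient.CurrentPadding;
                if (framesAvailable <= 0) continue;

                int bytesRead = sourceProvider.Read(readBuffer, 0, framesAvailable * frameBytes);
                if (bytesRead == 0) break;                                    // source exhausted

                IntPtr pBuffer = renderClient.GetBuffer(framesAvailable);                          // assumed signature
                Marshal.Copy(readBuffer, 0, pBuffer, bytesRead);
                renderClient.ReleaseBuffer(bytesRead / frameBytes, EAudioClientBufferFlags.None);  // assumed signature
            }
            audioClient.Stop();
        }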
Code example #2
        private void InitializeCaptureDevice()
        {
            if (isInitialized)
            {
                return;
            }

            long requestedDuration = REFTIMES_PER_MILLISEC * 100;
            var  streamFlags       = GetAudioClientStreamFlags();
            var  shareMode         = EAudioClientShareMode.Shared;

            var approFormat = audioClient.CheckSupportFormat(shareMode, WaveFormat);        // Check whether the requested format is supported.

            if (approFormat != null)
            {
                // The requested format is not supported as-is; verify again with the closest-match format suggested by the system.
                if (!audioClient.IsFormatSupported(shareMode, approFormat))
                {
                    throw new ArgumentException("Unsupported Wave Format");
                }
                // Adopt the closest-match format and continue.
                WaveFormat = approFormat;
            }

            audioClient.Initialize(
                shareMode,
                streamFlags,
                requestedDuration,
                0,
                WaveFormat,
                Guid.Empty);
            Debug.WriteLine(string.Format("CaptureAudioClient: {0}", audioClient.ToString()));

            var bufferFrameCount = audioClient.BufferSize;

            this.bytesPerFrame = this.WaveFormat.Channels * this.WaveFormat.BitsPerSample / 8;
            this.recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];
            Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));

            //WaveProvider = new InnerWaveProvider(this);
            waveProvider = new BufferedWaveProvider(WaveFormat);
            //waveProvider = new ResampleWaveProvider(WaveFormat, WaveFormat);

            frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
            audioClient.SetEventHandle(frameEventWaitHandle);

            isInitialized = true;
        }
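
Capture would be driven in a similar way once InitializeCaptureDevice has run: start the client, wait on frameEventWaitHandle, copy each packet from the capture buffer into recordBuffer, and queue it on the BufferedWaveProvider. The loop below is only a sketch; AudioCaptureClient, its GetBuffer/ReleaseBuffer signatures and the 'capturing' flag are assumptions modeled on NAudio and may differ from the actual wrapper.

        // Sketch of an event-driven capture loop (assumed member names, see above).
        // Marshal requires: using System.Runtime.InteropServices;
        private void CaptureLoop()
        {
            var captureClient = audioClient.AudioCaptureClient;               // assumed property
            audioClient.Start();

            while (capturing)                                                 // 'capturing' is a hypothetical flag
            {
                // Wake up when a packet of captured frames is ready.
                frameEventWaitHandle.WaitOne(100);

                int framesAvailable;
                EAudioClientBufferFlags flags;
                IntPtr pData = captureClient.GetBuffer(out framesAvailable, out flags);   // assumed signature

                int bytesAvailable = framesAvailable * bytesPerFrame;
                Marshal.Copy(pData, recordBuffer, 0, bytesAvailable);
                captureClient.ReleaseBuffer(framesAvailable);

                // Queue the captured block; assumes waveProvider is typed as BufferedWaveProvider.
                waveProvider.AddSamples(recordBuffer, 0, bytesAvailable);
            }
            audioClient.Stop();
        }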