Code Example #1
File: WasapiOut.cs  Project: aljordan/NAJAudio
 /// <summary>
 /// Creates a new WASAPI Output
 /// </summary>
 /// <param name="device">Device to use</param>
 /// <param name="shareMode">Share mode (shared or exclusive) to open the audio client in</param>
 /// <param name="useEventSync">True to synchronize with the audio client via events; false to synchronize by sleeping</param>
 /// <param name="latency">Requested latency in milliseconds</param>
 public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
 {
     this.audioClient = device.AudioClient;
     this.mmDevice = device;
     this.shareMode = shareMode;
     this.isUsingEventSync = useEventSync;
     this.latencyMilliseconds = latency;
     this.syncContext = SynchronizationContext.Current;
 }
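
A minimal usage sketch for this constructor, assuming the NAudio-style MMDeviceEnumerator, AudioFileReader, and playback members that this fork appears to follow; the file path and the 200 ms latency value are illustrative:

using System.Threading;
using NAudio.CoreAudioApi;
using NAudio.Wave;

// Open the default render endpoint in shared mode with event-driven
// synchronization and a requested latency of 200 ms.
var enumerator = new MMDeviceEnumerator();
MMDevice device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Console);

using (var output = new WasapiOut(device, AudioClientShareMode.Shared, true, 200))
using (var reader = new AudioFileReader("example.wav")) // hypothetical file path
{
    output.Init(reader);
    output.Play();
    while (output.PlaybackState == PlaybackState.Playing)
    {
        Thread.Sleep(500);
    }
}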
Code Example #2
File: WasapiCapture.cs  Project: aljordan/NAJAudio
        /// <summary>
        /// Initialises a new instance of the WASAPI capture class
        /// </summary>
        /// <param name="captureDevice">Capture device to use</param>
        public WasapiCapture(MMDevice captureDevice)
        {
            syncContext = SynchronizationContext.Current;
            audioClient = captureDevice.AudioClient;
            ShareMode = AudioClientShareMode.Shared;

            waveFormat = audioClient.MixFormat;
            var wfe = waveFormat as WaveFormatExtensible;
            if (wfe != null)
            {
                try
                {
                    waveFormat = wfe.ToStandardWaveFormat();
                }
                catch (InvalidOperationException)
                {
                    // couldn't convert to a standard format; fall back to the raw mix format
                }
            }
        }
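
A hedged usage sketch for this constructor, assuming the NAudio-style DataAvailable, StartRecording, and StopRecording members that this fork appears to expose; the three-second recording window is illustrative:

using System;
using System.Threading;
using NAudio.CoreAudioApi;
using NAudio.Wave;

// Capture from the default input endpoint using the shared-mode mix format
// selected by the constructor above.
var enumerator = new MMDeviceEnumerator();
MMDevice mic = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);

using (var capture = new WasapiCapture(mic))
{
    capture.DataAvailable += (s, e) =>
    {
        // e.Buffer holds e.BytesRecorded bytes in capture.WaveFormat.
        Console.WriteLine("Received {0} bytes", e.BytesRecorded);
    };
    capture.StartRecording();
    Thread.Sleep(3000); // record for roughly three seconds
    capture.StopRecording();
}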
Code Example #3
File: WasapiCapture.cs  Project: aljordan/NAJAudio
        private void DoRecording(AudioClient client)
        {
            Debug.WriteLine(String.Format("Client buffer frame count: {0}", client.BufferSize));
            int bufferFrameCount = client.BufferSize;

            // Calculate the actual duration of the allocated buffer.
            long actualDuration = (long)((double)REFTIMES_PER_SEC *
                             bufferFrameCount / WaveFormat.SampleRate);
            // Sleep for half the buffer duration so packets are drained well before the buffer fills.
            int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);

            AudioCaptureClient capture = client.AudioCaptureClient;
            client.Start();
            Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));
            while (!this.requestStop)
            {
                Thread.Sleep(sleepMilliseconds);
                ReadNextPacket(capture);
            }
        }
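
A standalone sketch of the sleep-interval arithmetic used above, with illustrative values (a 4800-frame buffer at 48 kHz); the REFTIMES constants are the usual 100-nanosecond units assumed in WASAPI samples:

using System;

const long REFTIMES_PER_SEC = 10000000;    // 100 ns units per second
const long REFTIMES_PER_MILLISEC = 10000;  // 100 ns units per millisecond

int bufferFrameCount = 4800;               // illustrative buffer size in frames
int sampleRate = 48000;                    // illustrative mix format sample rate

// Buffer duration in 100 ns units, then sleep for half of it (in milliseconds)
// so the loop drains packets roughly twice per buffer period.
long actualDuration = (long)((double)REFTIMES_PER_SEC * bufferFrameCount / sampleRate);
int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);

Console.WriteLine(sleepMilliseconds);      // prints 50 for these values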
Code Example #4
File: WasapiCapture.cs  Project: aljordan/NAJAudio
 private void CaptureThread(AudioClient client)
 {
     Exception exception = null;
     try
     {
         DoRecording(client);
     }
     catch (Exception e)
     {
         exception = e;
     }
     finally
     {
         client.Stop();
         // don't dispose - the AudioClient only gets disposed when WasapiCapture is disposed
     }
     captureThread = null;
     RaiseRecordingStopped(exception);
     Debug.WriteLine("Stop wasapi");
 }
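
The exception caught here is surfaced through RaiseRecordingStopped; below is a short sketch of observing it from calling code, assuming the NAudio-style RecordingStopped event with a StoppedEventArgs.Exception property:

using System;
using System.Threading;
using NAudio.CoreAudioApi;
using NAudio.Wave;

var enumerator = new MMDeviceEnumerator();
using (var capture = new WasapiCapture(
    enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console)))
{
    // Any exception thrown inside DoRecording ends up in e.Exception;
    // a clean stop reports null.
    capture.RecordingStopped += (s, e) =>
    {
        Console.WriteLine(e.Exception != null
            ? "Capture stopped due to: " + e.Exception.Message
            : "Capture stopped normally");
    };
    capture.StartRecording();
    Thread.Sleep(1000);
    capture.StopRecording();
}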
Code Example #5
File: WasapiCapture.cs  Project: aljordan/NAJAudio
 /// <summary>
 /// Stops recording, waits for the capture thread to finish, and disposes the audio client
 /// </summary>
 public void Dispose()
 {
     StopRecording();
     if (captureThread != null)
     {
         captureThread.Join();
         captureThread = null;
     }
     if (audioClient != null)
     {
         audioClient.Dispose();
         audioClient = null;
     }
 }
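
A sketch of explicit lifetime management with this Dispose, as an alternative to a using block; the device enumeration is an assumption based on the standard NAudio API. Because Dispose joins the capture thread before releasing the audio client, calling it while recording is still in progress is safe:

using NAudio.CoreAudioApi;
using NAudio.Wave;

var enumerator = new MMDeviceEnumerator();
var capture = new WasapiCapture(
    enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console));
capture.StartRecording();

// ... later, from the controlling thread:
// Dispose stops recording, waits for the capture thread to exit,
// and then disposes the AudioClient.
capture.Dispose();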
Code Example #6
File: WasapiOut.cs  Project: aljordan/NAJAudio
        /// <summary>
        /// Stops playback and disposes the audio client
        /// </summary>
        public void Dispose()
        {
            if (audioClient != null)
            {
                Stop();

                audioClient.Dispose();
                audioClient = null;
                renderClient = null;
            }
        }
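
A corresponding sketch for the output side, assuming the NAudio-style Init and Play members and BufferedWaveProvider; disposing mid-playback is safe here because Dispose calls Stop() before releasing the AudioClient:

using NAudio.CoreAudioApi;
using NAudio.Wave;

var enumerator = new MMDeviceEnumerator();
var device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Console);

var output = new WasapiOut(device, AudioClientShareMode.Shared, true, 200);
output.Init(new BufferedWaveProvider(new WaveFormat(44100, 2))); // silent placeholder source
output.Play();

// Stops playback, disposes the AudioClient, and clears the render client reference.
output.Dispose();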