Code Example #1
File: WasApiLayer.cs Project: Cryru/Emotion
        /// <summary>
        /// Set the audio device the layer will output into.
        /// </summary>
        public void SetDevice(WasApiAudioDevice device)
        {
            if (device == null)
            {
                _layerContext = null;
                return;
            }

            _layerContext         = device.CreateLayerContext(out uint bufferSize);
            _bufferLengthInFrames = (int)bufferSize;
        }
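A minimal usage sketch for the method above. The `layer` and `audioContext` variables are hypothetical stand-ins for however the host obtains them (Code Example #3 reads the device from `_parent.DefaultDevice`); only `SetDevice` itself comes from the example:

        // Hypothetical caller: route the layer to the current default device,
        // or detach it entirely by passing null.
        layer.SetDevice(audioContext.DefaultDevice); // assumed accessor
        layer.SetDevice(null); // clears _layerContext; the layer stops outputting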
Code Example #2
        /// <summary>
        /// Late initialization for the WasApi backend's representation of this device.
        /// Todo: Catch and handle all the possible COM exceptions here :/
        /// </summary>
        public WasApiLayerContext CreateLayerContext(out uint bufferSize)
        {
            var context = new WasApiLayerContext(this);

            // Activate the device.
            int error       = ComHandle.Activate(ref IdAudioClient, ClsCtx.ALL, IntPtr.Zero, out object audioDevice);
            var audioClient = (IAudioClient)audioDevice;

            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't activate audio device of name {Name}.", true);
            }

            context.AudioClient = audioClient;

            // Get device format.
            error = audioClient.GetMixFormat(out IntPtr deviceFormat);
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't detect the mix format of the audio client of {Name}.", true);
            }
            var audioClientFormat = Marshal.PtrToStructure<WaveFormat>(deviceFormat);

            if (audioClientFormat!.ExtraSize >= 22)
            {
                audioClientFormat = Marshal.PtrToStructure<WaveFormatExtensible>(deviceFormat);
            }
            context.AudioClientFormat = audioClientFormat!.ToEmotionFormat();

            long ticks = TimeSpan.FromMilliseconds(AudioLayer.BackendBufferExpectedAhead).Ticks;

            error = audioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.None, ticks, 0, deviceFormat, Guid.Empty);
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't initialize the audio client of device {Name}. Mix format is of the {audioClientFormat.Tag} type.", true);
            }

            error = audioClient.GetBufferSize(out bufferSize);
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't get device {Name} buffer size.", true);
            }

            error = audioClient.GetService(IdAudioRenderClient, out object audioRenderClient);
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't get the audio render client for device {Name}.", true);
            }
            context.RenderClient = (IAudioRenderClient)audioRenderClient;

            return context;
        }
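For reference, the `ExtraSize >= 22` branch above distinguishes a plain WAVEFORMATEX from a WAVEFORMATEXTENSIBLE: 22 bytes is exactly the size of the extra fields the extensible variant appends. A sketch of what the two interop types could look like, mirroring the Win32 layout (field names are illustrative, not the actual Emotion declarations):

        using System;
        using System.Runtime.InteropServices;

        // Sketch of the interop types the marshalling above selects between.
        [StructLayout(LayoutKind.Sequential, Pack = 2)]
        public class WaveFormat
        {
            public ushort Tag;           // wFormatTag
            public ushort Channels;      // nChannels
            public uint SampleRate;      // nSamplesPerSec
            public uint BytesPerSecond;  // nAvgBytesPerSec
            public ushort BlockAlign;    // nBlockAlign
            public ushort BitsPerSample; // wBitsPerSample
            public ushort ExtraSize;     // cbSize: 22 for the extensible variant
        }

        [StructLayout(LayoutKind.Sequential, Pack = 2)]
        public class WaveFormatExtensible : WaveFormat
        {
            public ushort ValidBitsPerSample; // wValidBitsPerSample
            public uint ChannelMask;          // dwChannelMask (speaker positions)
            public Guid SubFormat;            // e.g. KSDATAFORMAT_SUBTYPE_IEEE_FLOAT
        }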
Code Example #3
        private void LayerThread()
        {
            if (Thread.CurrentThread.Name == null)
            {
                Thread.CurrentThread.Name = $"Audio Layer - {Name}";
            }
            Engine.Log.Trace($"Layer {Name} started.", MessageSource.Audio);
            while (_alive && Engine.Status != EngineStatus.Stopped)
            {
                // Check if the device has changed.
                if (_updateDevice)
                {
                    SetDevice(_parent.DefaultDevice);
                    _updateDevice = false;
                }

                // If not playing, wait for it to start playing.
                if (Status != PlaybackStatus.Playing)
                {
                    _playWait.WaitOne();
                    continue;
                }

                if (_playlist.Count == 0 || _currentTrack == -1 || _currentTrack > _playlist.Count - 1)
                {
                    Debug.Assert(false);
                }

                try
                {
                    // Get the number of frames the buffer can hold total.
                    var frameCount = (int)_layerContext.BufferSize;

                    // Check if the context is initialized.
                    if (!_layerContext.Initialized)
                    {
                        FillBuffer(_layerContext.RenderClient, (int)_layerContext.BufferSize);
                        _layerContext.Start();
                    }

                    // Start if not started.
                    if (!_layerContext.Started)
                    {
                        _layerContext.Start();
                    }

                    // Wait until more of the buffer is requested.
                    bool success = _layerContext.WaitHandle.WaitOne(_layerContext.TimeoutPeriod);
                    if (!success)
                    {
                        Engine.Log.Warning($"Layer {Name} audio context timeout.", MessageSource.WasApi);
                        continue;
                    }

                    // Get more frames.
                    int error = _layerContext.AudioClient.GetCurrentPadding(out int padding);
                    if (error != 0)
                    {
                        Engine.Log.Warning($"Couldn't get device padding, error {error}.", MessageSource.WasApi);
                    }
                    if (!FillBuffer(_layerContext.RenderClient, frameCount - padding))
                    {
                        continue;
                    }
                }
                catch (COMException ex)
                {
                    // Audio device has disappeared or whatever.
                    if ((uint)ex.ErrorCode == 0x88890004) // AUDCLNT_E_DEVICE_INVALIDATED
                    {
                        _updateDevice = true;
                        continue;
                    }

                    Engine.Log.Error(ex.ToString(), MessageSource.WasApi);
                }

                // If done, reset the audio client.
                Task.Delay(_layerContext.TimeoutPeriod).Wait();
                _layerContext.Stop();
                _layerContext.Reset();
            }

            Engine.Log.Trace($"Layer {Name} exited.", MessageSource.Audio);
            _layerContext.Stop();
            _layerContext = null;
        }
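A hedged sketch of the plumbing a loop like this needs; the `_alive` and `_playWait` fields match what the loop reads, while the start/stop helpers themselves are assumptions, not the project's code:

        // Hypothetical host plumbing for the render loop above.
        private Thread _thread;
        private volatile bool _alive;
        private readonly EventWaitHandle _playWait = new EventWaitHandle(false, EventResetMode.AutoReset);

        private void StartLayerThread()
        {
            _alive = true;
            _thread = new Thread(LayerThread) { IsBackground = true };
            _thread.Start();
        }

        private void StopLayerThread()
        {
            _alive = false;
            _playWait.Set(); // wake a paused loop so it can observe _alive and exit
        }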
Code Example #4
        /// <summary>
        /// Late initialization for the WasApi backend's representation of this device.
        /// </summary>
        public WasApiLayerContext CreateLayerContext()
        {
            var context = new WasApiLayerContext(this);

            // Activate the device.
            int error       = ComHandle.Activate(ref IdAudioClient, ClsCtx.ALL, IntPtr.Zero, out object audioDevice);
            var audioClient = (IAudioClient)audioDevice;

            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't activate audio device of name {Name}.", true);
            }

            context.AudioClient = audioClient;

            // Get device format.
            error = audioClient.GetMixFormat(out IntPtr deviceFormat);
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't detect the mix format of the audio client of {Name}.", true);
            }

            var audioClientFormat = Marshal.PtrToStructure<WaveFormat>(deviceFormat);

            if (audioClientFormat.ExtraSize >= 22)
            {
                audioClientFormat = Marshal.PtrToStructure<WaveFormatExtensible>(deviceFormat);
            }
            context.AudioClientFormat = audioClientFormat.ToEmotionFormat();

            error = audioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.EventCallback, 0, 0, deviceFormat, Guid.Empty);
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't initialize the audio client of device {Name}. Mix format is of the {audioClientFormat.Tag} type.", true);
            }

            // Get data.
            error = audioClient.GetDevicePeriod(out long _, out long minPeriod);
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't get device {Name} period.", true);
            }
            context.UpdatePeriod = minPeriod;

            error = audioClient.GetBufferSize(out context.BufferSize);
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't get device {Name} buffer size.", true);
            }

            // Set wait handle for when the client is ready to process a buffer.
            context.WaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
            error = audioClient.SetEventHandle(context.WaitHandle.SafeWaitHandle.DangerousGetHandle());
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't set audio wait handle for device {Name}.", true);
            }

            error = audioClient.GetService(IdAudioRenderClient, out object audioRenderClient);
            if (error != 0)
            {
                Win32Platform.CheckError($"Couldn't get the audio render client for device {Name}.", true);
            }

            context.RenderClient = (IAudioRenderClient)audioRenderClient;

            return context;
        }
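The key difference from Code Example #2 is the `AudioClientStreamFlags.EventCallback` flag paired with `SetEventHandle`: WASAPI signals the event whenever it is ready for more frames, so a render loop can block on the handle instead of polling. A simplified, hypothetical consumer of the context created above (Code Example #3 is the full version; `running`, `timeoutMs`, and `FillBuffer` are assumed to exist):

        // Hypothetical event-driven render loop for the context built above.
        WasApiLayerContext context = device.CreateLayerContext();
        while (running)
        {
            // Block until the device signals that buffer space is free.
            if (!context.WaitHandle.WaitOne(timeoutMs)) continue;

            // Fill only the frames the device has already played through.
            context.AudioClient.GetCurrentPadding(out int padding);
            FillBuffer(context.RenderClient, (int)context.BufferSize - padding);
        }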
Code Example #5
        private void SetDevice(WasApiAudioDevice device)
        {
            _layerContext = device.CreateLayerContext();
        }
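Unlike Code Example #1, this variant does not guard against a null device. A defensive version (hypothetical, not from the project) would mirror the earlier null check with a conditional call:

        // Hypothetical null-safe rewrite of the method above.
        private void SetDevice(WasApiAudioDevice device)
        {
            _layerContext = device?.CreateLayerContext();
        }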