        private unsafe void PlatformSubmitBuffer<T>(ReadOnlySpan<T> data, AudioDepth depth)
            where T : unmanaged
        {
            int dataByteCount = data.Length * sizeof(T);
            int sampleCount   = dataByteCount / ((int)depth / 8); // AudioDepth values are bit depths

            // The XAudio voice is always 16-bit, but callers may submit 16-bit integer or 32-bit float data.
            int bufferByteCount = sampleCount * sizeof(short);

            byte[] pooledBuffer = _pool.Rent(bufferByteCount);

            // Copy into a pooled buffer so the DataStream pins our copy, not the caller's buffer,
            // which the caller may modify or reuse after this call returns.
            if (depth == AudioDepth.Float)
            {
                // we need to convert to 16-bit
                var srcSpan = MemoryMarshal.Cast<T, float>(data);
                var dstSpan = MemoryMarshal.Cast<byte, short>(pooledBuffer.AsSpan(0, bufferByteCount));
                AudioLoader.ConvertSingleToInt16(srcSpan, dstSpan);
            }
            else
            {
                // the data was 16-bit, so just copy over
                var srcSpan = MemoryMarshal.AsBytes(data);
                srcSpan.CopyTo(pooledBuffer);
            }

            // Create a read-only stream over the pooled copy; any pinning applies to our copy, not the caller's data.
            var stream      = DataStream.Create(pooledBuffer, true, false, 0, true);
            var audioBuffer = new AudioBuffer(stream)
            {
                AudioBytes = bufferByteCount
            };

            _voice.SubmitSourceBuffer(audioBuffer, null);
            _queuedItems.Enqueue(new DataItem(audioBuffer, pooledBuffer));
        }
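
        // The DataItem type is not shown above. A minimal sketch of what it likely holds, based on how it
        // is constructed and queued here (the member names below are assumptions): the submitted AudioBuffer
        // together with the pooled byte[] backing it, so the array can be returned to the pool once XAudio2
        // has finished playing the buffer.
        private readonly struct DataItem
        {
            public readonly AudioBuffer AudioBuffer;
            public readonly byte[] PooledBuffer;

            public DataItem(AudioBuffer audioBuffer, byte[] pooledBuffer)
            {
                AudioBuffer  = audioBuffer;
                PooledBuffer = pooledBuffer;
            }
        }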
        private void PlatformSubmitBuffer<T>(ReadOnlySpan<T> data, AudioDepth depth)
            where T : unmanaged
        {
            // Rent a pooled AL buffer and fill it with the submitted data
            ALFormat alFormat = ALHelper.GetALFormat(_channels, depth);
            var      buffer   = ALBufferPool.Rent();

            buffer.BufferData(data, alFormat, _sampleRate);

            // Queue the buffer
            AL.SourceQueueBuffer(SourceId.Value, buffer.BufferId);
            ALHelper.CheckError("Failed to queue buffer.");

            lock (_queuedBuffers)
                _queuedBuffers.Enqueue(buffer);

            // If the source has run out of buffers, restart it
            var sourceState = AL.GetSourceState(SourceId.Value);

            if (_state == SoundState.Playing && sourceState == ALSourceState.Stopped)
            {
                AL.SourcePlay(SourceId.Value);
                ALHelper.CheckError("Failed to resume source playback.");
            }
        }
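
        // For completeness, a sketch of the matching reclaim step that a dynamic-buffer implementation
        // typically runs on update: unqueue buffers OpenAL has finished with and return them to the pool.
        // This assumes the AL wrapper exposes AL.GetSource with ALGetSourcei.BuffersProcessed, an
        // AL.SourceUnqueueBuffer call, and an ALBufferPool.Return counterpart to Rent — those member
        // names are assumptions, not confirmed by the code above.
        private void DequeueProcessedBuffers()
        {
            lock (_queuedBuffers)
            {
                // Assumed wrapper call: number of buffers the source has finished playing.
                AL.GetSource(SourceId.Value, ALGetSourcei.BuffersProcessed, out int processed);
                ALHelper.CheckError("Failed to get processed buffer count.");

                for (int i = 0; i < processed; i++)
                {
                    // Assumed wrapper call: unqueue the oldest processed buffer (FIFO order).
                    AL.SourceUnqueueBuffer(SourceId.Value);
                    ALHelper.CheckError("Failed to unqueue buffer.");

                    // Return the oldest queued buffer to the pool for reuse (Return is assumed).
                    ALBufferPool.Return(_queuedBuffers.Dequeue());
                }
            }
        }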
        public static ALFormat GetALFormat(AudioChannels channels, AudioDepth depth)
        {
            switch (channels)
            {
                case AudioChannels.Mono:
                    switch (depth)
                    {
                        case AudioDepth.Short: return ALFormat.Mono16;
                        case AudioDepth.Float: return ALFormat.MonoFloat32;
                    }
                    break;

                case AudioChannels.Stereo:
                    switch (depth)
                    {
                        case AudioDepth.Short: return ALFormat.Stereo16;
                        case AudioDepth.Float: return ALFormat.StereoFloat32;
                    }
                    break;

                default:
                    throw new ArgumentOutOfRangeException(nameof(channels), "Only mono and stereo channels are supported.");
            }
            throw new ArgumentOutOfRangeException(nameof(depth), "Audio format is not supported.");
        }
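
        // AudioDepth is referenced throughout but not shown. Given that PlatformSubmitBuffer divides
        // (int)depth by 8 to get bytes per sample, the enum values are presumably the sample bit depths.
        // A minimal sketch under that assumption:
        public enum AudioDepth
        {
            /// <summary>Signed 16-bit integer samples.</summary>
            Short = 16,

            /// <summary>32-bit floating-point samples.</summary>
            Float = 32
        }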
        internal static void AssertValidAudioDepth(AudioDepth depth)
        {
            if (depth != AudioDepth.Short &&
                depth != AudioDepth.Float)
            {
                throw new ArgumentOutOfRangeException(nameof(depth), "Invalid audio depth.");
            }
        }
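
        // A usage sketch of how a public entry point might tie these pieces together: validate the depth,
        // reject empty input, then hand the span to the platform-specific submit. The method name
        // SubmitBuffer is an assumption for illustration, and AssertValidAudioDepth is assumed to be
        // accessible from here (it may live on a separate helper class).
        public void SubmitBuffer<T>(ReadOnlySpan<T> data, AudioDepth depth)
            where T : unmanaged
        {
            AssertValidAudioDepth(depth);

            if (data.IsEmpty)
                throw new ArgumentException("Buffer may not be empty.", nameof(data));

            PlatformSubmitBuffer(data, depth);
        }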