Example #1
 // use sound buffer
 private static void UseSoundBuffer(int SoundBufferIndex)
 {
     if (OpenAlContext != ContextHandle.Zero)
     {
         if (SoundBufferIndex >= 0)
         {
             if (!SoundBuffers[SoundBufferIndex].TriedLoading)
             {
                 SoundBuffers[SoundBufferIndex].TriedLoading = true;
                 if (!SoundBuffers[SoundBufferIndex].OpenAlBufferIndex.Valid)
                 {
                     try{
                         WaveParser.WaveData data = WaveParser.LoadFromFile(SoundBuffers[SoundBufferIndex].FileName);
                         data = WaveParser.ConvertToMono8Or16(data);
                         if (data.Format.BitsPerSample == 8)
                         {
                             int buffer;
                             AL.GenBuffers(1, out buffer);
                             AL.BufferData(buffer, ALFormat.Mono8, data.Bytes, data.Bytes.Length, data.Format.SampleRate);
                             SoundBuffers[SoundBufferIndex].OpenAlBufferIndex = new OpenAlIndex(buffer, true);
                             SoundBuffers[SoundBufferIndex].Duration          = (double)data.Bytes.Length / (double)(data.Format.SampleRate);
                         }
                         else if (data.Format.BitsPerSample == 16)
                         {
                             int buffer;
                             AL.GenBuffers(1, out buffer);
                             AL.BufferData(buffer, ALFormat.Mono16, data.Bytes, data.Bytes.Length, data.Format.SampleRate);
                             SoundBuffers[SoundBufferIndex].OpenAlBufferIndex = new OpenAlIndex(buffer, true);
                             SoundBuffers[SoundBufferIndex].Duration          = (double)data.Bytes.Length / (double)(2 * data.Format.SampleRate);
                         }
                         else
                         {
                             SoundBuffers[SoundBufferIndex].OpenAlBufferIndex = new OpenAlIndex(0, false);
                         }
                     } catch {
                         SoundBuffers[SoundBufferIndex].OpenAlBufferIndex = new OpenAlIndex(0, false);
                     }
                 }
             }
         }
     }
 }
Example #2
        public AudioStream LoadAudioOggVorbis(Stream stream, string name = null)
        {
            var vorbis = _readOggVorbis(stream);

            var buffer = AL.GenBuffer();

            ALFormat format;

            // NVorbis only supports loading into floats.
            // If this becomes a problem due to missing extension support (doubt it but ok),
            // check the git history, I originally used libvorbisfile which worked and loaded 16 bit LPCM.
            if (vorbis.Channels == 1)
            {
                format = ALFormat.MonoFloat32Ext;
            }
            else if (vorbis.Channels == 2)
            {
                format = ALFormat.StereoFloat32Ext;
            }
            else
            {
                throw new InvalidOperationException("Unable to load audio with more than 2 channels.");
            }

            unsafe
            {
                fixed(float *ptr = vorbis.Data.Span)
                {
                    AL.BufferData(buffer, format, (IntPtr)ptr, vorbis.Data.Length * sizeof(float),
                                  (int)vorbis.SampleRate);
                }
            }

            _checkAlError();

            var handle = new ClydeHandle(_audioSampleBuffers.Count);

            _audioSampleBuffers.Add(new LoadedAudioSample(buffer));
            var length = TimeSpan.FromSeconds(vorbis.TotalSamples / (double)vorbis.SampleRate);

            return(new AudioStream(handle, length, (int)vorbis.Channels, name));
        }
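Example #2 uploads 32-bit float PCM, which relies on the AL_EXT_FLOAT32 buffer formats. A caller may want to verify the extension before choosing MonoFloat32Ext/StereoFloat32Ext; a minimal sketch using OpenTK's AL.IsExtensionPresent (the exception type and message are illustrative):

            // Guard against drivers that lack the float32 buffer formats.
            if (!AL.IsExtensionPresent("AL_EXT_FLOAT32"))
            {
                throw new NotSupportedException("This OpenAL driver does not expose AL_EXT_FLOAT32.");
            }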
Example #3
        public static int LoadAudio(string filename)
        {
            if (String.IsNullOrEmpty(filename))
            {
                throw new ArgumentException("Filename must not be null or empty.", nameof(filename));
            }

            int buffer;

            audioDictionary.TryGetValue(filename, out buffer);

            if (buffer == 0)
            {
                buffer = AL.GenBuffer();

                int    channels, bits_per_sample, sample_rate;
                byte[] sound_data = LoadWave(
                    File.Open(filename, FileMode.Open),
                    out channels,
                    out bits_per_sample,
                    out sample_rate
                    );
                ALFormat sound_format =
                    channels == 1 && bits_per_sample == 8 ? ALFormat.Mono8 :
                    channels == 1 && bits_per_sample == 16 ? ALFormat.Mono16 :
                    channels == 2 && bits_per_sample == 8 ? ALFormat.Stereo8 :
                    channels == 2 && bits_per_sample == 16 ? ALFormat.Stereo16 :
                    (ALFormat)0; // unknown
                AL.BufferData(buffer, sound_format, sound_data, sound_data.Length, sample_rate);
                if (AL.GetError() != ALError.NoError)
                {
                    // respond to load error etc.
                    Debug.Assert(false, "Failed to load audio"); // the assert fires only when the condition is false
                }
                else
                {
                    audioDictionary.Add(filename, buffer);
                }
            }

            return(buffer); // note: even on a load error this returns the generated buffer id, not 0
        }
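The channels/bits-per-sample mapping above recurs in several later examples (#13, #21, #22, #28). It can be factored into a small helper; a sketch against OpenTK's ALFormat, using the GetSoundFormat name that Examples #8 and #17 already call:

        // Maps a channel count and bit depth to an ALFormat; returns null for unsupported combinations.
        private static ALFormat? GetSoundFormat(int channels, int bitsPerSample)
        {
            switch (channels)
            {
                case 1: return bitsPerSample == 8 ? ALFormat.Mono8 : bitsPerSample == 16 ? ALFormat.Mono16 : (ALFormat?)null;
                case 2: return bitsPerSample == 8 ? ALFormat.Stereo8 : bitsPerSample == 16 ? ALFormat.Stereo16 : (ALFormat?)null;
                default: return null;
            }
        }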
Example #4
 public bool FillBuffer(OggStream stream, int bufferId)
 {
     OggStreamer.decodeLock.EnterWriteLock();
     if (stream.IsDisposed)
     {
         OggStreamer.decodeLock.ExitWriteLock();
         return(true);
     }
     else
     {
         int num = stream.Reader.ReadSamples(this.readSampleBuffer, 0, this.BufferSize);
         for (int index = 0; index < num; ++index)
         {
             this.castBuffer[index] = (short)((double)short.MaxValue * (double)this.readSampleBuffer[index]);
         }
         AL.BufferData <short>(bufferId, stream.Reader.Channels == 1 ? ALFormat.Mono16 : ALFormat.Stereo16, this.castBuffer, num * 2, stream.Reader.SampleRate);
         OggStreamer.decodeLock.ExitWriteLock();
         return(num != this.BufferSize);
     }
 }
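The float-to-short conversion in Example #4 can overflow for decoded samples outside [-1, 1], which Vorbis output occasionally produces. A clamped variant of the loop (a sketch reusing the example's fields):

         // Clamp to [-1, 1] before scaling so out-of-range samples saturate instead of wrapping.
         for (int index = 0; index < num; ++index)
         {
             float sample = Math.Max(-1f, Math.Min(1f, this.readSampleBuffer[index]));
             this.castBuffer[index] = (short)(sample * short.MaxValue);
         }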
Example #5
            public unsafe void WriteBuffer(uint handle, ReadOnlySpan <ushort> data)
            {
                _checkDisposed();

                if (_float)
                {
                    throw new InvalidOperationException("Can't write ushort numbers to buffers when buffer type is float!");
                }

                if (handle >= BufferHandles.Length)
                {
                    throw new ArgumentOutOfRangeException(nameof(handle),
                                                          $"Got {handle}. Expected less than {BufferHandles.Length}");
                }

                fixed (ushort* ptr = data)
                {
                    AL.BufferData(BufferHandles[handle], _mono ? ALFormat.Mono16 : ALFormat.Stereo16, (IntPtr)ptr,
                                  _mono ? data.Length / 2 * sizeof(ushort) : data.Length * sizeof(ushort), SampleRate);
                }
            }
Example #6
 public void RegisterSoundEffect(SoundEffect soundEffect)
 {
     if (this.allocatedBuffers.ContainsKey(soundEffect))
     {
         return;
     }
     if (this.freeBuffers.Count == 0)
     {
         this.ExpandBuffers();
     }
     Trace.WriteLine("[OpenAL] Pre-allocating buffer for " + soundEffect.Name);
     OpenALSoundController.BufferAllocation bufferAllocation;
     this.allocatedBuffers.Add(soundEffect, bufferAllocation = new OpenALSoundController.BufferAllocation()
     {
         BufferId    = this.freeBuffers.Pop(),
         SinceUnused = -1f
     });
     AL.BufferData <byte>(bufferAllocation.BufferId, soundEffect.Format, soundEffect._data, soundEffect.Size, soundEffect.Rate);
     ALHelper.Check();
 }
Example #7
        private void QueueBuffer(int buffer)
        {
            if (!_needsrefill)
            {
                return;
            }
            int len = (Speed > 0 ? _stream.ReadBuffer() : _stream.ReadBufferReversed());

            if (len > 0)
            {
                AL.BufferData(buffer, _stream.Channels == 1 ? ALFormat.Mono16 : ALFormat.Stereo16, _stream.Buffer, len * sizeof(short), _stream.SampleRate);
                AudioDevice.Check();
                AL.SourceQueueBuffer(_alsourceid, buffer);
                AudioDevice.Check();
            }
            if (len != _stream.SamplesPerBuffer)//we've reached the end
            {
                _needsrefill = false;
            }
        }
Example #8
        protected static Sound LoadWaveFileAbsolute(string title, string file_path)
        {
            if (!System.IO.File.Exists(file_path))
            {
                throw new FileNotFoundException("File missing: ", file_path);
            }
            int buffer_id = AL.GenBuffer();
            int source_id = AL.GenSource();
//            if(XRam.IsInitialized)
//                XRam.SetBufferMode(1, ref buffer_id, XRamExtension.XRamStorage.Hardware);

            int channels, bits_per_sample, sample_rate;

            byte[] sound_data = LoadWave(File.Open(file_path, FileMode.Open), out channels, out bits_per_sample, out sample_rate);
            AL.BufferData(buffer_id, GetSoundFormat(channels, bits_per_sample), sound_data, sound_data.Length, sample_rate);
            AL.Source(source_id, ALSourcei.Buffer, buffer_id);
            double duration = (double)sound_data.Length / (double)(sample_rate * channels * bits_per_sample / 8);

            return((Sound)(Sounds[title] = new Sound(buffer_id, source_id, duration)));
        }
Example #9
        public void Enqueue(string id, long packNumber, SoundPack pack)
        {
            if (string.IsNullOrEmpty(id))
            {
                throw new ArgumentException("id");
            }

            if (!IsInited)
            {
                return;
            }

            lock (_syncObject)
            {
                SourceDescription source;
                if (!_sources.TryGetValue(id, out source))
                {
                    int sourceId = AL.GenSource();
                    source = new SourceDescription(sourceId);
                    _sources.Add(id, source);
                }

                if (source.LastPlayedNumber > packNumber)
                {
                    return;
                }

                source.LastPlayedNumber = packNumber;

                int bufferId = AL.GenBuffer();
                AL.BufferData(bufferId, source.GetFormat(pack), pack.Data, pack.Data.Length, pack.Frequency);
                AL.SourceQueueBuffer(source.Id, bufferId);

                if (AL.GetSourceState(source.Id) != ALSourceState.Playing)
                {
                    AL.SourcePlay(source.Id);
                }

                ClearBuffers(source, 0);
            }
        }
Example #10
 public void SubmitBuffer(byte[] buffer, int offset, int count)
 {
     if (bufferIdsToFill != null)
     {
         // Copy from 'offset' so the caller-supplied offset is honored before uploading.
         byte[] segment = buffer;
         if (offset != 0)
         {
             segment = new byte[count];
             Array.Copy(buffer, offset, segment, 0, count);
         }
         AL.BufferData(bufferIdsToFill [currentBufferToFill], format, segment, count, sampleRate);
         AL.SourceQueueBuffer(sourceId, bufferIdsToFill [currentBufferToFill]);
         currentBufferToFill++;
         if (currentBufferToFill >= bufferIdsToFill.Length)
         {
             bufferIdsToFill = null;
         }
         else
         {
             OnBufferNeeded(EventArgs.Empty);
         }
     }
     else
     {
         throw new Exception("Buffer already full.");
     }
 }
Example #11
            public static unsafe uint BufferFromWav(byte[] wave)
            {
                uint result;

                AL.GenBuffers(1, out result);

                byte[]   data;
                ALFormat format;
                uint     sampleRate;

                AL.Utils.LoadWav(wave, out data, out format, out sampleRate);

                fixed(byte *dataPtr = &data[0])
                {
                    IntPtr dataIntPtr = new IntPtr(dataPtr);

                    AL.BufferData(result, format, dataIntPtr, data.Length, (int)sampleRate);
                }

                return(result);
            }
Example #12
        private void Initialize(string filePath)
        {
            using (var fileStream = File.OpenRead(filePath))
            {
                using (var binaryReader = new BinaryReader(fileStream, Encoding.Unicode))
                {
                    int      channel;
                    int      bit;
                    int      rate;
                    ALFormat format;
                    byte[]   data;
                    int      dataLength;
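                    // Note: the parsing below assumes a canonical 44-byte RIFF/WAVE header
                    // (fmt chunk at offset 12, data chunk immediately after it); files with
                    // extra chunks (e.g. LIST) will be read incorrectly.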

                    binaryReader.ReadBytes(22);
                    channel = binaryReader.ReadUInt16();
                    rate    = binaryReader.ReadInt32();
                    binaryReader.ReadBytes(6);
                    bit = binaryReader.ReadUInt16();
                    if (channel == 1)
                    {
                        format = bit == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
                    }
                    else
                    {
                        format = bit == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;
                    }
                    binaryReader.ReadBytes(4);
                    dataLength = binaryReader.ReadInt32();
                    data       = binaryReader.ReadBytes(dataLength);
                    BufferId   = AL.GenBuffer();
                    AL.BufferData(BufferId, format, data, data.Length, rate);
                    SourceIndex = 0;
                    SourceIds   = AL.GenSources(SourceIdMax);
                    for (int i = 0; i < SourceIds.Length; i++)
                    {
                        AL.Source(SourceIds[i], ALSourcei.Buffer, BufferId);
                    }
                }
            }
        }
Example #13
        /// <summary>
        /// Loads the provided audio file into game storage. If it has already been loaded, the existing buffer is returned.
        /// </summary>
        /// <param name="filename">File to load from</param>
        /// <returns>The OpenAL buffer id for the audio (or the existing buffer id if already loaded)</returns>
        public static int LoadAudio(string filename)
        {
            int buffer;

            audioDictionary.TryGetValue(filename, out buffer);
            if (buffer == 0)
            {
                try
                {
                    int channels, bits_per_sample, sample_rate;
                    // reserve a Handle for the audio file
                    buffer = AL.GenBuffer();

                    // Load a .wav file from disk.
                    //int channels, bits_per_sample, sample_rate;
                    byte[] sound_data = LoadWave(
                        File.Open(filename, FileMode.Open),
                        out channels,
                        out bits_per_sample,
                        out sample_rate);
                    ALFormat sound_format =
                        channels == 1 && bits_per_sample == 8 ? ALFormat.Mono8 :
                        channels == 1 && bits_per_sample == 16 ? ALFormat.Mono16 :
                        channels == 2 && bits_per_sample == 8 ? ALFormat.Stereo8 :
                        channels == 2 && bits_per_sample == 16 ? ALFormat.Stereo16 :
                        (ALFormat)0; // unknown
                    AL.BufferData(buffer, sound_format, sound_data, sound_data.Length, sample_rate);
                    ALError error = AL.GetError();
                    if (error != ALError.NoError)
                    {
                        Console.WriteLine(error);
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.ToString());
                }
            }
            return(buffer);
        }
Example #14
        public void Load(Stream stream)
        {
            context = new AudioContext();
            int state;

            buffer = AL.GenBuffer();
            source = AL.GenSource();

            WaveData waveFile = new WaveData(stream);

            AL.BufferData(buffer, waveFile.SoundFormat, waveFile.SoundData, waveFile.SoundData.Length, waveFile.SampleRate);

            AL.Source(source, ALSourcei.Buffer, buffer);
            AL.SourcePlay(source);

            Console.WriteLine("Playing");

            // Query the source to find out when it stops playing.
            do
            {
                Thread.Sleep(250);
                Console.Write(".");
                AL.GetSource(source, ALGetSourcei.SourceState, out state);
            }while ((ALSourceState)state == ALSourceState.Playing);

            Console.WriteLine("FIN");

            AL.SourceStop(source);
            AL.DeleteSource(source);
            AL.DeleteBuffer(buffer);

            /*        waveFile.dispose();
             *
             *      AL.Source(source, ALSourcei.Buffer, buffer);
             *      AL.Source(source, ALSourceb.Looping, true);
             *      AL.GenSources(source);
             *
             *      Console.WriteLine($"context {AudioContext.AvailableDevices.Count > 0}");
             *      Console.WriteLine($"GENERATING WAV SRC {source} {waveFile.SoundFormat} {waveFile.SampleRate}");*/
        }
Example #15
        public void MainLoop(int samples, bool reverse)
        {
            unsafe
            {
                if (reverse)
                {
                    short temp;
                    for (int i = 0; i < samples / 2; i++)
                    {
                        temp                    = buffer[i];
                        buffer[i]               = buffer[samples - 1 - i];
                        buffer[samples - 1 - i] = temp;
                    }
                }
                int processedBuf = -1;
                if (processed > 0)
                {
                    AL.SourceUnqueueBuffers(sourceName, 1, ref processedBuf);
                }
                else
                {
                    processedBuf = AL.GenBuffer();
                    activeBuffers.Add(processedBuf);
                }

                fixed(short *bufferPtr = buffer)
                AL.BufferData(processedBuf, ALFormat.Mono16, (IntPtr)bufferPtr, samples * 2, sampleRate);

                AL.SourceQueueBuffer(sourceName, processedBuf);

                if (AL.GetSourceState(sourceName) != ALSourceState.Playing)
                {
                    AL.SourcePlay(sourceName);
                    if (frame > frameDelay) // If the buffer runs out of data, frames are being pushed too slowly; by the time that happens the audio is already bad, so this isn't a good place to detect it.
                    {
                        tooSlow = true;
                    }
                }
            }
        }
Example #16
        public void BufferData <T>(
            ReadOnlySpan <T> data, ALFormat format, int sampleRate, int sampleAlignment = 0)
            where T : unmanaged
        {
            AssertNotDisposed();

            var controller = ALController.Get();

            if (!controller.SupportsFloat32 && (format == ALFormat.MonoFloat32 || format == ALFormat.StereoFloat32))
            {
                throw new InvalidOperationException("Float data is not supported by this OpenAL driver.");
            }

            if (!controller.SupportsAdpcm && (format == ALFormat.MonoMSAdpcm || format == ALFormat.StereoMSAdpcm))
            {
                throw new InvalidOperationException("MS-ADPCM is not supported by this OpenAL driver.");
            }

            if (!controller.SupportsIma4 && (format == ALFormat.MonoIma4 || format == ALFormat.StereoIma4))
            {
                throw new InvalidOperationException("IMA/ADPCM is not supported by this OpenAL driver.");
            }

            if (BufferId != 0)
            {
                ClearBuffer();
            }

            BufferId = AL.GenBuffer();
            ALHelper.CheckError("Failed to generate OpenAL data buffer.");

            if (sampleAlignment > 0)
            {
                AL.Bufferi(BufferId, ALBufferi.UnpackBlockAlignmentSoft, sampleAlignment);
                ALHelper.CheckError("Failed to set buffer alignment.");
            }

            AL.BufferData(BufferId, format, MemoryMarshal.AsBytes(data), sampleRate);
            ALHelper.CheckError("Failed to fill buffer.");
        }
Example #17
        /// <summary>
        /// (Internal) Constructor.
        /// </summary>
        /// <param name="waveFileBytes">Bytes for a PCM Wave file to use for this sound</param>
        internal AudioPlayerSound(byte[] waveFileBytes)
        {
            Buffer  = AL.GenBuffer();
            IsValid = true;

            using (MemoryStream ms = new MemoryStream(waveFileBytes))
            {
                byte[] soundData = LoadWave(ms, out int channels, out int bitsPerSample, out int sampleRate);
                if (soundData == null)
                {
                    IsValid = false; Dispose(); return;
                }

                ALFormat? soundFormat = GetSoundFormat(channels, bitsPerSample);
                if (!soundFormat.HasValue)
                {
                    IsValid = false; Dispose(); return;
                }

                AL.BufferData(Buffer, soundFormat.Value, soundData, soundData.Length, sampleRate);
            }
        }
Example #18
        private void Update(int channelCount, int sampleRate)
        {
            // Check parameters
            if (channelCount <= 0 || sampleRate <= 0)
            {
                throw new ArgumentException("channelCount and sampleRate must be greater than zero.");
            }

            // Find the best format according to the number of channels
            var format = AudioDevice.GetFormat(channelCount);

            if (format == 0)
            {
                throw new Exception("Failed to load sound buffer (unsupported number of channels: " + channelCount.ToString() + ")");
            }

            // First make a copy of the list of sounds so we can reattach later
            var sounds = new List <Sound>(_sounds);

            // Detach the buffer from the sounds that use it (to avoid OpenAL errors)
            foreach (var sound in sounds)
            {
                sound.ResetBuffer();
            }

            // fill the buffer
            int size = _samples.Length * sizeof(short);

            ALChecker.Check(() => AL.BufferData(_buffer, format, _samples, size, sampleRate));

            // Compute the duration
            _duration = TimeSpan.FromSeconds((float)_samples.Length / sampleRate / channelCount);

            // Now reattach the buffer to the sounds that use it
            foreach (var sound in sounds)
            {
                sound.Buffer = this;
            }
        }
Example #19
        /// <summary>
        /// Plays back raw audio data
        /// </summary>
        /// <param name="unencodedData">Raw byte data</param>
        /// <param name="recordingFormat">Format of the data</param>
        /// <param name="sampleFrequency">Sample rate of the data</param>
        public void PlayFile(byte[] unencodedData, ALFormat recordingFormat, int sampleFrequency)
        {
            int[] uiBuffers = new int[4];
            int   uiSource;

            uiBuffers = AL.GenBuffers(uiBuffers.Length);

            uiSource = AL.GenSource();

            alError = AL.GetError();

            AL.BufferData(uiBuffers[0], recordingFormat, unencodedData, unencodedData.Length, sampleFrequency);

            alError = AL.GetError();

            AL.SourceQueueBuffers(uiSource, 1, uiBuffers);
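            // Only uiBuffers[0] was filled, and only that single buffer is queued above;
            // the other three generated buffers are never used.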

            alError = AL.GetError();
            AL.SourcePlay(uiSource);

            alError = AL.GetError();
        }
Example #20
        private static void PrepareOpenAL()
        {
            audioDevice  = ALC.OpenDevice(null);
            audioContext = ALC.CreateContext(audioDevice, null);

            if (!ALC.MakeContextCurrent(audioContext))
            {
                throw new InvalidOperationException("Unable to make context current");
            }

            Console.WriteLine("Created audio context.");

            CheckALErrors();

            const string AudioFile = "Audio.raw";

            if (!File.Exists(AudioFile))
            {
                Console.WriteLine($"No {AudioFile} found.");
                return;
            }

            //Buffer
            AL.GenBuffer(out uint bufferId);

            byte[] data = File.ReadAllBytes(AudioFile);

            AL.BufferData(bufferId, BufferFormat.Stereo16, data, data.Length, 44100);

            //Source
            AL.GenSource(out uint sourceId);

            AL.Source(sourceId, SourceInt.Buffer, (int)bufferId);
            AL.Source(sourceId, SourceBool.Looping, true);

            AL.SourcePlay(sourceId);

            Console.WriteLine($"Played {AudioFile}.");
        }
Example #21
        public void LoadSound(Stream stream)
        {
            int channels;
            int bitsPerSample;
            int sampleRate;

            byte[] data = IOUtils.LoadSound(stream, out channels, out bitsPerSample, out sampleRate);

            ALFormat format =
                channels == 1 && bitsPerSample == 8 ? ALFormat.Mono8 :
                channels == 1 && bitsPerSample == 16 ? ALFormat.Mono16 :
                channels == 2 && bitsPerSample == 8 ? ALFormat.Stereo8 :
                channels == 2 && bitsPerSample == 16 ? ALFormat.Stereo16 :
                (ALFormat)0; // unknown

            int buffer = AL.GenBuffer();

            AL.BufferData(buffer, format, data, data.Length, sampleRate);

            sound = AL.GenSource();

            AL.Source(sound, ALSourcei.Buffer, buffer);

            // The buffer must stay alive while it is attached to the source; deleting it
            // here fails because the buffer is still in use. Delete it when the sound is unloaded.

            initialized = true;
        }
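A sketch of the corresponding cleanup, assuming the buffer id is kept in a field alongside 'sound' (names follow Example #21; this is illustrative, not part of the original):

        // Detach the buffer from the source before deleting either object.
        AL.SourceStop(sound);
        AL.Source(sound, ALSourcei.Buffer, 0); // 0 (AL_NONE) detaches the current buffer
        AL.DeleteSource(sound);
        AL.DeleteBuffer(buffer);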
Example #22
        public static int LoadSound(string filename)
        {
            if (String.IsNullOrEmpty(filename))
            {
                throw new ArgumentException("Filename must not be null or empty.", nameof(filename));
            }

            int sound;

            soundDictionary.TryGetValue(filename, out sound);

            if (sound == 0)
            {
                // reserve a Handle for the audio file
                sound = AL.GenBuffer();
                soundDictionary.Add(filename, sound);

                // Load a .wav file from disk.
                int    channels, bits_per_sample, sample_rate;
                byte[] sound_data = LoadWave(
                    File.Open(filename, FileMode.Open),
                    out channels,
                    out bits_per_sample,
                    out sample_rate);
                ALFormat sound_format =
                    channels == 1 && bits_per_sample == 8 ? ALFormat.Mono8 :
                    channels == 1 && bits_per_sample == 16 ? ALFormat.Mono16 :
                    channels == 2 && bits_per_sample == 8 ? ALFormat.Stereo8 :
                    channels == 2 && bits_per_sample == 16 ? ALFormat.Stereo16 :
                    (ALFormat)0; // unknown
                AL.BufferData(sound, sound_format, sound_data, sound_data.Length, sample_rate);
                if (AL.GetError() != ALError.NoError)
                {
                    Console.WriteLine("Error");
                    // respond to load error etc.
                }
            }
            return(sound);
        }
Example #23
        public unsafe void SetData <T>(T[] data, int channelsNum, int bytesPerSample, int sampleRate) where T : unmanaged
        {
            if (data == null)
            {
                throw new ArgumentNullException(nameof(data));
            }

            this.channelsNum    = channelsNum;
            this.bytesPerSample = bytesPerSample;
            this.sampleRate     = sampleRate;

            LengthInSeconds = data.Length / (float)sampleRate / channelsNum;

            var format = GetSoundFormat(channelsNum, bytesPerSample);

            fixed(T *ptr = data)
            {
                AL.BufferData(bufferId, format, (IntPtr)ptr, data.Length * Marshal.SizeOf <T>(), sampleRate);
            }

            AudioEngine.CheckALErrors();
        }
Example #24
            private unsafe static uint BufferFromOgg(VorbisReader vorbis)
            {
                uint result;

                AL.GenBuffers(1, out result);

                byte[]   data;
                ALFormat format;
                uint     sampleRate;
                TimeSpan len;

                LoadOgg(vorbis, out data, out format, out sampleRate, out len);

                fixed(byte *dataPtr = &data[0])
                {
                    IntPtr dataIntPtr = new IntPtr(dataPtr);

                    AL.BufferData(result, format, dataIntPtr, data.Length, (int)sampleRate);
                }

                return(result);
            }
Example #25
        public override Buffer CreateResource(ResourceManager resourceManager)
        {
            var buffer = base.CreateResource(resourceManager);

            using (var reader = new BinaryReader(OpenResource(FileName)))
            {
                RiffHeader header;
                RiffReader.ReadHeader(reader, out header);

                var format     = header.GetFormat();
                var sampleData = new byte[header.DataLength];
                var bytesRead  = reader.Read(sampleData, 0, sampleData.Length);
                if (bytesRead < sampleData.Length)
                {
                    throw new InvalidOperationException("Unable to read audio data. Sound WAV file may be corrupted or truncated.");
                }

                AL.BufferData(buffer.Id, format, sampleData, sampleData.Length, (int)header.SampleRate);
            }

            return(buffer);
        }
Example #26
        public static int CreateBufferFromFile(string file)
        {
            using (System.IO.Stream stream = FileSystem.Open(file))
            {
                WavFile wav = new WavFile(stream);

                int buffer;
                AL.GenBuffers(1, out buffer);

                ALFormat format;
                if (wav.Channels == 1)
                {
                    if (wav.SampleSize == 8)
                    {
                        format = ALFormat.Mono8;
                    }
                    else
                    {
                        format = ALFormat.Mono16;
                    }
                }
                else
                {
                    if (wav.SampleSize == 8)
                    {
                        format = ALFormat.Stereo8;
                    }
                    else
                    {
                        format = ALFormat.Stereo16;
                    }
                }

                AL.BufferData(buffer, format, wav.PcmData, wav.Length, wav.SampleRate);

                return(buffer);
            }
        }
Example #27
        void INativeAudioBuffer.LoadData <T>(int sampleRate, T[] data, int dataLength, AudioDataLayout dataLayout, AudioDataElementType dataElementType)
        {
            ALFormat format = ALFormat.Mono16;

            if (dataLayout == AudioDataLayout.Mono)
            {
                if (dataElementType == AudioDataElementType.Byte)
                {
                    format = ALFormat.Mono8;
                }
                else if (dataElementType == AudioDataElementType.Short)
                {
                    format = ALFormat.Mono16;
                }
            }
            else if (dataLayout == AudioDataLayout.LeftRight)
            {
                if (dataElementType == AudioDataElementType.Byte)
                {
                    format = ALFormat.Stereo8;
                }
                else if (dataElementType == AudioDataElementType.Short)
                {
                    format = ALFormat.Stereo16;
                }
            }

            int sizeOfElement = System.Runtime.InteropServices.Marshal.SizeOf(typeof(T));

            AL.BufferData(
                this.handle,
                format,
                data,
                dataLength * sizeOfElement,
                sampleRate);

            //AudioBackend.CheckOpenALErrors();
        }
Example #28
        // New for Audio
        public static int LoadAudio(string filename)
        {
            int audioBuffer;

            //checking to see if buffer exists
            audioDictionary.TryGetValue(filename, out audioBuffer);

            if (audioBuffer == 0)
            {
                // reserve a Handle for the audio file
                audioBuffer = AL.GenBuffer();
                audioDictionary.Add(filename, audioBuffer);


                // Load a .wav file from disk.
                int    channels, bits_per_sample, sample_rate;
                byte[] sound_data = LoadWave(
                    File.Open(filename, FileMode.Open),
                    out channels,
                    out bits_per_sample,
                    out sample_rate);
                ALFormat sound_format =
                    channels == 1 && bits_per_sample == 8 ? ALFormat.Mono8 :
                    channels == 1 && bits_per_sample == 16 ? ALFormat.Mono16 :
                    channels == 2 && bits_per_sample == 8 ? ALFormat.Stereo8 :
                    channels == 2 && bits_per_sample == 16 ? ALFormat.Stereo16 :
                    (ALFormat)0; // unknown
                AL.BufferData(audioBuffer, sound_format, sound_data, sound_data.Length, sample_rate);
                if (AL.GetError() != ALError.NoError)
                {
                    // respond to load error etc.
                }
            }



            return(audioBuffer);
        }
Example #29
        public override void QueueBuffer(AudioBuffer buffer)
        {
            lock (_lock)
            {
                OpenALAudioBuffer driverBuffer = new OpenALAudioBuffer
                {
                    DriverIdentifier = buffer.DataPointer,
                    BufferId         = AL.GenBuffer(),
                    SampleCount      = GetSampleCount(buffer)
                };

                AL.BufferData(driverBuffer.BufferId, _targetFormat, buffer.Data, (int)RequestedSampleRate);

                _queuedBuffers.Enqueue(driverBuffer);

                AL.SourceQueueBuffer(_sourceId, driverBuffer.BufferId);

                if (_isActive)
                {
                    StartIfNotPlaying();
                }
            }
        }
Example #30
        internal void SubmitFloatBuffer(float[] buffer, int channels, int sampleRate)
        {
            /* For more on why this delicious copypasta is here, see the
             * internal constructor.
             * -flibit
             */

            // Generate a buffer if we don't have any to use.
            if (availableBuffers.Count == 0)
            {
                availableBuffers.Enqueue(AL.GenBuffer());
            }

            // Push the data to OpenAL.
            int newBuf = availableBuffers.Dequeue();

            AL.BufferData(
                newBuf,
                (channels == 2) ? ALFormat.StereoFloat32Ext : ALFormat.MonoFloat32Ext,
                buffer,
                buffer.Length * sizeof(float), // byte size of the interleaved float data
                sampleRate
                );

            // If we're already playing, queue immediately.
            if (State == SoundState.Playing)
            {
                AL.SourceQueueBuffer(INTERNAL_alSource, newBuf);
                queuedBuffers.Enqueue(newBuf);
            }
            else
            {
                buffersToQueue.Enqueue(newBuf);
            }

            PendingBufferCount += 1;
        }
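Several of the queueing examples above (e.g. #7, #9, #10, #29) push buffers onto a source but do not show how processed buffers are reclaimed. A minimal sketch of the usual unqueue-and-refill step with OpenTK's AL bindings (sourceId and RefillAndQueue are hypothetical names):

        // Reclaim buffers the source has finished playing, refill them, and queue them again.
        AL.GetSource(sourceId, ALGetSourcei.BuffersProcessed, out int processed);
        while (processed-- > 0)
        {
            int buffer = AL.SourceUnqueueBuffer(sourceId);  // pops one processed buffer id
            RefillAndQueue(buffer);                         // hypothetical: AL.BufferData(...) then AL.SourceQueueBuffer(sourceId, buffer)
        }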