Exemplo n.º 1
0
        /// <summary>
        /// Platform-specific initialization: stores the PCM data, derives the
        /// OpenAL buffer format from the channel count, and binds the data to
        /// a new OpenAL sound buffer.
        /// </summary>
        /// <param name="buffer">16-bit PCM wave data.</param>
        /// <param name="sampleRate">Sample rate in Hz.</param>
        /// <param name="channels">Mono or stereo.</param>
        private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels)
        {
            Rate = (float)sampleRate;
            Size = (int)buffer.Length;

#if OPENAL && !(MONOMAC || IOS)
            // Desktop OpenAL path: data is taken as 16-bit PCM.
            _data  = buffer;
            Format = (channels == AudioChannels.Stereo) ? ALFormat.Stereo16 : ALFormat.Mono16;
#endif

#if MONOMAC || IOS
            //buffer should contain 16-bit PCM wave data
            short bitsPerSample = 16;

            // bitsPerSample is fixed at 16 above, so the 8-bit branches below are
            // currently dead; kept for symmetry / possible future formats.
            if ((int)channels <= 1)
            {
                Format = bitsPerSample == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
            }
            else
            {
                Format = bitsPerSample == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;
            }

            _name = "";
            _data = buffer;
#endif
            // bind buffer
            SoundBuffer = new OALSoundBuffer();
            SoundBuffer.BindDataBuffer(_data, Format, Size, (int)Rate);
        }
Exemplo n.º 2
0
        /// <summary>
        /// Loads a .wav file into an OpenAL buffer, caching the buffer handle
        /// per filename so repeated loads reuse the same buffer.
        /// </summary>
        /// <param name="filename">Path of the wave file on disk.</param>
        /// <returns>The OpenAL buffer handle for the file.</returns>
        public static int LoadAudio(string filename)
        {
            int audioBuffer;

            audioDictionary.TryGetValue(filename, out audioBuffer);
            // A zero handle marks "not cached yet" (presumably GenBuffer never
            // returns 0 — the cache relies on that; TODO confirm).
            if (audioBuffer == 0)
            {
                //Reserve a handle for the audio file
                audioBuffer = AL.GenBuffer();
                audioDictionary.Add(filename, audioBuffer);

                // Load a .wav file from disk.
                // Fix: dispose the FileStream; the original leaked the open file handle.
                int    channels, bits_per_sample, sample_rate;
                byte[] sound_data;
                using (var stream = File.Open(filename, FileMode.Open))
                {
                    sound_data = LoadWave(
                        stream,
                        out channels,
                        out bits_per_sample,
                        out sample_rate);
                }
                ALFormat sound_format =
                    channels == 1 && bits_per_sample == 8 ? ALFormat.Mono8 :
                    channels == 1 && bits_per_sample == 16 ? ALFormat.Mono16 :
                    channels == 2 && bits_per_sample == 8 ? ALFormat.Stereo8 :
                    channels == 2 && bits_per_sample == 16 ? ALFormat.Stereo16 :
                    (ALFormat)0; // unknown
                AL.BufferData(audioBuffer, sound_format, sound_data, sound_data.Length, sample_rate);
                if (AL.GetError() != ALError.NoError)
                {
                    // respond to load error etc.
                }
            }
            return(audioBuffer);
        }
Exemplo n.º 3
0
		/// <summary>
		/// Uploads PCM data into the OpenAL buffer and computes its duration
		/// from the buffer's reported bit depth and channel count.
		/// </summary>
		/// <param name="dataBuffer">Raw PCM bytes.</param>
		/// <param name="format">OpenAL sample format of the data.</param>
		/// <param name="size">Size of the data in bytes.</param>
		/// <param name="sampleRate">Sample rate in Hz.</param>
		public void BindDataBuffer (byte[] dataBuffer, ALFormat format, int size, int sampleRate)
		{
			pcmDataBuffer = dataBuffer;
			openALFormat = format;
			dataSize = size;
			this.sampleRate = sampleRate;
			AL.BufferData (openALDataBuffer, openALFormat, pcmDataBuffer, dataSize, this.sampleRate);

			int bits, channels;

			AL.GetBuffer (openALDataBuffer, ALGetBufferi.Bits, out bits);
			AL.GetBuffer (openALDataBuffer, ALGetBufferi.Channels, out channels);

			ALError alError = AL.GetError ();
			if (alError != ALError.NoError) {
#if DEBUG
				// Fix: the format string had no {0} placeholder, so the error
				// description argument was silently dropped.
				Console.WriteLine ("Failed to get buffer attributes: {0}", AL.GetErrorString (alError));
#endif
				// -1 marks the duration as unknown.
				Duration = -1;
			} else {
				// bytes / bytes-per-frame / rate = seconds.
				Duration = (float)(size / ((bits / 8) * channels)) / (float)sampleRate;
			}
		}
        /// <summary>
        /// Uploads PCM data into the OpenAL buffer and computes its duration
        /// from the buffer's reported bit depth and channel count.
        /// </summary>
        /// <param name="dataBuffer">Raw PCM bytes.</param>
        /// <param name="format">OpenAL sample format of the data.</param>
        /// <param name="size">Size of the data in bytes.</param>
        /// <param name="sampleRate">Sample rate in Hz.</param>
        public void BindDataBuffer(byte[] dataBuffer, ALFormat format, int size, int sampleRate)
        {
            openALFormat = format;
            dataSize = size;
            this.sampleRate = sampleRate;
            AL.BufferData(openALDataBuffer, openALFormat, dataBuffer, dataSize, this.sampleRate);

            int bits, channels;

            AL.GetBuffer(openALDataBuffer, ALGetBufferi.Bits, out bits);
            ALError alError = AL.GetError();
            if (alError != ALError.NoError)
            {
                Console.WriteLine("Failed to get buffer bits: {0}, format={1}, size={2}, sampleRate={3}", AL.GetErrorString(alError), format, size, sampleRate);
                // -1 marks the duration as unknown.
                Duration = -1;
            }
            else
            {
                AL.GetBuffer(openALDataBuffer, ALGetBufferi.Channels, out channels);

                alError = AL.GetError();
                if (alError != ALError.NoError)
                {
                    // Fix: this message said "bits" (copy-paste from the query
                    // above) even though this query is for the channel count.
                    Console.WriteLine("Failed to get buffer channels: {0}, format={1}, size={2}, sampleRate={3}", AL.GetErrorString(alError), format, size, sampleRate);
                    Duration = -1;
                }
                else
                {
                    // bytes / bytes-per-frame / rate = seconds.
                    Duration = (float)(size / ((bits / 8) * channels)) / (float)sampleRate;
                }
            }
        }
Exemplo n.º 5
0
        /// <summary>
        /// Uploads PCM data into the OpenAL buffer (with error checking) and
        /// computes its duration from the buffer's reported attributes.
        /// </summary>
        /// <param name="dataBuffer">Raw PCM bytes.</param>
        /// <param name="format">OpenAL sample format of the data.</param>
        /// <param name="size">Size of the data in bytes.</param>
        /// <param name="sampleRate">Sample rate in Hz.</param>
        public void BindDataBuffer(byte[] dataBuffer, ALFormat format, int size, int sampleRate)
        {
            openALFormat    = format;
            dataSize        = size;
            this.sampleRate = sampleRate;
            AL.BufferData(openALDataBuffer, openALFormat, dataBuffer, dataSize, this.sampleRate);
            ALHelper.CheckError("Failed to fill buffer.");

            int bits, channels;

            AL.GetBuffer(openALDataBuffer, ALGetBufferi.Bits, out bits);
            ALError alError = AL.GetError();

            if (alError != ALError.NoError)
            {
                Console.WriteLine("Failed to get buffer bits: {0}, format={1}, size={2}, sampleRate={3}", AL.GetErrorString(alError), format, size, sampleRate);
                // -1 marks the duration as unknown.
                Duration = -1;
            }
            else
            {
                AL.GetBuffer(openALDataBuffer, ALGetBufferi.Channels, out channels);

                alError = AL.GetError();
                if (alError != ALError.NoError)
                {
                    // Fix: this message said "bits" (copy-paste from the query
                    // above) even though this query is for the channel count.
                    Console.WriteLine("Failed to get buffer channels: {0}, format={1}, size={2}, sampleRate={3}", AL.GetErrorString(alError), format, size, sampleRate);
                    Duration = -1;
                }
                else
                {
                    // bytes / bytes-per-frame / rate = seconds.
                    Duration = (float)(size / ((bits / 8) * channels)) / (float)sampleRate;
                }
            }
        }
Exemplo n.º 6
0
        /// <summary>
        /// Playback the audio
        /// </summary>
        /// <param name="unencodedData">Raw byte data</param>
        /// <param name="recordingFormat">OpenAL sound format</param>
        /// <param name="sampleFrequency">Frequency of the samples</param>
        /// <param name="speakerLocation">Speaker location</param>
        public void PlayBackAudio(byte[] unencodedData, ALFormat recordingFormat, int sampleFrequency, SpeakerLocation speakerLocation)
        {
            //Determine if sources needed to be switched
            // When the rotation is exhausted, start over at the last source index.
            if (sourcesLeft == 0)
            {
                sourcesLeft = sources.Length;
            }

            //Used to rotate the sources being used.
            sourcesLeft--;

            // Upload this chunk into a freshly generated OpenAL buffer.
            // NOTE(review): buf is never explicitly deleted in this method;
            // presumably ClearSourcePlayBackBuffers reclaims processed buffers — confirm.
            int buf = AL.GenBuffer();

            AL.BufferData(buf, recordingFormat, unencodedData, unencodedData.Length, sampleFrequency);

            // Position the source to simulate the requested speaker location.
            position = SetSpeakerPosition(speakerLocation);
            AL.Source(sources[sourcesLeft], ALSource3f.Position, ref position);

            // Queue the chunk and (re)start playback if the source went idle.
            // NOTE(review): clearing buffers immediately after queueing (both in
            // the branch below and unconditionally at the end) looks like it
            // could drop the just-queued buffer — verify intent before changing.
            AL.SourceQueueBuffer(sources[sourcesLeft], buf);
            if (AL.GetSourceState(sources[sourcesLeft]) != ALSourceState.Playing)
            {
                ClearSourcePlayBackBuffers(sources[sourcesLeft]);
                AL.SourcePlay(sources[sourcesLeft]);
            }

            ClearSourcePlayBackBuffers(sources[sourcesLeft]);
        }
Exemplo n.º 7
0
                /// <summary>
                /// Builds a streaming OpenAL source fed by a reactive subject:
                /// generates the source and buffer pool, then queues PCM chunks
                /// into ready buffers as the subject emits them.
                /// </summary>
                /// <param name="parent">Node whose discard tears this stream down.</param>
                /// <param name="populateSubject">Emits raw PCM chunks to queue.</param>
                /// <param name="channels">Channel count of the PCM data.</param>
                /// <param name="bytesPerSample">Bytes per sample of the PCM data.</param>
                /// <param name="frequency">Sample rate in Hz.</param>
                /// <param name="numBuffers">Number of OpenAL buffers to rotate.</param>
                /// <param name="bufferSize">Size of each buffer.</param>
                public ReactiveAudioStreamSourceOpenTk(
                    IDiscardableNode parent,
                    Subject <byte[]> populateSubject,
                    int channels,
                    int bytesPerSample,
                    int frequency,
                    int numBuffers,
                    int bufferSize)
                {
                    this.node_            = parent.CreateChild();
                    this.node_.OnDiscard += _ => this.Destroy_();

                    this.sourceId_ = AL.GenSource();

                    this.bufferIds_ = AL.GenBuffers(numBuffers).ToImmutableArray();

                    // Fix: populate the ready-buffer queue BEFORE subscribing.
                    // The original subscribed first, so a subject emitting
                    // synchronously would dequeue from a null queue (NRE).
                    // TODO: Delay this until the observable has returned some value. Stream
                    // should remember stop/play/paused state as expected in the meantime.
                    this.readyBuffersIds_ = new Queue <int>();
                    foreach (var bufferId in this.bufferIds_)
                    {
                        this.readyBuffersIds_.Enqueue(bufferId);
                    }

                    populateSubject.Subscribe(pcm => {
                        var readyBufferId = this.readyBuffersIds_.Dequeue();
                        this.PopulateAndQueueBuffer_(readyBufferId, pcm);
                    });

                    this.format_ = PcmHelperOpenTk.GetPcmFormat(channels, bytesPerSample);

                    this.frequency_  = frequency;
                    this.bufferSize_ = bufferSize;

                    this.currentBufferIndex_ = new CircularRangedInt(0, 0, numBuffers);
                }
Exemplo n.º 8
0
        /// <summary>
        /// Uploads 16-bit PCM samples into a freshly generated OpenAL buffer.
        /// </summary>
        /// <param name="data">PCM samples to upload.</param>
        /// <param name="format">OpenAL sample format of the data.</param>
        /// <param name="sampleRate">Sample rate in Hz.</param>
        /// <returns>The new OpenAL buffer handle.</returns>
        public static int BufferData(short[] data, ALFormat format, int sampleRate)
        {
            // Reserve a handle, fill it with the samples, and hand it back.
            var bufferHandle = AL.GenBuffer();
            var byteCount    = data.Length * sizeof(short);

            AL.BufferData(bufferHandle, format, data, byteCount, sampleRate);

            return bufferHandle;
        }
Exemplo n.º 9
0
        // Converts an error code to an error string with additional information.
        private string ErrorMessage(string devicename, int frequency, ALFormat bufferformat, int buffersize)
        {
            string   alcerrmsg;
            AlcError alcerrcode = CurrentError;

            switch (alcerrcode)
            {
            case AlcError.OutOfMemory:
                alcerrmsg = alcerrcode.ToString() + ": The specified device is invalid, or can not capture audio.";
                break;

            case AlcError.InvalidValue:
                alcerrmsg = alcerrcode.ToString() + ": One of the parameters has an invalid value.";
                break;

            default:
                alcerrmsg = alcerrcode.ToString();
                break;
            }
            return("The handle returned by Alc.CaptureOpenDevice is null." +
                   "\nAlc Error: " + alcerrmsg +
                   "\nDevice Name: " + devicename +
                   "\nCapture frequency: " + frequency +
                   "\nBuffer format: " + bufferformat +
                   "\nBuffer Size: " + buffersize);
        }
Exemplo n.º 10
0
        /// <summary>
        /// Uploads (possibly block-compressed) audio data into the OpenAL
        /// buffer, validating driver support for ADPCM formats, and computes
        /// the duration from the driver-reported unpacked attributes.
        /// </summary>
        /// <param name="dataBuffer">Raw audio bytes.</param>
        /// <param name="format">OpenAL sample format of the data.</param>
        /// <param name="size">Size of the data in bytes.</param>
        /// <param name="sampleRate">Sample rate in Hz.</param>
        /// <param name="sampleAlignment">Samples per compressed block; 0 when not block-aligned.</param>
        /// <exception cref="InvalidOperationException">The driver lacks MS-ADPCM or IMA4 support.</exception>
        public void BindDataBuffer(byte[] dataBuffer, ALFormat format, int size, int sampleRate, int sampleAlignment = 0)
        {
            if ((format == ALFormat.MonoMSAdpcm || format == ALFormat.StereoMSAdpcm) && !OpenALSoundController.GetInstance.SupportsAdpcm)
            {
                throw new InvalidOperationException("MS-ADPCM is not supported by this OpenAL driver");
            }
            if ((format == ALFormat.MonoIma4 || format == ALFormat.StereoIma4) && !OpenALSoundController.GetInstance.SupportsIma4)
            {
                throw new InvalidOperationException("IMA/ADPCM is not supported by this OpenAL driver");
            }

            openALFormat = format;
            dataSize     = size;
            int unpackedSize = 0;

            if (sampleAlignment > 0)
            {
                AL.Bufferi(openALDataBuffer, ALBufferi.UnpackBlockAlignmentSoft, sampleAlignment);
                // Fix: this message said "Failed to fill buffer." (copy-paste from
                // the BufferData call below) but this call sets the unpack alignment.
                ALHelper.CheckError("Failed to set buffer unpack alignment.");
            }

            AL.BufferData(openALDataBuffer, openALFormat, dataBuffer, size, sampleRate);
            ALHelper.CheckError("Failed to fill buffer.");

            int bits, channels;

            // Stays -1 ("unknown") if any attribute query below throws.
            Duration = -1;
            AL.GetBuffer(openALDataBuffer, ALGetBufferi.Bits, out bits);
            ALHelper.CheckError("Failed to get buffer bits");
            AL.GetBuffer(openALDataBuffer, ALGetBufferi.Channels, out channels);
            ALHelper.CheckError("Failed to get buffer channels");
            AL.GetBuffer(openALDataBuffer, ALGetBufferi.Size, out unpackedSize);
            ALHelper.CheckError("Failed to get buffer size");
            // Duration derives from the unpacked size, not the compressed input size.
            Duration = (float)(unpackedSize / ((bits / 8) * channels)) / (float)sampleRate;
        }
Exemplo n.º 11
0
        /// <summary>
        /// Uploads PCM data into the OpenAL buffer and computes its duration
        /// from the buffer's reported bit depth and channel count.
        /// </summary>
        /// <param name="dataBuffer">Raw PCM bytes.</param>
        /// <param name="format">OpenAL sample format of the data.</param>
        /// <param name="size">Size of the data in bytes.</param>
        /// <param name="sampleRate">Sample rate in Hz.</param>
        public void BindDataBuffer(byte[] dataBuffer, ALFormat format, int size, int sampleRate)
        {
            pcmDataBuffer   = dataBuffer;
            openALFormat    = format;
            dataSize        = size;
            this.sampleRate = sampleRate;
            AL.BufferData(openALDataBuffer, openALFormat, pcmDataBuffer, dataSize, this.sampleRate);

            int bits, channels;

            AL.GetBuffer(openALDataBuffer, ALGetBufferi.Bits, out bits);
            AL.GetBuffer(openALDataBuffer, ALGetBufferi.Channels, out channels);

            ALError alError = AL.GetError();

            if (alError != ALError.NoError)
            {
                // Fix: the format string had no {0} placeholder, so the error
                // description argument was silently dropped.
                Console.WriteLine("Failed to get buffer attributes: {0}", AL.GetErrorString(alError));
                // -1 marks the duration as unknown.
                Duration = -1;
            }
            else
            {
                // bytes / bytes-per-frame / rate = seconds.
                Duration = (float)(size / ((bits / 8) * channels)) / (float)sampleRate;
            }
        }
Exemplo n.º 12
0
        /// <summary>
        /// Extract the bufferdata from an ogg-file.
        /// </summary>
        /// <param name="file">The file to load the data from.</param>
        /// <returns>A SoundBufferData object containing the data from the specified file.</returns>
        public static SoundBufferData FromOgg(Stream file)
        {
            var sampleChunks = new List <short[]>();

            ALFormat format;
            int      sampleRate;

            using (var vorbis = new VorbisReader(file, true))
            {
                // Save format and samplerate for playback
                format     = vorbis.Channels == 1 ? ALFormat.Mono16 : ALFormat.Stereo16;
                sampleRate = vorbis.SampleRate;

                var floatSamples = new float[16384];

                while (true)
                {
                    var samplesRead = vorbis.ReadSamples(floatSamples, 0, floatSamples.Length);
                    if (samplesRead <= 0)
                    {
                        break;
                    }

                    // Sample value range is -0.99999994f to 0.99999994f and samples
                    // are interleaved (chan0, chan1, chan0, chan1, etc.). Convert
                    // to 16-bit via the shared OggStreamer cast helper.
                    var pcmChunk = new short[samplesRead];
                    SoundBufferData.CastBuffer(floatSamples, pcmChunk, samplesRead);
                    sampleChunks.Add(pcmChunk);
                }
            }

            return(new SoundBufferData(sampleChunks, format, sampleRate));
        }
Exemplo n.º 13
0
 /// <summary>
 /// Parses a RIFF/WAVE stream: validates the header, skips unknown chunks,
 /// reads the fmt chunk, then returns the remaining bytes of the stream.
 /// </summary>
 /// <param name="reader">Reader positioned at the start of the RIFF header.</param>
 /// <param name="format">OpenAL format derived from channel count and bit depth.</param>
 /// <param name="size">Declared size of the data chunk (0 when the data is compressed).</param>
 /// <param name="frequency">Sample rate in Hz.</param>
 private static byte[] LoadWave(BinaryReader reader, out ALFormat format, out int size, out int frequency)
 {
   if (new string(reader.ReadChars(4)) != "RIFF")
     throw new NotSupportedException("Specified stream is not a wave file.");
   reader.ReadInt32(); // overall RIFF chunk size (ignored)
   if (new string(reader.ReadChars(4)) != "WAVE")
     throw new NotSupportedException("Specified stream is not a wave file.");
   // Skip chunks until the "fmt " chunk is found.
   for (string str = new string(reader.ReadChars(4)); str != "fmt "; str = new string(reader.ReadChars(4)))
     reader.ReadBytes(reader.ReadInt32());
   int num1 = reader.ReadInt32();       // fmt chunk size
   int num2 = (int) reader.ReadInt16(); // audio format tag (1 = PCM)
   int channels = (int) reader.ReadInt16();
   int num3 = reader.ReadInt32();       // sample rate
   reader.ReadInt32();                  // byte rate (ignored)
   int num4 = (int) reader.ReadInt16(); // block align (read but unused)
   int bits = (int) reader.ReadInt16();
   if (num1 > 16)
     reader.ReadBytes(num1 - 16);       // skip any fmt-chunk extension bytes
   string str1;
   // Skip chunks until the "data" chunk is found (case-insensitive match).
   for (str1 = new string(reader.ReadChars(4)); str1.ToLower() != "data"; str1 = new string(reader.ReadChars(4)))
     reader.ReadBytes(reader.ReadInt32());
   // NOTE(review): the loop above exits on any casing of "data", so this
   // exact-case check can still throw for e.g. "DATA" — verify intent.
   if (str1 != "data")
     throw new NotSupportedException("Specified wave file is not supported.");
   int num5 = reader.ReadInt32();       // declared data-chunk size
   frequency = num3;
   format = AudioLoader.GetSoundFormat(channels, bits);
   // NOTE(review): this reads up to the whole stream length rather than num5
   // bytes, so the returned array may include trailing chunks — presumably
   // callers use the out 'size' rather than the array length; confirm.
   byte[] numArray = reader.ReadBytes((int) reader.BaseStream.Length);
   size = num5;
   if (num2 != 1)
   {
     // Non-PCM data is unsupported; signalled to callers via size = 0.
     Console.WriteLine("Wave compression is not supported.");
     size = 0;
   }
   return numArray;
 }
Exemplo n.º 14
0
        /// <summary>
        /// Returns a cached OpenAL buffer for the given file, creating and
        /// uploading it (float samples converted to 16-bit PCM) on first use.
        /// </summary>
        /// <param name="file">Key/path of the sound to load.</param>
        /// <returns>The cache entry holding the OpenAL buffer name.</returns>
        private ALOutput.BufferCache GetBuffer(string file)
        {
            ALOutput.BufferCache bufferCache1;
            if (this._cache.TryGetValue(file, out bufferCache1))
            {
                ++bufferCache1.RefCount;
                bufferCache1.LastUse = DateTime.Now;
                Debug.WriteLine("Using " + file + " from cache");
                return(bufferCache1);
            }
            SoundInstance soundInstance = SoundInstance.Create(file, this._source.Open(file));
            int           bid           = AL.GenBuffer();

            float[]  numArray = soundInstance.ReadFully();
            short[]  buffer   = new short[numArray.Length];
            ALFormat format   = soundInstance is SoundInstanceMono ? ALFormat.Mono16 : ALFormat.Stereo16;

            for (int index = 0; index < numArray.Length; ++index)
            {
                // Fix: clamp before narrowing. Samples outside [-1, 1] previously
                // wrapped around in the unchecked cast to short, producing loud
                // artifacts (the ogg loaders elsewhere in this codebase clamp too).
                double scaled = (double)numArray[index] * (double)short.MaxValue;
                if (scaled > short.MaxValue)
                {
                    scaled = short.MaxValue;
                }
                else if (scaled < short.MinValue)
                {
                    scaled = short.MinValue;
                }
                buffer[index] = (short)scaled;
            }
            AL.BufferData <short>(bid, format, buffer, buffer.Length * 2, soundInstance.WaveFormat.SampleRate);
            ALOutput.BufferCache bufferCache2 = new ALOutput.BufferCache()
            {
                Buffer   = bid,
                File     = file,
                RefCount = 1,
                LastUse  = DateTime.Now
            };
            this._cache.Add(file, bufferCache2);
            return(bufferCache2);
        }
Exemplo n.º 15
0
        // --- loading buffers ---

        /// <summary>Loads the specified sound buffer.</summary>
        /// <param name="buffer">The sound buffer.</param>
        /// <returns>Whether loading the buffer was successful.</returns>
        internal static bool LoadBuffer(SoundBuffer buffer)
        {
            // Fast paths: already resident, or previously marked unusable.
            if (buffer.Loaded)
            {
                return(true);
            }
            if (buffer.Ignore)
            {
                return(false);
            }

            OpenBveApi.Sounds.Sound sound;
            if (buffer.Origin.GetSound(out sound) &&
                (sound.BitsPerSample == 8 || sound.BitsPerSample == 16))
            {
                // Mix down to mono, create an AL buffer, and upload the samples.
                byte[] monoBytes = GetMonoMix(sound);
                AL.GenBuffers(1, out buffer.OpenAlBufferName);
                ALFormat alFormat = sound.BitsPerSample == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
                AL.BufferData(buffer.OpenAlBufferName, alFormat, monoBytes, monoBytes.Length, sound.SampleRate);
                buffer.Duration = sound.Duration;
                buffer.Loaded   = true;
                return(true);
            }

            // Sound unavailable or bit depth unsupported: never try again.
            buffer.Ignore = true;
            return(false);
        }
Exemplo n.º 16
0
        /// <summary>
        /// Pins a managed sample array and hands its address to the native
        /// alBufferData call for the given OpenAL buffer.
        /// </summary>
        /// <param name="bid">OpenAL buffer name.</param>
        /// <param name="format">Sample format of the data.</param>
        /// <param name="data">PCM samples to upload.</param>
        /// <param name="size">Size of the data in bytes.</param>
        /// <param name="freq">Sample rate in Hz.</param>
        internal static void BufferData(int bid, ALFormat format, short[] data, int size, int freq)
        {
            var handle = GCHandle.Alloc(data, GCHandleType.Pinned);

            try
            {
                alBufferData((uint)bid, (int)format, handle.AddrOfPinnedObject(), size, freq);
            }
            finally
            {
                // Fix: free the pinned handle even if alBufferData throws;
                // the original leaked the pinned GCHandle on an exception.
                handle.Free();
            }
        }
Exemplo n.º 17
0
        /// <summary>
        /// Reads an entire WAV stream into memory and defers to the byte[]-based
        /// LoadWavExt overload for parsing.
        /// </summary>
        /// <param name="file">Readable stream containing the WAV data.</param>
        /// <exception cref="NotSupportedException">The stream does not support reading.</exception>
        public static void LoadWavExt(
            Stream file,
            out byte[] data,
            out uint chunkSize,
            out ALFormat format,
            out uint sampleRate,
            out uint avgBytesPerSec,
            out ushort bytesPerSample,
            out ushort bitsPerSample)
        {
            if (!file.CanRead)
            {
                throw new NotSupportedException("This stream does not support reading");
            }
            using (MemoryStream ms = new MemoryStream()) {
                // Idiom: Stream.CopyTo replaces the original manual 16 KiB read loop.
                file.CopyTo(ms);

                LoadWavExt(ms.ToArray(), out data, out chunkSize, out format, out sampleRate, out avgBytesPerSec, out bytesPerSample, out bitsPerSample);
            }
        }
Exemplo n.º 18
0
        /// <summary>
        /// Uploads raw sample data into this buffer, choosing the OpenAL format
        /// from the layout (mono/stereo) and element type (8/16-bit).
        /// </summary>
        void INativeAudioBuffer.LoadData(int sampleRate, IntPtr data, int size, AudioDataLayout dataLayout, AudioDataElementType dataElementType)
        {
            // Default — and fallback for unrecognised combinations: 16-bit mono.
            ALFormat alFormat = ALFormat.Mono16;

            if (dataLayout == AudioDataLayout.Mono)
            {
                // Mono: only the element width matters.
                alFormat = dataElementType == AudioDataElementType.Byte
                    ? ALFormat.Mono8
                    : ALFormat.Mono16;
            }
            else if (dataLayout == AudioDataLayout.LeftRight)
            {
                if (dataElementType == AudioDataElementType.Byte)
                {
                    alFormat = ALFormat.Stereo8;
                }
                else if (dataElementType == AudioDataElementType.Short)
                {
                    alFormat = ALFormat.Stereo16;
                }
                // Any other element type keeps the Mono16 fallback, as before.
            }

            AL.BufferData(
                this.handle,
                alFormat,
                data,
                (int)size,
                sampleRate);
        }
        /// <summary>
        /// Loads a .wav file from disk into a new OpenAL buffer.
        /// </summary>
        /// <param name="fileName">Path of the wave file.</param>
        /// <returns>The OpenAL buffer handle, or 0 if the upload failed.</returns>
        public static int LoadBuffer(string fileName)
        {
            // reserve a Handle for the audio file
            int mBuffer = AL.GenBuffer();

            // Load a .wav file from disk
            int channels, bits_per_sample, sample_rate;

            // Fix: dispose the FileStream; the original leaked the open file handle.
            byte[] sound_data;
            using (var stream = File.Open(fileName, FileMode.Open))
            {
                sound_data = LoadWave(
                    stream,
                    out channels,
                    out bits_per_sample,
                    out sample_rate);
            }
            ALFormat sound_format =
                channels == 1 && bits_per_sample == 8 ? ALFormat.Mono8 :
                channels == 1 && bits_per_sample == 16 ? ALFormat.Mono16 :
                channels == 2 && bits_per_sample == 8 ? ALFormat.Stereo8 :
                channels == 2 && bits_per_sample == 16 ? ALFormat.Stereo16 :
                (ALFormat)0; //unknown

            AL.BufferData(mBuffer, sound_format, sound_data, sound_data.Length, sample_rate);
            if (AL.GetError() != ALError.NoError)
            {
                // respond to load error etc.
                // Fix: release the reserved AL buffer before reporting failure;
                // the original leaked the OpenAL buffer name on this path.
                AL.DeleteBuffer(mBuffer);
                return(0);
            }

            return(mBuffer);
        }
Exemplo n.º 20
0
        /// <summary>
        /// Creates an OpenAL sound buffer and uploads the given PCM data,
        /// selecting the AL format from the channel count and bit depth.
        /// </summary>
        /// <param name="bitrate">Bits per sample: 8 or 16.</param>
        /// <param name="channels">Channel count: 1 (mono) or 2 (stereo).</param>
        /// <param name="frequency">Sample rate in Hz; must be positive.</param>
        /// <param name="buffer">Non-empty PCM data.</param>
        /// <param name="length">Bytes of <paramref name="buffer"/> to upload; defaults to the whole array.</param>
        /// <exception cref="NotSupportedException">Unsupported channel/bitrate combination.</exception>
        public SoundBuffer(int bitrate, int channels, int frequency, byte[] buffer, int?length = null)
        {
            Contract.Requires <ArgumentOutOfRangeException>(bitrate == 8 || bitrate == 16);
            Contract.Requires <ArgumentOutOfRangeException>(channels == 1 || channels == 2);
            Contract.Requires <ArgumentOutOfRangeException>(frequency > 0);
            Contract.Requires <ArgumentException>(buffer.Length > 0);

            Id             = AL.GenBuffer();
            this.Buffer    = buffer;
            this.Bitrate   = bitrate;
            this.Channels  = channels;
            this.Frequency = frequency;

            // Fix: select the format by (channels, bitrate) explicitly. The
            // original switched on Channels + Bitrate, which only worked because
            // the four sums happened to be distinct — fragile and hard to read.
            // Typo "exacty" in the exception message is also fixed.
            if (Channels == 1 && Bitrate == 8)
            {
                _format = ALFormat.Mono8;
            }
            else if (Channels == 1 && Bitrate == 16)
            {
                _format = ALFormat.Mono16;
            }
            else if (Channels == 2 && Bitrate == 8)
            {
                _format = ALFormat.Stereo8;
            }
            else if (Channels == 2 && Bitrate == 16)
            {
                _format = ALFormat.Stereo16;
            }
            else
            {
                throw new NotSupportedException($"Sound files must have exactly 1 or 2 channels, and a bitrate of exactly 8 or 16 bits per sample (you have {Channels} channel(s) and {Bitrate} bit(s) per sample).");
            }

            _Audio.ErrorCheck();

            AL.BufferData(Id, _format, buffer, length ?? buffer.Length, Frequency);

            _Audio.ErrorCheck();
        }
Exemplo n.º 21
0
        // --- loading buffers ---

        /// <summary>Loads the specified sound buffer.</summary>
        /// <param name="buffer">The sound buffer.</param>
        /// <returns>Whether loading the buffer was successful.</returns>
        public void LoadBuffer(SoundBuffer buffer)
        {
            // Nothing to do when already loaded or permanently skipped.
            if (buffer.Loaded || buffer.Ignore)
            {
                return;
            }

            Sound sound;
            if (!buffer.Origin.GetSound(out sound) ||
                (sound.BitsPerSample != 8 && sound.BitsPerSample != 16))
            {
                // Sound unavailable or bit depth unsupported: never try again.
                buffer.Ignore = true;
                return;
            }

            // Mix down to mono, create an AL buffer, and upload the samples.
            byte[] monoMix = sound.GetMonoMix();
            AL.GenBuffers(1, out buffer.OpenAlBufferName);
            ALFormat alFormat = sound.BitsPerSample == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
            AL.BufferData(buffer.OpenAlBufferName, alFormat, monoMix, monoMix.Length, sound.SampleRate);
            buffer.Duration = sound.Duration;
            buffer.Loaded   = true;
        }
Exemplo n.º 22
0
        /// <summary>
        /// Uploads PCM data into the OpenAL buffer and computes its duration
        /// from the buffer's reported bit depth and channel count.
        /// </summary>
        /// <param name="dataBuffer">Raw PCM bytes.</param>
        /// <param name="format">OpenAL sample format of the data.</param>
        /// <param name="size">Size of the data in bytes.</param>
        /// <param name="sampleRate">Sample rate in Hz.</param>
        public void BindDataBuffer(byte[] dataBuffer, ALFormat format, int size, int sampleRate)
        {
            this.pcmDataBuffer = dataBuffer;
            this.openALFormat  = format;
            this.dataSize      = size;
            this.sampleRate    = sampleRate;
            AL.BufferData <byte>(this.openALDataBuffer, this.openALFormat, this.pcmDataBuffer, this.dataSize, this.sampleRate);
            int num1;

            AL.GetBuffer(this.openALDataBuffer, ALGetBufferi.Bits, out num1);
            int num2;

            AL.GetBuffer(this.openALDataBuffer, ALGetBufferi.Channels, out num2);
            ALError error = AL.GetError();

            if (error != ALError.NoError)
            {
                // Fix: the format string had no {0} placeholder, so the error
                // description argument was silently dropped.
                Console.WriteLine("Failed to get buffer attributes: {0}", (object)AL.GetErrorString(error));
                // -1 marks the duration as unknown.
                this.Duration = -1.0;
            }
            else
            {
                // bytes / bytes-per-frame / rate = seconds.
                this.Duration = (double)(size / (num1 / 8 * num2)) / (double)sampleRate;
            }
        }
Exemplo n.º 23
0
        /// <summary>
        /// Routes the raw buffer to the correct platform initializer based on
        /// its OpenAL format family (PCM, MS-ADPCM, IEEE float, or IMA4).
        /// </summary>
        private void PlatformInitializeBuffer(byte[] buffer, int bufferSize, ALFormat format, int channels, int sampleRate, int blockAlignment, int bitsPerSample, int loopStart, int loopLength)
        {
            if (format == ALFormat.Mono8 || format == ALFormat.Mono16 ||
                format == ALFormat.Stereo8 || format == ALFormat.Stereo16)
            {
                // Plain PCM.
                PlatformInitializePcm(buffer, 0, bufferSize, bitsPerSample, sampleRate, (AudioChannels)channels, loopStart, loopLength);
            }
            else if (format == ALFormat.MonoMSAdpcm || format == ALFormat.StereoMSAdpcm)
            {
                // Microsoft ADPCM needs the block alignment.
                PlatformInitializeAdpcm(buffer, 0, bufferSize, sampleRate, (AudioChannels)channels, blockAlignment, loopStart, loopLength);
            }
            else if (format == ALFormat.MonoFloat32 || format == ALFormat.StereoFloat32)
            {
                // 32-bit IEEE float samples.
                PlatformInitializeIeeeFloat(buffer, 0, bufferSize, sampleRate, (AudioChannels)channels, loopStart, loopLength);
            }
            else if (format == ALFormat.MonoIma4 || format == ALFormat.StereoIma4)
            {
                // IMA/ADPCM also needs the block alignment.
                PlatformInitializeIma4(buffer, 0, bufferSize, sampleRate, (AudioChannels)channels, blockAlignment, loopStart, loopLength);
            }
            else
            {
                throw new NotSupportedException("Unsupported wave format!");
            }
        }
Exemplo n.º 24
0
        /// <summary>
        /// Decodes an entire Ogg Vorbis stream into 16-bit little-endian PCM.
        /// </summary>
        /// <param name="vorbis">Open Vorbis reader to drain.</param>
        /// <param name="data">Receives the decoded PCM bytes.</param>
        /// <param name="format">Mono16 or Stereo16 depending on the channel count.</param>
        /// <param name="sampleRate">Receives the stream's sample rate in Hz.</param>
        /// <param name="len">Receives the total playback time.</param>
        private static void LoadOgg(VorbisReader vorbis, out byte[] data, out ALFormat format, out uint sampleRate, out TimeSpan len)
        {
            sampleRate = (uint)vorbis.SampleRate;
            format     = vorbis.Channels == 1 ? ALFormat.Mono16 : ALFormat.Stereo16;
            len        = vorbis.TotalTime;

            // Read a tenth of a second of interleaved float samples per pass.
            float[]     sampleChunk = new float[vorbis.SampleRate / 10 * vorbis.Channels];
            // Pre-size for ~2 bytes per sample per channel over the whole track.
            List <byte> pcmBytes    = new List <byte>((int)(vorbis.SampleRate * vorbis.Channels * 2 * len.TotalSeconds));

            int samplesRead;
            while ((samplesRead = vorbis.ReadSamples(sampleChunk, 0, sampleChunk.Length)) > 0)
            {
                for (int i = 0; i < samplesRead; i++)
                {
                    // Scale the [-1, 1] float to 16 bits and clamp against overflow.
                    int scaled = (int)(short.MaxValue * sampleChunk [i]);
                    scaled = Math.Max(short.MinValue, Math.Min(short.MaxValue, scaled));
                    short sample = (short)scaled;

                    // Little endian: low byte first.
                    pcmBytes.Add((byte)(sample & 0x00FF));
                    pcmBytes.Add((byte)((sample >> 8) & 0x00FF));
                }
            }
            // TODO: Add better implementation so that there's no need for array copying
            data = pcmBytes.ToArray();
        }
Exemplo n.º 25
0
		/// <summary>
		/// Decodes an entire Ogg Vorbis stream into 16-bit little-endian PCM.
		/// </summary>
		/// <param name="vorbis">Open Vorbis reader to drain.</param>
		/// <param name="data">Receives the decoded PCM bytes.</param>
		/// <param name="format">Mono16 or Stereo16 depending on the channel count.</param>
		/// <param name="sampleRate">Receives the stream's sample rate in Hz.</param>
		/// <param name="len">Receives the total playback time.</param>
		private static void LoadOgg(VorbisReader vorbis, out byte[] data, out ALFormat format, out uint sampleRate, out TimeSpan len) {
			sampleRate = (uint)vorbis.SampleRate;
			format = vorbis.Channels == 1 ? ALFormat.Mono16 : ALFormat.Stereo16;
			len = vorbis.TotalTime;

			// Read a tenth of a second of interleaved float samples per pass.
			float[] buffer = new float[vorbis.SampleRate / 10 * vorbis.Channels];
			// Pre-size for ~2 bytes per sample per channel over the whole track.
			List<byte> bytes = new List<byte>((int)(vorbis.SampleRate * vorbis.Channels * 2 * len.TotalSeconds));
			int count = 0;
			while ((count = vorbis.ReadSamples(buffer, 0, buffer.Length)) > 0) {
				for (int i = 0; i < count; i++) {
					// Scale the [-1, 1] float to 16 bits and clamp against overflow.
					int temp = (int)(short.MaxValue * buffer [i]);
					if (temp > short.MaxValue) {
						temp = short.MaxValue;
					} else if (temp < short.MinValue) {
						temp = short.MinValue;
					}
					short tempBytes = (short)temp;
					byte byte1 = (byte)((tempBytes >> 8) & 0x00FF);
					byte byte2 = (byte)((tempBytes >> 0) & 0x00FF);

					// Little endian
					bytes.Add(byte2);
					bytes.Add(byte1);
				}
			}
			// TODO: Add better implementation so that there's no need for array copying
			data = bytes.ToArray();
		}
Exemplo n.º 26
0
        /// <summary>
        /// Fills the given buffers from the stream, starts playback, keeps
        /// refilling processed buffers until the source stops, then disposes
        /// the stream.
        /// </summary>
        /// <param name="reader">Sample stream to play; disposed before returning.</param>
        /// <param name="buffers">OpenAL buffer names used for streaming.</param>
        /// <param name="source">OpenAL source to play through.</param>
        /// <param name="sampleRate">Sample rate in Hz.</param>
        /// <param name="format">OpenAL sample format of the stream data.</param>
        static void PlayAndDispose(Stream reader, int[] buffers, int source, int sampleRate,
                                   ALFormat format = ALFormat.StereoFloat32Ext)
        {
            var totalBytesRead = ReadBuffersAndEnqueueThem(reader, buffers, source, sampleRate, format);

            AL.SourcePlay(source);

            Trace.WriteLine("Playing...");

            ALSourceState sourceState;

            do
            {
                // Poll at ~50 Hz; there is no OpenAL completion callback here.
                Thread.Sleep(20);

                AL.GetSource(source, ALGetSourcei.BuffersProcessed, out int buffersProcessed);

                // Refill and requeue any buffers the source has finished with,
                // as long as the stream still has data left.
                if (buffersProcessed > 0 && reader.Position < reader.Length)
                {
                    var unqueuedBuffers = AL.SourceUnqueueBuffers(source, buffersProcessed);

                    Trace.WriteLine(
                        $"- Unqueued {unqueuedBuffers.Length} buffers for {reader.Length - reader.Position} B left");

                    totalBytesRead += ReadBuffersAndEnqueueThem(reader, unqueuedBuffers, source, sampleRate, format);
                }

                sourceState = AL.GetSourceState(source);
            }while (sourceState == ALSourceState.Playing);

            Trace.WriteLine($"Stop ({totalBytesRead} B read)");

            reader.Dispose();
            // NOTE(review): nulling a by-value parameter has no effect on the caller.
            reader = null;
        }
Exemplo n.º 27
0
        /// <summary>
        /// Applies a new audio format: recomputes the OpenAL data format and,
        /// when the format actually changed, recreates the source and its
        /// buffer pool under the context lock.
        /// </summary>
        /// <param name="format">The desired output format.</param>
        public override void SetFormat(AudioFormat format)
        {
            dataFormat = GetALFormat(format.Channels, format.BitsPerSample);
            // Don't need to recreate device if it's the same
            if (Format.Equals(format))
            {
                return;
            }
            Format = format;

            // Tear down the old source before allocating replacements.
            DisposeSource();
            uint sourceU = 0;

            // Pin the buffer-ID array while native OpenAL writes into it;
            // all GL/AL calls happen with the context made current under the lock.
            fixed(uint *buffers = bufferIDs)
            {
                lock (contextLock) {
                    MakeContextCurrent();
                    AL.alGenSources(1, &sourceU);
                    source = sourceU;
                    CheckError("GenSources");

                    AL.alGenBuffers(NumBuffers, buffers);
                    CheckError("GenBuffers");
                }
            }

            // Re-apply a non-default volume to the freshly created source.
            if (volume != 1)
            {
                SetVolume(volume);
            }
        }
Exemplo n.º 28
0
 /// <summary>
 /// Creates an OpenAL source and derives the AL sample format from the audio info.
 /// </summary>
 public AudioBuffer( IAudioDevice audioDevice, AudioInfo audioInfo )
 {
     sourceId = AL.GenSource ();
     this.audioInfo = audioInfo;

     // Map (channel count, bits per sample) onto the corresponding ALFormat.
     bool sixteenBit = audioInfo.BitsPerSample == 16;
     if ( audioInfo.AudioChannel == 2 )
     {
         alFormat = sixteenBit ? ALFormat.Stereo16 : ALFormat.Stereo8;
     }
     else
     {
         alFormat = sixteenBit ? ALFormat.Mono16 : ALFormat.Mono8;
     }
 }
Exemplo n.º 29
0
 /// <summary>
 /// Reads an entire .wav file from disk and decodes it into raw PCM data.
 /// </summary>
 /// <param name="file">Path of the wave file to load.</param>
 /// <param name="data">Receives the decoded PCM samples.</param>
 /// <param name="format">Receives the OpenAL format of the samples.</param>
 /// <param name="sampleRate">Receives the sample rate in Hz.</param>
 public static void LoadWav(
     string file,
     out byte[] data,
     out ALFormat format,
     out uint sampleRate)
 {
     // Slurp the whole file into memory, then delegate to the byte[] overload.
     byte[] fileBytes = File.ReadAllBytes(file);
     LoadWav(fileBytes, out data, out format, out sampleRate);
 }
Exemplo n.º 30
0
 /// <summary>
 /// Initializes a new instance of the <see cref="SoundStream"/> class.
 /// </summary>
 /// <param name="handle">A valid OpenAL Source Handle.</param>
 /// <param name="buffer">A sound that contains valid audio sample buffer.</param>
 /// <inheritdoc/>
 internal SoundStream(int handle, Sound buffer)
     : base(handle, buffer)
 {
     // Start out idle: stopped, not streaming, nothing processed yet.
     state     = SoundStatus.Stopped;
     streaming = false;
     processed = 0;
     format    = buffer.Format;
 }
Exemplo n.º 31
0
 /// <summary>
 /// Decodes a wave stream into PCM bytes, reporting format, size and frequency.
 /// </summary>
 public static byte[] Load(Stream data, out ALFormat format, out int size, out int frequency)
 {
     // Defaults in case the loader leaves the out parameters untouched.
     format    = ALFormat.Mono8;
     size      = 0;
     frequency = 0;

     using (var waveReader = new BinaryReader(data))
     {
         return AudioLoader.LoadWave(waveReader, out format, out size, out frequency);
     }
 }
Exemplo n.º 32
0
 /// <summary>
 /// Determines whether the given OpenAL format carries two channels.
 /// </summary>
 public static bool IsStereoFormat(ALFormat format)
 {
     switch (format)
     {
         case ALFormat.Stereo8:
         case ALFormat.Stereo16:
         case ALFormat.StereoFloat32:
         case ALFormat.StereoIma4:
         case ALFormat.StereoMSAdpcm:
             return true;
         default:
             return false;
     }
 }
Exemplo n.º 33
0
        /// <summary>
        /// Wraps raw PCM data in a new OpenAL buffer and records its playback parameters.
        /// </summary>
        private AudioClip(string name, ALFormat format, byte[] data, int size, int rate) : base(name)
        {
            _sampleRate = rate;
            _size       = size;

            // Allocate the AL buffer, then upload the PCM bytes into it.
            int handle = AL.GenBuffer();
            AL.BufferData(handle, format, ref data[0], size, rate);
            _handle = handle;
        }
Exemplo n.º 34
0
 /// <summary>
 /// Decodes a wave stream into PCM bytes, reporting format, size and frequency.
 /// </summary>
 public static byte[] Load(Stream data, out ALFormat format, out int size, out int frequency)
 {
   // Defaults in case the loader fails to assign the out parameters.
   format = ALFormat.Mono8;
   size = 0;
   frequency = 0;
   using (var waveReader = new BinaryReader(data))
   {
     return AudioLoader.LoadWave(waveReader, out format, out size, out frequency);
   }
 }
Exemplo n.º 35
0
        /// <summary>
        /// Initialize the audio stream parameters.
        /// </summary>
        /// <param name="channelCount">The number of channels of the stream.</param>
        /// <param name="sampleRate">The rate of samples, in samples per second.</param>
        protected void Initialize(int channelCount, int sampleRate)
        {
            SampleRate   = sampleRate;
            ChannelCount = channelCount;

            // Resolve the OpenAL format for this channel layout and reset stream state.
            format    = AudioDevice.Instance.GetFormat(channelCount);
            processed = 0;
            streaming = false;
        }
Exemplo n.º 36
0
 /// <summary>
 /// Opens the MP3 from the content folder and prepares an OpenAL source
 /// with a set of streaming buffers.
 /// </summary>
 public OpenTKMusic(string filename, SoundDevice device)
     : base(filename, device)
 {
     musicStream = new Mp3Stream(File.OpenRead("Content/" + filename + ".mp3"));

     // 16-bit output; pick mono or stereo based on the decoded stream.
     if (musicStream.Channels == 2)
     {
         format = ALFormat.Stereo16;
     }
     else
     {
         format = ALFormat.Mono16;
     }

     source = AL.GenSource();
     buffers = AL.GenBuffers(NumberOfBuffers);
     bufferData = new byte[BufferSize];
 }
Exemplo n.º 37
0
 /// <summary>
 /// Stores the playback parameters and (re)sizes the backing buffer,
 /// clearing it when no resize was performed.
 /// </summary>
 public void Init(int size, ALFormat format, int sampleRate)
 {
     Format     = format;
     SampleRate = sampleRate;

     bool resized = Resize(size);
     if (!resized)
     {
         Clear();
     }
 }
Exemplo n.º 38
0
 /// <summary>
 /// Decodes a wave stream into PCM bytes, reporting format, size and frequency.
 /// </summary>
 public static byte[] Load(Stream data, out ALFormat format, out int size, out int frequency)
 {
   // Defaults until the loader produces real values.
   format = ALFormat.Mono8;
   size = 0;
   frequency = 0;

   byte[] pcm;
   using (BinaryReader reader = new BinaryReader(data))
   {
     pcm = AudioLoader.LoadWave(reader, out format, out size, out frequency);
   }
   return pcm;
 }
Exemplo n.º 39
0
 /// <summary>
 /// Gets the amount of bytes in a sample of the specified format.
 /// </summary>
 public static int GetBytesPerSample(ALFormat Format)
 {
     // bytes per sample frame = (bits per channel / 8) * channel count
     if (Format == ALFormat.Stereo16)
     {
         return 4;
     }
     if (Format == ALFormat.Stereo8 || Format == ALFormat.Mono16)
     {
         return 2;
     }
     if (Format == ALFormat.Mono8)
     {
         return 1;
     }
     throw new NotImplementedException();
 }
Exemplo n.º 40
0
        /// <summary>
        /// Parses the RIFF/WAVE header of <paramref name="file"/> and prepares the source
        /// for either streamed or fully-buffered playback of its PCM data.
        /// </summary>
        /// <param name="file">Stream positioned at the start of a wave file.</param>
        /// <param name="streaming">When false, the whole data chunk is read up front.</param>
        /// <exception cref="InvalidDataException">The stream is not a wave file.</exception>
        /// <exception cref="NotSupportedException">The wave data is compressed or oddly laid out.</exception>
        public WavAudioSource(Stream file, bool streaming = true)
        {
            Streaming = streaming;

            _fileStream = file;
            _reader = new BinaryReader(file);

            if (new string(_reader.ReadChars(4)) != "RIFF")
                throw new InvalidDataException("Specified stream is not a wave file.");

            int chunkSize = _reader.ReadInt32();

            if (new string(_reader.ReadChars(4)) != "WAVE")
                throw new InvalidDataException("Specified stream is not a wave file");
            if (new string(_reader.ReadChars(4)) != "fmt ")
                throw new NotSupportedException("Specified wave format is not supported.");

            _formatChunkSize = _reader.ReadInt32();
            _audioFormat = _reader.ReadInt16();
            if (_audioFormat != 1)
                throw new NotSupportedException("Compressed WAV files are not supported.");

            _numChannels = _reader.ReadInt16();
            _sampleRate = _reader.ReadInt32();
            _byteRate = _reader.ReadInt32();
            _blockAlign = _reader.ReadInt16();
            _bitsPerSample = _reader.ReadInt16();

            // BUGFIX: fmt chunks may carry extension bytes (e.g. chunk size 18); skip
            // them so the next read lands on the following chunk id instead of failing.
            if (_formatChunkSize > 16)
                _reader.ReadBytes(_formatChunkSize - 16);

            if (new string(_reader.ReadChars(4)) != "data")
                throw new InvalidDataException("Specified wave format is not supported.");

            _dataChunkSize = _reader.ReadInt32();

            // Derive the OpenAL format from channel count and sample width.
            if (_numChannels == 2)
                _alFormat = _bitsPerSample == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;
            else
                _alFormat = _bitsPerSample == 8 ? ALFormat.Mono8 : ALFormat.Mono16;

            _dataStart = (int)_reader.BaseStream.Position;

            // BUGFIX: read exactly the data chunk. The old code requested
            // BaseStream.Length bytes starting *after* the header, i.e. more bytes
            // than remain in the stream.
            if (!Streaming)
                _data = _reader.ReadBytes(_dataChunkSize);
        }
Exemplo n.º 41
0
 /// <summary>
 /// Wraps an Ogg Vorbis stream in a reader and picks a 16-bit mono or stereo AL format.
 /// </summary>
 /// <exception cref="NotSupportedException">The stream has more than two channels.</exception>
 public NVorbisDecoder(Stream stream)
 {
     // Rewind so the reader parses the stream from its very beginning.
     stream.Seek (0, SeekOrigin.Begin);
     reader = new VorbisReader (stream, false);

     switch (reader.Channels) {
     case 1:
         format = ALFormat.Mono16;
         break;
     case 2:
         format = ALFormat.Stereo16;
         break;
     default:
         throw new NotSupportedException (string.Format ("{0} channels not supported", reader.Channels));
     }
 }
Exemplo n.º 42
0
        /// <summary>
        /// Creates an OpenAL source for this audio instance, derives the AL sample
        /// format from the audio info, and registers itself with the owning device.
        /// </summary>
        public Audio( IAudioDevice audioDevice, AudioInfo audioInfo )
        {
            // Weak reference so this instance does not keep the device alive.
            this.audioDevice = new WeakReference ( audioDevice );
            this.audioInfo = audioInfo;

            bufferIds = new List<int> ();
            sourceId = AL.GenSource ();
            // NOTE(review): BitPerSample is compared against 2 — presumably bytes per
            // sample (2 bytes == 16-bit); confirm against AudioInfo's definition.
            alFormat = ( ( audioInfo.AudioChannel == AudioChannel.Stereo ) ?
                ( ( audioInfo.BitPerSample == 2 ) ? ALFormat.Stereo16 : ALFormat.Stereo8 ) :
                ( ( audioInfo.BitPerSample == 2 ) ? ALFormat.Mono16 : ALFormat.Mono8 ) );

            // NOTE(review): `as` cast without a null check — a foreign IAudioDevice
            // implementation would cause a NullReferenceException here.
            ( audioDevice as AudioDevice ).audioList.Add ( this );
        }
Exemplo n.º 43
0
 /// <summary>
 /// Creates a streaming audio source on the given device with four OpenAL
 /// buffers ready for round-robin queueing.
 /// </summary>
 internal StreamingAudio(AudioDevice device, ALFormat format, int sampleRate)
 {
     bufferFormat = format;
     // NOTE(review): busy-wait until the device finishes initializing — burns a
     // full CPU core if the device is slow; a wait handle would be preferable.
     while (!device.ready)
         ;
     var sid = AL.GenSource ();
     sourceId = (int)sid;
     AudioDevice.CheckALError ();
     // Four buffers: enough to keep the source fed while others are refilled.
     bufferIds = AL.GenBuffers (4);
     AudioDevice.CheckALError ();
     this.device = device;
     this.sampleRate = sampleRate;
 }
 /// <summary>
 /// Stores the playback parameters and maps the channel count to a 16-bit AL format.
 /// </summary>
 /// <param name="sampleRate">Sample rate of the submitted PCM data, in Hz.</param>
 /// <param name="channels">Mono or stereo.</param>
 /// <exception cref="ArgumentOutOfRangeException">
 /// <paramref name="channels"/> is neither Mono nor Stereo.
 /// </exception>
 public DynamicSoundEffectInstance(int sampleRate, AudioChannels channels)
 {
     this.sampleRate = sampleRate;
     this.channels = channels;
     switch (channels)
     {
         case AudioChannels.Mono:
             this.format = ALFormat.Mono16;
             break;
         case AudioChannels.Stereo:
             this.format = ALFormat.Stereo16;
             break;
         default:
             // BUGFIX: previously fell through silently, leaving `format` at its
             // zero value and producing garbage playback later.
             throw new ArgumentOutOfRangeException(nameof(channels), channels,
                 "Only mono and stereo are supported.");
     }
 }
Exemplo n.º 45
0
        /// <summary>
        /// Opens an Ogg Vorbis stream and, unless streaming, decodes every sample up front.
        /// </summary>
        public OggAudioSource(Stream file, bool streaming = true)
        {
            Streaming = streaming;
            _fileStream = file;

            _reader = new VorbisReader(file, false);
            _sampleRate = _reader.SampleRate;

            // 16-bit output; stereo when the stream has two channels, mono otherwise.
            if (_reader.Channels == 2)
                _format = ALFormat.Stereo16;
            else
                _format = ALFormat.Mono16;

            if (!Streaming)
            {
                // Decode the whole file into floats, then convert to 16-bit samples.
                int totalSamples = (int)_reader.TotalSamples;
                _data = new short[_reader.TotalSamples];
                float[] decoded = new float[_reader.TotalSamples];
                _reader.ReadSamples(decoded, 0, totalSamples);
                CastSamples(ref decoded, ref _data, decoded.Length);
            }
        }
Exemplo n.º 46
0
        /// <summary>
        /// Decodes an audio stream (currently wave only) into PCM bytes,
        /// reporting its format, size and frequency.
        /// </summary>
        public static byte[] Load(Stream data, out ALFormat format, out int size, out int frequency)
        {
            // Defaults until the wave loader fills in real values.
            format = ALFormat.Mono8;
            size = 0;
            frequency = 0;

            using (BinaryReader reader = new BinaryReader(data))
            {
                // decide which data type is this

                // for now we'll only support wave files
                return LoadWave(reader, out format, out size, out frequency);
            }
        }
		/// <summary>
		/// Sets up an OpenAL context, source and triple buffering with fixed default
		/// playback parameters (stereo, 16-bit, 44.1 kHz, one second per buffer), then
		/// stores the wave provider that will supply the PCM data.
		/// </summary>
		public void Init(IWaveProvider waveProvider){
			OpenALSoundOut.context = new AudioContext ();
			sourceID = AL.GenSource (); // source is the sound source, like an ID of the soundcard
			bufferCount = 3; // triple buffering, so playback can stay ahead of refills
			bufferIDs = AL.GenBuffers (bufferCount); // generate the buffers to be played in sequence
			state = 4116; // raw AL state: 4116 == ALSourceState.Stopped, 4114 == ALSourceState.Playing
			channels = 2; // how many audio channels to allocate. 1 for mono and 2 for stereo
			bitsPerSample = 16; // default bits per sample
			sampleRate = 44100; // default audio rate
			milissecondsPerBuffer = 1000;
			alFormat = OpenALSoundOut.GetSoundFormat (channels, bitsPerSample);
			// bytes per buffer = bytes-per-frame * frames-per-second * seconds-per-buffer
			int sampleSizeInBytes = (bitsPerSample / 8) * channels;
			bufferSize = (int)(sampleSizeInBytes * sampleRate * (milissecondsPerBuffer/1000f));
			Console.WriteLine ("Using buffers of "+(milissecondsPerBuffer/1000f)+" seconds");
			currentBufferIndex = 0;
			buffersRead = 0;
			buffersPlayed = 0;

			this.waveProvider = waveProvider;
		}
Exemplo n.º 48
0
 /// <summary>
 /// Keeps a reference to the PCM data and uploads it into the OpenAL buffer,
 /// then computes the clip duration from the buffer's bit depth and channel count.
 /// </summary>
 /// <param name="dataBuffer">Raw PCM samples.</param>
 /// <param name="format">OpenAL sample format of the data.</param>
 /// <param name="size">Number of valid bytes in <paramref name="dataBuffer"/>.</param>
 /// <param name="sampleRate">Sample rate in Hz.</param>
 public void BindDataBuffer(byte[] dataBuffer, ALFormat format, int size, int sampleRate)
 {
   this.pcmDataBuffer = dataBuffer;
   this.openALFormat = format;
   this.dataSize = size;
   this.sampleRate = sampleRate;
   AL.BufferData<byte>(this.openALDataBuffer, this.openALFormat, this.pcmDataBuffer, this.dataSize, this.sampleRate);

   // Query the uploaded buffer so duration reflects what AL actually stored.
   int bits;
   AL.GetBuffer(this.openALDataBuffer, ALGetBufferi.Bits, out bits);
   int channelCount;
   AL.GetBuffer(this.openALDataBuffer, ALGetBufferi.Channels, out channelCount);
   ALError error = AL.GetError();
   if (error != ALError.NoError)
   {
     // BUGFIX: the format string previously lacked a {0} placeholder, so the
     // actual error text was never printed.
     Console.WriteLine("Failed to get buffer attributes: {0}", (object) AL.GetErrorString(error));
     this.Duration = -1.0;
   }
   else
     // duration (s) = bytes / bytes-per-frame / sample rate
     this.Duration = (double) (size / (bits / 8 * channelCount)) / (double) sampleRate;
 }
Exemplo n.º 49
0
 /// <summary>
 /// Parses a RIFF/WAVE stream: walks its chunks to the "data" chunk and returns
 /// the remaining bytes as PCM, reporting AL format, size and sample rate.
 /// </summary>
 /// <exception cref="NotSupportedException">The stream is not a recognizable wave file.</exception>
 private static byte[] LoadWave(BinaryReader reader, out ALFormat format, out int size, out int frequency)
 {
   if (new string(reader.ReadChars(4)) != "RIFF")
     throw new NotSupportedException("Specified stream is not a wave file.");
   reader.ReadInt32(); // RIFF chunk size — not needed
   if (new string(reader.ReadChars(4)) != "WAVE")
     throw new NotSupportedException("Specified stream is not a wave file.");
   // Skip chunks until the "fmt " chunk is found.
   for (string str = new string(reader.ReadChars(4)); str != "fmt "; str = new string(reader.ReadChars(4)))
     reader.ReadBytes(reader.ReadInt32());
   int num1 = reader.ReadInt32(); // fmt chunk size
   int num2 = (int) reader.ReadUInt16(); // wFormatTag (2 => MS-ADPCM, see GetSoundFormat call below)
   int channels = (int) reader.ReadUInt16();
   int num3 = (int) reader.ReadUInt32(); // sample rate
   int num4 = (int) reader.ReadUInt32(); // byte rate (unused)
   int num5 = (int) reader.ReadUInt16(); // block align
   int bits = (int) reader.ReadUInt16();
   if (num1 > 16)
     reader.ReadBytes(num1 - 16); // skip fmt-chunk extension bytes
   string str1;
   // Skip chunks until a chunk whose lower-cased id is "data" is found.
   for (str1 = new string(reader.ReadChars(4)); str1.ToLower(CultureInfo.InvariantCulture) != "data"; str1 = new string(reader.ReadChars(4)))
     reader.ReadBytes(reader.ReadInt32());
   // NOTE(review): this exact-case re-check rejects e.g. "DATA" even though the
   // loop above accepted it case-insensitively — confirm that is intended.
   if (str1 != "data")
     throw new NotSupportedException("Specified wave file is not supported.");
   int num6 = reader.ReadInt32(); // data chunk size
   frequency = num3;
   format = AudioLoader.GetSoundFormat(channels, bits, num2 == 2);
   byte[] numArray;
   // NOTE(review): the 11072-byte threshold and reading BaseStream.Length bytes
   // from the current position look like decompiler artifacts — small files yield
   // empty data, and the read requests more bytes than remain (ReadBytes then
   // returns only what is left). Verify against the original source.
   if (reader.BaseStream.Length <= 11072L)
   {
     numArray = new byte[0];
     size = 0;
   }
   else
   {
     numArray = reader.ReadBytes((int) reader.BaseStream.Length);
     size = num6 / num5 * num5; // data size rounded down to whole blocks
   }
   return numArray;
 }
Exemplo n.º 50
0
        /// <summary>
        /// Builds a SoundEffect from raw 16-bit PCM data. On DirectX the buffer is used
        /// directly; on desktop OpenGL it is stored with a matching AL format; otherwise
        /// a complete RIFF/WAVE image is synthesized in memory for the platform Sound API.
        /// </summary>
        public SoundEffect(byte[] buffer, int sampleRate, AudioChannels channels)
        {
#if DIRECTX            
            Initialize(new WaveFormat(sampleRate, (int)channels), buffer, 0, buffer.Length, 0, buffer.Length);
#elif (WINDOWS && OPENGL) || LINUX
            _data = buffer;
            Size = buffer.Length;
            Format = (channels == AudioChannels.Stereo) ? ALFormat.Stereo16 : ALFormat.Mono16;
            Rate = sampleRate;
#else
            //buffer should contain 16-bit PCM wave data
            short bitsPerSample = 16;

            _name = "";

            // Synthesize a canonical 44-byte RIFF header in front of the PCM payload.
            using (var mStream = new MemoryStream(44+buffer.Length))
            using (var writer = new BinaryWriter(mStream))
            {
                writer.Write("RIFF".ToCharArray()); //chunk id
                writer.Write((int)(36 + buffer.Length)); //chunk size
                writer.Write("WAVE".ToCharArray()); //RIFF type

                writer.Write("fmt ".ToCharArray()); //chunk id
                writer.Write((int)16); //format header size
                writer.Write((short)1); //format (PCM)
                writer.Write((short)channels);
                writer.Write((int)sampleRate);
                short blockAlign = (short)((bitsPerSample / 8) * (int)channels);
                writer.Write((int)(sampleRate * blockAlign)); //byte rate
                writer.Write((short)blockAlign);
                writer.Write((short)bitsPerSample);

                writer.Write("data".ToCharArray()); //chunk id
                writer.Write((int)buffer.Length); //data size

                writer.Write(buffer);

                _data = mStream.ToArray();
            }

            _sound = new Sound(_data, 1.0f, false);
#endif
        }
Exemplo n.º 51
0
        /// <summary>
        /// Stores the raw 16-bit PCM buffer and derives the OpenAL format for it.
        /// Desktop OpenAL keeps the buffer as-is; MonoMac/iOS additionally resets the name.
        /// </summary>
        private void PlatformInitialize(byte[] buffer, int sampleRate, AudioChannels channels)
        {
			Rate = (float)sampleRate;
            Size = (int)buffer.Length;

#if OPENAL && !(MONOMAC || IOS)

            _data = buffer;
            Format = (channels == AudioChannels.Stereo) ? ALFormat.Stereo16 : ALFormat.Mono16;
            return;

#endif

#if MONOMAC || IOS

            //buffer should contain 16-bit PCM wave data
            short bitsPerSample = 16;

            // bitsPerSample is fixed at 16 above, so only the 16-bit formats are reachable.
            if ((int)channels <= 1)
                Format = bitsPerSample == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
            else
                Format = bitsPerSample == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;

            _name = "";
            _data = buffer;

#endif
        }
Exemplo n.º 52
0
 /// <summary>
 /// Uploads the whole byte array into this OpenAL buffer at the given frequency.
 /// </summary>
 public void BufferData(byte[] data,ALFormat format,int freq)
 {
     int byteCount = data.Length;
     AL.BufferData(id, format, data, byteCount, freq);
 }
Exemplo n.º 53
0
        /// <summary>
        /// Loads audio from a stream. Desktop OpenAL decodes via AudioLoader (copying
        /// non-seekable streams into memory on Android first); MonoMac/iOS parses the
        /// WAVE data with AudioFileStream and falls back to AVAudioPlayer when the
        /// parsed attributes are invalid.
        /// </summary>
        private void PlatformLoadAudioStream(Stream s)
        {
#if OPENAL && !(MONOMAC || IOS)
            
            ALFormat format;
            int size;
            int freq;

            var stream = s;
#if ANDROID
            var needsDispose = false;
            try
            {
                // If seek is not supported (usually an indicator of a stream opened into the AssetManager), then copy
                // into a temporary MemoryStream.
                if (!s.CanSeek)
                {
                    needsDispose = true;
                    stream = new MemoryStream();
                    s.CopyTo(stream);
                    stream.Position = 0;
                }
#endif
                _data = AudioLoader.Load(stream, out format, out size, out freq);
#if ANDROID
            }
            finally
            {
                // Only the temporary copy is disposed; the caller keeps ownership of `s`.
                if (needsDispose)
                    stream.Dispose();
            }
#endif
            Format = format;
            Size = size;
            Rate = freq;

#endif

#if MONOMAC || IOS

            var audiodata = new byte[s.Length];
            s.Read(audiodata, 0, (int)s.Length);

            using (AudioFileStream afs = new AudioFileStream (AudioFileType.WAVE))
            {
                afs.ParseBytes (audiodata, false);
                Size = (int)afs.DataByteCount;

                // Copy just the PCM payload out of the parsed file image.
                _data = new byte[afs.DataByteCount];
                Array.Copy (audiodata, afs.DataOffset, _data, 0, afs.DataByteCount);

                AudioStreamBasicDescription asbd = afs.DataFormat;
                int channelsPerFrame = asbd.ChannelsPerFrame;
                int bitsPerChannel = asbd.BitsPerChannel;

                // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel are invalid because of a bug in Xamarin.iOS
                // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
                if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
                {
                    NSError err;
                    using (NSData nsData = NSData.FromArray(audiodata))
                    using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
                    {
                        channelsPerFrame = (int)player.NumberOfChannels;
                        bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);

						Rate = (float)player.SoundSetting.SampleRate;
                        _duration = TimeSpan.FromSeconds(player.Duration);
                    }
                }
                else
                {
                    Rate = (float)asbd.SampleRate;
                    // duration (s) = bytes / bytes-per-frame / sample rate
                    double duration = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
                    _duration = TimeSpan.FromSeconds(duration);
                }

                if (channelsPerFrame == 1)
                    Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
                else
                    Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
            }

#endif
        }
Exemplo n.º 54
0
		/// <summary>
		/// Decodes the given audio file into a single PCM byte buffer via OpenALSupport,
		/// reporting its size, AL format, sample rate and duration.
		/// </summary>
		public static byte[] LoadFromFile (string filename, out int dataBufferSize, out ALFormat alFormat, out double sampleRate, out double duration)
		{
			NSUrl fileUrl = NSUrl.FromFilename (filename);
			return OpenALSupport.GetOpenALAudioDataAll (fileUrl,
			                                    out dataBufferSize, out alFormat, out sampleRate, out duration);
		}
Exemplo n.º 55
0
		/// <summary>
		/// Decodes up to <paramref name="maxBufferSize"/> bytes of PCM from the
		/// ExtAudioFile into <paramref name="dataBuffer"/> (pinned for the native read),
		/// reporting the byte count, AL format and sample rate. Returns false on failure.
		/// </summary>
		public static bool GetDataFromExtAudioFile (ExtAudioFile ext, AudioStreamBasicDescription outputFormat, int maxBufferSize,
		                                       byte[] dataBuffer, out int dataBufferSize, out ALFormat format, out double sampleRate)
		{
			uint errorStatus = 0;
			uint bufferSizeInFrames = 0;
			dataBufferSize = 0;
			format = ALFormat.Mono16;
			sampleRate = 0;
			/* Compute how many frames will fit into our max buffer size */
			bufferSizeInFrames = (uint)(maxBufferSize / outputFormat.BytesPerFrame);

			if (dataBuffer != null) {
				var audioBufferList = new AudioBuffers(maxBufferSize);

				// This a hack so if there is a problem speak to kjpou1 -Kenneth
				// the cleanest way is to copy the buffer to the pointer already allocated
				// but what we are going to do is replace the pointer with our own and restore it later
				//
				GCHandle meBePinned = GCHandle.Alloc (dataBuffer, GCHandleType.Pinned);
				IntPtr meBePointer = meBePinned.AddrOfPinnedObject ();

				audioBufferList.SetData (0, meBePointer);

				try {
					// Read the data into an AudioBufferList
					// errorStatus here returns back the amount of information read
					ExtAudioFileError extAudioFileError = ExtAudioFileError.OK;
					errorStatus = ext.Read (bufferSizeInFrames, audioBufferList, out extAudioFileError);
					if (errorStatus >= 0) {
						/* Success */
						/* Note: 0 == bufferSizeInFrames is a legitimate value meaning we are EOF. */

						/* ExtAudioFile.Read returns the number of frames actually read.
						 * Need to convert back to bytes.
						 */
						// NOTE(review): this uses the *requested* frame count; the number of
						// frames actually read is in errorStatus — confirm short reads cannot
						// happen here, otherwise dataBufferSize over-reports.
						dataBufferSize = (int)bufferSizeInFrames * outputFormat.BytesPerFrame;

						// Now we set our format
						format = outputFormat.ChannelsPerFrame > 1 ? ALFormat.Stereo16 : ALFormat.Mono16;

						sampleRate = outputFormat.SampleRate;
					} else {
#if DEBUG						
						Console.WriteLine ("ExtAudioFile.Read failed, Error = " + errorStatus);
#endif
						return false;
					}
				} catch (Exception exc) {
#if DEBUG
					Console.WriteLine ("ExtAudioFile.Read failed: " + exc.Message);
#endif
					return false;
				} finally {
					// Don't forget to free our dataBuffer memory pointer that was pinned above
					meBePinned.Free ();
					// and restore what was allocated to beginwith
					audioBufferList.SetData (0, IntPtr.Zero);
				}


			}
			return true;
		}
Exemplo n.º 56
0
		/// <summary>
		/// Decodes the entire audio file at <paramref name="file_url"/> into one PCM
		/// byte buffer, reporting its size, AL format, sample rate and duration.
		/// Returns null when opening or reading the file fails.
		/// </summary>
		public static byte[] GetOpenALAudioDataAll (NSUrl file_url, out int dataBufferSize, out ALFormat alFormat, out double sampleRate, out double duration)
		{

			long fileLengthInFrames = 0;
			AudioStreamBasicDescription outputFormat;
			int maxBufferSize;
			byte[] pcmData;
			dataBufferSize = 0;
			alFormat = 0;
			sampleRate = 0;
			duration = 0;

			ExtAudioFile extFile;

			try {
				extFile = GetExtAudioFile (file_url, out outputFormat);
			} catch (Exception extExc) {
#if DEBUG				
				Console.WriteLine ("ExtAudioFile.OpenUrl failed, Error : " + extExc.Message);
#endif
				return null;
			}

			/* Get the total frame count */
			try {
				fileLengthInFrames = extFile.FileLengthFrames;
			} catch (Exception exc) {
#if DEBUG				
				Console.WriteLine ("ExtAudioFile.FileLengthFranes failed, Error : " + exc.Message);
#endif
				return null;
			}

			/* Compute the number of bytes needed to hold all the data in the file. */
			maxBufferSize = (int)(fileLengthInFrames * outputFormat.BytesPerFrame);
			/* Allocate memory to hold all the decoded PCM data. */
			pcmData = new byte[maxBufferSize];

			bool gotData = GetDataFromExtAudioFile (extFile, outputFormat, maxBufferSize, pcmData,
			                        out dataBufferSize, out alFormat, out sampleRate);

			if (!gotData) {
				pcmData = null;
			}

			// duration (s) = bytes / bytes-per-frame / sample rate; computed even on
			// failure, where dataBufferSize is 0 and the duration comes out as 0.
			duration = (dataBufferSize / ((outputFormat.BitsPerChannel / 8) * outputFormat.ChannelsPerFrame)) / outputFormat.SampleRate;

			// we probably should make sure the buffer sizes are in accordance.
			//	assert(maxBufferSize == dataBufferSize);

			// NOTE(review): the ExtAudioFile is only dropped, not disposed — confirm it
			// holds no native handle that needs an explicit Dispose here.
			// We do not need the ExtAudioFile so we will set it to null
			extFile = null;
			return pcmData;

		}
Exemplo n.º 57
0
 /// <summary>
 /// Preserves the given data buffer by reference and binds its contents to the OALSoundBuffer
 /// that is created in the InitializeSound method.
 /// </summary>
 /// <param name="data">The sound data buffer; held by reference, not copied.</param>
 /// <param name="format">The sound buffer data format, e.g. Mono, Mono16 bit, Stereo, etc.</param>
 /// <param name="size">The size of the data buffer, in bytes.</param>
 /// <param name="rate">The sampling rate of the sound effect, e.g. 44 khz, 22 khz.</param>
 protected void BindDataBuffer(byte[] data, ALFormat format, int size, int rate)
 {
     // Delegates straight to the wrapped buffer created by InitializeSound.
     soundBuffer.BindDataBuffer(data, format, size, rate);
 }
Exemplo n.º 58
0
		/// <summary>
		/// Decodes up to <paramref name="maxBufferSize"/> bytes of PCM from the
		/// ExtAudioFile into <paramref name="dataBuffer"/> by temporarily swapping the
		/// buffer-list data pointer for a pinned managed pointer. Returns false on failure.
		/// </summary>
		public static bool GetDataFromExtAudioFile (ExtAudioFile ext, AudioStreamBasicDescription outputFormat, int maxBufferSize,
		                                       byte[] dataBuffer, out int dataBufferSize, out ALFormat format, out double sampleRate)
		{
			int errorStatus = 0;
			int bufferSizeInFrames = 0;
			dataBufferSize = 0;
			format = ALFormat.Mono16;
			sampleRate = 0;
			/* Compute how many frames will fit into our max buffer size */
			bufferSizeInFrames = maxBufferSize / outputFormat.BytesPerFrame;

			if (dataBuffer != null) {
				MutableAudioBufferList audioBufferList = new MutableAudioBufferList (1, maxBufferSize);

				audioBufferList.Buffers [0].DataByteSize = maxBufferSize;
				audioBufferList.Buffers [0].NumberChannels = outputFormat.ChannelsPerFrame;



				// This a hack so if there is a problem speak to kjpou1 -Kenneth
				// the cleanest way is to copy the buffer to the pointer already allocated
				// but what we are going to do is replace the pointer with our own and restore it later
				//
				GCHandle meBePinned = GCHandle.Alloc (dataBuffer, GCHandleType.Pinned);
				IntPtr meBePointer = meBePinned.AddrOfPinnedObject ();

				// Let's not use copy for right now while we test this.  For very large files this
				//  might show some stutter in the sound loading
				//Marshal.Copy(dataBuffer, 0, audioBufferList.Buffers[0].Data, maxBufferSize);
				IntPtr savedDataPtr = audioBufferList.Buffers [0].Data;
				audioBufferList.Buffers [0].Data = meBePointer;


				try {
					// Read the data into an AudioBufferList
					// errorStatus here returns back the amount of information read
					errorStatus = ext.Read (bufferSizeInFrames, audioBufferList);
					if (errorStatus >= 0) {
						/* Success */
						/* Note: 0 == bufferSizeInFrames is a legitimate value meaning we are EOF. */

						/* ExtAudioFile.Read returns the number of frames actually read.
						 * Need to convert back to bytes.
						 */
						// NOTE(review): this uses the *requested* frame count; the frames
						// actually read are in errorStatus — confirm short reads cannot occur.
						dataBufferSize = bufferSizeInFrames * outputFormat.BytesPerFrame;

						// Now we set our format
						format = outputFormat.ChannelsPerFrame > 1 ? ALFormat.Stereo16 : ALFormat.Mono16;

						sampleRate = outputFormat.SampleRate;
					} else {
#if DEBUG						
						Console.WriteLine ("ExtAudioFile.Read failed, Error = " + errorStatus);
#endif
						return false;
					}
				} catch (Exception exc) {
#if DEBUG
					Console.WriteLine ("ExtAudioFile.Read failed: " + exc.Message);
#endif
					return false;
				} finally {
					// Don't forget to free our dataBuffer memory pointer that was pinned above
					meBePinned.Free ();
					// and restore what was allocated to beginwith
					audioBufferList.Buffers[0].Data = savedDataPtr;
				}


			}
			return true;
		}
Exemplo n.º 59
0
        /// <summary>
        /// Creates the OpenAL buffer and source, uploads the PCM data, binds the
        /// buffer to the source, then applies volume and looping.
        /// </summary>
        /// <param name="data">Raw PCM samples.</param>
        /// <param name="format">OpenAL sample format of the data.</param>
        /// <param name="size">Number of valid bytes in <paramref name="data"/>.</param>
        /// <param name="frequency">Sample rate in Hz.</param>
        /// <param name="volume">Initial playback volume.</param>
        /// <param name="looping">Whether playback should loop.</param>
        private void Initialize(byte[] data, ALFormat format, int size, int frequency, 
		                        float volume, bool looping)
        {
            InitilizeSoundServices();

            bufferID = AL.GenBuffer();
            sourceID = AL.GenSource();

            // BUGFIX: removed `catch (Exception ex) { throw ex; }` — it only reset the
            // original stack trace without adding any handling or cleanup.

            // loads sound into buffer
            AL.BufferData(bufferID, format, data, size, frequency);

            // binds buffer to source
            AL.Source(sourceID, ALSourcei.Buffer, bufferID);

            Volume = volume;
            this.looping = looping;
        }
Exemplo n.º 60
0
        private TagLib.File m_TagLibFile; // TagLibSharp file object

        #endregion Fields

        #region Constructors

        /// <summary>
        /// Opens the Ogg Vorbis file for decoding (CSVorbis) and tag reading (TagLibSharp),
        /// validates its logical bitstreams, and determines the AL playback format.
        /// </summary>
        /// <param name="Filename">
        /// A <see cref="System.String"/> containing the path to the Ogg Vorbis file this instance represents
        /// </param>
        /// <exception cref="OggFileReadException">The file is missing, unreadable, or has no bitstreams.</exception>
        /// <exception cref="OggFileCorruptException">The tag data is corrupt.</exception>
        public OggFile(string Filename)
        {
            // Check that the file exists
            if (!(System.IO.File.Exists(Filename))) { throw new OggFileReadException("File not found", Filename); }
            // Load the relevant objects
            m_Filename = Filename;
            try
            {
                m_CSVorbisFile = new VorbisFile(m_Filename);
            }
            catch (Exception ex)
            {
                throw new OggFileReadException("Unable to open file for data reading\n" + ex.Message, Filename);
            }
            try
            {
                m_TagLibFile = TagLib.File.Create(m_Filename);
            }
            catch (TagLib.UnsupportedFormatException ex)
            {
                throw new OggFileReadException("Unsupported format (not an ogg?)\n" + ex.Message, Filename);
            }
            catch (TagLib.CorruptFileException ex)
            {
                throw new OggFileCorruptException(ex.Message, Filename, "Tags");
            }

            // Populate some other info shizzle and do a little bit of sanity checking
            m_Streams = m_CSVorbisFile.streams();
            if (m_Streams<=0) { throw new OggFileReadException("File doesn't contain any logical bitstreams", Filename); }
            // Assuming <0 is for whole file and >=0 is for specific logical bitstreams
            m_Bitrate = m_CSVorbisFile.bitrate(-1);
            m_LengthTime = (int)m_CSVorbisFile.time_total(-1);
            // Figure out the ALFormat of the stream
            m_Info = m_CSVorbisFile.getInfo();	// Get the info of the first stream, assuming all streams are the same? Dunno if this is safe tbh
            if (m_Info[0] == null) { throw new OggFileReadException("Unable to determine Format{FileInfo.Channels} for first bitstream", Filename); }
            // NOTE(review): AudioBitrate==16 is used as a stand-in for "16 bits per
            // sample" (the original author called it a fudge) — confirm TagLib's
            // AudioBitrate really carries bit depth for these files.
            if (m_TagLibFile.Properties.AudioBitrate==16) {
                m_Format = (m_Info[0].channels)==1 ? ALFormat.Mono16 : ALFormat.Stereo16; // This looks like a fudge, but I've seen it a couple of times (what about the other formats I wonder?)
            }
            else
            {
                m_Format = (m_Info[0].channels)==1 ? ALFormat.Mono8 : ALFormat.Stereo8;
            }

            // A grab our first instance of the file so we're ready to play
            m_CSVorbisFileInstance = m_CSVorbisFile.makeInstance();
        }