Example #1
        public Sound(string filename, float volume, bool looping)
        {
            ALFormat format;
            int      size;
            int      freq;

            byte[] data;
            Stream s;

            try
            {
                s = File.OpenRead(filename);
            }
            catch (IOException e)
            {
                throw new Content.ContentLoadException("Could not load audio data", e);
            }

            data = AudioLoader.Load(s, out format, out size, out freq);

            s.Close();

            Initialize(data, format, size, freq, volume, looping);
        }
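
A minimal call-site sketch for the constructor above; the file name, volume, and looping flag are illustrative values, not taken from the source:

        // Hypothetical usage: load "shot.wav" at full volume, without looping.
        var sound = new Sound("shot.wav", 1.0f, false);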
Example #2
        private void PlatformLoadAudioStream(Stream s)
        {
#if OPENAL && !(MONOMAC || IOS)
            ALFormat format;
            int      size;
            int      freq;

            var stream = s;
#if ANDROID
            var needsDispose = false;
            try
            {
                // If seek is not supported (usually an indicator of a stream opened into the AssetManager), then copy
                // into a temporary MemoryStream.
                if (!s.CanSeek)
                {
                    needsDispose = true;
                    stream       = new MemoryStream();
                    s.CopyTo(stream);
                    stream.Position = 0;
                }
#endif
            _data = AudioLoader.Load(stream, out format, out size, out freq);
#if ANDROID
            }
            finally
            {
                if (needsDispose)
                {
                    stream.Dispose();
                }
            }
#endif
            Format = format;
            Size   = size;
            Rate   = freq;
#endif

#if MONOMAC || IOS
            var audiodata = new byte[s.Length];
            s.Read(audiodata, 0, (int)s.Length);

            using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
            {
                afs.ParseBytes(audiodata, false);
                Size = (int)afs.DataByteCount;

                _data = new byte[afs.DataByteCount];
                Array.Copy(audiodata, afs.DataOffset, _data, 0, afs.DataByteCount);

                AudioStreamBasicDescription asbd = afs.DataFormat;
                int channelsPerFrame = asbd.ChannelsPerFrame;
                int bitsPerChannel   = asbd.BitsPerChannel;

                // Due to a Xamarin.iOS bug, asbd.ChannelsPerFrame and asbd.BitsPerChannel can occasionally come back invalid (zero or negative).
                // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
                if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
                {
                    NSError err;
                    using (NSData nsData = NSData.FromArray(audiodata))
                    using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
                    {
                        channelsPerFrame = (int)player.NumberOfChannels;
                        bitsPerChannel   = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);

                        Rate      = (float)player.SoundSetting.SampleRate;
                        _duration = TimeSpan.FromSeconds(player.Duration);
                    }
                }
                else
                {
                    Rate = (float)asbd.SampleRate;
                    double duration = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
                    _duration = TimeSpan.FromSeconds(duration);
                }

                if (channelsPerFrame == 1)
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
                }
                else
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
                }
            }
#endif
        }
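
The Android branch in the example above copies a non-seekable stream (typically one opened from the AssetManager) into a temporary MemoryStream so that AudioLoader.Load can seek over it. The same idea can be written as a small standalone helper; this is a minimal sketch, and EnsureSeekable is an illustrative name, not part of the library (assumes System.IO is available):

        // Illustrative helper (assumption, not library code): return a seekable stream,
        // copying into a temporary MemoryStream when the source does not support seeking.
        private static Stream EnsureSeekable(Stream s, out bool needsDispose)
        {
            if (s.CanSeek)
            {
                needsDispose = false;
                return s;
            }

            var copy = new MemoryStream();
            s.CopyTo(copy);
            copy.Position = 0;
            needsDispose = true;    // the caller disposes the temporary copy after loading
            return copy;
        }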
Example #3
        private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
        {
            byte[] buffer;

#if OPENAL && !(MONOMAC || IOS)
            ALFormat format;
            int      size;
            int      freq;

            var stream = s;

            buffer = AudioLoader.Load(stream, out format, out size, out freq);

            Format = format;
            Size   = size;
            Rate   = freq;

            // Note: size is in bytes, so this is exact only for 8-bit mono data;
            // Example #4 derives bytes-per-second from the format before dividing.
            duration = TimeSpan.FromSeconds((float)size / freq);
#endif

#if MONOMAC || IOS
            var audiodata = new byte[s.Length];
            s.Read(audiodata, 0, (int)s.Length);

            using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
            {
                afs.ParseBytes(audiodata, false);
                Size = (int)afs.DataByteCount;

                buffer = new byte[afs.DataByteCount];
                Array.Copy(audiodata, afs.DataOffset, buffer, 0, afs.DataByteCount);

                AudioStreamBasicDescription asbd = afs.DataFormat;
                int channelsPerFrame = asbd.ChannelsPerFrame;
                int bitsPerChannel   = asbd.BitsPerChannel;

                // Due to a Xamarin.iOS bug, asbd.ChannelsPerFrame and asbd.BitsPerChannel can occasionally come back invalid (zero or negative).
                // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
                if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
                {
                    NSError err;
                    using (NSData nsData = NSData.FromArray(audiodata))
                    using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
                    {
                        channelsPerFrame = (int)player.NumberOfChannels;
                        bitsPerChannel   = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);

                        Rate     = (float)player.SoundSetting.SampleRate;
                        duration = TimeSpan.FromSeconds(player.Duration);
                    }
                }
                else
                {
                    Rate = (float)asbd.SampleRate;
                    double durationSec = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
                    duration = TimeSpan.FromSeconds(durationSec);
                }

                if (channelsPerFrame == 1)
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
                }
                else
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
                }
            }
#endif
            // bind buffer
            SoundBuffer = new OALSoundBuffer();
            SoundBuffer.BindDataBuffer(buffer, Format, Size, (int)Rate);
        }
Example #4
        private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
        {
            byte[] buffer;

#if OPENAL && !(MONOMAC || IOS)
            ALFormat format;
            int      size;
            int      freq;

            var stream = s;

            buffer = AudioLoader.Load(stream, out format, out size, out freq);

            Format = format;
            Size   = size;
            Rate   = freq;

            // size is in bytes: scale the sample rate by bytes per sample frame
            // (Mono8 = 1, Mono16/Stereo8 = 2, Stereo16 = 4) to get bytes per second.
            var bytesPerSecond = freq;
            if (format == ALFormat.Mono16 || format == ALFormat.Stereo8)
            {
                bytesPerSecond *= 2;
            }
            else if (format == ALFormat.Stereo16)
            {
                bytesPerSecond *= 4;
            }

            duration = TimeSpan.FromSeconds((float)size / bytesPerSecond);
#endif

#if MONOMAC || IOS
            var audiodata = new byte[s.Length];
            s.Read(audiodata, 0, (int)s.Length);

            using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
            {
                afs.ParseBytes(audiodata, false);
                Size = (int)afs.DataByteCount;

                buffer = new byte[afs.DataByteCount];
                Array.Copy(audiodata, afs.DataOffset, buffer, 0, afs.DataByteCount);

                AudioStreamBasicDescription asbd = afs.DataFormat;
                int channelsPerFrame = asbd.ChannelsPerFrame;
                int bitsPerChannel   = asbd.BitsPerChannel;

                Rate = (float)asbd.SampleRate;
                double durationSec = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
                duration = TimeSpan.FromSeconds(durationSec);

                if (channelsPerFrame == 1)
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
                }
                else
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
                }
            }
#endif
            // bind buffer
            SoundBuffer = new OALSoundBuffer();
            SoundBuffer.BindDataBuffer(buffer, Format, Size, (int)Rate);
        }
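
The bytes-per-second branching in Example #4 can be factored into a small helper. This is a minimal sketch under the same assumptions as the example (Mono8 = 1, Mono16/Stereo8 = 2, Stereo16 = 4 bytes per sample frame); GetBytesPerSecond is an illustrative name, not part of the library:

        // Illustrative helper (assumption, not library code): bytes per second for an OpenAL
        // buffer format at a given sample rate, used to turn a byte count into a duration.
        private static int GetBytesPerSecond(ALFormat format, int freq)
        {
            switch (format)
            {
                case ALFormat.Mono8:
                    return freq;        // 1 byte per sample frame
                case ALFormat.Mono16:
                case ALFormat.Stereo8:
                    return freq * 2;    // 2 bytes per sample frame
                case ALFormat.Stereo16:
                    return freq * 4;    // 4 bytes per sample frame
                default:
                    throw new NotSupportedException("Unsupported ALFormat: " + format);
            }
        }

        // Usage: duration = TimeSpan.FromSeconds((double)size / GetBytesPerSecond(format, freq));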