public Sound(byte[] audiodata, float volume, bool looping)
{
#if !PORTABLE
    ALFormat format;
    int size;
    int freq;
    byte[] data;
    Stream s;

    try
    {
        s = new MemoryStream(audiodata);
    }
    catch (IOException e)
    {
        throw new Content.ContentLoadException("Could not load audio data", e);
    }

    data = AudioLoader.Load(s, out format, out size, out freq);
    s.Close();

    Initialize(data, format, size, freq, volume, looping);
#endif
}

private byte[] LoadAudioStream(Stream s, float volume, bool looping)
{
    ALFormat format;
    int size;
    int frequency;

    byte[] numArray = AudioLoader.Load(s, out format, out size, out frequency);

    this.Format = format;
    this.Size = size;
    this.Rate = frequency;

    return numArray;
}

byte[] LoadAudioStream(Stream s, float volume, bool looping)
{
    ALFormat format;
    int size;
    int freq;
    byte[] data;

    data = AudioLoader.Load(s, out format, out size, out freq);

    Format = format;
    Size = size;
    Rate = freq;

    return data;
}

byte[] LoadAudioStream(Stream s, float volume, bool looping)
{
    ALFormat format;
    int size;
    int freq;
    byte[] data;

    data = AudioLoader.Load(s, out format, out size, out freq);
    //s.Close();

    Format = format;
    Size = size;
    Rate = freq;

    return data;
    //Initialize(data, format, size, freq, volume, looping);
}

private void PlatformLoadAudioStream(Stream stream, out TimeSpan duration)
{
    byte[] buffer;
    ALFormat format;
    int freq;
    int channels;
    int blockAlignment;
    int bitsPerSample;
    int samplesPerBlock;
    int sampleCount;

    buffer = AudioLoader.Load(stream, out format, out freq, out channels, out blockAlignment,
                              out bitsPerSample, out samplesPerBlock, out sampleCount);

    duration = TimeSpan.FromSeconds((float)sampleCount / (float)freq);

    PlatformInitializeBuffer(buffer, buffer.Length, format, channels, freq, blockAlignment, bitsPerSample, 0, 0);
}

public Sound(byte[] audiodata, float volume, bool looping)
{
    Stream data1;
    try
    {
        data1 = new MemoryStream(audiodata);
    }
    catch (IOException ex)
    {
        throw new ContentLoadException("Could not load audio data", ex);
    }

    ALFormat format;
    int size;
    int frequency;
    byte[] data2 = AudioLoader.Load(data1, out format, out size, out frequency);
    data1.Close();

    this.Initialize(data2, format, size, frequency, volume, looping);
}

public Sound(string filename, float volume, bool looping)
{
    ALFormat format;
    int size;
    int freq;
    byte[] data;
    Stream s;

    try
    {
        s = File.OpenRead(filename);
    }
    catch (IOException e)
    {
        throw new Content.ContentLoadException("Could not load audio data", e);
    }

    data = AudioLoader.Load(s, out format, out size, out freq);
    s.Close();

    Initialize(data, format, size, freq, volume, looping);
}

private void PlatformLoadAudioStream(Stream s)
{
#if OPENAL && !(MONOMAC || IOS)
    ALFormat format;
    int size;
    int freq;

    var stream = s;
#if ANDROID
    var needsDispose = false;
    try
    {
        // If seek is not supported (usually an indicator of a stream opened into the AssetManager),
        // then copy into a temporary MemoryStream.
        if (!s.CanSeek)
        {
            needsDispose = true;
            stream = new MemoryStream();
            s.CopyTo(stream);
            stream.Position = 0;
        }
#endif
        _data = AudioLoader.Load(stream, out format, out size, out freq);
#if ANDROID
    }
    finally
    {
        if (needsDispose)
        {
            stream.Dispose();
        }
    }
#endif

    Format = format;
    Size = size;
    Rate = freq;
#endif

#if MONOMAC || IOS
    var audiodata = new byte[s.Length];
    s.Read(audiodata, 0, (int)s.Length);

    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);
        Size = (int)afs.DataByteCount;

        _data = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, _data, 0, afs.DataByteCount);

        AudioStreamBasicDescription asbd = afs.DataFormat;
        int channelsPerFrame = asbd.ChannelsPerFrame;
        int bitsPerChannel = asbd.BitsPerChannel;

        // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel
        // are invalid because of a bug in Xamarin.iOS.
        // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
        if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
        {
            NSError err;
            using (NSData nsData = NSData.FromArray(audiodata))
            using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
            {
                channelsPerFrame = (int)player.NumberOfChannels;
                bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);

                Rate = (float)player.SoundSetting.SampleRate;
                _duration = TimeSpan.FromSeconds(player.Duration);
            }
        }
        else
        {
            Rate = (float)asbd.SampleRate;
            double duration = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
            _duration = TimeSpan.FromSeconds(duration);
        }

        if (channelsPerFrame == 1)
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
        }
        else
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
        }
    }
#endif
}

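A minimal, standalone sketch of the non-seekable stream handling used in the ANDROID branch above, for illustration only; the helper name EnsureSeekable and the ownsCopy parameter are assumptions, not part of the original source:

static Stream EnsureSeekable(Stream source, out bool ownsCopy)
{
    // Seekable streams can be parsed in place.
    if (source.CanSeek)
    {
        ownsCopy = false;
        return source;
    }

    // Non-seekable streams (e.g. Android AssetManager streams) are buffered into a
    // MemoryStream, which the caller is responsible for disposing.
    var copy = new MemoryStream();
    source.CopyTo(copy);
    copy.Position = 0;
    ownsCopy = true;
    return copy;
}
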
private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
{
    byte[] buffer;

#if OPENAL && !(MONOMAC || IOS)
    ALFormat format;
    int size;
    int freq;

    var stream = s;

    buffer = AudioLoader.Load(stream, out format, out size, out freq);

    Format = format;
    Size = size;
    Rate = freq;

    // Note: size is in bytes, so this is only accurate for 8-bit mono data (one byte per sample frame).
    duration = TimeSpan.FromSeconds((float)size / freq);
#endif

#if MONOMAC || IOS
    var audiodata = new byte[s.Length];
    s.Read(audiodata, 0, (int)s.Length);

    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);
        Size = (int)afs.DataByteCount;

        buffer = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, buffer, 0, afs.DataByteCount);

        AudioStreamBasicDescription asbd = afs.DataFormat;
        int channelsPerFrame = asbd.ChannelsPerFrame;
        int bitsPerChannel = asbd.BitsPerChannel;

        // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel
        // are invalid because of a bug in Xamarin.iOS.
        // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
        if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
        {
            NSError err;
            using (NSData nsData = NSData.FromArray(audiodata))
            using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
            {
                channelsPerFrame = (int)player.NumberOfChannels;
                bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);

                Rate = (float)player.SoundSetting.SampleRate;
                duration = TimeSpan.FromSeconds(player.Duration);
            }
        }
        else
        {
            Rate = (float)asbd.SampleRate;
            double durationSec = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
            duration = TimeSpan.FromSeconds(durationSec);
        }

        if (channelsPerFrame == 1)
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
        }
        else
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
        }
    }
#endif

    // bind buffer
    SoundBuffer = new OALSoundBuffer();
    SoundBuffer.BindDataBuffer(buffer, Format, Size, (int)Rate);
}

private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
{
    byte[] buffer;

#if OPENAL && !(MONOMAC || IOS)
    ALFormat format;
    int size;
    int freq;

    var stream = s;

    buffer = AudioLoader.Load(stream, out format, out size, out freq);

    Format = format;
    Size = size;
    Rate = freq;

    var bytesPerSecond = freq;
    if (format == ALFormat.Mono16 || format == ALFormat.Stereo8)
    {
        bytesPerSecond *= 2;
    }
    else if (format == ALFormat.Stereo16)
    {
        bytesPerSecond *= 4;
    }

    duration = TimeSpan.FromSeconds((float)size / bytesPerSecond);
#endif

#if MONOMAC || IOS
    var audiodata = new byte[s.Length];
    s.Read(audiodata, 0, (int)s.Length);

    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);
        Size = (int)afs.DataByteCount;

        buffer = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, buffer, 0, afs.DataByteCount);

        AudioStreamBasicDescription asbd = afs.DataFormat;
        int channelsPerFrame = asbd.ChannelsPerFrame;
        int bitsPerChannel = asbd.BitsPerChannel;

        Rate = (float)asbd.SampleRate;
        double durationSec = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
        duration = TimeSpan.FromSeconds(durationSec);

        if (channelsPerFrame == 1)
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
        }
        else
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
        }
    }
#endif

    // bind buffer
    SoundBuffer = new OALSoundBuffer();
    SoundBuffer.BindDataBuffer(buffer, Format, Size, (int)Rate);
}
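
The bytesPerSecond calculation above follows the standard uncompressed PCM relation: bytes per second = sample rate * channels * bytes per sample, which is why Mono16 and Stereo8 multiply by 2 and Stereo16 by 4. A minimal sketch of the same math with the factors spelled out; the helper name GetPcmDuration and its explicit parameters are illustrative, not part of the original source:

static TimeSpan GetPcmDuration(int dataSizeInBytes, int sampleRate, int channels, int bytesPerSample)
{
    // e.g. 16-bit stereo at 44100 Hz: 44100 * 2 * 2 = 176400 bytes per second,
    // so a 1,764,000-byte buffer is 10 seconds of audio.
    int bytesPerSecond = sampleRate * channels * bytesPerSample;
    return TimeSpan.FromSeconds((double)dataSizeInBytes / bytesPerSecond);
}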