/// <summary>
/// Loads the audio stream from the given byte array, extracting the sample rate,
/// format, raw PCM payload and duration. Note: the parser status is currently not
/// checked, so parse failures are not surfaced to the caller.
/// </summary>
/// <param name="audiodata">The full byte array of the audio stream.</param>
void LoadAudioStream(byte[] audiodata)
{
    // Dispose the native stream parser deterministically, even if parsing throws.
    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);

        AudioStreamBasicDescription asbd = afs.StreamBasicDescription;

        Rate = (float)asbd.SampleRate;
        Size = (int)afs.DataByteCount;

        if (asbd.ChannelsPerFrame == 1)
        {
            Format = asbd.BitsPerChannel == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
        }
        else
        {
            Format = asbd.BitsPerChannel == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;
        }

        // Copy only the PCM payload (skipping the container header at DataOffset).
        byte[] d = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, d, 0, afs.DataByteCount);
        _data = d;

        // bytes / (bytesPerSample * channels) = frames; frames / sampleRate = seconds.
        // A zero channel count (possible parser glitch) is guarded to avoid division by zero.
        var _dblDuration = (Size / ((asbd.BitsPerChannel / 8) * ((asbd.ChannelsPerFrame == 0) ? 1 : asbd.ChannelsPerFrame))) / asbd.SampleRate;
        _duration = TimeSpan.FromSeconds(_dblDuration);
    }
}
/// <summary>
/// Forwards bytes received from the connection into the audio file stream parser.
/// </summary>
/// <exception cref="ApplicationException">Thrown when the parser does not report an Ok status.</exception>
public override void ReceivedData(NSUrlConnection connection, NSData data)
{
    var status = audioFileStream.ParseBytes((int)data.Length, data.Bytes, false);
    if (status != AudioFileStreamStatus.Ok)
    {
        // Include the parser status so failures are diagnosable instead of
        // throwing a message-less exception.
        throw new ApplicationException("Audio stream parsing failed with status " + status);
    }
}
/// <summary>
/// Main entry point for streaming: feed raw bytes from the source into this method
/// and they are forwarded to the underlying audio file stream parser.
/// </summary>
public void ParseBytes(byte[] buffer, int count, bool discontinuity, bool lastPacket)
{
    // Nothing to do when no underlying file stream has been created yet.
    if (fileStream == null)
        return;

    this.lastPacket = lastPacket;
    fileStream.ParseBytes(buffer, 0, count, discontinuity);
}
/// <summary>
/// Parses the given audio bytes, raising the PacketDecoded event for each
/// decoded packet, then closes the parser.
/// </summary>
/// <param name="audiodata">The full byte array of the audio stream.</param>
void LoadAudioStream(byte[] audiodata)
{
    AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE);
    //long pac = afs.DataPacketCount;
    try
    {
        afs.PacketDecoded += HandlePacketDecoded;
        afs.ParseBytes(audiodata, false);
    }
    finally
    {
        // Always release the native parser, even if ParseBytes throws.
        afs.Close();
    }
}
/// <summary>
/// Main entry point for streaming: feed raw bytes from the source into this method
/// and they are forwarded to the underlying audio file stream parser.
/// </summary>
public void ParseBytes(byte[] buffer, int count, bool discontinuity, bool lastPacket)
{
    _lastPacket = lastPacket;
    _audioFileStream.ParseBytes(buffer, 0, count, discontinuity);
}
/// <summary>
/// Loads and decodes the audio data from the given stream, populating
/// Format, Size, Rate, _data and (on MonoMac/iOS) _duration.
/// </summary>
/// <param name="s">Source stream positioned at the start of the audio data.</param>
private void PlatformLoadAudioStream(Stream s)
{
#if OPENAL && !(MONOMAC || IOS)
    ALFormat format;
    int size;
    int freq;

    var stream = s;
#if ANDROID
    var needsDispose = false;
    try
    {
        // If seek is not supported (usually an indicator of a stream opened into the AssetManager), then copy
        // into a temporary MemoryStream.
        if (!s.CanSeek)
        {
            needsDispose = true;
            stream = new MemoryStream();
            s.CopyTo(stream);
            stream.Position = 0;
        }
#endif
        _data = AudioLoader.Load(stream, out format, out size, out freq);
#if ANDROID
    }
    finally
    {
        if (needsDispose)
            stream.Dispose();
    }
#endif
    Format = format;
    Size = size;
    Rate = freq;
#endif

#if MONOMAC || IOS
    var audiodata = new byte[s.Length];
    // BUG FIX: Stream.Read may return fewer bytes than requested; loop until the
    // whole buffer is filled (or EOF) instead of assuming a single full read.
    int offset = 0;
    while (offset < audiodata.Length)
    {
        int read = s.Read(audiodata, offset, audiodata.Length - offset);
        if (read == 0)
            break;
        offset += read;
    }

    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);
        Size = (int)afs.DataByteCount;

        // Copy only the PCM payload (skipping the container header at DataOffset).
        _data = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, _data, 0, afs.DataByteCount);

        AudioStreamBasicDescription asbd = afs.DataFormat;
        int channelsPerFrame = asbd.ChannelsPerFrame;
        int bitsPerChannel = asbd.BitsPerChannel;

        // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel are invalid because of a bug in Xamarin.iOS
        // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
        if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
        {
            NSError err;
            using (NSData nsData = NSData.FromArray(audiodata))
            using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
            {
                channelsPerFrame = (int)player.NumberOfChannels;
                bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);
                Rate = (float)player.SoundSetting.SampleRate;
                _duration = TimeSpan.FromSeconds(player.Duration);
            }
        }
        else
        {
            Rate = (float)asbd.SampleRate;
            // bytes / (bytesPerSample * channels) = frames; frames / sampleRate = seconds.
            double duration = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
            _duration = TimeSpan.FromSeconds(duration);
        }

        if (channelsPerFrame == 1)
            Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
        else
            Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
    }
#endif
}
/// <summary>
/// Feeds the entire byte array into the audio file stream parser in a single call.
/// </summary>
public void Parse(byte[] bytes)
{
    int length = bytes.Length;
    afs.ParseBytes(bytes, 0, length, false);
}
/// <summary>
/// Loads and decodes the audio data from the given stream, populating
/// Format, Size, Rate, _data and (on MonoMac/iOS) _duration.
/// </summary>
/// <param name="s">Source stream positioned at the start of the audio data.</param>
private void PlatformLoadAudioStream(Stream s)
{
#if OPENAL && !(MONOMAC || IOS)
    ALFormat format;
    int size;
    int freq;

    var stream = s;
#if ANDROID
    var needsDispose = false;
    try
    {
        // If seek is not supported (usually an indicator of a stream opened into the AssetManager), then copy
        // into a temporary MemoryStream.
        if (!s.CanSeek)
        {
            needsDispose = true;
            stream = new MemoryStream();
            s.CopyTo(stream);
            stream.Position = 0;
        }
#endif
        _data = AudioLoader.Load(stream, out format, out size, out freq);
#if ANDROID
    }
    finally
    {
        if (needsDispose)
        {
            stream.Dispose();
        }
    }
#endif
    Format = format;
    Size = size;
    Rate = freq;
#endif

#if MONOMAC || IOS
    var audiodata = new byte[s.Length];
    // BUG FIX: Stream.Read may return fewer bytes than requested; loop until the
    // whole buffer is filled (or EOF) instead of assuming a single full read.
    int offset = 0;
    while (offset < audiodata.Length)
    {
        int read = s.Read(audiodata, offset, audiodata.Length - offset);
        if (read == 0)
        {
            break;
        }
        offset += read;
    }

    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);
        Size = (int)afs.DataByteCount;

        // Copy only the PCM payload (skipping the container header at DataOffset).
        _data = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, _data, 0, afs.DataByteCount);

        AudioStreamBasicDescription asbd = afs.DataFormat;
        int channelsPerFrame = asbd.ChannelsPerFrame;
        int bitsPerChannel = asbd.BitsPerChannel;

        // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel are invalid because of a bug in Xamarin.iOS
        // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
        if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
        {
            NSError err;
            using (NSData nsData = NSData.FromArray(audiodata))
            using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
            {
                channelsPerFrame = (int)player.NumberOfChannels;
                bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);
                Rate = (float)player.SoundSetting.SampleRate;
                _duration = TimeSpan.FromSeconds(player.Duration);
            }
        }
        else
        {
            Rate = (float)asbd.SampleRate;
            // bytes / (bytesPerSample * channels) = frames; frames / sampleRate = seconds.
            double duration = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
            _duration = TimeSpan.FromSeconds(duration);
        }

        if (channelsPerFrame == 1)
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
        }
        else
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
        }
    }
#endif
}
/// <summary>
/// Loads the audio stream from the given byte array, extracting the sample rate,
/// format, raw PCM payload and duration. Note: the parser status is currently not
/// checked, so parse failures are not surfaced to the caller.
/// </summary>
/// <param name="audiodata">The full byte array of the audio stream.</param>
void LoadAudioStream (byte[] audiodata)
{
    // Dispose the native stream parser deterministically, even if parsing throws.
    using (AudioFileStream afs = new AudioFileStream (AudioFileType.WAVE))
    {
        afs.ParseBytes (audiodata, false);

        AudioStreamBasicDescription asbd = afs.StreamBasicDescription;

        Rate = (float)asbd.SampleRate;
        Size = (int)afs.DataByteCount;

        if (asbd.ChannelsPerFrame == 1)
            Format = asbd.BitsPerChannel == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
        else
            Format = asbd.BitsPerChannel == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;

        // BUG FIX: copy only the PCM payload (from DataOffset for DataByteCount bytes)
        // instead of storing the whole file, which included the WAVE container header.
        byte[] d = new byte[afs.DataByteCount];
        Array.Copy (audiodata, afs.DataOffset, d, 0, afs.DataByteCount);
        _data = d;

        // BUG FIX: the original ternary bound "== 0" to the whole product
        // ((bits/8) * channels == 0 ? 1 : channels), which dropped the
        // bytes-per-sample factor whenever the product was non-zero.
        // Parenthesize so only the channel count is guarded against zero.
        var _dblDuration = (Size / ((asbd.BitsPerChannel / 8) * ((asbd.ChannelsPerFrame == 0) ? 1 : asbd.ChannelsPerFrame))) / asbd.SampleRate;
        _duration = TimeSpan.FromSeconds (_dblDuration);
    }
}
/// <summary>
/// Loads and decodes the audio data from the given stream, populating Format,
/// Size and Rate, binding the decoded bytes into a new OALSoundBuffer, and
/// returning the track duration through <paramref name="duration"/>.
/// </summary>
/// <param name="s">Source stream positioned at the start of the audio data.</param>
/// <param name="duration">Receives the computed duration of the audio data.</param>
private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
{
    byte[] buffer;

#if OPENAL && !(MONOMAC || IOS)
    ALFormat format;
    int size;
    int freq;

    var stream = s;
    buffer = AudioLoader.Load(stream, out format, out size, out freq);

    Format = format;
    Size = size;
    Rate = freq;

    // BUG FIX: duration must be based on bytes-per-second, not the sample rate
    // alone — 16-bit and/or stereo data carries 2 or 4 bytes per frame.
    var bytesPerSecond = freq;
    if (format == ALFormat.Mono16 || format == ALFormat.Stereo8)
        bytesPerSecond *= 2;
    else if (format == ALFormat.Stereo16)
        bytesPerSecond *= 4;
    duration = TimeSpan.FromSeconds((float)size / bytesPerSecond);
#endif

#if MONOMAC || IOS
    var audiodata = new byte[s.Length];
    // BUG FIX: Stream.Read may return fewer bytes than requested; loop until the
    // whole buffer is filled (or EOF) instead of assuming a single full read.
    int offset = 0;
    while (offset < audiodata.Length)
    {
        int read = s.Read(audiodata, offset, audiodata.Length - offset);
        if (read == 0)
            break;
        offset += read;
    }

    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);
        Size = (int)afs.DataByteCount;

        // Copy only the PCM payload (skipping the container header at DataOffset).
        buffer = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, buffer, 0, afs.DataByteCount);

        AudioStreamBasicDescription asbd = afs.DataFormat;
        int channelsPerFrame = asbd.ChannelsPerFrame;
        int bitsPerChannel = asbd.BitsPerChannel;

        // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel are invalid because of a bug in Xamarin.iOS
        // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
        if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
        {
            NSError err;
            using (NSData nsData = NSData.FromArray(audiodata))
            using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
            {
                channelsPerFrame = (int)player.NumberOfChannels;
                bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);
                Rate = (float)player.SoundSetting.SampleRate;
                duration = TimeSpan.FromSeconds(player.Duration);
            }
        }
        else
        {
            Rate = (float)asbd.SampleRate;
            // bytes / (bytesPerSample * channels) = frames; frames / sampleRate = seconds.
            double durationSec = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
            duration = TimeSpan.FromSeconds(durationSec);
        }

        if (channelsPerFrame == 1)
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
        }
        else
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
        }
    }
#endif

    // bind buffer
    SoundBuffer = new OALSoundBuffer();
    SoundBuffer.BindDataBuffer(buffer, Format, Size, (int)Rate);
}
/// <summary>
/// Loads and decodes the audio data from the given stream, populating Format,
/// Size and Rate, binding the decoded bytes into a new OALSoundBuffer, and
/// returning the track duration through <paramref name="duration"/>.
/// </summary>
/// <param name="s">Source stream positioned at the start of the audio data.</param>
/// <param name="duration">Receives the computed duration of the audio data.</param>
private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
{
    byte[] buffer;

#if OPENAL && !(MONOMAC || IOS)
    ALFormat format;
    int size;
    int freq;

    var stream = s;
    buffer = AudioLoader.Load(stream, out format, out size, out freq);

    Format = format;
    Size = size;
    Rate = freq;

    // Duration is based on bytes-per-second: 16-bit and/or stereo data carries
    // 2 or 4 bytes per frame.
    var bytesPerSecond = freq;
    if (format == ALFormat.Mono16 || format == ALFormat.Stereo8)
    {
        bytesPerSecond *= 2;
    }
    else if (format == ALFormat.Stereo16)
    {
        bytesPerSecond *= 4;
    }
    duration = TimeSpan.FromSeconds((float)size / bytesPerSecond);
#endif

#if MONOMAC || IOS
    var audiodata = new byte[s.Length];
    // BUG FIX: Stream.Read may return fewer bytes than requested; loop until the
    // whole buffer is filled (or EOF) instead of assuming a single full read.
    int offset = 0;
    while (offset < audiodata.Length)
    {
        int read = s.Read(audiodata, offset, audiodata.Length - offset);
        if (read == 0)
        {
            break;
        }
        offset += read;
    }

    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);
        Size = (int)afs.DataByteCount;

        buffer = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, buffer, 0, afs.DataByteCount);

        AudioStreamBasicDescription asbd = afs.DataFormat;
        int channelsPerFrame = asbd.ChannelsPerFrame;
        int bitsPerChannel = asbd.BitsPerChannel;

        // BUG FIX: asbd.ChannelsPerFrame / asbd.BitsPerChannel can be invalid (<= 0)
        // because of a bug in Xamarin.iOS, which would cause a divide-by-zero below.
        // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074
        // Fall back to AVAudioPlayer to recover the real values in that case.
        if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
        {
            NSError err;
            using (NSData nsData = NSData.FromArray(audiodata))
            using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
            {
                channelsPerFrame = (int)player.NumberOfChannels;
                bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);
                Rate = (float)player.SoundSetting.SampleRate;
                duration = TimeSpan.FromSeconds(player.Duration);
            }
        }
        else
        {
            Rate = (float)asbd.SampleRate;
            // bytes / (bytesPerSample * channels) = frames; frames / sampleRate = seconds.
            double durationSec = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
            duration = TimeSpan.FromSeconds(durationSec);
        }

        if (channelsPerFrame == 1)
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
        }
        else
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
        }
    }
#endif

    // bind buffer
    SoundBuffer = new OALSoundBuffer();
    SoundBuffer.BindDataBuffer(buffer, Format, Size, (int)Rate);
}
/// <summary>
/// Main entry point for streaming: feed raw bytes from the source into this method
/// and they are forwarded to the underlying audio file stream parser.
/// </summary>
public void ParseBytes(byte [] buffer, int count, bool discontinuity)
{
    // Forward the first 'count' bytes of the buffer straight to the parser.
    this.fileStream.ParseBytes(buffer, 0, count, discontinuity);
}
/// <summary>
/// Parses the given audio bytes, raising the PacketDecoded event for each
/// decoded packet, then closes the parser.
/// </summary>
/// <param name="audiodata">The full byte array of the audio stream.</param>
void LoadAudioStream (byte[] audiodata)
{
    AudioFileStream afs = new AudioFileStream (AudioFileType.WAVE);
    //long pac = afs.DataPacketCount;
    try
    {
        afs.PacketDecoded += HandlePacketDecoded;
        afs.ParseBytes (audiodata, false);
    }
    finally
    {
        // Always release the native parser, even if ParseBytes throws.
        afs.Close ();
    }
}
/// <summary>
/// Forwards bytes received from the URL session data task into the audio file
/// stream parser.
/// </summary>
/// <exception cref="ApplicationException">Thrown when the parser does not report an Ok status.</exception>
public override void DidReceiveData(NSUrlSession session, NSUrlSessionDataTask dataTask, NSData data)
{
    // Surface parse failures instead of silently discarding the returned status,
    // matching the behavior of the NSUrlConnection-based delegate.
    var status = audioFileStream.ParseBytes((int)data.Length, data.Bytes, false);
    if (status != AudioFileStreamStatus.Ok)
    {
        throw new ApplicationException("Audio stream parsing failed with status " + status);
    }
}