/// <summary>
/// Initialises the shared audio player from a raw encoded-audio stream.
/// </summary>
/// <param name="Content">Stream containing the encoded audio data.</param>
/// <param name="Loop">True to repeat the sound indefinitely; false to play it once.</param>
/// <param name="Volume">Playback volume, expected in the 0.0-1.0 range.</param>
protected void Init(Stream Content, bool Loop, double Volume)
{
    _player = AVAudioPlayer.FromData(NSData.FromStream(Content));
    // AVAudioPlayer.NumberOfLoops: 0 plays once, -1 repeats forever.
    // The original expression (Loop ? 0 : -1) was inverted.
    _player.NumberOfLoops = Loop ? -1 : 0;
    _player.Volume = System.Convert.ToSingle(Volume);
}
/// <summary>
/// Plays a one-shot sound from an in-memory encoded audio buffer.
/// </summary>
/// <param name="data">Encoded audio bytes (e.g. WAV/MP3) that AVAudioPlayer can decode.</param>
public void Play(byte[] data)
{
    AVAudioPlayer player = AVAudioPlayer.FromData(NSData.FromArray(data));
    if (player == null)
    {
        // FromData returns null for unsupported or corrupt data; nothing to play.
        return;
    }
    // The event subscription roots the player until playback completes (a purely
    // local reference could be collected mid-playback) and releases it afterwards.
    player.FinishedPlaying += (sender, e) => player.Dispose();
    player.PrepareToPlay();
    player.Play();
}
/// <summary>
/// Wraps raw PCM samples in a WAVE (RIFF) header and plays them.
/// Channel count and sample width come from the instance fields
/// numChannels and bitsPerSample.
/// </summary>
/// <param name="samplingRate">Sample rate of the PCM data, in Hz.</param>
/// <param name="pcmData">Raw interleaved PCM payload bytes.</param>
public void PlaySound(int samplingRate, byte[] pcmData)
{
    using (MemoryStream memoryStream = new MemoryStream())
    {
        // leaveOpen: NSData.FromStream must still read the stream after the writer is disposed.
        using (BinaryWriter binaryWriter = new BinaryWriter(memoryStream, Encoding.ASCII, true))
        {
            binaryWriter.Write(new char[] { 'R', 'I', 'F', 'F' });
            // RIFF chunk size = 36 header bytes + payload length. The original wrote
            // 36 + sizeof(short) * numSamples, which is only correct for 16-bit mono.
            binaryWriter.Write(36 + pcmData.Length);
            binaryWriter.Write(new char[] { 'W', 'A', 'V', 'E' });
            binaryWriter.Write(new char[] { 'f', 'm', 't', ' ' });
            binaryWriter.Write(16);                                             // fmt chunk size
            binaryWriter.Write((short)1);                                       // PCM format tag
            binaryWriter.Write((short)numChannels);
            binaryWriter.Write(samplingRate);
            binaryWriter.Write(samplingRate * numChannels * bitsPerSample / 8); // byte rate
            binaryWriter.Write((short)(numChannels * bitsPerSample / 8));       // block align
            binaryWriter.Write((short)bitsPerSample);
            binaryWriter.Write(new char[] { 'd', 'a', 't', 'a' });
            // data chunk size is exactly the payload length; the original multiplied
            // by numChannels again and overstated it for multi-channel audio.
            binaryWriter.Write(pcmData.Length);
            binaryWriter.Write(pcmData, 0, pcmData.Length);
            binaryWriter.Flush();
        }
        memoryStream.Seek(0, SeekOrigin.Begin);
        NSData data = NSData.FromStream(memoryStream);
        AVAudioPlayer audioPlayer = AVAudioPlayer.FromData(data);
        if (audioPlayer != null)
        {
            // Keep the player alive until playback ends; a local reference alone
            // may be collected mid-playback.
            audioPlayer.FinishedPlaying += (s, e) => audioPlayer.Dispose();
            audioPlayer.Play();
        }
    }
}
/// <summary>
/// Initialises the shared audio player from a raw encoded-audio stream.
/// </summary>
/// <param name="Content">Stream containing the encoded audio data.</param>
/// <param name="Loop">True to repeat the sound indefinitely; false to play it once.</param>
/// <param name="Volume">Playback volume in the 0.0-1.0 range.</param>
protected void Init(Stream Content, bool Loop, float Volume)
{
    _player = AVAudioPlayer.FromData(NSData.FromStream(Content));
    // AVAudioPlayer.NumberOfLoops: 0 plays once, -1 repeats forever.
    // The original expression (Loop ? 0 : -1) was inverted.
    _player.NumberOfLoops = Loop ? -1 : 0;
    _player.Volume = Volume;
}
/// <summary>
/// Wraps raw PCM samples in a WAVE (RIFF) header and plays them.
/// Channel count and sample width come from the instance fields
/// numChannels and bitsPerSample.
/// </summary>
/// <param name="samplingRate">Sample rate of the PCM data, in Hz.</param>
/// <param name="pcmData">Raw interleaved PCM payload bytes.</param>
public void PlaySound(int samplingRate, byte[] pcmData)
{
    using (MemoryStream memoryStream = new MemoryStream())
    {
        // leaveOpen: NSData.FromStream must still read the stream after the writer is disposed.
        using (BinaryWriter writer = new BinaryWriter(memoryStream, Encoding.ASCII, true))
        {
            // Construct WAVE header.
            writer.Write(new char[] { 'R', 'I', 'F', 'F' });
            // RIFF chunk size = 36 header bytes + data payload length. The original
            // wrote 36 + sizeof(short) * numSamples, only valid for 16-bit mono.
            writer.Write(36 + pcmData.Length);
            writer.Write(new char[] { 'W', 'A', 'V', 'E' });
            writer.Write(new char[] { 'f', 'm', 't', ' ' });               // format chunk
            writer.Write(16);                                              // PCM chunk size
            writer.Write((short)1);                                        // PCM format flag
            writer.Write((short)numChannels);
            writer.Write(samplingRate);
            writer.Write(samplingRate * numChannels * bitsPerSample / 8);  // byte rate
            writer.Write((short)(numChannels * bitsPerSample / 8));        // block align
            writer.Write((short)bitsPerSample);
            writer.Write(new char[] { 'd', 'a', 't', 'a' });               // data chunk
            // data chunk size is exactly the payload length; the original multiplied
            // by numChannels again and overstated it for multi-channel audio.
            writer.Write(pcmData.Length);
            // Write data as well.
            writer.Write(pcmData, 0, pcmData.Length);
            writer.Flush();
        }
        memoryStream.Seek(0, SeekOrigin.Begin);
        NSData data = NSData.FromStream(memoryStream);
        AVAudioPlayer audioPlayer = AVAudioPlayer.FromData(data);
        if (audioPlayer != null)
        {
            // Keep the player rooted until playback ends.
            audioPlayer.FinishedPlaying += (s, e) => audioPlayer.Dispose();
            audioPlayer.Play();
        }
    }
}
/// <summary>
/// Fetches synthesized speech for <paramref name="text"/> from the Bing
/// translator HTTP endpoint and plays it. Network failures are swallowed
/// (best-effort playback).
/// </summary>
/// <param name="text">Text to speak (raw; escaped here before use).</param>
/// <param name="lang">Language code understood by the service.</param>
public void Speak(string text, string lang)
{
    try
    {
        // Escape user-supplied values so spaces, '&', '=' etc. cannot corrupt the query string.
        string uri = String.Format(
            "http://api.microsofttranslator.com/v2/Http.svc/Speak?appId={0}&text={1}&language={2}",
            BING_API_ID, Uri.EscapeDataString(text), Uri.EscapeDataString(lang));
        var request = (HttpWebRequest)WebRequest.Create(uri);
        // using disposes the response/stream on both success and failure paths;
        // the original leaked them on success.
        using (var response = (HttpWebResponse)request.GetResponse())
        using (Stream s = response.GetResponseStream())
        {
            NSData d = NSData.FromStream(s);
            audioPlayer = AVAudioPlayer.FromData(d);
            audioPlayer.PrepareToPlay();
            audioPlayer.Play();
        }
    }
    catch (System.Net.WebException)
    {
        // Network error: nothing to play. Response disposal is handled by the using above.
    }
}
///<Summary>
/// Load wave or mp3 audio file as a stream
///</Summary>
public bool Load(Stream audioStream)
{
    // Tear down any previous player before building a new one from the stream.
    DeletePlayer();
    player = AVAudioPlayer.FromData(NSData.FromStream(audioStream));
    return PreparePlayer();
}
/// <summary>
/// Creates a player for the given track by loading the whole stream into memory.
/// </summary>
/// <param name="track">Track metadata associated with this player.</param>
/// <param name="stream">Stream containing the encoded audio for the track.</param>
/// <exception cref="InvalidOperationException">The audio data could not be decoded.</exception>
public IOSAVAudioPlayer(Model.PlayableTrackInfo track, System.IO.Stream stream)
{
    NSError error;
    player = AVAudioPlayer.FromData(NSData.FromStream(stream), out error);
    if (player == null || error != null)
    {
        // Surface the load failure explicitly; the original ignored the NSError
        // and let PrepareToPlay throw an opaque NullReferenceException.
        throw new InvalidOperationException(
            "Failed to create AVAudioPlayer: " +
            (error != null ? error.LocalizedDescription : "unknown error"));
    }
    // The empty subscription keeps the player rooted until playback completes.
    player.FinishedPlaying += delegate { };
    player.PrepareToPlay();
}
/// <summary>
/// Replaces the current player with one created from the given stream.
/// </summary>
/// <param name="audioStream">Stream containing encoded audio (wave/mp3).</param>
/// <returns>True when the player was created successfully.</returns>
public bool Load(Stream audioStream)
{
    var data = NSData.FromStream(audioStream);
    Stop();
    player?.Dispose();
    player = AVAudioPlayer.FromData(data);
    // FromData returns null for unsupported or corrupt data.
    return player != null;
}
/// <summary>
/// Creates the shared player from an audio URL at full volume.
/// (Method name typo kept: callers depend on "InitPalyer".)
/// </summary>
/// <param name="fileUrl">Absolute URL string of the audio resource.</param>
public void InitPalyer(string fileUrl)
{
    NSError _err = null;
    // NOTE(review): NSData.FromUrl loads synchronously — may block on remote URLs.
    player = AVAudioPlayer.FromData(NSData.FromUrl(NSUrl.FromString(fileUrl)), out _err);
    // AVAudioPlayer.Volume is defined on a 0.0-1.0 scale; the original 100f was
    // out of range and merely clamped by the framework.
    player.Volume = 1f;
    // Drop the reference once playback finishes.
    player.FinishedPlaying += (s, e) => { player = null; };
}
/// <summary>
/// Starts (or resumes) playback of <paramref name="fileName"/> and, while the
/// track is playing, reports progress through <paramref name="progessReporter"/>.
/// </summary>
/// <param name="fileName">URL string of the track to play.</param>
/// <param name="progessReporter">Optional progress sink; may be null.</param>
/// <returns>The last observed playback position, in whole seconds.</returns>
public async Task<int> StartPlayTask(string fileName, IProgress<PlayProgress> progessReporter)
{
    int iCurrentPosition = 0;

    if (_mediaPlayer != null && _fileName == fileName)
    {
        // Same track already loaded (e.g. paused): just resume.
        _mediaPlayer.Play();
    }
    else
    {
        // New player or track change: (re)load from the URL.
        // (The original duplicated this block in both branches.)
        _fileName = fileName;
        NSUrl url = NSUrl.FromString(fileName);
        NSData data = NSData.FromUrl(url);
        NSError err = null;
        _mediaPlayer = AVAudioPlayer.FromData(data, out err);
        _mediaPlayer?.Play();
    }

    if (_mediaPlayer == null)
    {
        // Load failed; the original dereferenced null in the loop condition here.
        return iCurrentPosition;
    }

    while (_mediaPlayer.Playing) // update play progress
    {
        if (progessReporter != null)
        {
            iCurrentPosition = (int)_mediaPlayer.CurrentTime;
            var args = new PlayProgress(fileName, iCurrentPosition, (int)_mediaPlayer.Duration);
            progessReporter.Report(args);
        }
        // Yield unconditionally: the original only yielded when a reporter was
        // supplied, turning this loop into a hard busy-wait otherwise.
        await Task.Yield();
    }
    return iCurrentPosition;
} //StartPlayTask
/// <summary>
/// Plays an in-memory encoded audio buffer once.
/// </summary>
/// <param name="audioBuffer">Encoded audio bytes understood by AVAudioPlayer.</param>
public void PlayAudio(byte[] audioBuffer)
{
    using (NSData data = NSData.FromArray(audioBuffer))
    {
        var player = AVAudioPlayer.FromData(data);
        // The subscription roots the player until playback completes,
        // then the reference is dropped for collection.
        player.FinishedPlaying += (s, e) => { player = null; };
        player.Play();
    }
}
/// <summary>
/// Loads <paramref name="music"/> into a fresh AVAudioPlayer and queues the
/// corresponding media-event notifications. Exactly one player may exist at a
/// time; ResetMusicPlayer must have freed the previous one.
/// </summary>
/// <param name="music">Music to load; its Stream is rewound and read in full.</param>
/// <exception cref="AudioSystemInternalException">
/// A player already exists, or loading failed for a reason other than an
/// unsupported format.
/// </exception>
internal override void LoadMusic(SoundMusic music)
{
    if (audioPlayer != null)
    {
        throw new AudioSystemInternalException("Tried to create a new AudioPlayer but the current instance was not freed.");
    }

    CurrentMusic = music;
    currentMusicDataTypeIsUnsupported = false;

    NSError loadError;

    // TODO: Avoid allocating twice the music size (i.e. by using NSData.FromBytesNoCopy on CurrentMusic.Stream.GetBuffer())
    CurrentMusic.Stream.Position = 0;
    audioPlayer = AVAudioPlayer.FromData(NSData.FromStream(CurrentMusic.Stream), out loadError);

    if (loadError != null)
    {
        // Unsupported formats are a recoverable condition: flag them and still
        // raise MetaDataLoaded. Any other load error is fatal.
        if (loadError.Code == (int)AudioFileError.UnsupportedFileType || loadError.Code == (int)AudioFileError.UnsupportedDataFormat)
        {
            currentMusicDataTypeIsUnsupported = true;
            musicMediaEvents.Enqueue(new SoundMusicEventNotification(SoundMusicEvent.MetaDataLoaded, null));
            return;
        }

        throw new AudioSystemInternalException("Music loading failed and failure was not handled. [Error=" + loadError.LocalizedDescription + "].");
    }

    if (audioPlayer == null) // both audioPlayer and loadError are null (happened before when url was not correct)
    {
        throw new AudioSystemInternalException("Music loading failed and failure was not handled. [Unspecified Error].");
    }

    audioPlayer.DecoderError += OnAudioPlayerDecoderError;
    audioPlayer.FinishedPlaying += OnAudioPlayerFinishedPlaying;

    if (!audioPlayer.PrepareToPlay())
    {
        // this happens sometimes when we put the application on background when starting to play.
        // Roll the music back to a stopped state and release the player.
        var currentMusicName = CurrentMusic.Name;
        CurrentMusic.SetStateToStopped();
        ResetMusicPlayer();
        Logger.Warning("The music '{0}' failed to prepare to play.", currentMusicName);
    }
    else
    {
        musicMediaEvents.Enqueue(new SoundMusicEventNotification(SoundMusicEvent.MetaDataLoaded, null));
        musicMediaEvents.Enqueue(new SoundMusicEventNotification(SoundMusicEvent.ReadyToBePlayed, null));
    }
}
///<Summary>
/// Load wave or mp3 audio file as a stream
///</Summary>
public bool Load(Stream audioStream)
{
    var data = NSData.FromStream(audioStream);
    Stop();
    player?.Dispose();
    player = AVAudioPlayer.FromData(data);
    if (player != null)
    {
        // Forward the native completion callback to the shared handler.
        player.FinishedPlaying += OnPlaybackEnded;
    }
    // FromData returns null for unsupported or corrupt data.
    return player != null;
}
/// <summary>
/// Returns a playable AVAudioPlayer for this sound: reuses a recycled player
/// when available, otherwise creates one from (cached) sound data.
/// </summary>
/// <returns>A prepared player, or null when the sound data could not be loaded.</returns>
private AVAudioPlayer GetOrCreateAudioPlayer()
{
    AVAudioPlayer audioPlayer;
    // Let's see if we can use a recycled audio player
    List<AVAudioPlayer> audioPlayers = GetOrCreateRecycledAudioPlayerList();
    int count = audioPlayers.Count;
    if (count != 0)
    {
        int lastIndex = count - 1;
        audioPlayer = audioPlayers[lastIndex];
        audioPlayers.RemoveAt(lastIndex);
    }
    else
    {
        // If not let's create one (assuming we have the corresponding NSData)
        NSData soundData;
        if (sCachedSoundData.TryGetValue(_url, out soundData) == false)
        {
            // We could not use a recycled audio player, and the sound has never been loaded.
            // Let's load it now (synchronously for the moment) and cache the result —
            // even a failed (null) load, so we do not retry on every call.
            NSUrl nsUrl = NSUrl.FromFilename(_url);
            soundData = NSData.FromUrl(nsUrl); // TODO: Synchronous load! Replace this to asynchronous!
            sCachedSoundData.Add(_url, soundData);
        }
        if (soundData == null)
        {
            // Load failed, either just now or on a previous call. The original only
            // checked for null on the first cache miss, so a cached null reached
            // AVAudioPlayer.FromData(null) on every subsequent call and crashed.
            return null;
        }
        audioPlayer = AVAudioPlayer.FromData(soundData);
        audioPlayer.PrepareToPlay();
        // Decode error event is only needed for the sound that created the player
        // as subsequent sound will not have a decode error
        audioPlayer.DecoderError += OnDecodeError;
    }
    // FinishedPlaying event is specific to this sound, so register the event there
    audioPlayer.FinishedPlaying += OnFinishedPlaying;
    return audioPlayer;
}
/// <summary>
/// Stops any currently playing sound and plays the audio contained in
/// <paramref name="media"/>. Logs an error when the data cannot be decoded.
/// </summary>
/// <param name="media">Media whose Data holds encoded audio bytes.</param>
public void StartSound(Media media)
{
    NSError error;
    if (soundPlayer != null)
    {
        soundPlayer.Stop();
        // Release the native player instead of just dropping the reference
        // (the original leaked the unmanaged AVAudioPlayer).
        soundPlayer.Dispose();
        soundPlayer = null;
    }
    soundPlayer = AVAudioPlayer.FromData(NSData.FromArray(media.Data), out error);
    if (soundPlayer != null)
    {
        soundPlayer.Play();
    }
    else
    {
        logMessage(LogLevel.Error, String.Format("Audio file format of media {0} is not valid", media.Name));
    }
}
/// <summary>
/// Wraps an in-memory encoded audio buffer in a prepared AVAudioPlayer.
/// </summary>
/// <param name="audiodata">Encoded audio bytes.</param>
/// <param name="volume">Playback volume (0.0-1.0).</param>
/// <param name="looping">True to repeat indefinitely, false to play once.</param>
public Sound(byte[] audiodata, float volume, bool looping)
{
    _audioPlayer = AVAudioPlayer.FromData(NSData.FromArray(audiodata));
    _audioPlayer.Volume = volume;
    // -1 repeats forever, 0 plays the sound a single time.
    _audioPlayer.NumberOfLoops = looping ? -1 : 0;
    if (!_audioPlayer.PrepareToPlay())
    {
        Console.WriteLine("Unable to Prepare sound for playback!");
    }
}
/// <summary>
/// Builds one AVAudioPlayer per instrument track plus one for the backing
/// track, loading each "*_jcfx" file from the media's directory.
/// </summary>
/// <param name="media">Media describing the tracks and their on-disk location.</param>
public AppleJcfPlayer(JcfMedia media)
{
    this.players = new Dictionary<PlayableTrackInfo, AVAudioPlayer>(media.InstrumentTracks.Count + 1);
    this.media = media;
    NSError error;
    foreach (var track in media.InstrumentTracks)
    {
        // Dispose each file stream once NSData has consumed it
        // (the original leaked every FileStream handle).
        using (var file = File.OpenRead(Path.Combine(media.Path, $"{track.Identifier.ToString().ToUpper()}_jcfx")))
        {
            players[track] = AVAudioPlayer.FromData(NSData.FromStream(file), out error);
        }
        //TODO: Do something useful here or remove (beware nullptr after playback done).
        players[track].FinishedPlaying += delegate { };
        players[track].PrepareToPlay();
        players[track].NumberOfLoops = 0;
    }
    using (var backing = File.OpenRead(Path.Combine(media.Path, $"{media.BackingTrack.Identifier.ToString().ToUpper()}_jcfx")))
    {
        players[media.BackingTrack] = AVAudioPlayer.FromData(NSData.FromStream(backing), out error);
    }
    players[media.BackingTrack].NumberOfLoops = 0;
}
/// <summary>
/// Replaces any loaded audio with <paramref name="audio"/> and prepares it
/// for playback with the requested looping mode.
/// </summary>
/// <returns>True when the data was decoded without error.</returns>
public override unsafe bool Load(byte[] audio, bool looping, string identifier = null)
{
    // Guard clause: bail out if the base bookkeeping rejects the load.
    bool accepted = base.Load(audio, looping, identifier);
    if (!accepted)
    {
        return false;
    }

    Unload();

    NSError error = null;
    using (NSData data = NSData.FromArray(audio))
    {
        player = AVAudioPlayer.FromData(data, out error);
        Prepare();
        //player.MeteringEnabled = true; -- enable for CurrentPower readings
        updateVolume();
        // -1 repeats forever, 0 plays once.
        player.NumberOfLoops = looping ? -1 : 0;
    }
    return error == null;
}
/// <summary>
/// Creates a fresh player for <paramref name="data"/> at full volume and
/// starts it, logging decode errors and completion via Debug output.
/// </summary>
private void PlayData(NSData data)
{
    NSError err;
    ////AVAudioSession.SharedInstance().OverrideOutputAudioPort(AVAudioSessionPortOverride.Speaker, out err);
    player = AVAudioPlayer.FromData(data);
    player.Volume = 1.0f;
    // Log decoder failures rather than failing silently.
    player.DecoderError += (s, ev) =>
    {
        Debug.WriteLine(ev.Error.ToString());
    };
    // Update the UI label and trace when playback completes.
    player.FinishedPlaying += (s, ev) =>
    {
        Debug.WriteLine(OutputLabel.Text = "Finished");
    };
    player.PrepareToPlay();
    player.Play();
}
/// <summary>
/// Switches the shared audio session into playback mode and replays the
/// recorded bytes, notifying listeners via MessagingCenter when done.
/// Any failure is logged to the console rather than propagated.
/// </summary>
public void PlayRecord()
{
    try
    {
        NSError error = null;
        AVAudioSession.SharedInstance().SetCategory(AVAudioSession.CategoryPlayback, out error);
        if (error != null)
        {
            throw new Exception(error.DebugDescription);
        }

        player = AVAudioPlayer.FromData(NSData.FromArray(audioDataBytes));
        // Broadcast completion so the owning page can react.
        player.FinishedPlaying += (sender, e) =>
        {
            Console.WriteLine("send message to parent");
            MessagingCenter.Send<ISoundRecorder, bool>(this, "finishReplaying", true);
        };
        player.Play();
    }
    catch (Exception ex)
    {
        Console.WriteLine("There was a problem playing back audio: ");
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Decodes an audio stream into a PCM buffer, records its format/size/rate,
/// reports the clip duration, and binds the data to a new OpenAL buffer.
/// Platform-specific: the desktop OpenAL path delegates to AudioLoader; the
/// MonoMac/iOS path parses the WAVE container with AudioFileStream.
/// </summary>
/// <param name="s">Seekable stream containing the encoded audio.</param>
/// <param name="duration">Receives the decoded clip's duration.</param>
private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
{
    byte[] buffer;

#if OPENAL && !(MONOMAC || IOS)
    ALFormat format;
    int size;
    int freq;

    var stream = s;

    buffer = AudioLoader.Load(stream, out format, out size, out freq);

    Format = format;
    Size = size;
    Rate = freq;
    duration = TimeSpan.FromSeconds((float)size / freq);
#endif

#if MONOMAC || IOS
    var audiodata = new byte[s.Length];
    s.Read(audiodata, 0, (int)s.Length);

    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);
        Size = (int)afs.DataByteCount;

        buffer = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, buffer, 0, afs.DataByteCount);

        AudioStreamBasicDescription asbd = afs.DataFormat;
        int channelsPerFrame = asbd.ChannelsPerFrame;
        int bitsPerChannel = asbd.BitsPerChannel;

        // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel are invalid because of a bug in Xamarin.iOS
        // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
        if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
        {
            // Fallback: let AVAudioPlayer decode the header to recover the format info.
            NSError err;
            using (NSData nsData = NSData.FromArray(audiodata))
            using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
            {
                channelsPerFrame = (int)player.NumberOfChannels;
                bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);
                Rate = (float)player.SoundSetting.SampleRate;
                duration = TimeSpan.FromSeconds(player.Duration);
            }
        }
        else
        {
            Rate = (float)asbd.SampleRate;
            // duration = payload bytes / (bytes-per-frame) / sample-rate.
            double durationSec = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
            duration = TimeSpan.FromSeconds(durationSec);
        }

        // Map channel count and bit depth onto the matching OpenAL buffer format.
        if (channelsPerFrame == 1)
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
        }
        else
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
        }
    }
#endif

    // bind buffer
    SoundBuffer = new OALSoundBuffer();
    SoundBuffer.BindDataBuffer(buffer, Format, Size, (int)Rate);
}
protected void Init(MusicStream Wave) { _player = AVAudioPlayer.FromData(NSData.FromStream(Wave.Content)); _player.NumberOfLoops = Wave.Loop ? 0 : -1; _player.Volume = Wave.Volume; }
/// <summary>
/// Decodes an audio stream into the _data PCM buffer and caches its format,
/// size, rate and duration in the corresponding members. Platform-specific via
/// preprocessor branches: desktop OpenAL uses AudioLoader (with an Android
/// workaround for non-seekable AssetManager streams); MonoMac/iOS parses the
/// WAVE container with AudioFileStream.
/// </summary>
/// <param name="s">Stream containing the encoded audio.</param>
private void PlatformLoadAudioStream(Stream s)
{
#if OPENAL && !(MONOMAC || IOS)
    ALFormat format;
    int size;
    int freq;

    var stream = s;
#if ANDROID
    var needsDispose = false;
    try
    {
        // If seek is not supported (usually an indicator of a stream opened into the AssetManager), then copy
        // into a temporary MemoryStream.
        if (!s.CanSeek)
        {
            needsDispose = true;
            stream = new MemoryStream();
            s.CopyTo(stream);
            stream.Position = 0;
        }
#endif
        _data = AudioLoader.Load(stream, out format, out size, out freq);
#if ANDROID
    }
    finally
    {
        // Only dispose the temporary copy, never the caller's stream.
        if (needsDispose)
        {
            stream.Dispose();
        }
    }
#endif

    Format = format;
    Size = size;
    Rate = freq;
#endif

#if MONOMAC || IOS
    var audiodata = new byte[s.Length];
    s.Read(audiodata, 0, (int)s.Length);

    using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
    {
        afs.ParseBytes(audiodata, false);
        Size = (int)afs.DataByteCount;

        _data = new byte[afs.DataByteCount];
        Array.Copy(audiodata, afs.DataOffset, _data, 0, afs.DataByteCount);

        AudioStreamBasicDescription asbd = afs.DataFormat;
        int channelsPerFrame = asbd.ChannelsPerFrame;
        int bitsPerChannel = asbd.BitsPerChannel;

        // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel are invalid because of a bug in Xamarin.iOS
        // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
        if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
        {
            // Fallback: let AVAudioPlayer decode the header to recover format info.
            NSError err;
            using (NSData nsData = NSData.FromArray(audiodata))
            using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
            {
                channelsPerFrame = (int)player.NumberOfChannels;
                bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);
                Rate = (float)player.SoundSetting.SampleRate;
                _duration = TimeSpan.FromSeconds(player.Duration);
            }
        }
        else
        {
            Rate = (float)asbd.SampleRate;
            // duration = payload bytes / bytes-per-frame / sample-rate.
            double duration = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
            _duration = TimeSpan.FromSeconds(duration);
        }

        // Map channel count and bit depth onto the matching OpenAL buffer format.
        if (channelsPerFrame == 1)
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
        }
        else
        {
            Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
        }
    }
#endif
}
// Wraps the stream's audio in a throw-away AVAudioPlayer and hands it to Play.
// NOTE(review): the player is disposed as soon as Play returns — assumes Play
// blocks (or completes tcs) before playback needs the instance; confirm.
static void PlayStream(Stream stream, TaskCompletionSource<bool> tcs)
{
    using (var player = AVAudioPlayer.FromData(NSData.FromStream(stream)))
    {
        Play(player, tcs);
    }
}