public bool StopDecoder()
{
    try
    {
        if (_mediaCodec != null)
        {
            _mediaCodec.Stop();
            _mediaCodec.Release();
        }
        if (_audioTrack != null)
        {
            _audioTrack.Stop();
            _audioTrack.Release();
        }
        _encoderThread?.Interrupt();
        _decoderThread?.Interrupt();
        return true;
    }
    catch (Exception)
    {
        return false;
    }
}
public static void ReturnAudioTrack(AudioTrack audioTrack)
{
    AudioTrackData audioTrackData = null;
    for (int i = 0; i < m_audioTracks.Count; i++)
    {
        if (m_audioTracks[i].AudioTrack == audioTrack)
        {
            audioTrackData = m_audioTracks[i];
            break;
        }
    }
    if (!Mixer.EnableAudioTrackCaching)
    {
        if (audioTrackData != null)
        {
            m_audioTracks.Remove(audioTrackData);
        }
        audioTrack.Pause();
        audioTrack.Release();
        return;
    }
    if (audioTrackData == null)
    {
        audioTrack.Pause();
        audioTrack.Release();
        return;
    }
    bool flag = false;
    if (m_audioTracks.Count > 16)
    {
        flag = true;
        for (int j = 0; j < m_audioTracks.Count; j++)
        {
            if (m_audioTracks[j].BytesCount < audioTrackData.BytesCount)
            {
                flag = false;
                break;
            }
        }
    }
    if (flag)
    {
        audioTrack.Pause();
        audioTrack.Release();
        m_audioTracks.Remove(audioTrackData);
    }
    else
    {
        audioTrack.Stop();
        audioTrack.SetPlaybackHeadPosition(audioTrackData.BytesCount / audioTrackData.SoundBuffer.ChannelsCount / 2);
        audioTrackData.ReloadStaticDataTime = Time.FrameStartTime + 0.75;
    }
    LogCacheStats();
}
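// The AudioTrackData entries used by ReturnAudioTrack (and by GetAudioTrack further below)
// are not part of this listing. A minimal sketch of what such a cache entry might look like,
// limited to the fields these methods actually touch; the exact definition in the original
// source may differ.
internal class AudioTrackData
{
    public AudioTrack AudioTrack;          // cached static-mode track
    public SoundBuffer SoundBuffer;        // PCM data currently written to the track
    public int BytesCount;                 // size of the track's static buffer in bytes
    public bool IsAvailable;               // true when the track may be handed out again
    public double ReloadStaticDataTime;    // earliest time the rewound track should be reused
}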
public void Clear()
{
    recorder?.Stop();
    recorder?.Release();
    recorder?.Dispose();
    recorder = null;
    audioTrack?.Stop();
    audioTrack?.Release();
    audioTrack?.Dispose();
    audioTrack = null;
}
/* //play short tone on connection
 * private void websocketClient_Opened(object sender, EventArgs e)
 * {
 *     websocketClient.Send("2000");
 *     Thread.Sleep(333);
 *     websocketClient.Send("2000");
 * }
 */
private void websocketClient_MessageReceived(object sender, MessageReceivedEventArgs e)
{
    if (!isPlaying)
    {
        byte[] GeneratedSnd = CreateSound(int.Parse(e.Message));
        track = new AudioTrack(Stream.Music, 8000, ChannelOut.Mono, Encoding.Pcm16bit, 80000, AudioTrackMode.Static);
        prevTone = int.Parse(e.Message);
        isPlaying = !isPlaying;
        track.Write(GeneratedSnd, 0, 80000);
        try
        {
            track.Play();
        }
        catch (Java.Lang.IllegalStateException)
        {
            track.Flush();
            track.Release();
        }
    }
    else if (isPlaying && prevTone != int.Parse(e.Message))
    {
        isPlaying = !isPlaying;
        track.Stop();
        track.Flush();
        track.Release();
        byte[] GeneratedSnd = CreateSound(int.Parse(e.Message));
        track = new AudioTrack(Stream.Music, 8000, ChannelOut.Mono, Encoding.Pcm16bit, 80000, AudioTrackMode.Static);
        prevTone = int.Parse(e.Message);
        isPlaying = !isPlaying;
        track.Write(GeneratedSnd, 0, 80000);
        try
        {
            track.Play();
        }
        catch (Java.Lang.IllegalStateException)
        {
            track.Flush();
            track.Release();
        }
    }
    else
    {
        isPlaying = !isPlaying;
        track.Stop();
        track.Flush();
        track.Release();
    }
}
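// CreateSound is not included in this listing. A minimal sketch of one way to build the
// 80000-byte buffer the handler above writes (40000 samples of 16-bit mono PCM at 8000 Hz,
// i.e. 5 seconds); the name, signature, and sine-tone shape are assumptions for illustration only.
private static byte[] CreateSound(int frequencyHz)
{
    const int sampleRate = 8000;
    const int sampleCount = 40000;          // 40000 samples * 2 bytes = 80000 bytes
    byte[] pcm = new byte[sampleCount * 2];
    for (int i = 0; i < sampleCount; i++)
    {
        // 16-bit signed sine sample, stored little-endian
        short sample = (short)(short.MaxValue * Math.Sin(2 * Math.PI * frequencyHz * i / sampleRate));
        pcm[2 * i] = (byte)(sample & 0xFF);
        pcm[2 * i + 1] = (byte)((sample >> 8) & 0xFF);
    }
    return pcm;
}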
public void Stop()
{
    lock (stateLocker)
    {
        if (recorder != null)
        {
            if (recorder.RecordingState == RecordState.Recording)
            {
                recorder.Stop();
                recorder.Release();
                recorder = null;
                recordingThread = null;
            }
        }
        if (player != null)
        {
            if (player.PlayState == PlayState.Playing)
            {
                player.Stop();
                player.Release();
                player = null;
            }
        }
        OnMessage?.Invoke(this, "Stopped");
    }
}
/// <summary>
/// Stops the currently playing audio
/// </summary>
public void StopPlaying()
{
    //This lock is used to ensure that starting and stopping of songs do not happen at the same time.
    lock (startStopSyncObject)
    {
        if (!IsPlaying)
        {
            throw new InvalidOperationException("Audio is not playing");
        }
#if __ANDROID__
        //We use pause instead of stop because pause stops playing immediately
        playingTrack.Pause();
        //Lock track disposal so the track is never in a state where it is disposed/released but not null
        lock (trackDisposedOfSyncObject)
        {
            playingTrack.Release();
            playingTrack.Dispose();
            playingTrack = null;
        }
#endif
#if __IOS__
        //Pass true to stop immediately
        audioQueue.Stop(true);
        //Lock track disposal so the track is never in a state where it is disposed but not null
        lock (trackDisposedOfSyncObject)
        {
            audioQueue.Dispose();
            audioQueue = null;
        }
#endif
    }
}
public void stop()
{
    if (!running)
    {
        return;
    }
    running = false;
    MainActivity.Instance.VolumeControlStream = Stream.NotificationDefault;
    if (audioPlayer != null)
    {
        try
        {
            audioPlayer.Stop();
            audioPlayer.Release();
        }
        catch (Exception)
        {
        }
        audioPlayer.Dispose();
        audioPlayer = null;
    }
    if (audioDecoder != null)
    {
        audioDecoder.stop();
        audioDecoder.Dispose();
        audioDecoder = null;
    }
    bufferSize = 0;
}
public void AudioPlayRelease()
{
    record.Stop();
    audioTrack.Stop();
    audioTrack.Release();
    record.Release();
}
/// <summary>
/// Plays a single note. Separate from the rest of the song playing code
/// </summary>
public static void PlayNote(Instrument.Note note)
{
    lock (syncObj)
    {
#if __ANDROID__
        if (playingTrack != null)
        {
            //We use pause instead of stop because pause stops playing immediately
            playingTrack.Pause();
            playingTrack.Release();
            playingTrack.Dispose();
        }
#endif
#if __IOS__
        if (audioQueue != null)
        {
            //Pass true to stop immediately
            audioQueue.Stop(true);
            audioQueue.Dispose();
        }
#endif
#if __ANDROID__
        playingTrack = new AudioTrack(
            // Stream type
            Android.Media.Stream.Music,
            // Frequency
            SongPlayer.PLAYBACK_RATE,
            // Mono or stereo
            ChannelOut.Mono,
            // Audio encoding
            Android.Media.Encoding.Pcm16bit,
            // Length of the audio clip in bytes
            (note.data.Length * 2),
            // Mode. Stream or static.
            AudioTrackMode.Static);
        playingTrack.Write(note.data, 0, note.data.Length);
        playingTrack.Play();
#endif
#if __IOS__
        audioQueue = new OutputAudioQueue(AudioStreamBasicDescription.CreateLinearPCM(SongPlayer.PLAYBACK_RATE, 1, 16, false));
        unsafe
        {
            AudioQueueBuffer* buffer;
            audioQueue.AllocateBuffer(note.data.Length * 2, out buffer);
            fixed (short* beatData = note.data)
            {
                buffer->CopyToAudioData((IntPtr)beatData, note.data.Length * 2);
            }
            audioQueue.EnqueueBuffer((IntPtr)buffer, note.data.Length * 2, null);
        }
        audioQueue.Start();
#endif
    }
}
public void StopPlaying()
{
    if (audioTrack != null)
    {
        audioTrack.Stop();
        audioTrack.Release();
    }
}
public void Dispose()
{
    if (audio.PlayState != PlayState.Stopped)
    {
        audio.Stop();
    }
    audio.Release();
}
private void AudioTrackStop()
{
    if (audioTrack != null)
    {
        audioTrack.Stop();
        audioTrack.Release();
        audioTrack = null;
    }
}
public void Close()
{
    if (audioTrack != null)
    {
        audioTrack.Stop();
        audioTrack.Release();
        audioTrack.Dispose();
    }
}
public void StopProgressTone()
{
    if (mProgressTone != null)
    {
        mProgressTone.Stop();
        mProgressTone.Release();
        mProgressTone = null;
    }
}
public void Dispose()
{
    Stop();
    StopImmediate();
    audioTrack.Release();
    audioTrack.Dispose();
    audioTrack = null;
}
public void Stop()
{
    if (audioTrack != null)
    {
        audioTrack.Stop();
        audioTrack.Release();
        audioTrack = null;
    }
}
private void StopImmediate()
{
    if (audioTrackImmediate != null)
    {
        audioTrackImmediate.Stop();
        audioTrackImmediate.Release();
        audioTrackImmediate.Dispose();
        audioTrackImmediate = null;
    }
}
public void StopPlaying()
{
    WvlLogger.Log(LogType.TraceAll, "StopPlaying()");
    if (audioTrack != null)
    {
        audioTrack.Stop();
        audioTrack.Release();
        audioTrack = null;
    }
}
public void Stop()
{
    if (_audioTrack == null)
    {
        return;
    }
    _audioTrack.Stop();
    _audioTrack.Release();
    _audioTrack = null;
}
internal virtual void InternalDispose()
{
    Mixer.m_sounds.Remove(this);
    if (m_audioTrack != null)
    {
        m_audioTrack.Pause();
        m_audioTrack.Release();
        m_audioTrack.Dispose();
        m_audioTrack = null;
        Mixer.m_audioTracksDestroyed++;
    }
}
public void stop()
{
    if (audioPlayer == null)
    {
        return;
    }
    audioPlayer.Stop();
    audioPlayer.Release();
    audioPlayer.Dispose();
    audioPlayer = null;
    running = false;
}
public void PlaySound(int sampling_rate, byte[] pcm_data)
{
    if (audio != null)
    {
        audio.Stop();
        audio.Release();
    }
    audio = new AudioTrack(Stream.Music, sampling_rate, ChannelOut.Mono, Encoding.Pcm16bit,
        pcm_data.Length * sizeof(short), AudioTrackMode.Static);
    audio.Write(pcm_data, 0, pcm_data.Length);
    audio.Play();
}
private void StopStreamingAudio()
{
#if __ANDROID__
    if (playingTrack == null || playingTrack.PlayState != PlayState.Playing)
    {
        throw new InvalidOperationException("Audio is not playing");
    }
    playingTrack.Pause();
    //playingTrack.Flush();
    playingTrack.Release();
    playingTrack.Dispose();
#endif
}
public void PlaySound(int samplingRate, byte[] pcmData)
{
    if (previousAudioTrack != null)
    {
        previousAudioTrack.Stop();
        previousAudioTrack.Release();
    }
    AudioTrack audioTrack = new AudioTrack(Stream.Music, samplingRate, ChannelOut.Mono, Android.Media.Encoding.Pcm16bit,
        pcmData.Length * sizeof(short), AudioTrackMode.Static);
    audioTrack.Write(pcmData, 0, pcmData.Length);
    audioTrack.Play();
    previousAudioTrack = audioTrack;
}
public void deleteRecord()
{
    if (audioTrack != null)
    {
        //audioTrack.Dispose();
        //audioTrack = null;
        audioTrack.Release();
    }
    if (audRecorder != null)
    {
        //audRecorder.Dispose();
        //audRecorder = null;
        audRecorder.Release();
    }
}
public void Stop()
{
    if (State == SoundStates.Stopped)
    {
        return;
    }
    if (instance == null)
    {
        return;
    }
    instance.Stop();
    instance.Release();
    instance.Dispose();
    instance = null;
    audio.removeinstance();
    State = SoundStates.Stopped;
}
public StreamingSound(StreamingSource streamingSource, float volume = 1f, float pitch = 1f, float pan = 0f, bool isLooped = false, bool disposeOnStop = false, float bufferDuration = 0.3f)
{
    VerifyStreamingSource(streamingSource);
    m_bufferDuration = MathUtils.Clamp(bufferDuration, 0f, 10f);
    ChannelOut channelConfig = (streamingSource.ChannelsCount == 1) ? ChannelOut.FrontLeft : ChannelOut.Stereo;
    int minBufferSize = AudioTrack.GetMinBufferSize(streamingSource.SamplingFrequency, channelConfig, Encoding.Pcm16bit);
    int bufferSizeInBytes = MathUtils.Max(CalculateBufferSize(m_bufferDuration), minBufferSize);
    m_audioTrack = new AudioTrack(Stream.Music, streamingSource.SamplingFrequency, channelConfig, Encoding.Pcm16bit, bufferSizeInBytes, AudioTrackMode.Stream);
    //m_audioTrack = new AudioTrack(new AudioAttributes.Builder().SetUsage(AudioUsageKind.Media).SetContentType(AudioContentType.Music).Build(), new AudioFormat(), bufferSizeInBytes, AudioTrackMode.Static, 0);
    Mixer.m_audioTracksCreated++;
    if (m_audioTrack.State == AudioTrackState.Uninitialized)
    {
        m_audioTrack.Release();
        m_audioTrack = null;
        Mixer.m_audioTracksDestroyed++;
        Log.Warning("Failed to create StreamingSound AudioTrack. Created={0}, Destroyed={1}", Mixer.m_audioTracksCreated, Mixer.m_audioTracksDestroyed);
    }
    StreamingSource = streamingSource;
    base.ChannelsCount = streamingSource.ChannelsCount;
    base.SamplingFrequency = streamingSource.SamplingFrequency;
    base.Volume = volume;
    base.Pitch = pitch;
    base.Pan = pan;
    base.IsLooped = isLooped;
    base.DisposeOnStop = disposeOnStop;
    if (m_audioTrack != null)
    {
        m_task = Task.Run(delegate
        {
            try
            {
                StreamingThreadFunction();
            }
            catch (Exception message)
            {
                Log.Error(message);
            }
        });
    }
}
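// StreamingThreadFunction is not included in this listing. A minimal sketch of the kind of
// loop a stream-mode AudioTrack typically needs: pull PCM from the source and push it to the
// track with blocking writes. Everything other than the AudioTrack calls (Play/Write/Stop)
// is an assumption for illustration, not the original implementation.
private void StreamingThreadFunction()
{
    byte[] buffer = new byte[4096];
    m_audioTrack.Play();
    while (!m_stopRequested)                                            // hypothetical stop flag
    {
        int read = StreamingSource.Read(buffer, 0, buffer.Length);      // hypothetical source API
        if (read <= 0)
        {
            if (IsLooped)
            {
                StreamingSource.Position = 0;                           // hypothetical rewind
                continue;
            }
            break;
        }
        // Write blocks until the track has drained enough of its internal buffer to accept the data.
        m_audioTrack.Write(buffer, 0, read);
    }
    m_audioTrack.Stop();
}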
public void stop()
{
    running = false;
    lock (pendingFrames)
    {
        pendingFrames.Clear();
        availableBuffers.Clear();
    }
    if (audioPlayer != null)
    {
        try
        {
            audioPlayer.Stop();
            audioPlayer.Release();
        }
        catch (Exception)
        {
        }
        audioPlayer.Dispose();
        audioPlayer = null;
    }
    if (audioDecoder != null)
    {
        try
        {
            audioDecoder.Stop();
            audioDecoder.Release();
        }
        catch (Exception)
        {
        }
        audioDecoder.Dispose();
        audioDecoder = null;
    }
    bufferSize = 0;
}
private void OnPlayButton(string koe, int speed)
{
    var wav = new AquesTalk().synthe(koe, speed);
    if (wav.Length == 1)
    {
        return;
    }
    if (_audioTrack != null)
    {
        _audioTrack.Stop();
        _audioTrack.Release();
        _audioTrack.Dispose();
    }
    // Skip the 44-byte WAV header and write only the PCM payload to the static track.
    _audioTrack = new AudioTrack(Stream.Music, 8000, ChannelOut.Mono, Encoding.Pcm16bit, wav.Length - 44, AudioTrackMode.Static);
    _audioTrack.Write(wav, 44, wav.Length - 44);
    _audioTrack.Play();
}
public static AudioTrack GetAudioTrack(SoundBuffer soundBuffer, bool isLooped)
{
    if (!isLooped && Mixer.EnableAudioTrackCaching && m_audioTracks.Count >= 16)
    {
        foreach (AudioTrackData audioTrack2 in m_audioTracks)
        {
            if (audioTrack2.IsAvailable && audioTrack2.SoundBuffer == soundBuffer)
            {
                audioTrack2.IsAvailable = false;
                m_cacheHits++;
                LogCacheStats();
                return audioTrack2.AudioTrack;
            }
        }
        AudioTrackData audioTrackData = null;
        foreach (AudioTrackData audioTrack3 in m_audioTracks)
        {
            if (audioTrack3.IsAvailable
                && audioTrack3.SoundBuffer.ChannelsCount == soundBuffer.ChannelsCount
                && audioTrack3.SoundBuffer.SamplingFrequency == soundBuffer.SamplingFrequency
                && audioTrack3.BytesCount >= soundBuffer.m_data.Length
                && (audioTrackData == null || audioTrack3.BytesCount <= audioTrackData.BytesCount))
            {
                audioTrackData = audioTrack3;
            }
        }
        if (audioTrackData != null)
        {
            if (m_buffer == null || m_buffer.Length < audioTrackData.BytesCount)
            {
                m_buffer = new byte[audioTrackData.BytesCount];
            }
            Array.Copy(soundBuffer.m_data, 0, m_buffer, 0, soundBuffer.m_data.Length);
            Array.Clear(m_buffer, soundBuffer.m_data.Length, audioTrackData.BytesCount - soundBuffer.m_data.Length);
            audioTrackData.AudioTrack.Write(m_buffer, 0, audioTrackData.BytesCount);
            audioTrackData.SoundBuffer = soundBuffer;
            audioTrackData.IsAvailable = false;
            m_cacheHitsWithWrite++;
            LogCacheStats();
            return audioTrackData.AudioTrack;
        }
        bool flag = true;
        foreach (AudioTrackData audioTrack4 in m_audioTracks)
        {
            if (audioTrack4.BytesCount < soundBuffer.m_data.Length)
            {
                flag = false;
                break;
            }
        }
        if (flag)
        {
            m_cacheFulls++;
            Log.Warning("AudioTrackCache full, no audio tracks available.");
            LogCacheStats();
            return null;
        }
    }
    AudioTrack audioTrack = new AudioTrack(Stream.Music, soundBuffer.SamplingFrequency, (soundBuffer.ChannelsCount == 1) ? ChannelOut.FrontLeft : ChannelOut.Stereo, Encoding.Pcm16bit, soundBuffer.m_data.Length, AudioTrackMode.Static);
    // AudioTrack audioTrack = new AudioTrack(new AudioAttributes.Builder().SetUsage(AudioUsageKind.Media).SetContentType(AudioContentType.Music).Build(), new AudioFormat(), soundBuffer.m_data.Length, AudioTrackMode.Static, 0);
    if (audioTrack.State != AudioTrackState.Uninitialized)
    {
        audioTrack.Write(soundBuffer.m_data, 0, soundBuffer.m_data.Length);
        if (!isLooped)
        {
            m_audioTracks.Add(new AudioTrackData
            {
                AudioTrack = audioTrack,
                SoundBuffer = soundBuffer,
                BytesCount = soundBuffer.m_data.Length,
                IsAvailable = false
            });
        }
        else
        {
            Mixer.CheckTrackStatus(audioTrack.SetLoopPoints(0, soundBuffer.SamplesCount, -1));
        }
    }
    else
    {
        audioTrack.Release();
        audioTrack = null;
        Log.Warning("Failed to create Cache AudioTrack.");
    }
    m_cacheMisses++;
    if (Mixer.EnableAudioTrackCaching && m_cacheMisses > 200 && m_cacheMisses % 100 == 0)
    {
        Log.Warning("Over {0} AudioTrack objects created.", m_cacheMisses);
    }
    LogCacheStats();
    return audioTrack;
}
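// A minimal usage sketch for the cache above, assuming GetAudioTrack and ReturnAudioTrack
// live on the same cache class and that the caller waits for the static buffer to finish
// before handing the track back; the method name and the elided wait are for illustration only.
public static void PlayCachedOneShot(SoundBuffer soundBuffer)
{
    AudioTrack track = GetAudioTrack(soundBuffer, isLooped: false);
    if (track == null)
    {
        return;   // cache full and no new track could be created
    }
    track.Play();
    // ... wait until the static buffer has finished playing ...
    ReturnAudioTrack(track);   // rewinds and keeps the track for reuse, or releases it
}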