/// <summary>
/// Enables the audio loopback. The local audio will be played with <paramref name="policy"/>.
/// </summary>
/// <param name="policy">The <see cref="AudioStreamPolicy"/> to apply.</param>
/// <remarks>
/// <see cref="MediaSource"/> does not support all <see cref="AudioStreamType"/>.<br/>
/// Supported types are <see cref="AudioStreamType.Media"/>, <see cref="AudioStreamType.Voip"/>,
/// <see cref="AudioStreamType.MediaExternalOnly"/>.<br/>
/// </remarks>
/// <exception cref="ArgumentNullException"><paramref name="policy"/> is null.</exception>
/// <exception cref="InvalidOperationException">
/// MediaSource is not attached yet.<br/>
/// -or-<br/>
/// This MediaSource is not Audio
/// </exception>
/// <exception cref="NotSupportedException">
/// <see cref="AudioStreamType"/> of <paramref name="policy"/> is not supported on the current platform.
/// </exception>
/// <exception cref="ObjectDisposedException">
/// <paramref name="policy"/> or WebRTC has already been disposed.
/// </exception>
/// <returns><see cref="MediaStreamTrack"/></returns>
public MediaStreamTrack EnableAudioLoopback(AudioStreamPolicy policy)
{
    // Validate the argument before any state checks (argument validation
    // conventionally comes first, and matches the documented exception contract).
    if (policy == null)
    {
        throw new ArgumentNullException(nameof(policy));
    }

    if (!SourceId.HasValue)
    {
        throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
    }

    if (MediaType != MediaType.Audio)
    {
        throw new InvalidOperationException("AudioLoopback is only for Audio MediaSource");
    }

    var ret = NativeWebRTC.SetAudioLoopback(WebRtc.Handle, SourceId.Value, policy.Handle, out uint trackId);

    // The native layer reports an unsupported stream type as InvalidArgument;
    // surface it as NotSupportedException per the documented contract.
    if (ret == WebRTCErrorCode.InvalidArgument)
    {
        throw new NotSupportedException("The specified policy is not supported on the current system.");
    }

    ret.ThrowIfFailed("Failed to set the audio stream policy to the WebRTC");

    return new MediaStreamTrack(WebRtc, MediaType, trackId);
}
/// <summary>
/// Plays a sound.
/// </summary>
/// <remarks>
/// You may need to stop or pause playing a sound when your app is paused.
/// </remarks>
/// <param name="isLooping">Indicates whether play sound repeatedly.</param>
public async Task PlayAsync(bool isLooping = true)
{
    // Re-configure the player only when the media source or the audio stream
    // type changed since the last playback; both require the Idle state.
    if (_mediaSourceUpdated || _streamTypeUpdated)
    {
        if (_player.State != PlayerState.Idle)
        {
            _player.Unprepare();
        }

        if (_mediaSourceUpdated)
        {
            _player.SetSource(_mediaSource);
            _mediaSourceUpdated = false;
        }

        if (_streamTypeUpdated)
        {
            // Replace the old policy: unsubscribe before disposing so the
            // disposed instance no longer raises FocusStateChanged.
            if (_streamPolicy != null)
            {
                _streamPolicy.FocusStateChanged -= OnFocusStateChanged;
                _streamPolicy.Dispose();
            }

            _streamPolicy = new AudioStreamPolicy(_streamType);
            _streamPolicy.FocusStateChanged += OnFocusStateChanged;

            // TODO: If you want to reacquire the focus you've lost automatically, enable the focus reacquisition.
            // In addition, you may need to implement the case of AudioStreamFocusState.Acquired in OnFocusStateChanged.
            _streamPolicy.FocusReacquisitionEnabled = false;

            _player.ApplyAudioStreamPolicy(_streamPolicy);
            _streamTypeUpdated = false;
        }
    }

    // Restart from the beginning if already playing; otherwise prepare the
    // player so Start() below is a valid transition.
    if (_player.State == PlayerState.Playing)
    {
        _player.Stop();
    }
    else if (_player.State == PlayerState.Idle)
    {
        await _player.PrepareAsync();
    }

    // NOTE(review): assumes _streamPolicy is non-null here, i.e. _streamTypeUpdated
    // was true at least once before the first playback — confirm against callers.
    if (_streamPolicy.PlaybackFocusState != AudioStreamFocusState.Acquired)
    {
        _streamPolicy.AcquireFocus(AudioStreamFocusOptions.Playback, AudioStreamBehaviors.Fading, null);
    }

    _player.IsLooping = isLooping;
    _player.Start();
}
/// <summary>
/// Applies the audio stream policy to <see cref="MediaMicrophoneSource"/>.
/// </summary>
/// <param name="policy">The <see cref="AudioStreamPolicy"/> to apply.</param>
/// <remarks>
/// The WebRTC must be in the <see cref="WebRTCState.Idle"/> state.<br/>
/// <br/>
/// <see cref="WebRTC"/> does not support all <see cref="AudioStreamType"/>.<br/>
/// Supported types are <see cref="AudioStreamType.Media"/>, <see cref="AudioStreamType.VoiceRecognition"/>,
/// <see cref="AudioStreamType.Voip"/>, <see cref="AudioStreamType.MediaExternalOnly"/>.
/// </remarks>
/// <exception cref="ObjectDisposedException">
/// The WebRTC has already been disposed.<br/>
/// -or-<br/>
/// <paramref name="policy"/> has already been disposed.
/// </exception>
/// <exception cref="InvalidOperationException">The WebRTC is not in the valid state.</exception>
/// <exception cref="ArgumentNullException"><paramref name="policy"/> is null.</exception>
/// <exception cref="NotSupportedException">
/// <see cref="AudioStreamType"/> of <paramref name="policy"/> is not supported on the current platform.
/// </exception>
/// <seealso cref="AudioStreamPolicy"/>
/// <since_tizen> 9 </since_tizen>
public void ApplyAudioStreamPolicy(AudioStreamPolicy policy)
{
    if (policy is null)
    {
        throw new ArgumentNullException(nameof(policy), "policy is null");
    }

    // Policy can only be applied before the session starts.
    WebRtc.ValidateWebRTCState(WebRTCState.Idle);

    var errorCode = NativeWebRTC.SetAudioStreamPolicyToMicrophoneSource(
        WebRtc.Handle, SourceId.Value, policy.Handle);

    // The native layer signals an unsupported stream type via InvalidArgument;
    // map that to NotSupportedException as documented above.
    if (errorCode == WebRTCErrorCode.InvalidArgument)
    {
        throw new NotSupportedException("The specified policy is not supported on the current system.");
    }

    errorCode.ThrowIfFailed("Failed to set the audio stream policy to the WebRTC");
}
/// <summary>
/// Initializes a new instance of the <see cref="MainViewModel"/> class
/// </summary>
/// <param name="navigation">
/// Navigation instance
/// </param>
public MainViewModel(INavigation navigation)
{
    Navigation = navigation;

    PlayCommand = new Command(async () =>
    {
        // Mark busy and create a fresh cancellation source + policy per playback.
        _isPlaying = true;
        _cts = new CancellationTokenSource();
        _audioStreamPolicy = new AudioStreamPolicy(AudioStreamType.Media);

        // Refresh both buttons' enabled state now that playback has started.
        PlayCommand.ChangeCanExecute();
        CancelCommand.ChangeCanExecute();

        try
        {
            // NOTE(review): Duration * _secUnit presumably converts seconds to
            // the unit TonePlayer expects (ms?) — confirm against TonePlayer docs.
            await TonePlayer.StartAsync(ToneType.Default, _audioStreamPolicy, Duration * _secUnit, _cts.Token);
        }
        catch (TaskCanceledException)
        {
            // Expected path when CancelCommand cancels the token; not an error.
            Tizen.Log.Info("TonePlayer", "A task for playing media was canceled.");
        }
        finally
        {
            // Always release per-playback resources, cancelled or not.
            _isPlaying = false;
            _cts?.Dispose();
            _audioStreamPolicy?.Dispose();
        }

        // Refresh button state again after playback finished.
        PlayCommand.ChangeCanExecute();
        CancelCommand.ChangeCanExecute();
    }, CanPlay);

    CancelCommand = new Command(() =>
    {
        // NOTE(review): if the play task's finally block has already disposed
        // _cts, Cancel() here could throw ObjectDisposedException — verify timing.
        if (_isPlaying)
        {
            _cts?.Cancel();
            _isPlaying = false;
        }

        PlayCommand.ChangeCanExecute();
        CancelCommand.ChangeCanExecute();
    }, CanCancel);
}
/// <summary>
/// Asynchronously plays ringing sound
/// </summary>
/// <param name="targetVolume">The volume level at playing</param>
/// <param name="toneTypes">AlarmToneTypes to play</param>
async public void PlaySound(float targetVolume, AlarmToneTypes toneTypes)
{
    // Release the previous playback session, if any, before starting a new one.
    // The fields were previously overwritten without disposal, leaking the old
    // Player/AudioStreamPolicy and leaving the old ringtone playing.
    player?.Dispose();
    audioStreamPolicy?.Dispose();

    // NOTE(review): the toneTypes parameter is currently unused — the ringtone
    // is always SystemSettings.IncomingCallRingtone. Confirm intended behavior.
    audioStreamPolicy = new AudioStreamPolicy(AudioStreamType.Alarm);
    audioStreamPolicy.AcquireFocus(AudioStreamFocusOptions.Playback, AudioStreamBehaviors.NoResume, null);

    player = new Player();
    MediaUriSource soundSource = new MediaUriSource(SystemSettings.IncomingCallRingtone);
    player.SetSource(soundSource);
    player.ApplyAudioStreamPolicy(audioStreamPolicy);

    // async void is kept for signature compatibility; exceptions thrown after
    // this await are unobservable by callers.
    await player.PrepareAsync();

    player.IsLooping = true;
    player.Volume = targetVolume;

    // Defensive: only start when preparation actually reached the Ready state.
    if (player.State == PlayerState.Ready)
    {
        player.Start();
    }
}
/// <summary>
/// Asynchronously plays ringing sound
/// </summary>
async public static void PlaySound()
{
    // Initialize once: while a player instance exists, further calls are no-ops.
    if (player != null)
    {
        return;
    }

    // Acquire playback focus with fading before creating the player.
    audioStreamPolicy = new AudioStreamPolicy(AudioStreamType.Alarm);
    audioStreamPolicy.AcquireFocus(AudioStreamFocusOptions.Playback, AudioStreamBehaviors.Fading, null);

    player = new Player();
    player.SetSource(new MediaUriSource(SystemSettings.SoundNotification));
    player.ApplyAudioStreamPolicy(audioStreamPolicy);

    await player.PrepareAsync();

    // Loop at full volume; only start if preparation reached the Ready state.
    player.IsLooping = true;
    player.Volume = 1;

    if (player.State == PlayerState.Ready)
    {
        player.Start();
    }
}