Code example #1
 private void Start()
 {
     baseMenuPanel.SetActive(true);
     howToPlayPanel.SetActive(false);
     optionsPanel.SetActive(false);
     audioOptions = GetComponent <AudioOptions>();
 }
Code example #2
File: Options.cs Project: summer-of-software/vtank
 public Options()
 {
     Video = getDefaultVideoOptions();
     Audio = getDefaultAudioOptions();
     GamePlay = getDefaultGamePlayOptions();
     KeySettings = getDefaultKeyBindings();
 }
Code example #3
File: SIPClient.cs Project: k2fc/sipsorcery
        /// <summary>
        /// Places an outgoing SIP call.
        /// </summary>
        /// <param name="destination">The SIP URI to place a call to. The destination can be a full SIP URI, in which case the call will
        /// be placed anonymously directly to that URI. Alternatively it can be just the user portion of a URI, in which case it will
        /// be sent to the configured SIP server.</param>
        public async Task Call(string destination)
        {
            // Determine if this is a direct anonymous call or whether it should be placed using the pre-configured SIP server account.
            SIPURI callURI     = null;
            string sipUsername = null;
            string sipPassword = null;
            string fromHeader  = null;

            if (destination.Contains("@") || m_sipServer == null)
            {
                // Anonymous call direct to SIP server specified in the URI.
                callURI    = SIPURI.ParseSIPURIRelaxed(destination);
                fromHeader = (new SIPFromHeader(m_sipFromName, SIPURI.ParseSIPURI(SIPFromHeader.DEFAULT_FROM_URI), null)).ToString();
            }
            else
            {
                // This call will use the pre-configured SIP account.
                callURI     = SIPURI.ParseSIPURIRelaxed(destination + "@" + m_sipServer);
                sipUsername = m_sipUsername;
                sipPassword = m_sipPassword;
                fromHeader  = (new SIPFromHeader(m_sipFromName, new SIPURI(m_sipUsername, m_sipServer, null), null)).ToString();
            }

            StatusMessage(this, $"Starting call to {callURI}.");

            var lookupResult = await Task.Run(() =>
            {
                return(SIPDNSManager.ResolveSIPService(callURI, false));
            });

            if (lookupResult == null || lookupResult.LookupError != null)
            {
                StatusMessage(this, $"Call failed, could not resolve {callURI}.");
            }
            else
            {
                var dstEndpoint = lookupResult.GetSIPEndPoint();
                StatusMessage(this, $"Call progressing, resolved {callURI} to {dstEndpoint}.");
                System.Diagnostics.Debug.WriteLine($"DNS lookup result for {callURI}: {dstEndpoint}.");
                SIPCallDescriptor callDescriptor = new SIPCallDescriptor(sipUsername, sipPassword, callURI.ToString(), fromHeader, null, null, null, null, SIPCallDirection.Out, _sdpMimeContentType, null, null);

                var audioSrcOpts = new AudioOptions
                {
                    AudioSource       = AudioSourcesEnum.Microphone,
                    OutputDeviceIndex = m_audioOutDeviceIndex
                };
                var videoSrcOpts = new VideoOptions
                {
                    VideoSource           = VideoSourcesEnum.TestPattern,
                    SourceFile            = RtpAVSession.VIDEO_TESTPATTERN,
                    SourceFramesPerSecond = VIDEO_LIVE_FRAMES_PER_SECOND
                };
                MediaSession = new RtpAVSession(audioSrcOpts, videoSrcOpts);

                m_userAgent.RemotePutOnHold   += OnRemotePutOnHold;
                m_userAgent.RemoteTookOffHold += OnRemoteTookOffHold;

                await m_userAgent.InitiateCallAsync(callDescriptor, MediaSession);
            }
        }
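A minimal usage sketch for the Call method above, assuming an already constructed SIPClient instance named client; the instance name and destinations are hypothetical, and the two destination forms follow the rules described in the XML comment:

    // Sketch only: both destination forms accepted by Call (see the XML comment above).
    // "client" is a hypothetical, fully configured SIPClient instance.

    // Full SIP URI: the call is placed anonymously, directly to that URI.
    await client.Call("sip:alice@sip.example.com");

    // User portion only: the call is routed via the pre-configured SIP server account.
    await client.Call("alice");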
Code example #4
    public void TestInit()
    {
        float  fadeInTime       = 0.3f;
        float  fadeOutTime      = 0.0f;
        float  volume           = 0.9f;
        bool   shouldLoop       = true;
        bool   hardTransitionIn = false;
        Action onEndHandler     = delegate {};

        AudioOptions options = new AudioOptions(
            fadeInTime,
            fadeOutTime,
            volume,
            shouldLoop,
            hardTransitionIn,
            onEndHandler);

        Assert.AreEqual(fadeInTime, options.FadeInTime);
        Assert.AreEqual(fadeOutTime, options.FadeOutTime);
        Assert.AreEqual(volume, options.Volume);
        Assert.AreEqual(shouldLoop, options.Looping);
        Assert.AreEqual(hardTransitionIn, options.HardTransitionIn);
        Assert.True(options.HasFadeIn);
        Assert.False(options.HasFadeOut);
        Assert.True(options.HasEndHandler);
    }
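The constructor and properties exercised here (and in the GetDefault, volume-clamping, and OnEnd tests further down) imply a class shape roughly like the sketch below. This is an inference from the assertions only, not the project's actual source; the default volume and the use of UnityEngine.Mathf are assumptions.

    // Inferred sketch of the AudioOptions class under test. Property names and
    // behaviour come from the assertions in these tests; everything else is assumed.
    using System;
    using UnityEngine;

    public class AudioOptions
    {
        public float FadeInTime { get; private set; }
        public float FadeOutTime { get; private set; }
        public float Volume { get; private set; }
        public bool Looping { get; private set; }
        public bool HardTransitionIn { get; private set; }

        private readonly Action onEndHandler;

        public bool HasFadeIn { get { return FadeInTime > 0f; } }
        public bool HasFadeOut { get { return FadeOutTime > 0f; } }
        public bool HasEndHandler { get { return onEndHandler != null; } }

        public AudioOptions(float fadeInTime, float fadeOutTime, float volume,
                            bool shouldLoop, bool hardTransitionIn, Action onEndHandler = null)
        {
            FadeInTime = fadeInTime;
            FadeOutTime = fadeOutTime;
            Volume = Mathf.Clamp01(volume);   // TestInit_ClampVolumeMax expects 100f to clamp to 1f
            Looping = shouldLoop;
            HardTransitionIn = hardTransitionIn;
            this.onEndHandler = onEndHandler;
        }

        // TestGetDefault expects no fades, no looping, a hard transition in and no end handler.
        // The default volume of 1f is an assumption; the test does not assert it.
        public static AudioOptions GetDefault()
        {
            return new AudioOptions(0f, 0f, 1f, false, true, null);
        }

        // TestOnEnd expects OnEnd() to invoke the handler supplied to the constructor.
        public void OnEnd()
        {
            if (onEndHandler != null)
            {
                onEndHandler();
            }
        }
    }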
Code example #5
        /// <summary>
        /// Gets the audio stream information.
        /// </summary>
        /// <param name="serverId">The server identifier.</param>
        /// <param name="options">The options.</param>
        /// <returns>Task&lt;StreamInfo&gt;.</returns>
        public async Task <StreamInfo> GetAudioStreamInfo(string serverId, AudioOptions options)
        {
            var streamBuilder = new StreamBuilder();

            var localItem = await _localAssetManager.GetLocalItem(serverId, options.ItemId);

            if (localItem != null)
            {
                var localMediaSource = localItem.Item.MediaSources[0];

                // Use the local media source, unless a specific server media source was requested
                if (string.IsNullOrWhiteSpace(options.MediaSourceId) ||
                    string.Equals(localMediaSource.Id, options.MediaSourceId,
                                  StringComparison.OrdinalIgnoreCase))
                {
                    // Finally, check to make sure the local file is actually available at this time
                    var fileExists = await _localAssetManager.FileExists(localMediaSource.Path).ConfigureAwait(false);

                    if (fileExists)
                    {
                        options.MediaSources = localItem.Item.MediaSources;

                        var result = streamBuilder.BuildAudioItem(options);
                        result.PlayMethod = PlayMethod.DirectPlay;
                        return(result);
                    }
                }
            }

            return(streamBuilder.BuildAudioItem(options));
        }
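For context, a hedged usage sketch of the method above; the calling method is hypothetical and assumed to live in the same class, the serverId and itemId values are placeholders, and only AudioOptions members that appear in the surrounding examples are used:

    // Sketch only: prefer direct play of a synced local copy when one is available.
    public async Task PlayLocalCopyIfAvailable(string serverId, string itemId)
    {
        var options = new AudioOptions
        {
            ItemId = itemId
            // Leaving MediaSourceId unset means "use the local media source if it exists".
        };

        StreamInfo streamInfo = await GetAudioStreamInfo(serverId, options);

        if (streamInfo.PlayMethod == PlayMethod.DirectPlay)
        {
            // The local file was found on disk and will be played directly.
        }
    }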
Code example #6
        internal static async Task <PlaylistItem> ToPlaylistItem(this BaseItemDto item, IApiClient apiClient, IPlaybackManager playbackManager)
        {
            var profile = VideoProfileHelper.GetWindowsPhoneProfile();
            var options = new AudioOptions
            {
                Profile      = profile,
                ItemId       = item.Id,
                DeviceId     = apiClient.DeviceId,
                MaxBitrate   = 128,
                MediaSources = item.MediaSources
            };

            var streamInfo = await playbackManager.GetAudioStreamInfo(App.ServerInfo.Id, options, false, apiClient);

            var streamUrl = streamInfo.ToUrl(apiClient.GetApiUrl("/"), apiClient.AccessToken);

            var converter = new Converters.ImageUrlConverter();

            return(new PlaylistItem
            {
                Album = item.Album,
                Artist = item.AlbumArtist,
                TrackName = item.Name,
                TrackUrl = streamUrl.Replace(App.ServerInfo.LocalAddress, !string.IsNullOrEmpty(App.ServerInfo.ManualAddress) ? App.ServerInfo.ManualAddress : App.ServerInfo.RemoteAddress),
                MediaBrowserId = item.Id,
                IsJustAdded = true,
                ImageUrl = (string)converter.Convert(item, typeof(string), null, null),
                BackgroundImageUrl = (string)converter.Convert(item, typeof(string), "backdrop", null),
                RunTimeTicks = item.RunTimeTicks ?? 0
            });
        }
Code example #7
    public void Stop(string sourceName, AudioClip clip)
    {
        AudioSource source;

        if (!sources.TryGetValue(sourceName, out source))
        {
            Debug.LogErrorFormat("Source {0} not found. Unable to stop clip.", sourceName);
            return;
        }

        if (!clipOptions.ContainsKey(getClipIdentifier(source, clip)))
        {
            Debug.LogErrorFormat("Source {0} options not found. Unable to stop clip.", sourceName);
            return;
        }

        AudioOptions options          = clipOptions[getClipIdentifier(source, clip)];
        Action       endHandlerAction = delegate { options.OnEnd(); };
        IEnumerator  fadeOutCoroutine = getFadeOutCoroutine(source, options, options.HasEndHandler ? endHandlerAction : null);

        if (options.HasEndHandler)
        {
            // waitAndExecute delays for the fade-out time, then kicks off the fade-out
            // coroutine (which invokes the end handler when it completes).
            IEnumerator endClipCoroutine = waitAndExecute(Mathf.Max(0, options.FadeOutTime), delegate
            {
                StartCoroutine(fadeOutCoroutine);
            });

            // The scheduled coroutine has to be started; declaring it alone does nothing.
            StartCoroutine(endClipCoroutine);
        }
        else
        {
            StartCoroutine(fadeOutCoroutine);
        }
    }
Code example #8
File: AudioController.cs Project: Kruemelkatze/Swarm
    private (float volume, float pitch) ApplyVariations(AudioOptions options)
    {
        var volume = options.Volume + Random.Range(-options.VolumeVariation, options.VolumeVariation);
        var pitch  = options.Pitch + Random.Range(-options.PitchVariation, options.PitchVariation);

        return(volume, pitch);
    }
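One way the returned tuple might be consumed on a Unity AudioSource, as a sketch; the surrounding method, its parameters, and the choice of PlayOneShot are assumptions, and the method is assumed to sit in the same class as ApplyVariations above:

    // Sketch: apply the varied volume and pitch before playing a one-shot clip.
    private void PlayWithVariations(AudioSource source, AudioClip clip, AudioOptions options)
    {
        var (volume, pitch) = ApplyVariations(options);

        source.pitch = pitch;             // pitch has to be set on the AudioSource itself
        source.PlayOneShot(clip, volume); // volume scale can be passed per shot
    }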
Code example #9
 public Options()
 {
     Video       = getDefaultVideoOptions();
     Audio       = getDefaultAudioOptions();
     GamePlay    = getDefaultGamePlayOptions();
     KeySettings = getDefaultKeyBindings();
 }
Code example #10
        private static string GetStreamedPath(BaseItemDto item, IApiClient apiClient, long?startTimeTicks, int?maxBitrate)
        {
            var profile = new MediaBrowserTheaterProfile();

            StreamInfo info;

            if (item.IsAudio)
            {
                var options = new AudioOptions
                {
                    Context  = EncodingContext.Streaming,
                    DeviceId = apiClient.DeviceId,
                    ItemId   = item.Id,

                    // TODO: Reduce to 2 if the user only has stereo speakers
                    MaxAudioChannels = 6,

                    MaxBitrate   = maxBitrate,
                    MediaSources = item.MediaSources,

                    Profile = profile
                };

                info = new StreamBuilder().BuildAudioItem(options);
                info.StartPositionTicks = startTimeTicks ?? 0;

                if (info.MediaSource.Protocol == MediaProtocol.File && File.Exists(info.MediaSource.Path))
                {
                    return(info.MediaSource.Path);
                }
                return(info.ToUrl(apiClient.ServerAddress + "/mediabrowser"));
            }
            else
            {
                var options = new VideoOptions
                {
                    Context  = EncodingContext.Streaming,
                    DeviceId = apiClient.DeviceId,
                    ItemId   = item.Id,

                    // TODO: Reduce to 2 if the user only has stereo speakers
                    MaxAudioChannels = 6,

                    MaxBitrate   = maxBitrate,
                    MediaSources = item.MediaSources,

                    Profile = profile
                };

                info = new StreamBuilder().BuildVideoItem(options);
                info.StartPositionTicks = startTimeTicks ?? 0;

                if (info.MediaSource.Protocol == MediaProtocol.File && File.Exists(info.MediaSource.Path))
                {
                    return(info.MediaSource.Path);
                }
                return(info.ToUrl(apiClient.ServerAddress + "/mediabrowser") + "&EnableAdaptiveBitrateStreaming=false");
            }
        }
Code example #11
        /// <summary>
        /// Answers an incoming SIP call.
        /// </summary>
        public async Task <bool> Answer()
        {
            if (m_pendingIncomingCall == null)
            {
                StatusMessage(this, $"There was no pending call available to answer.");
                return(false);
            }
            else
            {
                var sipRequest = m_pendingIncomingCall.ClientTransaction.TransactionRequest;

                SDP  offerSDP = SDP.ParseSDPDescription(sipRequest.Body);
                bool hasAudio = offerSDP.Media.Any(x => x.Media == SDPMediaTypesEnum.audio && x.MediaStreamStatus != MediaStreamStatusEnum.Inactive);
                bool hasVideo = offerSDP.Media.Any(x => x.Media == SDPMediaTypesEnum.video && x.MediaStreamStatus != MediaStreamStatusEnum.Inactive);

                AudioOptions audioOpts = new AudioOptions {
                    AudioSource = AudioSourcesEnum.None
                };
                if (hasAudio)
                {
                    audioOpts = new AudioOptions
                    {
                        AudioSource       = AudioSourcesEnum.CaptureDevice,
                        OutputDeviceIndex = m_audioOutDeviceIndex,
                        AudioCodecs       = new List <SDPMediaFormatsEnum> {
                            SDPMediaFormatsEnum.PCMU, SDPMediaFormatsEnum.PCMA
                        }
                    };
                }

                VideoOptions videoOpts = new VideoOptions {
                    VideoSource = VideoSourcesEnum.None
                };
                if (hasVideo)
                {
                    videoOpts = new VideoOptions
                    {
                        VideoSource           = VideoSourcesEnum.TestPattern,
                        SourceFile            = RtpAVSession.VIDEO_TESTPATTERN,
                        SourceFramesPerSecond = VIDEO_LIVE_FRAMES_PER_SECOND
                    };
                }

                MediaSession = new RtpAVSession(audioOpts, videoOpts);

                m_userAgent.RemotePutOnHold   += OnRemotePutOnHold;
                m_userAgent.RemoteTookOffHold += OnRemoteTookOffHold;

                bool result = await m_userAgent.Answer(m_pendingIncomingCall, MediaSession);

                m_pendingIncomingCall = null;

                return(result);
            }
        }
Code example #12
        /// <summary>
        /// Gets the audio stream information.
        /// </summary>
        /// <param name="serverId">The server identifier.</param>
        /// <param name="options">The options.</param>
        /// <param name="isOffline">if set to <c>true</c> [is offline].</param>
        /// <param name="apiClient">The API client.</param>
        /// <returns>Task&lt;StreamInfo&gt;.</returns>
        public async Task <StreamInfo> GetAudioStreamInfo(string serverId, AudioOptions options, bool isOffline, IApiClient apiClient)
        {
            var streamBuilder = GetStreamBuilder();

            PlaybackInfoResponse playbackInfo = null;
            string playSessionId = null;

            if (!isOffline)
            {
                playbackInfo = await apiClient.GetPlaybackInfo(new PlaybackInfoRequest
                {
                    Id     = options.ItemId,
                    UserId = apiClient.CurrentUserId,
                    MaxStreamingBitrate = options.MaxBitrate,
                    MediaSourceId       = options.MediaSourceId
                }).ConfigureAwait(false);

                if (playbackInfo.ErrorCode.HasValue)
                {
                    throw new PlaybackException {
                              ErrorCode = playbackInfo.ErrorCode.Value
                    };
                }

                options.MediaSources = playbackInfo.MediaSources;
                playSessionId        = playbackInfo.PlaySessionId;
            }

            var streamInfo = streamBuilder.BuildAudioItem(options);

            EnsureSuccess(streamInfo);

            if (!isOffline)
            {
                var liveMediaSource = await GetLiveStreamInfo(playSessionId, streamInfo.MediaSource, options, apiClient).ConfigureAwait(false);

                if (liveMediaSource != null)
                {
                    options.MediaSources = new List <MediaSourceInfo> {
                        liveMediaSource
                    }.ToArray();
                    streamInfo = GetStreamBuilder().BuildAudioItem(options);
                    EnsureSuccess(streamInfo);
                }
            }

            if (playbackInfo != null)
            {
                //streamInfo.AllMediaSources = playbackInfo.MediaSources.ToList();
                streamInfo.PlaySessionId = playbackInfo.PlaySessionId;
            }

            return(streamInfo);
        }
Code example #13
 //private MenuPause _menuPause;
 //private OptionsPauseMenu _optionsPauseMenu;
 #endregion
 private void Awake()
 {
     InterfaceResources = GetComponent <InterfaceResources>();
     _mainMenu          = GetComponent <MainMenu>();
     _optionsMenu       = GetComponent <OptionsMenu>();
     _videoOptions      = GetComponent <VideoOptions>();
     _gameOptions       = GetComponent <GameOptions>();
     _audioOptions      = GetComponent <AudioOptions>();
     //_menuPause = GetComponent<MenuPause>();
     //_optionsPauseMenu = GetComponent<OptionsPauseMenu>();
 }
Code example #14
File: OptionsMenu.cs Project: Omaranwa/JumpingJax
    // Get subcomponents to prevent messing up in the editor
    private void GetSubcomponents()
    {
        controlsPanel = GetComponentInChildren <HotKeyOptions>(true);
        videoPanel    = GetComponentInChildren <VideoOptions>(true);
        audioPanel    = GetComponentInChildren <AudioOptions>(true);
        miscPanel     = GetComponentInChildren <MiscOptions>(true);

        if (controlsPanel == null || videoPanel == null || audioPanel == null || miscPanel == null)
        {
            Debug.LogError("An options panel was not found");
        }
    }
Code example #15
        public VTankOptions()
        {
            videoOptions    = getDefaultVideoOptions();
            audioOptions    = getDefaultAudioOptions();
            gamePlayOptions = getDefaultGamePlayOptions();
            keyBindings     = getDefaultKeyBindings();

            ServerAddress  = "glacier2a.cis.vtc.edu";
            ServerPort     = "4063";
            DefaultAccount = "";
            MapsFolder     = "maps";
        }
Code example #16
    public void TestGetDefault()
    {
        AudioOptions options = AudioOptions.GetDefault();

        Assert.AreEqual(0, options.FadeInTime);
        Assert.AreEqual(0, options.FadeOutTime);
        Assert.False(options.Looping);
        Assert.True(options.HardTransitionIn);
        Assert.False(options.HasFadeIn);
        Assert.False(options.HasFadeOut);
        Assert.False(options.HasEndHandler);
    }
Code example #17
File: Conductor.cs Project: annabelja/TheiaLens
        /// <summary>
        /// Accesses the local audio track as specified
        /// by the operating system.
        /// MUST NOT BE CALLED FROM THE UI THREAD.
        /// </summary>
        /// <param name="factory"></param>
        /// <returns></returns>
        private IMediaStreamTrack getLocalAudio(IWebRtcFactory factory)
        {
            var audioOptions = new AudioOptions()
            {
                Factory = factory
            };

            // this will throw a very unhelpful exception if called from the UI thread
            var audioTrackSource = AudioTrackSource.Create(audioOptions);

            return(MediaStreamTrack.CreateAudioTrack("LocalAudio", audioTrackSource));
        }
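Since the comment stresses that getLocalAudio must not run on the UI thread, a caller would typically push it onto a worker thread. A hedged sketch, assumed to run inside the same class; the factory variable is assumed to be an already initialised IWebRtcFactory:

    // Sketch only: run the audio-track creation off the UI thread, as required above.
    var localAudioTrack = await Task.Run(() => getLocalAudio(factory));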
Code example #18
        public VTankOptions()
        {
            videoOptions = getDefaultVideoOptions();
            audioOptions = getDefaultAudioOptions();
            gamePlayOptions = getDefaultGamePlayOptions();
            keyBindings = getDefaultKeyBindings();

            ServerAddress = "glacier2a.cis.vtc.edu";
            ServerPort = "4063";
            DefaultAccount = "";
            MapsFolder = "maps";
        }
Code example #19
        public LiveStreamRequest(AudioOptions options)
        {
            MaxStreamingBitrate = options.MaxBitrate;
            ItemId        = options.ItemId;
            DeviceProfile = options.Profile;

            VideoOptions videoOptions = options as VideoOptions;

            if (videoOptions != null)
            {
                AudioStreamIndex    = videoOptions.AudioStreamIndex;
                SubtitleStreamIndex = videoOptions.SubtitleStreamIndex;
            }
        }
Code example #20
 private StreamInfo GetForcedDirectPlayStreamInfo(DlnaProfileType mediaType, AudioOptions options, MediaSourceInfo mediaSource)
 {
     return(new StreamInfo
     {
         ItemId = options.ItemId,
         MediaType = mediaType,
         MediaSource = mediaSource,
         RunTimeTicks = mediaSource.RunTimeTicks,
         Context = options.Context,
         DeviceProfile = options.Profile,
         Container = mediaSource.Container,
         PlayMethod = PlayMethod.DirectPlay
     });
 }
Code example #21
File: LiveStreamRequest.cs Project: zxhcom/jellyfin
        public LiveStreamRequest(AudioOptions options)
        {
            MaxStreamingBitrate = options.MaxBitrate;
            ItemId           = options.ItemId;
            DeviceProfile    = options.Profile;
            MaxAudioChannels = options.MaxAudioChannels;

            DirectPlayProtocols = new MediaProtocol[] { MediaProtocol.Http };

            if (options is VideoOptions videoOptions)
            {
                AudioStreamIndex    = videoOptions.AudioStreamIndex;
                SubtitleStreamIndex = videoOptions.SubtitleStreamIndex;
            }
        }
Code example #22
File: SIPClient.cs Project: mbouchef/sipsorcery
        /// <summary>
        /// Answers an incoming SIP call.
        /// </summary>
        public async Task Answer()
        {
            if (m_pendingIncomingCall == null)
            {
                StatusMessage(this, $"There was no pending call available to answer.");
            }
            else
            {
                var sipRequest = m_pendingIncomingCall.ClientTransaction.TransactionRequest;

                SDP  offerSDP = SDP.ParseSDPDescription(sipRequest.Body);
                bool hasAudio = offerSDP.Media.Any(x => x.Media == SDPMediaTypesEnum.audio);
                bool hasVideo = offerSDP.Media.Any(x => x.Media == SDPMediaTypesEnum.video);

                AudioOptions audioOpts = new AudioOptions {
                    AudioSource = AudioSourcesEnum.None
                };
                if (hasAudio)
                {
                    audioOpts = new AudioOptions {
                        AudioSource = AudioSourcesEnum.Microphone
                    };
                }

                VideoOptions videoOpts = new VideoOptions {
                    VideoSource = VideoSourcesEnum.None
                };
                if (hasVideo)
                {
                    videoOpts = new VideoOptions
                    {
                        VideoSource           = VideoSourcesEnum.TestPattern,
                        SourceFile            = RtpAVSession.VIDEO_TESTPATTERN,
                        SourceFramesPerSecond = VIDEO_LIVE_FRAMES_PER_SECOND
                    };
                }

                MediaSession = new RtpAVSession(sipRequest.RemoteSIPEndPoint.Address.AddressFamily, audioOpts, videoOpts);

                m_userAgent.RemotePutOnHold   += OnRemotePutOnHold;
                m_userAgent.RemoteTookOffHold += OnRemoteTookOffHold;

                await m_userAgent.Answer(m_pendingIncomingCall, MediaSession);

                m_pendingIncomingCall = null;
            }
        }
Code example #23
        /// <summary>
        /// Puts the remote call party on hold.
        /// </summary>
        public async void PutOnHold()
        {
            bool hasAudio = MediaSession.HasAudio;
            bool hasVideo = MediaSession.HasVideo;

            m_userAgent.PutOnHold();

            AudioOptions audioOnHold = (!hasAudio) ? null :
                                       new AudioOptions
            {
                AudioSource       = AudioSourcesEnum.Music,
                OutputDeviceIndex = m_audioOutDeviceIndex,
                SourceFiles       = new Dictionary <SDPMediaFormatsEnum, string>
                {
                    { SDPMediaFormatsEnum.PCMU, MUSIC_FILE_PCMU },
                    { SDPMediaFormatsEnum.PCMA, MUSIC_FILE_PCMA }
                }
            };
            VideoOptions videoOnHold = null;

            if (hasVideo)
            {
                //if (bmpSource != null)
                //{
                //    videoOnHold = new VideoOptions
                //    {
                //        VideoSource = VideoSourcesEnum.ExternalBitmap,
                //        BitmapSource = bmpSource
                //    };
                //}
                //else
                //{
                videoOnHold = new VideoOptions
                {
                    VideoSource           = VideoSourcesEnum.TestPattern,
                    SourceFile            = RtpAVSession.VIDEO_ONHOLD_TESTPATTERN,
                    SourceFramesPerSecond = VIDEO_ONHOLD_FRAMES_PER_SECOND
                };
                //}
            }

            await MediaSession.SetSources(audioOnHold, videoOnHold);

            // At this point we could stop listening to the remote party's RTP and play something
            // else and also stop sending our microphone output and play some music.
            StatusMessage(this, "Local party put on hold");
        }
Code example #24
    private void Start()
    {
        InterfaceResources = GetComponent <InterfaceResources>();
        _mainMenu          = GetComponent <MainMenu>();
        _optionsMenu       = GetComponent <OptionsMenu>();
        //_videoOptions = GetComponent<VideoOptions>();
        //_gameOptions = GetComponent<GameOptions>();
        _audioOptions     = GetComponent <AudioOptions>();
        _menuPause        = GetComponent <PauseMenu>();
        _optionsPauseMenu = GetComponent <OptionsPauseMenu>();


        if (_mainMenu)
        {
            Execute(InterfaceObject.MainMenu);
        }
    }
Code example #25
    public void TestInit_ClampVolumeMax()
    {
        float  fadeInTime       = 0.3f;
        float  fadeOutTime      = 0.0f;
        float  volume           = 100f;
        bool   shouldLoop       = true;
        bool   hardTransitionIn = false;
        Action onEndHandler     = delegate { };

        AudioOptions options = new AudioOptions(
            fadeInTime,
            fadeOutTime,
            volume,
            shouldLoop,
            hardTransitionIn,
            onEndHandler);

        Assert.AreEqual(1f, options.Volume);
    }
Code example #26
File: SIPClient.cs Project: mbouchef/sipsorcery
        /// <summary>
        /// Takes the remote call party off hold.
        /// </summary>
        public async void TakeOffHold()
        {
            await m_userAgent.TakeOffHold();

            AudioOptions audioOnHold = (!MediaSession.HasAudio) ? null : new AudioOptions {
                AudioSource = AudioSourcesEnum.Microphone
            };
            VideoOptions videoOnHold = (!MediaSession.HasVideo) ? null : new VideoOptions
            {
                VideoSource           = VideoSourcesEnum.TestPattern,
                SourceFile            = RtpAVSession.VIDEO_TESTPATTERN,
                SourceFramesPerSecond = VIDEO_LIVE_FRAMES_PER_SECOND
            };
            await MediaSession.SetSources(audioOnHold, videoOnHold);

            // At this point we should reverse whatever changes we made to the media stream when we
            // put the remote call party on hold.
            StatusMessage(this, "Local party taken off hold");
        }
Code example #27
    public void Play(string sourceName, AudioOptions options = null)
    {
        AudioSource source;

        if (!sources.TryGetValue(sourceName, out source))
        {
            Debug.LogErrorFormat("Source {0} not found. Unable to stop clip.", sourceName);
            return;
        }

        AudioClip clip = source.clip;

        if (clip == null)
        {
            Debug.LogErrorFormat("Clip {0} not found.", sourceName);
            return;
        }

        Play(sourceName, clip, options);
    }
Code example #28
File: SIPClient.cs Project: raschle/sipsorcery
        /// <summary>
        /// Puts the remote call party on hold.
        /// </summary>
        public async void PutOnHold()
        {
            await m_userAgent.PutOnHold();

            if (MediaSession is RtpAVSession)
            {
                AudioOptions audioOnHold = (!MediaSession.HasAudio) ? null : new AudioOptions {
                    AudioSource = AudioSourcesEnum.Music
                };
                VideoOptions videoOnHold = (!MediaSession.HasVideo) ? null : new VideoOptions
                {
                    VideoSource           = VideoSourcesEnum.TestPattern,
                    SourceFile            = RtpAVSession.VIDEO_ONHOLD_TESTPATTERN,
                    SourceFramesPerSecond = VIDEO_ONHOLD_FRAMES_PER_SECOND
                };
                await(MediaSession as RtpAVSession).SetSources(audioOnHold, videoOnHold);
            }

            // At this point we could stop listening to the remote party's RTP and play something
            // else and also stop sending our microphone output and play some music.
            StatusMessage(this, "Local party put on hold");
        }
Code example #29
    public void Play(string sourceName, AudioClip clip, AudioOptions options = null)
    {
        AudioSource source;

        if (!sources.TryGetValue(sourceName, out source))
        {
            Debug.LogErrorFormat("Source {0} not found. Unable to play clip.", sourceName);
            return;
        }

        if (options == null)
        {
            options = AudioOptions.GetDefault();
        }

        if (clip == null)
        {
            Debug.LogErrorFormat("Clip {0} not found.", sourceName);
            return;
        }
        clipOptions[getClipIdentifier(source, clip)] = options;
        handleClipTransition(source, clip, options);
    }
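The Play overloads combine naturally with the constructor shown in code example #4; a hedged usage sketch, where the manager instance, source name, clip reference, and chosen values are assumptions:

    // Sketch: play a looping clip with a 1.5 s fade-in and a 0.5 s fade-out on the
    // pre-registered "music" source. Arguments follow the constructor order used in
    // the tests: fade-in, fade-out, volume, loop, hard transition in, end handler.
    AudioOptions musicOptions = new AudioOptions(1.5f, 0.5f, 0.8f, true, false, delegate { });

    audioManager.Play("music", musicClip, musicOptions);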
Code example #30
    public void TestOnEnd()
    {
        float  fadeInTime       = 0.3f;
        float  fadeOutTime      = 0.0f;
        float  volume           = 0.85f;
        bool   shouldLoop       = true;
        bool   hardTransitionIn = false;
        bool   handlerRun       = false;
        Action onEndHandler     = delegate
        {
            handlerRun = true;
        };
        AudioOptions options = new AudioOptions(
            fadeInTime,
            fadeOutTime,
            volume,
            shouldLoop,
            hardTransitionIn,
            onEndHandler);

        options.OnEnd();

        Assert.True(handlerRun);
    }
Code example #31
        void SetupAudioTrack()
        {
            Logger.Debug("PeerChannel", "SetupAudioTrack");

            var opts = new AudioOptions
            {
                Factory = factory
            };

            Logger.Debug("PeerChannel", "create audio source");

            var audioSource = AudioTrackSource.Create(opts);

            Logger.Debug("PeerChannel", "create audio track");

            var track = MediaStreamTrack.CreateAudioTrack(factory,
                                                          mediaOption.AudioTrackId, audioSource);

            Logger.Debug("PeerChannel", "add audio track");

            Conn.AddTrack(track);

            OnAddLocalAudioTrack?.Invoke(track);
        }
Code example #32
 public void ConfigureOptionsForSFX()
 => Options = AudioOptions.DefaultSFX;