예제 #1
0
파일: Hold.cs 프로젝트: mujiansu/Lync
        public void Run()
        {
            // Build the AudioVideoFlow through the shared helper; this sample
            // supplies no flow-configuration-requested handler.
            var helper = new AudioVideoFlowHelper();

            _audioVideoFlow = helper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Raised whenever renegotiation changes the flow's configuration.
            _audioVideoFlow.ConfigurationChanged += audioVideoFlow_ConfigurationChanged;

            // Attach a media player to the flow and loop its source continuously.
            helper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Report the initial hold state.
            Console.WriteLine("AudioVideoFlow's HoldStatus: " + _audioVideoFlow.HoldStatus);

            Thread.Sleep(10000);

            // Place both endpoints on hold and block until the async operation
            // signals completion via the wait handle.
            _audioVideoFlow.BeginHold(HoldType.BothEndpoints, audioVideoFlow_HoldCompleted, _audioVideoFlow);
            _waitForHoldRetrieveCompleted.WaitOne();

            // Report the hold state while on hold.
            Console.WriteLine("AudioVideoFlow's HoldStatus: " + _audioVideoFlow.HoldStatus);

            Thread.Sleep(10000);

            // Take the flow off hold and block until the retrieve completes.
            _audioVideoFlow.BeginRetrieve(audioVideoFlow_RetrieveCompleted, _audioVideoFlow);
            _waitForHoldRetrieveCompleted.WaitOne();

            // Report the hold state after the retrieve.
            Console.WriteLine("AudioVideoFlow's HoldStatus: " + _audioVideoFlow.HoldStatus);

            Thread.Sleep(10000);

            // Tear down the platform, then block until shutdown has finished.
            ShutdownPlatform();
            _waitForShutdownEventCompleted.WaitOne();
        }
예제 #2
0
        public void Run()
        {
            // Build the AudioVideoFlow through the shared helper; this sample
            // supplies no flow-configuration-requested handler.
            var helper = new AudioVideoFlowHelper();

            _audioVideoFlow = helper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Raised whenever renegotiation changes the flow's configuration.
            _audioVideoFlow.ConfigurationChanged += audioVideoFlow_ConfigurationChanged;

            // Attach a media player to the flow and loop its source continuously.
            helper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Show the initial mute state of the audio channel.
            Console.WriteLine("AudioVideoFlow's direction muted: " + _audioVideoFlow.Audio.DirectionMuted);

            Thread.Sleep(10000);

            // Mute audio in both directions and show the resulting state.
            _audioVideoFlow.Audio.Mute(MuteDirection.SendReceive);

            Console.WriteLine("AudioVideoFlow's direction muted: " + _audioVideoFlow.Audio.DirectionMuted);

            Thread.Sleep(10000);

            // Unmute both directions and show the resulting state.
            _audioVideoFlow.Audio.Unmute(MuteDirection.SendReceive);

            Console.WriteLine("AudioVideoFlow's direction muted: " + _audioVideoFlow.Audio.DirectionMuted);

            Thread.Sleep(10000);

            // Tear down the platform, then block until shutdown has finished.
            ShutdownPlatform();
            _waitForShutdownEventCompleted.WaitOne();
        }
예제 #3
0
        public void Run()
        {
            // Build the AudioVideoFlow through the shared helper; this sample
            // supplies no flow-configuration-requested handler.
            var helper = new AudioVideoFlowHelper();

            _audioVideoFlow = helper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Raised whenever renegotiation changes the flow's configuration.
            _audioVideoFlow.ConfigurationChanged += audioVideoFlow_ConfigurationChanged;

            // Attach a media player to the flow and loop its source continuously.
            helper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Show the sampling rate currently negotiated for the mono channel.
            Console.WriteLine("AudioVideoFlow using sampling rate: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].SamplingRate);

            Thread.Sleep(10000);

            Console.WriteLine("Call ApplyChanges changing sampling rate from 8Khz or 16Khz to only 8Khz.");

            // Build a template from the current flow and restrict its mono
            // audio channel to 8 kHz only.
            var template = new AudioVideoFlowTemplate(_audioVideoFlow);
            AudioChannelTemplate monoChannel = template.Audio.GetChannels()[ChannelLabel.AudioMono];

            monoChannel.SamplingRate = AudioSamplingRate.EightKhz;

            // Apply the sampling-rate change and block until renegotiation
            // completes (original comment mentioned direction; this changes
            // only the sampling rate).
            _audioVideoFlow.BeginApplyChanges(template, audioVideoFlow_ApplyChangesCompleted, _audioVideoFlow);
            _waitForApplyChangesCompleted.WaitOne();

            Console.WriteLine("AudioVideoFlow using sampling rate: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].SamplingRate);

            Thread.Sleep(10000);

            // Tear down the platform, then block until shutdown has finished.
            ShutdownPlatform();
            _waitForShutdownEventCompleted.WaitOne();
        }
예제 #4
0
파일: Initialize.cs 프로젝트: mujiansu/Lync
        public void Run()
        {
            // Create the AudioVideoFlow; unlike the other samples, this one
            // also supplies a flow-configuration-requested handler.
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                audioVideoCall_FlowConfigurationRequested,
                audioVideoFlow_StateChanged);

            // When something happens involving negotiation this event will be triggered.
            _audioVideoFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(audioVideoFlow_ConfigurationChanged);

            // Attaches a player with a source and starts it in constant loop.
            audioVideoFlowHelper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Check allowed direction before the change.
            Console.WriteLine("AudioVideoFlow audio channel direction: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].Direction);

            Thread.Sleep(10000);

            // Fixed typo in the console message: "direcion" -> "direction".
            Console.WriteLine("Call ApplyChanges changing audio direction to send and receive.");

            AudioVideoFlowTemplate template             = new AudioVideoFlowTemplate(_audioVideoFlow);
            AudioChannelTemplate   audioChannelTemplate = template.Audio.GetChannels()[ChannelLabel.AudioMono];

            audioChannelTemplate.AllowedDirection = MediaChannelDirection.SendReceive;

            // Change allowed direction to SendReceive and block until the
            // renegotiation completes. (The original comment said "SendOnly",
            // contradicting the assignment above.)
            _audioVideoFlow.BeginApplyChanges(template, audioVideoFlow_ApplyChangesCompleted, _audioVideoFlow);
            _waitForApplyChangesCompleted.WaitOne();

            // Check allowed direction after the change.
            Console.WriteLine("AudioVideoFlow audio channel direction: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].Direction);

            Thread.Sleep(5000);

            // Shutdown the platform.
            ShutdownPlatform();

            // Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }