Code Example #1
 private void CustomAudioClicked(object sender, EventArgs e)
 {
     // Detach the current source, then attach a custom AudioMediaStreamSource instead.
     mediaElement.Source = null;
     MediaStreamSource source = new AudioMediaStreamSource(440);
     mediaElement.SetSource(source);
     sourceTextBlock.Text = "custom audio";
 }
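The AudioMediaStreamSource used above comes from the AlantaMedia project and its implementation is not shown on this page. As a rough, hypothetical sketch of what a tone-generating MediaStreamSource can look like in Silverlight, the class below synthesizes a mono 16-bit PCM sine wave; the class name, buffer size, and amplitude are illustrative assumptions, and only the System.Windows.Media types are standard API.

 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
 using System.Windows.Media;

 // Hypothetical example, not the AlantaMedia implementation.
 public class SineToneStreamSource : MediaStreamSource
 {
     private const int SampleRate = 44100;
     private const int BitsPerSample = 16;
     private const int Channels = 1;
     private const int SamplesPerBuffer = 4410; // roughly 100 ms of audio per GetSampleAsync call

     private readonly double _frequency;
     private readonly Dictionary<MediaSampleAttributeKeys, string> _emptyAttributes =
         new Dictionary<MediaSampleAttributeKeys, string>();
     private MediaStreamDescription _audioDescription;
     private long _timestamp;   // playback position in 100-nanosecond units
     private long _sampleIndex;

     public SineToneStreamSource(double frequency)
     {
         _frequency = frequency;
     }

     protected override void OpenMediaAsync()
     {
         // CodecPrivateData is a little-endian hex dump of a PCM WAVEFORMATEX structure.
         const int blockAlign = Channels * BitsPerSample / 8;
         string waveFormatEx =
             ToHex(1, 2) + ToHex(Channels, 2) + ToHex(SampleRate, 4) +
             ToHex(SampleRate * blockAlign, 4) + ToHex(blockAlign, 2) +
             ToHex(BitsPerSample, 2) + ToHex(0, 2);

         _audioDescription = new MediaStreamDescription(
             MediaStreamType.Audio,
             new Dictionary<MediaStreamAttributeKeys, string>
             {
                 { MediaStreamAttributeKeys.CodecPrivateData, waveFormatEx }
             });

         var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
         {
             { MediaSourceAttributesKeys.CanSeek, false.ToString() },
             { MediaSourceAttributesKeys.Duration, "0" } // live-style source, no fixed duration
         };
         ReportOpenMediaCompleted(sourceAttributes, new[] { _audioDescription });
     }

     protected override void GetSampleAsync(MediaStreamType mediaStreamType)
     {
         // Synthesize one buffer of 16-bit samples and hand it to the playback pipeline.
         var buffer = new byte[SamplesPerBuffer * 2];
         for (int i = 0; i < SamplesPerBuffer; i++)
         {
             var sample = (short)(short.MaxValue * 0.25 *
                 Math.Sin(2 * Math.PI * _frequency * _sampleIndex++ / SampleRate));
             buffer[i * 2]     = (byte)(sample & 0xFF);
             buffer[i * 2 + 1] = (byte)((sample >> 8) & 0xFF);
         }
         var stream = new MemoryStream(buffer);
         ReportGetSampleCompleted(new MediaStreamSample(
             _audioDescription, stream, 0, buffer.Length, _timestamp, _emptyAttributes));
         _timestamp += SamplesPerBuffer * 10000000L / SampleRate; // advance by the buffer duration
     }

     protected override void SeekAsync(long seekToTime)
     {
         ReportSeekCompleted(seekToTime);
     }

     protected override void CloseMedia() { }
     protected override void GetDiagnosticAsync(MediaStreamSourceDiagnosticKind diagnosticKind) { }
     protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription) { }

     private static string ToHex(int value, int byteCount)
     {
         // Encode an integer as a little-endian hex string, one byte at a time.
         var sb = new StringBuilder();
         for (int i = 0; i < byteCount; i++)
         {
             sb.Append(((value >> (8 * i)) & 0xFF).ToString("X2"));
         }
         return sb.ToString();
     }
 }

With a class like this, the handler above works unchanged: construct the source with the desired frequency and hand it to MediaElement.SetSource.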
Code Example #2
File: PlayerBase.cs Project: forestrf/AlantaMedia
 public PlayerBase(MediaElement mediaElement, AudioMediaStreamSource audioStreamSource, AudioVisualizer audioVisualizer)
 {
     mMediaElement      = mediaElement;
     mAudioStreamSource = audioStreamSource;
     Frames             = new List<byte[]>();
     mAudioVisualizer   = audioVisualizer;
     VisualizationRate  = 1;
 }
Code Example #3
 private void InitializeMedia()
 {
     // Start playing whatever we can get from the media controller
     AudioStreamSource = new AudioMediaStreamSource(_mediaController, _mediaController.PlayedAudioFormat);
     if (!IsPlaying)
     {
         _mediaElement.SetSource(AudioStreamSource);
         _mediaElement.Play();
     }
 }
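InitializeMedia has no teardown counterpart on this page. A minimal, hypothetical ShutdownMedia (not part of the original project) could stop playback and detach the stream source using only standard MediaElement calls:

 private void ShutdownMedia()
 {
     // Hypothetical counterpart to InitializeMedia: stop playback and detach the
     // stream source so the MediaElement releases the AudioMediaStreamSource.
     if (_mediaElement != null)
     {
         _mediaElement.Stop();
         _mediaElement.Source = null;
     }
     AudioStreamSource = null;
 }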
Code Example #4
        protected override PlayerAec GetPlayer()
        {
            var mediaElement      = new MediaElement();
            var audioStreamSource = new AudioMediaStreamSource(null, AudioFormat.Default);

            mediaElement.SetSource(audioStreamSource);
            var player = new PlayerAec(mediaElement, audioStreamSource, mSourceAudioVisualizer, mEchoCancelFilter);

            audioStreamSource.AudioController = player;
            audioStreamSource.InstanceName    = "ForTestRunnerLive";
            return player;
        }
Code Example #5
        // Executes when the user navigates to this page.
        protected override void OnNavigatedTo(NavigationEventArgs e)
        {
            btnSaveSpeakers.IsEnabled = false;
            btnSaveSource.IsEnabled   = false;
            btnPlaySource.IsEnabled   = false;
            btnPlaySpeakers.IsEnabled = false;
            _sourceFrames             = new List<byte[]>();
            InitializeCaptureSource();

            _mediaElement      = new MediaElement();
            _audioStreamSource = new AudioMediaStreamSource(null, AudioFormat.Default);
            _mediaElement.SetSource(_audioStreamSource);
            _player = new PlayerBase(_mediaElement, _audioStreamSource, sourceAudioVisualizer);
        }
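This example wires the page up in OnNavigatedTo, but no matching OnNavigatedFrom is shown. A hypothetical sketch that releases the playback pipeline when the user leaves the page might look like this; the field names follow the example above and the teardown steps are assumptions:

        // Hypothetical sketch, not from the original project.
        protected override void OnNavigatedFrom(NavigationEventArgs e)
        {
            if (_mediaElement != null)
            {
                _mediaElement.Stop();
                _mediaElement.Source = null;   // detach the AudioMediaStreamSource
            }
            _player = null;
            base.OnNavigatedFrom(e);
        }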
Code Example #6
 public PlayerAec(MediaElement mediaElement, AudioMediaStreamSource audioMediaStreamSource, AudioVisualizer audioVisualizer, EchoCancelFilter echoCancelFilter) :
     base(mediaElement, audioMediaStreamSource, audioVisualizer)
 {
     mEchoCancelFilter = echoCancelFilter;
 }
Code Example #7
        public void StartTimingTest()
        {
            // MessageBox.Show("Currently selected context = " + CurrentAudioContext.Description);
            Status             = "Executing timing test for context '" + CurrentAudioContext.Description + "'";
            _mediaElement      = new MediaElement();
            _audioStreamSource = new AudioMediaStreamSource(null, AudioFormat.Default);
            _captureSource.VideoCaptureDevice = null;

            // Make sure we can get at the devices.
            if (_captureSource.AudioCaptureDevice == null)
            {
                throw new InvalidOperationException("No audio capture device was found");
            }
            if (_captureSource.AudioCaptureDevice.DesiredFormat == null)
            {
                throw new InvalidOperationException("No suitable audio format was found");
            }
            if (!CaptureDeviceConfiguration.AllowedDeviceAccess && !CaptureDeviceConfiguration.RequestDeviceAccess())
            {
                throw new InvalidOperationException("Device access not granted.");
            }

            // Create the audio sink.
            MediaConfig.Default.LocalSsrcId = 1000;
            MediaStatistics = new TimingMediaStatistics();
            var mediaEnvironment = new TestMediaEnvironment();

            // Create the media controller
            _mediaConnection = new LoopbackMediaConnection(MediaConfig.Default.LocalSsrcId);
            var vqc = new VideoQualityController(MediaConfig.Default.LocalSsrcId);

            _mediaController = new MediaController(MediaConfig.Default, AudioFormat.Default, MediaStatistics, mediaEnvironment, _mediaConnection, vqc);

            // Create the audio sink to grab data from the microphone and send it to the media controller.
            _audioSink = new TimingAudioSinkAdapter(CurrentAudioContext, _captureSource, _mediaController, MediaConfig.Default, new TestMediaEnvironment(), CurrentAudioContext.AudioFormat);
            _audioSink.CaptureSuccessful += _audioSink_CaptureSuccessful;

            // Create the media stream source to play data from the media controller
            _audioStreamSource.AudioController = _mediaController;
            _mediaElement.SetSource(_audioStreamSource);

            // Connect.
            _mediaController.Connect("test", ex => Deployment.Current.Dispatcher.BeginInvoke(() =>
            {
                if (ex != null)
                {
                    StopTimingTest();
                    MessageBox.Show(ex.ToString());
                }
                else
                {
                    ClientLogger.Debug("TimingViewModel connected to MediaController");
                }
            }));
            _mediaController.RegisterRemoteSession(1001);

            // Start capturing (which should trigger the audio sink).
            _captureSource.Start();
            if (_captureSource.State != CaptureState.Started)
            {
                throw new InvalidOperationException("Unable to capture microphone");
            }

            // Start playing.
            _mediaElement.Play();
            ClientLogger.Debug("CaptureSource initialized; captureSource.State={0}; captureSource.AudioCaptureDevice={1}; mediaElement.CurrentState={2}; ",
                               _captureSource.State, _captureSource.AudioCaptureDevice.FriendlyName, _mediaElement.CurrentState);
        }
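StartTimingTest calls StopTimingTest when the connection attempt fails, but that method's body is not included on this page. The following is a hypothetical sketch using only the Silverlight calls already seen above (CaptureSource.Stop, MediaElement.Stop); how the MediaController and connection are torn down is omitted because their API is not shown here:

        // Hypothetical sketch, not the project's actual implementation.
        public void StopTimingTest()
        {
            if (_captureSource != null && _captureSource.State == CaptureState.Started)
            {
                _captureSource.Stop();          // stop pulling data from the microphone
            }
            if (_mediaElement != null)
            {
                _mediaElement.Stop();           // stop playback
                _mediaElement.Source = null;    // detach the media stream source
            }
            Status = "Timing test stopped";
        }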