/// <summary>
/// Verifies that ChunkHelper can pull a chunk off a frame-block queue and parse it
/// back into frame blocks whose count reconciles with what was consumed from the queue.
/// </summary>
public void ChunkHelperTest()
{
    Queue<FrameBlock> queue = GetFrameBlockQueue();
    int initialLength = queue.Count;

    var contextFactory = new JpegFrameContextFactory(VideoConstants.VideoBlockSize, VideoConstants.VideoBlockSize);
    var chunkPool = new ObjectPool<ByteStream>(() => new ByteStream(VideoConstants.MaxPayloadSize), bs => bs.Reset());
    var blockPool = new ObjectPool<FrameBlock>(() => new FrameBlock(contextFactory));
    var qualityController = new VideoQualityController(1);
    qualityController.RemoteSessions = _remoteSessions;
    var chunkHelper = new ChunkHelper(VideoConstants.MaxPayloadSize, blockPool, qualityController);

    // Pulling a chunk must succeed and must consume at least one block from the queue.
    var chunkBuffer = chunkPool.GetNext();
    Assert.IsTrue(chunkHelper.GetNextChunkFromQueue(queue, chunkBuffer));
    Assert.IsTrue(queue.Count < initialLength);

    // Parsing the chunk must yield exactly the blocks that were consumed.
    var parsedBlocks = chunkHelper.ParseChunk(chunkBuffer, 1);
    Assert.AreEqual(queue.Count + parsedBlocks.Length, initialLength);
    Assert.IsTrue(parsedBlocks.Length > 0);

    // NOTE(review): the fixture apparently encodes blocks with BlockX == BlockY and a
    // payload of (BlockX + 1) bytes — confirm against GetFrameBlockQueue().
    foreach (var parsedBlock in parsedBlocks)
    {
        Assert.AreEqual(parsedBlock.BlockX, parsedBlock.BlockY);
        Assert.AreEqual(parsedBlock.BlockX + 1, parsedBlock.EncodedStream.Length);
    }
}
/// <summary>
/// Runs the JPEG-diff codec through 100 encode/decode cycles and records timing
/// via PerformanceMonitor. This is a performance smoke test; it makes no assertions.
/// </summary>
public void zzCodecPerformanceTest()
{
    var perfMonitor = new PerformanceMonitor("Encode/Decode", 1);
    var qualityController = new VideoQualityController(1);
    qualityController.RemoteSessions = _remoteSessions;
    var codec = new JpegDiffVideoCodec(qualityController);
    codec.Initialize(height, width, VideoConstants.MaxPayloadSize);
    var chunkPool = new ObjectPool<ByteStream>(() => new ByteStream(VideoConstants.MaxPayloadSize), bs => bs.Reset());

    perfMonitor.Start();
    const int iterations = 100;
    for (int iteration = 0; iteration < iterations; iteration++)
    {
        byte[] frame = GetRgba(iteration);
        codec.EncodeFrame(frame, 0);

        // Drain every chunk produced for this frame, decoding each one as it appears.
        var chunkBuffer = chunkPool.GetNext();
        bool hasMoreChunks = true;
        while (hasMoreChunks)
        {
            if (codec.GetNextChunk(chunkBuffer, out hasMoreChunks))
            {
                codec.DecodeChunk(chunkBuffer, 2);
            }
        }
        chunkPool.Recycle(chunkBuffer);
        codec.GetNextFrame();
    }
    perfMonitor.Stop();
    ClientLogger.Debug("Finished JpegEncoderDecoder performance test.");
}
/// <summary>
/// Verifies that a controller session steps its video quality down one level
/// (Medium -> Low -> Fallback) each time MaxGlitches glitches are logged after
/// the quality-hold interval has elapsed.
/// </summary>
public void LogNetworkGlitchTest_Controller()
{
    var sessions = new Dictionary<ushort, VideoThreadData>();
    sessions[1] = null;
    var controller = new VideoQualityController(1, sessions);

    // This has the side-effect of setting IsController() to true.
    controller.LogReceivedVideoQuality(2, VideoQuality.NotSpecified, VideoQuality.Medium);
    Assert.AreEqual(VideoQuality.Medium, controller.LocalVideoQuality);

    // First glitch burst after the hold interval: Medium -> Low.
    controller.Now += controller.QualityHoldInterval + TimeSpan.FromSeconds(1);
    for (int glitch = 0; glitch < VideoQualityController.MaxGlitches; glitch++)
    {
        controller.LogGlitch(1);
    }
    Assert.AreEqual(VideoQuality.Low, controller.LocalVideoQuality);
    Assert.AreEqual(VideoQuality.Low, controller.ProposedVideoQuality);
    Assert.AreEqual(VideoQuality.Low, controller.CommandedVideoQuality);

    // Second glitch burst after another hold interval: Low -> Fallback.
    controller.Now += controller.QualityHoldInterval + TimeSpan.FromSeconds(1);
    for (int glitch = 0; glitch < VideoQualityController.MaxGlitches; glitch++)
    {
        controller.LogGlitch(1);
    }
    Assert.AreEqual(VideoQuality.Fallback, controller.LocalVideoQuality);
    Assert.AreEqual(VideoQuality.Fallback, controller.ProposedVideoQuality);
    Assert.AreEqual(VideoQuality.Fallback, controller.CommandedVideoQuality);
}
/// <summary>
/// Writes ten frames in order and ten in reverse order into the audio jitter queue,
/// then verifies that all twenty frames read back in sequence order.
/// </summary>
public void JitterOutOfOrderTest()
{
    var qualityController = new VideoQualityController(1);
    qualityController.RemoteSessions = _remoteSessions;
    var jitterQueue = new AudioJitterQueue(new CodecFactory(AudioFormat.Default), qualityController);

    // Sequence numbers 0-9 arrive in order.
    for (ushort sequence = 0; sequence < 10; sequence++)
    {
        short[] samples = GetAudioSamples(320, (short)sequence);
        jitterQueue.WriteSamples(samples, 0, samples.Length, sequence, AudioCodecType.Raw, false);
    }

    // Sequence numbers 19 down to 10 arrive in reverse order.
    for (ushort sequence = 19; sequence >= 10; sequence--)
    {
        short[] samples = GetAudioSamples(320, (short)sequence);
        jitterQueue.WriteSamples(samples, 0, samples.Length, sequence, AudioCodecType.Raw, false);
    }

    // The queue should hand all 20 frames back in sequence order; each frame's first
    // sample carries its sequence number (written by GetAudioSamples).
    var playbackBuffer = new short[320];
    for (int expected = 0; expected < 20; expected++)
    {
        jitterQueue.ReadSamples(playbackBuffer);
        Assert.AreEqual(expected, playbackBuffer[0]);
    }
}
/// <summary>
/// Checks the derived settings (accept-frames-per-second, interleave factor,
/// full-frame interval) for each LocalVideoQuality level.
/// </summary>
public void VideoQualityPropertyTest()
{
    var sessions = new Dictionary<ushort, VideoThreadData>();
    sessions[1] = null;
    var controller = new VideoQualityController(ushort.MaxValue, sessions);

    // One delegate keeps the per-level expectations in a single table-like list below.
    Action<VideoQuality, int, int, int> assertQuality = (quality, acceptFps, interleave, fullFrameInterval) =>
    {
        controller.LocalVideoQuality = quality;
        Assert.AreEqual(quality, controller.LocalVideoQuality);
        Assert.AreEqual(acceptFps, controller.AcceptFramesPerSecond);
        Assert.AreEqual(interleave, controller.InterleaveFactor);
        Assert.AreEqual(fullFrameInterval, controller.FullFrameInterval);
    };

    assertQuality(VideoQuality.Fallback, 1, 1, 8);
    assertQuality(VideoQuality.Low, 1, 1, 4);
    assertQuality(VideoQuality.Medium, 5, 1, 20);
    assertQuality(VideoQuality.High, 10, 1, 20);
}
/// <summary>
/// Round-trips an RTP packet: wraps a video chunk payload in a packet, rebuilds it
/// from the raw bytes, and confirms sequence number, payload type and length survive.
/// </summary>
public void RtpPacketEncodingTest()
{
    var outPacket = new RtpPacketData();
    var packetListPool = new ObjectPool<List<RtpPacketData>>(() => new List<RtpPacketData>());
    var packetPool = new ObjectPool<RtpPacketData>(() => new RtpPacketData());
    var contextFactory = new JpegFrameContextFactory(VideoConstants.VideoBlockSize, VideoConstants.VideoBlockSize);
    var blockPool = new ObjectPool<FrameBlock>(() => new FrameBlock(contextFactory));

    outPacket.SequenceNumber = 255;
    outPacket.PayloadType = RtpPayloadType.Audio;

    // Use a real video chunk as the payload.
    Queue<FrameBlock> queue = GetFrameBlockQueue();
    var qualityController = new VideoQualityController(1);
    qualityController.RemoteSessions = _remoteSessions;
    var chunkHelper = new ChunkHelper(VideoConstants.MaxPayloadSize, blockPool, qualityController);
    var chunk = new ByteStream(VideoConstants.MaxPayloadSize);
    chunkHelper.GetNextChunkFromQueue(queue, chunk);
    outPacket.Payload = chunk.Data;
    outPacket.PayloadLength = (ushort)chunk.DataLength;

    // Serialize and reparse, then compare the header fields.
    var data = new ByteStream(outPacket.BuildPacket());
    RtpPacketData inPacket = RtpPacketData.GetPacketsFromData(data, packetListPool, packetPool)[0];
    Assert.AreEqual(outPacket.SequenceNumber, inPacket.SequenceNumber);
    Assert.AreEqual(RtpPayloadType.Audio, inPacket.PayloadType);
    Assert.AreEqual(outPacket.PayloadLength, inPacket.PayloadLength);
}
/// <summary>
/// Confirms a freshly constructed controller defaults to Medium local quality
/// with no commanded quality.
/// </summary>
public void ConstructorTest()
{
    var sessions = new Dictionary<ushort, VideoThreadData>();
    sessions[1] = null;

    var controller = new VideoQualityController(ushort.MaxValue, sessions);

    Assert.AreEqual(VideoQuality.Medium, controller.LocalVideoQuality);
    Assert.AreEqual(VideoQuality.NotSpecified, controller.CommandedVideoQuality);
}
/// <summary>
/// A lower proposed quality from a remote session should drag both the local
/// and the commanded quality down to that level on the controller.
/// </summary>
public void LogReceivedVideoQualityTest_LowerProposedVideoQuality_Controller()
{
    var sessions = new Dictionary<ushort, VideoThreadData>();
    sessions[1] = null;
    var controller = new VideoQualityController(1, sessions);

    controller.LogReceivedVideoQuality(2, VideoQuality.NotSpecified, VideoQuality.Low);

    Assert.AreEqual(VideoQuality.Low, controller.LocalVideoQuality);
    Assert.AreEqual(VideoQuality.Low, controller.CommandedVideoQuality);
}
/// <summary>
/// A non-controller session must ignore received quality proposals: local quality
/// stays at its Medium default and no quality is commanded.
/// NOTE(review): SsrcId 2 alongside remote session 1 appears to make this instance
/// a non-controller — confirm against VideoQualityController's controller election.
/// </summary>
public void IsNotControllerTest()
{
    var sessions = new Dictionary<ushort, VideoThreadData>();
    sessions[1] = null;
    var controller = new VideoQualityController(2, sessions);

    controller.LogReceivedVideoQuality(1, VideoQuality.NotSpecified, VideoQuality.High);

    Assert.AreEqual(VideoQuality.Medium, controller.LocalVideoQuality);
    Assert.AreEqual(VideoQuality.NotSpecified, controller.CommandedVideoQuality);
}
/// <summary>
/// With many sessions proposing High, the controller raises quality to High;
/// after the update interval, a single Low proposal wins (lowest proposal rules).
/// </summary>
public void LogReceivedVideoQualityTest_SelectLowestProposedVideoQuality_Controller()
{
    var sessions = new Dictionary<ushort, VideoThreadData>();
    sessions[1] = null;
    var controller = new VideoQualityController(1, sessions);

    // 58 sessions each propose High, a second apart.
    for (ushort ssrcId = 2; ssrcId < 60; ssrcId++)
    {
        controller.LogReceivedVideoQuality(ssrcId, VideoQuality.NotSpecified, VideoQuality.High);
        controller.Now += TimeSpan.FromSeconds(1);
    }
    Assert.AreEqual(VideoQuality.High, controller.LocalVideoQuality);
    Assert.AreEqual(VideoQuality.High, controller.CommandedVideoQuality);

    // One Low proposal after the update interval should pull quality back down.
    controller.Now += controller.QualityUpdateInterval;
    controller.LogReceivedVideoQuality(2, VideoQuality.NotSpecified, VideoQuality.Low);
    Assert.AreEqual(VideoQuality.Low, controller.LocalVideoQuality);
    Assert.AreEqual(VideoQuality.Low, controller.CommandedVideoQuality);
}
/// <summary>
/// Writes twenty in-order frames into the audio jitter queue and verifies
/// they read back in the same order.
/// </summary>
public void JitterReadWriteTest()
{
    var qualityController = new VideoQualityController(1);
    qualityController.RemoteSessions = _remoteSessions;
    var jitterQueue = new AudioJitterQueue(new CodecFactory(AudioFormat.Default), qualityController);

    for (ushort sequence = 0; sequence < 20; sequence++)
    {
        short[] samples = GetAudioSamples(320, (short)sequence);
        jitterQueue.WriteSamples(samples, 0, samples.Length, sequence, AudioCodecType.Raw, false);
    }

    // Each frame's first sample carries its sequence number (written by GetAudioSamples).
    var playbackBuffer = new short[320];
    for (int expected = 0; expected < 20; expected++)
    {
        jitterQueue.ReadSamples(playbackBuffer);
        Assert.AreEqual(expected, playbackBuffer[0]);
    }
}
/// <summary>
/// Parses a synthetic raw frame into frame blocks and checks the raw RGBA bytes
/// of every populated block.
/// NOTE(review): the expectation that every byte of block i equals (byte)i is
/// presumably the pattern written by GetSample() — confirm there.
/// </summary>
public void ParsedFrameTest()
{
    byte[] sample = GetSample();
    var contextFactory = new JpegFrameContextFactory(VideoConstants.VideoBlockSize, VideoConstants.VideoBlockSize);
    var blockPool = new ObjectPool<FrameBlock>(() => new FrameBlock(contextFactory));
    var qualityController = new VideoQualityController(1);
    qualityController.RemoteSessions = _remoteSessions;

    var frame = new ParsedFrame(qualityController, sample, height, width, 0, 0, blockPool);

    for (int blockIndex = 0; blockIndex < frame.FrameBlocks.Length; blockIndex++)
    {
        var frameBlock = frame.FrameBlocks[blockIndex];
        if (frameBlock == null)
        {
            continue;
        }
        var expected = (byte)blockIndex;
        foreach (byte actual in frameBlock.RgbaRaw)
        {
            Assert.AreEqual(expected, actual);
        }
    }
}
/// <summary>
/// Wires up an audio context and a media controller over a single-frame connection,
/// connects, registers one remote session, and submits each supplied frame.
/// </summary>
/// <param name="testFrames">Raw recorded audio frames to push through the controller.</param>
public void AnalyzeSingleFrame(List<byte[]> testFrames)
{
    // Create the audio context.
    var config = MediaConfig.Default;
    config.LocalSsrcId = 1000;
    var rawAudioFormat = new AudioFormat(); // Placeholder — overwritten later.
    var playedAudioFormat = new AudioFormat();
    var environment = new TestMediaEnvironment();
    var contextFactory = new AudioContextFactory(rawAudioFormat, playedAudioFormat, config, environment);
    var audioContext = contextFactory.HighQualityDirectCtx;

    // Create the media controller.
    var statistics = new TimingMediaStatistics();
    var connection = new SingleFrameMediaConnection(MediaConfig.Default.LocalSsrcId);
    connection.FirstPacketReceived += mediaConnection_FirstPacketReceived;
    var qualityController = new VideoQualityController(MediaConfig.Default.LocalSsrcId);
    var controller = new MediaController(MediaConfig.Default, playedAudioFormat, statistics, environment, connection, qualityController);

    // Connect; failures surface as a message box on the UI thread.
    controller.Connect("test", ex => Deployment.Current.Dispatcher.BeginInvoke(() =>
    {
        if (ex != null)
        {
            MessageBox.Show(ex.ToString());
        }
        else
        {
            ClientLogger.Debug("TimingViewModel connected to MediaController");
        }
    }));
    controller.RegisterRemoteSession(1001);

    foreach (var frame in testFrames)
    {
        controller.SubmitRecordedFrame(audioContext, frame);
    }
}
// Start button handler: reads the UI configuration (connection type, audio format,
// enhancer, encoder, room/connection counts), builds one MediaServerViewModel per
// connection per room, cross-registers all sessions within each room, then starts
// capture and playback. Any failure is surfaced via a message box.
private void btnStart_Click(object sender, RoutedEventArgs e)
{
    try
    {
        // Bail out if microphone access is not (and cannot be) granted.
        if (!(CaptureDeviceConfiguration.AllowedDeviceAccess || CaptureDeviceConfiguration.RequestDeviceAccess()))
        {
            MessageBox.Show("Unable to capture microphone");
            return;
        }
        _audioSinkAdapter.UseGeneratedTone = chkUseGeneratedTone.IsChecked ?? true;
        _mediaStreamSource.UseGeneratedTone = chkUseGeneratedTone.IsChecked ?? true;

        // Create the context factory from the values currently selected in the UI.
        var rootMediaConfig = GetMediaConfig();
        var connectionSelection = (ComboBoxItem)cboConnection.SelectedItem;
        var connectionType = (MediaConnectionType)Enum.Parse(typeof(MediaConnectionType), (string)connectionSelection.Content, true);
        var formatSelection = (ComboBoxItem)cboAudioFormat.SelectedItem;
        var audioFormat = new AudioFormat(int.Parse((string)formatSelection.Content));
        var enhancerSelection = (ComboBoxItem)cboEnhancer.SelectedValue;
        var enhancerType = (SpeechEnhancementStack)Enum.Parse(typeof(SpeechEnhancementStack), (string)enhancerSelection.Content, true);
        var encoderSelection = (ComboBoxItem)cboEncoder.SelectedValue;
        var encoderType = (AudioCodecType)Enum.Parse(typeof(AudioCodecType), (string)encoderSelection.Content, true);
        var ctxFactory = new TestAudioContextFactory(rootMediaConfig, _audioSinkAdapter.RawAudioFormat, audioFormat, enhancerType, encoderType);
        _audioSinkAdapter.RootAudioContext = ctxFactory.GetAudioContext();

        // Reset any state left over from a previous run.
        _mediaServerVms.Clear();
        _audioSinkAdapter.AudioControllers.Clear();
        _audioSinkAdapter.AudioContexts.Clear();
        _mediaStreamSource.AudioControllers.Clear();

        var connections = (int)txtConnections.Value;
        var rooms = (int)txtRooms.Value;
        _audioSinkAdapter.Rooms = rooms;
        _audioSinkAdapter.ConnectionsPerRoom = connections;
        for (int room = 0; room < rooms; room++)
        {
            string roomId = string.Format("__alantaTestRoom{0}__", room);
            var mediaStats = new MediaStatistics();
            var mediaEnvironment = new MediaEnvironment(mediaStats);
            // Register each room on the remote server.
            for (int connection = 0; connection < connections; connection++)
            {
                var connectionMediaConfig = GetMediaConfig();
                // Media-server connections go over RTP; anything else loops back locally.
                IMediaConnection mediaConnection;
                if (connectionType == MediaConnectionType.MediaServer)
                {
                    mediaConnection = new RtpMediaConnection(connectionMediaConfig, mediaStats);
                }
                else
                {
                    mediaConnection = new LoopbackMediaConnection(connectionMediaConfig.LocalSsrcId);
                }
                var vqc = new VideoQualityController(connectionMediaConfig.LocalSsrcId);
                var vm = new MediaServerViewModel(connectionMediaConfig, AudioFormat.Default, mediaStats, mediaEnvironment, mediaConnection, vqc, roomId);
                _audioSinkAdapter.AudioControllers.Add(vm.MediaController);
                _audioSinkAdapter.AudioContexts.Add(ctxFactory.GetAudioContext());
                _mediaStreamSource.AudioControllers.Add(new AudioControllerEntry(vm.MediaController));
                _mediaServerVms.Add(vm);
                vm.Connect();
            }
            // Make sure each session knows about all the others in the same room.
            var localVms = _mediaServerVms.Where(x => x.RoomId == roomId).ToList();
            foreach (var localVm in localVms)
            {
                var vm = localVm; // Local copy to avoid the closure-over-loop-variable pitfall.
                var remoteVms = localVms.Where(x => x.SsrcId != vm.SsrcId).ToList();
                foreach (var remoteVm in remoteVms)
                {
                    vm.MediaController.RegisterRemoteSession(remoteVm.SsrcId);
                }
            }
        }

        // Attach the stream source once, then start capture and playback.
        if (mediaElement.CurrentState == MediaElementState.Closed)
        {
            mediaElement.SetSource(_mediaStreamSource);
        }
        _captureSource.Start();
        mediaElement.Play();
        btnStop.IsEnabled = true;
        btnStart.IsEnabled = false;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString());
    }
}
// Starts a loopback timing test for the currently selected audio context: validates
// device access, wires a media controller to a loopback connection, hooks the audio
// sink and stream source together, then begins capture and playback.
// Throws InvalidOperationException when no usable capture device/format is available
// or device access is denied.
public void StartTimingTest()
{
    Status = "Executing timing test for context '" + CurrentAudioContext.Description + "'";
    _mediaElement = new MediaElement();
    _audioStreamSource = new AudioMediaStreamSource(null, AudioFormat.Default);
    _captureSource.VideoCaptureDevice = null; // Audio only — no video capture for this test.

    // Make sure we can get at the devices.
    if (_captureSource.AudioCaptureDevice == null)
    {
        throw new InvalidOperationException("No audio capture device was found");
    }
    if (_captureSource.AudioCaptureDevice.DesiredFormat == null)
    {
        throw new InvalidOperationException("No suitable audio format was found");
    }
    if (!CaptureDeviceConfiguration.AllowedDeviceAccess && !CaptureDeviceConfiguration.RequestDeviceAccess())
    {
        throw new InvalidOperationException("Device access not granted.");
    }

    // Create the audio sink.
    MediaConfig.Default.LocalSsrcId = 1000;
    MediaStatistics = new TimingMediaStatistics();
    var mediaEnvironment = new TestMediaEnvironment();

    // Create the media controller over a loopback connection (no network involved).
    _mediaConnection = new LoopbackMediaConnection(MediaConfig.Default.LocalSsrcId);
    var vqc = new VideoQualityController(MediaConfig.Default.LocalSsrcId);
    _mediaController = new MediaController(MediaConfig.Default, AudioFormat.Default, MediaStatistics, mediaEnvironment, _mediaConnection, vqc);

    // Create the audio sink to grab data from the microphone and send it to the media controller.
    _audioSink = new TimingAudioSinkAdapter(CurrentAudioContext, _captureSource, _mediaController, MediaConfig.Default, new TestMediaEnvironment(), CurrentAudioContext.AudioFormat);
    _audioSink.CaptureSuccessful += _audioSink_CaptureSuccessful;

    // Create the media stream source to play data from the media controller.
    _audioStreamSource.AudioController = _mediaController;
    _mediaElement.SetSource(_audioStreamSource);

    // Connect; the callback runs on the UI thread and tears the test down on failure.
    _mediaController.Connect("test", ex => Deployment.Current.Dispatcher.BeginInvoke(() =>
    {
        if (ex != null)
        {
            StopTimingTest();
            MessageBox.Show(ex.ToString());
        }
        else
        {
            ClientLogger.Debug("TimingViewModel connected to MediaController");
        }
    }));
    _mediaController.RegisterRemoteSession(1001);

    // Start capturing (which should trigger the audio sink).
    _captureSource.Start();
    if (_captureSource.State != CaptureState.Started)
    {
        throw new InvalidOperationException("Unable to capture microphone");
    }

    // Start playing.
    _mediaElement.Play();
    ClientLogger.Debug("CaptureSource initialized; captureSource.State={0}; captureSource.AudioCaptureDevice={1}; mediaElement.CurrentState={2}; ", _captureSource.State, _captureSource.AudioCaptureDevice.FriendlyName, _mediaElement.CurrentState);
}
// Test fixture constructor: seeds one placeholder remote session (only the key is
// needed by the tests) and creates the shared video quality controller.
public MediaTests()
{
    _remoteSessions[1] = null;
    _videoQualityController = new VideoQualityController(0);
}
// Builds a complete test room: one source media controller (random SSRC) feeding the
// room, plus RemoteSessionCount destination controllers/pages/room-controllers, all
// cross-registered with the source. Destination objects are keyed by session id and
// their pages are initialized only after every session exists.
public SourceRoomController(IViewModelFactory viewModelFactory, IRoomInfo roomInfo, IConfigurationService configurationService, MediaTest roomPage)
    : base(viewModelFactory, roomInfo, configurationService)
{
    // Set the roomViewModel initial values.
    var rnd = new Random();
    var sourceSsrcId = (ushort)rnd.Next(ushort.MinValue, ushort.MaxValue); // Random SSRC to avoid collisions between test runs.
    var sourceConfig = new MediaConfig
    {
        MediaServerHost = DataGlobals.MediaServerHost,
        MediaServerControlPort = Constants.DefaultMediaServerControlPort,
        MediaServerStreamingPort = Constants.DefaultMediaServerStreamingPort,
        LocalSsrcId = sourceSsrcId,
        CodecFactory = new CodecFactory(AudioFormat.Default),
        ExpectedAudioLatency = 250
    };
    var sourceMediaStats = new MediaStatistics();
    var sourceMediaEnvironment = new MediaEnvironment(sourceMediaStats);
    var sourceMediaConnection = new RtpMediaConnection(sourceConfig, sourceMediaStats);
    var vqc = new VideoQualityController(sourceConfig.LocalSsrcId);
    _sourceMediaController = new SourceMediaController(sourceConfig, sourceMediaStats, sourceMediaEnvironment, sourceMediaConnection, vqc);
    RoomVm.RoomName = Constants.DefaultRoomName;
    RoomVm.MediaController = _sourceMediaController;
    _sourceMediaController.InputAudioVisualizer = roomPage.audioVisualizer;

    // Setup the local session: room, owning user, and the session view-model graph.
    var room = new Room { Name = Constants.DefaultRoomName, Sessions = new ObservableCollection<Session>() };
    var user = new RegisteredUser { UserId = Guid.NewGuid(), UserTag = "smithkl42", UserName = "******" };
    var session = new Session { SessionId = Guid.NewGuid(), SsrcId = _sourceMediaController.LocalSsrcId, User = user };
    var sessionViewModel = viewModelFactory.GetViewModel<SessionViewModel>(vm => vm.Model.SessionId == session.SessionId);
    sessionViewModel.Model = session;
    _sessionCollectionViewModel = viewModelFactory.GetViewModel<SessionCollectionViewModel>();
    RoomVm.SessionVm = sessionViewModel;
    _sessionCollectionViewModel.ViewModels.Add(sessionViewModel);
    RoomVm.SessionId = RoomVm.SessionVm.Model.SessionId;
    var owner = user; // The local user owns the room and its shared files.
    owner.SharedFiles = new ObservableCollection<SharedFile>();
    room.SharedFiles = owner.SharedFiles;
    room.User = owner;
    RoomVm.UserTag = owner.UserTag;
    room.UserId = owner.UserId;
    RoomVm.Model = room;
    LocalUserVm.Model = owner;
    LocalUserVm.UserId = owner.UserId;

    // Add the destination sessions, one media controller + page + room controller each.
    var codecFactory = new DestinationCodecFactory();
    for (int i = 0; i < RemoteSessionCount; i++)
    {
        var destinationSsrcId = (ushort)rnd.Next(ushort.MinValue, ushort.MaxValue);
        var config = new MediaConfig
        {
            MediaServerHost = DataGlobals.MediaServerHost,
            MediaServerControlPort = Constants.DefaultMediaServerControlPort,
            MediaServerStreamingPort = Constants.DefaultMediaServerStreamingPort,
            LocalSsrcId = destinationSsrcId,
            CodecFactory = codecFactory,
            ExpectedAudioLatency = 250
        };
        var mediaStatistics = new MediaStatistics();
        var mediaEnvironment = new MediaEnvironment(mediaStatistics);
        var mediaConnection = new RtpMediaConnection(config, mediaStatistics);
        var destinationVqc = new VideoQualityController(config.LocalSsrcId);
        var destinationMediaController = new DestinationMediaController(config, mediaStatistics, mediaEnvironment, mediaConnection, destinationVqc);
        var remoteSession = new Session { SessionId = Guid.NewGuid(), SsrcId = destinationMediaController.LocalSsrcId };
        //todo: create sessionViewModel, add it to SessionCollectionViewModel
        _sourceMediaController.RegisterRemoteSession((ushort)(remoteSession.SsrcId));
        remoteSession.User = new RegisteredUser { UserId = Guid.NewGuid(), UserTag = "smithkl42", UserName = "******" };
        room.Sessions.Add(remoteSession);
        var destinationRoomPage = new DestinationRoomPage();
        destinationMediaController.OutputAudioVisualizer = destinationRoomPage.audioVisualizer;
        var destinationController = new DestinationRoomController(destinationMediaController, viewModelFactory, new TestRoomInfo(), configurationService, RoomVm, remoteSession.SessionId);
        destinationMediaController.Connect(RoomVm.Model.RoomId.ToString());

        // Store references to the created objects, keyed by session id.
        _destinationMediaControllers[remoteSession.SessionId] = destinationMediaController;
        _destinationRoomPages[remoteSession.SessionId] = destinationRoomPage;
        _destinationRoomControllers[remoteSession.SessionId] = destinationController;
    }

    // We have to wait until all the sessions have been created before we can register
    // them with their media controllers and initialize their pages.
    foreach (Guid sessionId in _destinationRoomControllers.Keys)
    {
        var destinationRoomController = _destinationRoomControllers[sessionId];
        var destinationPage = _destinationRoomPages[sessionId];
        destinationPage.Initialize(destinationRoomController);
    }
}
// Connects a media controller to the named user's default room on the given host and
// starts sending audio: live microphone input when sendLive is true, otherwise the
// supplied pre-recorded frames. The callback receives null on success or the error
// from room lookup / connection.
public void StartSendingAudioToRoom(string ownerUserTag, string host, List<byte[]> testFrames, bool sendLive, OperationCallback callback)
{
    // What we should use when there's only one other person, and CPU is OK:
    // 16Khz, Speex, WebRtc at full strength
    var config = MediaConfig.Default;
    config.LocalSsrcId = (ushort)rnd.Next(ushort.MinValue, ushort.MaxValue); // Random SSRC to avoid collisions.
    config.AudioContextSelection = AudioContextSelection.HighQualityDirect;
    config.MediaServerHost = host;

    // Create the media controller.
    var playedAudioFormat = new AudioFormat();
    var mediaStatistics = new TimingMediaStatistics();
    var mediaEnvironment = new TestMediaEnvironment();
    var mediaConnection = new RtpMediaConnection(config, mediaStatistics);
    var vqc = new VideoQualityController(MediaConfig.Default.LocalSsrcId);
    _mediaController = new MediaController(MediaConfig.Default, playedAudioFormat, mediaStatistics, mediaEnvironment, mediaConnection, vqc);

    // Create the audio sink adapter (audio only; fall back to the default capture device).
    _captureSource = new CaptureSource();
    _captureSource.VideoCaptureDevice = null;
    if (_captureSource.AudioCaptureDevice == null)
    {
        _captureSource.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
        if (_captureSource.AudioCaptureDevice == null)
        {
            throw new InvalidOperationException("No suitable audio capture device was found");
        }
    }
    MediaDeviceConfig.SelectBestAudioFormat(_captureSource.AudioCaptureDevice);
    _captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame; // 20 milliseconds

    // Live capture feeds the plain adapter; otherwise replay the supplied test frames.
    _audioSinkAdapter = sendLive ?
        new AudioSinkAdapter(_captureSource, _mediaController, config, mediaEnvironment, playedAudioFormat) :
        new FromFileAudioSinkAdapter(_captureSource, _mediaController, config, mediaEnvironment, playedAudioFormat, testFrames);

    // Look up the room, then connect and start capturing on success.
    var roomService = new RoomServiceAdapter();
    roomService.CreateClient();
    roomService.GetRoomId(Constants.DefaultCompanyTag, Constants.DefaultAuthenticationGroupTag, ownerUserTag, Constants.DefaultRoomName, (getRoomError, result) =>
    {
        if (getRoomError != null)
        {
            callback(getRoomError);
        }
        else
        {
            // Connect; completion is marshalled back to the UI thread.
            _mediaController.Connect(result.RoomId.ToString(), connectError => Deployment.Current.Dispatcher.BeginInvoke(() =>
            {
                if (connectError == null)
                {
                    ClientLogger.Debug("MacTestViewModel connected to MediaController");
                    _captureSource.Start();
                }
                _mediaController.RegisterRemoteSession((ushort)(config.LocalSsrcId + 1));
                callback(connectError);
            }));
        }
    });
}