Example 1
        /// <summary>
        /// Builds the media pipeline: stores configuration and collaborators, points the
        /// connection's packet handlers at this instance, prepares audio/video codecs and
        /// buffers, starts the audio/video transmit threads, and creates the object pools
        /// and statistics collections used for the controller's lifetime.
        /// </summary>
        /// <param name="config">Media configuration; supplies the local SSRC id and the codec factory.</param>
        /// <param name="playedAudioFormat">Format of audio sent to the speaker; sizes the silent-frame and decoded-frame buffers.</param>
        /// <param name="mediaStats">Statistics sink shared with the logger, video encoder, and jitter queue.</param>
        /// <param name="mediaEnvironment">Environment abstraction; stored in a field for later use.</param>
        /// <param name="mediaConnection">Transport whose incoming audio/video packets are routed to this controller.</param>
        /// <param name="videoQualityController">Quality controller; handed a reference to the remote-session map.</param>
        public MediaController(MediaConfig config, AudioFormat playedAudioFormat, MediaStatistics mediaStats, IMediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController)
        {
            // Initialize the class variables.
            _mediaEnvironment                  = mediaEnvironment;
            MediaConfig                        = config;
            MediaStats                         = mediaStats;
            MediaConnection                    = mediaConnection;
            VideoQualityController             = videoQualityController;
            // Route inbound packets from the transport into this controller.
            MediaConnection.AudioPacketHandler = HandleAudioPacket;
            MediaConnection.VideoPacketHandler = HandleVideoPacket;

            Logger         = new MediaControllerLogger(VideoQualityController, MediaStats);
            _localSsrcId   = config.LocalSsrcId;
            RemoteSessions = new Dictionary <ushort, VideoThreadData>();
            // The quality controller shares (not copies) the remote-session map, so later
            // additions via RegisterRemoteSession are visible to it.
            VideoQualityController.RemoteSessions = RemoteSessions;
            PlayedAudioFormat = playedAudioFormat;

            // Buffer sizes are derived from the playback format, not the capture format.
            _silentBytes        = new byte[PlayedAudioFormat.BytesPerFrame];
            _decodedFrameBuffer = new short[PlayedAudioFormat.SamplesPerFrame * 10];             // Make room for 10 frames.

            _codecFactory = config.CodecFactory;
            _videoEncoder = _codecFactory.GetVideoEncoder(VideoQualityController, MediaStats);

            // Instantiate the audio jitter class
            _audioJitter = new AudioJitterQueue(_codecFactory, VideoQualityController, MediaStats);
            // NOTE(review): this subscription is never unsubscribed here; presumably the jitter
            // queue and controller share a lifetime — confirm to rule out a leak.
            _audioJitter.CodecTypeChanged += audioJitter_CodecTypeChanged;

            _audioDecodeBuffer = new byte[VideoConstants.MaxPayloadSize];
            _audioSendBuffer   = new ByteStream(RtpPacketData.DataPacketMaxLength);

            // Spin up the various audio and video encoding threads.
            // On multiprocessor machines, these can spread the load, but even on single-processor machines it helps a great deal
            // if the various audio and video sinks can return immediately.
            // NOTE(review): the threads are started before the object pools below are created;
            // presumably TransmitAudio/TransmitVideo block on their reset events until work
            // arrives — confirm there is no startup race on the pool fields.
            _audioEncodeResetEvent  = new ManualResetEvent(false);
            _audioEncodeThread      = new Thread(TransmitAudio);
            _audioEncodeThread.Name = "MediaController.TransmitAudio";
            _audioEncodeThread.Start();
            _videoEncodeResetEvent    = new ManualResetEvent(false);
            _videoTransmitThread      = new Thread(TransmitVideo);
            _videoTransmitThread.Name = "MediaController.TransmitVideo";
            _videoTransmitThread.Start();

            // Create the object pools that will help us reduce time spent in garbage collection.
            _videoBufferPool  = new ObjectPool <ByteStream>(() => new ByteStream(VideoConstants.MaxPayloadSize * 2), bs => bs.Reset());
            _packetBufferPool = new ObjectPool <ByteStream>(() => new ByteStream(RtpPacketData.DataPacketMaxLength), bs => bs.Reset());
            _videoChunkPool   = new ObjectPool <Chunk>(() => new Chunk {
                Payload = new ByteStream(VideoConstants.MaxPayloadSize * 2)
            }, chunk => { chunk.SsrcId = 0; chunk.Payload.Reset(); });

            // Observable so UI bindings pick up additions; the three fixed entries track
            // speaker output, microphone input, and echo-cancelled volume respectively.
            AudioStats = new ObservableCollection <AudioStatistics>();

            _speakerStatistics    = new AudioStatistics("Volume:Sent to Speaker", MediaStats);
            _microphoneStatistics = new AudioStatistics("Volume:Received from Microphone", MediaStats);
            _cancelledStatistics  = new AudioStatistics("Volume:Echo Cancelled", MediaStats);

            AudioStats.Add(_speakerStatistics);
            AudioStats.Add(_microphoneStatistics);
            AudioStats.Add(_cancelledStatistics);
        }
Example 2
 /// <summary>
 /// Associates a newly created <see cref="MediaController"/> with a room and begins
 /// observing its performance-counter collection for KPI updates.
 /// </summary>
 /// <param name="mediaConfig">Configuration forwarded to the media controller.</param>
 /// <param name="audioFormat">Audio playback format forwarded to the media controller.</param>
 /// <param name="mediaStatistics">Statistics sink forwarded to the media controller.</param>
 /// <param name="mediaEnvironment">Environment abstraction forwarded to the media controller.</param>
 /// <param name="mediaConnection">Transport forwarded to the media controller.</param>
 /// <param name="videoQualityController">Quality controller forwarded to the media controller.</param>
 /// <param name="roomId">Identifier of the room this view model represents.</param>
 public MediaServerViewModel(MediaConfig mediaConfig, AudioFormat audioFormat, MediaStatistics mediaStatistics, MediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController, string roomId)
 {
     RoomId = roomId;
     MediaServerKpis = new ObservableCollection<MediaServerKpi>();

     var controller = new MediaController(mediaConfig, audioFormat, mediaStatistics, mediaEnvironment, mediaConnection, videoQualityController);
     // Watch the controller's counters so KPI entries can be maintained as counters appear.
     controller.MediaStats.Counters.CollectionChanged += Counters_CollectionChanged;
     MediaController = controller;
 }
 /// <summary>
 /// Media controller for a sending endpoint. Delegates all construction to the base
 /// <c>MediaController</c>, fixing the played audio format to <see cref="AudioFormat.Default"/>.
 /// </summary>
 /// <param name="mediaConfig">Media configuration passed through to the base controller.</param>
 /// <param name="mediaStatistics">Statistics sink passed through to the base controller.</param>
 /// <param name="mediaEnvironment">Environment abstraction passed through to the base controller.</param>
 /// <param name="mediaConnection">Transport passed through to the base controller.</param>
 /// <param name="videoQualityController">Quality controller passed through to the base controller.</param>
 public SourceMediaController(MediaConfig mediaConfig, MediaStatistics mediaStatistics, MediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController) :
     base(mediaConfig, AudioFormat.Default, mediaStatistics, mediaEnvironment, mediaConnection, videoQualityController)
 {
 }
Example 4
        // ReSharper restore NotAccessedField.Local

        /// <summary>
        /// Media controller for a receiving endpoint. Delegates construction to the base
        /// <c>MediaController</c> (with <see cref="AudioFormat.Default"/> as the played format)
        /// and starts a periodic audio timer.
        /// </summary>
        /// <param name="mediaConfig">Media configuration passed through to the base controller.</param>
        /// <param name="mediaStatistics">Statistics sink passed through to the base controller.</param>
        /// <param name="mediaEnvironment">Environment abstraction passed through to the base controller.</param>
        /// <param name="mediaConnection">Transport passed through to the base controller.</param>
        /// <param name="videoQualityController">Quality controller passed through to the base controller.</param>
        public DestinationMediaController(MediaConfig mediaConfig, MediaStatistics mediaStatistics, MediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController) :
            base(mediaConfig, AudioFormat.Default, mediaStatistics, mediaEnvironment, mediaConnection, videoQualityController)
        {
            // Fire audioTimer_Tick every 20 ms after an initial 20 ms delay — presumably one
            // audio frame interval for the default format; confirm against AudioFormat.Default.
            _audioTimer = new Timer(audioTimer_Tick, null, 20, 20);
        }
Example 5
        /// <summary>
        /// Runs an end-to-end audio timing test for the currently selected audio context:
        /// validates capture-device access, builds a loopback media pipeline, wires the
        /// microphone (via an audio sink) into a <c>MediaController</c>, plays the controller's
        /// output through a <c>MediaElement</c>, and starts capture.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when no audio capture device or format is available, device access is not
        /// granted, or the capture source fails to start.
        /// </exception>
        public void StartTimingTest()
        {
            // MessageBox.Show("Currently selected context = " + CurrentAudioContext.Description);
            Status             = "Executing timing test for context '" + CurrentAudioContext.Description + "'";
            _mediaElement      = new MediaElement();
            _audioStreamSource = new AudioMediaStreamSource(null, AudioFormat.Default);
            // Audio-only test: explicitly disable video capture.
            _captureSource.VideoCaptureDevice = null;

            // Make sure we can get at the devices.
            if (_captureSource.AudioCaptureDevice == null)
            {
                throw new InvalidOperationException("No audio capture device was found");
            }
            if (_captureSource.AudioCaptureDevice.DesiredFormat == null)
            {
                throw new InvalidOperationException("No suitable audio format was found");
            }
            if (!CaptureDeviceConfiguration.AllowedDeviceAccess && !CaptureDeviceConfiguration.RequestDeviceAccess())
            {
                throw new InvalidOperationException("Device access not granted.");
            }

            // Configure the shared media config and the statistics used for timing measurements.
            MediaConfig.Default.LocalSsrcId = 1000;
            MediaStatistics = new TimingMediaStatistics();
            var mediaEnvironment = new TestMediaEnvironment();

            // Create the media controller over a loopback connection (sent packets are
            // received locally), so the whole pipeline can be timed without a server.
            _mediaConnection = new LoopbackMediaConnection(MediaConfig.Default.LocalSsrcId);
            var vqc = new VideoQualityController(MediaConfig.Default.LocalSsrcId);

            _mediaController = new MediaController(MediaConfig.Default, AudioFormat.Default, MediaStatistics, mediaEnvironment, _mediaConnection, vqc);

            // Create the audio sink to grab data from the microphone and send it to the media controller.
            _audioSink = new TimingAudioSinkAdapter(CurrentAudioContext, _captureSource, _mediaController, MediaConfig.Default, new TestMediaEnvironment(), CurrentAudioContext.AudioFormat);
            _audioSink.CaptureSuccessful += _audioSink_CaptureSuccessful;

            // Create the media stream source to play data from the media controller
            _audioStreamSource.AudioController = _mediaController;
            _mediaElement.SetSource(_audioStreamSource);

            // Connect. The callback is marshalled to the UI dispatcher; a non-null exception
            // aborts the test and surfaces the error to the user.
            _mediaController.Connect("test", ex => Deployment.Current.Dispatcher.BeginInvoke(() =>
            {
                if (ex != null)
                {
                    StopTimingTest();
                    MessageBox.Show(ex.ToString());
                }
                else
                {
                    ClientLogger.Debug("TimingViewModel connected to MediaController");
                }
            }));
            // Register a fake remote peer (SSRC 1001) for the loopback traffic to target.
            _mediaController.RegisterRemoteSession(1001);

            // Start capturing (which should trigger the audio sink).
            _captureSource.Start();
            if (_captureSource.State != CaptureState.Started)
            {
                throw new InvalidOperationException("Unable to capture microphone");
            }

            // Start playing.
            _mediaElement.Play();
            ClientLogger.Debug("CaptureSource initialized; captureSource.State={0}; captureSource.AudioCaptureDevice={1}; mediaElement.CurrentState={2}; ",
                               _captureSource.State, _captureSource.AudioCaptureDevice.FriendlyName, _mediaElement.CurrentState);
        }