Code example #1
        public void AnalyzeSingleFrame(List<byte[]> testFrames)
        {
            // Create the audio context.
            var config = MediaConfig.Default;

            config.LocalSsrcId = 1000;
            var rawAudioFormat      = new AudioFormat();        // This will be overwritten later
            var playedAudioFormat   = new AudioFormat();
            var mediaEnvironment    = new TestMediaEnvironment();
            var audioContextFactory = new AudioContextFactory(rawAudioFormat, playedAudioFormat, config, mediaEnvironment);
            var audioContext        = audioContextFactory.HighQualityDirectCtx;

            // Create the media controller
            var mediaStatistics = new TimingMediaStatistics();
            var mediaConnection = new SingleFrameMediaConnection(MediaConfig.Default.LocalSsrcId);

            mediaConnection.FirstPacketReceived += mediaConnection_FirstPacketReceived;
            var vqc             = new VideoQualityController(MediaConfig.Default.LocalSsrcId);
            var mediaController = new MediaController(MediaConfig.Default, playedAudioFormat, mediaStatistics, mediaEnvironment, mediaConnection, vqc);

            // Connect.
            mediaController.Connect("test", ex => Deployment.Current.Dispatcher.BeginInvoke(() =>
            {
                if (ex != null)
                {
                    MessageBox.Show(ex.ToString());
                }
                else
                {
                    ClientLogger.Debug("TimingViewModel connected to MediaController");
                }
            }));
            mediaController.RegisterRemoteSession(1001);

            foreach (var frame in testFrames)
            {
                mediaController.SubmitRecordedFrame(audioContext, frame);
            }
        }
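
A minimal caller sketch for the example above, assuming nothing beyond what the method signature shows (a List<byte[]> of raw audio frames). The frame size used here is an assumption based on the 16 kHz and 20 ms figures mentioned in the other examples, not a value taken from this method:

        // Hypothetical caller: feed a handful of silent dummy frames through the analyzer.
        public void RunSingleFrameAnalysis()
        {
            var testFrames = new List<byte[]>();
            for (var i = 0; i < 10; i++)
            {
                // Assumption: 20 ms of 16 kHz, 16-bit mono PCM = 640 bytes per frame.
                testFrames.Add(new byte[640]);
            }
            AnalyzeSingleFrame(testFrames);
        }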
Code example #2
        public void StartTimingTest()
        {
            // MessageBox.Show("Currently selected context = " + CurrentAudioContext.Description);
            Status             = "Executing timing test for context '" + CurrentAudioContext.Description + "'";
            _mediaElement      = new MediaElement();
            _audioStreamSource = new AudioMediaStreamSource(null, AudioFormat.Default);
            _captureSource.VideoCaptureDevice = null;

            // Make sure we can get at the devices.
            if (_captureSource.AudioCaptureDevice == null)
            {
                throw new InvalidOperationException("No audio capture device was found");
            }
            if (_captureSource.AudioCaptureDevice.DesiredFormat == null)
            {
                throw new InvalidOperationException("No suitable audio format was found");
            }
            if (!CaptureDeviceConfiguration.AllowedDeviceAccess && !CaptureDeviceConfiguration.RequestDeviceAccess())
            {
                throw new InvalidOperationException("Device access not granted.");
            }

            // Create the audio sink.
            MediaConfig.Default.LocalSsrcId = 1000;
            MediaStatistics = new TimingMediaStatistics();
            var mediaEnvironment = new TestMediaEnvironment();

            // Create the media controller
            _mediaConnection = new LoopbackMediaConnection(MediaConfig.Default.LocalSsrcId);
            var vqc = new VideoQualityController(MediaConfig.Default.LocalSsrcId);

            _mediaController = new MediaController(MediaConfig.Default, AudioFormat.Default, MediaStatistics, mediaEnvironment, _mediaConnection, vqc);

            // Create the audio sink to grab data from the microphone and send it to the media controller.
            _audioSink = new TimingAudioSinkAdapter(CurrentAudioContext, _captureSource, _mediaController, MediaConfig.Default, new TestMediaEnvironment(), CurrentAudioContext.AudioFormat);
            _audioSink.CaptureSuccessful += _audioSink_CaptureSuccessful;

            // Create the media stream source to play data from the media controller
            _audioStreamSource.AudioController = _mediaController;
            _mediaElement.SetSource(_audioStreamSource);

            // Connect.
            _mediaController.Connect("test", ex => Deployment.Current.Dispatcher.BeginInvoke(() =>
            {
                if (ex != null)
                {
                    StopTimingTest();
                    MessageBox.Show(ex.ToString());
                }
                else
                {
                    ClientLogger.Debug("TimingViewModel connected to MediaController");
                }
            }));
            _mediaController.RegisterRemoteSession(1001);

            // Start capturing (which should trigger the audio sink).
            _captureSource.Start();
            if (_captureSource.State != CaptureState.Started)
            {
                throw new InvalidOperationException("Unable to capture microphone");
            }

            // Start playing.
            _mediaElement.Play();
            ClientLogger.Debug("CaptureSource initialized; captureSource.State={0}; captureSource.AudioCaptureDevice={1}; mediaElement.CurrentState={2}; ",
                               _captureSource.State, _captureSource.AudioCaptureDevice.FriendlyName, _mediaElement.CurrentState);
        }
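
The error path above calls StopTimingTest(), whose body is not shown in this excerpt. A hypothetical cleanup sketch follows, limited to the standard Silverlight calls already visible in these examples (CaptureSource.Stop, MediaElement.Stop, CaptureState.Started); shutting down the MediaController itself is omitted because its teardown API does not appear in the excerpt:

        // Hypothetical sketch of StopTimingTest(): undo the capture and playback started above.
        public void StopTimingTest()
        {
            if (_captureSource != null && _captureSource.State == CaptureState.Started)
            {
                _captureSource.Stop();     // Stop pulling audio from the microphone.
            }
            if (_mediaElement != null)
            {
                _mediaElement.Stop();      // Stop playback of the loopback stream.
            }
            Status = "Timing test stopped";
        }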
Code example #3
        public void StartSendingAudioToRoom(string ownerUserTag, string host, List<byte[]> testFrames, bool sendLive, OperationCallback callback)
        {
            // What we should use when there's only one other person, and CPU is OK:
            // 16 kHz, Speex, WebRtc at full strength
            var config = MediaConfig.Default;

            config.LocalSsrcId           = (ushort)rnd.Next(ushort.MinValue, ushort.MaxValue);
            config.AudioContextSelection = AudioContextSelection.HighQualityDirect;
            config.MediaServerHost       = host;

            // Create the media controller
            var playedAudioFormat = new AudioFormat();
            var mediaStatistics   = new TimingMediaStatistics();
            var mediaEnvironment  = new TestMediaEnvironment();
            var mediaConnection   = new RtpMediaConnection(config, mediaStatistics);
            var vqc = new VideoQualityController(MediaConfig.Default.LocalSsrcId);

            _mediaController = new MediaController(MediaConfig.Default, playedAudioFormat, mediaStatistics, mediaEnvironment, mediaConnection, vqc);

            // Create the audio sink adapter.
            _captureSource = new CaptureSource();
            _captureSource.VideoCaptureDevice = null;
            if (_captureSource.AudioCaptureDevice == null)
            {
                _captureSource.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
                if (_captureSource.AudioCaptureDevice == null)
                {
                    throw new InvalidOperationException("No suitable audio capture device was found");
                }
            }
            MediaDeviceConfig.SelectBestAudioFormat(_captureSource.AudioCaptureDevice);
            _captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame;             // 20 milliseconds
            _audioSinkAdapter = sendLive
                                ? new AudioSinkAdapter(_captureSource, _mediaController, config, mediaEnvironment, playedAudioFormat)
                                : new FromFileAudioSinkAdapter(_captureSource, _mediaController, config, mediaEnvironment, playedAudioFormat, testFrames);

            var roomService = new RoomServiceAdapter();

            roomService.CreateClient();
            roomService.GetRoomId(Constants.DefaultCompanyTag, Constants.DefaultAuthenticationGroupTag, ownerUserTag, Constants.DefaultRoomName, (getRoomError, result) =>
            {
                if (getRoomError != null)
                {
                    callback(getRoomError);
                }
                else
                {
                    // Connect.
                    _mediaController.Connect(result.RoomId.ToString(), connectError => Deployment.Current.Dispatcher.BeginInvoke(() =>
                    {
                        if (connectError == null)
                        {
                            ClientLogger.Debug("MacTestViewModel connected to MediaController");
                            _captureSource.Start();
                        }
                        _mediaController.RegisterRemoteSession((ushort)(config.LocalSsrcId + 1));
                        callback(connectError);
                    }));
                }
            });
        }
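
A hypothetical invocation of the method above. The owner tag and host values are placeholders, testFrames is assumed to already hold the recorded audio frames, and the callback shape (a single error argument) is inferred from how the callback is invoked inside the method:

        // Hypothetical caller: stream pre-recorded frames (sendLive = false) to the owner's default room.
        StartSendingAudioToRoom("someOwnerTag", "media.example.com", testFrames, false, error =>
        {
            if (error != null)
            {
                ClientLogger.Debug("Failed to start sending audio: {0}", error);
            }
            else
            {
                ClientLogger.Debug("Connected; sending recorded test frames to the room.");
            }
        });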