コード例 #1
0
        /// <summary>
        /// Builds a JPEG-diff video decoder configured with the standard video dimensions.
        /// </summary>
        /// <param name="videoQualityController">Controls quality-related behavior of the codec.</param>
        /// <param name="mediaStatistics">Optional statistics sink; may be null.</param>
        /// <returns>An initialized <see cref="IVideoCodec"/>.</returns>
        public IVideoCodec GetVideoDecoder(IVideoQualityController videoQualityController, MediaStatistics mediaStatistics = null)
        {
            var decoder = new JpegDiffVideoCodec(videoQualityController, mediaStatistics);
            decoder.Initialize(VideoConstants.Height, VideoConstants.Width, VideoConstants.MaxPayloadSize);
            return decoder;
        }
コード例 #2
0
        /// <summary>
        /// Creates a JPEG-diff codec: sets up the encoded-block queue, a pooled
        /// FrameBlock allocator, and (when statistics are supplied) three
        /// percentage counters tracking how blocks were transmitted.
        /// </summary>
        /// <param name="videoQualityController">Controls quality-related behavior of the codec.</param>
        /// <param name="mediaStatistics">Optional statistics sink; counters are only registered when non-null.</param>
        public JpegDiffVideoCodec(IVideoQualityController videoQualityController, MediaStatistics mediaStatistics = null)
        {
            _videoQualityController = videoQualityController;
            EncodedBlocks = new Queue<FrameBlock>(VideoConstants.MaxQueuedBlocksPerStream);

            // Pooled FrameBlocks are recycled; clear the accumulated diff count when one is returned.
            var contextFactory = new JpegFrameContextFactory(VideoConstants.VideoBlockSize, VideoConstants.VideoBlockSize);
            _frameBlockPool = new ReferenceCountedObjectPool<FrameBlock>(
                () => new FrameBlock(contextFactory),
                fb => { fb.CumulativeDifferences = 0; });

            if (mediaStatistics != null)
            {
                // Each counter reports a percentage, hence the fixed 0-100 axis.
                _blocksNotTransmittedCounter = mediaStatistics.RegisterCounter("Video: Blocks Not Transmitted %");
                _blocksNotTransmittedCounter.AxisMinimum = 0;
                _blocksNotTransmittedCounter.AxisMaximum = 100;

                _blocksJpegCounter = mediaStatistics.RegisterCounter("Video:Blocks Jpeg Transmitted %");
                _blocksJpegCounter.AxisMinimum = 0;
                _blocksJpegCounter.AxisMaximum = 100;

                _blocksDeltaCounter = mediaStatistics.RegisterCounter("Video:Blocks Delta Transmitted %");
                _blocksDeltaCounter.AxisMinimum = 0;
                _blocksDeltaCounter.AxisMaximum = 100;
            }
        }
コード例 #3
0
        /// <summary>
        /// Creates and initializes a JPEG-diff codec using the default video dimensions,
        /// without a statistics sink.
        /// </summary>
        /// <param name="videoQualityController">Controls quality-related behavior of the codec.</param>
        /// <returns>An initialized <see cref="IVideoCodec"/>.</returns>
        private static IVideoCodec GetJdifVideoCodec(IVideoQualityController videoQualityController)
        {
            var jdifCodec = new JpegDiffVideoCodec(videoQualityController);
            jdifCodec.Initialize(VideoConstants.Height, VideoConstants.Width, VideoConstants.MaxPayloadSize);
            return jdifCodec;
        }
コード例 #4
0
        /// <summary>
        /// Wires together the audio/video pipeline for one media session: stores the
        /// injected collaborators, installs the incoming packet handlers, spins up the
        /// encode/transmit threads, and pre-allocates buffers and object pools.
        /// </summary>
        /// <param name="config">Media configuration; supplies the local SSRC id and the codec factory.</param>
        /// <param name="playedAudioFormat">Format of the audio being played out; sizes the silence and decode buffers.</param>
        /// <param name="mediaStats">Statistics sink shared by the logger, jitter queue and audio statistics.</param>
        /// <param name="mediaEnvironment">Environment information for media processing.</param>
        /// <param name="mediaConnection">Transport; this controller installs its audio/video packet handlers on it.</param>
        /// <param name="videoQualityController">Quality controller; receives the remote-session map.</param>
        public MediaController(MediaConfig config, AudioFormat playedAudioFormat, MediaStatistics mediaStats, IMediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController)
        {
            // Initialize the class variables.
            _mediaEnvironment                  = mediaEnvironment;
            MediaConfig                        = config;
            MediaStats                         = mediaStats;
            MediaConnection                    = mediaConnection;
            VideoQualityController             = videoQualityController;
            // Route incoming packets from the connection into this controller.
            MediaConnection.AudioPacketHandler = HandleAudioPacket;
            MediaConnection.VideoPacketHandler = HandleVideoPacket;

            Logger         = new MediaControllerLogger(VideoQualityController, MediaStats);
            _localSsrcId   = config.LocalSsrcId;
            RemoteSessions = new Dictionary <ushort, VideoThreadData>();
            VideoQualityController.RemoteSessions = RemoteSessions;
            PlayedAudioFormat = playedAudioFormat;

            _silentBytes        = new byte[PlayedAudioFormat.BytesPerFrame];
            _decodedFrameBuffer = new short[PlayedAudioFormat.SamplesPerFrame * 10];             // Make room for 10 frames.

            _codecFactory = config.CodecFactory;
            _videoEncoder = _codecFactory.GetVideoEncoder(VideoQualityController, MediaStats);

            // Instantiate the audio jitter class
            _audioJitter = new AudioJitterQueue(_codecFactory, VideoQualityController, MediaStats);
            _audioJitter.CodecTypeChanged += audioJitter_CodecTypeChanged;

            _audioDecodeBuffer = new byte[VideoConstants.MaxPayloadSize];
            _audioSendBuffer   = new ByteStream(RtpPacketData.DataPacketMaxLength);

            // Spin up the various audio and video encoding threads.
            // On multiprocessor machines, these can spread the load, but even on single-processor machines it helps a great deal
            // if the various audio and video sinks can return immediately.
            // NOTE(review): the threads start before the object pools below exist —
            // presumably TransmitAudio/TransmitVideo block on the reset events before
            // touching them; confirm against those methods.
            _audioEncodeResetEvent  = new ManualResetEvent(false);
            _audioEncodeThread      = new Thread(TransmitAudio);
            _audioEncodeThread.Name = "MediaController.TransmitAudio";
            _audioEncodeThread.Start();
            _videoEncodeResetEvent    = new ManualResetEvent(false);
            _videoTransmitThread      = new Thread(TransmitVideo);
            _videoTransmitThread.Name = "MediaController.TransmitVideo";
            _videoTransmitThread.Start();

            // Create the object pools that will help us reduce time spent in garbage collection.
            _videoBufferPool  = new ObjectPool <ByteStream>(() => new ByteStream(VideoConstants.MaxPayloadSize * 2), bs => bs.Reset());
            _packetBufferPool = new ObjectPool <ByteStream>(() => new ByteStream(RtpPacketData.DataPacketMaxLength), bs => bs.Reset());
            _videoChunkPool   = new ObjectPool <Chunk>(() => new Chunk {
                Payload = new ByteStream(VideoConstants.MaxPayloadSize * 2)
            }, chunk => { chunk.SsrcId = 0; chunk.Payload.Reset(); });

            // Volume statistics exposed for display/monitoring.
            AudioStats = new ObservableCollection <AudioStatistics>();

            _speakerStatistics    = new AudioStatistics("Volume:Sent to Speaker", MediaStats);
            _microphoneStatistics = new AudioStatistics("Volume:Received from Microphone", MediaStats);
            _cancelledStatistics  = new AudioStatistics("Volume:Echo Cancelled", MediaStats);

            AudioStats.Add(_speakerStatistics);
            AudioStats.Add(_microphoneStatistics);
            AudioStats.Add(_cancelledStatistics);
        }
コード例 #5
0
 /// <summary>
 /// Creates a media stream source for the video of a single remote session,
 /// identified by its SSRC id, at the given frame dimensions.
 /// </summary>
 public VideoMediaStreamSource(IVideoController videoController, IVideoQualityController videoQualityController, ushort ssrcId, int frameWidth, int frameHeight)
 {
     // Store collaborators and frame geometry; no stream setup happens here.
     _videoController        = videoController;
     _videoQualityController = videoQualityController;
     _ssrcId      = ssrcId;
     _frameWidth  = frameWidth;
     _frameHeight = frameHeight;
 }
コード例 #6
0
 /// <summary>
 /// Captures everything a codec test run needs: the quality controller, the codec
 /// under test, the raw input frames, the expected processed frames, and the chunk
 /// pool used during processing. Also precomputes the total raw byte count.
 /// </summary>
 /// <param name="videoQualityController">Quality controller used by the run.</param>
 /// <param name="videoCodec">Codec under test.</param>
 /// <param name="rawFrames">Raw input frames; may be empty.</param>
 /// <param name="processedFrames">Frames after processing.</param>
 /// <param name="videoChunkPool">Pool of reusable byte streams for chunking.</param>
 public TestInstance(IVideoQualityController videoQualityController, IVideoCodec videoCodec, List <byte[]> rawFrames, List <byte[]> processedFrames, ObjectPool <ByteStream> videoChunkPool)
 {
     VideoQualityController = videoQualityController;
     VideoCodec      = videoCodec;
     RawFrames       = rawFrames;
     ProcessedFrames = processedFrames;
     mVideoChunkPool = videoChunkPool;

     // Sum the actual frame lengths instead of Count * rawFrames[0].Length:
     // the old form threw ArgumentOutOfRangeException on an empty list and
     // silently assumed every frame had the same size.
     var totalRawBytes = 0;
     foreach (var frame in rawFrames)
     {
         totalRawBytes += frame.Length;
     }
     RawSize = totalRawBytes;
 }
コード例 #7
0
ファイル: ParsedFrame.cs プロジェクト: forestrf/AlantaMedia
 /// <summary>
 /// Wraps one raw image buffer as a frame: records its geometry and frame number,
 /// decides whether it is a keyframe, and parses the image into frame blocks.
 /// </summary>
 /// <param name="videoQualityController">Supplies FullFrameInterval for keyframe selection.</param>
 /// <param name="image">Raw image bytes passed to ParseImage.</param>
 /// <param name="height">Frame height, forwarded to the base class.</param>
 /// <param name="width">Frame width, forwarded to the base class.</param>
 /// <param name="frameNumber">Sequence number of this frame.</param>
 /// <param name="stride">Row stride of the image buffer in bytes.</param>
 /// <param name="frameBlockPool">Pool providing reusable FrameBlock instances.</param>
 public ParsedFrame(IVideoQualityController videoQualityController, byte[] image, int height, int width, int frameNumber, int stride, IObjectPool <FrameBlock> frameBlockPool)
     : base(height, width, frameBlockPool)
 {
     this.videoQualityController = videoQualityController;
     this.frameBlockPool         = frameBlockPool;
     this.stride = stride;
     FrameNumber = frameNumber;
     // Every FullFrameInterval-th frame is a full frame (I-frame).
     // NOTE(review): throws DivideByZeroException if FullFrameInterval is 0 —
     // presumably the controller guarantees a positive value; confirm.
     IsIFrame    = FrameNumber % videoQualityController.FullFrameInterval == 0;
     ParseImage(image);
 }
コード例 #8
0
 /// <summary>
 /// Builds a jitter queue for incoming audio packets, defaulting the decoder to Speex.
 /// </summary>
 /// <param name="codecFactory">Factory used to obtain audio decoders.</param>
 /// <param name="videoQualityController">Quality controller shared with the rest of the pipeline.</param>
 /// <param name="mediaStatistics">Optional statistics sink passed to the queue logger.</param>
 public AudioJitterQueue(ICodecFactory codecFactory, IVideoQualityController videoQualityController, MediaStatistics mediaStatistics = null)
 {
     _codecFactory           = codecFactory;
     _videoQualityController = videoQualityController;

     // Start with Speex; the decoder can be swapped later when the codec type changes.
     AudioDecoder = codecFactory.GetAudioDecoder(AudioCodecType.Speex);

     _queue     = new PriorityQueue<AudioJitterQueueEntry>();
     _logger    = new AudioJitterQueueLogger(mediaStatistics);
     _entryPool = new ObjectPool<AudioJitterQueueEntry>(() => new AudioJitterQueueEntry());

     SetDefaults();
 }
コード例 #9
0
 /// <summary>
 /// Factory that carries the shared collaborators needed to build media sinks.
 /// </summary>
 public MediaSinkFactory(IAudioController audioController,
                         IVideoController videoController,
                         MediaConfig mediaConfig,
                         IMediaEnvironment mediaEnvironment,
                         IVideoQualityController videoQualityController)
 {
     // Pure capture of the injected collaborators; no work is performed here.
     _mediaConfig            = mediaConfig;
     _mediaEnvironment       = mediaEnvironment;
     _audioController        = audioController;
     _videoController        = videoController;
     _videoQualityController = videoQualityController;
 }
コード例 #10
0
 /// <summary>
 /// Logger for MediaController metrics. Counters are registered only when a
 /// MediaStatistics instance is supplied; otherwise nothing is tracked.
 /// </summary>
 /// <param name="videoQualityController">Quality controller referenced while logging.</param>
 /// <param name="mediaStatistics">Optional statistics sink; null disables all counters.</param>
 public MediaControllerLogger(IVideoQualityController videoQualityController,
                              MediaStatistics mediaStatistics = null)
 {
     _videoQualityController = videoQualityController;

     if (mediaStatistics == null)
     {
         return;
     }

     // Encoded video throughput.
     _videoKbpsCounter = mediaStatistics.RegisterCounter("Video:KbpsEncoded");
     _videoKbpsCounter.AxisMinimum = 0;

     // Duplicate-packet tracking starts disabled.
     _duplicateSequenceNumbers = mediaStatistics.RegisterCounter("Audio:Duplicate SequenceNumbers");
     _duplicateSequenceNumbers.IsActive = false;

     // Audio rates; MinimumDelta of 2 suppresses insignificant fluctuations.
     _recordingRateCounter = mediaStatistics.RegisterCounter("Audio:RecordingRate");
     _recordingRateCounter.MinimumDelta = 2;
     _recordingRateCounter.IsActive = true;

     _playingRateCounter = mediaStatistics.RegisterCounter("Audio:PlayingRate");
     _playingRateCounter.MinimumDelta = 2;
     _playingRateCounter.IsActive = true;
 }
コード例 #11
0
 /// <summary>
 /// View model that owns a MediaController for a single room and tracks its KPIs.
 /// </summary>
 public MediaServerViewModel(MediaConfig mediaConfig, AudioFormat audioFormat, MediaStatistics mediaStatistics, MediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController, string roomId)
 {
     RoomId          = roomId;
     MediaServerKpis = new ObservableCollection<MediaServerKpi>();
     MediaController = new MediaController(mediaConfig, audioFormat, mediaStatistics, mediaEnvironment, mediaConnection, videoQualityController);

     // Observe the controller's statistics counters as they are added/removed.
     MediaController.MediaStats.Counters.CollectionChanged += Counters_CollectionChanged;
 }
コード例 #12
0
 /// <summary>
 /// MediaController variant that always uses AudioFormat.Default and adds no
 /// behavior beyond the base constructor.
 /// </summary>
 public SourceMediaController(MediaConfig mediaConfig, MediaStatistics mediaStatistics, MediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController) :
     base(mediaConfig, AudioFormat.Default, mediaStatistics, mediaEnvironment, mediaConnection, videoQualityController)
 {
 }
コード例 #13
0
 /// <summary>
 /// Adapts a CaptureSource's video output for delivery to the video controller.
 /// </summary>
 public VideoSinkAdapter(CaptureSource captureSource, IVideoController mediaController, IVideoQualityController videoQualityController)
 {
     // Store collaborators only; capture itself is not started here.
     _videoQualityController = videoQualityController;
     _mediaController        = mediaController;
     CaptureSource           = captureSource;
 }
コード例 #14
0
        // ReSharper restore NotAccessedField.Local

        /// <summary>
        /// MediaController variant that drives audioTimer_Tick from a 20 ms timer.
        /// Uses AudioFormat.Default for the base constructor.
        /// </summary>
        public DestinationMediaController(MediaConfig mediaConfig, MediaStatistics mediaStatistics, MediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController) :
            base(mediaConfig, AudioFormat.Default, mediaStatistics, mediaEnvironment, mediaConnection, videoQualityController)
        {
            // Invoke audioTimer_Tick every 20 ms, starting after an initial 20 ms delay.
            _audioTimer = new Timer(audioTimer_Tick, null, 20, 20);
        }
コード例 #15
0
 /// <summary>
 /// Creates a player that renders onto the given MediaElement, pacing frames at the
 /// quality controller's accepted frames-per-second rate.
 /// </summary>
 public Player(MediaElement mediaElement, IVideoQualityController videoQualityController)
 {
     mMediaElement = mediaElement;

     // Convert frames-per-second into the interval between successive frames.
     // NOTE(review): assumes AcceptFramesPerSecond is positive; confirm.
     double millisecondsPerFrame = 1000.0 / videoQualityController.AcceptFramesPerSecond;
     timeBetweenFrames = TimeSpan.FromMilliseconds(millisecondsPerFrame);
 }
コード例 #16
0
 /// <summary>
 /// Stores the chunk-size limit, FrameBlock pool and quality controller used by
 /// the chunking logic elsewhere in this class.
 /// </summary>
 public ChunkHelper(short maxChunkSize, IObjectPool <FrameBlock> frameBlockPool, IVideoQualityController videoQualityController)
 {
     // Simple capture of configuration and collaborators.
     _videoQualityController = videoQualityController;
     _frameBlockPool         = frameBlockPool;
     _maxChunkSize           = maxChunkSize;
 }