Example #1
        public FFmpegDecoder(NanoClient nano, AudioFormat audioFormat, VideoFormat videoFormat)
        {
            _nano        = nano;
            _audioFormat = audioFormat;
            _videoFormat = videoFormat;

            _audioAssembler = new AudioAssembler();
            _videoAssembler = new VideoAssembler();

            _audioRefTimestamp = _nano.Audio.ReferenceTimestamp;
            _videoRefTimestamp = _nano.Video.ReferenceTimestamp;

            _audioFrameId = _nano.Audio.FrameId;
            _videoFrameId = _nano.Video.FrameId;

            _audioHandler = new FFmpegAudio();
            _videoHandler = new FFmpegVideo();

            _audioHandler.Initialize(_audioFormat);
            _videoHandler.Initialize(_videoFormat);
            _audioHandler.CreateDecoderContext();
            _videoHandler.CreateDecoderContext();

            DecodedAudioQueue = new Queue<PCMSample>();
            DecodedVideoQueue = new Queue<YUVFrame>();

            // Register queues for decoded video frames / audio samples
            _audioHandler.SampleDecoded += DecodedAudioQueue.Enqueue;
            _videoHandler.FrameDecoded  += DecodedVideoQueue.Enqueue;
        }
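The constructor wires the FFmpeg handlers' decoded-output events into two plain queues. A minimal sketch of draining them from a playback loop follows; the PlaybackLoop method, the running flag and the renderer/audioSink objects are placeholders, not part of the library:

        // Hypothetical playback loop draining the queues filled via SampleDecoded /
        // FrameDecoded; 'running', 'renderer' and 'audioSink' are placeholders.
        public void PlaybackLoop()
        {
            while (running)
            {
                if (DecodedVideoQueue.Count > 0)
                {
                    YUVFrame frame = DecodedVideoQueue.Dequeue();
                    renderer.RenderFrame(frame);   // raw YUV planes to the video output
                }

                if (DecodedAudioQueue.Count > 0)
                {
                    PCMSample sample = DecodedAudioQueue.Dequeue();
                    audioSink.PlaySample(sample);  // raw PCM data to the audio output
                }
            }
        }

Because SampleDecoded and FrameDecoded may fire on a decoder thread, a real consumer would guard the queues with a lock or swap in a ConcurrentQueue<T>.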
Example #2
        public MediaCoreConsumer()
        {
            VideoFormat videoFormat = SmartGlassConnection.Instance.VideoFormat;

            _video = new VideoDecoder(MediaFormat.MimetypeVideoAvc,
                                      (int)videoFormat.Width, (int)videoFormat.Height);
            _videoAssembler = new VideoAssembler();

            _audioFormat = SmartGlassConnection.Instance.AudioFormat;
            _audio       = new AudioDecoder(MediaFormat.MimetypeAudioAac,
                                            (int)_audioFormat.SampleRate, (int)_audioFormat.Channels);
            _audioAssembler = new AudioAssembler();
        }
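MediaCoreConsumer pairs the assemblers with Android MediaCodec-based decoders. A minimal sketch of how an incoming video packet might flow through them; AssembleVideoFrame mirrors the audio call shown in Example #3, while the frame type, RawData member and DecodeData method are assumptions used for illustration:

        // Hypothetical consumer callback: reassemble the packet, then feed the
        // H.264 data to the MediaCodec-backed VideoDecoder (member names assumed)
        public void ConsumeVideoData(Packets.VideoData data)
        {
            var frame = _videoAssembler.AssembleVideoFrame(data);
            if (frame != null)
            {
                _video.DecodeData(frame.RawData);
            }
        }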
Example #3
        public void ConsumeAudioData(Packets.AudioData data)
        {
            // Reassemble the raw packet into an AAC LC frame matching the negotiated audio format
            AACFrame frame = AudioAssembler.AssembleAudioFrame(
                data,
                AACProfile.LC,
                (int)_audioFormat.SampleRate,
                (byte)_audioFormat.Channels);

            if (_audioCodec != null)
            {
                _audioFrameQueue.Enqueue(frame);
            }
        }
        /* Called by NanoClient on freshly received data */
        public void ConsumeAudioData(object sender, AudioDataEventArgs args)
        {
            // TODO: Sorting
            AACFrame frame = AudioAssembler.AssembleAudioFrame(
                data: args.AudioData,
                profile: AACProfile.LC,
                samplingFreq: (int)_audioFormat.SampleRate,
                channels: (byte)_audioFormat.Channels);

            if (frame == null)
            {
                return;
            }

            // Initialize the decoder with the codec-specific data from the first assembled frame
            if (!_audioContextInitialized)
            {
                _audioHandler.UpdateCodecParameters(frame.GetCodecSpecificData());
                _audioContextInitialized = true;
            }

            // Enqueue encoded audio data in decoder
            _audioHandler.PushData(frame);
        }
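The second overload is an event handler rather than a direct consumer call. A minimal sketch of hooking it up, assuming the NanoClient exposes an audio-data event with matching AudioDataEventArgs (the event name used here is an assumption):

        // Hypothetical wiring (event name assumed): attach the handler so freshly
        // received audio packets reach the decoder, and detach again on shutdown
        public void Start(NanoClient nano)
        {
            nano.AudioDataAvailable += ConsumeAudioData;
        }

        public void Stop(NanoClient nano)
        {
            nano.AudioDataAvailable -= ConsumeAudioData;
        }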