/// <summary>
/// Wires a Nano streaming client to FFmpeg-based audio/video decoding.
/// Captures the reference timestamps and frame ids from the client's
/// current audio/video channels, initializes one decoder context per
/// stream, and routes every decoded sample/frame into the public queues.
/// </summary>
/// <param name="nano">Connected Nano client supplying the streams.</param>
/// <param name="audioFormat">Negotiated audio format for the decoder.</param>
/// <param name="videoFormat">Negotiated video format for the decoder.</param>
public FFmpegDecoder(NanoClient nano, AudioFormat audioFormat, VideoFormat videoFormat)
{
    _nano = nano;
    _audioFormat = audioFormat;
    _videoFormat = videoFormat;

    // Reassemble fragmented packets back into whole samples/frames.
    _audioAssembler = new AudioAssembler();
    _videoAssembler = new VideoAssembler();

    // Snapshot stream baselines so later timestamps/ids are relative.
    _audioRefTimestamp = _nano.Audio.ReferenceTimestamp;
    _videoRefTimestamp = _nano.Video.ReferenceTimestamp;
    _audioFrameId = _nano.Audio.FrameId;
    _videoFrameId = _nano.Video.FrameId;

    // One FFmpeg handler per stream, each with its own decoder context.
    _audioHandler = new FFmpegAudio();
    _videoHandler = new FFmpegVideo();
    _audioHandler.Initialize(_audioFormat);
    _videoHandler.Initialize(_videoFormat);
    _audioHandler.CreateDecoderContext();
    _videoHandler.CreateDecoderContext();

    DecodedAudioQueue = new Queue<PCMSample>();
    DecodedVideoQueue = new Queue<YUVFrame>();

    // Decoded output flows straight into the queues consumers drain.
    _audioHandler.SampleDecoded += DecodedAudioQueue.Enqueue;
    _videoHandler.FrameDecoded += DecodedVideoQueue.Enqueue;
}
/// <summary>
/// Builds AVC video and AAC audio decoders sized from the formats the
/// SmartGlass connection negotiated, plus the assemblers that piece
/// fragmented stream packets back together before decoding.
/// </summary>
public MediaCoreConsumer()
{
    // Video: decoder dimensions come from the negotiated video format.
    VideoFormat vidFmt = SmartGlassConnection.Instance.VideoFormat;
    _video = new VideoDecoder(MediaFormat.MimetypeVideoAvc, (int)vidFmt.Width, (int)vidFmt.Height);
    _videoAssembler = new VideoAssembler();

    // Audio: format is kept in a field (unlike video's local) — presumably
    // referenced again after construction; NOTE(review): confirm usage.
    _audioFormat = SmartGlassConnection.Instance.AudioFormat;
    _audio = new AudioDecoder(MediaFormat.MimetypeAudioAac, (int)_audioFormat.SampleRate, (int)_audioFormat.Channels);
    _audioAssembler = new AudioAssembler();
}