private void AddOutputStreams(
    Uri source,
    MediaMuxer muxer,
    out AVStream* vidOutStream,
    out AVStream* audioOutStream,
    out IDictionary<MediaStreamType, int> streamMap)
{
    vidOutStream = audioOutStream = null;

    // Open the source twice so the audio and video streams can be probed independently.
    using var audioDecoder = new MediaDecoder(source, HardwareDevice);
    using var videoDecoder = new MediaDecoder(source, HardwareDevice);
    var hasAudio = audioDecoder.SelectStream(AVMediaType.AVMEDIA_TYPE_AUDIO);
    var hasVideo = videoDecoder.SelectStream(AVMediaType.AVMEDIA_TYPE_VIDEO);

    var streamIndex = 0;
    streamMap = new Dictionary<MediaStreamType, int>();

    var decoders = new List<MediaDecoder>();
    if (hasVideo)
    {
        decoders.Add(videoDecoder);
    }

    if (hasAudio)
    {
        decoders.Add(audioDecoder);
    }

    foreach (var decoder in decoders)
    {
        // Create an output stream that uses the same codec as the corresponding input stream.
        var targetCodec = decoder.CodecId;
        var encoder = ffmpeg.avcodec_find_encoder(targetCodec);
        var stream = muxer.AddStream(encoder);

        if (decoder == videoDecoder)
        {
            vidOutStream = stream;
            streamMap.Add(MediaStreamType.Video, streamIndex);
        }
        else if (decoder == audioDecoder)
        {
            audioOutStream = stream;
            streamMap.Add(MediaStreamType.Audio, streamIndex);
        }
        else
        {
            throw new NotImplementedException("Unknown decoder type.");
        }

        streamIndex++;

        // Copy the codec parameters from the input so the packets can be remuxed
        // without re-encoding, and clear the codec tag to let the muxer pick its own.
        var param = stream->codecpar;
        ffmpeg.avcodec_parameters_from_context(param, decoder.GetStream()->codec).ThrowOnError();
        stream->codecpar->codec_tag = 0;
    }
}
private Dictionary<MediaStreamType, MediaDecoder> MakeDecoders()
{
    var decoders = new Dictionary<MediaStreamType, MediaDecoder>();
    foreach (var source in _sources.Values)
    {
        // One decoder per source, bound to the stream type that source provides.
        var decoder = new MediaDecoder(source.Path, HardwareDevice);
        switch (source.StreamType)
        {
            case MediaStreamType.Audio:
                decoder.SelectStream(AVMediaType.AVMEDIA_TYPE_AUDIO);
                break;
            case MediaStreamType.Video:
                decoder.SelectStream(AVMediaType.AVMEDIA_TYPE_VIDEO);
                break;
            default:
                throw new NotSupportedException($"Media stream type of {source.StreamType} is not supported.");
        }

        decoders.Add(source.StreamType, decoder);
    }

    return decoders;
}
private void ConcatFile(Uri source, ConcatContext context)
{
    using var videoDecoder = new MediaDecoder(source, HardwareDevice);
    using var audioDecoder = new MediaDecoder(source, HardwareDevice);
    var hasVideo = new Reference<bool>(videoDecoder.SelectStream(AVMediaType.AVMEDIA_TYPE_VIDEO));
    var hasAudio = new Reference<bool>(audioDecoder.SelectStream(AVMediaType.AVMEDIA_TYPE_AUDIO));
    var audioDts = new Reference<long>();
    var videoDts = new Reference<long>();

    // Interleave the two elementary streams by always writing the packet with the
    // smaller decode timestamp next, until both decoders run out of packets.
    while (hasVideo || hasAudio)
    {
        MediaStreamType type;
        if (!hasAudio)
        {
            type = MediaStreamType.Video;
        }
        else if (!hasVideo)
        {
            type = MediaStreamType.Audio;
        }
        else
        {
            type = audioDts.Value < videoDts.Value ? MediaStreamType.Audio : MediaStreamType.Video;
        }

        AVStream* outStream;
        MediaDecoder decoder;
        Reference<long> dts;
        Reference<bool> has;
        switch (type)
        {
            case MediaStreamType.Audio:
                outStream = context.AudioOutStream;
                decoder = audioDecoder;
                dts = audioDts;
                has = hasAudio;
                break;
            case MediaStreamType.Video:
                outStream = context.VideoOutStream;
                decoder = videoDecoder;
                dts = videoDts;
                has = hasVideo;
                break;
            default:
                throw new NotSupportedException();
        }

        using var packet = decoder.ReadPacket();
        if (packet == null)
        {
            // This decoder is exhausted; stop considering its stream.
            has.Value = false;
            continue;
        }

        // Rescale the timestamps into the output stream's time base, point the packet
        // at the mapped output stream index, and hand it to the muxer.
        var pck = packet.Pointer;
        ffmpeg.av_packet_rescale_ts(pck, decoder.TimeBase, outStream->time_base);
        pck->stream_index = context.StreamMap[type];
        dts.Value = pck->dts;
        context.Muxer.WritePacket(pck);
    }
}
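// The sketch below is not part of the original source: it only illustrates how the helpers
// above could be composed into a full concat pass, assuming the output streams are created
// once from the first input and every source is then remuxed in order. The MediaMuxer
// constructor, the WriteHeader/WriteTrailer calls, and the ConcatContext object initializer
// are assumptions about the surrounding library, inferred from how those types are used in
// AddOutputStreams and ConcatFile.
private unsafe void ConcatAll(IReadOnlyList<Uri> sources, string outputPath)
{
    using var muxer = new MediaMuxer(outputPath); // assumed constructor

    // Probe the first source to create the output streams and the stream index map.
    AddOutputStreams(sources[0], muxer, out var vidOut, out var audioOut, out var streamMap);

    muxer.WriteHeader(); // assumed API

    var context = new ConcatContext // shape inferred from ConcatFile; initializer is assumed
    {
        Muxer = muxer,
        VideoOutStream = vidOut,
        AudioOutStream = audioOut,
        StreamMap = streamMap,
    };

    // Remux each source in turn into the shared output streams.
    foreach (var source in sources)
    {
        ConcatFile(source, context);
    }

    muxer.WriteTrailer(); // assumed API
}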