Ejemplo n.º 1
0
        /// <summary>
        /// Concatenates all configured sources into a single output file.
        /// Opens the output I/O context and muxer, creates output streams
        /// modeled on the first source, writes the container header, remuxes
        /// every source in order, then writes the trailer.
        /// </summary>
        public void Build()
        {
            ValidateArguments();

            // Open the output I/O context and the muxer that writes into it.
            // Both are disposed (flushed/closed) when this method returns.
            using var output = new IOContext(OutputPath, ffmpeg.AVIO_FLAG_WRITE);
            using var muxer  = new MediaMuxer(output, OutputExtension, OutputMimeType);

            // Output streams are created from the first source's parameters;
            // subsequent sources are assumed to have compatible streams.
            var firstSource = _sources.First();

            AddOutputStreams(firstSource, muxer, out var vidOutStream, out var audioOutStream, out var streamMap);

            // Write container headers before any packets.
            muxer.WriteHeader();

            // Bundle everything each per-file pass needs.
            var context = new ConcatContext
            {
                Output         = output,
                Muxer          = muxer,
                VideoOutStream = vidOutStream,
                AudioOutStream = audioOutStream,
                StreamMap      = streamMap,
            };

            foreach (var source in _sources)
            {
                ConcatFile(source, context);
            }

            // Finalize the container.
            muxer.WriteTrailer();
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Remuxes one source file's audio and video packets into the output,
        /// interleaving the two elementary streams by their last-written dts.
        /// Two independent decoders are opened on the same source so that each
        /// stream can be read at its own pace.
        /// </summary>
        /// <param name="source">Location of the media file to append.</param>
        /// <param name="context">Shared muxer, output streams, and stream-index map.</param>
        private void ConcatFile(Uri source, ConcatContext context)
        {
            using var videoDecoder = new MediaDecoder(source, HardwareDevice);
            using var audioDecoder = new MediaDecoder(source, HardwareDevice);
            // SelectStream returns false when the source has no stream of that type.
            var hasVideo = new Reference <bool>(videoDecoder.SelectStream(AVMediaType.AVMEDIA_TYPE_VIDEO));
            var hasAudio = new Reference <bool>(audioDecoder.SelectStream(AVMediaType.AVMEDIA_TYPE_AUDIO));
            // Last dts written for each stream, in that output stream's time base
            // (packets are rescaled before these are captured, below).
            var audioDts = new Reference <long>();
            var videoDts = new Reference <long>();

            // Loop until both streams are exhausted. Relies on Reference<bool>'s
            // implicit conversion to bool in the condition.
            while (hasVideo || hasAudio)
            {
                // Pick which stream to pull next: the one that is furthest
                // behind by dts, or whichever still has data.
                // NOTE(review): audioDts and videoDts are in different output
                // time bases after rescaling, so this comparison mixes units —
                // confirm the two output streams share a time base, or that
                // approximate interleaving is acceptable here.
                MediaStreamType type;
                if (!hasAudio)
                {
                    type = MediaStreamType.Video;
                }
                else if (!hasVideo)
                {
                    type = MediaStreamType.Audio;
                }
                else
                {
                    type = audioDts.Value < videoDts.Value ? MediaStreamType.Audio : MediaStreamType.Video;
                }

                // Alias the per-stream state for the chosen type. `dts` and `has`
                // are Reference<T> cells, so writes below update the loop state.
                AVStream *       outStream;
                MediaDecoder     decoder;
                Reference <long> dts;
                Reference <bool> has;
                switch (type)
                {
                case MediaStreamType.Audio:
                    outStream = context.AudioOutStream;
                    decoder   = audioDecoder;
                    dts       = audioDts;
                    has       = hasAudio;
                    break;

                case MediaStreamType.Video:
                    outStream = context.VideoOutStream;
                    decoder   = videoDecoder;
                    dts       = videoDts;
                    has       = hasVideo;
                    break;

                default:
                    throw new NotSupportedException();
                }

                using var packet = decoder.ReadPacket();
                if (packet == null)
                {
                    // Stream exhausted; mark it done and let the other drain.
                    has.Value = false;
                    continue;
                }
                var pck = packet.Pointer;
                // Convert timestamps from the decoder's time base to the
                // output stream's, then retarget the packet at the mapped
                // output stream index before muxing.
                ffmpeg.av_packet_rescale_ts(pck, decoder.TimeBase, outStream->time_base);
                pck->stream_index = context.StreamMap[type];
                dts.Value         = pck->dts;

                context.Muxer.WritePacket(pck);
            }
        }