Example #1
        internal override void UpdateFromNative()
        {
            int sampleCount = frame->NbSamples;

            Resize(sampleCount);
            FF.av_samples_copy(datas, (IntPtr *)frame->ExtendedData, 0, 0, sampleCount, format.Channels, format.SampleFormat);
        }
Example #2
 protected override void Setup()
 {
     frame->NbSamples    = sampleCount;
     frame->Format       = (int)format.SampleFormat;
     frame->ExtendedData = (byte **)&frame->Data;
     FF.av_samples_fill_arrays(frame->ExtendedData, frame->Linesize, (byte *)cache, format.Channels, sampleCount, format.SampleFormat, 1);
 }
Example #3
        public static void Merge(AudioFrame outFrame, params AudioFrame[] inFrames)
        {
            if (inFrames.Length == 0)
            {
                return;
            }
            var format = inFrames[0].format;

            for (int i = 1; i < inFrames.Length; i++)
            {
                if (format != inFrames[i].format)
                {
                    throw new ArgumentException($"{nameof(inFrames)}的所有元素的{nameof(Format)}必须一致", nameof(inFrames));
                }
            }

            int outSampleCount = inFrames.Sum(frame => frame.sampleCount);

            outFrame.format = format;
            outFrame.Resize(outSampleCount);

            int offset = 0;

            for (int i = 0; i < inFrames.Length; i++)
            {
                FF.av_samples_copy(outFrame.datas, inFrames[i].datas, offset, 0, inFrames[i].sampleCount, format.Channels, format.SampleFormat);
                offset += inFrames[i].sampleCount;
            }
        }
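The key detail in Merge is the running offset: each input frame is copied immediately behind the previous one by advancing the destination sample offset. A minimal managed-array sketch of the same pattern (illustration only, not the library's API; the ConcatSketch name is hypothetical, and av_samples_copy additionally repeats the copy for every channel plane):

using System;
using System.Linq;

static class ConcatSketch
{
    // Concatenates blocks the way Merge() advances `offset` after each copy.
    public static float[] Concat(params float[][] blocks)
    {
        var result = new float[blocks.Sum(b => b.Length)];
        int offset = 0;
        foreach (var block in blocks)
        {
            Array.Copy(block, 0, result, offset, block.Length);
            offset += block.Length;   // next block starts right after this one
        }
        return result;
    }
}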
Example #4
        public MediaStream(Stream baseStream, bool write = false, AVOutputFormat *outputFormat = null)
        {
            if (write && !baseStream.CanWrite)
            {
                throw new ArgumentException($"流不能被写入,请确保Stream.CanWrite为true");
            }

            if (baseStream.CanRead)
            {
                procRead = Read;
            }
            if (write && baseStream.CanWrite)
            {
                procWrite = Write;
            }
            if (baseStream.CanSeek)
            {
                procSeek = Seek;
            }
            this.baseStream = baseStream;

            try {
                formatContext = FF.avformat_alloc_context();
                var buffer = (byte *)FF.av_malloc((IntPtr)bufferLength);
                ioContext = FF.avio_alloc_context(buffer, bufferLength, write, null, procRead, procWrite, procSeek);
                if (write)
                {
                    formatContext->Oformat = outputFormat;
                }
                formatContext->Pb = ioContext;
            } catch {
                Dispose();
                throw;
            }
        }
Example #5
        unsafe public VideoFormat(int width, int height, AVPixelFormat pixelFormat, int align = 8)
        {
            if (!align.IsPowOf2())
            {
                throw new ArgumentException($"对齐数必须是2的整数幂", nameof(align));
            }

            Width       = width;
            Height      = height;
            PixelFormat = pixelFormat;
            Align       = align;
            Bytes       = FF.av_image_get_buffer_size(pixelFormat, width, height, align).CheckFFmpegCode();
            PlaneCount  = FF.av_pix_fmt_count_planes(pixelFormat).CheckFFmpegCode();

            //--- Based on av_image_fill_arrays in imgutils.c
            var stridesTmp = stackalloc int[4];

            FF.av_image_fill_linesizes(stridesTmp, pixelFormat, width).CheckFFmpegCode();

            strides = new int[PlaneCount];
            for (int i = 0; i < PlaneCount; i++)
            {
                strides[i] = (stridesTmp[i] + (align - 1)) & ~(align - 1);
            }
            //---
        }
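Two standard bit tricks carry this constructor: a positive value is a power of two when it has exactly one set bit, and rounding up to a multiple of a power-of-two alignment can be done with a mask, which is how the per-plane strides are padded above. A standalone sketch (AlignSketch and its methods are hypothetical names; the library's IsPowOf2 extension is assumed to behave like the check below):

static class AlignSketch
{
    // Equivalent to the assumed IsPowOf2 extension: true when exactly one bit is set.
    public static bool IsPowerOfTwo(int x) => x > 0 && (x & (x - 1)) == 0;

    // Rounds value up to the next multiple of align; align must be a power of two.
    public static int AlignUp(int value, int align) => (value + (align - 1)) & ~(align - 1);
}
// e.g. AlignUp(1918, 8) == 1920, matching the stride rounding in the loop above.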
Example #6
        unsafe public static string GetErrorString(int errorCode)
        {
            byte *buffer = stackalloc byte[Internal.Constant.AV_ERROR_MAX_STRING_SIZE];

            FF.av_strerror(errorCode, buffer, (IntPtr)Internal.Constant.AV_ERROR_MAX_STRING_SIZE);
            return(Marshal.PtrToStringAnsi((IntPtr)buffer));
        }
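GetErrorString relies on a general marshaling pattern: let a native call fill a stack-allocated byte buffer with a NUL-terminated ANSI string, then convert it with Marshal.PtrToStringAnsi. A self-contained sketch of just that step, with the native call replaced by a plain byte copy (AnsiBufferSketch and RoundTrip are hypothetical names, no FFmpeg involved):

using System;
using System.Runtime.InteropServices;
using System.Text;

static class AnsiBufferSketch
{
    public static unsafe string RoundTrip(string text)
    {
        byte *buffer = stackalloc byte[64];
        byte[] src   = Encoding.ASCII.GetBytes(text);
        int   count  = Math.Min(src.Length, 63);

        for (int i = 0; i < count; i++)
        {
            buffer[i] = src[i];
        }
        buffer[count] = 0;                               // NUL terminator, like av_strerror writes
        return(Marshal.PtrToStringAnsi((IntPtr)buffer)); // reads up to the first NUL
    }
}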
Example #7
        ///// <summary>
        ///// Creates a media writer in remuxing mode
        ///// </summary>
        ///// <param name="file"></param>
        ///// <param name="mediaReader"></param>
        //public MediaWriter(string file, MediaReader mediaReader)
        //	: base(File.Open(file, FileMode.Create, FileAccess.Write), true, FF.av_guess_format(null, file, null)) {
        //	outputFormat = formatContext->Oformat;
        //	remuxing = true;

        //	SetEncoders(mediaReader);
        //}

        ///// <summary>
        ///// Creates a media writer in remuxing mode
        ///// </summary>
        ///// <param name="outputStream"></param>
        ///// <param name="mediaName"></param>
        ///// <param name="mediaReader"></param>
        //public MediaWriter(Stream outputStream, string mediaName, MediaReader mediaReader)
        //	: base(outputStream, true, FF.av_guess_format(mediaName, null, null)) {
        //	outputFormat = formatContext->Oformat;
        //	remuxing = true;

        //	SetEncoders(mediaReader);
        //}

        //private void SetEncoders(MediaReader mediaReader) {
        //	try {
        //		foreach (var decoder in mediaReader.Decoders) {
        //			if (decoder != null) {
        //				var stream = FF.avformat_new_stream(formatContext, decoder.codec);
        //				if (stream == null) throw new InvalidOperationException("Unable to create stream");
        //				int result = FF.avcodec_copy_context(stream->Codec, decoder.codecContext);
        //				if (result < 0) throw new FFmpegException(result);
        //				stream->Codec->CodecTag = 0;
        //				if (outputFormat->Flags.HasFlag(AVFmt.GlobalHeader)) {
        //					stream->Codec->Flags |= AVCodecFlag.GlobalHeader;
        //				}
        //				stream->TimeBase = mediaReader.formatContext->Streams[decoder.StreamIndex]->TimeBase;
        //				//stream->TimeBase = decoder.codecContext->TimeBase;
        //				result = FF.avcodec_parameters_from_context(stream->Codecpar, stream->Codec);
        //				if (result < 0) throw new FFmpegException(result);
        //			}
        //		}
        //		inputFmtCtx = mediaReader.formatContext;
        //	} catch {
        //		Dispose();
        //		throw;
        //	}
        //}

        public MediaWriter AddAudio(AudioFormat format, BitRate bitRate)
        {
            if (readyEncoders != null)
            {
                throw new InvalidOperationException($"该{nameof(MediaWriter)}对象已经初始化");
            }
            if (outputFormat == null)
            {
                throw new InvalidOperationException("无法确定媒体的输出格式");
            }

            if (outputFormat->AudioCodec == AVCodecID.None)
            {
                throw new InvalidOperationException($"该{nameof(MediaWriter)}对象并不支持音频");
            }

            var stream = FF.avformat_new_stream(formatContext, Codec.GetEncoder(outputFormat->AudioCodec));

            if (stream == null)
            {
                throw new InvalidOperationException("无法创建流");
            }
            var codecContext = stream->Codec;
            var audioEncoder = new AudioEncoder(stream, format, bitRate);

            stream->TimeBase = codecContext->TimeBase;
            int result = FF.avcodec_parameters_from_context(stream->Codecpar, codecContext);

            if (result < 0)
            {
                throw new FFmpegException(result);
            }
            encoders.Add(audioEncoder);
            return(this);
        }
Example #8
 protected void InternalWrite(Packet packet)
 {
     if (packet.Size > 0)
     {
         FF.av_interleaved_write_frame(formatContext, packet.packet).CheckFFmpegCode();
     }
 }
Example #9
        public MediaWriter Initialize()
        {
            if (readyEncoders != null)
            {
                throw new InvalidOperationException($"该{nameof(MediaWriter)}对象已经初始化");
            }
            if (outputFormat == null)
            {
                throw new InvalidOperationException("无法确定媒体的输出格式");
            }

            int result = FF.avformat_write_header(formatContext, null);

            if (result < 0)
            {
                throw new FFmpegException(result, "写入头部错误");
            }

            readyEncoders = new List <Encoder>(encoders);
            fixedQueues   = new FixedAudioFrameQueue[encoders.Count];
            for (int i = 0; i < encoders.Count; i++)
            {
                if (encoders[i] is AudioEncoder audioEncoder)
                {
                    fixedQueues[i] = new FixedAudioFrameQueue(audioEncoder.RequestSamples);
                }
            }
            return(this);
        }
Example #10
        private void NewStreams(Codec[] codecs)
        {
            try {
                foreach (var codec in codecs)
                {
                    var stream = FF.avformat_new_stream(formatContext, codec.codec);
                    if (stream == null)
                    {
                        throw new InvalidOperationException("无法创建流");
                    }
                    FF.avcodec_copy_context(stream->Codec, codec.codecContext).CheckFFmpegCode();
                    stream->Codec->CodecTag = 0;
                    if (outputFormat->Flags.HasFlag(AVFmt.GlobalHeader))
                    {
                        stream->Codec->Flags |= AVCodecFlag.GlobalHeader;
                    }
                    stream->TimeBase = codec.codecContext->TimeBase;
                    FF.avcodec_parameters_from_context(stream->Codecpar, stream->Codec).CheckFFmpegCode();
                }

                FF.avformat_write_header(formatContext, null).CheckFFmpegCode();
            } catch {
                Dispose();
                throw;
            }
        }
Example #11
        public override bool Decode(Packet packet, Frame outFrame)
        {
            var videoFrame = outFrame as VideoFrame;

            if (videoFrame == null)
            {
                throw new ArgumentException($"{nameof(outFrame)}必须是{nameof(VideoFrame)}类型且不为null。");
            }

            int gotPicture = 0;

            FF.avcodec_decode_video2(codecContext, outFrame.frame, &gotPicture, packet.packet).CheckFFmpegCode("Video decoding failed");

            if (gotPicture == 0)
            {
                return(false);
            }

            if (stream != null)
            {
                outFrame.presentTimestamp = new Timestamp(outFrame.frame->Pts, stream->TimeBase);
            }
            videoFrame.pictureType = outFrame.frame->PictType;
            videoFrame.format      = InFormat;
            if (resampler != null)
            {
                resampler.InternalResample(videoFrame);
            }
            else
            {
                videoFrame.UpdateFromNative();
            }

            return(true);
        }
Example #12
        private static string GetErrorString(int errorCode)
        {
            StringBuilder buffer = new StringBuilder(1000);

            FF.av_strerror(errorCode, buffer, (IntPtr)1000);
            return(buffer.ToString());
        }
Example #13
 public void CopyToNoResize(int srcSampleOffset, int srcSampleCount, AudioFrame dstFrame, int dstSampleOffset = 0)
 {
     if (dstFrame.format != format)
     {
         throw new ArgumentException("目标帧格式不一致", nameof(dstFrame));
     }
     FF.av_samples_copy(dstFrame.datas, datas, dstSampleOffset, srcSampleOffset, srcSampleCount, format.Channels, format.SampleFormat);
 }
Example #14
 public void Flush()
 {
     if (!isFlush)
     {
         FF.av_write_trailer(formatContext).CheckFFmpegCode();
         isFlush = true;
     }
 }
Example #15
 protected override void Setup()
 {
     frame->ExtendedData = (byte **)(&frame->Data);
     frame->Width        = format.Width;
     frame->Height       = format.Height;
     frame->Format       = (int)format.PixelFormat;
     FF.av_image_fill_arrays(frame->ExtendedData, frame->Linesize, (byte *)cache, format.PixelFormat, format.Width, format.Height, format.Align);
 }
Example #16
        public void Reset()
        {
            int result = FF.swr_init(ctx);

            if (result != 0)
            {
                throw new FFmpegException(result);
            }
        }
Example #17
        protected override void Dispose(bool disposing)
        {
            if (ctx == null)
            {
                return;
            }

            FF.swr_free(ref ctx);
        }
Example #18
 public void Update(int sampleCount, params IntPtr[] newDatas)
 {
     if (newDatas.Length != format.LineCount)
     {
         throw new ArgumentException("参数个数和数据行数不一致", nameof(newDatas));
     }
     Resize(sampleCount);
     FF.av_samples_copy(datas, newDatas, 0, 0, sampleCount, format.Channels, format.SampleFormat);
 }
Example #19
 internal override void UpdateFromNative()
 {
     Resize();
     fixed(IntPtr *datas = this.datas)
     fixed(int *dstLinesize = format.strides)
     {
         FF.av_image_copy((byte **)datas, dstLinesize, frame->ExtendedData, frame->Linesize, format.PixelFormat, format.Width, format.Height);
     }
 }
Example #20
 public void Update(int sampleCount, IntPtr newData)
 {
     if (format.LineCount != 1)
     {
         throw new ArgumentException($"该{nameof(AudioFrame)}对象拥有大于1的数据行数,因此不能调用此方法", nameof(newData));
     }
     Resize(sampleCount);
     FF.av_samples_copy(datas, &newData, 0, 0, sampleCount, format.Channels, format.SampleFormat);
 }
Example #21
        static void Main(string[] args)
        {
            void *i = null;
            List <(string Name, string LongName, Ptr <AVOutputFormat>)> list = new List <(string Name, string LongName, Ptr <AVOutputFormat>)>();

            while (Ptr.Get(FF.av_muxer_iterate(&i), out var outFormat))
            {
                list.Add((outFormat->Debug_Name, outFormat->Debug_LongName, new Ptr <AVOutputFormat>(outFormat)));
            }
        }
Example #22
        /// <summary>
        /// 重采样
        /// </summary>
        /// <param name="inDatas">输入PCM数据缓冲区的数组。当此参数为<see cref="IntPtr.Zero"/>时,将输出重采样器中剩余的采样点。</param>
        /// <param name="inSampleCount">输入缓冲区存放的采样个数(多声道只计算一个)</param>
        /// <param name="outDatas">输出PCM数据缓冲区的数组。</param>
        /// <param name="outSampleCount">输出缓冲区能够容纳的采样个数(多声道只计算一个)</param>
        /// <returns>返回输出的采样个数</returns>
        public int Resample(IntPtr inDatas, int inSampleCount, IntPtr outDatas, int outSampleCount)
        {
            int resultSampleCount = FF.swr_convert(ctx, (byte **)outDatas, outSampleCount, (byte **)inDatas, inSampleCount);

            if (resultSampleCount < 0)
            {
                throw new FFmpegException(resultSampleCount);
            }
            return(resultSampleCount);
        }
Example #23
        protected override void Dispose(bool disposing)
        {
            if (ctx == null)
            {
                return;
            }

            FF.sws_freeContext(ctx);
            ctx = null;
        }
Example #24
        protected Decoder(AVStream *stream) : base(stream)
        {
            codecContext->Codec = FF.avcodec_find_decoder(codecContext->CodecId);
            int resultCode = FF.avcodec_open2(codecContext, codecContext->Codec, null);

            if (resultCode != 0)
            {
                throw new FFmpegException(resultCode);
            }
        }
Example #25
        private void Init(VideoEncoderParameters encoderParams)
        {
            var pixelFormats = GetSupportedPixelFormats(codec);

            if (pixelFormats != null && !pixelFormats.Contains(InFormat.PixelFormat))
            {
                outFormat = new VideoFormat(InFormat.Width, InFormat.Height, pixelFormats[0]);
                resampler = new VideoResampler(InFormat, OutFormat, encoderParams.ResampleFlags);
                tempFrame = new VideoFrame();
            }
            else
            {
                outFormat = InFormat;
            }

            framePerSecond = encoderParams.FrameRate;

            try {
                codecContext->PixFmt    = OutFormat.PixelFormat;
                codecContext->Width     = OutFormat.Width;
                codecContext->Height    = OutFormat.Height;
                codecContext->BitRate   = encoderParams.BitRate.Value;
                codecContext->TimeBase  = encoderParams.FrameRate.Reciprocal;
                codecContext->Framerate = encoderParams.FrameRate;
                if (encoderParams.GopSize != 0)
                {
                    codecContext->GopSize = encoderParams.GopSize;
                }
                if (encoderParams.MaxBFrames != 0)
                {
                    codecContext->MaxBFrames = encoderParams.MaxBFrames;
                }
                if (encoderParams.MbDecision != 0)
                {
                    codecContext->MbDecision = encoderParams.MbDecision;
                }
                if (encoderParams.Qmin != 0)
                {
                    codecContext->Qmin = encoderParams.Qmin;
                }
                if (encoderParams.Qmax != 0)
                {
                    codecContext->Qmax = encoderParams.Qmax;
                }

                var result = FF.avcodec_open2(codecContext, codec, null);
                if (result < 0)
                {
                    throw new CSharp.FFmpegException(result);
                }
            } catch {
                Dispose();
                throw;
            }
        }
Example #26
        public VideoResampler(VideoFormat source, VideoFormat destination, SwsFlags flags = SwsFlags.FastBilinear)
        {
            Source      = source;
            Destination = destination;
            Flags       = flags;

            ctx = FF.sws_getContext(
                source.Width, source.Height, source.PixelFormat,
                destination.Width, destination.Height, destination.PixelFormat,
                flags, null, null, null);
        }
Example #27
        public override bool Encode(Frame frame, Packet outPacket)
        {
            if (frame != null)
            {
                if (!(frame is VideoFrame))
                {
                    throw new ArgumentException($"{nameof(frame)}必须是{nameof(VideoFrame)}类型", nameof(frame));
                }
                if (!(frame as VideoFrame).format.Equals(InFormat))
                {
                    throw new ArgumentException("输入帧的格式和编码器输入格式不同");
                }
            }

            if (resampler != null)
            {
                if (frame != null)
                {
                    resampler.Resample(frame as VideoFrame, tempFrame);
                    frame = tempFrame;
                }
            }

            encodeFrames = 0;
            outPacket.ReleaseNativeBuffer();
            int gotPicture = 0;

            if (frame != null)
            {
                try {
                    frame.SetupToNative();
                    frame.presentTimestamp = new Timestamp(inputFrames, framePerSecond.Reciprocal);
                    frame.presentTimestamp.Transform(codecContext->TimeBase);
                    frame.frame->Pts = frame.presentTimestamp.Value;
                    FF.avcodec_encode_video2(codecContext, outPacket.packet, frame.frame, &gotPicture).CheckFFmpegCode("Video encoding failed");
                } finally {
                    frame.ReleaseSetup();
                }

                inputFrames++;
                encodeFrames = 1;
            }
            else
            {
                FF.avcodec_encode_video2(codecContext, outPacket.packet, null, &gotPicture).CheckFFmpegCode("Video encoding failed");
            }

            if (gotPicture != 0)
            {
                ConfigPakcet(outPacket);
                return(true);
            }
            return(false);
        }
Example #28
        protected override void Dispose(bool disposing)
        {
            if (frame == null)
            {
                return;
            }

            cache.Free();

            FF.av_frame_free(ref frame);
        }
Example #29
        internal AudioDecoder(AVStream *stream) : base(stream)
        {
            var sampleRate    = codecContext->SampleRate;
            var channelLayout = codecContext->ChannelLayout;
            var sampleFormat  = codecContext->SampleFmt;

            if (channelLayout == 0)
            {
                channelLayout = FF.av_get_default_channel_layout(codecContext->Channels);
            }
            InFormat = new AudioFormat(sampleRate, channelLayout, sampleFormat);
        }
Example #30
        public void Resize(int sampleCount)
        {
            if (format == null)
            {
                throw new InvalidOperationException($"{nameof(AudioFrame)} 对象未指定格式");
            }

            this.sampleCount = sampleCount;
            int bytes = format.GetBytes(sampleCount);

            cache.Resize(bytes);
            FF.av_samples_fill_arrays(datas, null, cache.data, format.Channels, sampleCount, format.SampleFormat, 1);
        }
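For reference, the size that format.GetBytes(sampleCount) has to produce follows FFmpeg's usual rule for tightly packed buffers (align = 1): total bytes = samples * channels * bytes per sample, whether the data sits in one interleaved line or in one line per channel. A hedged sketch of just that arithmetic (SampleSizeSketch and TotalBytes are hypothetical names; GetBytes itself belongs to the library, and any extra alignment padding is ignored here):

static class SampleSizeSketch
{
    // Mirrors the arithmetic of av_samples_get_buffer_size with align = 1: the total
    // payload is the same for packed and planar layouts, only its split across lines differs.
    public static int TotalBytes(int sampleCount, int channels, int bytesPerSample)
        => sampleCount * channels * bytesPerSample;
}
// e.g. 1024 samples of stereo 32-bit float: TotalBytes(1024, 2, 4) == 8192 bytes.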