Code Example #1
        /// <summary>
        /// Add a new output stream whose codec parameters are copied from <paramref name="stream"/> via <see cref="ffmpeg.avcodec_parameters_copy(AVCodecParameters*, AVCodecParameters*)"/>.
        /// </summary>
        /// <param name="stream">Source stream to copy codec parameters (and, if present, the encoder settings) from.</param>
        /// <param name="flags">Encoder creation flags passed to <see cref="MediaEncoder.CreateEncode"/> when the source stream has a codec.</param>
        /// <returns>The newly added <see cref="MediaStream"/>.</returns>
        public MediaStream AddStream(MediaStream stream, int flags = 0)
        {
            // Allocate a new stream in the output format context.
            AVStream *pstream = ffmpeg.avformat_new_stream(pFormatContext, null);

            pstream->id = (int)(pFormatContext->nb_streams - 1);
            // Copy the source stream's codec parameters and clear codec_tag so the
            // muxer can choose a tag that is valid for the output container.
            ffmpeg.avcodec_parameters_copy(pstream->codecpar, stream.Stream.codecpar);
            pstream->codecpar->codec_tag = 0;
            MediaCodec mediaCodec = null;

            if (stream.Codec != null)
            {
                // The source stream has an associated codec: create a matching
                // encoder whose context is filled from the source codec's parameters.
                mediaCodec = MediaEncoder.CreateEncode(stream.Codec.AVCodecContext.codec_id, flags, _ =>
                {
                    AVCodecContext *pContext       = _;
                    AVCodecParameters *pParameters = ffmpeg.avcodec_parameters_alloc();
                    ffmpeg.avcodec_parameters_from_context(pParameters, stream.Codec).ThrowIfError();
                    ffmpeg.avcodec_parameters_to_context(pContext, pParameters);
                    ffmpeg.avcodec_parameters_free(&pParameters);
                    pContext->time_base = stream.Stream.r_frame_rate.ToInvert();
                });
            }
            streams.Add(new MediaStream(pstream)
            {
                TimeBase = stream.Stream.r_frame_rate.ToInvert(), Codec = mediaCodec
            });
            return(streams.Last());
        }
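A minimal usage sketch of the overload above, remuxing (stream-copying) every input stream into an output container. Only AddStream itself is taken from the code shown here; the MediaReader/MediaWriter members used around it are assumptions about the surrounding EmguFFmpeg API and may differ in detail.

        // Hedged remux sketch: copy each input stream's parameters into the
        // output without re-encoding. Member names other than AddStream are
        // assumed for illustration.
        using (var reader = new MediaReader("input.mp4"))
        using (var writer = new MediaWriter("output.mkv"))
        {
            foreach (MediaStream inStream in reader)   // assumed: reader enumerates its streams
            {
                writer.AddStream(inStream);            // codecpar copied, codec_tag cleared
            }
            writer.Initialize();                       // assumed: writes the container header
            // ... then forward packets from reader to writer ...
        }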
Code Example #2
File: VideoFrame.cs Project: stephan27/EmguFFmpeg
 /// <summary>
 /// Create a video frame matching the codec context's pixel format and dimensions.
 /// </summary>
 public static VideoFrame CreateFrameByCodec(MediaCodec codec)
 {
     if (codec.Type != AVMediaType.AVMEDIA_TYPE_VIDEO)
     {
         throw new FFmpegException(FFmpegException.CodecTypeError);
     }
     return(new VideoFrame(codec.AVCodecContext.pix_fmt, codec.AVCodecContext.width, codec.AVCodecContext.height));
 }
Code Example #3
File: PixelConverter.cs Project: geedrius/EmguFFmpeg
 /// <summary>
 /// Create a video frame (pixel) converter from the destination codec's context.
 /// </summary>
 /// <param name="dstCodec">Destination video codec; its width, height and pixel format define the converter output.</param>
 /// <param name="flag">libswscale scaling flags, <see cref="ffmpeg.SWS_BILINEAR"/> by default.</param>
 public PixelConverter(MediaCodec dstCodec, int flag = ffmpeg.SWS_BILINEAR)
 {
     if (dstCodec.Type != AVMediaType.AVMEDIA_TYPE_VIDEO)
     {
         throw new FFmpegException(FFmpegException.CodecTypeError);
     }
     DstWidth  = dstCodec.AVCodecContext.width;
     DstHeight = dstCodec.AVCodecContext.height;
     DstFormat = dstCodec.AVCodecContext.pix_fmt;
     SwsFlag   = flag;
     dstFrame  = new VideoFrame(DstWidth, DstHeight, DstFormat);
 }
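A hedged sketch tying examples #2 and #3 together: given a configured video encoder, allocate a matching destination frame and build a pixel converter toward its format. Only the constructor and CreateFrameByCodec shown above are taken from the source; the caller and its parameter are assumptions, and the actual conversion call is omitted because its name is not shown in these examples.

 // Hedged sketch: prepare scaling toward a video encoder's size and pixel format.
 // "videoEncoder" is assumed to be an already configured video MediaCodec.
 public static void PreparePixelConversion(MediaCodec videoEncoder)
 {
     VideoFrame dstFrame = VideoFrame.CreateFrameByCodec(videoEncoder); // example #2
     PixelConverter sws  = new PixelConverter(videoEncoder);            // example #3, SWS_BILINEAR by default
     // Feeding decoded frames through the converter is not shown here,
     // since the conversion method itself does not appear in these examples.
 }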
Code Example #4
File: AudioFrame.cs Project: geedrius/EmguFFmpeg
        /// <summary>
        /// Create an audio frame from the codec context's parameters.
        /// </summary>
        /// <param name="codec">Audio codec whose channel count, frame size, sample format and sample rate are used.</param>
        /// <returns>The allocated <see cref="AudioFrame"/>.</returns>
        public static AudioFrame CreateFrameByCodec(MediaCodec codec)
        {
            if (codec.Type != AVMediaType.AVMEDIA_TYPE_AUDIO)
            {
                throw new FFmpegException(FFmpegException.CodecTypeError);
            }
            AudioFrame audioFrame = new AudioFrame(codec.AVCodecContext.channels, codec.AVCodecContext.frame_size, codec.AVCodecContext.sample_fmt, codec.AVCodecContext.sample_rate);

            if (codec.AVCodecContext.channel_layout > 0)
            {
                audioFrame.pFrame->channel_layout = codec.AVCodecContext.channel_layout;
            }
            return(audioFrame);
        }
Code Example #5
 /// <summary>
 /// Create an audio sample converter from the destination codec's context.
 /// </summary>
 /// <param name="dstCodec">Destination audio codec; its sample format, channel count, channel layout, frame size and sample rate define the converter output.</param>
 public SampleConverter(MediaCodec dstCodec)
 {
     if (dstCodec.Type != AVMediaType.AVMEDIA_TYPE_AUDIO)
     {
         throw new FFmpegException(FFmpegException.CodecTypeError);
     }
     DstFormat        = dstCodec.AVCodecContext.sample_fmt;
     DstChannels      = dstCodec.AVCodecContext.channels;
     DstChannelLayout = dstCodec.AVCodecContext.channel_layout;
     if (DstChannelLayout == 0)
     {
         DstChannelLayout = FFmpegHelper.GetChannelLayout(DstChannels);
     }
     DstNbSamples  = dstCodec.AVCodecContext.frame_size;
     DstSampleRate = dstCodec.AVCodecContext.sample_rate;
     dstFrame      = new AudioFrame(DstChannels, DstNbSamples, DstFormat, DstSampleRate);
     AudioFifo     = new AudioFifo(DstFormat, DstChannels);
 }
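When the destination context has no channel layout set, the constructor above falls back to FFmpegHelper.GetChannelLayout(DstChannels). A plausible reading is that this maps a channel count to FFmpeg's default layout; the sketch below expresses that mapping with the raw FFmpeg.AutoGen call and is an assumption about the helper, not its actual source.

 // Hedged sketch of the fallback: derive a default channel layout from a channel
 // count (e.g. 2 -> AV_CH_LAYOUT_STEREO). Assumes FFmpegHelper.GetChannelLayout
 // behaves like av_get_default_channel_layout; the real helper is not shown here.
 public static ulong GetDefaultChannelLayout(int channels)
 {
     return (ulong)ffmpeg.av_get_default_channel_layout(channels);
 }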
Code Example #6
        private static AVCodecHWConfig? avcodec_get_hw_config_safe(MediaCodec codec, int i)
        {
            var ptr = ffmpeg.avcodec_get_hw_config(codec, i);

            return(ptr != null ? *ptr : (AVCodecHWConfig?)null);
        }
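The helper above is meant to be called in a loop: avcodec_get_hw_config returns a null pointer once the index passes the codec's last hardware configuration, which the nullable return value surfaces. A hedged caller sketch (the caller itself is not part of the source):

        // Hedged sketch: enumerate hardware configurations via the helper above
        // until it returns null, collecting the supported device types.
        private static List<AVHWDeviceType> GetHWDeviceTypes(MediaCodec codec)
        {
            var types = new List<AVHWDeviceType>();
            for (int i = 0; ; i++)
            {
                AVCodecHWConfig? config = avcodec_get_hw_config_safe(codec, i);
                if (config == null)
                {
                    break;
                }
                types.Add(config.Value.device_type);
            }
            return types;
        }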
Code Example #7
 private static string av_get_profile_name_safe(MediaCodec codec, int i)
 {
     return(ffmpeg.av_get_profile_name(codec, i));
 }
Code Example #8
        private ulong? channel_layouts_next_safe(MediaCodec codec, int i)
        {
            var ptr = codec.pCodec->channel_layouts + i;

            return(ptr != null ? *ptr : (ulong?)null);
        }
Code Example #9
        private int? supported_samplerates_next_safe(MediaCodec codec, int i)
        {
            var ptr = codec.pCodec->supported_samplerates + i;

            return(ptr != null ? *ptr : (int?)null);
        }
Code Example #10
        private AVSampleFormat? sample_fmts_next_safe(MediaCodec codec, int i)
        {
            var ptr = codec.pCodec->sample_fmts + i;

            return(ptr != null ? *ptr : (AVSampleFormat?)null);
        }
Code Example #11
        private AVRational? supported_framerates_next_safe(MediaCodec codec, int i)
        {
            var ptr = codec.pCodec->supported_framerates + i;

            return(ptr != null ? *ptr : (AVRational?)null);
        }
Code Example #12
        private static AVPixelFormat? pix_fmts_next_safe(MediaCodec codec, int i)
        {
            var ptr = codec.pCodec->pix_fmts + i;

            return(ptr != null ? *ptr : (AVPixelFormat?)null);
        }
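These *_next_safe helpers step through FFmpeg's sentinel-terminated capability arrays (AV_PIX_FMT_NONE, AV_SAMPLE_FMT_NONE, a 0 sample rate or channel layout, a 0/0 framerate). A hedged caller sketch for the pixel-format helper above; the caller is an assumption, not part of the source:

        // Hedged sketch: collect a codec's supported pixel formats by walking the
        // array via the helper above until a null pointer or the AV_PIX_FMT_NONE
        // terminator is reached.
        private static List<AVPixelFormat> GetSupportedPixelFormats(MediaCodec codec)
        {
            var formats = new List<AVPixelFormat>();
            for (int i = 0; ; i++)
            {
                AVPixelFormat? fmt = pix_fmts_next_safe(codec, i);
                if (fmt == null || fmt == AVPixelFormat.AV_PIX_FMT_NONE)
                {
                    break;
                }
                formats.Add(fmt.Value);
            }
            return formats;
        }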