Code example #1
0
 /// <summary>
 /// Starts a SimpleTCP server on port 10086, wires up handling of incoming
 /// COMMANDER messages, and registers the FFmpeg native binaries.
 /// </summary>
 public OverFlowCheckViewModel()
 {
     tempSever = new SimpleTCP.SimpleTcpServer().Start(10086);
     tempSever.DataReceived += (sender, msg) =>
     {
         IP = msg.TcpClient.Client.LocalEndPoint.ToString();

         // Parse the raw ASCII payload into a command object and record it.
         string message = Encoding.ASCII.GetString(msg.Data);
         Common.TCPHelper.COMMANDER cmd = new Common.TCPHelper.COMMANDER(message);
         messageList.Add(cmd);

         // NOTE(review): the DMCode property names do not match the values
         // stored in them (Email holds PackagePosition, Phone holds DATETIME,
         // etc.) — confirm this mapping is intentional or rename the members.
         DMCode temp = new DMCode();
         temp.CodeID   = cmd.BoxId;
         temp.CodeName = cmd.CommandType;
         temp.Email    = cmd.PackagePosition;
         temp.Info     = cmd.PackagePositionCount;
         temp.Phone    = cmd.DATETIME;
         CodeList.Add(temp);

         // Acknowledge the sender. msg.Reply performs its own encoding, so the
         // unused Encoding.ASCII.GetBytes buffer (data1) was removed along with
         // the dead commented-out send code that referenced it.
         severmeeage = cmd.GenerateSendSuccessMessage();
         msg.Reply(severmeeage);
     };
     FFmpegBinariesHelper.RegisterFFmpegBinaries();
 }
Code example #2
0
File: AudioTest.cs  Project: lin9405/FFmpeg-usbCam
        /// <summary>
        /// Sets up FFmpeg: loads the native binaries, then allocates the frame
        /// used for received data and the format context.
        /// </summary>
        public AudioTest()
        {
            // Native libraries must be resolvable before any ffmpeg.* call.
            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            receivedFrame = ffmpeg.av_frame_alloc();
            _fmt_ctx      = ffmpeg.avformat_alloc_context();
        }
Code example #3
0
File: StreamDecoder.cs  Project: oT2/OT2Player
        /// <summary>
        /// Opens the media at <paramref name="url"/>, locates the first video
        /// stream, opens a decoder for it, and prepares (but does not start)
        /// the decoding thread.
        /// </summary>
        public StreamDecoder(string url, DecoderConfiguration configuration)
        {
            FFmpegBinariesHelper.RegisterFFmpegBinaries(); //Should not be here

            this.decoderConfiguration = configuration;
            _pFormatContext           = ffmpeg.avformat_alloc_context();

            // Local copy: avformat_open_input needs the address of the pointer,
            // and a field of a managed object cannot be passed by address.
            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();

            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

            // find the first video stream
            AVStream *pStream = null;

            for (var i = 0; i < _pFormatContext->nb_streams; i++)
            {
                if (_pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = _pFormatContext->streams[i];
                    break;
                }
            }

            if (pStream == null)
            {
                throw new InvalidOperationException("Could not found video stream.");
            }

            _streamIndex   = pStream->index;
            _pCodecContext = pStream->codec;


            var codecId = _pCodecContext->codec_id;
            var pCodec  = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new InvalidOperationException("Unsupported codec.");
            }

            ffmpeg.avcodec_open2(_pCodecContext, pCodec, null).ThrowExceptionIfError();

            // Publish the detected codec and input format back into the
            // caller-supplied configuration.
            decoderConfiguration.codec           = FormatHelper.FFmpegToOT2(codecId);
            // NOTE(review): Resolution is constructed as (height, width) here —
            // confirm the Resolution constructor's parameter order.
            decoderConfiguration.inputResolution = new Resolution(_pCodecContext->height, _pCodecContext->width);
            //CodecName = ffmpeg.avcodec_get_name(codecId);
            //PixelFormat = _pCodecContext->pix_fmt;
            decoderConfiguration.inputPixelFormat = FormatHelper.FFmpegToOT2(_pCodecContext->pix_fmt);

            _pPacket       = ffmpeg.av_packet_alloc();
            _pFrame        = ffmpeg.av_frame_alloc();
            // Worker loop: keep decoding until DecodeFrame() reports non-zero.
            decodingThread = new Thread(() => { while (DecodeFrame() == 0)
                                                {
                                                    ;
                                                }
                                        });
        }
Code example #4
0
        /// <summary>
        /// One-time form initialization: sets the UI status strings, LCS relay
        /// control URLs, data-grid styling, starts both cameras and registers
        /// the FFmpeg native binaries.
        /// </summary>
        private void Form1_Load(object sender, EventArgs e)
        {
            // (removed an unused local that only read ScreenBoradImage.Image)

            status        = "정상 운행중";
            end           = "프로그램을 종료합니다.";
            start         = "프로그램을 시작합니다.";
            connected     = "연결이 되었습니다.";
            disconnected  = "연결이 끊겼습니다.";
            camerastatus1 = "전방 카메라 ";
            camerastatus2 = "후방 카메라 ";
            firstlcs      = "1차로 ";
            secondlcs     = "2차로 ";
            thirdlcs      = "3차로 ";
            alllcs        = "전차로 ";
            stop          = "진입금지";
            forward       = "진입가능";
            this.lgm      = new LogManager(start);
            enterhold     = true;
            enterhold2    = true;
            enterhold3    = true;
            camera1cnt    = 0;
            camera2cnt    = 0;
            video1cnt     = 0;
            video2cnt     = 0;
            gridcnt       = 0;
            imagemax1     = false;
            imagemax2     = false;
            camera1record = 0;
            camera2record = 0;

            // Grid styling.
            this.dataGridView1.ColumnHeadersDefaultCellStyle.Font = new Font("Courier New", 13, FontStyle.Bold);
            this.dataGridView1.DefaultCellStyle.Font          = new Font("Courier New", 13);
            this.dataGridView1.RowsDefaultCellStyle.BackColor = Color.DarkBlue;

            // LCS relay endpoints: relay={0,1,2} selects the lane sign function,
            // on={1,0} switches it on/off.
            lcsgo     = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=0&on=1&time=0&pwd=0&";
            lcsgostop = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=0&on=0&time=0&pwd=0&";

            lcsstop     = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=1&on=1&time=0&pwd=0&";
            lcsstopstop = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=1&on=0&time=0&pwd=0&";

            lcsflash     = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=2&on=1&time=0&pwd=0&";
            lcsflashstop = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=2&on=0&time=0&pwd=0&";

            //SendLcsData(lcsgo);
            PlayCamera1();
            PlayCamera2();

            // No hardware decoder selected by default.
            hwDeviceType  = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;
            hwDeviceType2 = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;    //temp

            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            //ConfigureHWDecoder(out hwDeviceType);

            isInit  = false;
            isInit2 = false;
        }
Code example #5
0
File: RawFrameDecoder.cs  Project: oT2/OT2Player
        /// <summary>
        /// Creates a decoder for the codec described by <paramref name="configuration"/>:
        /// loads FFmpeg, opens the decoder, and allocates the packet/frame used
        /// by the decoding thread (the thread is created but not started here).
        /// </summary>
        public VideoDecoder(DecoderConfiguration configuration)
        {
            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            Console.WriteLine("Current directory: " + Environment.CurrentDirectory);
            Console.WriteLine("Runnung in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");
            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            // FFMPEG initialization.
            // NOTE(review): no avformat_open_input is performed in this class,
            // so calling avformat_find_stream_info on a freshly allocated
            // context is suspicious — confirm whether this call is needed.
            _pFormatContext = ffmpeg.avformat_alloc_context();

            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

            // Resolve the decoder from the configured codec id and fail fast
            // BEFORE allocating a codec context for it (the null check used to
            // run only after avcodec_alloc_context3 had already been called).
            var codecId = FormatHelper.OT2ToFFmpeg(configuration.codec);
            var pCodec  = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new InvalidOperationException("Unsupported codec.");
            }

            _pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec);

            ffmpeg.avcodec_open2(_pCodecContext, pCodec, null).ThrowExceptionIfError();

            // Allocate the reusable packet/frame and prepare the decode thread.
            packet         = ffmpeg.av_packet_alloc();
            frame          = ffmpeg.av_frame_alloc();
            decodingThread = new Thread(DecodeFrames);
        }
        private bool activeThread;      // whether the rtsp decoding thread is active

        /// <summary>
        /// Initializes the window, registers the FFmpeg binaries, and prepares
        /// (without starting) the video-frame decoding thread.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Set up the FFmpeg dll reference path.
            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            // Create the video-frame decoding thread; it is started elsewhere.
            ts = new ThreadStart(DecodeAllFramesToImages);
            thread = new Thread(ts);

            activeThread = true;
        }
Code example #7
0
        /// <summary>
        /// Initializes the main window, applies full-screen settings when
        /// configured, and registers the FFmpeg native binaries.
        /// </summary>
        public MainWindow()
        {
            // timeBeginPeriod(1): request 1 ms Windows timer resolution
            // (winmm) for smoother playback timing.
            timeBeginPeriod(1);
            InitializeComponent();
            sys_pite = sys_con_pite;

            m_window = this;
            if (SharedSetting.FullS)
            {
                // Borderless, maximized window for full-screen playback.
                this.WindowStyle = WindowStyle.None;
                this.WindowState = System.Windows.WindowState.Maximized;
            }

            FFmpegBinariesHelper.RegisterFFmpegBinaries();
        }
Code example #8
0
        /// <summary>
        /// Sets up FFmpeg, logging, and the decode/encode worker threads
        /// (the threads are created here but started elsewhere).
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Register the FFmpeg dll reference path before any ffmpeg.* call.
            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            Console.WriteLine($"Current directory: {Environment.CurrentDirectory}");
            Console.WriteLine("Runnung in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");
            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            SetupLogging();

            // Video-frame decoding thread.
            decodingThreadStart = new ThreadStart(DecodeAllFramesToImages);
            decodingThread = new Thread(decodingThreadStart);

            // Video-frame encoding thread.
            encodingThreadStart = new ThreadStart(EncodeImagesToH264);
            encodingThread = new Thread(encodingThreadStart);
        }
Code example #9
0
File: VideoDecoder.cs  Project: igoole/FFmpeg4Unity
    /// <summary>
    /// Decodes the frame at <paramref name="frameIndex"/> of
    /// <paramref name="filename"/> into a Texture2D.
    /// </summary>
    public void Run(String filename, int frameIndex)
    {
        _isRunning = true;

        Debug.Log("Current directory: " + Environment.CurrentDirectory);

        var bitness = Environment.Is64BitProcess ? "64" : "32";
        Debug.Log($"Running in {bitness}-bit mode");

        // FFmpeg must be registered before querying its version or decoding.
        FFmpegBinariesHelper.RegisterFFmpegBinaries();
        Debug.Log($"FFmpeg version info: {ffmpeg.av_version_info()}");

        SetupLogging();
        ConfigureHWDecoder(out var deviceType);

        Debug.Log("Decoding...");
        var texture2D = DecodeFrameToTexture2D(filename, frameIndex, deviceType);
    }
Code example #10
0
File: Program.cs  Project: tianxin8206/rtspconverter
        /// <summary>
        /// Reads an rtsp source, decodes each packet via dec_func, re-encodes
        /// the result and writes it to E:\hls\out.m3u8, then tears all contexts
        /// down. The ifmt_ctx/ofmt_ctx fields and the helper methods
        /// (OpenInputFile, OpenOutputFile, dec_func, encode_write_frame,
        /// InitSwr, TransSample) are defined elsewhere in this class.
        /// </summary>
        static void Main(string[] args)
        {
            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            int      ret;
            AVPacket packet; //= { .data = NULL, .size = 0 };

            packet.data = null;
            packet.size = 0;
            AVFrame *frame = null;

            AVMediaType type;
            int         stream_index;
            int         i;

            //ffmpeg.av_register_all();
            //ffmpeg.avfilter_register_all();
            if ((ret = OpenInputFile("rtsp://113.136.42.40:554/PLTV/88888888/224/3221226090/10000100000000060000000001759099_0.smil")) < 0)
            {
                goto end;
            }
            if ((ret = OpenOutputFile("E:\\hls\\out.m3u8")) < 0)
            {
                goto end;
            }
            //var avBitStreamFilter = ffmpeg.av_bsf_get_by_name("h264_mp4toannexb");
            //fixed (AVBSFContext** ctx = &absCtx)
            //ffmpeg.av_bsf_alloc(avBitStreamFilter, ctx);
            //ffmpeg.av_bsf_init(absCtx);
            /* read all packets */
            int count = 0;
            int flag  = 1;

            while (true)
            {
                if ((ret = ffmpeg.av_read_frame(ifmt_ctx, &packet)) < 0)
                {
                    break;
                }
                stream_index = packet.stream_index;
                type         = ifmt_ctx->streams[packet.stream_index]->codec->codec_type;
                // NOTE(review): both av_log calls pass a %u format specifier
                // with no matching argument; the logged line will be wrong.
                ffmpeg.av_log(null, ffmpeg.AV_LOG_DEBUG, "Demuxer gave frame of stream_index %u\n");

                ffmpeg.av_log(null, ffmpeg.AV_LOG_DEBUG, "Going to reencode&filter the frame\n");
                frame = ffmpeg.av_frame_alloc();
                if (null == frame)
                {
                    // AVERROR(12) — presumably ENOMEM; confirm against platform errno.
                    ret = ffmpeg.AVERROR(12);
                    break;
                }
                // Rescale packet timestamps from the demuxer time base to the
                // decoder time base before decoding.
                ffmpeg.av_packet_rescale_ts(&packet,
                                            ifmt_ctx->streams[stream_index]->time_base,
                                            ifmt_ctx->streams[stream_index]->codec->time_base);

                ret = dec_func(ifmt_ctx->streams[stream_index]->codec, frame, &packet);
                if (ret < 0)
                {
                    ffmpeg.av_frame_free(&frame);
                    ffmpeg.av_log(null, ffmpeg.AV_LOG_ERROR, "Decoding failed\n");
                    break;
                }
                //if (got_frame == 0)
                //{
                frame->pts = frame->pkt_pts;
                // frame->pts = av_frame_get_best_effort_timestamp(frame);
                // frame->pts=count;
                if (type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    // Video: re-encode the decoded frame directly.
                    ret = encode_write_frame(frame, stream_index, null);
                }
                else
                {
                    // Audio: initialize the resampler once, convert the frame,
                    // then encode the converted frame.
                    if (flag != 0)
                    {
                        InitSwr(stream_index);
                        flag = 0;
                    }

                    AVFrame *frame_out = ffmpeg.av_frame_alloc();
                    if (0 != TransSample(frame, frame_out, stream_index))
                    {
                        ffmpeg.av_log(null, ffmpeg.AV_LOG_ERROR, "convert audio failed\n");
                        ret = -1;
                    }
                    // frame_out->pts = frame->pkt_pts;
                    ret = encode_write_frame(frame_out, stream_index, null);
                    ffmpeg.av_frame_free(&frame_out);
                }
                ffmpeg.av_frame_free(&frame);
                if (ret < 0)
                {
                    goto end;
                }
                //}
                //else
                //{
                //    ffmpeg.av_frame_free(&frame);
                //}

                ffmpeg.av_packet_unref(&packet);
                ++count;
            }
            /* flush  encoders */
            // for (i = 0; i < ifmt_ctx->nb_streams; i++) {
            // ret = flush_encoder(i);
            // if (ret < 0) {
            // av_log(NULL, AV_LOG_ERROR, "Flushing encoder failed\n");
            // goto end;
            // }
            // }
            // NOTE(review): this error line is logged unconditionally — leftover
            // from the commented-out flush loop above.
            ffmpeg.av_log(null, ffmpeg.AV_LOG_ERROR, "Flushing encoder failed\n");
            ffmpeg.av_write_trailer(ofmt_ctx);
end:
            // Cleanup: release the last packet/frame, close every codec, then
            // both format contexts.
            ffmpeg.av_packet_unref(&packet);
            ffmpeg.av_frame_free(&frame);
            //fixed (AVBSFContext** ctx = &absCtx)
            //ffmpeg.av_bsf_free(ctx);
            for (i = 0; i < ifmt_ctx->nb_streams; i++)
            {
                ffmpeg.avcodec_close(ifmt_ctx->streams[i]->codec);
                if (ofmt_ctx != null && ofmt_ctx->nb_streams > i && ofmt_ctx->streams[i] != null && ofmt_ctx->streams[i]->codec != null)
                {
                    ffmpeg.avcodec_close(ofmt_ctx->streams[i]->codec);
                }
            }
            // av_free(filter_ctx);
            fixed(AVFormatContext **ss = &ifmt_ctx)
            ffmpeg.avformat_close_input(ss);

            if (ofmt_ctx != null && (ofmt_ctx->oformat->flags & ffmpeg.AVFMT_NOFILE) == 0)
            {
                ffmpeg.avio_closep(&ofmt_ctx->pb);
            }
            ffmpeg.avformat_free_context(ofmt_ctx);

            // if (ret < 0)
            // av_log(NULL, AV_LOG_ERROR, "Error occurred: %s\n", av_err2str(ret)); //av_err2str(ret));
        }
Code example #11
0
File: test.cs  Project: lin9405/FFmpeg-usbCam
        /// <summary>
        /// Experimental capture test: opens a DirectShow USB capture device,
        /// prepares an .avi output with an MPEG4 encoder context, dumps the
        /// detected streams, then remuxes packets from the device into the file.
        /// </summary>
        public void testSet()
        {
            FFmpegBinariesHelper.RegisterFFmpegBinaries();
            ffmpeg.avdevice_register_all();
            var fmt_ctx = _fmt_ctx;

            fmt_ctx = ffmpeg.avformat_alloc_context();

            AVInputFormat *iformat = ffmpeg.av_find_input_format("dshow");
            string         device  = "video=USB3. 0 capture:audio=디지털 오디오 인터페이스(5- USB3. 0 capture)";


            var a          = ffmpeg.avformat_open_input(&fmt_ctx, device, iformat, null); // negative: the device could not be opened
            var b          = ffmpeg.avformat_find_stream_info(fmt_ctx, null);             // negative: no stream info found
            int videoIndex = -1;
            int audioIndex = -1;

            _fmt_ctx = fmt_ctx;
            AVFormatContext *outputFmtCtx;

            AudioIndex = -1;
            VideoIndex = -1;
            string           filename    = @"C:\Users\admin\Desktop\output223423423.avi";
            AVFormatContext *inputFmtCtx = _fmt_ctx;

            if (ffmpeg.avformat_alloc_output_context2(&outputFmtCtx, null, null, filename) < 0)     // a negative return value is an error
            {
                Console.WriteLine("파일 생성 못해!!!");
            }
            var oCodec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_MPEG4);

            // NOTE(review): av_dump_format / avio_open / avformat_write_header
            // are executed inside this per-stream loop, so they run once per
            // input stream — they should almost certainly run once, after it.
            for (int index = 0; index < inputFmtCtx->nb_streams; index++)
            {
                AVStream *      in_stream    = inputFmtCtx->streams[index];
                AVCodecContext *in_codec_ctx = in_stream->codec;
                // NOTE(review): this allocation overwrites in_codec_ctx and the
                // result is never used; inputFmtCtx->data_codec also looks wrong.
                in_codec_ctx = ffmpeg.avcodec_alloc_context3(inputFmtCtx->data_codec);
                AVStream *out_stream = ffmpeg.avformat_new_stream(outputFmtCtx, null);

                if (out_stream == null)
                {
                    Console.WriteLine("OUTPUT 스트림 NULL");
                }

                //
                AVCodecContext *outCodecContext = out_stream->codec;
                outCodecContext->codec = oCodec;
                outCodecContext        = ffmpeg.avcodec_alloc_context3(oCodec);

                outCodecContext->height = 500;
                outCodecContext->width  = 600;
                //  outCodecContext->sample_aspect_ratio = videoInfo.Sample_aspect_ratio;
                outCodecContext->pix_fmt   = AVPixelFormat.AV_PIX_FMT_YUV420P;
                outCodecContext->time_base = new AVRational {
                    num = 1, den = 15
                };
                //   outCodecContext->framerate = ffmpeg.av_inv_q(videoInfo.Framerate);

                // The context must be configured before its parameters can be copied.


                if (ffmpeg.avcodec_parameters_from_context(out_stream->codecpar, outCodecContext) < 0)
                {
                    Console.WriteLine("copy 못해에!!!");
                }

                out_stream->time_base = in_stream->time_base;

                outCodecContext->codec_tag = 0;


                if ((outputFmtCtx->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) == 0)
                {
                    outCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
                }
                //  ffmpeg.avcodec_open2(outCodecContext, oCodec, null).ThrowExceptionIfError();

                VideoIndex = 0;
                AudioIndex = 1;
                ffmpeg.av_dump_format(outputFmtCtx, 0, filename, 1);

                if ((outputFmtCtx->oformat->flags & ffmpeg.AVFMT_NOFILE) == 0)
                {
                    // This actually opens the file
                    if (ffmpeg.avio_open(&outputFmtCtx->pb, filename, ffmpeg.AVIO_FLAG_WRITE) < 0)
                    {
                        Console.WriteLine("못만들오...");
                    }
                }
                if (ffmpeg.avformat_write_header(outputFmtCtx, null) < 0)
                {
                    Console.WriteLine("헤더를 못써...\n");
                }
            }
            //ffmpeg.av_write_trailer(outputFmtCtx);
            //ffmpeg.avio_closep(&outputFmtCtx->pb);
            //ffmpeg.avformat_free_context(outputFmtCtx);

            // nb_streams: number of streams detected in the opened input.
            // NOTE(review): the two if/else-if chains below are duplicates; the
            // first pair is redundant.
            for (int index = 0; index < fmt_ctx->nb_streams; index++)
            {
                var avCodecContext = fmt_ctx->streams[index]->codec;
                if (avCodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    videoIndex = index;
                }
                else if (avCodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    audioIndex = index;
                    Console.WriteLine(audioIndex + "***");
                }
                if (avCodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    videoIndex = index;
                    Console.WriteLine($"====================={avCodecContext->codec_type}======================");
                    //Console.WriteLine(avCodecContext->bit_rate); //W * H *FPS
                    //Console.WriteLine(avCodecContext->codec_id);
                    //Console.WriteLine(avCodecContext->width);
                    //Console.WriteLine(avCodecContext->coded_width);
                    //Console.WriteLine(avCodecContext->height);
                    //Console.WriteLine(avCodecContext->coded_height);
                    //Console.WriteLine(avCodecContext->pts_correction_num_faulty_pts);
                    //Console.WriteLine(avCodecContext->pts_correction_last_dts);
                    //Console.WriteLine(avCodecContext->pts_correction_last_pts);
                    Console.WriteLine();
                }
                else if (avCodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    audioIndex = index;
                    Console.WriteLine($"====================={avCodecContext->codec_type}======================");
                    //Console.WriteLine(avCodecContext->bit_rate); //W * H *FPS
                    //Console.WriteLine(avCodecContext->codec_id);
                    //Console.WriteLine($"Channels :  {avCodecContext->channels}");
                    //Console.WriteLine(avCodecContext->width);
                    //Console.WriteLine(avCodecContext->coded_width);
                    //Console.WriteLine(avCodecContext->height);
                    //Console.WriteLine(avCodecContext->coded_height);
                    //Console.WriteLine(avCodecContext->pts_correction_num_faulty_pts);
                    //Console.WriteLine(avCodecContext->pts_correction_last_dts);
                    //Console.WriteLine(avCodecContext->pts_correction_last_pts);
                }
            }

            int      ret;
            AVPacket pkt;
            int      out_stream_index;

            // Remux loop: copy packets until EOF, rescaling timestamps into the
            // matching output stream's time base.
            while (true)
            {
                ret = ffmpeg.av_read_frame(fmt_ctx, &pkt); // 0 on success

                if (ret == ffmpeg.AVERROR_EOF)
                {
                    Console.WriteLine("frame end");
                    break;
                }

                if (pkt.stream_index == videoIndex)
                {
                    Console.WriteLine("Video Packet");
                }
                else if (pkt.stream_index == audioIndex)
                {
                    Console.WriteLine("Audio Packet");
                }

                AVStream *in_stream = fmt_ctx->streams[pkt.stream_index];
                out_stream_index = (pkt.stream_index == videoIndex) ? videoIndex : audioIndex;
                AVStream *out_stream = outputFmtCtx->streams[out_stream_index];

                ffmpeg.av_packet_rescale_ts(&pkt, in_stream->time_base, out_stream->time_base);


                pkt.stream_index = out_stream_index;

                if (ffmpeg.av_interleaved_write_frame(outputFmtCtx, &pkt) < 0)
                {
                    Console.WriteLine("!!!!!!!!@#####!@#!@#!");
                    break;
                }

                ffmpeg.av_packet_unref(&pkt); // formerly av_free_packet()
            }

            ffmpeg.av_write_trailer(outputFmtCtx);
        }
Code example #12
0
        /// <summary>
        /// Pulls an RTSP input stream and re-publishes it to an RTSP output
        /// endpoint without transcoding (packet pass-through).
        /// </summary>
        static unsafe void Main(string[] args)
        {
            Console.WriteLine("Hello World!");

            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            string inUrl  = "rtsp://*****:*****@192.168.1.83:554/h264/ch40/sub/av_stream"; // may also be a local file
            string outUrl = "rtsp://192.168.1.120:8554/live/mystream";

            // Register all (de)muxers and initialize the network layer.
            ffmpeg.av_register_all();
            ffmpeg.avformat_network_init();

            int res = 0;

            // Input (demuxer) context; cap the rtsp protocol's maximum delay.
            AVFormatContext *ictx = null;
            AVDictionary *opts = null;

            ffmpeg.av_dict_set(&opts, "max_delay", "500", 0);

            // BUG FIX: the original constructed `new Exception()` without
            // `throw` at every error check below, silently ignoring all
            // failures; the exceptions are now actually thrown.
            if ((res = ffmpeg.avformat_open_input(&ictx, inUrl, null, &opts)) != 0)
            {
                throw new InvalidOperationException($"avformat_open_input failed ({res})");
            }

            // Probe the audio/video stream information.
            if ((res = ffmpeg.avformat_find_stream_info(ictx, null)) < 0)
            {
                throw new InvalidOperationException($"avformat_find_stream_info failed ({res})");
            }
            ffmpeg.av_dump_format(ictx, 0, inUrl, 0);

            // Create the output (muxer) context.
            AVFormatContext *octx = null;

            if (ffmpeg.avformat_alloc_output_context2(&octx, null, "rtsp", outUrl) < 0)
            {
                throw new InvalidOperationException("avformat_alloc_output_context2 failed");
            }

            // Mirror every input stream onto the output.
            for (int i = 0; i < ictx->nb_streams; ++i)
            {
                AVStream *out1 = ffmpeg.avformat_new_stream(octx, ictx->streams[i]->codec->codec);
                if (out1 == null)
                {
                    return;
                }
                // Copy codec parameters; no re-encoding is performed.
                if ((res = ffmpeg.avcodec_copy_context(out1->codec, ictx->streams[i]->codec)) != 0)
                {
                    throw new InvalidOperationException($"avcodec_copy_context failed ({res})");
                }
                //out->codec->codec_tag = 0;// mark as not needing re-encode
            }
            ffmpeg.av_dump_format(octx, 0, outUrl, 1);

            // Open the output io; on failure octx->pb is left null.
            res = ffmpeg.avio_open(&octx->pb, outUrl, ffmpeg.AVIO_FLAG_READ_WRITE);
            if (octx->pb == null)
            {
                throw new InvalidOperationException($"avio_open failed ({res})");
            }

            // Write the header (this may adjust the streams' time bases).
            if ((res = ffmpeg.avformat_write_header(octx, null)) < 0)
            {
                throw new InvalidOperationException($"avformat_write_header failed ({res})");
            }

            AVPacket pkt;

            while (true)
            {
                if ((res = ffmpeg.av_read_frame(ictx, &pkt)) != 0)
                {
                    break;
                }
                // rtsp reads can deliver empty packets; skip them.
                if (pkt.size <= 0)
                {
                    continue;
                }

                // Push the packet; av_interleaved_write_frame resets pkt's
                // pts/dts after writing.
                if ((res = ffmpeg.av_interleaved_write_frame(octx, &pkt)) < 0)
                {
                    throw new InvalidOperationException($"av_interleaved_write_frame failed ({res})");
                }

                ffmpeg.av_packet_unref(&pkt); // release the packet's buffers
            }
            ffmpeg.av_write_trailer(octx);    // finalize the output
        }
Code example #13
0
        /// <summary>
        /// Captures audio/video packets from a DirectShow device and remuxes
        /// them into an .avi container (no transcoding), forcing a 30 fps time
        /// base on the first (video) stream.
        /// </summary>
        public void changecontainerset()
        {
            AVFormatContext *input_format_context  = null;
            AVFormatContext *output_format_context = null;

            FFmpegBinariesHelper.RegisterFFmpegBinaries();
            ffmpeg.avdevice_register_all();
            input_format_context = ffmpeg.avformat_alloc_context();

            AVInputFormat *iformat = ffmpeg.av_find_input_format("dshow");
            string         device  = "video=USB3. 0 capture:audio=디지털 오디오 인터페이스(5- USB3. 0 capture)";

            // Negative return values mean the device/streams could not be opened.
            var a = ffmpeg.avformat_open_input(&input_format_context, device, iformat, null);
            var b = ffmpeg.avformat_find_stream_info(input_format_context, null);

            var fileName = @"C:\Users\admin\Desktop\changeContainer.avi";

            ffmpeg.avformat_alloc_output_context2(&output_format_context, null, null, fileName);
            var number_of_streams = input_format_context->nb_streams;

            // BUG FIX: the mapping table was hard-coded to 2 entries; size it by
            // the actual stream count to avoid an index-out-of-range.
            var streams_list = new int[number_of_streams];
            int stream_index = 0;

            // Map every audio/video input stream onto a new output stream.
            for (int i = 0; i < input_format_context->nb_streams; i++)
            {
                AVStream *         out_stream;
                AVStream *         in_stream   = input_format_context->streams[i];
                AVCodecParameters *in_codecpar = in_stream->codecpar;
                Console.WriteLine(in_codecpar->codec_id);

                if (in_codecpar->codec_type != AVMediaType.AVMEDIA_TYPE_VIDEO &&
                    in_codecpar->codec_type != AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    streams_list[i] = -1;
                    continue;
                }
                streams_list[i] = stream_index++;

                out_stream = ffmpeg.avformat_new_stream(output_format_context, null);

                var ret = ffmpeg.avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
            }

            if (ffmpeg.avio_open(&output_format_context->pb, fileName, ffmpeg.AVIO_FLAG_WRITE) < 0)
            {
                Console.WriteLine("Failed to open output file! \n");
            }

            ffmpeg.avformat_write_header(output_format_context, null);

            // Force a fixed 30 fps time base on the first (video) output stream.
            output_format_context->streams[0]->time_base = new AVRational {
                num = 1, den = 30
            };
            output_format_context->streams[0]->codec->time_base = new AVRational {
                num = 1, den = 30
            };
            output_format_context->streams[0]->codec->framerate = new AVRational {
                num = 30, den = 1
            };
            int index = 1;

            // Copy up to 1000 packets from the device into the output file.
            while (index < 1000)
            {
                AVStream *in_stream;
                AVStream *out_stream;
                AVPacket  packet;
                var       ret = ffmpeg.av_read_frame(input_format_context, &packet);
                if (ret < 0)
                {
                    break;
                }
                in_stream = input_format_context->streams[packet.stream_index];

                if (packet.stream_index == 0)
                {
                    in_stream->codec->time_base = new AVRational {
                        num = 1, den = 30
                    };
                    in_stream->codec->framerate = new AVRational {
                        num = 30, den = 1
                    };
                    in_stream->r_frame_rate = new AVRational {
                        num = 30, den = 1
                    };
                    output_format_context->streams[0]->r_frame_rate = new AVRational {
                        num = 30, den = 1
                    };
                }
                if (packet.stream_index >= number_of_streams || streams_list[packet.stream_index] < 0)
                {
                    ffmpeg.av_packet_unref(&packet);
                    continue;
                }
                packet.stream_index = streams_list[packet.stream_index];

                out_stream = output_format_context->streams[packet.stream_index];

                Console.WriteLine(output_format_context->streams[0]->time_base.num + "/ " + output_format_context->streams[0]->time_base.den);

                // Synthesize monotonically increasing timestamps for the video stream.
                if (packet.stream_index == 0)
                {
                    packet.pts = index;
                    packet.dts = index;
                }

                packet.pts = ffmpeg.av_rescale_q_rnd(packet.pts, output_format_context->streams[packet.stream_index]->codec->time_base, output_format_context->streams[packet.stream_index]->time_base, AVRounding.AV_ROUND_INF | AVRounding.AV_ROUND_PASS_MINMAX);
                packet.dts = ffmpeg.av_rescale_q_rnd(packet.dts, output_format_context->streams[packet.stream_index]->codec->time_base, output_format_context->streams[packet.stream_index]->time_base, AVRounding.AV_ROUND_INF | AVRounding.AV_ROUND_PASS_MINMAX);

                Console.WriteLine(output_format_context->streams[packet.stream_index]->codec->time_base.den);
                Console.WriteLine($"Packet {packet.pts} / {packet.dts} ");

                index++;

                var ret1 = ffmpeg.av_interleaved_write_frame(output_format_context, &packet);
                // BUG FIX: the original tested `ret` (the read result) here, so
                // write failures were never reported.
                if (ret1 < 0)
                {
                    Console.WriteLine("write error");
                }
            }
            ffmpeg.av_write_trailer(output_format_context);
        }