Example #1
        public static unsafe void YUV2RGB(AVFrame f, int[] rgb)
        {
            var luma = f.data_base[0];
            var cb = f.data_base[1];
            var cr = f.data_base[2];
            int stride = f.linesize[0];
            int strideChroma = f.linesize[1];

            fixed (int* rgbPtr = rgb)
            {
                for (int y = 0; y < f.imageHeight; y++)
                {
                    int lineOffLuma = y * stride;
                    int lineOffChroma = (y >> 1) * strideChroma;
                    int lineOffOut = y * f.imageWidth; // rgb[] is packed by imageWidth; the luma stride may be wider

                    for (int x = 0; x < f.imageWidth; x++)
                    {
                        int c = luma[lineOffLuma + x] - 16;
                        int d = cb[lineOffChroma + (x >> 1)] - 128;
                        int e = cr[lineOffChroma + (x >> 1)] - 128;

                        var c298 = 298 * c;

                        byte red = (byte)MathUtils.Clamp((c298 + 409 * e + 128) >> 8, 0, 255);
                        byte green = (byte)MathUtils.Clamp((c298 - 100 * d - 208 * e + 128) >> 8, 0, 255);
                        byte blue = (byte)MathUtils.Clamp((c298 + 516 * d + 128) >> 8, 0, 255);
                        byte alpha = 255;

                        rgbPtr[lineOffOut + x] = (alpha << 24) | (red << 16) | (green << 8) | (blue << 0);
                    }
                }
            }
        }
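Note: the magic numbers are the standard 8.8 fixed-point BT.601 conversion coefficients: 298 ≈ 256 * 255/219 rescales studio-swing luma ([16, 235]) to full range; 409, 100, 208, and 516 are the chroma weights 1.596, 0.391, 0.813, and 2.018 scaled by 256; and the + 128 rounds the result before the >> 8.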
Example #2
        /*
         #define MB_TYPE_INTRA MB_TYPE_INTRA4x4 //default mb_type if there is just one type
         #define IS_INTRA4x4(a)   ((a)&MB_TYPE_INTRA4x4)
         #define IS_INTRA16x16(a) ((a)&MB_TYPE_INTRA16x16)
         #define IS_PCM(a)        ((a)&MB_TYPE_INTRA_PCM)
         #define IS_INTRA(a)      ((a)&7)
         #define IS_INTER(a)      ((a)&(MB_TYPE_16x16|MB_TYPE_16x8|MB_TYPE_8x16|MB_TYPE_8x8))
         #define IS_SKIP(a)       ((a)&MB_TYPE_SKIP)
         #define IS_INTRA_PCM(a)  ((a)&MB_TYPE_INTRA_PCM)
         #define IS_INTERLACED(a) ((a)&MB_TYPE_INTERLACED)
         #define IS_DIRECT(a)     ((a)&MB_TYPE_DIRECT2)
         #define IS_GMC(a)        ((a)&MB_TYPE_GMC)
         #define IS_16X16(a)      ((a)&MB_TYPE_16x16)
         #define IS_16X8(a)       ((a)&MB_TYPE_16x8)
         #define IS_8X16(a)       ((a)&MB_TYPE_8x16)
         #define IS_8X8(a)        ((a)&MB_TYPE_8x8)
         #define IS_SUB_8X8(a)    ((a)&MB_TYPE_16x16) //note reused
         #define IS_SUB_8X4(a)    ((a)&MB_TYPE_16x8)  //note reused
         #define IS_SUB_4X8(a)    ((a)&MB_TYPE_8x16)  //note reused
         #define IS_SUB_4X4(a)    ((a)&MB_TYPE_8x8)   //note reused
         #define IS_ACPRED(a)     ((a)&MB_TYPE_ACPRED)
         #define IS_QUANT(a)      ((a)&MB_TYPE_QUANT)
         #define IS_DIR(a, part, list) ((a) & (MB_TYPE_P0L0<<((part)+2*(list))))
         #define USES_LIST(a, list) ((a) & ((MB_TYPE_P0L0|MB_TYPE_P1L0)<<(2*(list)))) ///< does this mb use listX, note does not work if subMBs
         #define HAS_CBP(a)        ((a)&MB_TYPE_CBP)
         */

        public static AVFrame avcodec_alloc_frame()
        {
            AVFrame ret = new AVFrame();

            ret.pts       = Constants.AV_NOPTS_VALUE;
            ret.key_frame = 1;
            return(ret);
        }
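Note: the #define block quoted above is retained verbatim from the FFmpeg C headers as documentation of the mb_type bit layout; since C# has no macros, the port presumably performs these bit tests inline where needed.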
Example #3
 public static Bitmap imageFromFrameWithoutEdges(AVFrame f, int Width, int Height)
 {
     var XEdge = (f.imageWidth - f.imageWidthWOEdge) / 2;
     var YEdge = (f.imageHeight - f.imageHeightWOEdge) / 2;
     var Out = new Bitmap(Math.Min(Width, f.imageWidthWOEdge), Math.Min(Height, f.imageHeightWOEdge));
     var In = imageFromFrame(f);
     using (var g = Graphics.FromImage(Out))
         g.DrawImage(In, new Point(-XEdge, -YEdge));
     return Out;
 }
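Note: decoded frames carry an EDGE_WIDTH padding border on every side (see the w += EDGE_WIDTH * 2 in get_buffer, Example #28); drawing the full-size image at a negative offset crops that border away, and the Math.Min clamps keep the output inside the edge-free region.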
Example #4
        public static void pic_as_field(AVFrame pic, int parity)
        {
            int i;

            for (i = 0; i < 4; ++i)
            {
                if (parity == Constants.PICT_BOTTOM_FIELD)
                {
                    pic.data_offset[i] += pic.linesize[i];
                }
                pic.reference    = parity;
                pic.linesize[i] *= 2;
            }
            pic.poc = pic.field_poc[(parity == Constants.PICT_BOTTOM_FIELD) ? 1 : 0];
        }
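Note: offsetting each plane by one linesize selects the bottom field's first row, and doubling linesize then steps over the rows of the opposite parity, so a full frame buffer can be addressed as a single field; poc is taken from the matching field_poc entry.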
Example #5
        public static int split_field_copy(AVFrame dest, AVFrame src,
                                           int parity, int id_add)
        {
            int match = ((src.reference & parity) != 0 ? 1 : 0);

            if (match != 0)
            {
                // copy all fields from src (buffer references are shared, not duplicated)
                src.copyTo(dest);
                if (parity != Constants.PICT_FRAME)
                {
                    pic_as_field(dest, parity);
                    dest.pic_id *= 2;
                    dest.pic_id += id_add;
                }
            }
            return(match);
        }
Example #6
        public static unsafe Bitmap imageFromFrame(AVFrame f)
        {
            Bitmap bi = new Bitmap(f.imageWidth, f.imageHeight, PixelFormat.Format32bppArgb);
            int[] rgb = new int[f.imageWidth * f.imageHeight];

            YUV2RGB(f, rgb);

            var BitmapData = bi.LockBits(new System.Drawing.Rectangle(0, 0, bi.Width, bi.Height), ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
            var Ptr = (int*)BitmapData.Scan0.ToPointer();
            for (int j = 0; j < f.imageHeight; j++)
            {
                int off = j * f.imageWidth;
                for (int i = 0; i < f.imageWidth; i++)
                {
                    Ptr[off + i] = rgb[off + i];
                }
            }
            bi.UnlockBits(BitmapData);

            return bi;
        }
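The element-by-element copy above could be collapsed into a single bulk copy. A minimal sketch, assuming BitmapData.Stride equals imageWidth * 4 (which holds for Format32bppArgb, since GDI+ rows are 4-byte aligned and 32bpp rows are already a multiple of 4 bytes):

        // replaces the nested loops between LockBits and UnlockBits
        System.Runtime.InteropServices.Marshal.Copy(rgb, 0, BitmapData.Scan0, rgb.Length);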
Example #7
        public AVFrame DecodeFrame()
        {
            while (hasMoreNAL)
            {
                //Console.WriteLine(avpkt.size);

                _ReadPacket();

                while (avpkt.size > 0)
                {
                    len = c.avcodec_decode_video2(picture, got_picture, avpkt);
                    //Console.WriteLine(FrameCrc.GetFrameLine(avpkt));
                    if (len < 0)
                    {
                        //Console.WriteLine("Error while decoding frame " + frame);
                        // Discard current packet and proceed to next packet
                        break;
                    }

                    if (got_picture[0] != 0)
                    {
                        picture = c.priv_data.displayPicture;

                        int bufferSize = picture.imageWidth * picture.imageHeight;
                        if (buffer == null || bufferSize != buffer.Length)
                        {
                            buffer = new int[bufferSize];
                        }
                    }
                    avpkt.size -= len;
                    avpkt.data_offset += len;
                    frame++;
                    if (got_picture[0] != 0)
                    {
                        return picture;
                    }
                }
            } // while
            throw (new EndOfStreamException());
        }
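A minimal usage sketch of the decode loop, assuming the snippets above live on a wrapper class (the H264Player name and its constructor are hypothetical; DecodeFrame is from this example, imageFromFrame from Example #6):

        var player = new H264Player("clip.264"); // hypothetical wrapper exposing DecodeFrame()
        int frameIndex = 0;
        try
        {
            while (true)
            {
                AVFrame f = player.DecodeFrame();      // throws EndOfStreamException at end of input
                using (Bitmap bmp = imageFromFrame(f)) // Example #6 (class placement assumed)
                    bmp.Save("frame" + frameIndex++ + ".png");
            }
        }
        catch (EndOfStreamException)
        {
            // no more NAL units to decode
        }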
Example #8
        /**
         * Allocate a frame buffer
         */
        public int alloc_frame_buffer(AVFrame pic)
        {
            int r;

            /* hwaccel path from the C original, not ported:
            if (this.avctx.hwaccel) {
                //assert(!pic.hwaccel_picture_private);
                if (this.avctx.hwaccel.priv_data_size) {
                    pic.hwaccel_picture_private = av_mallocz(this.avctx.hwaccel.priv_data_size);
                    if (!pic.hwaccel_picture_private) {
                        av_log(this.avctx, AV_LOG_ERROR, "alloc_frame_buffer() failed (hwaccel private data allocation)\n");
                        return -1;
                    }
                }
            }
            */

            // TODO: get_buffer seems to capture only first component?
            r = this.get_buffer((AVFrame)pic);

            if (r < 0 || 0 == pic.age || 0 == pic.type || null == pic.data_base[0])
            {
                //av_log(this.avctx, AV_LOG_ERROR, "get_buffer() failed (%d %d %d %p)\n", r, pic.age, pic.type, pic.data[0]);
                //av_freep(&pic.hwaccel_picture_private);
                return -1;
            }

            if (this.linesize != 0 && (this.linesize != pic.linesize[0] || this.uvlinesize != pic.linesize[1]))
            {
                //av_log(this.avctx, AV_LOG_ERROR, "get_buffer() failed (stride changed)\n");
                // DebugTool.printDebugString("     ----- alloc_frame_buffer error case 0\n");
                free_frame_buffer(pic);
                return -1;
            }

            if (pic.linesize[1] != pic.linesize[2])
            {
                //av_log(this.avctx, AV_LOG_ERROR, "get_buffer() failed (uv stride mismatch)\n");
                // DebugTool.printDebugString("     ----- alloc_frame_buffer error case 1\n");
                free_frame_buffer(pic);
                return -1;
            }

            // DebugTool.printDebugString("     ----- alloc_frame_buffer OK.\n");
            return 0;
        }
Example #9
 public static void ff_copy_picture(AVFrame dst, AVFrame src)
 {
     // field-by-field copy of src into dst
     src.copyTo(dst);
     dst.type = FF_BUFFER_TYPE_COPY;
     //return dst;
 }
Example #10
        public void release_buffer(AVFrame pic)
        {
            int i;
            InternalBuffer buf, last;

            ////assert(pic->type==FF_BUFFER_TYPE_INTERNAL);
            ////assert(s->internal_buffer_count);

            buf = null; /* avoids warning */
            for (i = 0; i < this.internal_buffer_count; i++)
            { // just 3-5 checks, so not worth optimizing
                buf = this.internal_buffer[i];
                if (buf.@base[0] == pic.data_base[0] && buf.data_offset[0] == pic.data_offset[0])
                    break;
            }
            ////assert(i < s->internal_buffer_count);
            this.internal_buffer_count--;
            last = this.internal_buffer[this.internal_buffer_count];

            //FFSWAP(InternalBuffer, *buf, *last);
            InternalBuffer tmp = new InternalBuffer();
            buf.copyInto(tmp);
            last.copyInto(buf);
            tmp.copyInto(last);

            for (i = 0; i < 4; i++)
            {
                pic.data_base[i] = null;
                pic.data_offset[i] = 0;
                //	        pic->base[i]=NULL;
            }
            //printf("R%X\n", pic->opaque);

            //if(s->debug&FF_DEBUG_BUFFERS)
            //    av_log(s, AV_LOG_DEBUG, "default_release_buffer called on pic %p, %d buffers used\n", pic, s->internal_buffer_count);
        }
Example #11
        /**
         * generic function for encode/decode called after coding/decoding the header and before a frame is coded/decoded
         */
        public int MPV_frame_start()
        {
            int i;
            AVFrame pic;
            this.mb_skipped = 0;

            ////assert(this.last_picture_ptr==NULL || this.out_format != FMT_H264 || this.codec_id == CODEC_ID_SVQ3);

            /* mark&release old frames */
            // if (this.pict_type != FF_B_TYPE && this.last_picture_ptr && this.last_picture_ptr != this.next_picture_ptr && this.last_picture_ptr.data[0]) {
            /* Support H264
            if(this.out_format != FMT_H264 || this.codec_id == CODEC_ID_SVQ3){
                free_frame_buffer(s, this.last_picture_ptr);

              // release forgotten pictures
              // if(mpeg124/h263)
              if(!this.encoding){
                  for(i=0; i<MAX_PICTURE_COUNT; i++){
                      if(this.picture[i].data[0] && &this.picture[i] != this.next_picture_ptr && this.picture[i].reference){
                          av_log(avctx, AV_LOG_ERROR, "releasing zombie picture\n");
                          free_frame_buffer(s, &this.picture[i]);
                      }
                  }
              }
            }
            */
            //}

            if (true)
            {
                /* release non reference frames */
                for (i = 0; i < MAX_PICTURE_COUNT; i++)
                {
                    if (this.picture[i].data_base[0] != null && 0 == this.picture[i].reference && this.picture[i].type != FF_BUFFER_TYPE_SHARED)
                    {
                        // DebugTool.printDebugString("****free_frame_buffer[picture:"+i+"].\n");
                        free_frame_buffer(this.picture[i]);
                    }
                }

                if (this.current_picture_ptr != null && this.current_picture_ptr.data_base[0] == null)
                {
                    // DebugTool.printDebugString("****reuse cuurent_picture_ptr.\n");
                    pic = this.current_picture_ptr; //we already have an unused image (maybe it was set before reading the header)
                }
                else
                {
                    i = ff_find_unused_picture(0);
                    // DebugTool.printDebugString("****reuse picture:"+i+".\n");
                    pic = this.picture[i];
                }

                pic.reference = 0;
                if (0 == this.dropable)
                {
                    if (this.codec_id == H264PredictionContext.CODEC_ID_H264)
                        pic.reference = this.picture_structure;
                    else if (this.pict_type != H264Context.FF_B_TYPE)
                        pic.reference = 3;
                }

                pic.coded_picture_number = this.coded_picture_number++;

                if (this.ff_alloc_picture(pic, 0) < 0)
                {
                    // DebugTool.printDebugString("     ----- MPV_frame_start error case 0\n");
                    return -1;
                }

                this.current_picture_ptr = pic;
                //FIXME use only the vars from current_pic
                this.current_picture_ptr.top_field_first = this.top_field_first;
                //???????????????????????????????
                /* Only Support H264
                if(this.codec_id == H264PredictionContext.CODEC_ID_MPEG1VIDEO || this.codec_id == H264PredictionContext.CODEC_ID_MPEG2VIDEO) {
                    if(this.picture_structure != PICT_FRAME)
                        this.current_picture_ptr.top_field_first= (this.picture_structure == PICT_TOP_FIELD) == this.first_field;
                }
                */
                this.current_picture_ptr.interlaced_frame = (0 == this.progressive_frame && 0 == this.progressive_sequence) ? 1 : 0;
            }

            this.current_picture_ptr.pict_type = this.pict_type;
            //        if(this.flags && CODEC_FLAG_QSCALE)
            //      this.current_picture_ptr.quality= this.new_picture_ptr.quality;
            this.current_picture_ptr.key_frame = (this.pict_type == H264Context.FF_I_TYPE) ? 1 : 0;

            ff_copy_picture(this.current_picture, this.current_picture_ptr);

            if (this.pict_type != H264Context.FF_B_TYPE)
            {
                this.last_picture_ptr = this.next_picture_ptr;
                if (0 == this.dropable)
                    this.next_picture_ptr = this.current_picture_ptr;
            }
            /*    av_log(this.avctx, AV_LOG_DEBUG, "L%p N%p C%p L%p N%p C%p type:%d drop:%d\n", this.last_picture_ptr, this.next_picture_ptr,this.current_picture_ptr,
                    this.last_picture_ptr    ? this.last_picture_ptr.data[0] : NULL,
                    this.next_picture_ptr    ? this.next_picture_ptr.data[0] : NULL,
                    this.current_picture_ptr ? this.current_picture_ptr.data[0] : NULL,
                    this.pict_type, this.dropable);*/

            /* Only support H264
            if(this.codec_id != CODEC_ID_H264){
                if((this.last_picture_ptr==NULL || this.last_picture_ptr.data[0]==NULL) && this.pict_type!=FF_I_TYPE){
                    //av_log(avctx, AV_LOG_ERROR, "warning: first frame is no keyframe\n");
                    // Allocate a dummy frame
                    i= ff_find_unused_picture(s, 0);
                    this.last_picture_ptr= &this.picture[i];
                    if(ff_alloc_picture(s, this.last_picture_ptr, 0) < 0)
                        return -1;
                }
                if((this.next_picture_ptr==NULL || this.next_picture_ptr.data[0]==NULL) && this.pict_type==FF_B_TYPE){
                    // Allocate a dummy frame
                    i= ff_find_unused_picture(s, 0);
                    this.next_picture_ptr= &this.picture[i];
                    if(ff_alloc_picture(s, this.next_picture_ptr, 0) < 0)
                        return -1;
                }
            }
            */

            if (this.last_picture_ptr != null) ff_copy_picture(this.last_picture, this.last_picture_ptr);
            if (this.next_picture_ptr != null) ff_copy_picture(this.next_picture, this.next_picture_ptr);

            ////assert(this.pict_type == FF_I_TYPE || (this.last_picture_ptr && this.last_picture_ptr.data[0]));
            //??????????????????????
            /* Only support H264
            if(this.picture_structure!=PICT_FRAME && this.out_format != FMT_H264){
                int i;
                for(i=0; i<4; i++){
                    if(this.picture_structure == PICT_BOTTOM_FIELD){
                         this.current_picture.data_offset[i] += this.current_picture.linesize[i];
                    }
                    this.current_picture.linesize[i] *= 2;
                    this.last_picture.linesize[i] *=2;
                    this.next_picture.linesize[i] *=2;
                }
            }
            */

            //??????????????????????/
            //this.hurry_up= this.avctx.hurry_up;
            this.error_recognition = 1;

            /* set dequantizer, we can't do it during init as it might change for mpeg4
               and we can't do it in the header decode as init is not called for mpeg4 there yet */
            //????????????????????????
            /*
            if(this.mpeg_quant!=0 || this.codec_id == CODEC_ID_MPEG2VIDEO){
                this.dct_unquantize_intra = this.dct_unquantize_mpeg2_intra;
                this.dct_unquantize_inter = this.dct_unquantize_mpeg2_inter;
            }else if(this.out_format == FMT_H263 || this.out_format == FMT_H261){
                this.dct_unquantize_intra = this.dct_unquantize_h263_intra;
                this.dct_unquantize_inter = this.dct_unquantize_h263_inter;
            }else{
                this.dct_unquantize_intra = this.dct_unquantize_mpeg1_intra;
                this.dct_unquantize_inter = this.dct_unquantize_mpeg1_inter;
            }
            */

            /*??????????????
             * No way in encoding
            if(this.dct_error_sum!=0){
                ////assert(this.avctx.noise_reduction && this.encoding);

                this.update_noise_reduction();
            }
            */

            //if(CONFIG_MPEG_XVMC_DECODER && this.avctx.xvmc_acceleration)
            //    return ff_xvmc_field_start(s, avctx);
            // DebugTool.printDebugString("     ----- MPV_frame_start error OK.\n");
            return 0;
        }
Example #12
 public static Bitmap imageFromFrameWithoutEdges(AVFrame f)
 {
     return imageFromFrameWithoutEdges(f, f.imageWidthWOEdge, f.imageHeightWOEdge);
 }
Example #13
        ////////////////////////////////
        // Motion functions
        //public void mc_dir_part(Picture pic, int n, int square, int chroma_height, int delta, int list,
        //        uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr,
        //        int src_x_offset, int src_y_offset,
        //        qpel_mc_func *qpix_op, h264_chroma_mc_func chroma_op){
        public void mc_dir_part(AVFrame pic, int n, int square, int chroma_height, int delta, int list,
            byte[] dest_y_base, int dest_y_offset,
            byte[] dest_cb_base, int dest_cb_offset,
            byte[] dest_cr_base, int dest_cr_offset,
            int src_x_offset, int src_y_offset,
            DSPContext.Ih264_qpel_mc_func[] qpix_op, DSPContext.Ih264_chroma_mc_func chroma_op)
        {
            int mx = this.mv_cache[list][scan8[n]][0] + src_x_offset * 8;
            int my = this.mv_cache[list][scan8[n]][1] + src_y_offset * 8;
            int luma_xy = (mx & 3) + ((my & 3) << 2);

            //uint8_t * src_y = pic.data[0] + (mx>>2) + (my>>2)*this.mb_linesize;
            //uint8_t * src_cb, * src_cr;
            byte[] src_y_base = pic.data_base[0];
            int _src_y_offset = pic.data_offset[0] + (mx >> 2) + (my >> 2) * this.mb_linesize;
            byte[] src_cb_base;
            int src_cb_offset;
            byte[] src_cr_base;
            int src_cr_offset;

            int extra_width = this.emu_edge_width;
            int extra_height = this.emu_edge_height;
            int emu = 0;
            int full_mx = mx >> 2;
            int full_my = my >> 2;
            int pic_width = 16 * s.mb_width;
            int pic_height = 16 * s.mb_height >> mb_field_decoding_flag;

            // DebugTool.printDebugString("***mc_dir_part: src_x_offset="+src_x_offset+", src_y_offset="+src_y_offset+", list="+list+", n="+n+", mv_cache[0]="+this.mv_cache[list][ scan8[n] ][0]+", mv_cache[1]="+this.mv_cache[list][ scan8[n] ][1]+"\n");
            // DebugTool.printDebugString("***mc_dir_part: mx="+mx+", my="+my+", luma_xy="+luma_xy+", _src_y_offset="+((mx>>2) + (my>>2)*this.mb_linesize)+"\n");

            if ((mx & 7) != 0) extra_width -= 3;
            if ((my & 7) != 0) extra_height -= 3;

            if (full_mx < 0 - extra_width
            || full_my < 0 - extra_height
            || full_mx + 16/*FIXME*/ > pic_width + extra_width
            || full_my + 16/*FIXME*/ > pic_height + extra_height)
            {

                // DebugTool.printDebugString("***mc_dir_part: case 1\n");

                s.dsp.ff_emulated_edge_mc(s.allocated_edge_emu_buffer, s.edge_emu_buffer_offset
                        , src_y_base, _src_y_offset - 2 - 2 * this.mb_linesize
                        , this.mb_linesize, 16 + 5, 16 + 5/*FIXME*/
                        , full_mx - 2, full_my - 2
                        , pic_width, pic_height);

                src_y_base = s.allocated_edge_emu_buffer;
                _src_y_offset = s.edge_emu_buffer_offset + 2 + 2 * this.mb_linesize;
                emu = 1;
            }

            qpix_op[luma_xy](dest_y_base, dest_y_offset, src_y_base, _src_y_offset, this.mb_linesize); //FIXME try variable height perhaps?

            if (0 == square)
            {
                // DebugTool.printDebugString("***mc_dir_part: case 2\n");

                qpix_op[luma_xy](dest_y_base, dest_y_offset + delta, src_y_base, _src_y_offset + delta, this.mb_linesize);
            }

            //if(MpegEncContext.CONFIG_GRAY !=0 && (s.flags&MpegEncContext.CODEC_FLAG_GRAY)!=0) return;

            if (mb_field_decoding_flag != 0)
            {
                // DebugTool.printDebugString("***mc_dir_part: case 3\n");

                // chroma offset when predicting from a field of opposite parity
                my += 2 * ((s.mb_y & 1) - (pic.reference - 1));
                emu |= (((my >> 3) < 0 || (my >> 3) + 8 >= (pic_height >> 1)) ? 1 : 0);
            }

            src_cb_base = pic.data_base[1];
            src_cb_offset = pic.data_offset[1] + (mx >> 3) + (my >> 3) * this.mb_uvlinesize;
            src_cr_base = pic.data_base[2];
            src_cr_offset = pic.data_offset[2] + (mx >> 3) + (my >> 3) * this.mb_uvlinesize;

            if (emu != 0)
            {
                // DebugTool.printDebugString("***mc_dir_part: case 4\n");

                s.dsp.ff_emulated_edge_mc(s.allocated_edge_emu_buffer, s.edge_emu_buffer_offset, src_cb_base, src_cb_offset, this.mb_uvlinesize, 9, 9/*FIXME*/, (mx >> 3), (my >> 3), pic_width >> 1, pic_height >> 1);
                src_cb_base = s.allocated_edge_emu_buffer;
                src_cb_offset = s.edge_emu_buffer_offset;
            }
            chroma_op(dest_cb_base, dest_cb_offset, src_cb_base, src_cb_offset, this.mb_uvlinesize, chroma_height, mx & 7, my & 7);

            if (emu != 0)
            {
                // DebugTool.printDebugString("***mc_dir_part: case 5\n");

                s.dsp.ff_emulated_edge_mc(s.allocated_edge_emu_buffer, s.edge_emu_buffer_offset, src_cr_base, src_cr_offset, this.mb_uvlinesize, 9, 9/*FIXME*/, (mx >> 3), (my >> 3), pic_width >> 1, pic_height >> 1);
                src_cr_base = s.allocated_edge_emu_buffer;
                src_cr_offset = s.edge_emu_buffer_offset;
            }
            chroma_op(dest_cr_base, dest_cr_offset, src_cr_base, src_cr_offset, this.mb_uvlinesize, chroma_height, mx & 7, my & 7);
        }
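Note: mv_cache stores motion vectors in quarter-pel units, so mx >> 2 / my >> 2 give the integer sample position and luma_xy = (mx & 3) + ((my & 3) << 2) selects one of the 16 quarter-pel interpolation functions in qpix_op; the half-resolution chroma planes are addressed via mx >> 3 / my >> 3 with eighth-pel fractions (mx & 7, my & 7); and ff_emulated_edge_mc copies the source block into a padded scratch buffer whenever the vector points outside the picture (the emu path).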
Example #14
        public int decode_frame(AVFrame data, int[] data_size, AVPacket avpkt)
        {
            /* const uint8_t * */
            byte[] buf_base = avpkt.data_base;
            int buf_offset = avpkt.data_offset;
            int buf_size = avpkt.size;
            //AVFrame pict = data;
            int buf_index;

            // ???????????????????????????????????????
            // s.flags= avctx.flags;
            // s.flags2= avctx.flags2;

            /* end of stream, output what is still in the buffers */
            bool loop = true;
            // out:
            do
            {
                loop = false;

                if (buf_size == 0)
                {
                    AVFrame @out;
                    int i, out_idx;

                    // FIXME factorize this with the output code below
                    @out = this.delayed_pic[0];
                    out_idx = 0;
                    for (i = 1; this.delayed_pic[i] != null
                            && 0 == this.delayed_pic[i].key_frame
                            && 0 == this.delayed_pic[i].mmco_reset; i++)
                        if (this.delayed_pic[i].poc < @out.poc)
                        {
                            @out = this.delayed_pic[i];
                            out_idx = i;
                        }

                    for (i = out_idx; this.delayed_pic[i] != null; i++)
                        this.delayed_pic[i] = this.delayed_pic[i + 1];

                    if (@out != null)
                    {
                        data_size[0] = 1;
                        @out.copyTo(displayPicture);

                        // DebugTool.dumpFrameData(displayPicture);
                    }

                    return 0;
                }

                buf_index = this.decode_nal_units(buf_base, buf_offset, buf_size);
                if (buf_index < 0)
                {
                    return -8;
                }

                if (null == s.current_picture_ptr
                        && this.nal_unit_type == NAL_END_SEQUENCE)
                {
                    buf_size = 0;
                    loop = true;
                }

            } while (loop);

            if (0 == (s.flags2 & MpegEncContext.CODEC_FLAG2_CHUNKS) && null == s.current_picture_ptr)
            {
                if (s.skip_frame >= MpegEncContext.AVDISCARD_NONREF || s.hurry_up != 0) return 0;
                // av_log(avctx, AV_LOG_ERROR, "no frame!\n");
                //Console.WriteLine("!!!! NO FRAME !!!!");
                //return s.get_consumed_bytes(buf_index, buf_size); //-1;
                return -9;
            }

            if (0 == (s.flags2 & MpegEncContext.CODEC_FLAG2_CHUNKS)
                    || (s.mb_y >= s.mb_height && s.mb_height != 0))
            {
                AVFrame @out = s.current_picture_ptr;
                AVFrame cur = s.current_picture_ptr;
                int i, pics, out_of_order, out_idx;

                this.field_end();

                if (cur.field_poc[0] == int.MaxValue
                        || cur.field_poc[1] == int.MaxValue)
                {
                    /* Wait for second field. */
                    data_size[0] = 0;

                }
                else
                {
                    cur.interlaced_frame = 0;
                    cur.repeat_pict = 0;

                    /* Signal interlacing information externally. */
                    /*
                     * Prioritize picture timing SEI information over used decoding
                     * process if it exists.
                     */

                    if (this.sps.pic_struct_present_flag != 0)
                    {
                        switch (this.sei_pic_struct)
                        {
                            case SEI_PIC_STRUCT_FRAME:
                                break;
                            case SEI_PIC_STRUCT_TOP_FIELD:
                            case SEI_PIC_STRUCT_BOTTOM_FIELD:
                                cur.interlaced_frame = 1;
                                break;
                            case SEI_PIC_STRUCT_TOP_BOTTOM:
                            case SEI_PIC_STRUCT_BOTTOM_TOP:
                                if ((mb_aff_frame != 0 || (s.picture_structure != Constants.PICT_FRAME)))
                                    cur.interlaced_frame = 1;
                                else
                                    // try to flag soft telecine progressive
                                    cur.interlaced_frame = this.prev_interlaced_frame;
                                break;
                            case SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
                            case SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
                                // Signal the possibility of telecined film externally
                                // (pic_struct 5,6)
                                // From these hints, let the applications decide if they
                                // apply deinterlacing.
                                cur.repeat_pict = 1;
                                break;
                            case SEI_PIC_STRUCT_FRAME_DOUBLING:
                                // Force progressive here, as doubling interlaced frame
                                // is a bad idea.
                                cur.repeat_pict = 2;
                                break;
                            case SEI_PIC_STRUCT_FRAME_TRIPLING:
                                cur.repeat_pict = 4;
                                break;
                        }

                        if ((this.sei_ct_type & 3) != 0
                                && this.sei_pic_struct <= SEI_PIC_STRUCT_BOTTOM_TOP)
                            cur.interlaced_frame = ((this.sei_ct_type & (1 << 1)) != 0) ? 1
                                    : 0;
                    }
                    else
                    {
                        /* Derive interlacing flag from used decoding process. */
                        cur.interlaced_frame = (mb_aff_frame != 0 || (s.picture_structure != Constants.PICT_FRAME)) ? 1
                                : 0;
                    }
                    this.prev_interlaced_frame = cur.interlaced_frame;

                    if (cur.field_poc[0] != cur.field_poc[1])
                    {
                        /* Derive top_field_first from field pocs. */
                        cur.top_field_first = (cur.field_poc[0] < cur.field_poc[1]) ? 1
                                : 0;
                    }
                    else
                    {
                        if (cur.interlaced_frame != 0
                                || this.sps.pic_struct_present_flag != 0)
                        {
                            /*
                             * Use picture timing SEI information. Even if it is
                             * information from a past frame, it is better than nothing.
                             */
                            if (this.sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM
                                    || this.sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
                                cur.top_field_first = 1;
                            else
                                cur.top_field_first = 0;
                        }
                        else
                        {
                            /* Most likely progressive */
                            cur.top_field_first = 0;
                        }
                    }

                    // FIXME do something with unavailable reference frames

                    /* Sort B-frames into display order */

                    if (this.sps.bitstream_restriction_flag != 0
                            && s.has_b_frames < this.sps.num_reorder_frames)
                    {
                        s.has_b_frames = this.sps.num_reorder_frames;
                        s.low_delay = 0;
                    }

                    if (s.strict_std_compliance >= MpegEncContext.FF_COMPLIANCE_STRICT
                            && 0 == this.sps.bitstream_restriction_flag)
                    {
                        s.has_b_frames = MAX_DELAYED_PIC_COUNT;
                        s.low_delay = 0;
                    }

                    pics = 0;
                    while (this.delayed_pic[pics] != null)
                        pics++;

                    //assert(pics <= MAX_DELAYED_PIC_COUNT);

                    this.delayed_pic[pics++] = cur;
                    if (cur.reference == 0)
                        cur.reference = DELAYED_PIC_REF;

                    @out = this.delayed_pic[0];
                    out_idx = 0;
                    for (i = 1; this.delayed_pic[i] != null
                            && 0 == this.delayed_pic[i].key_frame
                            && 0 == this.delayed_pic[i].mmco_reset; i++)
                        if (this.delayed_pic[i].poc < @out.poc)
                        {
                            @out = this.delayed_pic[i];
                            out_idx = i;
                        }
                    if (s.has_b_frames == 0
                            && (this.delayed_pic[0].key_frame != 0 || this.delayed_pic[0].mmco_reset != 0))
                        this.outputed_poc = int.MinValue;
                    out_of_order = (@out.poc < this.outputed_poc) ? 1 : 0;

                    if (this.sps.bitstream_restriction_flag != 0
                            && s.has_b_frames >= this.sps.num_reorder_frames)
                    {
                    }
                    else if ((out_of_order != 0 && pics - 1 == s.has_b_frames && s.has_b_frames < MAX_DELAYED_PIC_COUNT)
                          || (s.low_delay != 0 && ((this.outputed_poc != int.MinValue && @out.poc > this.outputed_poc + 2) || cur.pict_type == FF_B_TYPE)))
                    {
                        s.low_delay = 0;
                        s.has_b_frames++;
                    }

                    if (0 != out_of_order || pics > s.has_b_frames)
                    {
                        @out.reference &= ~DELAYED_PIC_REF;
                        for (i = out_idx; this.delayed_pic[i] != null; i++)
                            this.delayed_pic[i] = this.delayed_pic[i + 1];
                    }
                    if (0 == out_of_order && pics > s.has_b_frames)
                    {
                        data_size[0] = 1;

                        if (out_idx == 0
                                && this.delayed_pic[0] != null
                                && (this.delayed_pic[0].key_frame != 0 || this.delayed_pic[0].mmco_reset != 0))
                        {
                            this.outputed_poc = int.MinValue;
                        }
                        else
                            this.outputed_poc = @out.poc;
                        @out.copyTo(displayPicture);

                        // DebugTool.dumpFrameData(displayPicture);
                    }
                    else
                    {
                        // av_log(avctx, AV_LOG_DEBUG, "no picture\n");
                    }
                }
            }

            // //assert(pict.data[0] || !*data_size);
            // ff_print_debug_info(s, pict);
            // printf("out %d\n", (int)pict.data[0]);

            return s.get_consumed_bytes(buf_index, buf_size);
        }
Example #16
File: AVFrame.cs Project: pansk/cscodec
        public static int build_def_list(AVFrame[] def_base, int def_offset, AVFrame[] in_base, int in_offset, int len, int is_long, int sel)
        {
            int[] i = new int[] { 0, 0 };
            int index = 0;

            while (i[0] < len || i[1] < len)
            {
                while (i[0] < len && !(in_base[in_offset + i[0]] != null && (in_base[in_offset + i[0]].reference & sel) != 0))
                    i[0]++;
                while (i[1] < len && !(in_base[in_offset + i[1]] != null && (in_base[in_offset + i[1]].reference & (sel ^ 3)) != 0))
                    i[1]++;
                if (i[0] < len)
                {
                    in_base[in_offset + i[0]].pic_id = (is_long != 0 ? i[0] : in_base[in_offset + i[0]].frame_num);
                    split_field_copy(def_base[def_offset + index++], in_base[in_offset + i[0]++], sel, 1);
                }
                if (i[1] < len)
                {
                    in_base[in_offset + i[1]].pic_id = (is_long != 0 ? i[1] : in_base[in_offset + i[1]].frame_num);
                    split_field_copy(def_base[def_offset + index++], in_base[in_offset + i[1]++], sel ^ 3, 0);
                }
            }

            return index;
        }
Example #17
File: AVFrame.cs Project: pansk/cscodec
        public static int add_sorted(AVFrame[] sorted_base, int sorted_offset, AVFrame[] src_base, int src_offset, int len, int limit, int dir)
        {
            int i, best_poc;
            int out_i = 0;

            for (;;)
            {
                best_poc = (dir != 0 ? int.MinValue : int.MaxValue);

                for (i = 0; i < len; i++)
                {
                    int poc = src_base[src_offset + i].poc;
                    if (((poc > limit) ^ (dir != 0)) && ((poc < best_poc) ^ (dir != 0)))
                    {
                        best_poc = poc;
                        sorted_base[sorted_offset + out_i] = src_base[src_offset + i];
                    }
                }
                if (best_poc == (dir != 0 ? int.MinValue : int.MaxValue))
                    break;
                limit = sorted_base[sorted_offset + out_i++].poc - dir;
            }
            return out_i;
        }
Example #18
 public int avcodec_close()
 {
     if (this.codec != null)
         this.codec.close(this);
     avcodec_default_free_buffers();
     this.coded_frame = null;
     this.priv_data = null;
     this.codec = null;
     return 0;
 }
Example #19
 private void Close()
 {
     c.avcodec_close();
     c = null;
     picture = null;
 }
Example #20
        private void Init()
        {
            avpkt.av_init_packet();

            // Set end of buffer to 0 (this ensures that no overreading happens for damaged mpeg streams)
            Arrays.Fill(inbuf, INBUF_SIZE, MpegEncContext.FF_INPUT_BUFFER_PADDING_SIZE + INBUF_SIZE, (sbyte)0);

            // Find the H.264 video decoder
            codec = new H264Decoder();
            if (codec == null)
            {
                throw (new Exception("codec not found"));
            }

            c = MpegEncContext.avcodec_alloc_context();
            picture = AVFrame.avcodec_alloc_frame();

            // We do not send complete frames
            if ((codec.capabilities & H264Decoder.CODEC_CAP_TRUNCATED) != 0)
            {
                c.flags |= MpegEncContext.CODEC_FLAG_TRUNCATED;
            }

            // For some codecs, such as msmpeg4 and mpeg4, width and height
            // MUST be initialized there because this information is not
            // available in the bitstream.

            // Open it
            if (c.avcodec_open(codec) < 0)
            {
                throw (new Exception("could not open codec"));
            }

            // The codec gives us the frame size, in samples

            frame = 0;

            // avpkt must contain exactly one NAL unit for the decoder to decode correctly,
            // so we must read until we find the next NAL header before sending data to the decoder.
            // Find the first NAL
            cacheRead[0] = fin.ReadByte();
            cacheRead[1] = fin.ReadByte();
            cacheRead[2] = fin.ReadByte();

            while (!(cacheRead[0] == 0x00 && cacheRead[1] == 0x00 && cacheRead[2] == 0x01))
            {
                cacheRead[0] = cacheRead[1];
                cacheRead[1] = cacheRead[2];
                cacheRead[2] = fin.ReadByte();
                if (cacheRead[2] == -1) throw(new EndOfStreamException());
            } // while

            // 4 first bytes always indicate NAL header
            inbuf_int[0] = inbuf_int[1] = inbuf_int[2] = 0x00;
            inbuf_int[3] = 0x01;

            hasMoreNAL = true;
        }
Example #21
        public int b_frame_score;

        public AVFrame copyTo(AVFrame ret)
        {
            // Shallow field-by-field copy: buffer arrays are shared with the source, not duplicated.
            ret.age           = age;
            ret.b_frame_score = b_frame_score;
            for (int i = 0; i < @base.Length; i++)
            {
                ret.@base[i] = @base[i];
            }
            ret.buffer_hints         = buffer_hints;
            ret.coded_picture_number = coded_picture_number;
            for (int i = 0; i < data_base.Length; i++)
            {
                ret.data_base[i]   = data_base[i];
                ret.data_offset[i] = data_offset[i];
            }      // for
            ret.dct_coeff = dct_coeff;
            ret.display_picture_number = display_picture_number;
            Array.Copy(error, 0, ret.error, 0, error.Length);
            Array.Copy(field_poc, 0, ret.field_poc, 0, field_poc.Length);
            ret.frame_num         = frame_num;
            ret.imageWidth        = imageWidth;
            ret.imageHeight       = imageHeight;
            ret.imageWidthWOEdge  = imageWidthWOEdge;
            ret.imageHeightWOEdge = imageHeightWOEdge;
            ret.interlaced_frame  = interlaced_frame;
            Array.Copy(interpolated, 0, ret.interpolated, 0, interpolated.Length);
            ret.key_frame = key_frame;
            Array.Copy(linesize, 0, ret.linesize, 0, linesize.Length);
            ret.long_ref              = long_ref;
            ret.mb_cmp_score          = mb_cmp_score;
            ret.mb_mean               = mb_mean;
            ret.mb_type_base          = mb_type_base;
            ret.mb_type_offset        = mb_type_offset;
            ret.mb_var                = mb_var;
            ret.mb_var_sum            = mb_var_sum;
            ret.mbaff                 = mbaff;
            ret.mbskip_table          = mbskip_table;
            ret.mc_mb_var             = mc_mb_var;
            ret.mc_mb_var_sum         = mc_mb_var_sum;
            ret.mmco_reset            = mmco_reset;
            ret.motion_subsample_log2 = motion_subsample_log2;
            //??????????????? Can we copy it at this depth?
            Array.Copy(motion_val_base, 0, ret.motion_val_base, 0, motion_val_base.Length);
            Array.Copy(motion_val_offset, 0, ret.motion_val_offset, 0, motion_val_offset.Length);
            ret.opaque = opaque;
            ret.palette_has_changed = palette_has_changed;
            ret.pan_scan            = pan_scan;
            ret.pic_id       = pic_id;
            ret.pict_type    = pict_type;
            ret.pkt_dts      = pkt_dts;
            ret.pkt_pts      = pkt_pts;
            ret.poc          = poc;
            ret.pts          = pts;
            ret.qscale_table = qscale_table;
            ret.qscale_type  = qscale_type;
            ret.qstride      = qstride;
            ret.quality      = quality;
            ret.ref_count    = (int[][])ref_count.Clone();
            Array.Copy(ref_index, 0, ret.ref_index, 0, ref_index.Length);
            ret.ref_poc          = (int[][][])ref_poc.Clone();
            ret.reference        = reference;
            ret.reordered_opaque = reordered_opaque;
            ret.repeat_pict      = repeat_pict;
            ret.top_field_first  = top_field_first;
            ret.type             = type;
            return(ret);
        }
Example #22
 public int decode(MpegEncContext s, AVFrame outdata, int[] outdata_size, AVPacket avpkt)
 {
     return s.priv_data.decode_frame(outdata, outdata_size, avpkt);
 }
Example #23
        public int avcodec_decode_video2(AVFrame picture, int[] got_picture_ptr /* [0] = in/out param */, AVPacket avpkt)
        {
            int ret;

            got_picture_ptr[0] = 0;
            if ((this.coded_width != 0 || this.coded_height != 0) && av_image_check_size(this.coded_width, this.coded_height, 0, this) != 0)
            {
                return -7;
            }

            this.pkt = avpkt;

            if ((this.codec.capabilities & H264Decoder.CODEC_CAP_DELAY) != 0 || avpkt.size != 0)
            {
                ret = this.codec.decode(this, picture, got_picture_ptr, avpkt);

                // emms_c(); //needed to avoid an emms_c() call before every return;

                picture.pkt_dts = avpkt.dts;

                if (got_picture_ptr[0] != 0)
                {
                    this.frame_number++;
                }
            }
            else
            {
                ret = 0;
            }

            return ret;
        }
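Note: a successful call can consume input without producing a picture (got_picture_ptr[0] stays 0) while the decoder is buffering reordered frames, which is why the loop in Example #7 keeps feeding packets until the flag is set.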
Example #24
        /**
         * allocates a Picture
         * The pixels are allocated/set by calling get_buffer() if shared=0
         */
        public int ff_alloc_picture(AVFrame pic, int shared)
        {
            int big_mb_num = this.mb_stride * (this.mb_height + 1) + 1; //the +1 is needed so memset(,,stride*height) does not sig11
            int mb_array_size = this.mb_stride * this.mb_height;
            //int b8_array_size= this.b8_stride*this.mb_height*2;
            int b4_array_size = this.b4_stride * this.mb_height * 4;
            int i;
            //int r= -1;

            if (shared != 0)
            {
                ////assert(pic.data[0]);
                ////assert(pic.type == 0 || pic.type == FF_BUFFER_TYPE_SHARED);
                pic.type = FF_BUFFER_TYPE_SHARED;
            }
            else
            {
                ////assert(!pic.data[0]);

                if (alloc_frame_buffer(pic) < 0)
                {
                    // DebugTool.printDebugString("     ----- ff_alloc_picture error case 0\n");
                    return -1;
                }

                this.linesize = pic.linesize[0];
                this.uvlinesize = pic.linesize[1];
            }

            if (pic.qscale_table == null)
            {
                /*
                if (this.encoding) {
                    FF_ALLOCZ_OR_GOTO(this.avctx, pic.mb_var   , mb_array_size * sizeof(int16_t)  , fail)
                    FF_ALLOCZ_OR_GOTO(this.avctx, pic.mc_mb_var, mb_array_size * sizeof(int16_t)  , fail)
                    FF_ALLOCZ_OR_GOTO(this.avctx, pic.mb_mean  , mb_array_size * sizeof(int8_t )  , fail)
                }
                */

                //FF_ALLOCZ_OR_GOTO(this.avctx, pic.mbskip_table , mb_array_size * sizeof(uint8_t)+2, fail) //the +2 is for the slice end check
                pic.mbskip_table = new int[mb_array_size + 2];

                //FF_ALLOCZ_OR_GOTO(this.avctx, pic.qscale_table , mb_array_size * sizeof(uint8_t)  , fail)
                pic.qscale_table = new int[mb_array_size];

                //FF_ALLOCZ_OR_GOTO(this.avctx, pic.mb_type_base , (big_mb_num + this.mb_stride) * sizeof(uint32_t), fail)
                pic.mb_type_base = new long[(big_mb_num + this.mb_stride)];

                pic.mb_type_offset = /*pic.mb_type_base + */2 * this.mb_stride + 1;
                if (this.out_format == FMT_H264)
                {
                    for (i = 0; i < 2; i++)
                    {
                        //FF_ALLOCZ_OR_GOTO(this.avctx, pic.motion_val_base[i], 2 * (b4_array_size+4)  * sizeof(int16_t), fail)
                        pic.motion_val_base[i] = Arrays.Create<int>(b4_array_size + 4, 2);

                        pic.motion_val_offset[i] = 4;

                        //FF_ALLOCZ_OR_GOTO(this.avctx, pic.ref_index[i], 4*mb_array_size * sizeof(uint8_t), fail)
                        pic.ref_index[i] = new int[4 * mb_array_size];
                    }
                    pic.motion_subsample_log2 = 2;
                }
                /* Not ported from the C original:
                  else if(this.out_format == FMT_H263 || this.encoding || (this.avctx.debug&FF_DEBUG_MV) || (this.avctx.debug_mv)){
                    for(i=0; i<2; i++){
                        FF_ALLOCZ_OR_GOTO(this.avctx, pic.motion_val_base[i], 2 * (b8_array_size+4) * sizeof(int16_t), fail)
                        pic.motion_val[i]= pic.motion_val_base[i]+4;
                        FF_ALLOCZ_OR_GOTO(this.avctx, pic.ref_index[i], 4*mb_array_size * sizeof(uint8_t), fail)
                    }
                    pic.motion_subsample_log2= 3;
                }
                */
                //???????????????????
                /* No DEBUG
                if(this.avctx.debug&FF_DEBUG_DCT_COEFF) {
                    FF_ALLOCZ_OR_GOTO(this.avctx, pic.dct_coeff, 64 * mb_array_size * sizeof(DCTELEM)*6, fail)
                }
                */
                pic.qstride = this.mb_stride;
                //FF_ALLOCZ_OR_GOTO(this.avctx, pic.pan_scan , 1 * sizeof(AVPanScan), fail)
                pic.pan_scan = new AVPanScan();
            }

            /* It might be nicer if the application would keep track of these
             * but it would require an API change. */
            //memmove(this.prev_pict_types+1, this.prev_pict_types, PREV_PICT_TYPES_BUFFER_SIZE-1);
            for (int k = PREV_PICT_TYPES_BUFFER_SIZE - 1; k > 0; k--)
                this.prev_pict_types[k] = this.prev_pict_types[k - 1]; // iterate backwards so entries shift rather than smear

            this.prev_pict_types[0] = (this.dropable != 0) ? H264Context.FF_B_TYPE : this.pict_type;
            if (pic.age < PREV_PICT_TYPES_BUFFER_SIZE && this.prev_pict_types[pic.age] == H264Context.FF_B_TYPE)
                pic.age = int.MaxValue; // Skipped MBs in B-frames are quite rare in MPEG-1/2 and it is a bit tricky to skip them anyway.

            // DebugTool.printDebugString("     ----- ff_alloc_picture error OK.\n");
            return 0;
        }
Example #26
 /**
  * Release a frame buffer
  */
 public void free_frame_buffer(AVFrame pic)
 {
     //this.avctx->release_buffer(this.avctx, (AVFrame*)pic);
     release_buffer(pic);
     //av_freep(&pic->hwaccel_picture_private); // No H/W Acceleration
 }
Example #28
        public int get_buffer(AVFrame pic)
        {
            int i;
            int w = this.width;
            int h = this.height;
            InternalBuffer buf;
            //int[] picture_number;

            if (pic.data_base[0] != null)
            {
                // DebugTool.printDebugString("     ----- get_buffer error case 0\n");
                //av_log(s, AV_LOG_ERROR, "pic.data[0]!=NULL in avcodec_default_get_buffer\n");
                return -1;
            }
            if (this.internal_buffer_count >= INTERNAL_BUFFER_SIZE)
            {
                //av_log(s, AV_LOG_ERROR, "internal_buffer_count overflow (missing release_buffer?)\n");
                // DebugTool.printDebugString("     ----- get_buffer error case 1\n");
                return -1;
            }

            if (av_image_check_size(w, h, 0, this) != 0)
            {
                // DebugTool.printDebugString("     ----- get_buffer error case 2\n");
                return -1;
            }

            if (this.internal_buffer == null)
            {
                //this.internal_buffer= av_mallocz((INTERNAL_BUFFER_SIZE+1)*sizeof(InternalBuffer));
                internal_buffer = new InternalBuffer[INTERNAL_BUFFER_SIZE + 1];
                for (i = 0; i < INTERNAL_BUFFER_SIZE + 1; i++)
                    internal_buffer[i] = new InternalBuffer();
            }

            buf = internal_buffer[this.internal_buffer_count];
            //picture_number= &(((InternalBuffer*)this.internal_buffer)[INTERNAL_BUFFER_SIZE]).last_pic_num; //FIXME ugly hack
            //(*picture_number)++;
            this.internal_buffer[INTERNAL_BUFFER_SIZE].last_pic_num++;

            if (buf.@base[0] != null && (buf.width != w || buf.height != h || buf.pix_fmt != this.pix_fmt))
            {
                for (i = 0; i < 4; i++)
                {
                    //av_freep(&buf.base[i]);
                    buf.@base[i] = null;
                    buf.data_offset[i] = 0;
                }
            }

            if (buf.@base[0] != null)
            {
                pic.age = this.internal_buffer[INTERNAL_BUFFER_SIZE].last_pic_num - buf.last_pic_num;
                buf.last_pic_num = this.internal_buffer[INTERNAL_BUFFER_SIZE].last_pic_num;
            }
            else
            {
                int h_chroma_shift, v_chroma_shift;
                int[] size = new int[4];
                int tmpsize;
                int unaligned;
                AVPicture picture = new AVPicture();
                int[] stride_align = new int[4];

                //avcodec_get_chroma_sub_sample(this.pix_fmt, &h_chroma_shift, &v_chroma_shift);
                h_chroma_shift = ImageUtils.av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
                v_chroma_shift = ImageUtils.av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;

                int[] param = new int[] { w, h };
                avcodec_align_dimensions2(param, stride_align);
                w = param[0];
                h = param[1];
                pic.imageWidthWOEdge = w;
                pic.imageHeightWOEdge = h;

                if (0 == (this.flags & CODEC_FLAG_EMU_EDGE))
                {
                    w += EDGE_WIDTH * 2;
                    h += EDGE_WIDTH * 2;
                }
                pic.imageWidth = w;
                pic.imageHeight = h;

                do
                {
                    // NOTE: do not align linesizes individually, this breaks e.g. assumptions
                    // that linesize[0] == 2*linesize[1] in the MPEG-encoder for 4:2:2
                    av_image_fill_linesizes(picture.linesize, this.pix_fmt, w);
                    // increase alignment of w for next try (rhs gives the lowest bit set in w)
                    w += w & ~(w - 1);
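                    // e.g. w = 88 (0b1011000): lowest set bit is 8, giving 96 next,
                    // then 128; each pass raises the power of two dividing w until
                    // every plane's linesize satisfies its stride_align requirement.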

                    unaligned = 0;
                    for (i = 0; i < 4; i++)
                    {
                        unaligned |= (picture.linesize[i] % stride_align[i]);
                    }
                } while (unaligned != 0);

                tmpsize = av_image_fill_pointers(picture.data_base, picture.data_offset, this.pix_fmt, h, null, picture.linesize);
                if (tmpsize < 0)
                {
                    // DebugTool.printDebugString("     ----- get_buffer error case 3\n");
                    return -1;
                }

                for (i = 0; i < 3 && picture.data_offset[i + 1] != 0; i++)
                    size[i] = picture.data_offset[i + 1] - picture.data_offset[i];
                size[i] = tmpsize - (picture.data_offset[i] - picture.data_offset[0]);

                buf.last_pic_num = -256 * 256 * 256 * 64;
                //memset(buf.base, 0, sizeof(buf.base));
                //memset(buf.data, 0, sizeof(buf.data));
                for (int k = 0; k < [email protected]; k++)
                    buf.@base[k] = null;
                //Arrays.fill(buf.base[k], 0);
                for (int k = 0; k < buf.data_offset.Length; k++)
                    buf.data_offset[k] = 0;
                //	Arrays.fill(buf.data[k], 0);

                for (i = 0; i < 4 && size[i] != 0; i++)
                {
                    int h_shift = i == 0 ? 0 : h_chroma_shift;
                    int v_shift = i == 0 ? 0 : v_chroma_shift;

                    buf.linesize[i] = picture.linesize[i];

                    //buf.base[i]= av_malloc(size[i]+16); //FIXME 16
                    buf.@base[i] = new byte[size[i] + 16];
                    if (buf.@base[i] == null) return -1;
                    //memset(buf.base[i], 128, size[i]);
                    Arrays.Fill(buf.@base[i], 0, size[i], (byte)128);

                    // no edge if EDGE EMU or not planar YUV
                    if ((this.flags & CODEC_FLAG_EMU_EDGE) != 0 || 0 == size[2])
                        buf.data_offset[i] = 0;
                    else
                        buf.data_offset[i] = ((((buf.linesize[i] * EDGE_WIDTH >> v_shift) + (EDGE_WIDTH >> h_shift)) + (stride_align[i]) - 1) & ~((stride_align[i]) - 1));
                    //+ FFALIGN((buf.linesize[i]*EDGE_WIDTH>>v_shift) + (EDGE_WIDTH>>h_shift), stride_align[i]);
                }
                if (size[1] != 0 && 0 == size[2])
                    ff_set_systematic_pal2(buf.@base[1], buf.data_offset[1], this.pix_fmt);
                buf.width = this.width;
                buf.height = this.height;
                buf.pix_fmt = this.pix_fmt;
                pic.age = 256 * 256 * 256 * 64;
            }
            pic.type = FF_BUFFER_TYPE_INTERNAL;

            for (i = 0; i < 4; i++)
            {
                pic.@base[i] = buf.@base[i];
                pic.data_base[i] = buf.@base[i];
                pic.data_offset[i] = buf.data_offset[i];
                pic.linesize[i] = buf.linesize[i];
            }
            this.internal_buffer_count++;

            // DebugTool.printDebugString("****Internal_Buffer_Count = "+this.internal_buffer_count);

            if (this.pkt != null) pic.pkt_pts = this.pkt.pts;
            else pic.pkt_pts = Constants.AV_NOPTS_VALUE;
            pic.reordered_opaque = this.reordered_opaque;

            /*
            if(this.debug&FF_DEBUG_BUFFERS)
                av_log(s, AV_LOG_DEBUG, "default_get_buffer called on pic %p, %d buffers used\n", pic, this.internal_buffer_count);
            */
            // DebugTool.printDebugString("     ----- get_buffer OK.\n");
            return 0;
        }
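The edge-offset expression near the end is FFALIGN applied to "EDGE_WIDTH rows plus EDGE_WIDTH pixels". A worked instance with hypothetical numbers (768-byte luma stride, the usual 16-pixel EDGE_WIDTH, 32-byte alignment), a sketch rather than decoder code:

        int linesize = 768, edge = 16, align = 32;      // illustrative values only
        int raw = linesize * edge + edge;               // 12304: 16 rows plus 16 pixels
        int offset = (raw + align - 1) & ~(align - 1);  // 12320, i.e. FFALIGN(raw, 32)
        // data_offset[0] then lands past a 16-pixel top/left border that
        // motion compensation can overread without bounds checks.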
Example #29
File: AVFrame.cs Project: pansk/cscodec
 public AVFrame copyTo(AVFrame ret)
 {
      // Shallow field-by-field copy: scalar members are duplicated, while
      // array references (planes, tables) are shared with the source frame.
      ret.age = age;
      ret.b_frame_score = b_frame_score;
      for(int i=0;i<@base.Length;i++)
          ret.@base[i] = @base[i];
      ret.buffer_hints = buffer_hints;
      ret.coded_picture_number = coded_picture_number;
      for(int i=0;i<data_base.Length;i++) {
          ret.data_base[i] = data_base[i];
          ret.data_offset[i] = data_offset[i];
      } // for
      ret.dct_coeff = dct_coeff;
      ret.display_picture_number = display_picture_number;
      Array.Copy(error, 0, ret.error, 0, error.Length);
      Array.Copy(field_poc, 0, ret.field_poc, 0, field_poc.Length);
      ret.frame_num = frame_num;
      ret.imageWidth = imageWidth;
      ret.imageHeight = imageHeight;
      ret.imageWidthWOEdge = imageWidthWOEdge;
      ret.imageHeightWOEdge = imageHeightWOEdge;
      ret.interlaced_frame = interlaced_frame;
      Array.Copy(interpolated, 0, ret.interpolated, 0, interpolated.Length);
      ret.key_frame = key_frame;
      Array.Copy(linesize, 0, ret.linesize, 0, linesize.Length);
      ret.long_ref = long_ref;
      ret.mb_cmp_score = mb_cmp_score;
      ret.mb_mean = mb_mean;
      ret.mb_type_base = mb_type_base;
      ret.mb_type_offset = mb_type_offset;
      ret.mb_var = mb_var;
      ret.mb_var_sum = mb_var_sum;
      ret.mbaff = mbaff;
      ret.mbskip_table = mbskip_table;
      ret.mc_mb_var = mc_mb_var;
      ret.mc_mb_var_sum = mc_mb_var_sum;
      ret.mmco_reset = mmco_reset;
      ret.motion_subsample_log2 = motion_subsample_log2;
      // NOTE: Array.Copy duplicates only the outer array; the inner rows are still shared.
      Array.Copy(motion_val_base, 0, ret.motion_val_base, 0, motion_val_base.Length);
      Array.Copy(motion_val_offset, 0, ret.motion_val_offset, 0, motion_val_offset.Length);
      ret.opaque = opaque;
      ret.palette_has_changed = palette_has_changed;
      ret.pan_scan = pan_scan;
      ret.pic_id = pic_id;
      ret.pict_type = pict_type;
      ret.pkt_dts = pkt_dts;
      ret.pkt_pts = pkt_pts;
      ret.poc = poc;
      ret.pts = pts;
      ret.qscale_table = qscale_table;
      ret.qscale_type = qscale_type;
      ret.qstride = qstride;
      ret.quality = quality;
      ret.ref_count = (int[][])ref_count.Clone();
      Array.Copy(ref_index, 0, ret.ref_index, 0, ref_index.Length);
      ret.ref_poc = (int[][][])ref_poc.Clone();
      ret.reference = reference;
      ret.reordered_opaque = reordered_opaque;
      ret.repeat_pict = repeat_pict;
      ret.top_field_first = top_field_first;
      ret.type = type;
      return ret;
 }
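A note on semantics: copyTo duplicates scalar fields but assigns array references, so pixel planes and most tables remain shared between the two frames. A minimal sketch (hypothetical setup) of the resulting aliasing:

     AVFrame a = new AVFrame();
     a.data_base[0] = new byte[16];
     AVFrame b = a.copyTo(new AVFrame());
     b.data_base[0][0] = 42;
     // a.data_base[0][0] is now 42 as well: a shallow copy, not a clone.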
Example #30
        /* init common structure for both encoder and decoder */
        public void MPV_common_end()
        {
            int i;
            //, j, k;

            for (i = 0; i < this.thread_count; i++)
            {
                //free_duplicate_context(this.thread_context[i]);
                //this.thread_context[i] = null;
            }
            /*
            for(i=1; i<this.avctx->thread_count; i++){
                av_freep(&this.thread_context[i]);
            }
            */

            this.parse_context.buffer_base = null;
            this.parse_context.buffer_size = 0;

            /* Encoding-only fields; not used by this decoder port:
            this.mb_type = null;
            this.p_mv_table_base = null;
            this.b_forw_mv_table_base = null;
            this.b_back_mv_table_base = null;
            this.b_bidir_forw_mv_table_base = null;
            this.b_bidir_back_mv_table_base = null;
            this.b_direct_mv_table_base = null;
            this.p_mv_table= NULL;
            this.b_forw_mv_table= NULL;
            this.b_back_mv_table= NULL;
            this.b_bidir_forw_mv_table= NULL;
            this.b_bidir_back_mv_table= NULL;
            this.b_direct_mv_table= NULL;
            for(i=0; i<2; i++){
                for(j=0; j<2; j++){
                    for(k=0; k<2; k++){
                        this.b_field_mv_table_base[i][j][k] = null;
                        this.b_field_mv_table[i][j][k]=NULL;
                    }
                    this.b_field_select_table[i][j] = null;
                    this.p_field_mv_table_base[i][j] = null;
                    this.p_field_mv_table[i][j]=NULL;
                }
                this.p_field_select_table[i] = null;
            }

            this.dc_val_base = null;
            this.coded_block_base = null;
            this.mbintra_table = null;
            this.cbp_table = null;
            this.pred_dir_table = null;

            this.mbskip_table = null;
            this.prev_pict_types = null;
            this.bitstream_buffer = null;
            this.allocated_bitstream_buffer_size=0;

            this.avctx->stats_out = null;
            this.ac_stats = null;
            this.error_status_table = null;
            this.mb_index2xy = null;
            this.lambda_table = null;
            this.q_intra_matrix = null;
            this.q_inter_matrix = null;
            this.q_intra_matrix16 = null;
            this.q_inter_matrix16 = null;
            this.input_picture = null;
            this.reordered_input_picture = null;
            this.dct_offset = null;
              */
            if (this.picture != null)
            {
                for (i = 0; i < MAX_PICTURE_COUNT; i++)
                {
                    //this.free_picture(this.picture[i]);
                }
            }
            this.picture = null;
            this.context_initialized = 0;
            this.last_picture_ptr =
            this.next_picture_ptr =
            this.current_picture_ptr = null;
            this.linesize = this.uvlinesize = 0;

            //for(i=0; i<3; i++)
            //    this.visualization_buffer[i] = null;

            //avcodec_default_free_buffers(this.avctx);
        }
Example #31
        public int ff_h264_fill_default_ref_list()
        {
            int i, len;

            for (int p = 0; p < this.default_ref_list.Length; p++)
                for (int k = 0; k < this.default_ref_list[p].Length; k++)
                    if (this.default_ref_list[p][k] == null)
                        this.default_ref_list[p][k] = new AVFrame();

            if (this.slice_type_nos == FF_B_TYPE)
            {
                AVFrame[] sorted = new AVFrame[32];
                int cur_poc, list;
                int[] lens = new int[2];

                if ((s.picture_structure != Constants.PICT_FRAME))
                    cur_poc = s.current_picture_ptr.field_poc[(s.picture_structure == Constants.PICT_BOTTOM_FIELD) ? 1 : 0];
                else
                    cur_poc = s.current_picture_ptr.poc;

                for (list = 0; list < 2; list++)
                {
                    len = AVFrame.add_sorted(sorted, 0, this.short_ref, 0, this.short_ref_count, cur_poc, 1 ^ list);
                    len += AVFrame.add_sorted(sorted, len, this.short_ref, 0, this.short_ref_count, cur_poc, 0 ^ list);
                    //assert(len<=32);
                    len = AVFrame.build_def_list(this.default_ref_list[list], 0, sorted, 0, len, 0, s.picture_structure);
                    len += AVFrame.build_def_list(this.default_ref_list[list], len, this.long_ref, 0, 16, 1, s.picture_structure);
                    //assert(len<=32);

                    if (len < this.ref_count[list])
                    {
                        //memset(this.default_ref_list[list][len], 0, sizeof(Picture)*(this.ref_count[list] - len));
                        for (int k = len; k < this.ref_count[list]; k++)
                            this.default_ref_list[list][k].resetToZero();
                    }
                    lens[list] = len;
                }

                if (lens[0] == lens[1] && lens[1] > 1)
                {
                    // bounds check first: at i == lens[0] the zeroed tail entries must not be dereferenced
                    for (i = 0; i < lens[0]
                     && this.default_ref_list[0][i].data_base[0] == this.default_ref_list[1][i].data_base[0]
                     && this.default_ref_list[0][i].data_offset[0] == this.default_ref_list[1][i].data_offset[0]; i++) ;
                    if (i == lens[0])
                    {
                        //FFSWAP(Picture, this.default_ref_list[1][0], this.default_ref_list[1][1]);
                        AVFrame tmp = this.default_ref_list[1][0];
                        this.default_ref_list[1][0] = this.default_ref_list[1][1];
                        this.default_ref_list[1][1] = tmp;
                    }
                }
            }
            else
            {
                len = AVFrame.build_def_list(this.default_ref_list[0], 0, this.short_ref, 0, this.short_ref_count, 0, s.picture_structure);
                len += AVFrame.build_def_list(this.default_ref_list[0], len, this.long_ref, 0, 16, 1, s.picture_structure);
                //assert(len <= 32);
                if (len < this.ref_count[0])
                {
                    //memset(&this.default_ref_list[0][len], 0, sizeof(Picture)*(this.ref_count[0] - len));
                    for (int k = len; k < this.ref_count[0]; k++)
                        this.default_ref_list[0][k].resetToZero();
                }
            }
            /*
            #ifdef TRACE
                for (i=0; i<this.ref_count[0]; i++) {
                    tprintf(this.s.avctx, "List0: %s fn:%d 0x%p\n", (this.default_ref_list[0][i].long_ref ? "LT" : "ST"), this.default_ref_list[0][i].pic_id, this.default_ref_list[0][i].data[0]);
                }
                if(this.slice_type_nos==FF_B_TYPE){
                    for (i=0; i<this.ref_count[1]; i++) {
                        tprintf(this.s.avctx, "List1: %s fn:%d 0x%p\n", (this.default_ref_list[1][i].long_ref ? "LT" : "ST"), this.default_ref_list[1][i].pic_id, this.default_ref_list[1][i].data[0]);
                    }
                }
            #endif
            */
            return 0;
        }
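For B slices the construction above follows the H.264 default ordering: list0 takes short-term refs whose POC precedes the current picture, nearest first, then those that follow it, nearest first (the 1 ^ list / 0 ^ list arguments flip this for list1). A standalone sketch of that ordering, not the decoder's actual add_sorted routine:

        using System.Linq;

        // Hypothetical helper, not part of cscodec.
        static int[] DefaultList0Order(int[] refPocs, int curPoc)
        {
            var before = refPocs.Where(p => p < curPoc).OrderByDescending(p => p); // nearest past first
            var after  = refPocs.Where(p => p > curPoc).OrderBy(p => p);           // nearest future first
            return before.Concat(after).ToArray();
        }
        // DefaultList0Order(new[] { 0, 2, 4, 8 }, 6) yields { 4, 2, 0, 8 }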
Example #32
        /**
         * init common structure for both encoder and decoder.
         * this assumes that some variables like width/height are already set
         */
        public int MPV_common_init()
        {
            int y_size, c_size, yc_size, i, mb_array_size, x, y, threads;

            // Always H264
            //if(this.codec_id == CODEC_ID_MPEG2VIDEO && !this.progressive_sequence)
            //    this.mb_height = (this.height + 31) / 32 * 2;
            //else if (this.codec_id != CODEC_ID_H264)
            //    this.mb_height = (this.height + 15) / 16;

            if (this.pix_fmt == PIX_FMT_NONE)
            {
                //av_log(this.avctx, AV_LOG_ERROR, "decoding to PIX_FMT_NONE is not supported.\n");
                return -1;
            }

            if (this.thread_count > H264Context.MAX_THREADS || (this.thread_count > this.mb_height && this.mb_height != 0))
            {
                //av_log(this.avctx, AV_LOG_ERROR, "too many threads\n");
                return -1;
            }

            if ((this.width != 0 || this.height != 0) && av_image_check_size(this.width, this.height, 0, this) != 0)
                return -1;

            this.dsp.dsputil_init(this);
            this.ff_dct_common_init();

            //this.flags= this.avctx.flags;
            //this.flags2= this.avctx.flags2;

            this.mb_width = (this.width + 15) / 16;
            this.mb_stride = this.mb_width + 1;
            this.b8_stride = this.mb_width * 2 + 1;
            this.b4_stride = this.mb_width * 4 + 1;
            mb_array_size = this.mb_height * this.mb_stride;
            //mv_table_size= (this.mb_height+2) * this.mb_stride + 1;

            /* set chroma shifts */
            //avcodec_get_chroma_sub_sample(this.pix_fmt,&(this.chroma_x_shift),
            //                                                &(this.chroma_y_shift) );
            this.chroma_x_shift = ImageUtils.av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
            this.chroma_y_shift = ImageUtils.av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;

            /* set default edge pos, will be overridden in decode_header if needed */
            this.h_edge_pos = this.mb_width * 16;
            this.v_edge_pos = this.mb_height * 16;

            this.mb_num = this.mb_width * this.mb_height;

            this.block_wrap[0] =
            this.block_wrap[1] =
            this.block_wrap[2] =
            this.block_wrap[3] = this.b8_stride;
            this.block_wrap[4] =
            this.block_wrap[5] = this.mb_stride;

            y_size = this.b8_stride * (2 * this.mb_height + 1);
            c_size = this.mb_stride * (this.mb_height + 1);
            yc_size = y_size + 2 * c_size;

            /* convert fourcc to upper case; not needed in this H.264-only port */
            //this.codec_tag = ff_toupper4(this.codec_tag);
            //this.stream_codec_tag = ff_toupper4(this.stream_codec_tag);

            this.coded_frame = (AVFrame)this.current_picture;

            //FF_ALLOCZ_OR_GOTO(this.avctx, this.mb_index2xy, (this.mb_num+1)*sizeof(int), fail) //error ressilience code looks cleaner with this
            this.mb_index2xy = new int[this.mb_num + 1];
            for (y = 0; y < this.mb_height; y++)
            {
                for (x = 0; x < this.mb_width; x++)
                {
                    this.mb_index2xy[x + y * this.mb_width] = x + y * this.mb_stride;
                }
            }
            this.mb_index2xy[this.mb_height * this.mb_width] = (this.mb_height - 1) * this.mb_stride + this.mb_width; //FIXME really needed?

            /* NOT ENCODE! */
            /*
            if (this.encoding) {
                // Allocate MV tables
                FF_ALLOCZ_OR_GOTO(this.avctx, this.p_mv_table_base            , mv_table_size * 2 * sizeof(int16_t), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.b_forw_mv_table_base       , mv_table_size * 2 * sizeof(int16_t), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.b_back_mv_table_base       , mv_table_size * 2 * sizeof(int16_t), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.b_bidir_forw_mv_table_base , mv_table_size * 2 * sizeof(int16_t), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.b_bidir_back_mv_table_base , mv_table_size * 2 * sizeof(int16_t), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.b_direct_mv_table_base     , mv_table_size * 2 * sizeof(int16_t), fail)
                this.p_mv_table           = this.p_mv_table_base            + this.mb_stride + 1;
                this.b_forw_mv_table      = this.b_forw_mv_table_base       + this.mb_stride + 1;
                this.b_back_mv_table      = this.b_back_mv_table_base       + this.mb_stride + 1;
                this.b_bidir_forw_mv_table= this.b_bidir_forw_mv_table_base + this.mb_stride + 1;
                this.b_bidir_back_mv_table= this.b_bidir_back_mv_table_base + this.mb_stride + 1;
                this.b_direct_mv_table    = this.b_direct_mv_table_base     + this.mb_stride + 1;

                if(this.msmpeg4_version){
                    FF_ALLOCZ_OR_GOTO(this.avctx, this.ac_stats, 2*2*(MAX_LEVEL+1)*(MAX_RUN+1)*2*sizeof(int), fail);
                }
                FF_ALLOCZ_OR_GOTO(this.avctx, this.avctx.stats_out, 256, fail);

                // Allocate MB type table
                FF_ALLOCZ_OR_GOTO(this.avctx, this.mb_type  , mb_array_size * sizeof(uint16_t), fail) //needed for encoding

                FF_ALLOCZ_OR_GOTO(this.avctx, this.lambda_table, mb_array_size * sizeof(int), fail)

                FF_ALLOCZ_OR_GOTO(this.avctx, this.q_intra_matrix  , 64*32   * sizeof(int), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.q_inter_matrix  , 64*32   * sizeof(int), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.q_intra_matrix16, 64*32*2 * sizeof(uint16_t), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.q_inter_matrix16, 64*32*2 * sizeof(uint16_t), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.input_picture, MAX_PICTURE_COUNT * sizeof(Picture*), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.reordered_input_picture, MAX_PICTURE_COUNT * sizeof(Picture*), fail)

                if(this.avctx->noise_reduction){
                    FF_ALLOCZ_OR_GOTO(this.avctx, this.dct_offset, 2 * 64 * sizeof(uint16_t), fail)
                }
            }
            */
            //FF_ALLOCZ_OR_GOTO(this.avctx, this.picture, MAX_PICTURE_COUNT * sizeof(Picture), fail)
            this.picture = new AVFrame[MAX_PICTURE_COUNT];
            for (i = 0; i < MAX_PICTURE_COUNT; i++)
            {
                //avcodec_get_frame_defaults((AVFrame)this.picture[i]);
                this.picture[i] = new AVFrame();
                this.picture[i].pts = Constants.AV_NOPTS_VALUE;
                this.picture[i].key_frame = 1;
                // the remaining fields rely on AVFrame's default (zeroed) values
            }

            //FF_ALLOCZ_OR_GOTO(this.avctx, this.error_status_table, mb_array_size*sizeof(uint8_t), fail)
            this.error_status_table = new int[mb_array_size];
            /*
            if(this.codec_id==CODEC_ID_MPEG4 || (this.flags & CODEC_FLAG_INTERLACED_ME)){
                // interlaced direct mode decoding tables
                    for(i=0; i<2; i++){
                        int j, k;
                        for(j=0; j<2; j++){
                            for(k=0; k<2; k++){
                                FF_ALLOCZ_OR_GOTO(this.avctx,    this.b_field_mv_table_base[i][j][k], mv_table_size * 2 * sizeof(int16_t), fail)
                                this.b_field_mv_table[i][j][k] = this.b_field_mv_table_base[i][j][k] + this.mb_stride + 1;
                            }
                            FF_ALLOCZ_OR_GOTO(this.avctx, this.b_field_select_table [i][j], mb_array_size * 2 * sizeof(uint8_t), fail)
                            FF_ALLOCZ_OR_GOTO(this.avctx, this.p_field_mv_table_base[i][j], mv_table_size * 2 * sizeof(int16_t), fail)
                            this.p_field_mv_table[i][j] = this.p_field_mv_table_base[i][j]+ this.mb_stride + 1;
                        }
                        FF_ALLOCZ_OR_GOTO(this.avctx, this.p_field_select_table[i], mb_array_size * 2 * sizeof(uint8_t), fail)
                    }
            }
            */
            /*
            if (this.out_format == FMT_H263) {
                // cbp values
                FF_ALLOCZ_OR_GOTO(this.avctx, this.coded_block_base, y_size, fail);
                this.coded_block= this.coded_block_base + this.b8_stride + 1;

                // cbp, ac_pred, pred_dir
                FF_ALLOCZ_OR_GOTO(this.avctx, this.cbp_table     , mb_array_size * sizeof(uint8_t), fail)
                FF_ALLOCZ_OR_GOTO(this.avctx, this.pred_dir_table, mb_array_size * sizeof(uint8_t), fail)
            }
            */
            //if (this.h263_pred || this.h263_plus || !this.encoding) {
            // dc values
            //MN: we need these for error resilience of intra-frames
            //FF_ALLOCZ_OR_GOTO(this.avctx, this.dc_val_base, yc_size * sizeof(int16_t), fail);
            ErrorResilience.dc_val_base = new int[yc_size];
            ErrorResilience.dc_val[0] = this.b8_stride + 1;
            ErrorResilience.dc_val[1] = y_size + this.mb_stride + 1;
            ErrorResilience.dc_val[2] = ErrorResilience.dc_val[1] + c_size;
            for (i = 0; i < yc_size; i++)
                ErrorResilience.dc_val_base[i] = 1024;
            //}

            /* which mb is an intra block */
            //FF_ALLOCZ_OR_GOTO(this.avctx, this.mbintra_table, mb_array_size, fail);
            this.mbintra_table = new int[mb_array_size];
            //memset(this.mbintra_table, 1, mb_array_size);
            Arrays.Fill(this.mbintra_table, 1);

            /* init macroblock skip table */
            //FF_ALLOCZ_OR_GOTO(this.avctx, this.mbskip_table, mb_array_size+2, fail);
            this.mbskip_table = new int[mb_array_size + 2];

            //Note the +1 is for a quicker mpeg4 slice_end detection
            //FF_ALLOCZ_OR_GOTO(this.avctx, this.prev_pict_types, PREV_PICT_TYPES_BUFFER_SIZE, fail);
            this.prev_pict_types = new int[PREV_PICT_TYPES_BUFFER_SIZE];

            this.parse_context.state = -1;
            /*// No debug
            if((this.debug&(FF_DEBUG_VIS_QP|FF_DEBUG_VIS_MB_TYPE)) || (this.avctx->debug_mv)){
               this.visualization_buffer[0] = av_malloc((this.mb_width*16 + 2*EDGE_WIDTH) * this.mb_height*16 + 2*EDGE_WIDTH);
               this.visualization_buffer[1] = av_malloc((this.mb_width*16 + 2*EDGE_WIDTH) * this.mb_height*16 + 2*EDGE_WIDTH);
               this.visualization_buffer[2] = av_malloc((this.mb_width*16 + 2*EDGE_WIDTH) * this.mb_height*16 + 2*EDGE_WIDTH);
            }
            */

            this.context_initialized = 1;

            this.thread_context[0] = this;
            threads = this.thread_count;

            /*
            for(i=1; i<threads; i++){
                this.thread_context[i]= av_malloc(sizeof(MpegEncContext));
                memcpy(this.thread_context[i], s, sizeof(MpegEncContext));
            }
            */

            for (i = 0; i < threads; i++)
            {
                if (init_duplicate_context(this.thread_context[i], this) < 0)
                {
                    this.MPV_common_end();
                    return -1;
                } // if
                this.thread_context[i].start_mb_y = (this.mb_height * (i) + this.thread_count / 2) / this.thread_count;
                this.thread_context[i].end_mb_y = (this.mb_height * (i + 1) + this.thread_count / 2) / this.thread_count;
            }

            return 0;
        }
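The macroblock geometry at the top of the function is easiest to check with concrete numbers; for a hypothetical 352x288 (CIF) stream:

        int width = 352, height = 288;        // illustrative dimensions
        int mb_width = (width + 15) / 16;     // 22 macroblocks per row
        int mb_stride = mb_width + 1;         // 23: one spare column for edge handling
        int mb_height = (height + 15) / 16;   // 18 (set before MPV_common_init runs)
        // mb_index2xy maps the dense index x + y*mb_width onto the padded
        // layout x + y*mb_stride; MB (0,1) is dense index 22, padded index 23.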
Example #33
 /**
  * Mark a picture as no longer needed for reference. The refmask
  * argument allows unreferencing of individual fields or the whole frame.
  * If the picture becomes entirely unreferenced, but is being held for
  * display purposes, it is marked as such.
  * @param refmask mask of fields to unreference; the mask is bitwise
  *                ANDed with the reference marking of pic
  * @return non-zero if pic becomes entirely unreferenced (except possibly
  *         for display purposes); zero if one of the fields remains
  *         referenced
  */
 /*static inline*/
 public int unreference_pic(AVFrame pic, int refmask)
 {
     int i;
     if ((pic.reference &= refmask) != 0)
     {
         return 0;
     }
     else
     {
         for (i = 0; delayed_pic[i] != null; i++)
             if (pic == delayed_pic[i])
             {
                 pic.reference = DELAYED_PIC_REF;
                 break;
             }
         return 1;
     }
 }
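A usage sketch, assuming the usual field-bit convention (reference bit 0 = top field, bit 1 = bottom field, 3 = whole frame) and a hypothetical enclosing context ctx:

     AVFrame pic = new AVFrame();
     pic.reference = 3;                       // both fields referenced
     int r1 = ctx.unreference_pic(pic, ~2);   // drop bottom field: reference == 1, returns 0
     int r2 = ctx.unreference_pic(pic, ~1);   // drop top field too: returns 1; the picture
                                              // is kept as DELAYED_PIC_REF if it still
                                              // waits in delayed_pic[] for display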
Example #34
        /* generic function for encode/decode called after a frame has been coded/decoded */
        public void MPV_frame_end()
        {
            //int i;
            /* draw edge for correct motion prediction if outside */
            //just to make sure that all data is rendered.
            /*if(CONFIG_MPEG_XVMC_DECODER && this.avctx->xvmc_acceleration){
                ff_xvmc_field_end(s);
            }else*/
            if (//0==this.hwaccel
                //&& 0==(this.codec->capabilities&CODEC_CAP_HWACCEL_VDPAU)
                0 != this.unrestricted_mv
                && 0 != this.current_picture.reference
                && 0 == this.intra_only
                && 0 == (this.flags & CODEC_FLAG_EMU_EDGE))
            {
                this.dsp.draw_edges(this.current_picture.data_base[0], this.current_picture.data_offset[0], this.linesize, this.h_edge_pos, this.v_edge_pos, EDGE_WIDTH);
                this.dsp.draw_edges(this.current_picture.data_base[1], this.current_picture.data_offset[1], this.uvlinesize, this.h_edge_pos >> 1, this.v_edge_pos >> 1, EDGE_WIDTH / 2);
                this.dsp.draw_edges(this.current_picture.data_base[2], this.current_picture.data_offset[2], this.uvlinesize, this.h_edge_pos >> 1, this.v_edge_pos >> 1, EDGE_WIDTH / 2);
            }
            //emms_c(); // not using MMX

            this.last_pict_type = this.pict_type;
            this.last_lambda_for[this.pict_type] = this.current_picture_ptr.quality;
            if (this.pict_type != H264Context.FF_B_TYPE)
            {
                this.last_non_b_pict_type = this.pict_type;
            }

            // clear copies, to avoid confusion
            this.coded_frame = (AVFrame)this.current_picture_ptr;
        }
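The two chroma calls halve every argument because the decoder assumes 4:2:0 subsampling, where each chroma plane has half the luma resolution in both axes. With hypothetical CIF edge positions:

            int h_edge_pos = 352, v_edge_pos = 288;  // illustrative luma edge positions
            int cw = h_edge_pos >> 1;                // 176: chroma edge width
            int ch = v_edge_pos >> 1;                // 144: chroma edge height
            int cEdge = 16 / 2;                      // 8-pixel chroma border, half of EDGE_WIDTH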