Example #1
        public static string GetFrameLine(AVPacket pkt)
        {
            // CRC32 over the packet payload (pkt->data in the C original).
            byte[] payload = pkt.data_base.Select(b => (byte)b).ToArray();
            uint crc = BitConverter.ToUInt32(new CRC32().ComputeHash(payload, pkt.data_offset, pkt.size), 0);

            //snprintf(buf, sizeof(buf), "%d, %10"PRId64", %10"PRId64", %8d, %8d, 0x%08x",
            //		 pkt->stream_index, pkt->dts, pkt->pts, pkt->duration, pkt->size, crc);
            //if (pkt->flags != AV_PKT_FLAG_KEY)
            //	av_strlcatf(buf, sizeof(buf), ", F=0x%0X", pkt->flags);
            //if (pkt->side_data_elems)
            //	av_strlcatf(buf, sizeof(buf), ", S=%d", pkt->side_data_elems);
            //av_strlcatf(buf, sizeof(buf), "\n");
            //avio_write(s->pb, buf, strlen(buf));
            //avio_flush(s->pb);
            // Field widths match the C format string above; the conditional
            // flags/side-data suffixes from the C code are not ported.
            return String.Format(
                "{0}, {1,10}, {2,10}, {3,8}, {4,8}, 0x{5:X8}",
                pkt.stream_index, pkt.dts, pkt.pts, pkt.duration, pkt.size, crc
            );
        }
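
A quick sanity check of the formatter (a sketch; 'packets' and the printed values are hypothetical):

        // Hypothetical usage: emit one framecrc-style line per packet.
        foreach (AVPacket p in packets)
        {
            Console.WriteLine(GetFrameLine(p));
            // e.g. "0,       3600,      3600,     1500,     4096, 0x1A2B3C4D"
        }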
Example #2
        public int decode_frame(AVFrame data, int[] data_size, AVPacket avpkt)
        {
            /* const uint8_t * */
            byte[] buf_base = avpkt.data_base;
            int buf_offset = avpkt.data_offset;
            int buf_size = avpkt.size;
            //AVFrame pict = data;
            int buf_index;

            // Not ported from the C original:
            // s.flags  = avctx.flags;
            // s.flags2 = avctx.flags2;

            /* end of stream, output what is still in the buffers */
            bool loop = true; // replaces the "goto out" label in the C original
            do
            {
                loop = false;

                if (buf_size == 0)
                {
                    AVFrame @out;
                    int i, out_idx;

                    // FIXME factorize this with the output code below
                    // Pick the delayed picture with the smallest POC, stopping
                    // at the first key frame or MMCO reset.
                    @out = this.delayed_pic[0];
                    out_idx = 0;
                    for (i = 1; this.delayed_pic[i] != null
                            && 0 == this.delayed_pic[i].key_frame
                            && 0 == this.delayed_pic[i].mmco_reset; i++)
                        if (this.delayed_pic[i].poc < @out.poc)
                        {
                            @out = this.delayed_pic[i];
                            out_idx = i;
                        }

                    // Shift the remaining delayed pictures down.
                    for (i = out_idx; this.delayed_pic[i] != null; i++)
                        this.delayed_pic[i] = this.delayed_pic[i + 1];

                    if (@out != null)
                    {
                        data_size[0] = 1;
                        @out.copyTo(displayPicture);

                        // DebugTool.dumpFrameData(displayPicture);
                    }

                    return 0;
                }

                buf_index = this.decode_nal_units(buf_base, buf_offset, buf_size);
                if (buf_index < 0)
                {
                    return -8;
                }

                if (null == s.current_picture_ptr
                        && this.nal_unit_type == NAL_END_SEQUENCE)
                {
                    buf_size = 0;
                    loop = true;
                }

            } while (loop);

            if (0 == (s.flags2 & MpegEncContext.CODEC_FLAG2_CHUNKS) && null == s.current_picture_ptr)
            {
                if (s.skip_frame >= MpegEncContext.AVDISCARD_NONREF || s.hurry_up != 0)
                    return 0;
                // av_log(avctx, AV_LOG_ERROR, "no frame!\n");
                //Console.WriteLine("!!!! NO FRAME !!!!");
                //return s.get_consumed_bytes(buf_index, buf_size); //-1;
                return -9;
            }

            if (0 == (s.flags2 & MpegEncContext.CODEC_FLAG2_CHUNKS)
                    || (s.mb_y >= s.mb_height && s.mb_height != 0))
            {
                AVFrame @out = s.current_picture_ptr;
                AVFrame cur = s.current_picture_ptr;
                int i, pics, out_of_order, out_idx;

                this.field_end();

                if (cur.field_poc[0] == int.MaxValue
                        || cur.field_poc[1] == int.MaxValue)
                {
                    /* Wait for second field. */
                    data_size[0] = 0;

                }
                else
                {
                    cur.interlaced_frame = 0;
                    cur.repeat_pict = 0;

                    /* Signal interlacing information externally. */
                    /*
                     * Prioritize picture timing SEI information over used decoding
                     * process if it exists.
                     */

                    if (this.sps.pic_struct_present_flag != 0)
                    {
                        switch (this.sei_pic_struct)
                        {
                            case SEI_PIC_STRUCT_FRAME:
                                break;
                            case SEI_PIC_STRUCT_TOP_FIELD:
                            case SEI_PIC_STRUCT_BOTTOM_FIELD:
                                cur.interlaced_frame = 1;
                                break;
                            case SEI_PIC_STRUCT_TOP_BOTTOM:
                            case SEI_PIC_STRUCT_BOTTOM_TOP:
                                if ((mb_aff_frame != 0 || (s.picture_structure != Constants.PICT_FRAME)))
                                    cur.interlaced_frame = 1;
                                else
                                    // try to flag soft telecine progressive
                                    cur.interlaced_frame = this.prev_interlaced_frame;
                                break;
                            case SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
                            case SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
                                // Signal the possibility of telecined film externally
                                // (pic_struct 5,6)
                                // From these hints, let the applications decide if they
                                // apply deinterlacing.
                                cur.repeat_pict = 1;
                                break;
                            case SEI_PIC_STRUCT_FRAME_DOUBLING:
                                // Force progressive here, as doubling interlaced frame
                                // is a bad idea.
                                cur.repeat_pict = 2;
                                break;
                            case SEI_PIC_STRUCT_FRAME_TRIPLING:
                                cur.repeat_pict = 4;
                                break;
                        }

                        if ((this.sei_ct_type & 3) != 0
                                && this.sei_pic_struct <= SEI_PIC_STRUCT_BOTTOM_TOP)
                            cur.interlaced_frame = ((this.sei_ct_type & (1 << 1)) != 0) ? 1 : 0;
                    }
                    else
                    {
                        /* Derive interlacing flag from used decoding process. */
                        cur.interlaced_frame = (mb_aff_frame != 0 || (s.picture_structure != Constants.PICT_FRAME)) ? 1 : 0;
                    }
                    this.prev_interlaced_frame = cur.interlaced_frame;

                    if (cur.field_poc[0] != cur.field_poc[1])
                    {
                        /* Derive top_field_first from field POCs. */
                        cur.top_field_first = (cur.field_poc[0] < cur.field_poc[1]) ? 1 : 0;
                    }
                    else
                    {
                        if (cur.interlaced_frame != 0
                                || this.sps.pic_struct_present_flag != 0)
                        {
                            /*
                             * Use picture timing SEI information. Even if it is
                             * information from a past frame, it is better than nothing.
                             */
                            if (this.sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM
                                    || this.sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
                                cur.top_field_first = 1;
                            else
                                cur.top_field_first = 0;
                        }
                        else
                        {
                            /* Most likely progressive */
                            cur.top_field_first = 0;
                        }
                    }

                    // FIXME do something with unavailable reference frames

                    /* Sort B-frames into display order */

                    if (this.sps.bitstream_restriction_flag != 0
                            && s.has_b_frames < this.sps.num_reorder_frames)
                    {
                        s.has_b_frames = this.sps.num_reorder_frames;
                        s.low_delay = 0;
                    }

                    if (s.strict_std_compliance >= MpegEncContext.FF_COMPLIANCE_STRICT
                            && 0 == this.sps.bitstream_restriction_flag)
                    {
                        s.has_b_frames = MAX_DELAYED_PIC_COUNT;
                        s.low_delay = 0;
                    }

                    pics = 0;
                    while (this.delayed_pic[pics] != null)
                        pics++;

                    //assert(pics <= MAX_DELAYED_PIC_COUNT);

                    // Append the current picture to the reorder buffer and pin it
                    // so it is not recycled before it has been output.
                    this.delayed_pic[pics++] = cur;
                    if (cur.reference == 0)
                        cur.reference = DELAYED_PIC_REF;

                    // As in the flush path above: pick the delayed picture with
                    // the smallest POC, stopping at a key frame or MMCO reset.
                    @out = this.delayed_pic[0];
                    out_idx = 0;
                    for (i = 1; this.delayed_pic[i] != null
                            && 0 == this.delayed_pic[i].key_frame
                            && 0 == this.delayed_pic[i].mmco_reset; i++)
                        if (this.delayed_pic[i].poc < @out.poc)
                        {
                            @out = this.delayed_pic[i];
                            out_idx = i;
                        }
                    if (s.has_b_frames == 0
                            && (this.delayed_pic[0].key_frame != 0 || this.delayed_pic[0].mmco_reset != 0))
                        this.outputed_poc = int.MinValue;
                    out_of_order = (@out.poc < this.outputed_poc) ? 1 : 0;

                    if (this.sps.bitstream_restriction_flag != 0
                            && s.has_b_frames >= this.sps.num_reorder_frames)
                    {
                    }
                    else if ((out_of_order != 0 && pics - 1 == s.has_b_frames && s.has_b_frames < MAX_DELAYED_PIC_COUNT)
                          || (s.low_delay != 0 && ((this.outputed_poc != int.MinValue && @out.poc > this.outputed_poc + 2) || cur.pict_type == FF_B_TYPE)))
                    {
                        s.low_delay = 0;
                        s.has_b_frames++;
                    }

                    if (0 != out_of_order || pics > s.has_b_frames)
                    {
                        // Remove the picture being output from the reorder buffer.
                        @out.reference &= ~DELAYED_PIC_REF;
                        for (i = out_idx; this.delayed_pic[i] != null; i++)
                            this.delayed_pic[i] = this.delayed_pic[i + 1];
                    }
                    if (0 == out_of_order && pics > s.has_b_frames)
                    {
                        data_size[0] = 1;

                        if (out_idx == 0
                                && this.delayed_pic[0] != null
                                && (this.delayed_pic[0].key_frame != 0 || this.delayed_pic[0].mmco_reset != 0))
                        {
                            this.outputed_poc = int.MinValue;
                        }
                        else
                            this.outputed_poc = @out.poc;
                        @out.copyTo(displayPicture);

                        // DebugTool.dumpFrameData(displayPicture);
                    }
                    else
                    {
                        // av_log(avctx, AV_LOG_DEBUG, "no picture\n");
                    }
                }
            }

            // //assert(pict.data[0] || !*data_size);
            // ff_print_debug_info(s, pict);
            // printf("out %d\n", (int)pict.data[0]);

            return s.get_consumed_bytes(buf_index, buf_size);
        }
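
The display-order selection above appears twice (once in the flush path, once per decoded frame): scan the delayed-picture buffer for the smallest POC, stopping at the first key frame or MMCO reset. A minimal stand-alone sketch of that selection, using a hypothetical SimplePic type rather than the decoder's real AVFrame:

        // Hypothetical picture type, for illustration only.
        class SimplePic
        {
            public int poc;            // picture order count
            public bool key_frame;     // IDR/key picture
            public bool mmco_reset;    // memory-management reset
        }

        // Index of the delayed picture to display next: the smallest POC seen
        // before the first key frame or MMCO reset (those force a buffer flush).
        // Assumes delayed[0] != null, as the decoder's buffer is packed.
        static int PickOutputIndex(SimplePic[] delayed)
        {
            int outIdx = 0;
            for (int i = 1; i < delayed.Length && delayed[i] != null
                    && !delayed[i].key_frame && !delayed[i].mmco_reset; i++)
            {
                if (delayed[i].poc < delayed[outIdx].poc)
                    outIdx = i;
            }
            return outIdx;
        }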
Example #3
        public int avcodec_decode_video2(AVFrame picture, int[] got_picture_ptr /* single-element array used as an out parameter */, AVPacket avpkt)
        {
            int ret;

            got_picture_ptr[0] = 0;
            if ((this.coded_width != 0 || this.coded_height != 0) && av_image_check_size(this.coded_width, this.coded_height, 0, this) != 0)
            {
                return -7;
            }

            this.pkt = avpkt;

            if ((this.codec.capabilities & H264Decoder.CODEC_CAP_DELAY) != 0 || avpkt.size != 0)
            {
                ret = this.codec.decode(this, picture, got_picture_ptr, avpkt);

                // emms_c(); //needed to avoid an emms_c() call before every return;

                picture.pkt_dts = avpkt.dts;

                if (got_picture_ptr[0] != 0)
                {
                    this.frame_number++;
                }
            }
            else
            {
                ret = 0;
            }

            return ret;
        }
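
A typical calling pattern for this entry point (a sketch; ReadNextPacket and HandleDecodedFrame are hypothetical stand-ins for the demuxer and the frame consumer, and 'context' is the decoder instance exposing the method above):

        int[] gotPicture = new int[1];
        AVFrame frame = new AVFrame();
        AVPacket pkt;

        while ((pkt = ReadNextPacket()) != null)
        {
            if (context.avcodec_decode_video2(frame, gotPicture, pkt) < 0)
                break;                       // decode error
            if (gotPicture[0] != 0)
                HandleDecodedFrame(frame);   // a frame is ready in display order
        }

        // Drain: decode_frame (Example #2) flushes its reorder buffer when it
        // sees an empty packet, provided the codec advertises CODEC_CAP_DELAY.
        AVPacket flush = new AVPacket();     // size == 0
        do
        {
            context.avcodec_decode_video2(frame, gotPicture, flush);
            if (gotPicture[0] != 0)
                HandleDecodedFrame(frame);
        } while (gotPicture[0] != 0);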
Example #4
        public int decode(MpegEncContext s, AVFrame outdata, int[] outdata_size, AVPacket avpkt)
        {
            // Thin dispatcher: forwards to the codec's private decode_frame (Example #2).
            return s.priv_data.decode_frame(outdata, outdata_size, avpkt);
        }