Example #1
0
        public AVStream(IntPtr fileContext)
        {
            // Wraps an already-opened native AVFormatContext: for every stream that has an
            // available decoder, opens the codec and records it by stream index so GetNext()
            // can route packets. Streams without a decoder are silently skipped.
            rawFormatCtx = fileContext;
            pFormatCtx = new NativeGetter<AV.AVFormatContext>(fileContext).Get();
            for (var i = 0; i < pFormatCtx.nb_streams; i++)
            {
                // Marshal the native stream and its codec context into managed copies.
                var stream = new NativeGetter<AV.AVStream>(pFormatCtx.Streams[i]).Get();
                var codecContext = new NativeGetter<AV.AVCodecContext>(stream.codec).Get();
                IntPtr codec = AV.avcodec_find_decoder(codecContext.codec_id);
                if (codec != IntPtr.Zero)
                {
                    var codecHandle = new NativeGetter<AV.AVCodec>(codec).Get();
                    // NOTE(review): this tests the codec's *capabilities* bitmask against
                    // CODEC_FLAG_TRUNCATED. FFmpeg's canonical pattern tests CODEC_CAP_TRUNCATED
                    // (a different constant, 0x8 vs 0x10000) before OR-ing CODEC_FLAG_TRUNCATED
                    // into ctx->flags — confirm the wrapper's constant values; as written this
                    // condition may never trigger (or trigger on the wrong capability bit).
                    if ((codecHandle.capabilities & AV.CODEC_FLAG_TRUNCATED) != 0)
                    {
                        codecContext.flags |= AV.CODEC_FLAG_TRUNCATED;
                        // Write the modified flags back into native memory before avcodec_open2
                        // reads the context (the managed copy alone would be ignored).
                        new NativeSetter<AV.AVCodecContext>(stream.codec).Set(codecContext);
                    }

                    int ret = AV.avcodec_open2(stream.codec, codec, IntPtr.Zero);
                    if (ret < 0)
                        throw new Exception("Can not open codec for type " + codecContext.codec_type.ToString());

                    // Index -> opened codec context / media type, used when demuxing packets.
                    decoderTable.Add(i, stream.codec);
                    mediaTypeTable.Add(i, codecContext.codec_type);
                }
            }

        }
Example #2
0
        /// <summary>
        /// Converts the decoded <c>avFrame</c> to a BGR24 pixel buffer and fills
        /// <paramref name="t"/> with the managed pixel data and frame geometry.
        /// </summary>
        /// <param name="t">Receives width/height, source/destination pixel formats,
        /// the converted pixel bytes and the destination stride.</param>
        /// <exception cref="Exception">Thrown when frame allocation or scaler creation fails.</exception>
        private void ConvertToBitmap(ref VideoFrameType t)
        {
            var frame = avFrame;

            // Destination frame plus a raw buffer large enough for BGR24 at the source size.
            IntPtr final = AV.avcodec_alloc_frame();
            if (final == IntPtr.Zero)
                throw new Exception("avcodec_alloc_frame failed");

            var dst_fmt = AV.AVPixelFormat.AV_PIX_FMT_BGR24;
            int count = AV.avpicture_get_size(dst_fmt, codecCtx.width, codecCtx.height);
            IntPtr bufferArr = Marshal.AllocHGlobal(count);

            AV.AVFrame finalFrame;
            byte[] buffer;
            try
            {
                // Point the destination frame's data/linesize pointers at the raw buffer.
                AV.avpicture_fill(final, bufferArr, dst_fmt, codecCtx.width, codecCtx.height);

                IntPtr swsContext = AV.sws_getContext(codecCtx.width, codecCtx.height, codecCtx.pix_fmt,
                    codecCtx.width, codecCtx.height, dst_fmt, AV.SWS_BICUBIC, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
                if (swsContext == IntPtr.Zero)
                    throw new Exception("sws_getContext failed for "
                        + codecCtx.width + "x" + codecCtx.height);

                // NOTE(review): swsContext is never released; the wrapper does not appear to
                // expose sws_freeContext in this file — confirm a binding exists and free it,
                // otherwise each conversion leaks a scaler context.

                // Re-read the frame so data/linesize reflect avpicture_fill's writes.
                finalFrame = new NativeGetter<AV.AVFrame>(final).Get();
                AV.sws_scale(swsContext, frame.data, frame.linesize, 0, codecCtx.height, finalFrame.data, finalFrame.linesize);
                new NativeSetter<AV.AVFrame>(final).Set(finalFrame);

                // Copy the converted pixels into managed memory before the native buffer is freed.
                buffer = new byte[count];
                Marshal.Copy(bufferArr, buffer, 0, count);
            }
            finally
            {
                // Release native allocations even when the scaler setup throws
                // (the original leaked both `final` and `bufferArr` on that path).
                AV.av_free(final);
                Marshal.FreeHGlobal(bufferArr);
            }

            t.width = codecCtx.width;
            t.height = codecCtx.height;
            t.SourceFormat = codecCtx.pix_fmt;
            t.DestFormat = dst_fmt;
            t.managedData = buffer;
            t.linesize = finalFrame.linesize[0];
        }
Example #3
0
        /// <summary>
        /// Reads the next packet from the demuxer and wraps it as an audio or video frame.
        /// </summary>
        /// <returns>An <see cref="IAVFrame"/> for the packet's stream, or <c>null</c> at
        /// end-of-stream / on read error, or when the packet belongs to a stream with no
        /// opened decoder.</returns>
        /// <exception cref="Exception">Thrown for packets of an unsupported media type.</exception>
        public IAVFrame GetNext()
        {
            // Native packet buffer; ownership transfers to the returned frame on success.
            IntPtr pPacket = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(AV.AVPacket)));
            if (AV.av_read_frame(rawFormatCtx, pPacket) != 0)
            {
                // End of stream or read error.
                Marshal.FreeHGlobal(pPacket);
                return null;
            }

            AV.AVPacket packet = new NativeGetter<AV.AVPacket>(pPacket).Get();

            // Drop packets from streams we did not open a decoder for.
            if (!decoderTable.ContainsKey(packet.stream_index) ||
                !mediaTypeTable.ContainsKey(packet.stream_index))
            {
                // NOTE(review): the packet's payload (allocated by av_read_frame) is not
                // released here, only our struct buffer — confirm whether the wrapper exposes
                // av_free_packet/av_packet_unref and call it if so.
                Marshal.FreeHGlobal(pPacket);
                return null;
            }

            var codec = decoderTable[packet.stream_index];
            var type = mediaTypeTable[packet.stream_index];
            switch (type)
            {
                case AV.AVMediaType.AVMEDIA_TYPE_AUDIO:
                    return new AudioFrame(pPacket, codec);
                case AV.AVMediaType.AVMEDIA_TYPE_VIDEO:
                    return new VideoFrame(pPacket, codec);
                default:
                    // Original leaked pPacket on this path; free it before throwing.
                    Marshal.FreeHGlobal(pPacket);
                    throw new Exception("Not support media type " + type.ToString());
            }
        }