Esempio n. 1
0
 /// <summary>Initializes a new instance of the <see cref="StreamTranscoder"/> class.</summary>
 /// <param name="inStream">Source stream containing the bitstream to transcode.</param>
 /// <param name="config">Transcoder configuration (decode/VPP/encode parameters).</param>
 /// <param name="impl">Media SDK implementation to use.</param>
 /// <param name="forceSystemMemory">if set to <c>true</c>, system-memory surfaces are requested.</param>
 public StreamTranscoder(Stream inStream, TranscoderConfiguration config, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO, bool forceSystemMemory = false)
 {
     // Capture inputs, then build the low-level transcoder that does the real work.
     this.inStream      = inStream;
     this.config        = config;
     lowLevelTranscoder = new LowLevelTranscoderCSharp(config, impl, forceSystemMemory);
     //lowLevelTranscoder = new LowLevelTranscoderVidMemSysMem(config, impl, forceSystemMemory);
 }
Esempio n. 2
0
        /// <summary>Attempts to decode a stream using codecId as the format indicator.
        /// Only enough of the stream is decoded to return stream parameters such as width, height, etc...</summary>
        /// <param name="stream">The stream; up to 64 KiB are consumed from its current position.</param>
        /// <param name="codecId">The codec identifier.</param>
        /// <param name="impl">The implementation.</param>
        /// <returns>A video parameter structure describing the bitstream.</returns>
        public static mfxVideoParam DecodeHeader(Stream stream, CodecId codecId, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO)
        {
            var buf = new byte[65536]; //avail after init

            // Stream.Read may legally return fewer bytes than requested even before
            // end-of-stream (e.g. network streams), so loop until the buffer is full
            // or the stream is exhausted; a single Read could truncate the header probe.
            int n = 0;
            while (n < buf.Length)
            {
                int read = stream.Read(buf, n, buf.Length - n);
                if (read == 0)
                {
                    break;      // end of stream
                }
                n += read;
            }

            if (n < buf.Length)
            {
                Array.Resize(ref buf, n);
            }

            return(DecodeHeader(buf, codecId, impl));
        }
Esempio n. 3
0
        /// <summary>Reads the file header information, restoring the stream position afterwards.</summary>
        /// <param name="codecId">The codec identifier.</param>
        /// <param name="impl">The implementation.</param>
        /// <param name="infs">The input stream; must be seekable (position is saved and restored).</param>
        /// <param name="outIOPattern">The IOPattern to stamp onto the returned parameters.</param>
        /// <returns>Decoder parameters describing the bitstream, with IOPattern set to <paramref name="outIOPattern"/>.</returns>
        public static unsafe mfxVideoParam ReadFileHeaderInfo(CodecId codecId, mfxIMPL impl, Stream infs, IOPattern outIOPattern)
        {
            long oldposition = infs.Position;

            var buf = new byte[65536]; //avail after init

            // Stream.Read may return fewer bytes than requested even before end-of-stream,
            // so loop until the buffer is full or the stream is exhausted.
            int n = 0;
            while (n < buf.Length)
            {
                int read = infs.Read(buf, n, buf.Length - n);
                if (read == 0)
                {
                    break;      // end of stream
                }
                n += read;
            }

            if (n < buf.Length)
            {
                Array.Resize(ref buf, n);
            }

            // Rewind so the caller's decoder sees the bitstream from the start.
            infs.Position = oldposition;
            var decoderParameters = QuickSyncStatic.DecodeHeader(buf, codecId, impl);

            decoderParameters.IOPattern = outIOPattern;
            return(decoderParameters);
        }
Esempio n. 4
0
        //static public IEnumerable<byte[]> DecodeStream(Stream s, FourCC fourcc = FourCC.NV12, AccelerationLevel acceleration = AccelerationLevel.BestAvailableAccelerationUseGPUorCPU)
        //{
        //    return null;
        //}


        /// <summary>
        /// Construct the decoder.
        /// </summary>
        /// <param name="stream">Stream to be read from; must be seekable (position is saved and restored
        /// around the header probe).</param>
        /// <param name="codecId">What format the bitstream is in: AVC, HEVC, MJPEG, ...</param>
        /// <param name="impl">implementation to use</param>
        /// <param name="outIOPattern">memory type for decoding</param>
        public StreamDecoder(Stream stream, CodecId codecId, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO, IOPattern outIOPattern = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY)
        {
            long oldposition = stream.Position;

            var buf = new byte[65536]; //avail after init

            // Stream.Read may return fewer bytes than requested even before end-of-stream,
            // so loop until the buffer is full or the stream is exhausted; a single Read
            // could truncate the header probe and make DecodeHeader fail spuriously.
            int n = 0;
            while (n < buf.Length)
            {
                int read = stream.Read(buf, n, buf.Length - n);
                if (read == 0)
                {
                    break;      // end of stream
                }
                n += read;
            }

            if (n < buf.Length)
            {
                Array.Resize(ref buf, n);
            }

            // Rewind so decoding starts from the beginning of the bitstream.
            stream.Position                  = oldposition;
            this.decoderParameters           = QuickSyncStatic.DecodeHeader(buf, codecId, impl);
            this.decoderParameters.IOPattern = outIOPattern;

            lowLevelDecoder = new LowLevelDecoder(decoderParameters, null, impl);
            Init(stream);
        }
Esempio n. 5
0
        /// <summary>
        /// Generate a human readable description of an mfxIMPL value,
        /// e.g. "GPU: MFX_IMPL_HARDWARE|VIA_D3D11".
        /// </summary>
        /// <param name="impl">Implementation flags to describe.</param>
        /// <returns>Readable text for the base implementation plus any "via" infrastructure flag.</returns>
        public static string ImplementationString(mfxIMPL impl)
        {
            // Low byte selects the base implementation (software/hardware/auto/...).
            string text = ((mfxIMPL)((int)impl & 0xff)).ToString();

            if (text.Contains("SOFTWARE"))
            {
                text = "CPU: " + text;
            }
            if (text.Contains("HARDWARE"))
            {
                text = "GPU: " + text;
            }

            // Remaining bits carry the infrastructure ("via") selection.
            mfxIMPL via = (mfxIMPL)((int)impl & ~0xff);

            if (via == mfxIMPL.MFX_IMPL_VIA_ANY)
            {
                text += "|VIA_ANY";
            }
            else if (via == mfxIMPL.MFX_IMPL_VIA_D3D9)
            {
                text += "|VIA_D3D9";
            }
            else if (via == mfxIMPL.MFX_IMPL_VIA_D3D11)
            {
                text += "|VIA_D3D11";
            }
            else if (via == mfxIMPL.MFX_IMPL_VIA_VAAPI)
            {
                text += "|VIA_VAAPI";
            }
            else if (via == mfxIMPL.MFX_IMPL_AUDIO)
            {
                text += "|AUDIO";
            }

            return(text);
        }
Esempio n. 6
0
        /// <summary>Attempts to decode a byte array using codecId as the format indicator.
        /// If the array is decodable, stream parameters such as width, height, etc... will be returned.</summary>
        /// <param name="bitstream">The bitstream; must contain at least one byte.</param>
        /// <param name="codecId">The codec identifier.</param>
        /// <param name="impl">The implementation.</param>
        /// <returns>A video parameter structure describing the bitstream; IOPattern is deliberately cleared.</returns>
        /// <exception cref="ArgumentException">Thrown when <paramref name="bitstream"/> is null or empty.</exception>
        public static mfxVideoParam DecodeHeader(byte[] bitstream, CodecId codecId, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO)
        {
            // Guard: the fixed statement below takes &bitstream[0], which would throw an
            // unhelpful IndexOutOfRangeException for a null/empty array.
            if (bitstream == null || bitstream.Length == 0)
            {
                throw new ArgumentException("bitstream must contain at least one byte", nameof(bitstream));
            }

            // Explicitly zero-initialize the structs handed to native code rather than
            // relying on partially-assigned locals.
            mfxVideoParam mfxDecParam = new mfxVideoParam();

            mfxStatus sts;
            var       v = new mfxVersion();

            v.Major = 1;
            v.Minor = 0;

            // A throw-away session is opened just for header parsing; always closed below.
            var session = new mfxSession();

            sts = UnsafeNativeMethods.MFXInit(impl, &v, &session);
            QuickSyncStatic.ThrowOnBadStatus(sts, "MFXInit");

            try
            {
                mfxBitstream bs = new mfxBitstream();
                mfxDecParam.mfx.CodecId = codecId;
                mfxDecParam.IOPattern   = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY;

                // Pin the managed array while native code reads from it.
                fixed(byte *pp = &bitstream[0])
                {
                    bs.Data       = (IntPtr)pp;
                    bs.DataLength = (uint)bitstream.Length;
                    bs.MaxLength  = (uint)bitstream.Length;
                    bs.DataOffset = 0;

                    sts = UnsafeNativeMethods.MFXVideoDECODE_DecodeHeader(session, &bs, &mfxDecParam);
                    QuickSyncStatic.ThrowOnBadStatus(sts, "decodeheader");
                }
            }
            finally
            {
                UnsafeNativeMethods.MFXClose(session);
            }

            mfxDecParam.IOPattern = (IOPattern)0;       // we do not want this to be the source of IOPattern
                                                        // must be set it another place so it doesnt default to sysmem
            return(mfxDecParam);
        }
        /// <summary>Initializes a new instance of the <see cref="LowLevelEncoderNative"/> class.
        /// Opens a Media SDK session, creates the native encoder object, shares the session and
        /// encoder parameters with it, initializes it, and captures pointers to the natively
        /// allocated frame surfaces.</summary>
        /// <param name="mfxEncParams">The encoder parameters handed to the native encoder.</param>
        /// <param name="impl">The implementation.</param>
        unsafe public LowLevelEncoderNative(mfxVideoParam mfxEncParams, mfxIMPL impl)
        {
            mfxStatus sts;

            this._session = new mfxSession();
            // Minimum API version requested from the dispatcher.
            var ver = new mfxVersion()
            {
                Major = 1, Minor = 3
            };

            fixed(mfxSession *s = &_session)
            sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);

            QuickSyncStatic.ThrowOnBadStatus(sts, nameof(UnsafeNativeMethods.MFXInit));



            // Create the native encoder; 'h' doubles as a pointer to the shared state struct.
            h = NativeLLEncoderUnsafeNativeMethods.NativeEncoder_New();
            Trace.Assert(h != IntPtr.Zero);
            shared               = (EncoderShared *)h;
            shared->session      = _session;
            shared->mfxEncParams = mfxEncParams;
            // 'safety' appears to hold the native side's sizeof(EncoderShared); a mismatch with the
            // managed sizeof signals that the two struct layouts have diverged — TODO confirm.
            Trace.Assert(shared->safety == sizeof(EncoderShared));



            sts = NativeLLEncoderUnsafeNativeMethods.NativeEncoder_Init(h);


            QuickSyncStatic.ThrowOnBadStatus(sts, nameof(NativeLLEncoderUnsafeNativeMethods.NativeEncoder_Init));

            // Record the native surface pointers so managed code can hand them out by index.
            frameIntPtrs = new IntPtr[shared->nEncSurfNum];
            for (int i = 0; i < frameIntPtrs.Length; i++)
            {
                frameIntPtrs[i] = (IntPtr)shared->pmfxSurfaces[i];
            }

            GetAndPrintWarnings();
        }
        /// <summary>
        /// Constructs the decoder: opens a Media SDK session, queries surface requirements for the
        /// decoder (and the optional VPP stage), allocates the surface pools in system or video
        /// memory according to the IOPatterns, and initializes the DECODE (and VPP) components.
        /// </summary>
        /// <param name="mfxDecParamsX">Decoder parameters; IOPattern selects system- vs. video-memory output.</param>
        /// <param name="VPPParamsX">Optional VPP parameters. When null the VPP stage stays disabled and a
        /// pass-through configuration is synthesized only so the surface-count math below is well defined.</param>
        /// <param name="impl">Media SDK implementation used to initialize the session.</param>
        public LowLevelDecoderCSharp(mfxVideoParam mfxDecParamsX,
                                     mfxVideoParam?VPPParamsX = null,
                                     mfxIMPL impl             = mfxIMPL.MFX_IMPL_AUTO)
        {
            mfxStatus sts;
            bool      enableVPP = VPPParamsX != null;


            if (VPPParamsX == null)
            {
                // Create a default pass-through VPPParamsX (same frame info in and out, system memory).
                var foo = new mfxVideoParam();
                foo.AsyncDepth = 1;
                foo.IOPattern  = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY | IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY;
                foo.vpp.In     = mfxDecParamsX.mfx.FrameInfo;
                foo.vpp.Out    = mfxDecParamsX.mfx.FrameInfo;
                VPPParamsX     = foo;
            }


            // VPPParamsX is guaranteed non-null at this point (assigned above when the caller passed null).
            mfxVideoParam VPPParams    = VPPParamsX.Value;
            mfxVideoParam mfxDecParams = mfxDecParamsX;

            // (Removed: two GCHandle pins previously taken here pinned the still-null
            // pmfxSurfaces/pmfxSurfaces2 fields; the real arrays are pinned right after
            // they are allocated below.)

            this.videoParam = mfxDecParams;
            this.enableVPP  = enableVPP;



            session = new mfxSession();
            var ver = new mfxVersion()
            {
                Major = 1, Minor = 3     // minimum API version this code relies on
            };

            fixed(mfxSession *s = &session)
            sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);

            QuickSyncStatic.ThrowOnBadStatus(sts, "MFXInit");



            bool decVideoMemOut = (mfxDecParams.IOPattern & IOPattern.MFX_IOPATTERN_OUT_VIDEO_MEMORY) != 0;
            bool vppVideoMemIn  = (VPPParams.IOPattern & IOPattern.MFX_IOPATTERN_IN_VIDEO_MEMORY) != 0;
            bool vppVideoMemOut = (VPPParams.IOPattern & IOPattern.MFX_IOPATTERN_OUT_VIDEO_MEMORY) != 0;

            Trace.Assert(!enableVPP || decVideoMemOut == vppVideoMemIn, "When the VPP is enabled, the memory type from DEC into VPP must be of same type");



            if (vppVideoMemIn || vppVideoMemOut)
            {
                //if you want to use video memory, you need to have a way to allocate the Direct3D or Vaapi frames
                videoAccelerationSupport = new VideoAccelerationSupport(session);
            }

            // Ask the decoder how many surfaces it needs.
            fixed(mfxFrameAllocRequest *p = &DecRequest)
            sts = UnsafeNativeMethods.MFXVideoDECODE_QueryIOSurf(session, &mfxDecParams, p);

            if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
            {
                sts = 0;      // partial acceleration is only a warning; continue
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, "DECODE_QueryIOSurf");


            if (enableVPP)
            {
                // VPPRequest[0] describes VPP input surfaces, VPPRequest[1] VPP output surfaces.
                fixed(mfxFrameAllocRequest *p = &VPPRequest[0])
                sts = UnsafeNativeMethods.MFXVideoVPP_QueryIOSurf(session, &VPPParams, p);

                if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
                {
                    sts = 0;
                }
                QuickSyncStatic.ThrowOnBadStatus(sts, "VPP_QueryIOSurf");

                // The application reads VPP output surfaces directly, so mark them accordingly.
                VPPRequest[1].Type |= FrameMemoryType.WILL_READ;
            }



            // Determine required surface counts: one shared pool for decoder-output/VPP-input,
            // and (when VPP is enabled) a second pool for VPP output.
            int nSurfNumVPPOut = 0;

            var numSurfaces = DecRequest.NumFrameSuggested + VPPRequest[0].NumFrameSuggested + VPPParams.AsyncDepth;

            if (enableVPP)
            {
                nSurfNumVPPOut = 0 + VPPRequest[1].NumFrameSuggested + VPPParams.AsyncDepth;
            }



            // Unmanaged buffer that receives the compressed input bitstream.
            bitstreamBuffer      = Marshal.AllocHGlobal(defaultBitstreamBufferSize);
            bitstream.Data       = bitstreamBuffer;
            bitstream.DataLength = 0;
            bitstream.MaxLength  = (uint)defaultBitstreamBufferSize;
            bitstream.DataOffset = 0;



            //allocate decoder frames via directx
            mfxFrameAllocResponse DecResponse = new mfxFrameAllocResponse();

            if (decVideoMemOut)
            {
                DecRequest.NumFrameMin = DecRequest.NumFrameSuggested = (ushort)numSurfaces;

                fixed(mfxFrameAllocRequest *p = &DecRequest)
                videoAccelerationSupport.AllocFrames(p, &DecResponse);
            }


            //allocate vpp frames via directx
            mfxFrameAllocResponse EncResponse = new mfxFrameAllocResponse();

            if (vppVideoMemOut)
            {
                VPPRequest[1].NumFrameMin = VPPRequest[1].NumFrameSuggested = (ushort)nSurfNumVPPOut;

                fixed(mfxFrameAllocRequest *p = &VPPRequest[1])
                videoAccelerationSupport.AllocFrames(p, &EncResponse);
            }



            // Allocate surfaces for decoder
            // - Width and height of buffer must be aligned, a multiple of 32
            // - Frame surface array keeps pointers all surface planes and general frame info
            UInt16 width        = (UInt16)QuickSyncStatic.ALIGN32(DecRequest.Info.Width);
            UInt16 height       = (UInt16)QuickSyncStatic.ALIGN32(DecRequest.Info.Height);
            int    bitsPerPixel = VideoUtility.GetBitsPerPixel(mfxDecParams.mfx.FrameInfo.FourCC);
            int    surfaceSize  = width * height * bitsPerPixel / 8;

            if (!decVideoMemOut)
            {
                surfaceBuffers = Marshal.AllocHGlobal(surfaceSize * numSurfaces);
            }



            // Allocate surface headers (mfxFrameSurface1) for decoder; pin the array because
            // unmanaged code holds pointers into it.
            pmfxSurfaces = new mfxFrameSurface1[numSurfaces];
            pinningHandles.Add(GCHandle.Alloc(pmfxSurfaces, GCHandleType.Pinned));

            for (int i = 0; i < numSurfaces; i++)
            {
                pmfxSurfaces[i]      = new mfxFrameSurface1();
                pmfxSurfaces[i].Info = mfxDecParams.mfx.FrameInfo;
                if (!decVideoMemOut)
                {
                    switch (mfxDecParams.mfx.FrameInfo.FourCC)
                    {
                    case FourCC.NV12:
                        // NV12: Y plane followed by interleaved UV plane at offset width*height.
                        pmfxSurfaces[i].Data.Y_ptr = (byte *)surfaceBuffers + i * surfaceSize;
                        pmfxSurfaces[i].Data.U_ptr = pmfxSurfaces[i].Data.Y_ptr + width * height;
                        pmfxSurfaces[i].Data.V_ptr = pmfxSurfaces[i].Data.U_ptr + 1;
                        pmfxSurfaces[i].Data.Pitch = width;
                        break;

                    case FourCC.YUY2:
                        // YUY2: packed Y0 U0 Y1 V0, two bytes per pixel.
                        pmfxSurfaces[i].Data.Y_ptr = (byte *)surfaceBuffers + i * surfaceSize;
                        pmfxSurfaces[i].Data.U_ptr = pmfxSurfaces[i].Data.Y_ptr + 1;
                        pmfxSurfaces[i].Data.V_ptr = pmfxSurfaces[i].Data.U_ptr + 3;
                        pmfxSurfaces[i].Data.Pitch = (ushort)(width * 2);
                        break;

                    default:      //find sysmem_allocator.cpp for more help
                        throw new NotImplementedException();
                    }
                }
                else
                {
                    pmfxSurfaces[i].Data.MemId = DecResponse.mids_ptr[i];   // MID (memory id) represent one D3D NV12 surface
                }
            }



            if (enableVPP)
            {
                UInt16 width2        = (UInt16)QuickSyncStatic.ALIGN32(VPPRequest[1].Info.CropW);
                UInt16 height2       = (UInt16)QuickSyncStatic.ALIGN32(VPPRequest[1].Info.CropH);
                int    bitsPerPixel2 = VideoUtility.GetBitsPerPixel(VPPParams.vpp.Out.FourCC);     // NV12 format is a 12 bits per pixel format
                int    surfaceSize2  = width2 * height2 * bitsPerPixel2 / 8;
                // NOTE(review): for NV12 this evaluates to 1.5*width2 rather than width2 — confirm intended pitch.
                int    pitch2        = width2 * bitsPerPixel2 / 8;

                if (!vppVideoMemOut)
                {
                    surfaceBuffers2 = Marshal.AllocHGlobal(surfaceSize2 * nSurfNumVPPOut);
                }

                pmfxSurfaces2 = new mfxFrameSurface1[nSurfNumVPPOut];
                pinningHandles.Add(GCHandle.Alloc(pmfxSurfaces2, GCHandleType.Pinned));
                for (int i = 0; i < nSurfNumVPPOut; i++)
                {
                    pmfxSurfaces2[i]      = new mfxFrameSurface1();
                    pmfxSurfaces2[i].Info = VPPParams.vpp.Out;

                    if (!vppVideoMemOut)
                    {
                        pmfxSurfaces2[i].Data.Pitch = (ushort)pitch2;
                        switch (VPPParams.vpp.Out.FourCC)
                        {
                        case FourCC.NV12:
                            pmfxSurfaces2[i].Data.Y_ptr = (byte *)surfaceBuffers2 + i * surfaceSize2;
                            // FIX: the chroma-plane offset must use the VPP *output* dimensions
                            // (width2 * height2) that sized surfaceBuffers2 — the previous code used
                            // the decoder dimensions (width * height), corrupting chroma whenever the
                            // VPP output size differs from the decoder size.
                            pmfxSurfaces2[i].Data.U_ptr = pmfxSurfaces2[i].Data.Y_ptr + width2 * height2;
                            pmfxSurfaces2[i].Data.V_ptr = pmfxSurfaces2[i].Data.U_ptr + 1;
                            break;

                        case FourCC.RGB4:
                            // RGB4: packed 4-byte pixels; B, G, R pointers offset into the same buffer.
                            pmfxSurfaces2[i].Data.B_ptr = (byte *)surfaceBuffers2 + i * surfaceSize2;
                            pmfxSurfaces2[i].Data.G_ptr = (byte *)surfaceBuffers2 + i * surfaceSize2 + 1;
                            pmfxSurfaces2[i].Data.R_ptr = (byte *)surfaceBuffers2 + i * surfaceSize2 + 2;
                            // pmfxSurfaces2[i].Data.A_ptr = (byte*)surfaceBuffers2 + i * surfaceSize2+3;
                            break;

                        default:
                            break;
                        }
                    }
                    else
                    {
                        pmfxSurfaces2[i].Data.MemId = EncResponse.mids_ptr[i];   // MID (memory id) represent one D3D NV12 surface
                    }
                }
            }



            sts = UnsafeNativeMethods.MFXVideoDECODE_Init(session, &mfxDecParams);
            if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
            {
                sts = 0;
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, "MFXVideoDECODE_Init");


            if (enableVPP)
            {
                sts = UnsafeNativeMethods.MFXVideoVPP_Init(session, &VPPParams);
                if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
                {
                    sts = 0;
                }
                QuickSyncStatic.ThrowOnBadStatus(sts, "MFXVideoVPP_Init");
            }
        }
Esempio n. 9
0
 /// <summary>Initializes a new instance of the <see cref="StreamDecoder"/> class.
 /// Fully specify decode params, and optionally VPP params</summary>
 /// <param name="stream">The stream supplying the bitstream to decode.</param>
 /// <param name="decodeParameters">The decode parameters.</param>
 /// <param name="mfxVPPParams">The MFX VPP parameters, or null to disable the VPP stage.</param>
 /// <param name="impl">The implementation.</param>
 public StreamDecoder(Stream stream, mfxVideoParam decodeParameters, mfxVideoParam?mfxVPPParams = null, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO)
 {
     // Build the low-level decoder from the caller-supplied parameters, then start reading.
     lowLevelDecoder        = new LowLevelDecoder(decodeParameters, mfxVPPParams, impl);
     this.decoderParameters = decodeParameters;
     Init(stream);
 }
Esempio n. 10
0
        /// <summary>Initializes a new instance of the <see cref="LowLevelTranscoderCSharp"/> class.
        /// Opens a Media SDK session and initializes the DECODE, VPP and ENCODE components described
        /// by <paramref name="config"/>, allocating the bitstream buffer, both surface pools and the
        /// asynchronous task pool in unmanaged system memory.</summary>
        /// <param name="config">The configuration (decoder, VPP and encoder parameters).</param>
        /// <param name="impl">The implementation.</param>
        /// <param name="forceSystemMemory">if set to <c>true</c> [force system memory].
        /// NOTE(review): currently unused here — the device setup that consumed it is commented out below.</param>
        public LowLevelTranscoderCSharp(TranscoderConfiguration config, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO, bool forceSystemMemory = false)
        {
            mfxStatus sts;

            mfxVideoParam mfxDecParams = config.decParams;
            mfxVideoParam mfxVPPParams = config.vppParams;
            mfxVideoParam mfxEncParams = config.encParams;


            // Open the session, requesting API version 1.3 as the minimum.
            session = new mfxSession();
            var ver = new mfxVersion()
            {
                Major = 1, Minor = 3
            };

            fixed(mfxSession *s = &session)
            sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);

            QuickSyncStatic.ThrowOnBadStatus(sts, "MFXInit");
            //deviceSetup = new DeviceSetup(session, forceSystemMemory);



            //  mfxVideoParam mfxDecParams = new mfxVideoParam();
            //  mfxDecParams.mfx.CodecId = CodecId.MFX_CODEC_AVC;



            // Unmanaged 1 MB buffer that receives the compressed input bitstream.
            int bufsize = (int)1e6;

            mfxBS             = (mfxBitstream *)MyAllocHGlobalAndZero(sizeof(mfxBitstream));
            mfxBS->Data       = MyAllocHGlobalAndZero(bufsize);
            mfxBS->DataLength = (uint)0;
            mfxBS->MaxLength  = (uint)bufsize;
            mfxBS->DataOffset = 0;


            // NOTE(review): outwidth/outheight are not referenced later in this constructor.
            int outwidth  = mfxDecParams.mfx.FrameInfo.CropW;
            int outheight = mfxDecParams.mfx.FrameInfo.CropH;



            // Query number of required surfaces for VPP
            //mfxFrameAllocRequest[] VPPRequest = new mfxFrameAllocRequest[2];     // [0] - in, [1] - out
            TwoMfxFrameAllocRequest VPPRequest;

            sts = UnsafeNativeMethods.MFXVideoVPP_QueryIOSurf(session, &mfxVPPParams, (mfxFrameAllocRequest *)&VPPRequest);
            if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
            {
                // Partial acceleration is recorded as a warning, not a failure.
                warnings.Add(nameof(UnsafeNativeMethods.MFXVideoVPP_QueryIOSurf), sts);
                sts = 0;
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, "vpp.queryiosurf");



            // Query number required surfaces for dec
            mfxFrameAllocRequest DecRequest;

            sts = UnsafeNativeMethods.MFXVideoDECODE_QueryIOSurf(session, &mfxDecParams, &DecRequest);
            if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
            {
                warnings.Add(nameof(UnsafeNativeMethods.MFXVideoDECODE_QueryIOSurf), sts);
                sts = 0;
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, nameof(UnsafeNativeMethods.MFXVideoDECODE_QueryIOSurf));


            // Query number of required surfaces for enc
            mfxFrameAllocRequest EncRequest = new mfxFrameAllocRequest();

            sts = UnsafeNativeMethods.MFXVideoENCODE_QueryIOSurf(session, &mfxEncParams, &EncRequest);
            if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
            {
                warnings.Add(nameof(UnsafeNativeMethods.MFXVideoENCODE_QueryIOSurf), sts);
                sts = 0;
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, nameof(UnsafeNativeMethods.MFXVideoENCODE_QueryIOSurf));



            // Determine the required number of surfaces for decoder output (VPP input) and for VPP output (encoder input)
            nSurfNumDecVPP = DecRequest.NumFrameSuggested + VPPRequest.In.NumFrameSuggested + mfxVPPParams.AsyncDepth;
            nSurfNumVPPEnc = EncRequest.NumFrameSuggested + VPPRequest.Out.NumFrameSuggested + mfxVPPParams.AsyncDepth;



            {
                // This transcoder only supports system-memory pipelines end-to-end.
                Trace.Assert((mfxEncParams.IOPattern & IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY) != 0);
                Trace.Assert((mfxDecParams.IOPattern & IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY) != 0);

                // Surface dimensions must be 32-aligned; 12 bits/pixel corresponds to NV12.
                UInt16 width        = (UInt16)QuickSyncStatic.ALIGN32(DecRequest.Info.Width);
                UInt16 height       = (UInt16)QuickSyncStatic.ALIGN32(DecRequest.Info.Height);
                int    bitsPerPixel = 12;
                int    surfaceSize  = width * height * bitsPerPixel / 8;

                var decVppSurfaceBuffers = Marshal.AllocHGlobal(surfaceSize * nSurfNumDecVPP);
                var vppEncSurfaceBuffers = Marshal.AllocHGlobal(surfaceSize * nSurfNumVPPEnc);

                // Surface headers are placed in unmanaged memory so native code can hold
                // pointers to them without pinning managed arrays.
                pSurfaces =
                    (mfxFrameSurface1 *)MyAllocHGlobalAndZero(sizeof(mfxFrameSurface1) * nSurfNumDecVPP);

                pSurfaces2 =
                    (mfxFrameSurface1 *)MyAllocHGlobalAndZero(sizeof(mfxFrameSurface1) * nSurfNumVPPEnc);

                // NV12 layout: Y plane, then interleaved UV plane at offset width*height.
                for (int i = 0; i < nSurfNumDecVPP; i++)
                {
                    pSurfaces[i]            = new mfxFrameSurface1();
                    pSurfaces[i].Info       = DecRequest.Info;
                    pSurfaces[i].Data.Y_ptr = (byte *)decVppSurfaceBuffers + i * surfaceSize;
                    pSurfaces[i].Data.U_ptr = pSurfaces[i].Data.Y_ptr + width * height;
                    pSurfaces[i].Data.V_ptr = pSurfaces[i].Data.U_ptr + 1;
                    pSurfaces[i].Data.Pitch = width;
                }
                for (int i = 0; i < nSurfNumVPPEnc; i++)
                {
                    pSurfaces2[i]            = new mfxFrameSurface1();
                    pSurfaces2[i].Info       = EncRequest.Info;
                    pSurfaces2[i].Data.Y_ptr = (byte *)vppEncSurfaceBuffers + i * surfaceSize;
                    pSurfaces2[i].Data.U_ptr = pSurfaces2[i].Data.Y_ptr + width * height;
                    pSurfaces2[i].Data.V_ptr = pSurfaces2[i].Data.U_ptr + 1;
                    pSurfaces2[i].Data.Pitch = width;
                }
            }



            // Initialize all three pipeline components.
            sts = UnsafeNativeMethods.MFXVideoDECODE_Init(session, &mfxDecParams);
            if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
            {
                warnings.Add(nameof(UnsafeNativeMethods.MFXVideoDECODE_Init), sts);
                sts = 0;
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, "decode.init");

            sts = UnsafeNativeMethods.MFXVideoENCODE_Init(session, &mfxEncParams);
            if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
            {
                warnings.Add(nameof(UnsafeNativeMethods.MFXVideoENCODE_Init), sts);
                sts = 0;
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, "encode.init");

            sts = UnsafeNativeMethods.MFXVideoVPP_Init(session, &mfxVPPParams);
            if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
            {
                warnings.Add(nameof(UnsafeNativeMethods.MFXVideoVPP_Init), sts);
                sts = 0;
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, "vpp.init");



            //mfxExtVPPDoNotUse zz;
            //zz.Header.BufferId = BufferId.MFX_EXTBUFF_VPP_DONOTUSE;
            //zz.Header.BufferSz = (uint)sizeof(mfxExtVPPDoUse);
            //mfxExtBuffer** pExtParamsVPPx = stackalloc mfxExtBuffer*[1];
            //pExtParamsVPPx[0] = (mfxExtBuffer*)&zz;
            //var t1 = stackalloc uint[100];
            //zz.AlgList = t1;
            //zz.NumAlg = 100;
            //mfxVideoParam par;
            //par.ExtParam = pExtParamsVPPx;
            //par.NumExtParam = 1;
            //sts = UnsafeNativeMethods.MFXVideoVPP_GetVideoParam(session, &par);
            //Trace.Assert(sts == mfxStatus.MFX_ERR_NONE);
            //Console.WriteLine(zz.NumAlg);
            //for (int i = 0; i < 10; i++)
            //{
            //    Console.WriteLine((BufferId)t1[i]);
            //}
            mfxVideoParam par;



            // Retrieve video parameters selected by encoder.
            // - BufferSizeInKB parameter is required to set bit stream buffer size
            par = new mfxVideoParam();
            sts = UnsafeNativeMethods.MFXVideoENCODE_GetVideoParam(session, &par);
            QuickSyncStatic.ThrowOnBadStatus(sts, "enc.getvideoparams");



            // Create task pool to improve asynchronous performance (greater GPU utilization)

            taskPoolSize = mfxEncParams.AsyncDepth;  // number of tasks that can be submitted, before synchronizing is required
                                                     //  Task* pTasks = stackalloc Task[taskPoolSize];
            pTasks = (Task *)MyAllocHGlobalAndZero(sizeof(Task) * taskPoolSize);
            // GCHandle gch3 = GCHandle.Alloc(pTasks, GCHandleType.Pinned);
            for (int i = 0; i < taskPoolSize; i++)
            {
                // Prepare Media SDK bit stream buffer
                pTasks[i].mfxBS.MaxLength = (uint)(par.mfx.BufferSizeInKB * 1000);
                pTasks[i].mfxBS.Data      = MyAllocHGlobalAndZero((int)pTasks[i].mfxBS.MaxLength);
                Trace.Assert(pTasks[i].mfxBS.Data != IntPtr.Zero);
            }

            // GCHandle gch3 = GCHandle.Alloc(pTasks, GCHandleType.Pinned);
        }
Esempio n. 11
0
        /// <summary>Creates the native video-acceleration helper and initializes it with a frame
        /// memory type derived from the session's actual implementation (D3D9, D3D11 or VAAPI).</summary>
        /// <param name="session">An already-initialized Media SDK session.</param>
        /// <param name="forceSystemMemory">When <c>true</c>, system memory is used regardless of the implementation.</param>
        public unsafe VideoAccelerationSupport(mfxSession session, bool forceSystemMemory = false)
        {
            mfxStatus  sts;
            mfxVersion versionMinimum = new mfxVersion()
            {
                Major = 1, Minor = 3
            };

            acceleratorHandle = VideoAccelerationSupportPInvoke.VideoAccelerationSupport_New();
            Trace.Assert(acceleratorHandle != IntPtr.Zero);

            // The native helper is only built for 64-bit processes.
            if (sizeof(IntPtr) != 8)
            {
                throw new Exception("only x64 supported at this time");
            }

            // Ask the session which implementation was actually selected.
            mfxIMPL actualImpl;

            sts = UnsafeNativeMethods.MFXQueryIMPL(session, &actualImpl);
            QuickSyncStatic.ThrowOnBadStatus(sts, "MFXQueryIMPL");

            // Map the "via" infrastructure bits of the implementation onto a memory type.
            mfxIMPL viaMask = (mfxIMPL.MFX_IMPL_VIA_D3D9 | mfxIMPL.MFX_IMPL_VIA_D3D11 | mfxIMPL.MFX_IMPL_VIA_VAAPI);

            switch (actualImpl & viaMask)
            {
            case mfxIMPL.MFX_IMPL_VIA_D3D11:
                isDirectX11 = true;
                memType     = FrameMemType.D3D11_MEMORY;
                break;

            case mfxIMPL.MFX_IMPL_VIA_D3D9:
                memType = FrameMemType.D3D9_MEMORY;
                break;

            case mfxIMPL.MFX_IMPL_VIA_VAAPI:
                memType = FrameMemType.VAAPI_MEMORY;
                break;
            }

            // Caller override: fall back to plain system memory.
            if (forceSystemMemory)
            {
                memType = FrameMemType.SYSTEM_MEMORY;
            }

            sts = VideoAccelerationSupportPInvoke.VideoAccelerationSupport_Init(acceleratorHandle, session, false, memType);
            QuickSyncStatic.ThrowOnBadStatus(sts, "VideoAccelerationSupport_Init");
        }
Esempio n. 12
0
        // Sample: decode a Motion-JPEG elementary stream whose frames carry UYVY pixels
        // (tagged as YUY2 — see the trick below) and write the raw frames to a .yuv file.
        unsafe static void Main(string[] args)
        {
            // Abort with a message if Intel Quick Sync hardware/driver support is missing.
            ConfirmQuickSyncReadiness.HaltIfNotReady();

            Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
            // keep ascending directories until 'media' folder is found
            for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
            {
                Directory.SetCurrentDirectory("..");
            }
            Directory.SetCurrentDirectory("Media");


            CodecId codecId = CodecId.MFX_CODEC_JPEG;
            FourCC  fourcc  = FourCC.UYVY;  // supported: RGB4, YUY2 NV12 [UYVY through tricks! see below]
            mfxIMPL impl    = mfxIMPL.MFX_IMPL_AUTO;



            string fourccString = fourcc.ToString().Substring(0, 4);  // NOTE(review): not used later in this sample
            string inFilename;

            //inFilename = "BigBuckBunny_320x180.UYVY.enc.jpeg";
            inFilename = "BigBuckBunny_1920x1080.UYVY.enc.jpeg";
            //inFilename = "BigBuckBunny_3840x2160.UYVY.enc.jpeg";
            string outFilename = Path.ChangeExtension(inFilename, ".yuv");

            Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
            Console.WriteLine("Input filename: {0}", inFilename);
            Console.WriteLine();

            if (!File.Exists(inFilename))
            {
                Console.WriteLine("Input file not found. Press any key to exit.");
                Console.ReadKey();
                return;
            }


            Stream         infs, outfs;
            BenchmarkTimer bt = null;  // stays null outside benchmark builds; guarded below


#if !ENABLE_BENCHMARK
            infs  = File.Open(inFilename, FileMode.Open);
            outfs = File.Open(outFilename, FileMode.Create);
#else       // delete this code for most simple example
            // * Benchmark Mode *
            // this block does a couple things:
            //   1. causes the file to be pre-read into memory so we are not timing disk reads.
            //   2. replaces the output stream with a NullStream so nothing gets written to disk.
            //   3. Starts the timer for benchmarking
            // this pre-reads file into memory for benchmarking
            long maximumMemoryToAllocate = (long)4L * 1024 * 1024 * 1024;
            Console.WriteLine("Pre-reading input");
            infs = new PreReadLargeMemoryStream(File.Open(inFilename, FileMode.Open), maximumMemoryToAllocate);



            Console.WriteLine("Input read");

            outfs = new NullStream();
            bt    = new BenchmarkTimer();
            bt.Start();

            int minimumFrames = 4000;
#endif



            Console.WriteLine("Output filename: {0}",
                              Path.GetFileName((outfs as FileStream)?.Name ?? "NO OUTPUT"));
            Console.WriteLine();

            // Decoded frames are to be delivered in system memory.
            var outIOPattern = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY;


            // The encoder cannot encode UYVY, but if you are the only decoder of the JPEG
            // files, you can encode UYVY as YUY2 and everything is good.
            if (fourcc == FourCC.UYVY)
            {
                fourcc = FourCC.YUY2;
            }

            // Probe the file header for stream parameters, then override the output pixel format.
            mfxVideoParam decoderParameters = QuickSyncStatic.ReadFileHeaderInfo(codecId, impl, infs, outIOPattern);
            decoderParameters.mfx.FrameInfo.FourCC = fourcc;

            AssignChromaFormat(fourcc, ref decoderParameters);


            var decoder = new StreamDecoder(infs, decoderParameters, null, impl);

#if ENABLE_BENCHMARK     // delete this code for most simple example
            decoder.benchmarkNeverStopMode = true;
#endif

            string impltext = QuickSyncStatic.ImplementationString(decoder.lowLevelDecoder.session);
            Console.WriteLine("Implementation = {0}", impltext);

            // not needed
            //var formatConverter = new NV12ToXXXXConverter(fourcc, decoder.width, decoder.height);

            int width  = decoderParameters.mfx.FrameInfo.CropW;
            int height = decoderParameters.mfx.FrameInfo.CropH;
            var tmpbuf = new byte[width * height * 2];  // 2 bytes per pixel for packed YUY2

            int count = 0;

            foreach (var frame in decoder.GetFrames())
            {
                //var frameBytes = formatConverter.ConvertFromNV12(frame.Data);        // Convert to format requested


                Trace.Assert(frame.Data.Pitch == width * 2);  // yuy2 only

                // Copy the packed plane straight out of the frame surface into tmpbuf.
                fixed(byte *aa = &tmpbuf[0])
                FastMemcpyMemmove.memcpy((IntPtr)aa, frame.Data.Y, height * width * 2);

                outfs.Write(tmpbuf, 0, tmpbuf.Length);

                if (++count % 100 == 0)
                {
                    Console.Write("Frame {0}\r", count);
                }

#if ENABLE_BENCHMARK     // delete this code for most simple example
                if (count > minimumFrames)
                {
                    break;
                }
#endif
            }

            if (bt != null)
            {
                bt.StopAndReport(count, infs.Position, outfs.Position);
            }

            infs.Close();
            outfs.Close();

            Console.WriteLine("Decoded {0} frames", count);

            // make sure program always waits for user, except F5-Release run
            if (Debugger.IsAttached ||
                Environment.GetEnvironmentVariable("VisualStudioVersion") == null)
            {
                Console.WriteLine("done - press a key to exit");
                Console.ReadKey();
            }
        }
Esempio n. 13
0
        // Sample: encode a raw frame file (NV12 or a format convertible to NV12) into an
        // H.264 (AVC) elementary stream using VBR rate control at 2000 kbps.
        static void Main(string[] args)
        {
            // Abort with a message if Intel Quick Sync hardware/driver support is missing.
            ConfirmQuickSyncReadiness.HaltIfNotReady();

            Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
            // keep ascending directories until 'media' folder is found
            for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
            {
                Directory.SetCurrentDirectory("..");
            }
            Directory.SetCurrentDirectory("Media");


            int     width, height;
            string  inFilename;
            mfxIMPL impl   = mfxIMPL.MFX_IMPL_AUTO;
            FourCC  fourcc = FourCC.NV12;   // supported: RGB3 RGB4 BGR4 BGR3 NV12 I420 IYUV YUY2 UYVY YV12 P411 P422

            inFilename = "BigBuckBunny_320x180." + fourcc + ".yuv"; width = 320; height = 180;
            //inFilename = "BigBuckBunny_1920x1080." + fourcc + ".yuv"; width = 1920; height = 1080;


            string outFilename = Path.ChangeExtension(inFilename, "enc.264");


            Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
            Console.WriteLine("Input filename: {0}", inFilename);
            Console.WriteLine("Input width: {0}  Input height: {1}", width, height);
            Console.WriteLine();

            if (!File.Exists(inFilename))
            {
                Console.WriteLine("Input file not found.");
                Console.WriteLine("Please let Decoder1 run to completion to create input file");
                Console.WriteLine("Press any key to exit.");
                Console.ReadKey();
                return;
            }


            Stream         infs, outfs;
            BenchmarkTimer bt = null;  // stays null outside benchmark builds; guarded below


#if !ENABLE_BENCHMARK
            infs  = File.Open(inFilename, FileMode.Open);
            outfs = File.Open(outFilename, FileMode.Create);
#else       // delete this code for most simple example
            // * Benchmark Mode *
            // this block does a couple things:
            //   1. causes the file to be pre-read into memory so we are not timing disk reads.
            //   2. replaces the output stream with a NullStream so nothing gets written to disk.
            //   3. Starts the timer for benchmarking
            // this pre-reads file into memory for benchmarking
            long maximumMemoryToAllocate = (long)4L * 1024 * 1024 * 1024;
            Console.WriteLine("Pre-reading input");
            infs = new PreReadLargeMemoryStream(File.Open(inFilename, FileMode.Open), maximumMemoryToAllocate);
            Console.WriteLine("Input read");

            outfs = new NullStream();
            bt    = new BenchmarkTimer();
            bt.Start();

            int minimumFrames = 4000;
#endif
            Console.WriteLine("Output filename: {0}",
                              Path.GetFileName((outfs as FileStream)?.Name ?? "NO OUTPUT"));
            Console.WriteLine();


            // Encoder configuration: AVC, balanced speed/quality, 2000 kbps VBR, 30 fps progressive.
            mfxVideoParam mfxEncParams = new mfxVideoParam();
            mfxEncParams.mfx.CodecId                 = CodecId.MFX_CODEC_AVC;
            mfxEncParams.mfx.TargetUsage             = TargetUsage.MFX_TARGETUSAGE_BALANCED;
            mfxEncParams.mfx.TargetKbps              = 2000;
            mfxEncParams.mfx.RateControlMethod       = RateControlMethod.MFX_RATECONTROL_VBR;
            mfxEncParams.mfx.FrameInfo.FrameRateExtN = 30;
            mfxEncParams.mfx.FrameInfo.FrameRateExtD = 1;
            mfxEncParams.mfx.FrameInfo.FourCC        = FourCC.NV12;
            mfxEncParams.mfx.FrameInfo.ChromaFormat  = ChromaFormat.MFX_CHROMAFORMAT_YUV420;
            mfxEncParams.mfx.FrameInfo.PicStruct     = PicStruct.MFX_PICSTRUCT_PROGRESSIVE;
            mfxEncParams.mfx.FrameInfo.CropX         = 0;
            mfxEncParams.mfx.FrameInfo.CropY         = 0;
            mfxEncParams.mfx.FrameInfo.CropW         = (ushort)width;
            mfxEncParams.mfx.FrameInfo.CropH         = (ushort)height;
            // Width must be a multiple of 16
            // Height must be a multiple of 16 in case of frame picture and a multiple of 32 in case of field picture
            mfxEncParams.mfx.FrameInfo.Width  = QuickSyncStatic.ALIGN16(width);
            mfxEncParams.mfx.FrameInfo.Height = QuickSyncStatic.AlignHeightTo32or16(height, mfxEncParams.mfx.FrameInfo.PicStruct);
            mfxEncParams.IOPattern            = IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY; // must be 'in system memory'
            mfxEncParams.AsyncDepth           = 4;                                        // Pipeline depth. Best at 4


            BitStreamChunk bsc = new BitStreamChunk(); //where we receive compressed frame data

            //var encoder = new LowLevelEncoder2(mfxEncParams, impl);
            ILowLevelEncoder encoder = new LowLevelEncoder(mfxEncParams, impl);


            string impltext = QuickSyncStatic.ImplementationString(encoder.session);
            Console.WriteLine("Implementation = {0}", impltext);
            //string memtext = QuickSyncStatic.ImplementationString(encoder.deviceSetup.memType);
            //Console.WriteLine("Memory type = {0}", memtext);

            // Converts each raw input frame from 'fourcc' layout into the NV12 surface layout.
            var formatConverter = new NV12FromXXXXConverter(fourcc, width, height);


            int inputFrameLength = width * height * VideoUtility.GetBitsPerPixel(fourcc) / 8;

            byte[] uncompressed = new byte[inputFrameLength];

            int count = 0;

            // Read exactly one frame per iteration; a short read means end of input.
            while (infs.Read(uncompressed, 0, inputFrameLength) == inputFrameLength)
            {
                int ix = encoder.GetFreeFrameIndex();  //get index of free surface

                formatConverter.ConvertToNV12FrameSurface(ref encoder.Frames[ix], uncompressed, 0);

                encoder.EncodeFrame(ix, ref bsc);

                // bytesAvailable can be 0 while the asynchronous pipeline is still filling.
                if (bsc.bytesAvailable > 0)
                {
                    outfs.Write(bsc.bitstream, 0, bsc.bytesAvailable);

                    if (++count % 100 == 0)
                    {
                        Console.Write("Frame {0}\r", count);
                    }
                }

#if ENABLE_BENCHMARK     // delete this code for most simple example
                if (infs.Position + inputFrameLength - 1 >= infs.Length)
                {
                    infs.Position = 0;
                }
                if (count >= minimumFrames)
                {
                    break;
                }
#endif
            }



            // Drain any frames still buffered in the encoder pipeline.
            while (encoder.Flush(ref bsc))
            {
                if (bsc.bytesAvailable > 0)
                {
                    outfs.Write(bsc.bitstream, 0, bsc.bytesAvailable);

                    if (++count % 100 == 0)
                    {
                        Console.Write("Frame {0}\r", count);
                    }
                }
            }

            if (bt != null)
            {
                bt.StopAndReport(count, infs.Position, outfs.Position);
            }

            infs.Close();
            outfs.Close();

            encoder.Dispose();

            Console.WriteLine("Encoded {0} frames", count);

            if (Debugger.IsAttached)
            {
                Console.WriteLine("done - press a key to exit");
                Console.ReadKey();
            }
        }
Esempio n. 14
0
        /// <summary>Builds a complete transcoder configuration (decode, VPP and encode
        /// parameter sets) by probing the header of the supplied bitstream.</summary>
        /// <param name="inStream">Seekable stream positioned at the start of the input
        /// bitstream; its position is restored after the header probe.</param>
        /// <param name="inputCodecId">Codec of the input bitstream.</param>
        /// <param name="outputCodecId">Codec to encode the output with.</param>
        /// <param name="implementation">The Media SDK implementation to use.</param>
        /// <param name="useOpaqueSurfaces">if set to <c>true</c> [use opaque surfaces].</param>
        /// <returns>A populated <see cref="TranscoderConfiguration"/>.</returns>
        public static TranscoderConfiguration BuildTranscoderConfigurationFromStream(Stream inStream, CodecId inputCodecId, CodecId outputCodecId, mfxIMPL implementation = mfxIMPL.MFX_IMPL_AUTO, bool useOpaqueSurfaces = true)
        {
            var configuration = new TranscoderConfiguration();

            // Probe the bitstream header for the decode parameters, then rewind so the
            // caller's stream looks untouched.
            long savedPosition = inStream.Position;

            configuration.decParams = QuickSyncStatic.DecodeHeader(inStream, inputCodecId, implementation);
            inStream.Position       = savedPosition;

            //configuration.decParams.mfx.CodecId  was set by DecodeHeader above
            //configuration.encParams.mfx.CodecId  gets set below by TranscoderSetupEncoderParameters

            int cropWidth  = configuration.decParams.mfx.FrameInfo.CropW;
            int cropHeight = configuration.decParams.mfx.FrameInfo.CropH;

            configuration.vppParams = TranscoderSetupVPPParameters(cropWidth, cropHeight);
            configuration.encParams = TranscoderSetupEncoderParameters(cropWidth, cropHeight, outputCodecId);

            // All three stages exchange frames through system memory.
            configuration.decParams.IOPattern = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
            configuration.vppParams.IOPattern = IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY | IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
            configuration.encParams.IOPattern = IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY;

            // Configure Media SDK to keep more operations in flight
            // - AsyncDepth represents the number of tasks that can be submitted, before synchronizing is required
            const ushort pipelineDepth = 4;

            configuration.decParams.AsyncDepth = pipelineDepth;
            configuration.encParams.AsyncDepth = pipelineDepth;
            configuration.vppParams.AsyncDepth = pipelineDepth;

            return configuration;
        }
Esempio n. 15
0
        // Sample: decode an H.264 (AVC) elementary stream and write raw frames to a .yuv
        // file, converting each decoded NV12 frame to the requested FourCC on the way out.
        static public void Main()
        {
            // Abort with a message if Intel Quick Sync hardware/driver support is missing.
            ConfirmQuickSyncReadiness.HaltIfNotReady();

            Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
            // keep ascending directories until 'media' folder is found
            for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
            {
                Directory.SetCurrentDirectory("..");
            }
            Directory.SetCurrentDirectory("Media");

            mfxIMPL impl    = mfxIMPL.MFX_IMPL_AUTO; //automatic GPU/CPU mode
            CodecId codecId = CodecId.MFX_CODEC_AVC;
            // avc fourcc supported: RGB3 RGB4 BGR4 BGR3 NV12 I420 IYUV YUY2 UYVY YV12 P411 P422
            FourCC fourcc       = FourCC.NV12;
            string fourccString = fourcc.ToString().Substring(0, 4);

            string inFilename;

            inFilename = "BigBuckBunny_320x180.264";
            //inFilename = "BigBuckBunny_1920x1080.264";
            //inFilename = "BigBuckBunny_3840x2160.264";
            string outFilename = Path.ChangeExtension(inFilename, fourccString + ".yuv");

            Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
            Console.WriteLine("Input filename: {0}", inFilename);
            Console.WriteLine();

            if (!File.Exists(inFilename))
            {
                Console.WriteLine("Input file not found. Press any key to exit.");
                Console.ReadKey();
                return;
            }


            Stream         infs, outfs;
            BenchmarkTimer bt = null;  // stays null outside benchmark builds; guarded below


#if !ENABLE_BENCHMARK
            infs  = File.Open(inFilename, FileMode.Open);
            outfs = File.Open(outFilename, FileMode.Create);
#else       // delete this code for most simple example
            // * Benchmark Mode *
            // this block does a couple things:
            //   1. causes the file to be pre-read into memory so we are not timing disk reads.
            //   2. replaces the output stream with a NullStream so nothing gets written to disk.
            //   3. Starts the timer for benchmarking
            // this pre-reads file into memory for benchmarking
            // maximumMemoryToAllocate = (long)4L * 1024 * 1024 * 1024;
            Console.WriteLine("Pre-reading input");
            infs = new PreReadLargeMemoryStream(File.Open(inFilename, FileMode.Open));
            Console.WriteLine("Input read");

            outfs = new NullStream();
            bt    = new BenchmarkTimer();
            bt.Start();

            //int minimumFrames = 4000;
#endif

            Console.WriteLine("Output filename: {0}",
                              Path.GetFileName((outfs as FileStream)?.Name ?? "NO OUTPUT"));
            Console.WriteLine();

            // Decoded frames are to be delivered in system memory.
            var outIOPattern = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY;

            mfxVideoParam decoderParameters = QuickSyncStatic.ReadFileHeaderInfo(codecId, impl, infs, outIOPattern);
            decoderParameters.mfx.FrameInfo.FourCC = fourcc;

            // NOTE(review): decoderParameters is populated above (including the FourCC
            // override) but is not passed to this constructor overload, so it appears
            // unused — confirm whether StreamDecoder(infs, decoderParameters, null, impl)
            // was intended here, as in the JPEG decode sample.
            var decoder = new StreamDecoder(infs, CodecId.MFX_CODEC_AVC, impl, outIOPattern);

            string impltext = QuickSyncStatic.ImplementationString(decoder.lowLevelDecoder.session);
            Console.WriteLine("Implementation = {0}", impltext);



            // Converts each decoded NV12 frame to the requested output FourCC.
            var formatConverter = new NV12ToXXXXConverter(fourcc, decoder.width, decoder.height);



            int count = 0;



            foreach (var frame in decoder.GetFrames())
            {
                var frameBytes = formatConverter.ConvertFromNV12(frame.Data);        // Convert to format requested
                outfs.Write(frameBytes, 0, frameBytes.Length);



                if (++count % 100 == 0)
                {
                    Console.Write("Frame {0}\r", count);
                }
            }
            Console.WriteLine("Decoded {0} frames", count);
            Console.WriteLine();

            if (bt != null)
            {
                bt.StopAndReport(count, infs.Position, outfs.Position);
            }

            infs.Close();
            outfs.Close();



            // make sure program always waits for user, except F5-Release run
            // (note: && binds tighter than ||, so the UnitTest guard applies only to the
            // Debugger.IsAttached term — presumably intended to skip the prompt when run
            // under unit tests; confirm)
            if (!UnitTest.IsRunning && Debugger.IsAttached ||
                Environment.GetEnvironmentVariable("VisualStudioVersion") == null)
            {
                Console.WriteLine("done - press a key to exit");
                Console.ReadKey();
            }
        }
Esempio n. 16
0
        // Sample: encode a raw UYVY frame file to Motion-JPEG. The encoder cannot take
        // UYVY directly, so the frames are submitted tagged as YUY2 (see trick below).
        unsafe static void Main(string[] args)
        {
            // Abort with a message if Intel Quick Sync hardware/driver support is missing.
            ConfirmQuickSyncReadiness.HaltIfNotReady();

            Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
            // keep ascending directories until 'media' folder is found
            for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
            {
                Directory.SetCurrentDirectory("..");
            }
            Directory.SetCurrentDirectory("Media");


            CodecId codecId = CodecId.MFX_CODEC_JPEG;
            FourCC  fourcc  = FourCC.UYVY;  // supported: RGB4, YUY2 NV12 [UYVY through tricks! see below]
            mfxIMPL impl    = mfxIMPL.MFX_IMPL_AUTO;


            int    width, height;
            string inFilename;

            //inFilename = "BigBuckBunny_320x180." + fourcc + ".yuv"; width = 320; height = 180;
            inFilename = "BigBuckBunny_1920x1080." + fourcc + ".yuv"; width = 1920; height = 1080;
            string outFilename = Path.ChangeExtension(inFilename, "enc.jpeg");


            Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
            Console.WriteLine("Input filename: {0}", inFilename);
            Console.WriteLine("Input width: {0}  Input height: {1}", width, height);


            if (!File.Exists(inFilename))
            {
                Console.WriteLine("Input file not found.");
                Console.WriteLine("Please let Decoder1 run to completion to create input file");
                Console.WriteLine("Press any key to exit.");
                Console.ReadKey();
                return;
            }


            Stream         infs, outfs;
            BenchmarkTimer bt = null;  // stays null outside benchmark builds; guarded below


#if !ENABLE_BENCHMARK
            infs  = File.Open(inFilename, FileMode.Open);
            outfs = File.Open(outFilename, FileMode.Create);
#else       // delete this code for most simple example
            // * Benchmark Mode *
            // this block does a couple things:
            //   1. causes the file to be pre-read into memory so we are not timing disk reads.
            //   2. replaces the output stream with a NullStream so nothing gets written to disk.
            //   3. Starts the timer for benchmarking
            // this pre-reads file into memory for benchmarking
            long maximumMemoryToAllocate = (long)4L * 1024 * 1024 * 1024;
            Console.WriteLine("Pre-reading input");
            infs = new PreReadLargeMemoryStream(File.Open(inFilename, FileMode.Open), maximumMemoryToAllocate);
            Console.WriteLine("Input read");

            outfs = new NullStream();
            bt    = new BenchmarkTimer();
            bt.Start();

            int minimumFrames = 4000;
#endif

            Console.WriteLine("Output filename: {0}",
                              Path.GetFileName((outfs as FileStream)?.Name ?? "NO OUTPUT"));
            Console.WriteLine();

            // The encoder cannot encode UYVY, but if you are the only decoder of the JPEG
            // files, you can encode UYVY as YUY2 and everything is good.
            if (fourcc == FourCC.UYVY)
            {
                fourcc = FourCC.YUY2;
            }


            // Encoder configuration: JPEG, quality 90, interleaved scan, 30 fps progressive.
            mfxVideoParam mfxEncParams = new mfxVideoParam();
            mfxEncParams.mfx.CodecId     = codecId;
            mfxEncParams.mfx.TargetUsage = TargetUsage.MFX_TARGETUSAGE_BALANCED;
            //mfxEncParams.mfx.TargetKbps = 2000;
            //mfxEncParams.mfx.RateControlMethod = RateControlMethod.MFX_RATECONTROL_VBR;
            mfxEncParams.mfx.Quality                 = 90;
            mfxEncParams.mfx.Interleaved             = 1;
            mfxEncParams.mfx.FrameInfo.FrameRateExtN = 30;
            mfxEncParams.mfx.FrameInfo.FrameRateExtD = 1;
            mfxEncParams.mfx.FrameInfo.FourCC        = fourcc;


            // Pick the chroma format that matches the input pixel layout.
            switch (fourcc)
            {
            case FourCC.NV12:
            case FourCC.YV12:
                mfxEncParams.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV420;
                break;

            case FourCC.YUY2:
                // NOTE(review): the first assignment below is immediately overwritten by the
                // second; it is kept only as a breadcrumb that YUV422V was tried and found
                // fatal on Skylake.
                mfxEncParams.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV422V;     // fatal on SKYLAKE!
                mfxEncParams.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV422;
                break;

            case FourCC.RGB4:
                mfxEncParams.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV444;
                break;

            default:
                Trace.Assert(false);
                break;
            }


            mfxEncParams.mfx.FrameInfo.PicStruct = PicStruct.MFX_PICSTRUCT_PROGRESSIVE;
            mfxEncParams.mfx.FrameInfo.CropX     = 0;
            mfxEncParams.mfx.FrameInfo.CropY     = 0;
            mfxEncParams.mfx.FrameInfo.CropW     = (ushort)width;
            mfxEncParams.mfx.FrameInfo.CropH     = (ushort)height;
            // Width must be a multiple of 16
            // Height must be a multiple of 16 in case of frame picture and a multiple of 32 in case of field picture
            mfxEncParams.mfx.FrameInfo.Width  = QuickSyncStatic.ALIGN16(width);
            mfxEncParams.mfx.FrameInfo.Height = QuickSyncStatic.AlignHeightTo32or16(height, mfxEncParams.mfx.FrameInfo.PicStruct);
            mfxEncParams.IOPattern            = IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY; // must be 'in system memory'
            mfxEncParams.AsyncDepth           = 4;                                        // Pipeline depth. Best at 4


            // NOTE(review): these overwrite the ALIGN16/AlignHeightTo32or16 values assigned
            // just above — the 32-alignment wins. Confirm the earlier assignments are
            // intentionally superseded for the JPEG path.
            mfxEncParams.mfx.FrameInfo.Width  = QuickSyncStatic.ALIGN32(width);
            mfxEncParams.mfx.FrameInfo.Height = QuickSyncStatic.ALIGN32(height);


            BitStreamChunk bsc = new BitStreamChunk(); //where we receive compressed frame data

            ILowLevelEncoder encoder = new LowLevelEncoder(mfxEncParams, impl);
            //ILowLevelEncoder encoder = new LowLevelEncoder(mfxEncParams, impl);


            string impltext = QuickSyncStatic.ImplementationString(encoder.session);
            Console.WriteLine("Implementation = {0}", impltext);


            // not needed for YUY2 encoding
            //var formatConverter = new NV12FromXXXXConverter(fileFourcc, width, height);


            int inputFrameLength = width * height * VideoUtility.GetBitsPerPixel(fourcc) / 8;

            byte[] uncompressed = new byte[inputFrameLength];

            int count = 0;

            // we do not call encoder.LockFrame() and encoder.UnlockFrame() as this example is
            // for system memory.


            // Read exactly one frame per iteration; a short read means end of input.
            while (infs.Read(uncompressed, 0, inputFrameLength) == inputFrameLength)
            {
                int ix = encoder.GetFreeFrameIndex();  // index of a free surface in the encoder's surface array

                //formatConverter.ConvertToNV12FrameSurface(ref encoder.Frames[ix], uncompressed, 0);
                mfxFrameSurface1 *f = (mfxFrameSurface1 *)encoder.Frames[ix];


                // Copy the raw frame directly into the surface planes (no format conversion).
                switch (fourcc)
                {
                case FourCC.NV12:
                    Trace.Assert(f->Data.Pitch == width * 1);

                    fixed(byte *aa = &uncompressed[0])
                    FastMemcpyMemmove.memcpy(f->Data.Y, (IntPtr)aa, height * width);

                    fixed(byte *aa = &uncompressed[height * width])
                    FastMemcpyMemmove.memcpy(f->Data.UV, (IntPtr)aa, height / 2 * width);

                    break;

                case FourCC.YUY2:

                    Trace.Assert(f->Data.Pitch == width * 2);

                    fixed(byte *aa = &uncompressed[0])
                    FastMemcpyMemmove.memcpy(f->Data.Y, (IntPtr)aa, height * width * 2);

                    break;

                default:
                    Trace.Assert(false);
                    break;
                }

                encoder.EncodeFrame(ix, ref bsc);

                // bytesAvailable can be 0 while the asynchronous pipeline is still filling.
                if (bsc.bytesAvailable > 0)
                {
                    outfs.Write(bsc.bitstream, 0, bsc.bytesAvailable);

                    if (++count % 100 == 0)
                    {
                        Console.Write("Frame {0}\r", count);
                    }
                }

#if ENABLE_BENCHMARK     // delete this code for most simple example
                if (infs.Position + inputFrameLength - 1 >= infs.Length)
                {
                    infs.Position = 0;
                }
                if (count >= minimumFrames)
                {
                    break;
                }
#endif
            }

            // Drain any frames still buffered in the encoder pipeline.
            while (encoder.Flush(ref bsc))
            {
                if (bsc.bytesAvailable > 0)
                {
                    outfs.Write(bsc.bitstream, 0, bsc.bytesAvailable);

                    if (++count % 100 == 0)
                    {
                        Console.Write("Frame {0}\r", count);
                    }
                }
            }

            if (bt != null)
            {
                bt.StopAndReport(count, infs.Position, outfs.Position);
            }

            infs.Close();
            outfs.Close();

            encoder.Dispose();

            Console.WriteLine("Encoded {0} frames", count);

            // make sure program always waits for user, except F5-Release run
            if (Debugger.IsAttached ||
                Environment.GetEnvironmentVariable("VisualStudioVersion") == null)
            {
                Console.WriteLine("done - press a key to exit");
                Console.ReadKey();
            }
        }
        /// <summary>Initializes a new instance of the <see cref="LowLevelEncoderCSharp"/> class:
        /// creates a Media SDK session, queries and initializes the encoder, allocates
        /// system-memory frame surfaces and a pool of output bitstream tasks.</summary>
        /// <param name="mfxEncParams">The encoder parameters.</param>
        /// <param name="impl">The implementation.</param>
        public LowLevelEncoderCSharp(mfxVideoParam mfxEncParams, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO)
        {
            mfxStatus sts;

            session = new mfxSession();
            var ver = new mfxVersion()
            {
                Major = 1, Minor = 3
            };

            fixed(mfxSession *s = &session)
            sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);

            QuickSyncStatic.ThrowOnBadStatus(sts, "MFXInit");
            //deviceSetup = new DeviceSetup(session, false);

            // Query lets the implementation correct unsupported parameter values;
            // a positive status is a warning (parameters were adjusted), not a failure.
            sts = UnsafeNativeMethods.MFXVideoENCODE_Query(session, &mfxEncParams, &mfxEncParams);
            if (sts > 0)
            {
                warnings.Add(nameof(UnsafeNativeMethods.MFXVideoENCODE_Query), sts);
                sts = 0;
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, "encodequery");


            mfxFrameAllocRequest EncRequest;

            sts = UnsafeNativeMethods.MFXVideoENCODE_QueryIOSurf(session, &mfxEncParams, &EncRequest);
            QuickSyncStatic.ThrowOnBadStatus(sts, "queryiosurf");

            EncRequest.NumFrameSuggested = (ushort)(EncRequest.NumFrameSuggested + mfxEncParams.AsyncDepth);

            EncRequest.Type |= (FrameMemoryType)0x2000; // WILL_WRITE; // This line is only required for Windows DirectX11 to ensure that surfaces can be written to by the application

            UInt16 numSurfaces = EncRequest.NumFrameSuggested;

            // - Width and height of buffer must be aligned, a multiple of 32
            // - Frame surface array keeps pointers all surface planes and general frame info

            UInt16 width        = (UInt16)QuickSyncStatic.ALIGN32(mfxEncParams.mfx.FrameInfo.Width);
            UInt16 height       = (UInt16)QuickSyncStatic.ALIGN32(mfxEncParams.mfx.FrameInfo.Height);
            int    bitsPerPixel = VideoUtility.GetBitsPerPixel(mfxEncParams.mfx.FrameInfo.FourCC);
            int    surfaceSize  = width * height * bitsPerPixel / 8;

            // One unmanaged allocation backs every surface; surface i owns the
            // surfaceSize-byte slice starting at offset i * surfaceSize.
            IntPtr surfaceBuffers = Marshal.AllocHGlobal(surfaceSize * numSurfaces);


            // Allocate surface headers (mfxFrameSurface1) for the encoder and point their
            // plane pointers into the unmanaged buffer according to the FourCC layout.
            Frames = new mfxFrameSurface1[numSurfaces];
            for (int i = 0; i < numSurfaces; i++)
            {
                Frames[i]      = new mfxFrameSurface1();
                Frames[i].Info = mfxEncParams.mfx.FrameInfo;

                switch (mfxEncParams.mfx.FrameInfo.FourCC)
                {
                case FourCC.NV12:
                    // NV12: full-size Y plane followed by an interleaved UV plane.
                    Frames[i].Data.Y_ptr = (byte *)surfaceBuffers + i * surfaceSize;
                    Frames[i].Data.U_ptr = Frames[i].Data.Y_ptr + width * height;
                    Frames[i].Data.V_ptr = Frames[i].Data.U_ptr + 1;
                    Frames[i].Data.Pitch = width;
                    break;

                case FourCC.YUY2:
                    // YUY2: packed Y0 U Y1 V, two bytes per pixel.
                    Frames[i].Data.Y_ptr = (byte *)surfaceBuffers + i * surfaceSize;
                    Frames[i].Data.U_ptr = Frames[i].Data.Y_ptr + 1;
                    Frames[i].Data.V_ptr = Frames[i].Data.U_ptr + 3;
                    Frames[i].Data.Pitch = (ushort)(width * 2);
                    break;

                default:      //find sysmem_allocator.cpp for more help
                    throw new NotImplementedException();
                }
            }

            // Cache the (pinned below) addresses of the surface headers for fast lookup.
            frameIntPtrs = new IntPtr[Frames.Length];
            for (int i = 0; i < Frames.Length; i++)
            {
                fixed(mfxFrameSurface1 *a = &Frames[i])
                frameIntPtrs[i] = (IntPtr)a;
            }


            sts = UnsafeNativeMethods.MFXVideoENCODE_Init(session, &mfxEncParams);
            if (sts > 0)
            {
                warnings.Add(nameof(UnsafeNativeMethods.MFXVideoENCODE_Init), sts);
                sts = 0;
            }
            QuickSyncStatic.ThrowOnBadStatus(sts, "encodeinit");

            mfxVideoParam par;

            // BUGFIX: the return status of GetVideoParam was previously discarded and the
            // stale 'sts' from MFXVideoENCODE_Init was checked instead.
            sts = UnsafeNativeMethods.MFXVideoENCODE_GetVideoParam(session, &par);
            QuickSyncStatic.ThrowOnBadStatus(sts, "encodegetvideoparam");


            // from mediasdkjpeg-man.pdf:
            //   BufferSizeInKB = 4 + (Width * Height * BytesPerPx + 1023) / 1024;
            // where Width and Height are the picture dimensions in pixels and BytesPerPx is
            // 1 for monochrome, 1.5 for NV12/YV12, 2 for YUY2 and 3 for RGB32 (alpha is not
            // encoded).  3 is used here as a safe upper bound for any input format.
            // BUGFIX: the previous expression divided the whole sum (including the
            // constant 4) by 1000 instead of applying the documented formula.
            if (par.mfx.BufferSizeInKB == 0 && mfxEncParams.mfx.CodecId == CodecId.MFX_CODEC_JPEG)
            {
                par.mfx.BufferSizeInKB = (ushort)(4 + (mfxEncParams.mfx.FrameInfo.CropW * mfxEncParams.mfx.FrameInfo.CropH * 3 + 1023) / 1024);
            }


            // Create task pool to improve asynchronous performance (greater GPU utilization)
            int taskPoolSize = mfxEncParams.AsyncDepth;  // number of tasks that can be submitted, before synchronizing is required

            pTasks = new Task[taskPoolSize];

            for (int i = 0; i < taskPoolSize; i++)
            {
                // Prepare Media SDK bit stream buffer
                pTasks[i].mfxBS.MaxLength = (uint)(par.mfx.BufferSizeInKB * 1000);
                pTasks[i].mfxBS.Data      = Marshal.AllocHGlobal((int)pTasks[i].mfxBS.MaxLength);
                Trace.Assert(pTasks[i].mfxBS.Data != IntPtr.Zero);
            }

            // Pin the managed arrays whose addresses were handed to native code above.
            pinningHandles.Add(GCHandle.Alloc(pTasks, GCHandleType.Pinned));
            pinningHandles.Add(GCHandle.Alloc(Frames, GCHandleType.Pinned));
        }
Esempio n. 18
0
        /// <summary>Example entry point: transcodes an H.264 elementary stream to H.264
        /// with the low-level transcoder, reporting progress every 100 frames.</summary>
        /// <param name="args">Command line arguments (unused).</param>
        static public void Main(string[] args)
        {
            ConfirmQuickSyncReadiness.HaltIfNotReady();

            Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
            // keep ascending directories until 'Media' folder is found
            for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
            {
                Directory.SetCurrentDirectory("..");
            }
            Directory.SetCurrentDirectory("Media");

            mfxIMPL impl            = mfxIMPL.MFX_IMPL_AUTO;
            CodecId inputCodecId    = CodecId.MFX_CODEC_AVC;
            CodecId outputCodecId   = CodecId.MFX_CODEC_AVC;
            string  outputExtension = ".transcoded.264"; // this should match outputCodecId above


            string inFilename = "BigBuckBunny_320x180.264";
            //string inFilename = "BigBuckBunny_1920x1080.264";
            string outFilename = Path.ChangeExtension(inFilename, outputExtension);

            Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
            Console.WriteLine("Input filename: {0}", inFilename);
            Console.WriteLine("Output filename: {0}", outFilename);
            Console.WriteLine();

            if (!File.Exists(inFilename))
            {
                Console.WriteLine("Input file not found. Press any key to exit.");
                Console.ReadKey();
                return;
            }

            // BUGFIX: 'using' guarantees both streams are closed even if transcoding throws
            // (previously they were only closed on the success path).
            using (var infs = File.Open(inFilename, FileMode.Open))
            using (var outfs = File.Open(outFilename, FileMode.Create))
            {
                var config = TranscoderConfiguration.BuildTranscoderConfigurationFromStream(infs,
                                                                                            inputCodecId,
                                                                                            outputCodecId);

                var transcoder = new LowLevelTranscoderCSharp(config, impl);

                string impltext = QuickSyncStatic.ImplementationString(transcoder.session);

                Console.WriteLine("Implementation = {0}", impltext);
                //string memtext = QuickSyncStatic.ImplementationString(transcoder.deviceSetup.memType);
                //Console.WriteLine("Memory type = {0}", memtext);

                int            count  = 0;
                int            modulo = 100;
                var            buf    = new byte[transcoder.BufferFreeCount];
                BitStreamChunk bsc    = new BitStreamChunk();

                // Main loop: keep the transcoder's input buffer topped up whenever it is at
                // least half empty, draining compressed frames as they become available.
                while (true)
                {
                    int free = transcoder.BufferFreeCount;

                    if (free > transcoder.BufferSize / 2)
                    {
                        // BUGFIX: never request more bytes than the scratch buffer can hold.
                        int n = infs.Read(buf, 0, Math.Min(free, buf.Length));
                        if (n <= 0)
                        {
                            break;
                        }
                        transcoder.PutBitstream(buf, 0, n);
                    }

                    transcoder.GetNextFrame(ref bsc);
                    count = WriteChunk(outfs, ref bsc, count, modulo);
                }

                // Input exhausted: drain any remaining frames, then flush each pipeline stage.
                while (transcoder.GetNextFrame(ref bsc))
                {
                    count = WriteChunk(outfs, ref bsc, count, modulo);
                }

                while (transcoder.Flush1(ref bsc))
                {
                    count = WriteChunk(outfs, ref bsc, count, modulo);
                }

                while (transcoder.Flush2(ref bsc))
                {
                    count = WriteChunk(outfs, ref bsc, count, modulo);
                }

                while (transcoder.Flush3(ref bsc))
                {
                    count = WriteChunk(outfs, ref bsc, count, modulo);
                }

                while (transcoder.Flush4(ref bsc))
                {
                    count = WriteChunk(outfs, ref bsc, count, modulo);
                }

                Console.WriteLine("Frames transcoded {0}", count);
            }

            if (Debugger.IsAttached)
            {
                Console.WriteLine("done - press a key to exit");
                Console.ReadKey();
            }
        }

        /// <summary>Writes any bytes available in <paramref name="bsc"/> to the output
        /// stream and prints a progress line every <paramref name="modulo"/> frames.</summary>
        /// <param name="outfs">Destination stream for the compressed output.</param>
        /// <param name="bsc">Bitstream chunk that may hold encoded bytes.</param>
        /// <param name="count">Running frame count.</param>
        /// <param name="modulo">Progress-report interval in frames.</param>
        /// <returns>The updated frame count.</returns>
        private static int WriteChunk(Stream outfs, ref BitStreamChunk bsc, int count, int modulo)
        {
            if (bsc.bytesAvailable > 0)
            {
                outfs.Write(bsc.bitstream, 0, bsc.bytesAvailable);
                if (++count % modulo == 0)
                {
                    Console.Write("Frames transcoded {0}\r", count);
                }
            }
            return count;
        }
Esempio n. 19
0
        /// <summary>Initializes a new instance of the <see cref="LowLevelDecoderNative"/> class:
        /// creates a Media SDK session and initializes the native decoder via its shared-memory
        /// control block.</summary>
        /// <param name="mfxDecParamsX">The decoder parameters.</param>
        /// <param name="VPPParamsX">Optional VPP parameters; when null, a pass-through
        /// configuration derived from the decoder frame info is used.</param>
        /// <param name="impl">The implementation.</param>
        public LowLevelDecoderNative(mfxVideoParam mfxDecParamsX,
                                     mfxVideoParam?VPPParamsX = null,
                                     mfxIMPL impl             = mfxIMPL.MFX_IMPL_AUTO)
        {
            // BUGFIX: start from a fully zeroed structure so that fields not set in the
            // else-branch below are deterministically zero rather than left unassigned.
            mfxVideoParam tmpMfxVideoParam = new mfxVideoParam();

            if (VPPParamsX.HasValue)
            {
                tmpMfxVideoParam = VPPParamsX.Value;
            }
            else
            {
                // Default VPP configuration: pass frames through unchanged, in/out in video memory.
                tmpMfxVideoParam.AsyncDepth = 1;
                tmpMfxVideoParam.IOPattern  = IOPattern.MFX_IOPATTERN_IN_VIDEO_MEMORY | IOPattern.MFX_IOPATTERN_OUT_VIDEO_MEMORY;
                tmpMfxVideoParam.vpp.In     = mfxDecParamsX.mfx.FrameInfo;
                tmpMfxVideoParam.vpp.Out    = mfxDecParamsX.mfx.FrameInfo;
            }


            mfxStatus sts;

            session = new mfxSession();
            var ver = new mfxVersion()
            {
                Major = 1, Minor = 3
            };

            fixed(mfxSession *s = &session)
            sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);

            QuickSyncStatic.ThrowOnBadStatus(sts, "MFXInit");
            //deviceSetup = new DeviceSetup(session, false);

            // Allocate the native decoder; its handle doubles as a pointer to the
            // shared-memory block used to exchange state with the native side.
            h = NativeLLDecoderUnsafeNativeMethods.NativeDecoder_New();
            Trace.Assert(h != IntPtr.Zero);

            shared = (DecoderShared *)h;

            // 'safety' mirrors sizeof(DecoderShared) on the native side; a mismatch means
            // the managed and native struct layouts have drifted apart.
            Trace.Assert(shared->safety == sizeof(DecoderShared));

            // Prepare the shared input bitstream buffer (1 MB, initially empty).
            shared->mfxBS.MaxLength  = 1000000;
            shared->mfxBS.Data       = Marshal.AllocHGlobal((int)shared->mfxBS.MaxLength);
            shared->mfxBS.DataLength = 0;
            shared->mfxBS.DataOffset = 0;


            sts = NativeLLDecoderUnsafeNativeMethods.NativeDecoder_Init(h, session, &mfxDecParamsX, &tmpMfxVideoParam);
            QuickSyncStatic.ThrowOnBadStatus(sts, nameof(NativeLLDecoderUnsafeNativeMethods.NativeDecoder_Init));

            GetAndPrintWarnings();
        }
Esempio n. 20
0
        /// <summary>Example entry point: runs a file through the stream transcoder
        /// (JPEG in/out as configured below), reporting progress every 100 frames.
        /// With ENABLE_BENCHMARK defined, input is pre-read into memory and output is
        /// discarded so only transcoding is timed.</summary>
        /// <param name="args">Command line arguments (unused).</param>
        static public void Main(string[] args)
        {
            ConfirmQuickSyncReadiness.HaltIfNotReady();

            Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
            // keep ascending directories until 'media' folder is found
            for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
            {
                Directory.SetCurrentDirectory("..");
            }
            Directory.SetCurrentDirectory("Media");

            mfxIMPL impl            = mfxIMPL.MFX_IMPL_AUTO;
            CodecId inputCodecId    = CodecId.MFX_CODEC_JPEG;
            CodecId outputCodecId   = CodecId.MFX_CODEC_JPEG;
            // NOTE(review): the ".264" extension does not match the JPEG output codec
            // configured above — confirm the intended output extension.
            string  outputExtension = ".transcoded.264"; // this should match outputCodecId above


            string inFilename;

            // NOTE(review): hard-coded absolute path; alternative sample inputs kept below.
            inFilename = @"C:\x\core-imaging-playground\images\IMG_2301.jpg";
            //  inFilename = "BigBuckBunny_320x180.264";
            //inFilename = "BigBuckBunny_1920x1080.264";
            //inFilename = "BigBuckBunny_3840x2160.264";
            string outFilename = Path.ChangeExtension(inFilename, outputExtension);

            Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
            Console.WriteLine("Input filename: {0}", inFilename);
            Console.WriteLine();

            if (!File.Exists(inFilename))
            {
                Console.WriteLine("Input file not found. Press any key to exit.");
                Console.ReadKey();
                return;
            }


            Stream         infs, outfs;
            BenchmarkTimer bt = null;


#if !ENABLE_BENCHMARK
            infs  = File.Open(inFilename, FileMode.Open);
            outfs = File.Open(outFilename, FileMode.Create);
#else       // delete this code for most simple example
            // * Benchmark Mode *
            // this block does a couple things:
            //   1. causes the file to be pre-read into memory so we are not timing disk reads.
            //   2. replaces the output stream with a NullStream so nothing gets written to disk.
            //   3. Starts the timer for benchmarking
            Console.WriteLine("Pre-reading input");
            infs = new PreReadLargeMemoryStream(File.Open(inFilename, FileMode.Open));
            Console.WriteLine("Input read");

            outfs = new NullStream();
            bt    = new BenchmarkTimer();
            bt.Start();

            //int minimumFrames = 4000;
#endif

            try
            {
                Console.WriteLine("Output filename: {0}",
                                  Path.GetFileName((outfs as FileStream)?.Name ?? "NO OUTPUT"));
                Console.WriteLine();


                var config = TranscoderConfiguration.BuildTranscoderConfigurationFromStream(infs,
                                                                                            inputCodecId,
                                                                                            outputCodecId);

                var transcoder = new StreamTranscoder(infs, config, impl, false);

                string impltext = QuickSyncStatic.ImplementationString(transcoder.lowLevelTranscoder.session);
                Console.WriteLine("Implementation = {0}", impltext);


                int modulo = 100;
                int count  = 0;

                // Pull transcoded frames and write each chunk straight to the output.
                foreach (var item in transcoder.GetFrames())
                {
                    outfs.Write(item.bitstream, 0, item.bytesAvailable);

                    if (++count % modulo == 0)
                    {
                        Console.Write("Frames transcoded {0}\r", count);
                    }
                }

                Console.WriteLine("Frames transcoded {0}", count);
                Console.WriteLine();

                if (bt != null)
                {
                    bt.StopAndReport(count, infs.Position, outfs.Position);
                }
            }
            finally
            {
                // BUGFIX: streams were previously leaked when transcoding threw an exception.
                infs.Close();
                outfs.Close();
            }

            if (Debugger.IsAttached)
            {
                Console.WriteLine("done - press a key to exit");
                Console.ReadKey();
            }
        }
Esempio n. 21
0
 /// <summary>Initializes a Media SDK session (native MFXInit entry point).
 /// NOTE(review): the [DllImport] attribute presumably precedes this declaration but is
 /// not visible in this chunk — confirm it is present in the full file.</summary>
 /// <param name="impl">Requested implementation, e.g. <c>MFX_IMPL_AUTO</c>.</param>
 /// <param name="ver">Pointer to the minimum required API version.</param>
 /// <param name="session">Receives the created session handle.</param>
 public static extern mfxStatus MFXInit(mfxIMPL impl, mfxVersion *ver, mfxSession *session);