static mfxVideoParam TranscoderSetupEncoderParameters(int width, int height, CodecId codec)
{
    mfxVideoParam mfxEncParams = new mfxVideoParam();
    mfxEncParams.mfx.CodecId = codec;
    mfxEncParams.mfx.TargetUsage = TargetUsage.MFX_TARGETUSAGE_BALANCED;
    mfxEncParams.mfx.TargetKbps = 1000;
    mfxEncParams.mfx.RateControlMethod = RateControlMethod.MFX_RATECONTROL_VBR;
    mfxEncParams.mfx.FrameInfo.FrameRateExtN = 30;
    mfxEncParams.mfx.FrameInfo.FrameRateExtD = 1;
    mfxEncParams.mfx.FrameInfo.FourCC = FourCC.NV12;
    mfxEncParams.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV420;
    mfxEncParams.mfx.FrameInfo.PicStruct = PicStruct.MFX_PICSTRUCT_PROGRESSIVE;
    mfxEncParams.mfx.FrameInfo.CropX = 0;
    mfxEncParams.mfx.FrameInfo.CropY = 0;
    mfxEncParams.mfx.FrameInfo.CropW = (ushort)width;  // resolution the encoder receives (the VPP output size)
    mfxEncParams.mfx.FrameInfo.CropH = (ushort)height;
    // width must be a multiple of 16
    // height must be a multiple of 16 in case of frame picture and a multiple of 32 in case of field picture
    mfxEncParams.mfx.FrameInfo.Width = ALIGN16(mfxEncParams.mfx.FrameInfo.CropW);
    mfxEncParams.mfx.FrameInfo.Height =
        (PicStruct.MFX_PICSTRUCT_PROGRESSIVE == mfxEncParams.mfx.FrameInfo.PicStruct)
            ? ALIGN16(mfxEncParams.mfx.FrameInfo.CropH)
            : ALIGN32(mfxEncParams.mfx.FrameInfo.CropH);

    return mfxEncParams;
}
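// The ALIGN16/ALIGN32 helpers used above live in QuickSyncStatic in this repository;
// a minimal sketch of the conventional round-up-to-multiple implementation they are
// assumed to follow (an assumption, not copied from the source):
//     static ushort ALIGN16(int value) => (ushort)((value + 15) & ~15);
//     static ushort ALIGN32(int value) => (ushort)((value + 31) & ~31);
// e.g. ALIGN16(180) == 192 and ALIGN16(1080) == 1088.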
/// <summary>
/// Reset the decoder; useful to discard the buffered bitstream and start over.
/// NOTE: for AVC, SPS/PPS will be required after Reset. A reset causes the decoder
/// to re-enter header-seek mode, so no frames are returned until the decoder is
/// given SPS/PPS again.
/// </summary>
public void Reset(mfxVideoParam p)
{
    bitstream.DataLength = 0;
    bitstream.DataOffset = 0;
    mfxStatus sts = UnsafeNativeMethods.MFXVideoDECODE_Reset(session, &p);
    if (sts < 0)
    {
        throw new QuickSyncException("Reset failed", sts);
    }
}
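// A hedged usage sketch of Reset: discard whatever is buffered, then re-feed from an
// IDR access unit that carries SPS/PPS so the decoder can lock on again. The names
// 'decoder' and 'headerAndIdrBytes' are illustrative, not from this file:
//     decoder.Reset(decoderParameters);   // discards the buffered bitstream
//     feed(headerAndIdrBytes);            // hypothetical: data must begin with SPS/PPS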
private static void ConfigureComposition(ref mfxVideoParam mfxVPPParams)
{
    // Attach an mfxExtVPPComposite extended buffer describing two input streams.
    // These buffers are referenced by pointer from mfxVPPParams, so they must stay
    // valid until MFXVideoVPP_Init has consumed them; stackalloc (as originally used
    // here) would dangle once this method returns, so unmanaged memory is used
    // instead (never freed in this sample, for simplicity).
    var extVPPComposite = (mfxExtVPPComposite *)Marshal.AllocHGlobal(sizeof(mfxExtVPPComposite));
    extVPPComposite->Header.BufferId = BufferId.MFX_EXTBUFF_VPP_COMPOSITE;
    extVPPComposite->Header.BufferSz = (uint)sizeof(mfxExtVPPComposite);
    extVPPComposite->NumInputStream = 2;

    var inputStreams = (mfxVPPCompInputStream *)Marshal.AllocHGlobal(
        sizeof(mfxVPPCompInputStream) * extVPPComposite->NumInputStream);
    extVPPComposite->InputStream_ptr = inputStreams;

    for (int i = 0; i < extVPPComposite->NumInputStream; i++)
    {
        inputStreams[i].DstX = 0;
        inputStreams[i].DstY = 0;
        inputStreams[i].DstW = 320;
        inputStreams[i].DstH = 180;

        // Enable luma keying on every stream after the first: pixels whose luma
        // falls within [LumaKeyMin, LumaKeyMax] become transparent (here, pure black only).
        if (i > 0)
        {
            inputStreams[i].LumaKeyEnable = 1;
        }
        inputStreams[i].LumaKeyMin = 0;
        inputStreams[i].LumaKeyMax = 0;
    }

    var pExtParamsVPP2 = (mfxExtBuffer **)Marshal.AllocHGlobal(sizeof(mfxExtBuffer *));
    pExtParamsVPP2[0] = (mfxExtBuffer *)extVPPComposite;
    mfxVPPParams.ExtParam = pExtParamsVPP2;
    mfxVPPParams.NumExtParam = 1;
}
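// A minimal usage sketch (names illustrative, not from the source): the composition
// buffer must be attached before VPP initialization consumes it, e.g.
//     ConfigureComposition(ref vppParams);
//     sts = UnsafeNativeMethods.MFXVideoVPP_Init(session, &vppParams);
//     QuickSyncStatic.ThrowOnBadStatus(sts, "MFXVideoVPP_Init");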
/// <summary>
/// Construct the decoder.
/// </summary>
/// <param name="stream">Stream to be read from</param>
/// <param name="codecId">Format of the bitstream: AVC, HEVC, MJPEG, ...</param>
/// <param name="impl">Implementation to use</param>
/// <param name="outIOPattern">Memory type for decoding</param>
public StreamDecoder(Stream stream, CodecId codecId, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO,
                     IOPattern outIOPattern = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY)
{
    // Read a prefix of the stream so DecodeHeader can find the sequence header,
    // then rewind so decoding starts from the beginning.
    long oldposition = stream.Position;
    var buf = new byte[65536];
    int n = stream.Read(buf, 0, buf.Length);
    if (n < buf.Length)
    {
        Array.Resize(ref buf, n);
    }
    stream.Position = oldposition;

    this.decoderParameters = QuickSyncStatic.DecodeHeader(buf, codecId, impl);
    this.decoderParameters.IOPattern = outIOPattern;
    lowLevelDecoder = new LowLevelDecoder(decoderParameters, null, impl);
    Init(stream);
}
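// A minimal usage sketch of this constructor, assuming an AVC elementary stream on
// disk (file name illustrative):
//     using (var fs = File.OpenRead("BigBuckBunny_320x180.264"))
//     {
//         var dec = new StreamDecoder(fs, CodecId.MFX_CODEC_AVC);
//         foreach (var frame in dec.GetFrames()) { /* consume decoded NV12 frames */ }
//     }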
static mfxVideoParam TranscoderSetupVPPParameters(int width, int height)
{
    mfxVideoParam VPPParams = new mfxVideoParam();

    // Input data
    VPPParams.vpp.In.FourCC = FourCC.NV12;
    VPPParams.vpp.In.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV420;
    VPPParams.vpp.In.CropX = 0;
    VPPParams.vpp.In.CropY = 0;
    VPPParams.vpp.In.CropW = (ushort)width;
    VPPParams.vpp.In.CropH = (ushort)height;
    VPPParams.vpp.In.PicStruct = PicStruct.MFX_PICSTRUCT_PROGRESSIVE;
    VPPParams.vpp.In.FrameRateExtN = 30;
    VPPParams.vpp.In.FrameRateExtD = 1;
    // width must be a multiple of 16
    // height must be a multiple of 16 in case of frame picture and a multiple of 32 in case of field picture
    VPPParams.vpp.In.Width = ALIGN16(VPPParams.vpp.In.CropW);
    VPPParams.vpp.In.Height =
        (PicStruct.MFX_PICSTRUCT_PROGRESSIVE == VPPParams.vpp.In.PicStruct)
            ? ALIGN16(VPPParams.vpp.In.CropH)
            : ALIGN32(VPPParams.vpp.In.CropH);

    // Output data
    VPPParams.vpp.Out.FourCC = FourCC.NV12;
    VPPParams.vpp.Out.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV420;
    VPPParams.vpp.Out.CropX = 0;
    VPPParams.vpp.Out.CropY = 0;
    VPPParams.vpp.Out.CropW = (ushort)(VPPParams.vpp.In.CropW / 1); // same size as the decode stream; increase the divisor to downscale
    VPPParams.vpp.Out.CropH = (ushort)(VPPParams.vpp.In.CropH / 1);
    VPPParams.vpp.Out.PicStruct = PicStruct.MFX_PICSTRUCT_PROGRESSIVE;
    VPPParams.vpp.Out.FrameRateExtN = 30;
    VPPParams.vpp.Out.FrameRateExtD = 1;
    // width must be a multiple of 16
    // height must be a multiple of 16 in case of frame picture and a multiple of 32 in case of field picture
    VPPParams.vpp.Out.Width = ALIGN16(VPPParams.vpp.Out.CropW);
    VPPParams.vpp.Out.Height =
        (PicStruct.MFX_PICSTRUCT_PROGRESSIVE == VPPParams.vpp.Out.PicStruct)
            ? ALIGN16(VPPParams.vpp.Out.CropH)
            : ALIGN32(VPPParams.vpp.Out.CropH);

    return VPPParams;
}
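// Worked example of the alignment rules above: a 1920x1080 progressive stream keeps
// CropW/CropH = 1920/1080 while Width = ALIGN16(1920) = 1920 and
// Height = ALIGN16(1080) = 1088; a field (interlaced) picture would instead require
// Height = ALIGN32(1080) = 1088.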
unsafe public LowLevelEncoderNative(mfxVideoParam mfxEncParams, mfxIMPL impl)
{
    mfxStatus sts;

    this._session = new mfxSession();
    var ver = new mfxVersion() { Major = 1, Minor = 3 };
    fixed (mfxSession *s = &_session)
        sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);
    QuickSyncStatic.ThrowOnBadStatus(sts, nameof(UnsafeNativeMethods.MFXInit));

    h = NativeLLEncoderUnsafeNativeMethods.NativeEncoder_New();
    Trace.Assert(h != IntPtr.Zero);
    shared = (EncoderShared *)h;
    shared->session = _session;
    shared->mfxEncParams = mfxEncParams;
    Trace.Assert(shared->safety == sizeof(EncoderShared));

    sts = NativeLLEncoderUnsafeNativeMethods.NativeEncoder_Init(h);
    QuickSyncStatic.ThrowOnBadStatus(sts, nameof(NativeLLEncoderUnsafeNativeMethods.NativeEncoder_Init));

    frameIntPtrs = new IntPtr[shared->nEncSurfNum];
    for (int i = 0; i < frameIntPtrs.Length; i++)
    {
        frameIntPtrs[i] = (IntPtr)shared->pmfxSurfaces[i];
    }

    GetAndPrintWarnings();
}
private static void AssignChromaFormat(FourCC decoderFourcc, ref mfxVideoParam decoderParameters)
{
    switch (decoderFourcc)
    {
        case FourCC.NV12:
        case FourCC.YV12:
            decoderParameters.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV420;
            break;
        case FourCC.YUY2:
            decoderParameters.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV422;
            break;
        case FourCC.RGB4:
            decoderParameters.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV444;
            break;
        default:
            Trace.Assert(false);
            break;
    }
}
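// Usage sketch: keep ChromaFormat consistent whenever the output FourCC is forced,
// as the JPEG decode sample elsewhere in this repository does:
//     decoderParameters.mfx.FrameInfo.FourCC = FourCC.YUY2;
//     AssignChromaFormat(FourCC.YUY2, ref decoderParameters);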
/// <summary>
/// Constructor
/// </summary>
public LowLevelDecoderCSharp(mfxVideoParam mfxDecParamsX, mfxVideoParam? VPPParamsX = null, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO)
{
    mfxStatus sts;
    bool enableVPP = VPPParamsX != null;

    if (VPPParamsX == null)
    {
        // Create default VPP parameters: a pass-through with the decoder's frame info.
        var foo = new mfxVideoParam();
        foo.AsyncDepth = 1;
        foo.IOPattern = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY | IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY;
        foo.vpp.In = mfxDecParamsX.mfx.FrameInfo;
        foo.vpp.Out = mfxDecParamsX.mfx.FrameInfo;
        VPPParamsX = foo;
    }

    mfxVideoParam VPPParams = VPPParamsX.Value;
    mfxVideoParam mfxDecParams = mfxDecParamsX;

    this.videoParam = mfxDecParams;
    this.enableVPP = enableVPP;

    session = new mfxSession();
    var ver = new mfxVersion() { Major = 1, Minor = 3 };
    fixed (mfxSession *s = &session)
        sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);
    QuickSyncStatic.ThrowOnBadStatus(sts, "MFXInit");

    bool decVideoMemOut = (mfxDecParams.IOPattern & IOPattern.MFX_IOPATTERN_OUT_VIDEO_MEMORY) != 0;
    bool vppVideoMemIn = (VPPParams.IOPattern & IOPattern.MFX_IOPATTERN_IN_VIDEO_MEMORY) != 0;
    bool vppVideoMemOut = (VPPParams.IOPattern & IOPattern.MFX_IOPATTERN_OUT_VIDEO_MEMORY) != 0;

    Trace.Assert(!enableVPP || decVideoMemOut == vppVideoMemIn,
                 "When the VPP is enabled, the memory type from DEC into VPP must be of same type");

    if (vppVideoMemIn || vppVideoMemOut)
    {
        // To use video memory, we need a way to allocate Direct3D or VAAPI frames.
        videoAccelerationSupport = new VideoAccelerationSupport(session);
    }

    fixed (mfxFrameAllocRequest *p = &DecRequest)
        sts = UnsafeNativeMethods.MFXVideoDECODE_QueryIOSurf(session, &mfxDecParams, p);
    if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION) { sts = 0; }
    QuickSyncStatic.ThrowOnBadStatus(sts, "DECODE_QueryIOSurf");

    if (enableVPP)
    {
        fixed (mfxFrameAllocRequest *p = &VPPRequest[0])
            sts = UnsafeNativeMethods.MFXVideoVPP_QueryIOSurf(session, &VPPParams, p);
        if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION) { sts = 0; }
        QuickSyncStatic.ThrowOnBadStatus(sts, "VPP_QueryIOSurf");

        VPPRequest[1].Type |= FrameMemoryType.WILL_READ;
    }

    int nSurfNumVPPOut = 0;
    var numSurfaces = DecRequest.NumFrameSuggested + VPPRequest[0].NumFrameSuggested + VPPParams.AsyncDepth;
    if (enableVPP)
    {
        nSurfNumVPPOut = VPPRequest[1].NumFrameSuggested + VPPParams.AsyncDepth;
    }

    bitstreamBuffer = Marshal.AllocHGlobal(defaultBitstreamBufferSize);
    bitstream.Data = bitstreamBuffer;
    bitstream.DataLength = 0;
    bitstream.MaxLength = (uint)defaultBitstreamBufferSize;
    bitstream.DataOffset = 0;

    // Allocate decoder frames via DirectX when decoding to video memory.
    mfxFrameAllocResponse DecResponse = new mfxFrameAllocResponse();
    if (decVideoMemOut)
    {
        DecRequest.NumFrameMin = DecRequest.NumFrameSuggested = (ushort)numSurfaces;
        fixed (mfxFrameAllocRequest *p = &DecRequest)
            videoAccelerationSupport.AllocFrames(p, &DecResponse);
    }

    // Allocate VPP frames via DirectX when the VPP outputs to video memory.
    mfxFrameAllocResponse EncResponse = new mfxFrameAllocResponse();
    if (vppVideoMemOut)
    {
        VPPRequest[1].NumFrameMin = VPPRequest[1].NumFrameSuggested = (ushort)nSurfNumVPPOut;
        fixed (mfxFrameAllocRequest *p = &VPPRequest[1])
            videoAccelerationSupport.AllocFrames(p, &EncResponse);
    }

    // Allocate surfaces for the decoder:
    // - width and height of the buffer must be aligned to a multiple of 32
    // - the frame surface array keeps pointers to all surface planes and general frame info
    UInt16 width = (UInt16)QuickSyncStatic.ALIGN32(DecRequest.Info.Width);
    UInt16 height = (UInt16)QuickSyncStatic.ALIGN32(DecRequest.Info.Height);
    int bitsPerPixel = VideoUtility.GetBitsPerPixel(mfxDecParams.mfx.FrameInfo.FourCC);
    int surfaceSize = width * height * bitsPerPixel / 8;
    if (!decVideoMemOut)
    {
        surfaceBuffers = Marshal.AllocHGlobal(surfaceSize * numSurfaces);
    }

    // Allocate surface headers (mfxFrameSurface1) for the decoder; the array is
    // pinned because native code holds pointers into it.
    pmfxSurfaces = new mfxFrameSurface1[numSurfaces];
    pinningHandles.Add(GCHandle.Alloc(pmfxSurfaces, GCHandleType.Pinned));
    for (int i = 0; i < numSurfaces; i++)
    {
        pmfxSurfaces[i] = new mfxFrameSurface1();
        pmfxSurfaces[i].Info = mfxDecParams.mfx.FrameInfo;

        if (!decVideoMemOut)
        {
            switch (mfxDecParams.mfx.FrameInfo.FourCC)
            {
                case FourCC.NV12:
                    pmfxSurfaces[i].Data.Y_ptr = (byte *)surfaceBuffers + i * surfaceSize;
                    pmfxSurfaces[i].Data.U_ptr = pmfxSurfaces[i].Data.Y_ptr + width * height;
                    pmfxSurfaces[i].Data.V_ptr = pmfxSurfaces[i].Data.U_ptr + 1;
                    pmfxSurfaces[i].Data.Pitch = width;
                    break;
                case FourCC.YUY2:
                    pmfxSurfaces[i].Data.Y_ptr = (byte *)surfaceBuffers + i * surfaceSize;
                    pmfxSurfaces[i].Data.U_ptr = pmfxSurfaces[i].Data.Y_ptr + 1;
                    pmfxSurfaces[i].Data.V_ptr = pmfxSurfaces[i].Data.U_ptr + 3;
                    pmfxSurfaces[i].Data.Pitch = (ushort)(width * 2);
                    break;
                default: // see sysmem_allocator.cpp for more help
                    throw new NotImplementedException();
            }
        }
        else
        {
            pmfxSurfaces[i].Data.MemId = DecResponse.mids_ptr[i]; // MID (memory id) represents one D3D NV12 surface
        }
    }

    if (enableVPP)
    {
        UInt16 width2 = (UInt16)QuickSyncStatic.ALIGN32(VPPRequest[1].Info.CropW);
        UInt16 height2 = (UInt16)QuickSyncStatic.ALIGN32(VPPRequest[1].Info.CropH);
        int bitsPerPixel2 = VideoUtility.GetBitsPerPixel(VPPParams.vpp.Out.FourCC); // NV12 is a 12 bits per pixel format
        int surfaceSize2 = width2 * height2 * bitsPerPixel2 / 8;
        int pitch2 = width2 * bitsPerPixel2 / 8;
        if (!vppVideoMemOut)
        {
            surfaceBuffers2 = Marshal.AllocHGlobal(surfaceSize2 * nSurfNumVPPOut);
        }

        pmfxSurfaces2 = new mfxFrameSurface1[nSurfNumVPPOut];
        pinningHandles.Add(GCHandle.Alloc(pmfxSurfaces2, GCHandleType.Pinned));
        for (int i = 0; i < nSurfNumVPPOut; i++)
        {
            pmfxSurfaces2[i] = new mfxFrameSurface1();
            pmfxSurfaces2[i].Info = VPPParams.vpp.Out;

            if (!vppVideoMemOut)
            {
                pmfxSurfaces2[i].Data.Pitch = (ushort)pitch2;
                switch (VPPParams.vpp.Out.FourCC)
                {
                    case FourCC.NV12:
                        pmfxSurfaces2[i].Data.Y_ptr = (byte *)surfaceBuffers2 + i * surfaceSize2;
                        // offsets must use the VPP output dimensions (width2/height2), not the decoder's
                        pmfxSurfaces2[i].Data.U_ptr = pmfxSurfaces2[i].Data.Y_ptr + width2 * height2;
                        pmfxSurfaces2[i].Data.V_ptr = pmfxSurfaces2[i].Data.U_ptr + 1;
                        break;
                    case FourCC.RGB4:
                        pmfxSurfaces2[i].Data.B_ptr = (byte *)surfaceBuffers2 + i * surfaceSize2;
                        pmfxSurfaces2[i].Data.G_ptr = (byte *)surfaceBuffers2 + i * surfaceSize2 + 1;
                        pmfxSurfaces2[i].Data.R_ptr = (byte *)surfaceBuffers2 + i * surfaceSize2 + 2;
                        break;
                    default:
                        break;
                }
            }
            else
            {
                pmfxSurfaces2[i].Data.MemId = EncResponse.mids_ptr[i]; // MID (memory id) represents one D3D NV12 surface
            }
        }
    }

    sts = UnsafeNativeMethods.MFXVideoDECODE_Init(session, &mfxDecParams);
    if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION) { sts = 0; }
    QuickSyncStatic.ThrowOnBadStatus(sts, "MFXVideoDECODE_Init");

    if (enableVPP)
    {
        sts = UnsafeNativeMethods.MFXVideoVPP_Init(session, &VPPParams);
        if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION) { sts = 0; }
        QuickSyncStatic.ThrowOnBadStatus(sts, "MFXVideoVPP_Init");
    }
}
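// Worked example of the NV12 plane math used above: for one aligned 320x192 surface,
// bytes [0, 320*192) are the Y plane and the next 320*96 bytes are interleaved UV
// pairs, hence U_ptr = Y_ptr + width*height, V_ptr = U_ptr + 1, Pitch = width, and a
// total of width*height*12/8 bytes per surface (NV12 is 12 bits per pixel).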
/// <summary>Initializes a new instance of the <see cref="StreamDecoder"/> class.
/// Fully specify decode params, and optionally VPP params.</summary>
/// <param name="stream">The stream.</param>
/// <param name="decodeParameters">The decode parameters.</param>
/// <param name="mfxVPPParams">The MFX VPP parameters.</param>
/// <param name="impl">The implementation.</param>
public StreamDecoder(Stream stream, mfxVideoParam decodeParameters, mfxVideoParam? mfxVPPParams = null, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO)
{
    this.decoderParameters = decodeParameters;
    lowLevelDecoder = new LowLevelDecoder(decodeParameters, mfxVPPParams, impl);
    Init(stream);
}
/// <summary>Initializes a new instance of the <see cref="LowLevelTranscoderCSharp"/> class.</summary>
/// <param name="config">The configuration.</param>
/// <param name="impl">The implementation.</param>
/// <param name="forceSystemMemory">if set to <c>true</c> [force system memory].</param>
public LowLevelTranscoderCSharp(TranscoderConfiguration config, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO, bool forceSystemMemory = false)
{
    mfxStatus sts;

    mfxVideoParam mfxDecParams = config.decParams;
    mfxVideoParam mfxVPPParams = config.vppParams;
    mfxVideoParam mfxEncParams = config.encParams;

    session = new mfxSession();
    var ver = new mfxVersion() { Major = 1, Minor = 3 };
    fixed (mfxSession *s = &session)
        sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);
    QuickSyncStatic.ThrowOnBadStatus(sts, "MFXInit");

    int bufsize = (int)1e6;
    mfxBS = (mfxBitstream *)MyAllocHGlobalAndZero(sizeof(mfxBitstream));
    mfxBS->Data = MyAllocHGlobalAndZero(bufsize);
    mfxBS->DataLength = 0;
    mfxBS->MaxLength = (uint)bufsize;
    mfxBS->DataOffset = 0;

    int outwidth = mfxDecParams.mfx.FrameInfo.CropW;
    int outheight = mfxDecParams.mfx.FrameInfo.CropH;

    // Query number of required surfaces for VPP ([0] - in, [1] - out)
    TwoMfxFrameAllocRequest VPPRequest;
    sts = UnsafeNativeMethods.MFXVideoVPP_QueryIOSurf(session, &mfxVPPParams, (mfxFrameAllocRequest *)&VPPRequest);
    if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
    {
        warnings.Add(nameof(UnsafeNativeMethods.MFXVideoVPP_QueryIOSurf), sts);
        sts = 0;
    }
    QuickSyncStatic.ThrowOnBadStatus(sts, "vpp.queryiosurf");

    // Query number of required surfaces for decode
    mfxFrameAllocRequest DecRequest;
    sts = UnsafeNativeMethods.MFXVideoDECODE_QueryIOSurf(session, &mfxDecParams, &DecRequest);
    if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
    {
        warnings.Add(nameof(UnsafeNativeMethods.MFXVideoDECODE_QueryIOSurf), sts);
        sts = 0;
    }
    QuickSyncStatic.ThrowOnBadStatus(sts, nameof(UnsafeNativeMethods.MFXVideoDECODE_QueryIOSurf));

    // Query number of required surfaces for encode
    mfxFrameAllocRequest EncRequest = new mfxFrameAllocRequest();
    sts = UnsafeNativeMethods.MFXVideoENCODE_QueryIOSurf(session, &mfxEncParams, &EncRequest);
    if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
    {
        warnings.Add(nameof(UnsafeNativeMethods.MFXVideoENCODE_QueryIOSurf), sts);
        sts = 0;
    }
    QuickSyncStatic.ThrowOnBadStatus(sts, nameof(UnsafeNativeMethods.MFXVideoENCODE_QueryIOSurf));

    // Determine the required number of surfaces for decoder output (VPP input)
    // and for VPP output (encoder input).
    nSurfNumDecVPP = DecRequest.NumFrameSuggested + VPPRequest.In.NumFrameSuggested + mfxVPPParams.AsyncDepth;
    nSurfNumVPPEnc = EncRequest.NumFrameSuggested + VPPRequest.Out.NumFrameSuggested + mfxVPPParams.AsyncDepth;

    {
        Trace.Assert((mfxEncParams.IOPattern & IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY) != 0);
        Trace.Assert((mfxDecParams.IOPattern & IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY) != 0);

        UInt16 width = (UInt16)QuickSyncStatic.ALIGN32(DecRequest.Info.Width);
        UInt16 height = (UInt16)QuickSyncStatic.ALIGN32(DecRequest.Info.Height);
        int bitsPerPixel = 12; // NV12
        int surfaceSize = width * height * bitsPerPixel / 8;

        var decVppSurfaceBuffers = Marshal.AllocHGlobal(surfaceSize * nSurfNumDecVPP);
        var vppEncSurfaceBuffers = Marshal.AllocHGlobal(surfaceSize * nSurfNumVPPEnc);

        pSurfaces = (mfxFrameSurface1 *)MyAllocHGlobalAndZero(sizeof(mfxFrameSurface1) * nSurfNumDecVPP);
        pSurfaces2 = (mfxFrameSurface1 *)MyAllocHGlobalAndZero(sizeof(mfxFrameSurface1) * nSurfNumVPPEnc);

        for (int i = 0; i < nSurfNumDecVPP; i++)
        {
            pSurfaces[i] = new mfxFrameSurface1();
            pSurfaces[i].Info = DecRequest.Info;
            pSurfaces[i].Data.Y_ptr = (byte *)decVppSurfaceBuffers + i * surfaceSize;
            pSurfaces[i].Data.U_ptr = pSurfaces[i].Data.Y_ptr + width * height;
            pSurfaces[i].Data.V_ptr = pSurfaces[i].Data.U_ptr + 1;
            pSurfaces[i].Data.Pitch = width;
        }

        for (int i = 0; i < nSurfNumVPPEnc; i++)
        {
            pSurfaces2[i] = new mfxFrameSurface1();
            pSurfaces2[i].Info = EncRequest.Info;
            pSurfaces2[i].Data.Y_ptr = (byte *)vppEncSurfaceBuffers + i * surfaceSize;
            pSurfaces2[i].Data.U_ptr = pSurfaces2[i].Data.Y_ptr + width * height;
            pSurfaces2[i].Data.V_ptr = pSurfaces2[i].Data.U_ptr + 1;
            pSurfaces2[i].Data.Pitch = width;
        }
    }

    sts = UnsafeNativeMethods.MFXVideoDECODE_Init(session, &mfxDecParams);
    if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
    {
        warnings.Add(nameof(UnsafeNativeMethods.MFXVideoDECODE_Init), sts);
        sts = 0;
    }
    QuickSyncStatic.ThrowOnBadStatus(sts, "decode.init");

    sts = UnsafeNativeMethods.MFXVideoENCODE_Init(session, &mfxEncParams);
    if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
    {
        warnings.Add(nameof(UnsafeNativeMethods.MFXVideoENCODE_Init), sts);
        sts = 0;
    }
    QuickSyncStatic.ThrowOnBadStatus(sts, "encode.init");

    sts = UnsafeNativeMethods.MFXVideoVPP_Init(session, &mfxVPPParams);
    if (sts == mfxStatus.MFX_WRN_PARTIAL_ACCELERATION)
    {
        warnings.Add(nameof(UnsafeNativeMethods.MFXVideoVPP_Init), sts);
        sts = 0;
    }
    QuickSyncStatic.ThrowOnBadStatus(sts, "vpp.init");

    // Retrieve the video parameters selected by the encoder.
    // The BufferSizeInKB parameter is required to size the bitstream buffers.
    mfxVideoParam par = new mfxVideoParam();
    sts = UnsafeNativeMethods.MFXVideoENCODE_GetVideoParam(session, &par);
    QuickSyncStatic.ThrowOnBadStatus(sts, "enc.getvideoparams");

    // Create a task pool to improve asynchronous performance (greater GPU utilization).
    // taskPoolSize is the number of tasks that can be submitted before synchronizing is required.
    taskPoolSize = mfxEncParams.AsyncDepth;
    pTasks = (Task *)MyAllocHGlobalAndZero(sizeof(Task) * taskPoolSize);
    for (int i = 0; i < taskPoolSize; i++)
    {
        // Prepare a Media SDK bitstream buffer for each task.
        pTasks[i].mfxBS.MaxLength = (uint)(par.mfx.BufferSizeInKB * 1000);
        pTasks[i].mfxBS.Data = MyAllocHGlobalAndZero((int)pTasks[i].mfxBS.MaxLength);
        Trace.Assert(pTasks[i].mfxBS.Data != IntPtr.Zero);
    }
}
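// Worked example of the surface budgeting above: if decode suggests 8 surfaces, VPP
// suggests 4 in / 4 out, encode suggests 6, and AsyncDepth is 4, then
// nSurfNumDecVPP = 8 + 4 + 4 = 16 and nSurfNumVPPEnc = 6 + 4 + 4 = 14.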
public LowLevelDecoderNative(mfxVideoParam mfxDecParamsX, mfxVideoParam? VPPParamsX = null, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO)
{
    // Use the caller's VPP parameters, or build a pass-through configuration
    // from the decoder's frame info.
    mfxVideoParam tmpMfxVideoParam = new mfxVideoParam();
    if (VPPParamsX.HasValue)
    {
        tmpMfxVideoParam = VPPParamsX.Value;
    }
    else
    {
        tmpMfxVideoParam.AsyncDepth = 1;
        tmpMfxVideoParam.IOPattern = IOPattern.MFX_IOPATTERN_IN_VIDEO_MEMORY | IOPattern.MFX_IOPATTERN_OUT_VIDEO_MEMORY;
        tmpMfxVideoParam.vpp.In = mfxDecParamsX.mfx.FrameInfo;
        tmpMfxVideoParam.vpp.Out = mfxDecParamsX.mfx.FrameInfo;
    }

    mfxStatus sts;
    session = new mfxSession();
    var ver = new mfxVersion() { Major = 1, Minor = 3 };
    fixed (mfxSession *s = &session)
        sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);
    QuickSyncStatic.ThrowOnBadStatus(sts, "MFXInit");

    h = NativeLLDecoderUnsafeNativeMethods.NativeDecoder_New();
    Trace.Assert(h != IntPtr.Zero);
    shared = (DecoderShared *)h;
    Trace.Assert(shared->safety == sizeof(DecoderShared));

    shared->mfxBS.MaxLength = 1000000;
    shared->mfxBS.Data = Marshal.AllocHGlobal((int)shared->mfxBS.MaxLength);
    shared->mfxBS.DataLength = 0;
    shared->mfxBS.DataOffset = 0;

    sts = NativeLLDecoderUnsafeNativeMethods.NativeDecoder_Init(h, session, &mfxDecParamsX, &tmpMfxVideoParam);
    QuickSyncStatic.ThrowOnBadStatus(sts, nameof(NativeLLDecoderUnsafeNativeMethods.NativeDecoder_Init));

    GetAndPrintWarnings();
}
static public void Main()
{
    ConfirmQuickSyncReadiness.HaltIfNotReady();

    Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
    // keep ascending directories until 'Media' folder is found
    for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
    {
        Directory.SetCurrentDirectory("..");
    }
    Directory.SetCurrentDirectory("Media");

    mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO; // automatic GPU/CPU mode
    CodecId codecId = CodecId.MFX_CODEC_AVC;

    // supported fourcc: RGB3 RGB4 BGR4 BGR3 NV12 I420 IYUV YUY2 UYVY YV12 P411 P422
    FourCC fourcc = FourCC.NV12;
    string fourccString = fourcc.ToString().Substring(0, 4);

    string inFilename;
    inFilename = "BigBuckBunny_320x180.264";
    //inFilename = "BigBuckBunny_1920x1080.264";
    //inFilename = "BigBuckBunny_3840x2160.264";
    string outFilename = Path.ChangeExtension(inFilename, fourccString + ".yuv");

    Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
    Console.WriteLine("Input filename: {0}", inFilename);
    Console.WriteLine();
    if (!File.Exists(inFilename))
    {
        Console.WriteLine("Input file not found. Press any key to exit.");
        Console.ReadKey();
        return;
    }

    Stream infs, outfs;
    BenchmarkTimer bt = null;

#if !ENABLE_BENCHMARK
    infs = File.Open(inFilename, FileMode.Open);
    outfs = File.Open(outFilename, FileMode.Create);
#else // delete this code for most simple example
    // * Benchmark Mode *
    // this block does a couple things:
    //   1. causes the file to be pre-read into memory so we are not timing disk reads.
    //   2. replaces the output stream with a NullStream so nothing gets written to disk.
    //   3. starts the timer for benchmarking
    // maximumMemoryToAllocate = (long)4L * 1024 * 1024 * 1024;
    Console.WriteLine("Pre-reading input");
    infs = new PreReadLargeMemoryStream(File.Open(inFilename, FileMode.Open));
    Console.WriteLine("Input read");
    outfs = new NullStream();
    bt = new BenchmarkTimer();
    bt.Start();
    //int minimumFrames = 4000;
#endif

    Console.WriteLine("Output filename: {0}", Path.GetFileName((outfs as FileStream)?.Name ?? "NO OUTPUT"));
    Console.WriteLine();

    var outIOPattern = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY;

    mfxVideoParam decoderParameters = QuickSyncStatic.ReadFileHeaderInfo(codecId, impl, infs, outIOPattern);
    decoderParameters.mfx.FrameInfo.FourCC = fourcc;

    var decoder = new StreamDecoder(infs, CodecId.MFX_CODEC_AVC, impl, outIOPattern);

    string impltext = QuickSyncStatic.ImplementationString(decoder.lowLevelDecoder.session);
    Console.WriteLine("Implementation = {0}", impltext);

    var formatConverter = new NV12ToXXXXConverter(fourcc, decoder.width, decoder.height);

    int count = 0;
    foreach (var frame in decoder.GetFrames())
    {
        var frameBytes = formatConverter.ConvertFromNV12(frame.Data); // convert to the requested format
        outfs.Write(frameBytes, 0, frameBytes.Length);
        if (++count % 100 == 0)
        {
            Console.Write("Frame {0}\r", count);
        }
    }
    Console.WriteLine("Decoded {0} frames", count);
    Console.WriteLine();

    if (bt != null)
    {
        bt.StopAndReport(count, infs.Position, outfs.Position);
    }

    infs.Close();
    outfs.Close();

    // make sure program always waits for user, except F5-Release run
    if (!UnitTest.IsRunning && Debugger.IsAttached || Environment.GetEnvironmentVariable("VisualStudioVersion") == null)
    {
        Console.WriteLine("done - press a key to exit");
        Console.ReadKey();
    }
}
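// Worked example of the output this sample produces: each decoded NV12 320x180 frame
// is 320 * 180 * 12 / 8 = 86,400 bytes, so the .yuv file grows by roughly 86 KB per frame.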
unsafe static void Main(string[] args)
{
    ConfirmQuickSyncReadiness.HaltIfNotReady();

    Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
    // keep ascending directories until 'Media' folder is found
    for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
    {
        Directory.SetCurrentDirectory("..");
    }
    Directory.SetCurrentDirectory("Media");

    CodecId codecId = CodecId.MFX_CODEC_JPEG;
    FourCC fourcc = FourCC.UYVY; // supported: RGB4, YUY2, NV12 [UYVY through tricks! see below]
    mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO;

    int width, height;
    string inFilename;
    //inFilename = "BigBuckBunny_320x180." + fourcc + ".yuv"; width = 320; height = 180;
    inFilename = "BigBuckBunny_1920x1080." + fourcc + ".yuv"; width = 1920; height = 1080;
    string outFilename = Path.ChangeExtension(inFilename, "enc.jpeg");

    Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
    Console.WriteLine("Input filename: {0}", inFilename);
    Console.WriteLine("Input width: {0}  Input height: {1}", width, height);

    if (!File.Exists(inFilename))
    {
        Console.WriteLine("Input file not found.");
        Console.WriteLine("Please let Decoder1 run to completion to create input file");
        Console.WriteLine("Press any key to exit.");
        Console.ReadKey();
        return;
    }

    Stream infs, outfs;
    BenchmarkTimer bt = null;

#if !ENABLE_BENCHMARK
    infs = File.Open(inFilename, FileMode.Open);
    outfs = File.Open(outFilename, FileMode.Create);
#else // delete this code for most simple example
    // * Benchmark Mode *
    // this block does a couple things:
    //   1. causes the file to be pre-read into memory so we are not timing disk reads.
    //   2. replaces the output stream with a NullStream so nothing gets written to disk.
    //   3. starts the timer for benchmarking
    long maximumMemoryToAllocate = (long)4L * 1024 * 1024 * 1024;
    Console.WriteLine("Pre-reading input");
    infs = new PreReadLargeMemoryStream(File.Open(inFilename, FileMode.Open), maximumMemoryToAllocate);
    Console.WriteLine("Input read");
    outfs = new NullStream();
    bt = new BenchmarkTimer();
    bt.Start();
    int minimumFrames = 4000;
#endif

    Console.WriteLine("Output filename: {0}", Path.GetFileName((outfs as FileStream)?.Name ?? "NO OUTPUT"));
    Console.WriteLine();

    // The encoder cannot encode UYVY, but if you are the only decoder of the JPEG
    // files, you can encode UYVY as YUY2 and everything is good.
    if (fourcc == FourCC.UYVY)
    {
        fourcc = FourCC.YUY2;
    }

    mfxVideoParam mfxEncParams = new mfxVideoParam();
    mfxEncParams.mfx.CodecId = codecId;
    mfxEncParams.mfx.TargetUsage = TargetUsage.MFX_TARGETUSAGE_BALANCED;
    //mfxEncParams.mfx.TargetKbps = 2000;
    //mfxEncParams.mfx.RateControlMethod = RateControlMethod.MFX_RATECONTROL_VBR;
    mfxEncParams.mfx.Quality = 90;
    mfxEncParams.mfx.Interleaved = 1;
    mfxEncParams.mfx.FrameInfo.FrameRateExtN = 30;
    mfxEncParams.mfx.FrameInfo.FrameRateExtD = 1;
    mfxEncParams.mfx.FrameInfo.FourCC = fourcc;
    switch (fourcc)
    {
        case FourCC.NV12:
        case FourCC.YV12:
            mfxEncParams.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV420;
            break;
        case FourCC.YUY2:
            // MFX_CHROMAFORMAT_YUV422V is fatal on Skylake; use the horizontal 4:2:2 variant.
            mfxEncParams.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV422;
            break;
        case FourCC.RGB4:
            mfxEncParams.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV444;
            break;
        default:
            Trace.Assert(false);
            break;
    }
    mfxEncParams.mfx.FrameInfo.PicStruct = PicStruct.MFX_PICSTRUCT_PROGRESSIVE;
    mfxEncParams.mfx.FrameInfo.CropX = 0;
    mfxEncParams.mfx.FrameInfo.CropY = 0;
    mfxEncParams.mfx.FrameInfo.CropW = (ushort)width;
    mfxEncParams.mfx.FrameInfo.CropH = (ushort)height;
    // Width must be a multiple of 16.
    // Height must be a multiple of 16 for frame pictures and a multiple of 32 for field pictures.
    // (This sample then overrides both with 32-alignment below.)
    mfxEncParams.mfx.FrameInfo.Width = QuickSyncStatic.ALIGN16(width);
    mfxEncParams.mfx.FrameInfo.Height = QuickSyncStatic.AlignHeightTo32or16(height, mfxEncParams.mfx.FrameInfo.PicStruct);
    mfxEncParams.IOPattern = IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY; // must be 'in system memory'
    mfxEncParams.AsyncDepth = 4; // pipeline depth; best at 4
    mfxEncParams.mfx.FrameInfo.Width = QuickSyncStatic.ALIGN32(width);
    mfxEncParams.mfx.FrameInfo.Height = QuickSyncStatic.ALIGN32(height);

    BitStreamChunk bsc = new BitStreamChunk(); // where we receive compressed frame data

    ILowLevelEncoder encoder = new LowLevelEncoder(mfxEncParams, impl);

    string impltext = QuickSyncStatic.ImplementationString(encoder.session);
    Console.WriteLine("Implementation = {0}", impltext);

    // not needed for YUY2 encoding
    //var formatConverter = new NV12FromXXXXConverter(fileFourcc, width, height);

    int inputFrameLength = width * height * VideoUtility.GetBitsPerPixel(fourcc) / 8;
    byte[] uncompressed = new byte[inputFrameLength];

    int count = 0;

    // We do not call encoder.LockFrame() and encoder.UnlockFrame() as this example
    // uses system memory.
    while (infs.Read(uncompressed, 0, inputFrameLength) == inputFrameLength)
    {
        int ix = encoder.GetFreeFrameIndex(); // reserves a frame in the encoder's authoritative surface array

        mfxFrameSurface1 *f = (mfxFrameSurface1 *)encoder.Frames[ix];
        switch (fourcc)
        {
            case FourCC.NV12:
                Trace.Assert(f->Data.Pitch == width * 1);
                fixed (byte *aa = &uncompressed[0])
                    FastMemcpyMemmove.memcpy(f->Data.Y, (IntPtr)aa, height * width);
                fixed (byte *aa = &uncompressed[height * width])
                    FastMemcpyMemmove.memcpy(f->Data.UV, (IntPtr)aa, height / 2 * width);
                break;
            case FourCC.YUY2:
                Trace.Assert(f->Data.Pitch == width * 2);
                fixed (byte *aa = &uncompressed[0])
                    FastMemcpyMemmove.memcpy(f->Data.Y, (IntPtr)aa, height * width * 2);
                break;
            default:
                Trace.Assert(false);
                break;
        }

        encoder.EncodeFrame(ix, ref bsc);
        if (bsc.bytesAvailable > 0)
        {
            outfs.Write(bsc.bitstream, 0, bsc.bytesAvailable);
            if (++count % 100 == 0)
            {
                Console.Write("Frame {0}\r", count);
            }
        }

#if ENABLE_BENCHMARK // delete this code for most simple example
        if (infs.Position + inputFrameLength - 1 >= infs.Length)
        {
            infs.Position = 0;
        }
        if (count >= minimumFrames)
        {
            break;
        }
#endif
    }

    while (encoder.Flush(ref bsc))
    {
        if (bsc.bytesAvailable > 0)
        {
            outfs.Write(bsc.bitstream, 0, bsc.bytesAvailable);
            if (++count % 100 == 0)
            {
                Console.Write("Frame {0}\r", count);
            }
        }
    }

    if (bt != null)
    {
        bt.StopAndReport(count, infs.Position, outfs.Position);
    }

    infs.Close();
    outfs.Close();
    encoder.Dispose();

    Console.WriteLine("Encoded {0} frames", count);

    // make sure program always waits for user, except F5-Release run
    if (Debugger.IsAttached || Environment.GetEnvironmentVariable("VisualStudioVersion") == null)
    {
        Console.WriteLine("done - press a key to exit");
        Console.ReadKey();
    }
}
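// Worked example of the frame-size arithmetic above: YUY2 is 16 bits per pixel, so a
// 1920x1080 input frame is 1920 * 1080 * 16 / 8 = 4,147,200 bytes, which is exactly
// the read unit the while-loop expects from the raw .yuv file.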
unsafe static void Main(string[] args)
{
    ConfirmQuickSyncReadiness.HaltIfNotReady();

    if (Environment.OSVersion.Platform != PlatformID.Win32NT)
    {
        throw new Exception("DirectX sample only works on Windows");
    }

    Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
    // keep ascending directories until 'Media' folder is found
    for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
    {
        Directory.SetCurrentDirectory("..");
    }
    Directory.SetCurrentDirectory("Media");

    string fn;
    fn = @"BigBuckBunny_320x180.264";
    //fn = @"C:\w\BigBuckBunny_1920x1080.264";
    //fn = @"C:\w\bbb_sunflower_2160p_30fps_normal_track1.h264";
    var s = File.Open(fn, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);

    // Read a prefix of the bitstream so DecodeHeader can parse the sequence header.
    var buf = new byte[1000];
    int n = s.Read(buf, 0, buf.Length);
    s.Position = 0;
    Trace.Assert(n == buf.Length);

    var decVideoParam = QuickSyncStatic.DecodeHeader(buf, CodecId.MFX_CODEC_AVC);

    mfxVideoParam vppVideoParam = new mfxVideoParam();
    vppVideoParam.vpp.In = decVideoParam.mfx.FrameInfo;
    vppVideoParam.vpp.Out = decVideoParam.mfx.FrameInfo;

    decVideoParam.IOPattern = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    vppVideoParam.IOPattern = IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    vppVideoParam.IOPattern |= useSystemMemoryNotVideoMemory
        ? IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY
        : IOPattern.MFX_IOPATTERN_OUT_VIDEO_MEMORY;

    Console.WriteLine("Bitstream -- width {0} height {1}", decVideoParam.mfx.FrameInfo.CropW, decVideoParam.mfx.FrameInfo.CropH);

    // VPP output size: either the actual bitstream size, or a resized output.
    int vppOutWidth = decVideoParam.mfx.FrameInfo.CropW;
    int vppOutHeight = decVideoParam.mfx.FrameInfo.CropH;
    // resizing setup (comment out these two lines to keep the original size)
    vppOutWidth = decVideoParam.mfx.FrameInfo.CropW / 4;
    vppOutHeight = decVideoParam.mfx.FrameInfo.CropH / 4;
    Console.WriteLine("VPP output -- width {0} height {1}", vppOutWidth, vppOutHeight);

    vppVideoParam.vpp.Out.FourCC = FourCC.RGB4;
    vppVideoParam.vpp.Out.CropW = (ushort)vppOutWidth;
    vppVideoParam.vpp.Out.CropH = (ushort)vppOutHeight;

    var form = new SharpDX.Windows.RenderForm();
    form.Width = vppOutWidth;
    form.Height = vppOutHeight; // use resized HxW
    form.Width = decVideoParam.mfx.FrameInfo.CropW;
    form.Height = decVideoParam.mfx.FrameInfo.CropH; // use original HxW

    Console.WriteLine($"vppOutWidth {vppOutWidth} vppOutHeight {vppOutHeight}");
    Console.WriteLine($"form.Width {form.Width} form.Height {form.Height}");

    var impl = mfxIMPL.MFX_IMPL_VIA_D3D11 | mfxIMPL.MFX_IMPL_HARDWARE;

    var decoder = new StreamDecoder(s, decVideoParam, vppVideoParam, impl);

    if (useSystemMemoryNotVideoMemory)
    {
        device = new SharpDX.Direct3D11.Device(DriverType.Hardware);
    }
    else
    {
        IntPtr dx11device = decoder.lowLevelDecoder.videoAccelerationSupport.DeviceGetHandle(mfxHandleType.MFX_HANDLE_D3D11_DEVICE);
        device = new SharpDX.Direct3D11.Device(dx11device);
    }

    var fps = new FPSCounter();

    var sd = new SwapChainDescription()
    {
        BufferCount = 1,
        ModeDescription = new ModeDescription(vppOutWidth, vppOutHeight, new Rational(60, 1), Format.B8G8R8A8_UNorm),
        IsWindowed = true,
        OutputHandle = form.Handle,
        SampleDescription = new SampleDescription(1, 0),
        SwapEffect = SwapEffect.Discard,
        Usage = Usage.RenderTargetOutput,
        Flags = SwapChainFlags.None
    };

    var a = device.QueryInterface<SharpDX.DXGI.Device>();
    var b = a.Adapter.QueryInterface<Adapter2>();
    var c = b.GetParent<Factory2>();
    swapChain = new SwapChain(c, device, sd);

    var enumerator = decoder.GetFrames().GetEnumerator();

    RenderLoop.Run(form, () =>
    {
        enumerator.MoveNext();
        RenderFrameX(decoder, enumerator.Current);
        fps.PrintFPS();
    });

    swapChain.Dispose();
    device.Dispose();
}
unsafe static void Main(string[] args)
{
    ConfirmQuickSyncReadiness.HaltIfNotReady();

    Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
    // keep ascending directories until 'Media' folder is found
    for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
    {
        Directory.SetCurrentDirectory("..");
    }
    Directory.SetCurrentDirectory("Media");

    CodecId codecId = CodecId.MFX_CODEC_JPEG;
    FourCC fourcc = FourCC.UYVY; // supported: RGB4, YUY2, NV12 [UYVY through tricks! see below]
    mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO;

    string fourccString = fourcc.ToString().Substring(0, 4);

    string inFilename;
    //inFilename = "BigBuckBunny_320x180.UYVY.enc.jpeg";
    inFilename = "BigBuckBunny_1920x1080.UYVY.enc.jpeg";
    //inFilename = "BigBuckBunny_3840x2160.UYVY.enc.jpeg";
    string outFilename = Path.ChangeExtension(inFilename, ".yuv");

    Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
    Console.WriteLine("Input filename: {0}", inFilename);
    Console.WriteLine();

    if (!File.Exists(inFilename))
    {
        Console.WriteLine("Input file not found. Press any key to exit.");
        Console.ReadKey();
        return;
    }

    Stream infs, outfs;
    BenchmarkTimer bt = null;

#if !ENABLE_BENCHMARK
    infs = File.Open(inFilename, FileMode.Open);
    outfs = File.Open(outFilename, FileMode.Create);
#else // delete this code for most simple example
    // * Benchmark Mode *
    // this block does a couple things:
    //   1. causes the file to be pre-read into memory so we are not timing disk reads.
    //   2. replaces the output stream with a NullStream so nothing gets written to disk.
    //   3. starts the timer for benchmarking
    long maximumMemoryToAllocate = (long)4L * 1024 * 1024 * 1024;
    Console.WriteLine("Pre-reading input");
    infs = new PreReadLargeMemoryStream(File.Open(inFilename, FileMode.Open), maximumMemoryToAllocate);
    Console.WriteLine("Input read");
    outfs = new NullStream();
    bt = new BenchmarkTimer();
    bt.Start();
    int minimumFrames = 4000;
#endif

    Console.WriteLine("Output filename: {0}", Path.GetFileName((outfs as FileStream)?.Name ?? "NO OUTPUT"));
    Console.WriteLine();

    var outIOPattern = IOPattern.MFX_IOPATTERN_OUT_SYSTEM_MEMORY;

    // The encoder cannot encode UYVY, but if you are the only decoder of the JPEG
    // files, you can encode UYVY as YUY2 and everything is good.
    if (fourcc == FourCC.UYVY)
    {
        fourcc = FourCC.YUY2;
    }

    mfxVideoParam decoderParameters = QuickSyncStatic.ReadFileHeaderInfo(codecId, impl, infs, outIOPattern);
    decoderParameters.mfx.FrameInfo.FourCC = fourcc;
    AssignChromaFormat(fourcc, ref decoderParameters);

    var decoder = new StreamDecoder(infs, decoderParameters, null, impl);

#if ENABLE_BENCHMARK // delete this code for most simple example
    decoder.benchmarkNeverStopMode = true;
#endif

    string impltext = QuickSyncStatic.ImplementationString(decoder.lowLevelDecoder.session);
    Console.WriteLine("Implementation = {0}", impltext);

    int width = decoderParameters.mfx.FrameInfo.CropW;
    int height = decoderParameters.mfx.FrameInfo.CropH;
    var tmpbuf = new byte[width * height * 2];

    int count = 0;
    foreach (var frame in decoder.GetFrames())
    {
        Trace.Assert(frame.Data.Pitch == width * 2); // YUY2 only
        fixed (byte *aa = &tmpbuf[0])
            FastMemcpyMemmove.memcpy((IntPtr)aa, frame.Data.Y, height * width * 2);
        outfs.Write(tmpbuf, 0, tmpbuf.Length);
        if (++count % 100 == 0)
        {
            Console.Write("Frame {0}\r", count);
        }

#if ENABLE_BENCHMARK // delete this code for most simple example
        if (count > minimumFrames)
        {
            break;
        }
#endif
    }

    if (bt != null)
    {
        bt.StopAndReport(count, infs.Position, outfs.Position);
    }

    infs.Close();
    outfs.Close();

    Console.WriteLine("Decoded {0} frames", count);

    // make sure program always waits for user, except F5-Release run
    if (Debugger.IsAttached || Environment.GetEnvironmentVariable("VisualStudioVersion") == null)
    {
        Console.WriteLine("done - press a key to exit");
        Console.ReadKey();
    }
}
/// <summary>Initializes a new instance of the <see cref="LowLevelEncoderCSharp"/> class.</summary>
/// <param name="mfxEncParams">The encoder parameters.</param>
/// <param name="impl">The implementation.</param>
public LowLevelEncoderCSharp(mfxVideoParam mfxEncParams, mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO)
{
    mfxStatus sts;

    session = new mfxSession();
    var ver = new mfxVersion() { Major = 1, Minor = 3 };
    fixed (mfxSession *s = &session)
        sts = UnsafeNativeMethods.MFXInit(impl, &ver, s);
    QuickSyncStatic.ThrowOnBadStatus(sts, "MFXInit");

    sts = UnsafeNativeMethods.MFXVideoENCODE_Query(session, &mfxEncParams, &mfxEncParams);
    if (sts > 0)
    {
        warnings.Add(nameof(UnsafeNativeMethods.MFXVideoENCODE_Query), sts);
        sts = 0;
    }
    QuickSyncStatic.ThrowOnBadStatus(sts, "encodequery");

    mfxFrameAllocRequest EncRequest;
    sts = UnsafeNativeMethods.MFXVideoENCODE_QueryIOSurf(session, &mfxEncParams, &EncRequest);
    QuickSyncStatic.ThrowOnBadStatus(sts, "queryiosurf");

    EncRequest.NumFrameSuggested = (ushort)(EncRequest.NumFrameSuggested + mfxEncParams.AsyncDepth);

    // WILL_WRITE: only required for Windows DirectX11 to ensure surfaces can be written to by the application.
    EncRequest.Type |= (FrameMemoryType)0x2000;

    UInt16 numSurfaces = EncRequest.NumFrameSuggested;

    // - width and height of the buffer must be aligned to a multiple of 32
    // - the frame surface array keeps pointers to all surface planes and general frame info
    UInt16 width = (UInt16)QuickSyncStatic.ALIGN32(mfxEncParams.mfx.FrameInfo.Width);
    UInt16 height = (UInt16)QuickSyncStatic.ALIGN32(mfxEncParams.mfx.FrameInfo.Height);
    int bitsPerPixel = VideoUtility.GetBitsPerPixel(mfxEncParams.mfx.FrameInfo.FourCC);
    int surfaceSize = width * height * bitsPerPixel / 8;
    IntPtr surfaceBuffers = Marshal.AllocHGlobal(surfaceSize * numSurfaces);

    // Allocate surface headers (mfxFrameSurface1) for the encoder.
    Frames = new mfxFrameSurface1[numSurfaces];
    for (int i = 0; i < numSurfaces; i++)
    {
        Frames[i] = new mfxFrameSurface1();
        Frames[i].Info = mfxEncParams.mfx.FrameInfo;
        switch (mfxEncParams.mfx.FrameInfo.FourCC)
        {
            case FourCC.NV12:
                Frames[i].Data.Y_ptr = (byte *)surfaceBuffers + i * surfaceSize;
                Frames[i].Data.U_ptr = Frames[i].Data.Y_ptr + width * height;
                Frames[i].Data.V_ptr = Frames[i].Data.U_ptr + 1;
                Frames[i].Data.Pitch = width;
                break;
            case FourCC.YUY2:
                Frames[i].Data.Y_ptr = (byte *)surfaceBuffers + i * surfaceSize;
                Frames[i].Data.U_ptr = Frames[i].Data.Y_ptr + 1;
                Frames[i].Data.V_ptr = Frames[i].Data.U_ptr + 3;
                Frames[i].Data.Pitch = (ushort)(width * 2);
                break;
            default: // see sysmem_allocator.cpp for more help
                throw new NotImplementedException();
        }
    }

    frameIntPtrs = new IntPtr[Frames.Length];
    for (int i = 0; i < Frames.Length; i++)
    {
        fixed (mfxFrameSurface1 *a = &Frames[i])
            frameIntPtrs[i] = (IntPtr)a;
    }

    sts = UnsafeNativeMethods.MFXVideoENCODE_Init(session, &mfxEncParams);
    if (sts > 0)
    {
        warnings.Add(nameof(UnsafeNativeMethods.MFXVideoENCODE_Init), sts);
        sts = 0;
    }
    QuickSyncStatic.ThrowOnBadStatus(sts, "encodeinit");

    mfxVideoParam par = new mfxVideoParam();
    sts = UnsafeNativeMethods.MFXVideoENCODE_GetVideoParam(session, &par);
    QuickSyncStatic.ThrowOnBadStatus(sts, "encodegetvideoparam");

    // From mediasdkjpeg-man.pdf:
    //   BufferSizeInKB = 4 + (Width * Height * BytesPerPx + 1023) / 1024;
    // where Width and Height are the width and height of the picture in pixels, and
    // BytesPerPx is the number of bytes per pixel: 1 for a monochrome picture,
    // 1.5 for NV12 and YV12, 2 for YUY2, and 3 for RGB32 (the alpha channel is not encoded).
    if (par.mfx.BufferSizeInKB == 0 && mfxEncParams.mfx.CodecId == CodecId.MFX_CODEC_JPEG)
    {
        par.mfx.BufferSizeInKB =
            (ushort)(4 + (mfxEncParams.mfx.FrameInfo.CropW * mfxEncParams.mfx.FrameInfo.CropH * 3 + 1023) / 1024);
    }

    // Create a task pool to improve asynchronous performance (greater GPU utilization).
    // taskPoolSize is the number of tasks that can be submitted before synchronizing is required.
    int taskPoolSize = mfxEncParams.AsyncDepth;
    pTasks = new Task[taskPoolSize];
    for (int i = 0; i < taskPoolSize; i++)
    {
        // Prepare a Media SDK bitstream buffer for each task.
        pTasks[i].mfxBS.MaxLength = (uint)(par.mfx.BufferSizeInKB * 1000);
        pTasks[i].mfxBS.Data = Marshal.AllocHGlobal((int)pTasks[i].mfxBS.MaxLength);
        Trace.Assert(pTasks[i].mfxBS.Data != IntPtr.Zero);
    }

    pinningHandles.Add(GCHandle.Alloc(pTasks, GCHandleType.Pinned));
    pinningHandles.Add(GCHandle.Alloc(Frames, GCHandleType.Pinned));
}
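// Worked example of the JPEG fallback above for a 1920x1080 RGB-class worst case:
// BufferSizeInKB = 4 + (1920 * 1080 * 3 + 1023) / 1024 = 4 + 6075 = 6079,
// i.e. roughly a 6 MB bitstream buffer per task.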
static void Main(string[] args)
{
    ConfirmQuickSyncReadiness.HaltIfNotReady();

    Environment.CurrentDirectory = AppDomain.CurrentDomain.BaseDirectory;
    // keep ascending directories until 'Media' folder is found
    for (int i = 0; i < 10 && !Directory.Exists("Media"); i++)
    {
        Directory.SetCurrentDirectory("..");
    }
    Directory.SetCurrentDirectory("Media");

    int width, height;
    string inFilename;
    mfxIMPL impl = mfxIMPL.MFX_IMPL_AUTO;

    // supported: RGB3 RGB4 BGR4 BGR3 NV12 I420 IYUV YUY2 UYVY YV12 P411 P422
    FourCC fourcc = FourCC.NV12;

    inFilename = "BigBuckBunny_320x180." + fourcc + ".yuv"; width = 320; height = 180;
    //inFilename = "BigBuckBunny_1920x1080." + fourcc + ".yuv"; width = 1920; height = 1080;
    string outFilename = Path.ChangeExtension(inFilename, "enc.264");

    Console.WriteLine("Working directory: {0}", Environment.CurrentDirectory);
    Console.WriteLine("Input filename: {0}", inFilename);
    Console.WriteLine("Input width: {0}  Input height: {1}", width, height);
    Console.WriteLine();

    if (!File.Exists(inFilename))
    {
        Console.WriteLine("Input file not found.");
        Console.WriteLine("Please let Decoder1 run to completion to create input file");
        Console.WriteLine("Press any key to exit.");
        Console.ReadKey();
        return;
    }

    Stream infs, outfs;
    BenchmarkTimer bt = null;

#if !ENABLE_BENCHMARK
    infs = File.Open(inFilename, FileMode.Open);
    outfs = File.Open(outFilename, FileMode.Create);
#else // delete this code for most simple example
    // * Benchmark Mode *
    // this block does a couple things:
    //   1. causes the file to be pre-read into memory so we are not timing disk reads.
    //   2. replaces the output stream with a NullStream so nothing gets written to disk.
    //   3. starts the timer for benchmarking
    long maximumMemoryToAllocate = (long)4L * 1024 * 1024 * 1024;
    Console.WriteLine("Pre-reading input");
    infs = new PreReadLargeMemoryStream(File.Open(inFilename, FileMode.Open), maximumMemoryToAllocate);
    Console.WriteLine("Input read");
    outfs = new NullStream();
    bt = new BenchmarkTimer();
    bt.Start();
    int minimumFrames = 4000;
#endif

    Console.WriteLine("Output filename: {0}", Path.GetFileName((outfs as FileStream)?.Name ?? "NO OUTPUT"));
    Console.WriteLine();

    mfxVideoParam mfxEncParams = new mfxVideoParam();
    mfxEncParams.mfx.CodecId = CodecId.MFX_CODEC_AVC;
    mfxEncParams.mfx.TargetUsage = TargetUsage.MFX_TARGETUSAGE_BALANCED;
    mfxEncParams.mfx.TargetKbps = 2000;
    mfxEncParams.mfx.RateControlMethod = RateControlMethod.MFX_RATECONTROL_VBR;
    mfxEncParams.mfx.FrameInfo.FrameRateExtN = 30;
    mfxEncParams.mfx.FrameInfo.FrameRateExtD = 1;
    mfxEncParams.mfx.FrameInfo.FourCC = FourCC.NV12;
    mfxEncParams.mfx.FrameInfo.ChromaFormat = ChromaFormat.MFX_CHROMAFORMAT_YUV420;
    mfxEncParams.mfx.FrameInfo.PicStruct = PicStruct.MFX_PICSTRUCT_PROGRESSIVE;
    mfxEncParams.mfx.FrameInfo.CropX = 0;
    mfxEncParams.mfx.FrameInfo.CropY = 0;
    mfxEncParams.mfx.FrameInfo.CropW = (ushort)width;
    mfxEncParams.mfx.FrameInfo.CropH = (ushort)height;
    // Width must be a multiple of 16.
    // Height must be a multiple of 16 for frame pictures and a multiple of 32 for field pictures.
    mfxEncParams.mfx.FrameInfo.Width = QuickSyncStatic.ALIGN16(width);
    mfxEncParams.mfx.FrameInfo.Height = QuickSyncStatic.AlignHeightTo32or16(height, mfxEncParams.mfx.FrameInfo.PicStruct);
    mfxEncParams.IOPattern = IOPattern.MFX_IOPATTERN_IN_SYSTEM_MEMORY; // must be 'in system memory'
    mfxEncParams.AsyncDepth = 4; // pipeline depth; best at 4

    BitStreamChunk bsc = new BitStreamChunk(); // where we receive compressed frame data

    ILowLevelEncoder encoder = new LowLevelEncoder(mfxEncParams, impl);

    string impltext = QuickSyncStatic.ImplementationString(encoder.session);
    Console.WriteLine("Implementation = {0}", impltext);

    var formatConverter = new NV12FromXXXXConverter(fourcc, width, height);
    int inputFrameLength = width * height * VideoUtility.GetBitsPerPixel(fourcc) / 8;
    byte[] uncompressed = new byte[inputFrameLength];

    int count = 0;
    while (infs.Read(uncompressed, 0, inputFrameLength) == inputFrameLength)
    {
        int ix = encoder.GetFreeFrameIndex(); // get index of a free surface

        formatConverter.ConvertToNV12FrameSurface(ref encoder.Frames[ix], uncompressed, 0);

        encoder.EncodeFrame(ix, ref bsc);
        if (bsc.bytesAvailable > 0)
        {
            outfs.Write(bsc.bitstream, 0, bsc.bytesAvailable);
            if (++count % 100 == 0)
            {
                Console.Write("Frame {0}\r", count);
            }
        }

#if ENABLE_BENCHMARK // delete this code for most simple example
        if (infs.Position + inputFrameLength - 1 >= infs.Length)
        {
            infs.Position = 0;
        }
        if (count >= minimumFrames)
        {
            break;
        }
#endif
    }

    while (encoder.Flush(ref bsc))
    {
        if (bsc.bytesAvailable > 0)
        {
            outfs.Write(bsc.bitstream, 0, bsc.bytesAvailable);
            if (++count % 100 == 0)
            {
                Console.Write("Frame {0}\r", count);
            }
        }
    }

    if (bt != null)
    {
        bt.StopAndReport(count, infs.Position, outfs.Position);
    }

    infs.Close();
    outfs.Close();
    encoder.Dispose();

    Console.WriteLine("Encoded {0} frames", count);

    if (Debugger.IsAttached)
    {
        Console.WriteLine("done - press a key to exit");
        Console.ReadKey();
    }
}
public void Reset(mfxVideoParam p)
{
    var sts = NativeLLDecoderUnsafeNativeMethods.NativeDecoder_Reset(h, &p);
    QuickSyncStatic.ThrowOnBadStatus(sts, nameof(NativeLLDecoderUnsafeNativeMethods.NativeDecoder_Reset));
}