void GetBitstreamIfAny(ref BitStreamChunk bsc)
{
    mfxStatus sts = 0;
    bsc.bytesAvailable = 0;

    Trace.Assert(pTasks[nFirstSyncTask].syncp.sync_ptr != null);

    // Sync the oldest in-flight task so its bitstream can be copied out
    sts = UnsafeNativeMethods.MFXVideoCORE_SyncOperation(session, pTasks[nFirstSyncTask].syncp, 60000);
    QuickSyncStatic.ThrowOnBadStatus(sts, "syncoper");

    // sts = WriteBitStreamFrame(&pTasks[nFirstSyncTask].mfxBS, fSink);
    // MSDK_BREAK_ON_ERROR(g);

    int n = (int)pTasks[nFirstSyncTask].mfxBS.DataLength;
    if (bsc.bitstream == null || bsc.bitstream.Length < n)
    {
        bsc.bitstream = new byte[pTasks[nFirstSyncTask].mfxBS.MaxLength];
    }
    Trace.Assert(pTasks[nFirstSyncTask].mfxBS.DataOffset == 0);
    Marshal.Copy(pTasks[nFirstSyncTask].mfxBS.Data, bsc.bitstream, 0, n);
    bsc.bytesAvailable = n;

    pTasks[nFirstSyncTask].mfxBS.DataLength = 0;
    pTasks[nFirstSyncTask].syncp.sync_ptr = null;
    nFirstSyncTask = (nFirstSyncTask + 1) % pTasks.Length;
}
/// <summary>
/// The first stage of flushing the internal frame buffers.
/// </summary>
/// <param name="frame">The frame into which valid frames will be placed.</param>
/// <returns>
/// True indicates the decoder needs more data and this flush stage is complete.
/// False indicates a frame may have been placed in <paramref name="frame"/> and this method should be called again.
/// </returns>
public bool Flush1(mfxFrameSurface1 frame)
{
    if (stage == Stage.MemoryAllocated)
    {
        stage++;
    }
    Trace.Assert(stage == Stage.DecodingDone);

    void* foo = (void*)0;
    frame.ptr = (void*)0;

    mfxStatus sts = UnsafeNativeMethods.CLowLevelDecoder_Flush1(handle, &foo);

    if (sts == mfxStatus.MFX_ERR_MORE_SURFACE)  // decoder needs to be called again, it is consuming surfaces (SW mode)
    {
        return false;
    }
    if (sts == mfxStatus.MFX_ERR_MORE_DATA)
    {
        stage++;
        return true;
    }
    if (sts != 0)
    {
        throw new LimeVideoSDKLowLevelException("Flush1 fail", sts);
    }

    frame.ptr = foo;
    return false;
}
#pragma warning restore 169
//#endif

/// <summary>Helper method that throws an exception when a status code reflects an error.</summary>
/// <param name="sts">The status code to check.</param>
/// <param name="msg">The message to include in the exception.</param>
/// <exception cref="QuickSyncException">Thrown when <paramref name="sts"/> is an error status.</exception>
public static void ThrowOnBadStatus(mfxStatus sts, string msg)
{
    if (sts < 0)
    {
        throw new QuickSyncException(msg, sts);
    }
}
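// Typical usage (illustrative sketch only; the surrounding call is just an example drawn from this file):
//
//   mfxStatus sts = UnsafeNativeMethods.MFXVideoCORE_SyncOperation(session, syncp, 60000);
//   QuickSyncStatic.ThrowOnBadStatus(sts, "MFXVideoCORE_SyncOperation");
//
// Only negative status codes throw; MFX_ERR_NONE and warnings (sts > 0) pass through silently.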
/// <summary>
/// Dispose
/// </summary>
/// <param name="disposing"></param>
protected virtual void Dispose(bool disposing)
{
    if (!disposed)
    {
        if (disposing)
        {
            // Free other state (managed objects).
        }

        if (plugin_uid != null && plugin_uid.Length == 16)
        {
            mfxStatus sts = mfxStatus.MFX_ERR_NONE;
            fixed (byte* uid = plugin_uid)
                sts = UnsafeNativeMethods.MFXVideoUSER_UnLoad(session, uid);
            QuickSyncStatic.ThrowOnBadStatus(sts, "MFXVideoUSER_UnLoad");
        }

        foreach (var item in pinningHandles)
        {
            item.Free();
        }

        // Set large fields to null.
        disposed = true;
    }
}
/// <summary>
/// Reset the decoder; useful to discard buffered bitstream data and start over.
/// SPS/PPS FOR AVC WILL BE REQUIRED AFTER RESET!
/// A reset appears to put the IMSDK decoder into a seek mode for SPS/PPS:
/// you won't get any frames back until you give the decoder SPS/PPS.
/// </summary>
public void Reset(mfxVideoParam p)
{
    bitstream.DataLength = 0;
    bitstream.DataOffset = 0;

    mfxStatus sts = UnsafeNativeMethods.MFXVideoDECODE_Reset(session, &p);
    if (sts < 0)
    {
        throw new QuickSyncException("Reset fail", sts);
    }
}
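// Illustrative recovery after a Reset (a sketch; "decoder" and "videoParam" are hypothetical caller-side
// names, and how the SPS/PPS bytes are re-fed depends on the caller's bitstream-feeding path, not shown here):
//
//   decoder.Reset(videoParam);
//   // Re-feed the AVC SPS/PPS NAL units through the normal bitstream path before
//   // expecting any decoded frames back; until then the decoder stays in header-seek mode.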
bool Flush1(ref BitStreamChunk bsc)
{
    bsc.bytesAvailable = 0;

    if (GetBitstreamIfFull(ref bsc))
    {
        return true;
    }

    mfxStatus sts = 0;
    int nTaskIdx = GetFreeTaskIndex(pTasks);  // Find free task
    Trace.Assert((int)mfxStatus.MFX_ERR_NOT_FOUND != nTaskIdx);

    for (;;)
    {
        // Encode a frame asynchronously (returns immediately)
        fixed (mfxBitstream* b = &pTasks[nTaskIdx].mfxBS)
        fixed (mfxSyncPoint* c = &pTasks[nTaskIdx].syncp)
            sts = UnsafeNativeMethods.MFXVideoENCODE_EncodeFrameAsync(session, null, null, b, c);

        if (mfxStatus.MFX_ERR_NONE < sts && !(pTasks[nTaskIdx].syncp.sync_ptr != null))
        {
            // Repeat the call if warning and no output
            if (mfxStatus.MFX_WRN_DEVICE_BUSY == sts)
            {
                Thread.Sleep(1);  // Wait if device is busy, then repeat the same call
            }
        }
        else if (mfxStatus.MFX_ERR_NONE < sts && pTasks[nTaskIdx].syncp.sync_ptr != null)
        {
            sts = mfxStatus.MFX_ERR_NONE;  // Ignore warnings if output is available
            break;
        }
        else
        {
            break;
        }
    }

    // MFX_ERR_MORE_DATA means the encoder has no more buffered frames; other errors are fatal
    //MSDK_IGNORE_MFX_STS(sts, MFX_ERR_MORE_DATA);
    if (sts == mfxStatus.MFX_ERR_MORE_DATA)
    {
        return false;  // no more to flush here
    }

    //MSDK_CHECK_RESULT(sts, MFX_ERR_NONE, sts);
    QuickSyncStatic.ThrowOnBadStatus(sts, "flush1.encodeFrameAsync");
    return true;  // yes, call me again, more to flush
}
public bool DecodeFrame(mfxFrameSurface1 frame)
{
    Trace.Assert(stage == Stage.MemoryAllocated);
    //Console.WriteLine(999);

    // NOTE: the DecodeFrameAsync call below was incomplete in the original source. The missing
    // arguments (work surface, output surface pointer, sync point) and the following SyncOperation
    // are reconstructed to follow the pattern used by the surface-based decoder elsewhere in this
    // library; the work-surface pool (pmfxSurfaces / GetFreeSurfaceIndex) is an assumption about
    // this class's fields.
    mfxFrameSurface1* foo = (mfxFrameSurface1*)0;
    mfxSyncPoint syncp;

    int nIndex = GetFreeSurfaceIndex(pmfxSurfaces);  // Find a free work surface
    QuickSyncStatic.ThrowOnBadStatus((mfxStatus)nIndex, "cannot find free surface");

    mfxStatus sts;
    fixed (mfxFrameSurface1* p1 = &pmfxSurfaces[nIndex])
        sts = UnsafeNativeMethods.MFXVideoDECODE_DecodeFrameAsync(session, &mfxBS, p1, &foo, &syncp);
    //Console.WriteLine(sts);

    if (sts == mfxStatus.MFX_ERR_MORE_SURFACE)  // decoder needs to be called again, it is consuming surfaces (SW mode)
    {
        return false;
    }
    if (sts == mfxStatus.MFX_ERR_MORE_DATA)
    {
        // stage++;
        return true;
    }
    if (sts < 0)
    {
        throw new LimeVideoSDKLowLevelException("DecodeFrame fail", sts);
    }

    if (sts == mfxStatus.MFX_WRN_VIDEO_PARAM_CHANGED)
    {
        frame.WarningVideoParamChanged = true;
    }
    else
    {
        frame.WarningVideoParamChanged = false;
        if (sts > 0)
        {
            Console.WriteLine("warn: " + sts.ToString());
        }
    }

    // Wait until the decoded frame is ready before exposing it (assumed to be required here)
    sts = UnsafeNativeMethods.MFXVideoCORE_SyncOperation(session, syncp, 60000);
    QuickSyncStatic.ThrowOnBadStatus(sts, "syncoper");

    frame.ptr = foo;
    return false;
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="message"></param>
/// <param name="sts"></param>
public unsafe QuickSyncException(sbyte* message, mfxStatus sts)
    : base(sts.ToString() + ":" + new string(message))
{
}

/// <summary>
/// Constructor
/// </summary>
/// <param name="message"></param>
/// <param name="sts"></param>
public QuickSyncException(string message, mfxStatus sts)
    : base(sts.ToString() + ":" + message)
{
}
/// <summary>Encodes a frame.</summary>
/// <param name="frameIndex">Index of the frame to encode.</param>
/// <param name="bitStreamChunk">Output frame's bitstream data, if available</param>
public void EncodeFrame(int frameIndex, ref BitStreamChunk bitStreamChunk)
{
    mfxStatus sts = 0;
    bitStreamChunk.bytesAvailable = 0;

    GetBitstreamIfFull(ref bitStreamChunk);

    // int nEncSurfIdx = 0;
    int nTaskIdx = GetFreeTaskIndex(pTasks);  // Find free task
    Trace.Assert((int)mfxStatus.MFX_ERR_NOT_FOUND != nTaskIdx);

    //int nsource = 0;
    //var buf = new byte[pTasks[0].mfxBS.MaxLength];

    //
    // Stage 1: Main encoding loop
    //
    //if (mfxStatus.MFX_ERR_NONE <= sts || mfxStatus.MFX_ERR_MORE_DATA == sts)
    //{
    // }
    // else
    // {
    //nEncSurfIdx = GetFreeSurfaceIndex(pmfxSurfaces);  // Find free frame surface
    //MSDK_CHECK_ERROR(MFX_ERR_NOT_FOUND, nEncSurfIdx, MFX_ERR_MEMORY_ALLOC);
    //Trace.Assert(nEncSurfIdx != (int)mfxStatus.MFX_ERR_NOT_FOUND);

    // Surface locking required when read/write D3D surfaces
    //sts = mfxAllocator.Lock(mfxAllocator.pthis, pmfxSurfaces[nEncSurfIdx]->Data.MemId, &(pmfxSurfaces[nEncSurfIdx]->Data));
    //MSDK_BREAK_ON_ERROR(sts);

    // sts = LoadRawFrame(pmfxSurfaces[nEncSurfIdx], fSource);
    // MSDK_BREAK_ON_ERROR(sts);

    // In the prototype we copied the frame data from a byte array,
    // but in this class we are passed a prepared frame.
    //int pfs = 320 * 180 * 3 / 2;
    //if (nsource * pfs >= yuv.Length)
    //    break;
    //int stride = pmfxSurfaces[nEncSurfIdx].Data.Pitch;
    //for (int i = 0; i < h; i++)
    //    Marshal.Copy(yuv, nsource * pfs + i * w, pmfxSurfaces[nEncSurfIdx].Data.Y + stride * i, w);
    //for (int i = 0; i < h / 2; i++)
    //    Marshal.Copy(yuv, nsource * pfs + i * w + h * w, pmfxSurfaces[nEncSurfIdx].Data.UV + stride * i, w);

    //sts = mfxAllocator.Unlock(mfxAllocator.pthis, pmfxSurfaces[nEncSurfIdx]->Data.MemId, &(pmfxSurfaces[nEncSurfIdx]->Data));
    //MSDK_BREAK_ON_ERROR(sts);

    // Frames[nEncSurfIdx] = frame;

    for (;;)
    {
        // Encode a frame asynchronously (returns immediately)
        fixed (mfxFrameSurface1* a = &Frames[frameIndex])
        fixed (mfxBitstream* b = &pTasks[nTaskIdx].mfxBS)
        fixed (mfxSyncPoint* c = &pTasks[nTaskIdx].syncp)
            sts = UnsafeNativeMethods.MFXVideoENCODE_EncodeFrameAsync(session, null, a, b, c);

        if (mfxStatus.MFX_ERR_NONE < sts && !(pTasks[nTaskIdx].syncp.sync_ptr != null))
        {
            // Repeat the call if warning and no output
            if (mfxStatus.MFX_WRN_DEVICE_BUSY == sts)
            {
                Thread.Sleep(1);  // Wait if device is busy, then repeat the same call
            }
        }
        else if (mfxStatus.MFX_ERR_NONE < sts && pTasks[nTaskIdx].syncp.sync_ptr != null)
        {
            sts = mfxStatus.MFX_ERR_NONE;  // Ignore warnings if output is available
            break;
        }
        else if (mfxStatus.MFX_ERR_NOT_ENOUGH_BUFFER == sts)
        {
            Trace.Assert(false);  // Allocate more bitstream buffer memory here if needed...
            break;
        }
        else
        {
            break;
        }
    }
    // }

    // MFX_ERR_MORE_DATA means the encoder buffered the input and produced no output yet; this is not an error here
    //MSDK_IGNORE_MFX_STS(sts, MFX_ERR_MORE_DATA);
    if (sts == mfxStatus.MFX_ERR_MORE_DATA)
    {
        sts = mfxStatus.MFX_ERR_NONE;
    }

    //MSDK_CHECK_RESULT(sts, MFX_ERR_NONE, sts);
    QuickSyncStatic.ThrowOnBadStatus(sts, "encodeFrameAsync");
    return;
}
/// <summary>
/// Retrieve buffered decoded frames during the first stage of flushing.
/// </summary>
/// <param name="frame">Receives a pointer to a decoded (and optionally VPP-processed) frame, if one is available.</param>
/// <returns>
/// true: keep calling me
/// false: this phase done
/// </returns>
bool Flush1(mfxFrameSurface1** frame)
{
    mfxStatus sts = 0;
    *frame = (mfxFrameSurface1*)0;

    mfxSyncPoint syncpD, syncpV;
    mfxFrameSurface1* pmfxOutSurface = (mfxFrameSurface1*)0;
    int nIndex = 0;
    int nIndex2 = 0;

    //
    // Stage 2: Retrieve the buffered decoded frames
    //
    //while (MFX_ERR_NONE <= sts || MFX_ERR_MORE_SURFACE == sts)
    {
        if (mfxStatus.MFX_WRN_DEVICE_BUSY == sts)
        {
            Thread.Sleep(1);  // Wait if device is busy, then repeat the same call to DecodeFrameAsync
        }

        nIndex = GetFreeSurfaceIndex(pmfxSurfaces);  // Find free frame surface
        QuickSyncStatic.ThrowOnBadStatus((mfxStatus)nIndex, "cannot find free surface");

        // Decode a frame asynchronously (returns immediately)
        fixed (mfxFrameSurface1* p1 = &pmfxSurfaces[nIndex])
            sts = UnsafeNativeMethods.MFXVideoDECODE_DecodeFrameAsync(session, null, p1, &pmfxOutSurface, &syncpD);

        // Ignore warnings if output is available,
        // if no output and no action required just repeat the DecodeFrameAsync call
        if (mfxStatus.MFX_ERR_NONE < sts && syncpD.sync_ptr != null)
        {
            sts = mfxStatus.MFX_ERR_NONE;
        }

        if (!enableVPP)
        {
            if (mfxStatus.MFX_ERR_NONE == sts)
            {
                sts = UnsafeNativeMethods.MFXVideoCORE_SyncOperation(session, syncpD, 60000);  // Synchronize. Wait until decoded frame is ready
                *frame = pmfxOutSurface;
            }
        }

        if (sts == mfxStatus.MFX_ERR_MORE_SURFACE)  // decoder needs to be called again, it is consuming surfaces (SW mode)
        {
            return true;
        }
        if (sts == mfxStatus.MFX_ERR_MORE_DATA)
        {
            return false;
        }
        if (sts < 0)
        {
            throw new QuickSyncException("Flush1 fail", sts);
        }

        if (enableVPP && sts == mfxStatus.MFX_ERR_NONE)
        {
            nIndex2 = GetFreeSurfaceIndex(pmfxSurfaces2);  // Find free frame surface (chosen before pinning it below)
            QuickSyncStatic.ThrowOnBadStatus((mfxStatus)nIndex2, "cannot find free surface");

            fixed (mfxFrameSurface1* p1 = &pmfxSurfaces2[nIndex2])
            {
                for (;;)
                {
                    // Process a frame asynchronously (returns immediately)
                    sts = UnsafeNativeMethods.MFXVideoVPP_RunFrameVPPAsync(session, pmfxOutSurface, p1, null, &syncpV);
                    //if (sts == MFX_WRN_VIDEO_PARAM_CHANGED)
                    //    ;

                    if (mfxStatus.MFX_ERR_NONE < sts && syncpV.sync_ptr == null)
                    {
                        // repeat the call if warning and no output
                        if (mfxStatus.MFX_WRN_DEVICE_BUSY == sts)
                        {
                            Thread.Sleep(1);  // wait if device is busy
                        }
                    }
                    else if (mfxStatus.MFX_ERR_NONE < sts && syncpV.sync_ptr != null)
                    {
                        sts = mfxStatus.MFX_ERR_NONE;  // ignore warnings if output is available
                        break;
                    }
                    else
                    {
                        break;  // not a warning
                    }

                    // VPP needs more data, let decoder decode another frame as input
                    if (mfxStatus.MFX_ERR_MORE_DATA == sts)
                    {
                        //continue;
                        return false;
                    }
                    else if (mfxStatus.MFX_ERR_MORE_SURFACE == sts)
                    {
                        // Not relevant for the illustrated workload! Therefore not handled.
                        // Relevant for cases when VPP produces more frames at output than consumes at input. E.g. framerate conversion 30 fps -> 60 fps
                        //break;
                        return true;
                    }
                    else if (sts < 0)
                    {
                        throw new QuickSyncException("RunFrameVPPAsync fail", sts);
                    }
                    // MSDK_CHECK_RESULT(sts, MFX_ERR_NONE, sts);
                    //MSDK_BREAK_ON_ERROR(sts);
                }

                if (mfxStatus.MFX_ERR_NONE == sts && syncpV.sync != IntPtr.Zero)
                {
                    sts = UnsafeNativeMethods.MFXVideoCORE_SyncOperation(session, syncpV, 60000);  // Synchronize. Wait until processed frame is ready
                    *frame = p1;
                }
            }
        }
    }

    return true;
    //}
    // MFX_ERR_MORE_DATA means that decoder is done with buffered frames, need to go to VPP buffering loop, exit in case of other errors
    //MSDK_IGNORE_MFX_STS(sts, MFX_ERR_MORE_DATA);
    //MSDK_CHECK_RESULT(sts, MFX_ERR_NONE, sts);
}
/// <summary>
/// Place a decoded frame in 'frame' if one is available.
/// </summary>
/// <param name="frame">Where to put the frame.</param>
/// <returns>
/// true: keep calling me
/// false: this phase done
/// </returns>
bool DecodeFrame(mfxFrameSurface1** frame)
{
    mfxStatus sts = 0;
    *frame = (mfxFrameSurface1*)0;

    mfxSyncPoint syncpD;
    mfxSyncPoint syncpV;
    mfxFrameSurface1* pmfxOutSurface = (mfxFrameSurface1*)0;
    int nIndex = 0;
    int nIndex2 = 0;

    //
    // Stage 1: Main decoding loop
    //
    if (mfxStatus.MFX_ERR_NONE <= sts || mfxStatus.MFX_ERR_MORE_DATA == sts || mfxStatus.MFX_ERR_MORE_SURFACE == sts)
    {
        if (mfxStatus.MFX_WRN_DEVICE_BUSY == sts)
        {
            Thread.Sleep(1);  // Wait if device is busy, then repeat the same call to DecodeFrameAsync
        }

        //if (MFX_ERR_MORE_DATA == sts) {
        //    sts = ReadBitStreamData(&config.mfxBS, fSource);  // Read more data into input bit stream
        //    MSDK_BREAK_ON_ERROR(sts);
        //}

    foo:
        if (mfxStatus.MFX_ERR_MORE_SURFACE == sts || mfxStatus.MFX_ERR_NONE == sts)
        {
            nIndex = GetFreeSurfaceIndex(pmfxSurfaces);  // Find free frame surface
            QuickSyncStatic.ThrowOnBadStatus((mfxStatus)nIndex, "cannot find free surface");
        }

        // Decode a frame asynchronously (returns immediately)
        // - If the input bitstream contains multiple frames, DecodeFrameAsync will start decoding multiple frames and remove them from the bitstream
        // it might have been better to use Marshal.XXX to pin this?
        fixed (mfxFrameSurface1* p1 = &pmfxSurfaces[nIndex])
        fixed (mfxBitstream* p2 = &bitstream)
        {
            sts = UnsafeNativeMethods.MFXVideoDECODE_DecodeFrameAsync(session, p2, p1, &pmfxOutSurface, &syncpD);

            if (!enableVPP && mfxStatus.MFX_ERR_NONE == sts && syncpD.sync != IntPtr.Zero)
            {
                sts = UnsafeNativeMethods.MFXVideoCORE_SyncOperation(session, syncpD, 60000);  // Synchronize. Wait until decoded frame is ready
                *frame = pmfxOutSurface;
            }
        }

        // Decode a frame asynchronously (returns immediately)
        //sts = mfxDEC->DecodeFrameAsync(&config.mfxBS, pmfxSurfaces[nIndex], &pmfxOutSurface, &syncpD);

        //if (sts == MFX_WRN_VIDEO_PARAM_CHANGED)
        //    ;
        // I had a problem where I was getting a lot of these; I suspect that
        // when you get this return code and you sync anyway, it forces more of them.
        // Be sure to test this statement under VMware in software mode:
        // it seems to uniquely happen there that the decoder uses this to ask for more internal surfaces.
        if (sts == mfxStatus.MFX_ERR_MORE_SURFACE)
        {
            goto foo;
        }

        // Ignore warnings if output is available,
        // if no output and no action required just repeat the DecodeFrameAsync call
        if (mfxStatus.MFX_ERR_NONE < sts && syncpD.sync != IntPtr.Zero)
        {
            sts = mfxStatus.MFX_ERR_NONE;
        }
    }

    if (sts == mfxStatus.MFX_ERR_MORE_SURFACE)  // decoder needs to be called again, it is consuming surfaces (SW mode)
    {
        return true;
    }
    if (sts == mfxStatus.MFX_ERR_MORE_DATA)
    {
        return false;
    }
    if (sts < 0)
    {
        throw new QuickSyncException("DecodeFrame fail", sts);
    }

    if (enableVPP && sts == mfxStatus.MFX_ERR_NONE)
    {
        nIndex2 = GetFreeSurfaceIndex(pmfxSurfaces2);  // Find free frame surface (chosen before pinning it below)
        QuickSyncStatic.ThrowOnBadStatus((mfxStatus)nIndex2, "cannot find free surface");

        fixed (mfxFrameSurface1* p1 = &pmfxSurfaces2[nIndex2])
        {
        tryagain:
            // Process a frame asynchronously (returns immediately)
            sts = UnsafeNativeMethods.MFXVideoVPP_RunFrameVPPAsync(session, pmfxOutSurface, p1, null, &syncpV);
            //if (sts == MFX_WRN_VIDEO_PARAM_CHANGED)
            //    ;

            if (mfxStatus.MFX_ERR_NONE < sts && syncpV.sync_ptr == null)
            {
                // repeat the call if warning and no output
                if (mfxStatus.MFX_WRN_DEVICE_BUSY == sts)
                {
                    Thread.Sleep(1);  // wait if device is busy
                    goto tryagain;
                }
            }
            else if (mfxStatus.MFX_ERR_NONE < sts && syncpV.sync_ptr != null)
            {
                sts = mfxStatus.MFX_ERR_NONE;  // ignore warnings if output is available
            }
            else if (mfxStatus.MFX_ERR_MORE_DATA == sts)  // VPP needs more data, let decoder decode another frame as input
            {
                //continue;
                return false;
            }
            else if (mfxStatus.MFX_ERR_MORE_SURFACE == sts)
            {
                // Not relevant for the illustrated workload! Therefore not handled.
                // Relevant for cases when VPP produces more frames at output than consumes at input. E.g. framerate conversion 30 fps -> 60 fps
                //break;
                return true;
            }
            else if (sts < 0)
            {
                throw new QuickSyncException("RunFrameVPPAsync fail", sts);
            }
            // MSDK_CHECK_RESULT(sts, MFX_ERR_NONE, sts);
            //MSDK_BREAK_ON_ERROR(sts);
            else if (mfxStatus.MFX_ERR_NONE == sts && syncpV.sync != IntPtr.Zero)
            {
                sts = UnsafeNativeMethods.MFXVideoCORE_SyncOperation(session, syncpV, 60000);  // Synchronize. Wait until processed frame is ready
                *frame = p1;
                return true;
            }
        }
    }

    return true;
}
/// <summary>Get frames during the final stage of flushing (drain the encoder).</summary>
/// <param name="bitStreamChunk">A single frame</param>
/// <returns>true if you should continue to call this method, false if you must go to the next phase.</returns>
public bool Flush3(ref BitStreamChunk bitStreamChunk)
{
    bitStreamChunk.bytesAvailable = 0;

    mfxStatus sts = mfxStatus.MFX_ERR_NONE;

    //
    // Stage 4: Retrieve the buffered encoded frames
    //
    if (mfxStatus.MFX_ERR_NONE <= sts)
    {
        int nTaskIdx = GetFreeTaskIndex(pTasks, taskPoolSize);  // Find free task
        if ((int)mfxStatus.MFX_ERR_NOT_FOUND == nTaskIdx)
        {
            // No more free tasks, need to sync
            sts = UnsafeNativeMethods.MFXVideoCORE_SyncOperation(session, pTasks[nFirstSyncTask].syncp, 60000);
            QuickSyncStatic.ThrowOnBadStatus(sts, "syncOper");

            if (bitStreamChunk.bitstream == null || bitStreamChunk.bitstream.Length < pTasks[nFirstSyncTask].mfxBS.DataLength)
            {
                bitStreamChunk.bitstream = new byte[pTasks[nFirstSyncTask].mfxBS.DataLength];
            }
            Trace.Assert(pTasks[nFirstSyncTask].mfxBS.DataOffset == 0);
            Marshal.Copy(pTasks[nFirstSyncTask].mfxBS.Data, bitStreamChunk.bitstream, 0, (int)pTasks[nFirstSyncTask].mfxBS.DataLength);
            bitStreamChunk.bytesAvailable = (int)pTasks[nFirstSyncTask].mfxBS.DataLength;

            // WriteBitStreamFrame(pTasks[nFirstSyncTask].mfxBS, outbs);
            //MSDK_BREAK_ON_ERROR(sts);

            pTasks[nFirstSyncTask].syncp.sync = IntPtr.Zero;
            pTasks[nFirstSyncTask].mfxBS.DataLength = 0;
            pTasks[nFirstSyncTask].mfxBS.DataOffset = 0;
            nFirstSyncTask = (nFirstSyncTask + 1) % taskPoolSize;
            return true;
        }
        else
        {
            for (;;)
            {
                // Encode a frame asynchronously (returns immediately)
                //sts = mfxENC.EncodeFrameAsync(NULL, pSurfaces2[nIndex2], &pTasks[nTaskIdx].mfxBS, &pTasks[nTaskIdx].syncp);
                sts = UnsafeNativeMethods.MFXVideoENCODE_EncodeFrameAsync(session, (mfxEncodeCtrl*)0, null, &pTasks[nTaskIdx].mfxBS, &pTasks[nTaskIdx].syncp);

                if (mfxStatus.MFX_ERR_NONE < sts && pTasks[nTaskIdx].syncp.sync == IntPtr.Zero)
                {
                    // repeat the call if warning and no output
                    if (mfxStatus.MFX_WRN_DEVICE_BUSY == sts)
                    {
                        Thread.Sleep(1);  // wait if device is busy
                    }
                }
                else if (mfxStatus.MFX_ERR_NONE < sts && pTasks[nTaskIdx].syncp.sync != IntPtr.Zero)
                {
                    sts = mfxStatus.MFX_ERR_NONE;  // ignore warnings if output is available
                    break;
                }
                else if (mfxStatus.MFX_ERR_NOT_ENOUGH_BUFFER == sts)
                {
                    // Allocate more bitstream buffer memory here if needed...
                    break;
                }
                else
                {
                    if (sts != mfxStatus.MFX_ERR_MORE_DATA && sts != mfxStatus.MFX_ERR_MORE_SURFACE)
                    {
                        QuickSyncStatic.ThrowOnBadStatus(sts, "encodeAsync");
                    }
                    break;
                }
            }
        }
    }

    if (mfxStatus.MFX_ERR_MORE_DATA == sts)
    {
        return false;
    }

    QuickSyncStatic.ThrowOnBadStatus(sts, "enc error");
    return true;
}
/// <summary>Get frames during the 2nd stage of flushing (drain the VPP, then encode).</summary>
/// <param name="bitStreamChunk">A single frame</param>
/// <returns>true if you should continue to call this method, false if you must go to the next stage.</returns>
public bool Flush2(ref BitStreamChunk bitStreamChunk)
{
    mfxSyncPoint syncpV;
    mfxFrameSurface1* pmfxOutSurface = (mfxFrameSurface1*)0;
    int nIndex2 = 0;

    bitStreamChunk.bytesAvailable = 0;

    mfxStatus sts = mfxStatus.MFX_ERR_NONE;

    //
    // Stage 3: Retrieve buffered frames from VPP
    //
    if (mfxStatus.MFX_ERR_NONE <= sts || mfxStatus.MFX_ERR_MORE_DATA == sts || mfxStatus.MFX_ERR_MORE_SURFACE == sts)
    {
        int nTaskIdx = GetFreeTaskIndex(pTasks, taskPoolSize);  // Find free task
        if ((int)mfxStatus.MFX_ERR_NOT_FOUND == nTaskIdx)
        {
            // No more free tasks, need to sync
            sts = UnsafeNativeMethods.MFXVideoCORE_SyncOperation(session, pTasks[nFirstSyncTask].syncp, 60000);
            QuickSyncStatic.ThrowOnBadStatus(sts, "syncOper");

            if (bitStreamChunk.bitstream == null || bitStreamChunk.bitstream.Length < pTasks[nFirstSyncTask].mfxBS.DataLength)
            {
                bitStreamChunk.bitstream = new byte[pTasks[nFirstSyncTask].mfxBS.DataLength];
            }
            Trace.Assert(pTasks[nFirstSyncTask].mfxBS.DataOffset == 0);
            Marshal.Copy(pTasks[nFirstSyncTask].mfxBS.Data, bitStreamChunk.bitstream, 0, (int)pTasks[nFirstSyncTask].mfxBS.DataLength);
            bitStreamChunk.bytesAvailable = (int)pTasks[nFirstSyncTask].mfxBS.DataLength;

            // WriteBitStreamFrame(pTasks[nFirstSyncTask].mfxBS, outbs);
            //MSDK_BREAK_ON_ERROR(sts);

            pTasks[nFirstSyncTask].syncp.sync = IntPtr.Zero;
            pTasks[nFirstSyncTask].mfxBS.DataLength = 0;
            pTasks[nFirstSyncTask].mfxBS.DataOffset = 0;
            nFirstSyncTask = (nFirstSyncTask + 1) % taskPoolSize;
            return true;
        }
        else
        {
            int compositeFrameIndex = 0;

            //morevpp:
            nIndex2 = GetFreeSurfaceIndex(pSurfaces2, nSurfNumVPPEnc);  // Find free frame surface
            Trace.Assert(nIndex2 != (int)mfxStatus.MFX_ERR_NOT_FOUND);

            for (;;)
            {
                var z = pmfxOutSurface;
                z = null;
                // if (compositeFrameIndex == 1)
                //     z = overlay;
                Trace.Assert(compositeFrameIndex <= 1);

                // Process a frame asynchronously (returns immediately)
                sts = UnsafeNativeMethods.MFXVideoVPP_RunFrameVPPAsync(session, z, &pSurfaces2[nIndex2], (mfxExtVppAuxData*)0, &syncpV);  // COMPOSITING

                if (mfxStatus.MFX_ERR_NONE < sts && syncpV.sync == IntPtr.Zero)
                {
                    // repeat the call if warning and no output
                    if (mfxStatus.MFX_WRN_DEVICE_BUSY == sts)
                    {
                        Thread.Sleep(1);  // wait if device is busy
                    }
                }
                else if (mfxStatus.MFX_ERR_NONE < sts && syncpV.sync != IntPtr.Zero)
                {
                    sts = mfxStatus.MFX_ERR_NONE;  // ignore warnings if output is available
                    break;
                }
                else
                {
                    if (sts != mfxStatus.MFX_ERR_MORE_DATA && sts != mfxStatus.MFX_ERR_MORE_SURFACE)
                    {
                        QuickSyncStatic.ThrowOnBadStatus(sts, "vppAsync");
                    }
                    break;  // not a warning
                }
            }

            // VPP needs more data, let decoder decode another frame as input
            if (mfxStatus.MFX_ERR_MORE_DATA == sts)
            {
                return false;
                // compositeFrameIndex++;
                //goto morevpp;
            }
            else if (mfxStatus.MFX_ERR_MORE_SURFACE == sts)
            {
                // Not relevant for the illustrated workload! Therefore not handled.
                // Relevant for cases when VPP produces more frames at output than consumes at input. E.g. framerate conversion 30 fps -> 60 fps
                QuickSyncStatic.ThrowOnBadStatus(sts, "vpp");
            }
            else if (mfxStatus.MFX_ERR_NONE != sts)
            {
                QuickSyncStatic.ThrowOnBadStatus(sts, "vpp");
            }

            for (;;)
            {
                // Encode a frame asynchronously (returns immediately)
                //sts = mfxENC.EncodeFrameAsync(NULL, pSurfaces2[nIndex2], &pTasks[nTaskIdx].mfxBS, &pTasks[nTaskIdx].syncp);
                sts = UnsafeNativeMethods.MFXVideoENCODE_EncodeFrameAsync(session, (mfxEncodeCtrl*)0, &pSurfaces2[nIndex2], &pTasks[nTaskIdx].mfxBS, &pTasks[nTaskIdx].syncp);

                if (mfxStatus.MFX_ERR_NONE < sts && pTasks[nTaskIdx].syncp.sync == IntPtr.Zero)
                {
                    // repeat the call if warning and no output
                    if (mfxStatus.MFX_WRN_DEVICE_BUSY == sts)
                    {
                        Thread.Sleep(1);  // wait if device is busy
                    }
                }
                else if (mfxStatus.MFX_ERR_NONE < sts && pTasks[nTaskIdx].syncp.sync != IntPtr.Zero)
                {
                    sts = mfxStatus.MFX_ERR_NONE;  // ignore warnings if output is available
                    break;
                }
                else if (mfxStatus.MFX_ERR_NOT_ENOUGH_BUFFER == sts)
                {
                    // Allocate more bitstream buffer memory here if needed...
                    break;
                }
                else
                {
                    if (sts != mfxStatus.MFX_ERR_MORE_DATA && sts != mfxStatus.MFX_ERR_MORE_SURFACE)
                    {
                        QuickSyncStatic.ThrowOnBadStatus(sts, "encodeAsync");
                    }
                    break;
                }
            }
        }
    }

    // MFX_ERR_MORE_DATA means the VPP has been drained; go to the next buffering loop, exit in case of other errors
    //MSDK_IGNORE_MFX_STS(sts, MFX_ERR_MORE_DATA);
    if (sts == mfxStatus.MFX_ERR_MORE_DATA)
    {
        return false;
    }

    QuickSyncStatic.ThrowOnBadStatus(sts, "dec or enc or vpp");
    return true;
}
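// Illustrative driver loop for EncodeFrame / Flush2 / Flush3 (a sketch only; "encoder", "frames", and
// "outputStream" are hypothetical caller-side names, and the exact flush ordering is an assumption based
// on the stage comments above, not a verbatim API contract):
//
//   var bsc = new BitStreamChunk();
//   for (int i = 0; i < frames.Length; i++)
//   {
//       encoder.EncodeFrame(i, ref bsc);
//       if (bsc.bytesAvailable > 0)
//           outputStream.Write(bsc.bitstream, 0, bsc.bytesAvailable);
//   }
//   while (encoder.Flush2(ref bsc))      // drain buffered frames from the VPP
//       if (bsc.bytesAvailable > 0)
//           outputStream.Write(bsc.bitstream, 0, bsc.bytesAvailable);
//   while (encoder.Flush3(ref bsc))      // drain buffered frames from the encoder
//       if (bsc.bytesAvailable > 0)
//           outputStream.Write(bsc.bitstream, 0, bsc.bytesAvailable);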