// Example no. 1
        /// <summary>
        /// Uploads a decoded FFmpeg <see cref="AVFrame"/> into Direct3D11 texture(s) on <paramref name="mFrame"/>.
        /// Three paths:
        ///  1. Hardware frames (NV12|P010): GPU-side CopySubresourceRegion from FFmpeg's texture array.
        ///  2. Software-handled 8-bit YUV: planar frames upload as three single-channel textures;
        ///     packed frames (e.g. YUYV) are de-interleaved on the CPU first.
        ///  3. Anything else: sws_scale to RGBA into a single texture.
        /// </summary>
        /// <param name="decoder">Owning decoder (device, texture descriptions, sws context, output buffers).</param>
        /// <param name="mFrame">Target media frame that receives the created texture(s).</param>
        /// <param name="frame">Decoded FFmpeg frame (unsafe pointer, not owned by this method).</param>
        /// <returns>0 on success; -1 if any exception was caught (logged, not rethrown).</returns>
        public static int ProcessVideoFrame(Decoder decoder, MediaFrame mFrame, AVFrame *frame)
        {
            int ret = 0;

            try
            {
                // Hardware Frame (NV12|P010)   | CopySubresourceRegion FFmpeg Texture Array -> Device Texture[1] (NV12|P010) / SRV (RX_RXGX) -> PixelShader (Y_UV)
                if (decoder.hwAccelSuccess)
                {
                    // Marshal the fixed-size data array once; data[0] is the ID3D11Texture2D*,
                    // data[1] is the slice index within FFmpeg's texture array (d3d11va convention).
                    byte*[] hwData = frame->data.ToArray();

                    decoder.textureFFmpeg   = new Texture2D((IntPtr)hwData[0]);
                    decoder.textDesc.Format = decoder.textureFFmpeg.Description.Format;
                    mFrame.textures         = new Texture2D[1];
                    mFrame.textures[0]      = new Texture2D(decoder.decCtx.renderer.device, decoder.textDesc);
                    decoder.decCtx.renderer.device.ImmediateContext.CopySubresourceRegion(decoder.textureFFmpeg, (int)hwData[1], new ResourceRegion(0, 0, 0, mFrame.textures[0].Description.Width, mFrame.textures[0].Description.Height, 1), mFrame.textures[0], 0);

                    return ret;
                }

                // Software Frame (8-bit YUV)   | YUV byte* -> Device Texture[3] (RX) / SRV (RX_RX_RX) -> PixelShader (Y_U_V)
                else if (decoder.info.PixelFormatType == PixelFormatType.Software_Handled)
                {
                    mFrame.textures = new Texture2D[3];

                    // Hoisted: ToArray() marshals the whole fixed array each call; do it once per frame.
                    byte*[] planes    = frame->data.ToArray();
                    int[]   lineSizes = frame->linesize.ToArray();

                    // YUV Planar [Y0 ...] [U0 ...] [V0 ....]
                    if (decoder.info.IsPlanar)
                    {
                        // One R8 texture per plane; plane 0 uses the full-size description,
                        // planes 1/2 the chroma-size description.
                        for (int p = 0; p < 3; p++)
                        {
                            DataBox db = new DataBox();
                            db.DataPointer     = (IntPtr)planes[p];
                            db.RowPitch        = lineSizes[p];
                            mFrame.textures[p] = new Texture2D(decoder.decCtx.renderer.device, p == 0 ? decoder.textDesc : decoder.textDescUV, new DataBox[] { db });
                        }
                    }

                    // YUV Packed ([Y0U0Y1V0] ....) — de-interleave into three planes on the CPU.
                    else
                    {
                        DataStream dsY = null, dsU = null, dsV = null;

                        try
                        {
                            dsY = new DataStream(decoder.textDesc.Width   * decoder.textDesc.Height,   true, true);
                            dsU = new DataStream(decoder.textDescUV.Width * decoder.textDescUV.Height, true, true);
                            dsV = new DataStream(decoder.textDescUV.Width * decoder.textDescUV.Height, true, true);

                            DataBox dbY = new DataBox();
                            DataBox dbU = new DataBox();
                            DataBox dbV = new DataBox();

                            dbY.DataPointer = dsY.DataPointer;
                            dbU.DataPointer = dsU.DataPointer;
                            dbV.DataPointer = dsV.DataPointer;

                            // Destination planes are tightly packed (pitch == width).
                            dbY.RowPitch = decoder.textDesc.Width;
                            dbU.RowPitch = decoder.textDescUV.Width;
                            dbV.RowPitch = decoder.textDescUV.Width;

                            long totalSize = lineSizes[0] * decoder.textDesc.Height;
                            byte *dataPtr  = planes[0];

                            // NOTE(review): start offsets 0/1/3 are hard-coded for a YUYV-style layout;
                            // per-format component offsets live in decoder.info.PixelFormatDesc->comp
                            // if other packed formats ever need handling here.
                            // Counters are long to match totalSize (int counter could overflow/spin).
                            for (long i = 0; i < totalSize; i += decoder.info.Comp0Step)
                                dsY.WriteByte(*(dataPtr + i));

                            for (long i = 1; i < totalSize; i += decoder.info.Comp1Step)
                                dsU.WriteByte(*(dataPtr + i));

                            for (long i = 3; i < totalSize; i += decoder.info.Comp2Step)
                                dsV.WriteByte(*(dataPtr + i));

                            mFrame.textures[0] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDesc,   new DataBox[] { dbY });
                            mFrame.textures[1] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDescUV, new DataBox[] { dbU });
                            mFrame.textures[2] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDescUV, new DataBox[] { dbV });
                        }
                        finally
                        {
                            // Dispose even when texture creation throws (previously leaked on failure,
                            // since the outer catch swallows the exception).
                            Utilities.Dispose(ref dsY); Utilities.Dispose(ref dsU); Utilities.Dispose(ref dsV);
                        }
                    }
                }

                // Software Frame (OTHER/sws_scale) | X byte* -> Sws_Scale RGBA -> Device Texture[1] (RGBA) / SRV (RGBA) -> PixelShader (RGBA)
                else
                {
                    // Lazily build the sws context + RGBA staging buffer on the first frame.
                    if (decoder.swsCtx == null)
                    {
                        decoder.textDesc.Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm;
                        decoder.outData         = new byte_ptrArray4();
                        decoder.outLineSize     = new int_array4();
                        decoder.outBufferSize   = av_image_get_buffer_size(Decoder.VOutPixelFormat, decoder.codecCtx->width, decoder.codecCtx->height, 1);
                        // FreeHGlobal(IntPtr.Zero) is a no-op, so this is safe on the first pass too.
                        Marshal.FreeHGlobal(decoder.outBufferPtr);
                        decoder.outBufferPtr = Marshal.AllocHGlobal(decoder.outBufferSize);
                        av_image_fill_arrays(ref decoder.outData, ref decoder.outLineSize, (byte *)decoder.outBufferPtr, Decoder.VOutPixelFormat, decoder.codecCtx->width, decoder.codecCtx->height, 1);

                        int vSwsOptFlags = decoder.decCtx.cfg.video.SwsHighQuality ? DecoderContext.SCALING_HQ : DecoderContext.SCALING_LQ;
                        decoder.swsCtx = sws_getContext(decoder.codecCtx->coded_width, decoder.codecCtx->coded_height, decoder.codecCtx->pix_fmt, decoder.codecCtx->width, decoder.codecCtx->height, Decoder.VOutPixelFormat, vSwsOptFlags, null, null, null);
                        if (decoder.swsCtx == null)
                        {
                            Log($"[ProcessVideoFrame|RGB] [ERROR-1] Failed to allocate SwsContext"); return ret;
                        }
                    }

                    sws_scale(decoder.swsCtx, frame->data, frame->linesize, 0, frame->height, decoder.outData, decoder.outLineSize);

                    DataBox db = new DataBox();
                    db.DataPointer     = (IntPtr)decoder.outData.ToArray()[0];
                    db.RowPitch        = decoder.outLineSize[0];
                    mFrame.textures    = new Texture2D[1];
                    mFrame.textures[0] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDesc, new DataBox[] { db });
                }

                return ret;
            } catch (Exception e) { ret = -1;  Log("Error[" + (ret).ToString("D4") + "], Func: ProcessVideoFrame(), Msg: " + e.Message + " - " + e.StackTrace); }

            return ret;
        }