Beispiel #1
0
        /// <summary>Set up the CAPTURE (decoded frames) queue for the supplied video size, and start streaming it.</summary>
        public void captureSetup(iVideoTrack videoTrack, int decodedBuffersCount, sDecodedVideoSize decodedSize)
        {
            this.decodedSize = decodedSize;

            // Recreate the pending frames container only when the requested capacity changed.
            if (decodedBuffersCount != pendingFrames.capacity)
            {
                pendingFrames = new PendingFrames(decodedBuffersCount);
            }

            // Set decoded format. Pi4 Linux failed to implement the V4L2 stateful decoder setup workflow,
            // so everything is computed manually here, from the parsed SPS.
            sPixelFormatMP decodedFormat = computeDecodedFormat(ref decodedSize);
            device.setDataFormat(eBufferType.VideoCaptureMPlane, ref decodedFormat);

            // Apparently, Pi4 hardware or drivers is unable to process an S_SELECTION request and crop the video;
            // cropping happens later, while rendering NV12 into RGB. The removed attempt used
            // eSelectionTarget.Compose with eSelectionFlags.LesserOrEqual over decodedSize.cropRect,
            // then compared the rectangle returned by G_SELECTION against the requested one:

            /* sSelection selection = default;
             * selection.type = eBufferType.VideoCaptureMPlane;
             * selection.target = eSelectionTarget.Compose;
             * selection.flags = eSelectionFlags.LesserOrEqual;
             * selection.rect = decodedSize.cropRect;
             * device.file.call( eControlCode.S_SELECTION, ref selection );
             * device.file.call( eControlCode.G_SELECTION, ref selection );
             * CRect selectedRect = selection.rect;
             * if( selectedRect == decodedSize.cropRect )
             *      Logger.logVerbose( "Video cropping: decoded size {0}, cropped to {1}", decodedSize.size, selectedRect );
             * else
             *      Logger.logInfo( "Video cropping: decoded size {0}, asked to crop to {1}, GPU driver replaced with {2}", decodedSize.size, decodedSize.cropRect, selectedRect ); */

            // Re-query the format the driver actually picked, then allocate the decoded buffers.
            decodedFormat = device.getDataFormat(eBufferType.VideoCaptureMPlane);
            decoded       = new DecodedQueue(device, decodedBuffersCount);
            // decoded.exportTextures( renderDev, device, ref decodedFormat );

            // Start streaming of the capture queue
            device.startStreaming(eBufferType.VideoCaptureMPlane);
        }
Beispiel #2
0
        // iVideoTrack implementation: decoded frames are progressive NV12 of the parsed size;
        // color-related fields are filled in by the per-codec videoParams object.
        sPixelFormatMP iVideoTrack.getDecodedFormat()
        {
            sPixelFormatMP format = new sPixelFormatMP()
            {
                size        = videoParams.decodedSize.size,
                pixelFormat = ePixelFormat.NV12,
                field       = eField.Progressive,
            };
            videoParams.setColorAttributes(ref format);
            return format;
        }
Beispiel #3
0
        /// <summary>Export this V4L2 buffer as a DMA buffer, and import it into GLES as a single NV12 texture.</summary>
        public Nv12Texture exportNv12(iGlesRenderDevice gles, VideoDevice device, ref sPixelFormatMP pixelFormat, ref ColorFormat colorFormat)
        {
            // Refresh kernel-side metadata of this buffer before exporting it.
            device.file.call(eControlCode.QUERYBUF, ref buffer);

            sDmaBuffer dmaBuffer = exportOutputBuffer(device, ref pixelFormat);
            ITexture   imported  = gles.importNv12Texture(ref dmaBuffer, ref colorFormat);

            // Logger.logVerbose( "Exported NV12 texture: {0}", dmaBuffer );
            return new Nv12Texture(imported);
        }
Beispiel #4
0
        /// <summary>Export all buffers from V4L2, import them into GLES in Diligent Engine</summary>
        public void exportTextures(IRenderDevice renderDevice, VideoDevice device, ref sPixelFormatMP pixelFormat, ref ColorFormat color)
        {
            iGlesRenderDevice gles = ComLightCast.cast <iGlesRenderDevice>(renderDevice);

            int count = buffers.Length;
            textures = new Nv12Texture[count];
            for (int idx = 0; idx < count; idx++)
            {
                textures[idx] = buffers[idx].exportNv12(gles, device, ref pixelFormat, ref color);
            }
        }
Beispiel #5
0
        /// <summary>Allocate and memory-map the encoded (bitstream) buffers.</summary>
        public EncodedQueue(VideoDevice device, int encodedBuffersCount, ref sPixelFormatMP encodedFormat) :
            base(allocateBuffers(device, encodedBuffersCount), device.file)
        {
            // The single plane of the compressed stream defines the per-buffer capacity.
            bufferCapacity = encodedFormat.getPlaneFormat(0).sizeImage;

            // Create encoded buffers; the EncodedBuffer constructor does the memory mapping.
            for (int idx = 0; idx < buffersCount; idx++)
            {
                buffers[idx] = new EncodedBuffer(device.file, idx);
            }
        }
Beispiel #6
0
        public override sPixelFormatMP getDecodedFormat()
        {
            // Decoded frames: progressive NV12, full-range BT.709.
            return new sPixelFormatMP()
            {
                size             = sizePixels,
                pixelFormat      = ePixelFormat.NV12,
                field            = eField.Progressive,
                colorSpace       = eColorSpace.BT709,
                encoding         = (byte)eYCbCrEncoding.BT709,
                quantization     = eQuantization.FullRange,
                transferFunction = eTransferFunction.BT_709,
            };
        }
Beispiel #7
0
        /// <summary>Build a DMA buffer descriptor for this buffer, ready for GLES import.</summary>
        public sDmaBuffer exportOutputBuffer(VideoDevice device, ref sPixelFormatMP pixelFormat)
        {
            sExportBuffer     exported = device.exportOutputBuffer(bufferIndex);
            sPlanePixelFormat plane0   = pixelFormat.getPlaneFormat(0);

            // Plane 0 geometry drives the stride and total image size.
            return new sDmaBuffer()
            {
                fd          = exported.fd,
                offset      = 0,
                stride      = plane0.bytesPerLine,
                imageSize   = plane0.bytesPerLine * pixelFormat.size.cy,
                sizePixels  = pixelFormat.size,
                bufferIndex = bufferIndex
            };
        }
Beispiel #8
0
        public override sPixelFormatMP getEncodedFormat()
        {
            // Encoded stream: H.264, progressive, limited-range BT.709.
            // Note: sizeImage was once set from bitRate.decodingBufferSize; that code is retired,
            // the caller sets the plane size instead.
            sPixelFormatMP format = new sPixelFormatMP()
            {
                size             = sizePixels,
                pixelFormat      = ePixelFormat.H264,
                field            = eField.Progressive,
                colorSpace       = eColorSpace.BT709,
                encoding         = (byte)eYCbCrEncoding.BT709,
                quantization     = eQuantization.LimitedRange,
                transferFunction = eTransferFunction.BT_709,
            };
            return format;
        }
Beispiel #9
0
        /// <summary>Run the V4L2 stateful-decoder initialization for the given track:
        /// set the OUTPUT format, allocate bitstream buffers, start the OUTPUT queue,
        /// and subscribe to decoder events.</summary>
        public void initialize(iVideoTrack videoTrack, int encodedBuffersCount)
        {
            // dbgPrintEncodedFormats();

            // Determine format of the encoded video
            sPixelFormatMP encodedFormat = videoTrack.getEncodedFormat();

            // The compressed video lives in a single plane; size that plane for this track.
            encodedFormat.numPlanes = 1;
            var planeFormat = new sPlanePixelFormat()
            {
                sizeImage = EncodedQueue.encodedVideoBufferSize(videoTrack)
            };
            encodedFormat.setPlaneFormat(0, planeFormat);

            // V4L2 spec, 4.5.1.5. Initialization:

            // 1. Set the coded format on OUTPUT via VIDIOC_S_FMT()
            var streamFormat = new sStreamDataFormat()
            {
                bufferType = eBufferType.VideoOutputMPlane,
                pix_mp     = encodedFormat
            };
            device.file.call(eControlCode.S_FMT, ref streamFormat);

            // Logger.logVerbose( "eControlCode.S_FMT completed OK for encoded format: {0}", streamFormat.pix_mp );

            // 2. Allocate source (bytestream) buffers via VIDIOC_REQBUFS() on OUTPUT
            encoded = new EncodedQueue(device, encodedBuffersCount, ref encodedFormat);

            // 3. Start streaming on the OUTPUT queue via VIDIOC_STREAMON()
            device.startStreaming(eBufferType.VideoOutputMPlane);

            // Continue queuing/dequeuing bytestream buffers to/from the OUTPUT queue via VIDIOC_QBUF() and VIDIOC_DQBUF().
            // The buffers will be processed and returned to the client in order, until the metadata required
            // to configure the CAPTURE queue is found. This is indicated by the decoder sending
            // a V4L2_EVENT_SOURCE_CHANGE event with changes set to V4L2_EVENT_SRC_CH_RESOLUTION.
            eventSubscription = new EventSubscription(device);

            // Linux kernel on Pi4 appears to be too old and does not implement that part of the spec:
            // the event never arrives, while the encoded buffers are stuck in the Queued state.
            // For this reason, we have to deal with dynamic resolution changes instead :-(
            // WaitForResolution.wait( device, encoded, reader, waitForResolutionTimeout );
        }
Beispiel #10
0
        /// <summary>Compute the CAPTURE queue pixel format for the decoded video; 4:2:0 chroma only.</summary>
        static sPixelFormatMP computeDecodedFormat(ref sDecodedVideoSize decodedSize)
        {
            if (decodedSize.chromaFormat != eChromaFormat.c420)
                throw new NotImplementedException("So far, the library only supports 4:2:0 chroma sampling");

            // Apparently, the hardware decoder of the Pi4 can't crop video. Not a huge deal, will crop while rendering NV12 into RGB.
            // You would expect you need to pass decodedSize.size here, but no, Linux only plays the video when the cropped size is passed.
            // The size of the output buffers actually created by that Linux ain't cropped. Crazy stuff.
            CSize pixels = decodedSize.cropRect.size;

            // Round the stride up to a multiple of 4 bytes, a GLES requirement on Pi4
            int strideBytes = (pixels.cx + 3) & (~3);

            var result = new sPixelFormatMP()
            {
                size             = pixels,
                pixelFormat      = ePixelFormat.NV12,
                field            = eField.Progressive,
                colorSpace       = eColorSpace.BT709,
                numPlanes        = 2,
                encoding         = (byte)eYCbCrEncoding.BT709,
                quantization     = eQuantization.FullRange,
                transferFunction = eTransferFunction.BT_709,
            };

            // NV12: plane 0 is full-resolution luma, plane 1 is interleaved chroma with half the rows.
            result.setPlaneFormat(0, new sPlanePixelFormat()
            {
                sizeImage = pixels.cy * strideBytes, bytesPerLine = strideBytes
            });
            result.setPlaneFormat(1, new sPlanePixelFormat()
            {
                sizeImage = pixels.cy * strideBytes / 2, bytesPerLine = strideBytes
            });
            return result;
        }
Beispiel #11
0
 /// <summary>Fill color-related fields of the pixel format; implemented per codec by derived classes.
 /// Presumably sets color space / YCbCr encoding / quantization / transfer function — confirm against concrete overrides.</summary>
 internal abstract void setColorAttributes(ref sPixelFormatMP pixFormat);
Beispiel #12
0
 // NOTE(review): this track/codec type does not implement color attributes yet;
 // any caller reaching this override gets a NotImplementedException.
 internal override void setColorAttributes(ref sPixelFormatMP pixFormat)
 {
     throw new NotImplementedException();
 }
Beispiel #13
0
        /// <summary>Export this buffer's DMA file descriptor, and import it into GLES as separate luma and chroma textures.</summary>
        public VideoTextures exportTextures(iGlesRenderDevice gles, VideoDevice device, ref sPixelFormatMP pixelFormat)
        {
            // Ask V4L2 to export the capture buffer as a read-only DMA file descriptor.
            sExportBuffer exportRequest = new sExportBuffer()
            {
                type  = eBufferType.VideoCaptureMPlane,
                index = bufferIndex,
                plane = 0,
                flags = eFileFlags.O_RDONLY | eFileFlags.O_CLOEXEC
            };
            device.file.call(eControlCode.EXPBUF, ref exportRequest);

            sPlanePixelFormat plane0 = pixelFormat.getPlaneFormat(0);

            // Luma plane: full resolution, at offset 0 of the exported buffer.
            sDmaBuffer dma = new sDmaBuffer()
            {
                fd          = exportRequest.fd,
                offset      = 0,
                stride      = plane0.bytesPerLine,
                imageSize   = plane0.bytesPerLine * pixelFormat.size.cy,
                sizePixels  = pixelFormat.size,
                bufferIndex = bufferIndex
            };
            ITexture luma = gles.importLumaTexture(ref dma);
            Logger.logVerbose("Exported luma texture: {0}", dma);

            // I asked V4L2 for 2 planes, however QUERYBUF returned a single plane, with the complete NV12 image in it.
            // No big deal, we have EGL_DMA_BUF_PLANE0_OFFSET_EXT for that; that's where the sDmaBuffer.offset field goes.
            dma.offset     = dma.imageSize;
            dma.sizePixels = chromaSize(pixelFormat.size);
            dma.imageSize  = dma.stride * dma.sizePixels.cy;
            ITexture chroma = gles.importChromaTexture(ref dma);
            Logger.logVerbose("Exported chroma texture: {0}", dma);

            return new VideoTextures(luma, chroma);
        }