/// <summary>Construct the h.264 decoder around a V4L2 video device, validating that the
/// hardware can consume h.264 on the input side and produce NV12 on the output side.</summary>
/// <param name="device">The V4L2 device to decode with; also queried for supported frame sizes.</param>
public H264(VideoDevice device)
{
	this.device = device;

	// Pi4 only supports two compressed formats:
	// "H.264", H264
	// "Motion-JPEG", MJPEG
	sImageFormatDescription compressedDesc = device.enumerateFormats(inputBufferType)
		.first(f => f.pixelFormat == inputPixelFormat,
			"h.264 decoder requires a hardware capable of decoding h.264. The provided video device can’t do that.");
	// Logger.logVerbose( "Compressed format: {0}", compressedDesc );
	inputFormat = new sImageFormat(ref compressedDesc);

	// We gonna be using NV12, but generally speaking Pi4 supports following:
	// "Planar YUV 4:2:0", YUV420
	// "Planar YVU 4:2:0", YVU420
	// "Y/CbCr 4:2:0", NV12
	// "Y/CrCb 4:2:0", NV21
	// "16-bit RGB 5-6-5", RGB565
	sImageFormatDescription decodedDesc = device.enumerateFormats(outputBufferType)
		.first(f => f.pixelFormat == outputPixelFormat,
			"h.264 decoder requires a hardware capable of decoding h.264 into NV12. The provided video device can’t do that.");
	outputFormat = new sImageFormat(ref decodedDesc);

	// Query the supported frame-size ranges for both sides of the decoder
	inputSize = SizeSupported.query(device, inputPixelFormat);
	outputSize = SizeSupported.query(device, outputPixelFormat);
}
/// <summary>Snapshot the fields we care about from a V4L2 format-description structure.</summary>
/// <param name="src">Source description, passed by reference to avoid copying the large struct.</param>
internal sImageFormat(ref sImageFormatDescription src)
{
	index = src.index;
	bufferType = src.type;
	pixelFormat = src.pixelFormat;
	description = src.description;
	flags = src.flags;
}
// Called on decoder thread.
// Handles the V4L2 "source change" event: verifies the kernel-decoded crop rectangle against
// the one we parsed from the stream's SPS, then reads back the format the driver auto-selected.
// NOTE(review): the drain/restart sequence mandated by the V4L2 stateful-decoder spec is
// intentionally disabled below (dead code after the early return) — per the author it caused
// an endless loop of resolution-change events.
void iDecoderEvents.onDynamicResolutionChange()
{
	// Ask the driver for the compose rectangle it derived from the bitstream headers
	sSelection selection = default;
	selection.type = eBufferType.VideoCaptureMPlane;
	selection.target = eSelectionTarget.Compose;
	device.file.call(eControlCode.G_SELECTION, ref selection);
	// Logger.logInfo( "selection: {0}", selection );
	// Appears to be correct, i.e. matches what's in the PPS of the video

	// Cross-check against the crop rectangle we parsed ourselves; a mismatch means the
	// kernel and our SPS parser disagree, which we treat as fatal.
	if (selection.rect != decodedSize.cropRect)
	{
		throw new ApplicationException($"Linux failed to decode SPS from the video; SPS says the crop rectangle is { decodedSize.cropRect }, Linux decoded as { selection.rect }");
	}

	// Read back the output format the driver selected for the new resolution
	sStreamDataFormat sdf = new sStreamDataFormat { bufferType = eBufferType.VideoCaptureMPlane };
	device.file.call(eControlCode.G_FMT, ref sdf);
	Logger.logVerbose("Automatically selected format: {0}", sdf);
	decodedPixelFormat = sdf.pix_mp;
	colorFormat = sdf.pix_mp.colorFormat();
	Logger.logInfo("Dynamic resolution change: {0}", colorFormat);
	return;

	// ===== Everything below is deliberately unreachable =====
	// The following code causes endless loop of resolution changes, despite nothing being changed, really
	// state = eDecoderState.DrainRezChange;
	// The setup workflow in that V4L spec is BS, unfortunately :-(

	// Stop the capture side before re-negotiating buffers, as the spec requires
	device.stopStreaming(eBufferType.VideoCaptureMPlane);
	int decodedBuffersCount = decoded.buffersCount;
	decoded.Dispose();
	decoded = null;

	sImageFormatDescription format = device.findOutputFormat();
	Logger.logVerbose("Picked the format \"{0}\", {1}, flags {2}", format.description, format.pixelFormat, format.flags);

	// Destroy the old decoded buffers
	// NOTE(review): rbDecoded.count is left at its default here — presumably 0, which is
	// the REQBUFS convention for freeing all buffers; confirm against sRequestBuffers defaults.
	sRequestBuffers rbDecoded = new sRequestBuffers() { type = eBufferType.VideoCaptureMPlane, memory = eMemory.MemoryMap };
	device.file.call(eControlCode.REQBUFS, ref rbDecoded);

	// Fix a few things there
	sdf.pix_mp.pixelFormat = ePixelFormat.NV12; // This one is actually pre-selected, prolly because of the initial one we set in the captureSetup method
	sdf.pix_mp.quantization = eQuantization.FullRange; // Linux defaults to limited range, not what we want.
	device.file.call(eControlCode.S_FMT, ref sdf);
	// Logger.logVerbose( "Set format: {0}", sdf );

	// Create new set of decoded buffers, same count as before
	decoded = new DecodedQueue(device, decodedBuffersCount);

	// Finally, resume the video
	device.startStreaming(eBufferType.VideoCaptureMPlane);
	decoded.enqueueAll();
}