Example #1
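        // Parses the CodecPrivate blob of an H.264 track from a Matroska file.
        // For V_MPEG4/ISO/AVC tracks the blob is an AVCDecoderConfigurationRecord, the same structure as the payload of the `avcC` box in mp4.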
        internal VideoParams264(TrackEntry videoTrack)
        {
            // File.WriteAllBytes( @"C:\Temp\2remove\mkv\videoPrivateData.bin", videoTrack.codecPrivate );
            ReadOnlySpan <byte> codecPrivate = videoTrack.codecPrivate.AsSpan();
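            // The record starts with a fixed-size header: version, profile, compatibility flags, level and the count of SPS blobs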
            int          cbHeader            = Marshal.SizeOf <NativeStruct>();
            NativeStruct ns = codecPrivate.Slice(0, cbHeader).cast <NativeStruct>()[0];

            profile = ns.profileCode;
            profileCompatibility = ns.profileCompatibility;
            levelCode            = ns.levelCode;

            int offset = cbHeader;

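            // The SPS NAL units follow the header, each prefixed with its length; copyBlobs extracts them and advances the offset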
            sps = ContainerUtils.copyBlobs(ns.numOfSequenceParameterSets, codecPrivate, ref offset);

            // File.WriteAllBytes( @"C:\Temp\2remove\mkv\sps.bin", sps[ 0 ] );

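            // A single byte with the PPS count follows the SPS array, then the PPS NAL units themselves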
            int ppsCount = codecPrivate[offset++];

            pps = ContainerUtils.copyBlobs(ppsCount, codecPrivate, ref offset);

            ReadOnlySpan <byte> spsBlob = sps[0].AsSpan();

            if (MiscUtils.getNaluType(spsBlob[0]) != eNaluType.SPS)
            {
                throw new ApplicationException("The SPS is invalid, wrong NALU type");
            }
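            // Skip the NALU header byte; the rest of the SPS is parsed at the bit level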
            spsBlob = spsBlob.Slice(1);

            BitReader spsReader = new BitReader(spsBlob);

            parsedSps = new SequenceParameterSet(ref spsReader);

            chromaFormat   = parsedSps.chromaFormat;
            bitDepthLuma   = parsedSps.bitDepthLuma;
            bitDepthChroma = parsedSps.bitDepthChroma;
            m_decodedSize  = new sDecodedVideoSize(parsedSps.decodedSize, parsedSps.cropRectangle, chromaFormat);
        }
Example #2
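        // Parses the `avc1` sample entry of an mp4 file: the mandatory `avcC` decoder configuration box, then any optional boxes such as `btrt`.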
        public AVC1SampleEntry(Mp4Reader reader, int bytesLeft) :
            base(reader, ref bytesLeft)
        {
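            // Fixed-size part of the AVCDecoderConfigurationRecord, including its containing box header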
            var avcc = reader.readStructure <Structures.AVCDecoderConfigurationRecord>();

            if (avcc.boxType != eAVC1BoxType.avcC)
            {
                throw new NotImplementedException();
            }
            bytesLeft -= decoderConfigSizeof;

            profile = avcc.profileCode;
            profileCompatibility = avcc.profileCompatibility;
            levelCode            = avcc.levelCode;
            naluLengthSize       = checked ((byte)(avcc.lengthSizeMinusOne + 1));

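            // Read the variable-sized remainder of the sample entry onto the stack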
            Span <byte> remainingStuff = stackalloc byte[bytesLeft];

            reader.read(remainingStuff);

            int readOffset = 0;

            sps = ContainerUtils.copyBlobs(avcc.numOfSequenceParameterSets, remainingStuff, ref readOffset);

            if (null == sps)
            {
                throw new ArgumentException("The file doesn't have an SPS");
            }
            // SpsData spsData = new SpsData( sps[ 0 ] );
            // File.WriteAllBytes( @"C:\Temp\2remove\h264\sps.bin", sps[ 0 ] );

            int ppsCount = remainingStuff[readOffset++];

            pps = ContainerUtils.copyBlobs(ppsCount, remainingStuff, ref readOffset);

            if (null == sps || null == pps)
            {
                throw new NotImplementedException("Vrmac Video only supports mp4 files with out-of-band SPS and PPS blobs, in the `avcC` atom of the file.");
            }
            if (sps.Length > 1 || pps.Length > 1)
            {
                // The video payload may include other PPS-es, these are fine.
                throw new NotImplementedException("Vrmac Video only supports mp4 files with a single out-of-band SPS and PPS for the complete video.");
            }
            if (readOffset >= remainingStuff.Length)
            {
                return;
            }

            remainingStuff = remainingStuff.Slice(readOffset);

            if (readOffset + decoderConfigSizeof < avcc.length)
            {
                // The spec I have says files with profile IDs 100, 110, 122 or 144 have this.
                // The mp4 file I use to test this code has profile 100, but lacks this data.
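                // Layout per ISO/IEC 14496-15: 2 bits of chroma_format, 3 bits of bit_depth_luma_minus8 and bit_depth_chroma_minus8, then a count of parameter set extension blobs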
                chromaFormat   = (eChromaFormat)(remainingStuff[0] & 3);
                bitDepthLuma   = (byte)((remainingStuff[1] & 7) + 8);
                bitDepthChroma = (byte)((remainingStuff[2] & 7) + 8);
                int numPpsEx = remainingStuff[3];
                readOffset = 4;                 // Reset to 4 because the span was sliced above
                ppsExt     = ContainerUtils.copyBlobs(numPpsEx, remainingStuff, ref readOffset);

                remainingStuff = remainingStuff.Slice(readOffset);
            }
            else
            {
                // https://en.wikipedia.org/wiki/Advanced_Video_Coding#Feature_support_in_particular_profiles
                chromaFormat   = eChromaFormat.c420;
                bitDepthLuma   = 8;
                bitDepthChroma = 8;
            }

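            // Anything left is a sequence of plain mp4 boxes: 32-bit big-endian size, 32-bit FourCC type, payload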
            while (!remainingStuff.IsEmpty)
            {
                int          size = BitConverter.ToInt32(remainingStuff).endian();
                eAVC1BoxType code = (eAVC1BoxType)BitConverter.ToUInt32(remainingStuff.Slice(4));
                switch (code)
                {
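                // MPEG-4 bit rate box: decoding buffer size, maximum and average bit rate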
                case eAVC1BoxType.btrt:
                    bitRate           = new MPEG4BitRateBox(remainingStuff);
                    m_maxBytesInFrame = bitRate.decodingBufferSize;
                    break;
                }
                remainingStuff = remainingStuff.Slice(size);
            }
        }