static void readSyncExtension(BitStream inStream, DecoderConfig config)
        {
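            // syncExtensionType: the value 0x2B7 signals that an extensionAudioObjectType (e.g. SBR) follows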
            int type = inStream.readBits(11);

            switch (type)
            {
            case 0x2B7:
            {
                var profile = new Profile(inStream.readBits(5));

                if (profile.type == Profile.ProfileType.AAC_SBR)
                {
                    config.sbrPresent = inStream.readBool();
                    if (config.sbrPresent)
                    {
                        config.profile = profile;

                        int tmp = inStream.readBits(4); // extensionSamplingFrequencyIndex (15 = escape value, explicit 24-bit frequency)

                        if (tmp == config.sampleFrequency.getIndex())
                        {
                            config.downSampledSBR = true;
                        }
                        if (tmp == 15)
                        {
                            throw new AACException("sample rate specified explicitly, not supported yet!");
                            // tmp = inStream.readBits(24);
                        }
                    }
                }
            } break;
            }
        }
        public SyntacticElements(DecoderConfig config)
        {
            this.config = config;

            pce      = new PCE();
            elements = new Element[4 * MAX_ELEMENTS];
            cces     = new CCE[MAX_ELEMENTS];
            dses     = new DSE[MAX_ELEMENTS];
            fils     = new FIL[MAX_ELEMENTS];

            startNewFrame();
        }
        // --- ========== decoding ========== ---
        public void decode(BitStream inStream, DecoderConfig conf, bool commonWindow)
        {
            var sf = conf.getSampleFrequency();

            if (sf.getIndex() == -1)
            {
                throw new AACException("invalid sample frequency");
            }

            inStream.skipBit(); //reserved
            windowSequence        = (WindowSequence)inStream.readBits(2);
            windowShape[PREVIOUS] = windowShape[CURRENT];
            windowShape[CURRENT]  = inStream.readBit();

            windowGroupCount     = 1;
            windowGroupLength[0] = 1;
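            // for EIGHT_SHORT_SEQUENCE, seven scale_factor_grouping bits merge adjacent short windows into groups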
            if (windowSequence == WindowSequence.EIGHT_SHORT_SEQUENCE)
            {
                maxSFB = inStream.readBits(4);
                int i;
                for (i = 0; i < 7; i++)
                {
                    if (inStream.readBool())
                    {
                        windowGroupLength[windowGroupCount - 1]++;
                    }
                    else
                    {
                        windowGroupCount++;
                        windowGroupLength[windowGroupCount - 1] = 1;
                    }
                }
                windowCount           = 8;
                swbOffsets            = SWB_OFFSET_SHORT_WINDOW[sf.getIndex()];
                swbCount              = SWB_SHORT_WINDOW_COUNT[sf.getIndex()];
                predictionDataPresent = false;
            }
            else
            {
                maxSFB                = inStream.readBits(6);
                windowCount           = 1;
                swbOffsets            = SWB_OFFSET_LONG_WINDOW[sf.getIndex()];
                swbCount              = SWB_LONG_WINDOW_COUNT[sf.getIndex()];
                predictionDataPresent = inStream.readBool();
                if (predictionDataPresent)
                {
                    readPredictionData(inStream, conf.getProfile(), sf, commonWindow);
                }
            }
        }
        /// <summary>
        /// Initializes the decoder with the decoder specific info from an MP4 container.
        ///
        /// After this, MP4 frames can be passed to the
        /// <c>decodeFrame(byte[], SampleBuffer)</c> method to decode them.
        /// </summary>
        /// <param name="decoderSpecificInfo">a byte array containing the decoder specific info from an MP4 container</param>
        public Decoder(byte[] decoderSpecificInfo)
        {
            config = DecoderConfig.parseMP4DecoderSpecificInfo(decoderSpecificInfo);
            if (config == null)
            {
                throw new ArgumentException("illegal MP4 decoder specific info");
            }

            if (!canDecode(config.getProfile()))
            {
                throw new AACException("unsupported profile: " + config.getProfile());
            }

            syntacticElements = new SyntacticElements(config);
            filterBank        = new FilterBank(config.isSmallFrameUsed(), (int)config.getChannelConfiguration());

            Logger.LogInfo(string.Format("profile: {0}", config.getProfile()));
            Logger.LogInfo(string.Format("sf: {0}", config.getSampleFrequency().getFrequency()));
            Logger.LogInfo(string.Format("channels: {0}", config.getChannelConfiguration()));
        }
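        // Usage sketch (illustrative only, not part of this class): build the decoder from the
        // container's decoder specific info, then feed each raw AAC access unit to the
        // decodeFrame(byte[], SampleBuffer) method referenced above.
        //   var decoder = new Decoder(decoderSpecificInfo);
        //   var buffer  = new SampleBuffer();
        //   decoder.decodeFrame(accessUnit, buffer);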
        // --- ========== Example #5 ========== ---
        void decode(BitStream inStream, DecoderConfig conf)
        {
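            // ind_sw_cce_flag contributes bit 1 of the coupling point; num_coupled_elements follows as 3 bits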
            couplingPoint = 2 * inStream.readBit();
            coupledCount  = inStream.readBits(3);
            int gainCount = 0;

            for (int i = 0; i <= coupledCount; i++)
            {
                gainCount++;
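                // cc_target_is_cpe and cc_target_tag_select identify the coupled target element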
                channelPair[i] = inStream.readBool();
                idSelect[i]    = inStream.readBits(4);
                if (channelPair[i])
                {
                    chSelect[i] = inStream.readBits(2);
                    if (chSelect[i] == 3)
                    {
                        gainCount++;
                    }
                }
                else
                {
                    chSelect[i] = 2;
                }
            }
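            // cc_domain completes the coupling point; OR-ing in the right shift folds the value 2 into 3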
            couplingPoint += inStream.readBit();
            couplingPoint |= (couplingPoint >> 1);

            bool   sign  = inStream.readBool();
            double scale = CCE_SCALE[inStream.readBits(2)];

            ics.decode(inStream, false, conf);
            ICSInfo info             = ics.getInfo();
            int     windowGroupCount = info.getWindowGroupCount();
            int     maxSFB           = info.getMaxSFB();

            //TODO:
            int[,] sfbCB = null;//ics.getSectionData().getSfbCB();
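            // gain offsets in the loop below are Huffman-coded with the scalefactor codebook, centred on 60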

            for (int i = 0; i < gainCount; i++)
            {
                int   idx       = 0;
                int   cge       = 1;
                int   xg        = 0;
                float gainCache = 1.0f;
                if (i > 0)
                {
                    cge       = couplingPoint == 2 ? 1 : inStream.readBit();
                    xg        = cge == 0 ? 0 : Huffman.decodeScaleFactor(inStream) - 60;
                    gainCache = (float)Math.Pow(scale, -xg);
                }
                if (couplingPoint == 2)
                {
                    gain[i, 0] = gainCache;
                }
                else
                {
                    for (int g = 0; g < windowGroupCount; g++)
                    {
                        for (int sfb = 0; sfb < maxSFB; sfb++, idx++)
                        {
                            if (sfbCB[g, sfb] != ZERO_HCB)
                            {
                                if (cge == 0)
                                {
                                    int t = Huffman.decodeScaleFactor(inStream) - 60;
                                    if (t != 0)
                                    {
                                        int s = 1;
                                        t = xg += t;
                                        if (!sign)
                                        {
                                            s  -= 2 * (t & 0x1);
                                            t >>= 1;
                                        }
                                        gainCache = (float)(Math.Pow(scale, -t) * s);
                                    }
                                }
                                gain[i, idx] = gainCache;
                            }
                        }
                    }
                }
            }
        }
        // --- ======== static builder ========= ---

        /// <summary>
        /// Parses the input array as a DecoderSpecificInfo, as used in MP4 containers.
        /// </summary>
        /// <param name="data">a byte array containing the DecoderSpecificInfo</param>
        /// <returns>a DecoderConfig</returns>
        public static DecoderConfig parseMP4DecoderSpecificInfo(byte[] data)
        {
            var inStream = new BitStream(data);

            var config = new DecoderConfig();

            try
            {
                config.profile = readProfile(inStream);

                int sf = inStream.readBits(4);
                config.sampleFrequency      = sf == 0xf ? SampleFrequency.forFrequency(inStream.readBits(24)) : SampleFrequency.forInt(sf);
                config.channelConfiguration = (ChannelConfiguration)inStream.readBits(4);

                switch (config.profile.type)
                {
                case Profile.ProfileType.AAC_SBR:
                {
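                    // explicit SBR signalling: the extension sampling frequency and the underlying object type follow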
                    config.extProfile = config.profile;
                    config.sbrPresent = true;
                    sf = inStream.readBits(4);
                    // TODO: 24 bits already read; read again?
                    //if(sf==0xF) config.sampleFrequency = SampleFrequency.forFrequency(inStream.readBits(24));
                    // if sample frequencies are the same: downsample SBR
                    config.downSampledSBR  = config.sampleFrequency.getIndex() == sf;
                    config.sampleFrequency = SampleFrequency.forInt(sf);
                    config.profile         = readProfile(inStream);
                } break;

                case Profile.ProfileType.AAC_MAIN:
                case Profile.ProfileType.AAC_LC:
                case Profile.ProfileType.AAC_SSR:
                case Profile.ProfileType.AAC_LTP:
                case Profile.ProfileType.ER_AAC_LC:
                case Profile.ProfileType.ER_AAC_LTP:
                case Profile.ProfileType.ER_AAC_LD:
                {
                    // ga-specific info:
                    config.frameLengthFlag = inStream.readBool();
                    if (config.frameLengthFlag)
                    {
                        throw new AACException("config uses 960-sample frames, not yet supported");                 //TODO: are 960-frames working yet?
                    }
                    config.dependsOnCoreCoder = inStream.readBool();
                    config.coreCoderDelay     = config.dependsOnCoreCoder ? inStream.readBits(14) : 0;
                    config.extensionFlag      = inStream.readBool();

                    if (config.extensionFlag)
                    {
                        if (config.profile.isErrorResilientProfile())
                        {
                            config.sectionDataResilience  = inStream.readBool();
                            config.scalefactorResilience  = inStream.readBool();
                            config.spectralDataResilience = inStream.readBool();
                        }
                        // extensionFlag3
                        inStream.skipBit();
                    }

                    // channelConfiguration 0 (or any value outside the named enum members) means the channel setup is carried in a PCE
                    if (config.channelConfiguration == ChannelConfiguration.CHANNEL_CONFIG_NONE || !config.channelConfiguration.ToString().StartsWith("CHANNEL_", StringComparison.Ordinal))
                    {
                        // TODO: is this working correct? -> ISO 14496-3 part 1: 1.A.4.3
                        inStream.skipBits(3); //PCE

                        throw new NotImplementedException();
                        //PCE pce = new PCE();
                        //pce.decode(inStream);
                        //config.profile = pce.getProfile();
                        //config.sampleFrequency = pce.getSampleFrequency();
                        //config.channelConfiguration = ChannelConfiguration.forInt(pce.getChannelCount());
                    }

                    if (inStream.getBitsLeft() > 10)
                    {
                        readSyncExtension(inStream, config);
                    }
                } break;

                default: throw new AACException("profile not supported: " + config.profile.type);
                }
                return(config);
            }
            finally
            {
                inStream.destroy();
            }
        }
        // --- ========== Example #7 ========== ---
        public void decode(BitStream inStream, DecoderConfig conf)
        {
            var profile = conf.getProfile();
            var sf      = conf.getSampleFrequency();

            if (sf.getIndex() == -1)
            {
                throw new AACException("invalid sample frequency");
            }

            readElementInstanceTag(inStream);
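            // the common_window flag read next determines whether a single ics_info is shared by both channels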

            commonWindow = inStream.readBool();

            var info = icsL.getInfo();

            if (commonWindow)
            {
                info.decode(inStream, conf, commonWindow);
                icsR.getInfo().setData(info);
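                // ms_mask_present: per-band M/S flags, all bands on, all bands off, or reserved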

                msMask = (MSMask)inStream.readBits(2);
                if (msMask == MSMask.TYPE_USED)
                {
                    int maxSFB           = info.getMaxSFB();
                    int windowGroupCount = info.getWindowGroupCount();

                    for (int idx = 0; idx < windowGroupCount * maxSFB; idx++)
                    {
                        msUsed[idx] = inStream.readBool();
                    }
                }
                else if (msMask == MSMask.TYPE_ALL_1)
                {
                    for (int i = 0; i < msUsed.Length; i++)
                    {
                        msUsed[i] = true;
                    }
                }
                else if (msMask == MSMask.TYPE_ALL_0)
                {
                    for (int i = 0; i < msUsed.Length; i++)
                    {
                        msUsed[i] = false;
                    }
                }
                else
                {
                    throw new AACException("reserved MS mask type used");
                }
            }
            else
            {
                msMask = MSMask.TYPE_ALL_0;
                for (int i = 0; i < msUsed.Length; i++)
                {
                    msUsed[i] = false;
                }
            }

            if (profile.isErrorResilientProfile() && info.isLTPrediction1Present())
            {
                info.ltpData2Present = inStream.readBool();
                if (info.ltpData2Present)
                {
                    info.getLTPrediction2().decode(inStream, info, profile);
                }
            }

            icsL.decode(inStream, commonWindow, conf);
            icsR.decode(inStream, commonWindow, conf);
        }
        // --- ========== Example #8 ========== ---
        // --- ========== decoding ========== ---
        public void decode(BitStream inStream, bool commonWindow, DecoderConfig conf)
        {
            if (conf.isScalefactorResilienceUsed() && rvlc == null)
            {
                rvlc = new RVLC();
            }
            bool er = conf.getProfile().isErrorResilientProfile();
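            // global_gain (read next) is an 8-bit value; the scalefactors are coded differentially against it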

            globalGain = inStream.readBits(8);

            if (!commonWindow)
            {
                info.decode(inStream, conf, commonWindow);
            }

            decodeSectionData(inStream, conf.isSectionDataResilienceUsed());

            //if(conf.isScalefactorResilienceUsed()) rvlc.decode(in, this, scaleFactors);
            /*else*/
            decodeScaleFactors(inStream);

            pulseDataPresent = inStream.readBool();
            if (pulseDataPresent)
            {
                if (info.isEightShortFrame())
                {
                    throw new AACException("pulse data not allowed for short frames");
                }
                Logger.LogInfo("PULSE");
                throw new NotImplementedException();
                //decodePulseData(inStream);
            }

            tnsDataPresent = inStream.readBool();
            if (tnsDataPresent && !er)
            {
                if (tns == null)
                {
                    tns = new TNS();
                }
                tns.decode(inStream, info);
            }

            gainControlPresent = inStream.readBool();
            if (gainControlPresent)
            {
                if (gainControl == null)
                {
                    gainControl = new GainControl(frameLength);
                }
                Logger.LogInfo("GAIN");
                throw new NotImplementedException();
                // gainControl.decode(inStream, info.getWindowSequence());
            }

            //RVLC spectral data
            //if(conf.isScalefactorResilienceUsed()) rvlc.decodeScalefactors(this, in, scaleFactors);

            if (conf.isSpectralDataResilienceUsed())
            {
                // clamp to the maximum lengths permitted by the spec (12288 bits for a channel pair, 6144 otherwise; codewords of at most 49 bits)
                int max = (conf.getChannelConfiguration() == ChannelConfiguration.CHANNEL_CONFIG_STEREO) ? 12288 : 6144;
                reorderedSpectralDataLen = Math.Min(inStream.readBits(14), max);
                longestCodewordLen       = Math.Min(inStream.readBits(6), 49);
                //HCR.decodeReorderedSpectralData(this, in, data, conf.isSectionDataResilienceUsed());
            }
            else
            {
                decodeSpectralData(inStream);
            }
        }