Example No. 1
0
 /// <summary>
 /// Reads the SBR (Spectral Band Replication) payload for this element,
 /// lazily creating the SBR decoder on first use.
 /// NOTE: the payload parsing itself is not implemented yet.
 /// </summary>
 void decodeSBR(BitStream inStream, SampleFrequency sf, int count, bool stereo, bool crc, bool downSampled, bool smallFrames)
 {
     // Lazy one-time allocation; a CPE element implies a stereo SBR instance.
     sbr = sbr ?? new SBR(smallFrames, elementInstanceTag == ELEMENT_CPE, sf, downSampled);
     throw new NotImplementedException();
     //sbr.decode(inStream, count);
 }
        //private void decodeCCE(BitStream in) throws AACException {
        //if(curCCE==MAX_ELEMENTS) throw new AACException("too much CCE elements");
        //if(cces[curCCE]==null) cces[curCCE] = new CCE(config.getFrameLength());
        //cces[curCCE].decode(in, config);
        //curCCE++;
        //}

        //private void decodeDSE(BitStream in) throws AACException {
        //if(curDSE==MAX_ELEMENTS) throw new AACException("too much CCE elements");
        //if(dses[curDSE]==null) dses[curDSE] = new DSE();
        //dses[curDSE].decode(in);
        //curDSE++;
        //}

        //private void decodePCE(BitStream in) throws AACException {
        //pce.decode(in);
        //config.setProfile(pce.getProfile());
        //config.setSampleFrequency(pce.getSampleFrequency());
        //config.setChannelConfiguration(ChannelConfiguration.forInt(pce.getChannelCount()));
        //}

        //private void decodeFIL(BitStream in, Element prev) throws AACException {
        //if(curFIL==MAX_ELEMENTS) throw new AACException("too much FIL elements");
        //if(fils[curFIL]==null) fils[curFIL] = new FIL(config.isSBRDownSampled());
        //fils[curFIL].decode(in, prev, config.getSampleFrequency(), config.isSBREnabled(), config.isSmallFrameUsed());
        //curFIL++;

        //if(prev!=null&&prev.isSBRPresent()) {
        //sbrPresent = true;
        //if(!psPresent&&prev.getSBR().isPSUsed()) psPresent = true;
        //}
        //}

        /// <summary>
        /// Runs the per-frame synthesis stage: (re)allocates the per-channel
        /// output buffers when their shape changes, then processes each decoded
        /// syntactic element in order until all output channels are filled.
        /// </summary>
        /// <param name="filterBank">the filter bank used for the synthesis transform</param>
        public void process(FilterBank filterBank)
        {
            Profile profile = config.getProfile();
            SampleFrequency sf = config.getSampleFrequency();
            //ChannelConfiguration channels = config.getChannelConfiguration();

            // Channel count comes from the channel configuration; parametric
            // stereo (PS) turns one coded channel into two output channels.
            int chs = (int)config.getChannelConfiguration();
            if (chs == 1 && psPresent)
            {
                chs++;
            }

            // SBR doubles the number of output samples per channel.
            int mult = sbrPresent ? 2 : 1;

            // Only reallocate the buffers when count or length changed.
            if (data == null || chs != data.Length || (mult * config.getFrameLength()) != data[0].Length)
            {
                data = Enumerable.Range(0, chs).Select(_ => new float[mult * config.getFrameLength()]).ToArray();
            }

            int channel = 0;
            for (int i = 0; i < elements.Length && channel < chs; i++)
            {
                Element e = elements[i];
                if (e == null)
                {
                    continue;
                }

                if (e is SCE_LFE)
                {
                    SCE_LFE scelfe = (SCE_LFE)e;
                    throw new NotImplementedException();
                    // channel += processSingle(scelfe, filterBank, channel, profile, sf);
                }
                else if (e is CPE)
                {
                    CPE cpe = (CPE)e;
                    processPair(cpe, filterBank, channel, profile, sf);
                    channel += 2;
                }
                else if (e is CCE)
                {
                    // applies inverse quantization and saves the result in the CCE
                    throw new NotImplementedException();
                    // ((CCE)e).process();
                    channel++;
                }
            }
        }
        /// <summary>
        /// Parses the prediction data for this channel stream, dispatching on
        /// the decoder profile. MAIN (intra-channel) and LTP (long-term)
        /// prediction payloads are not implemented yet; any other profile is
        /// rejected with an AACException.
        /// </summary>
        void readPredictionData(BitStream inStream, Profile profile, SampleFrequency sf, bool commonWindow)
        {
            var type = profile.type;

            if (type == Profile.ProfileType.AAC_MAIN)
            {
                // Intra-channel prediction (ICPrediction) payload.
                throw new NotImplementedException();
                //if (icPredict == null) icPredict = new ICPrediction();
                //icPredict.decode(inStream, maxSFB, sf);
            }
            else if (type == Profile.ProfileType.AAC_LTP)
            {
                // Long-term prediction payload (one or, with a common window,
                // two LTP data sets).
                throw new NotImplementedException();
                //if (ltpData1Present = inStream.readBool())
                //{
                //  if (ltPredict1 == null) ltPredict1 = new LTPrediction(frameLength);
                //  ltPredict1.decode(inStream, this, profile);
                //}
                //if (commonWindow)
                //{
                //  if (ltpData2Present = inStream.readBool())
                //  {
                //    if (ltPredict2 == null) ltPredict2 = new LTPrediction(frameLength);
                //    ltPredict2.decode(inStream, this, profile);
                //  }
                //}
            }
            else if (type == Profile.ProfileType.ER_AAC_LTP)
            {
                // Error-resilient LTP: only read when the window is not common.
                throw new NotImplementedException();
                //if (!commonWindow)
                //{
                //  if (ltpData1Present = inStream.readBool())
                //  {
                //    if (ltPredict1 == null) ltPredict1 = new LTPrediction(frameLength);
                //    ltPredict1.decode(inStream, this, profile);
                //  }
                //}
            }
            else
            {
                throw new AACException("unexpected profile for LTP: " + profile);
            }
        }
        /// <summary>
        /// Applies long-term prediction to the spectral data of a long-window
        /// frame. Short (eight-short) frames are skipped entirely.
        /// Currently a stub: the code after the predicted-signal buffer is
        /// filled is unreachable because of the NotImplementedException.
        /// </summary>
        /// <param name="ics">the channel stream being processed</param>
        /// <param name="data">spectral data the prediction is added into</param>
        /// <param name="filterBank">filter bank used for the LTP transform (currently unused — stub)</param>
        /// <param name="sf">sample frequency (currently unused — stub)</param>
        public void process(ICStream ics, float[] data, FilterBank filterBank, SampleFrequency sf)
        {
            var info = ics.getInfo();

            if (!info.isEightShortFrame())
            {
                // Two frames' worth of time samples (current + overlap).
                int samples = frameLength << 1;
                var inf     = new float[2048];
                var outf    = new float[2048];

                // Build the predicted time signal from past output delayed by
                // `lag` and scaled by the quantized predictor gain.
                // NOTE(review): assumes `states` holds at least `2*samples` of
                // past samples so `samples + i - lag` stays in range — confirm.
                for (int i = 0; i < samples; i++)
                {
                    inf[i] = states[samples + i - lag] * CODEBOOK[coef];
                }

                // Everything below is dead code until the LTP filter-bank pass
                // is implemented.
                throw new NotImplementedException();

                //filterBank.processLTP(info.getWindowSequence(), info.getWindowShape(ICSInfo.CURRENT), info.getWindowShape(ICSInfo.PREVIOUS), inf, outf);

                //if (ics.isTNSDataPresent()) ics.getTNS().process(ics, outf, sf, true);

                // Add the predicted spectrum into `data` for every scalefactor
                // band where LTP is enabled, clamped to the SWB offset maximum.
                var swbOffsets   = info.getSWBOffsets();
                int swbOffsetMax = info.getSWBOffsetMax();
                for (int sfb = 0; sfb < lastBand; sfb++)
                {
                    if (longUsed[sfb])
                    {
                        int low  = swbOffsets[sfb];
                        int high = Math.Min(swbOffsets[sfb + 1], swbOffsetMax);

                        for (int bin = low; bin < high; bin++)
                        {
                            data[bin] += outf[bin];
                        }
                    }
                }
            }
        }
Example No. 5
0
        /// <summary>
        /// Creates an SBR decoder, initializing the header fields to their
        /// defaults (used until a bitstream header arrives, and forcing an SBR
        /// reset) and building the QMF analysis/synthesis filter banks for the
        /// requested channel and output mode.
        /// </summary>
        public SBR(bool smallFrames, bool stereo, SampleFrequency sample_rate, bool downSampledSBR)
        {
            this.downSampledSBR = downSampledSBR;
            this.stereo = stereo;
            this.sample_rate = sample_rate;

            // Default header values.
            bs_freq_scale = 2;
            bs_alter_scale = true;
            bs_noise_bands = 2;
            bs_limiter_bands = 2;
            bs_limiter_gains = 2;
            bs_interpol_freq = true;
            bs_smoothing_mode = true;
            bs_start_freq = 5;
            bs_amp_res = true;
            bs_samplerate_mode = 1;
            prevEnvIsShort[0] = -1;
            prevEnvIsShort[1] = -1;
            header_count = 0;
            Reset = true;

            tHFGen = T_HFGEN;
            tHFAdj = T_HFADJ;

            bsco = 0;
            bsco_prev = 0;
            M_prev = 0;

            /* force sbr reset */
            bs_start_freq_prev = -1;

            // 960-sample ("small") frames use fewer time slots per frame.
            numTimeSlots = smallFrames ? NO_TIME_SLOTS_960 : NO_TIME_SLOTS;
            numTimeSlotsRate = RATE * numTimeSlots;

            GQ_ringbuf_index[0] = 0;
            GQ_ringbuf_index[1] = 0;

            // QMF banks: analysis always uses 32 bands; synthesis uses 64
            // unless the SBR output is downsampled.
            int synthBands = downSampledSBR ? 32 : 64;
            qmfa[0] = new AnalysisFilterbank(32);
            qmfs[0] = new SynthesisFilterbank(synthBands);
            if (stereo)
            {
                qmfa[1] = new AnalysisFilterbank(32);
                qmfs[1] = new SynthesisFilterbank(synthBands);
            }
            else
            {
                qmfs[1] = null;
            }
        }
        //private int processSingle(SCE_LFE scelfe, FilterBank filterBank, int channel, Profile profile, SampleFrequency sf) throws AACException {
        //ICStream ics = scelfe.getICStream();
        //ICSInfo info = ics.getInfo();
        //LTPrediction ltp = info.getLTPrediction1();
        //int elementID = scelfe.getElementInstanceTag();

        ////inverse quantization
        //float[] iqData = ics.getInvQuantData();

        ////prediction
        //if(profile.equals(Profile.AAC_MAIN)&&info.isICPredictionPresent()) info.getICPrediction().process(ics, iqData, sf);
        //if(LTPrediction.isLTPProfile(profile)&&info.isLTPrediction1Present()) ltp.process(ics, iqData, filterBank, sf);

        ////dependent coupling
        //processDependentCoupling(false, elementID, CCE.BEFORE_TNS, iqData, null);

        ////TNS
        //if(ics.isTNSDataPresent()) ics.getTNS().process(ics, iqData, sf, false);

        ////dependent coupling
        //processDependentCoupling(false, elementID, CCE.AFTER_TNS, iqData, null);

        ////filterbank
        //filterBank.process(info.getWindowSequence(), info.getWindowShape(ICSInfo.CURRENT), info.getWindowShape(ICSInfo.PREVIOUS), iqData, data[channel], channel);

        //if(LTPrediction.isLTPProfile(profile)) ltp.updateState(data[channel], filterBank.getOverlap(channel), profile);

        ////dependent coupling
        //processIndependentCoupling(false, elementID, data[channel], null);

        ////gain control
        //if(ics.isGainControlPresent()) ics.getGainControl().process(iqData, info.getWindowShape(ICSInfo.CURRENT), info.getWindowShape(ICSInfo.PREVIOUS), info.getWindowSequence());

        ////SBR
        //int chs = 1;
        //if(sbrPresent&&config.isSBREnabled()) {
        //if(data[channel].Length==config.getFrameLength()) LOGGER.log(Level.WARNING, "SBR data present, but buffer has normal size!");
        //SBR sbr = scelfe.getSBR();
        //if(sbr.isPSUsed()) {
        //chs = 2;
        //scelfe.getSBR().process(data[channel], data[channel+1], false);
        //}
        //else scelfe.getSBR().process(data[channel], false);
        //}
        //return chs;
        //}

        /// <summary>
        /// Processes one channel-pair element (CPE) into two output channels.
        /// The pipeline order is fixed by the AAC decoding chain: MS stereo →
        /// main prediction → IS stereo → LTP → dependent coupling → TNS →
        /// dependent coupling → filter bank → LTP state update → independent
        /// coupling → gain control → SBR.
        /// </summary>
        /// <param name="cpe">the channel-pair element to decode</param>
        /// <param name="filterBank">filter bank for the synthesis transform</param>
        /// <param name="channel">index of the first (left) output channel; the right channel is channel+1</param>
        /// <param name="profile">active decoder profile</param>
        /// <param name="sf">active sample frequency</param>
        void processPair(CPE cpe, FilterBank filterBank, int channel, Profile profile, SampleFrequency sf)
        {
            var ics1      = cpe.getLeftChannel();
            var ics2      = cpe.getRightChannel();
            var info1     = ics1.getInfo();
            var info2     = ics2.getInfo();
            var ltp1      = info1.getLTPrediction1();
            // With a common window the right channel's LTP data lives in the
            // left channel's info as "prediction 2".
            var ltp2      = cpe.isCommonWindow() ? info1.getLTPrediction2() : info2.getLTPrediction1();
            int elementID = cpe.getElementInstanceTag();

            // inverse quantization
            var iqData1 = ics1.getInvQuantData();
            var iqData2 = ics2.getInvQuantData();

            // MS (mid/side) stereo, only valid with a common window
            if (cpe.isCommonWindow() && cpe.isMSMaskPresent())
            {
                MS.process(cpe, iqData1, iqData2);
            }
            // main profile intra-channel prediction (not implemented yet)
            if (profile.type == Profile.ProfileType.AAC_MAIN)
            {
                if (info1.isICPredictionPresent())
                {
                    throw new NotImplementedException();
                    // info1.getICPrediction().process(ics1, iqData1, sf);
                }
                if (info2.isICPredictionPresent())
                {
                    throw new NotImplementedException();
                    // info2.getICPrediction().process(ics2, iqData2, sf);
                }
            }
            // IS (intensity stereo)
            IS.process(cpe, iqData1, iqData2);

            // LTP (long-term prediction)
            // NOTE(review): ltp1/ltp2 are assumed non-null whenever the
            // corresponding *Present() flag is set — confirm in ICSInfo.
            if (LTPrediction.isLTPProfile(profile))
            {
                if (info1.isLTPrediction1Present())
                {
                    ltp1.process(ics1, iqData1, filterBank, sf);
                }
                if (cpe.isCommonWindow() && info1.isLTPrediction2Present())
                {
                    ltp2.process(ics2, iqData2, filterBank, sf);
                }
                else if (info2.isLTPrediction1Present())
                {
                    ltp2.process(ics2, iqData2, filterBank, sf);
                }
            }

            // dependent coupling (before TNS)
            processDependentCoupling(true, elementID, CCE.BEFORE_TNS, iqData1, iqData2);

            // TNS (temporal noise shaping), per channel
            if (ics1.isTNSDataPresent())
            {
                ics1.getTNS().process(ics1, iqData1, sf, false);
            }
            if (ics2.isTNSDataPresent())
            {
                ics2.getTNS().process(ics2, iqData2, sf, false);
            }

            // dependent coupling (after TNS)
            processDependentCoupling(true, elementID, CCE.AFTER_TNS, iqData1, iqData2);

            // filterbank: frequency → time, one call per channel
            filterBank.process(info1.getWindowSequence(), info1.getWindowShape(ICSInfo.CURRENT), info1.getWindowShape(ICSInfo.PREVIOUS), iqData1, data[channel], channel);
            filterBank.process(info2.getWindowSequence(), info2.getWindowShape(ICSInfo.CURRENT), info2.getWindowShape(ICSInfo.PREVIOUS), iqData2, data[channel + 1], channel + 1);

            // feed the freshly produced time samples back into the LTP state
            if (LTPrediction.isLTPProfile(profile))
            {
                ltp1.updateState(data[channel], filterBank.getOverlap(channel), profile);
                ltp2.updateState(data[channel + 1], filterBank.getOverlap(channel + 1), profile);
            }

            // independent coupling (applied on time-domain data)
            processIndependentCoupling(true, elementID, data[channel], data[channel + 1]);

            // gain control
            if (ics1.isGainControlPresent())
            {
                ics1.getGainControl().process(iqData1, info1.getWindowShape(ICSInfo.CURRENT), info1.getWindowShape(ICSInfo.PREVIOUS), info1.getWindowSequence());
            }
            if (ics2.isGainControlPresent())
            {
                ics2.getGainControl().process(iqData2, info2.getWindowShape(ICSInfo.CURRENT), info2.getWindowShape(ICSInfo.PREVIOUS), info2.getWindowSequence());
            }

            //SBR — expects double-length buffers (see the buffer allocation in process())
            if (sbrPresent && config.isSBREnabled())
            {
                if (data[channel].Length == config.getFrameLength())
                {
                    Logger.LogServe("SBR data present, but buffer has normal size!");
                }
                cpe.getSBR().process(data[channel], data[channel + 1], false);
            }
        }
 /// <summary>
 /// Sets the sample frequency stored in this configuration.
 /// </summary>
 /// <param name="sampleFrequency">the new sample frequency</param>
 public void setSampleFrequency(SampleFrequency sampleFrequency)
 {
     this.sampleFrequency = sampleFrequency;
 }
        // --- ======== static builder ========= ---

        /// <summary>
        /// Parses the input array as a DecoderSpecificInfo (AudioSpecificConfig),
        /// as used in MP4 containers.
        /// </summary>
        /// <param name="data">the raw DecoderSpecificInfo bytes</param>
        /// <returns>a DecoderConfig</returns>
        public static DecoderConfig parseMP4DecoderSpecificInfo(byte[] data)
        {
            var inStream = new BitStream(data);

            var config = new DecoderConfig();

            try
            {
                config.profile = readProfile(inStream);

                // 4-bit sampling-frequency index; 0xF escapes to an explicit
                // 24-bit frequency value.
                int sf = inStream.readBits(4);
                config.sampleFrequency      = sf == 0xf ? SampleFrequency.forFrequency(inStream.readBits(24)) : SampleFrequency.forInt(sf);
                config.channelConfiguration = (ChannelConfiguration)inStream.readBits(4);

                switch (config.profile.type)
                {
                case Profile.ProfileType.AAC_SBR:
                {
                    // Explicit SBR signaling: the outer profile is the
                    // extension, then the extension sampling frequency and the
                    // real (core) profile follow.
                    config.extProfile = config.profile;
                    config.sbrPresent = true;
                    sf = inStream.readBits(4);
                    // TODO: 24 bits already read; read again?
                    //if(sf==0xF) config.sampleFrequency = SampleFrequency.forFrequency(inStream.readBits(24));
                    // if sample frequencies are the same: downsample SBR
                    config.downSampledSBR  = config.sampleFrequency.getIndex() == sf;
                    config.sampleFrequency = SampleFrequency.forInt(sf);
                    config.profile         = readProfile(inStream);
                } break;

                case Profile.ProfileType.AAC_MAIN:
                case Profile.ProfileType.AAC_LC:
                case Profile.ProfileType.AAC_SSR:
                case Profile.ProfileType.AAC_LTP:
                case Profile.ProfileType.ER_AAC_LC:
                case Profile.ProfileType.ER_AAC_LTP:
                case Profile.ProfileType.ER_AAC_LD:
                {
                    // GASpecificConfig: frameLengthFlag set means 960-sample
                    // frames, which this decoder does not support yet.
                    config.frameLengthFlag = inStream.readBool();
                    if (config.frameLengthFlag)
                    {
                        throw new AACException("config uses 960-sample frames, not yet supported");                 //TODO: are 960-frames working yet?
                    }
                    config.dependsOnCoreCoder = inStream.readBool();
                    config.coreCoderDelay     = config.dependsOnCoreCoder ? inStream.readBits(14) : 0;
                    config.extensionFlag      = inStream.readBool();

                    if (config.extensionFlag)
                    {
                        // Error-resilient profiles carry three extra
                        // resilience flags before extensionFlag3.
                        if (config.profile.isErrorResilientProfile())
                        {
                            config.sectionDataResilience  = inStream.readBool();
                            config.scalefactorResilience  = inStream.readBool();
                            config.spectralDataResilience = inStream.readBool();
                        }
                        // extensionFlag3
                        inStream.skipBit();
                    }

                    // Channel configuration 0 means the layout comes from an
                    // in-band PCE, which is not implemented yet.
                    if (config.channelConfiguration == ChannelConfiguration.CHANNEL_CONFIG_NONE || !config.channelConfiguration.ToString().StartsWith("CHANNEL_", StringComparison.Ordinal))
                    {
                        // TODO: is this working correct? -> ISO 14496-3 part 1: 1.A.4.3
                        inStream.skipBits(3); //PCE
                        // NOTE(review): skipping 3 bits here before the PCE —
                        // confirm against the spec once PCE parsing exists.
                        throw new NotImplementedException();
                        //PCE pce = new PCE();
                        //pce.decode(inStream);
                        //config.profile = pce.getProfile();
                        //config.sampleFrequency = pce.getSampleFrequency();
                        //config.channelConfiguration = ChannelConfiguration.forInt(pce.getChannelCount());
                    }

                    // Trailing bits may hold an implicit SBR sync extension.
                    if (inStream.getBitsLeft() > 10)
                    {
                        readSyncExtension(inStream, config);
                    }
                } break;

                default: throw new AACException("profile not supported: " + config.profile.type);
                }
                return(config);
            }
            finally
            {
                // Always release the bitstream, even on parse errors.
                inStream.destroy();
            }
        }