Example #1
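        /* Ported from CELT's unquant_coarse_energy(): decodes the coarse band energies,
         * combining an inter-frame prediction coefficient (coef) with a running
         * per-channel predictor (prev[]) and a Laplace-coded residual per band. */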
        internal static void unquant_coarse_energy(CeltMode m, int start, int end, int[] oldEBands, int intra, EntropyCoder dec, int C, int LM)
        {
            byte[] prob_model = Tables.e_prob_model[LM][intra];
            int    i, c;

            int[] prev = { 0, 0 };
            int   coef;
            int   beta;
            int   budget;
            int   tell;

            if (intra != 0)
            {
                coef = 0;
                beta = beta_intra;
            }
            else
            {
                beta = beta_coef[LM];
                coef = pred_coef[LM];
            }

            budget = (int)dec.storage * 8;

            /* Decode at a fixed coarse resolution */
            for (i = start; i < end; i++)
            {
                c = 0;
                do
                {
                    int qi;
                    int q;
                    int tmp;

                    /* It would be better to express this invariant as a
                     * test on C at function entry, but that isn't enough
                     * to make the static analyzer happy. */
                    Inlines.OpusAssert(c < 2);
                    tell = dec.tell();
                    if (budget - tell >= 15)
                    {
                        int pi;
                        pi = 2 * Inlines.IMIN(i, 20);
                        qi = Laplace.ec_laplace_decode(dec,
                                                       (uint)prob_model[pi] << 7, prob_model[pi + 1] << 6);
                    }
                    else if (budget - tell >= 2)
                    {
                        qi = dec.dec_icdf(small_energy_icdf, 2);
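                        /* Undo the zig-zag mapping: 0, 1, 2, 3, ... -> 0, -1, 1, -2, ... */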
                        qi = (qi >> 1) ^ -(qi & 1);
                    }
                    else if (budget - tell >= 1)
                    {
                        qi = 0 - dec.dec_bit_logp(1);
                    }
                    else
                    {
                        qi = -1;
                    }
                    q = (int)Inlines.SHL32(qi, CeltConstants.DB_SHIFT); // opus bug: useless extend32

                    oldEBands[i + c * m.nbEBands] = Inlines.MAX16(
                        (0 - ((short)(0.5 + (9.0f) * (((int)1) << (CeltConstants.DB_SHIFT)))) /*Inlines.QCONST16(9.0f, CeltConstants.DB_SHIFT)*/),
                        oldEBands[i + c * m.nbEBands]);
                    tmp = Inlines.PSHR32(Inlines.MULT16_16(coef, oldEBands[i + c * m.nbEBands]), 8) + prev[c] + Inlines.SHL32(q, 7);
                    tmp = Inlines.MAX32(
                        -((int)(0.5 + (28.0f) * (((int)1) << (CeltConstants.DB_SHIFT + 7)))) /*Inlines.QCONST32(28.0f, CeltConstants.DB_SHIFT + 7)*/,
                        tmp);
                    oldEBands[i + c * m.nbEBands] = (Inlines.PSHR32(tmp, 7));
                    prev[c] = prev[c] + Inlines.SHL32(q, 7) - Inlines.MULT16_16(beta, Inlines.PSHR32(q, 8));
                } while (++c < C);
            }
        }
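The Q-format arithmetic above hides a fairly simple recurrence. As a rough guide, here is a floating-point sketch of the same predictor (my own reformulation, not part of Concentus; it assumes only System.Math and omits the DB_SHIFT scaling and the lower clamp on tmp):

        // Conceptual floating-point view of the coarse-energy predictor above.
        // q is the decoded Laplace residual for band i of channel c, in log-energy units.
        internal static void UpdateCoarseEnergy(float[] oldEBands, float[] prev,
                                                int i, int c, int nbEBands,
                                                float q, float coef, float beta)
        {
            float e = Math.Max(-9.0f, oldEBands[i + c * nbEBands]); // same clamp as the MAX16 above
            oldEBands[i + c * nbEBands] = coef * e + prev[c] + q;   // inter-frame prediction + running predictor + residual
            prev[c] = prev[c] + q - beta * q;                       // leaky accumulation of the residual
        }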
Example #2
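        /* Decodes a single Opus frame: runs the SILK and/or CELT decoders according to the
         * packet mode, handles PLC/FEC and mode transitions, decodes any redundant CELT
         * frame, applies the output gain, and writes 16-bit PCM starting at pcm[pcm_ptr]. */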
        internal int opus_decode_frame(byte[] data, int data_ptr,
                                       int len, short[] pcm, int pcm_ptr, int frame_size, int decode_fec)
        {
            SilkDecoder  silk_dec;
            CeltDecoder  celt_dec;
            int          i, silk_ret = 0, celt_ret = 0;
            EntropyCoder dec = new EntropyCoder(); // porting note: stack var
            int          silk_frame_size;
            int          pcm_silk_size;

            short[] pcm_silk;
            int     pcm_transition_silk_size;

            short[] pcm_transition_silk;
            int     pcm_transition_celt_size;

            short[] pcm_transition_celt;
            short[] pcm_transition = null;
            int     redundant_audio_size;

            short[] redundant_audio;

            int      audiosize;
            OpusMode mode;
            int      transition = 0;
            int      start_band;
            int      redundancy       = 0;
            int      redundancy_bytes = 0;
            int      celt_to_silk     = 0;
            int      c;
            int      F2_5, F5, F10, F20;

            int[] window;
            uint  redundant_rng = 0;
            int   celt_accum;

            silk_dec = this.SilkDecoder;
            celt_dec = this.Celt_Decoder;
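            /* Frame sizes in samples for 20, 10, 5 and 2.5 ms at the API sample rate */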
            F20      = this.Fs / 50;
            F10      = F20 >> 1;
            F5       = F10 >> 1;
            F2_5     = F5 >> 1;
            if (frame_size < F2_5)
            {
                return(OpusError.OPUS_BUFFER_TOO_SMALL);
            }
            /* Limit frame_size to avoid excessive stack allocations. */
            frame_size = Inlines.IMIN(frame_size, this.Fs / 25 * 3);
            /* Payloads of 1 (2 including ToC) or 0 trigger the PLC/DTX */
            if (len <= 1)
            {
                data = null;
                /* In that case, don't conceal more than what the ToC says */
                frame_size = Inlines.IMIN(frame_size, this.frame_size);
            }
            if (data != null)
            {
                audiosize = this.frame_size;
                mode      = this.mode;
                dec.dec_init(data, data_ptr, (uint)len);
            }
            else
            {
                audiosize = frame_size;
                mode      = this.prev_mode;

                if (mode == 0)
                {
                    /* If we haven't got any packet yet, all we can do is return zeros */
                    for (i = pcm_ptr; i < pcm_ptr + (audiosize * this.channels); i++)
                    {
                        pcm[i] = 0;
                    }

                    return(audiosize);
                }

                /* Avoids trying to run the PLC on sizes other than 2.5 (CELT), 5 (CELT),
                 * 10, or 20 (e.g. 12.5 or 30 ms). */
                if (audiosize > F20)
                {
                    do
                    {
                        int ret = opus_decode_frame(null, 0, 0, pcm, pcm_ptr, Inlines.IMIN(audiosize, F20), 0);
                        if (ret < 0)
                        {
                            return(ret);
                        }
                        pcm_ptr   += ret * this.channels;
                        audiosize -= ret;
                    } while (audiosize > 0);

                    return(frame_size);
                }
                else if (audiosize < F20)
                {
                    if (audiosize > F10)
                    {
                        audiosize = F10;
                    }
                    else if (mode != OpusMode.MODE_SILK_ONLY && audiosize > F5 && audiosize < F10)
                    {
                        audiosize = F5;
                    }
                }
            }

            /* In fixed-point, we can tell CELT to do the accumulation on top of the
             * SILK PCM buffer. This saves some stack space. */
            celt_accum = ((mode != OpusMode.MODE_CELT_ONLY) && (frame_size >= F10)) ? 1 : 0;

            pcm_transition_silk_size = 0;
            pcm_transition_celt_size = 0;
            if (data != null && this.prev_mode > 0 && (
                    (mode == OpusMode.MODE_CELT_ONLY && this.prev_mode != OpusMode.MODE_CELT_ONLY && (this.prev_redundancy == 0)) ||
                    (mode != OpusMode.MODE_CELT_ONLY && this.prev_mode == OpusMode.MODE_CELT_ONLY))
                )
            {
                transition = 1;
                /* Decide where to allocate the stack memory for pcm_transition */
                if (mode == OpusMode.MODE_CELT_ONLY)
                {
                    pcm_transition_celt_size = F5 * this.channels;
                }
                else
                {
                    pcm_transition_silk_size = F5 * this.channels;
                }
            }
            pcm_transition_celt = new short[pcm_transition_celt_size];
            if (transition != 0 && mode == OpusMode.MODE_CELT_ONLY)
            {
                pcm_transition = pcm_transition_celt;
                opus_decode_frame(null, 0, 0, pcm_transition, 0, Inlines.IMIN(F5, audiosize), 0);
            }
            if (audiosize > frame_size)
            {
                /*fprintf(stderr, "PCM buffer too small: %d vs %d (mode = %d)\n", audiosize, frame_size, mode);*/

                return(OpusError.OPUS_BAD_ARG);
            }
            else
            {
                frame_size = audiosize;
            }

            /* Don't allocate any memory when in CELT-only mode */
            pcm_silk_size = (mode != OpusMode.MODE_CELT_ONLY && (celt_accum == 0)) ? Inlines.IMAX(F10, frame_size) * this.channels : 0;
            pcm_silk      = new short[pcm_silk_size];

            /* SILK processing */
            if (mode != OpusMode.MODE_CELT_ONLY)
            {
                int     lost_flag, decoded_samples;
                short[] pcm_ptr2;
                int     pcm_ptr2_ptr = 0;

                if (celt_accum != 0)
                {
                    pcm_ptr2     = pcm;
                    pcm_ptr2_ptr = pcm_ptr;
                }
                else
                {
                    pcm_ptr2     = pcm_silk;
                    pcm_ptr2_ptr = 0;
                }

                if (this.prev_mode == OpusMode.MODE_CELT_ONLY)
                {
                    DecodeAPI.silk_InitDecoder(silk_dec);
                }

                /* The SILK PLC cannot produce frames of less than 10 ms */
                this.DecControl.payloadSize_ms = Inlines.IMAX(10, 1000 * audiosize / this.Fs);

                if (data != null)
                {
                    this.DecControl.nChannelsInternal = this.stream_channels;
                    if (mode == OpusMode.MODE_SILK_ONLY)
                    {
                        if (this.bandwidth == OpusBandwidth.OPUS_BANDWIDTH_NARROWBAND)
                        {
                            this.DecControl.internalSampleRate = 8000;
                        }
                        else if (this.bandwidth == OpusBandwidth.OPUS_BANDWIDTH_MEDIUMBAND)
                        {
                            this.DecControl.internalSampleRate = 12000;
                        }
                        else if (this.bandwidth == OpusBandwidth.OPUS_BANDWIDTH_WIDEBAND)
                        {
                            this.DecControl.internalSampleRate = 16000;
                        }
                        else
                        {
                            this.DecControl.internalSampleRate = 16000;
                            Inlines.OpusAssert(false);
                        }
                    }
                    else
                    {
                        /* Hybrid mode */
                        this.DecControl.internalSampleRate = 16000;
                    }
                }

                lost_flag       = data == null ? 1 : 2 * decode_fec;
                decoded_samples = 0;
                do
                {
                    /* Call SILK decoder */
                    int first_frame = (decoded_samples == 0) ? 1 : 0;
                    silk_ret = DecodeAPI.silk_Decode(silk_dec, this.DecControl,
                                                     lost_flag, first_frame, dec, pcm_ptr2, pcm_ptr2_ptr, out silk_frame_size);
                    if (silk_ret != 0)
                    {
                        if (lost_flag != 0)
                        {
                            /* PLC failure should not be fatal */
                            silk_frame_size = frame_size;
                            Arrays.MemSetWithOffset <short>(pcm_ptr2, 0, pcm_ptr2_ptr, frame_size * this.channels);
                        }
                        else
                        {
                            return(OpusError.OPUS_INTERNAL_ERROR);
                        }
                    }
                    pcm_ptr2_ptr    += (silk_frame_size * this.channels);
                    decoded_samples += silk_frame_size;
                } while (decoded_samples < frame_size);
            }

            start_band = 0;
            if (decode_fec == 0 && mode != OpusMode.MODE_CELT_ONLY && data != null &&
                dec.tell() + 17 + 20 * (this.mode == OpusMode.MODE_HYBRID ? 1 : 0) <= 8 * len)
            {
                /* Check if we have a redundant 0-8 kHz band */
                if (mode == OpusMode.MODE_HYBRID)
                {
                    redundancy = dec.dec_bit_logp(12);
                }
                else
                {
                    redundancy = 1;
                }
                if (redundancy != 0)
                {
                    celt_to_silk = dec.dec_bit_logp(1);

                    /* redundancy_bytes will be at least two, in the non-hybrid
                     * case due to the ec_tell() check above */
                    redundancy_bytes = mode == OpusMode.MODE_HYBRID ?
                                       (int)dec.dec_uint(256) + 2 :
                                       len - ((dec.tell() + 7) >> 3);
                    len -= redundancy_bytes;

                    /* This is a sanity check. It should never happen for a valid
                     * packet, so the exact behaviour is not normative. */
                    if (len * 8 < dec.tell())
                    {
                        len = 0;
                        redundancy_bytes = 0;
                        redundancy       = 0;
                    }
                    /* Shrink decoder because of raw bits */
                    dec.storage = (uint)(dec.storage - redundancy_bytes);
                }
            }
            if (mode != OpusMode.MODE_CELT_ONLY)
            {
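                /* With SILK handling the low band, CELT starts at band 17 (~8 kHz) */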
                start_band = 17;
            }

            {
                int endband = 21;

                switch (this.bandwidth)
                {
                case OpusBandwidth.OPUS_BANDWIDTH_NARROWBAND:
                    endband = 13;
                    break;

                case OpusBandwidth.OPUS_BANDWIDTH_MEDIUMBAND:
                case OpusBandwidth.OPUS_BANDWIDTH_WIDEBAND:
                    endband = 17;
                    break;

                case OpusBandwidth.OPUS_BANDWIDTH_SUPERWIDEBAND:
                    endband = 19;
                    break;

                case OpusBandwidth.OPUS_BANDWIDTH_FULLBAND:
                    endband = 21;
                    break;
                }
                celt_dec.SetEndBand(endband);
                celt_dec.SetChannels(this.stream_channels);
            }

            if (redundancy != 0)
            {
                transition = 0;
                pcm_transition_silk_size = 0;
            }

            pcm_transition_silk = new short[pcm_transition_silk_size];

            if (transition != 0 && mode != OpusMode.MODE_CELT_ONLY)
            {
                pcm_transition = pcm_transition_silk;
                opus_decode_frame(null, 0, 0, pcm_transition, 0, Inlines.IMIN(F5, audiosize), 0);
            }

            /* Only allocate memory for redundancy if/when needed */
            redundant_audio_size = redundancy != 0 ? F5 * this.channels : 0;
            redundant_audio      = new short[redundant_audio_size];

            /* 5 ms redundant frame for CELT->SILK*/
            if (redundancy != 0 && celt_to_silk != 0)
            {
                celt_dec.SetStartBand(0);
                celt_dec.celt_decode_with_ec(data, (data_ptr + len), redundancy_bytes,
                                             redundant_audio, 0, F5, null, 0);
                redundant_rng = celt_dec.GetFinalRange();
            }

            /* MUST be after PLC */
            celt_dec.SetStartBand(start_band);

            if (mode != OpusMode.MODE_SILK_ONLY)
            {
                int celt_frame_size = Inlines.IMIN(F20, frame_size);
                /* Make sure to discard any previous CELT state */
                if (mode != this.prev_mode && this.prev_mode > 0 && this.prev_redundancy == 0)
                {
                    celt_dec.ResetState();
                }
                /* Decode CELT */
                celt_ret = celt_dec.celt_decode_with_ec(decode_fec != 0 ? null : data, data_ptr,
                                                        len, pcm, pcm_ptr, celt_frame_size, dec, celt_accum);
            }
            else
            {
                if (celt_accum == 0)
                {
                    for (i = pcm_ptr; i < (frame_size * this.channels) + pcm_ptr; i++)
                    {
                        pcm[i] = 0;
                    }
                }

                /* For hybrid -> SILK transitions, we let the CELT MDCT
                 * do a fade-out by decoding a silence frame */
                if (this.prev_mode == OpusMode.MODE_HYBRID && !(redundancy != 0 && celt_to_silk != 0 && this.prev_redundancy != 0))
                {
                    celt_dec.SetStartBand(0);
                    celt_dec.celt_decode_with_ec(SILENCE, 0, 2, pcm, pcm_ptr, F2_5, null, celt_accum);
                }
            }

            if (mode != OpusMode.MODE_CELT_ONLY && celt_accum == 0)
            {
                for (i = 0; i < frame_size * this.channels; i++)
                {
                    pcm[pcm_ptr + i] = Inlines.SAT16(Inlines.ADD32(pcm[pcm_ptr + i], pcm_silk[i]));
                }
            }

            window = celt_dec.GetMode().window;

            /* 5 ms redundant frame for SILK->CELT */
            if (redundancy != 0 && celt_to_silk == 0)
            {
                celt_dec.ResetState();
                celt_dec.SetStartBand(0);

                celt_dec.celt_decode_with_ec(data, data_ptr + len, redundancy_bytes, redundant_audio, 0, F5, null, 0);
                redundant_rng = celt_dec.GetFinalRange();
                CodecHelpers.smooth_fade(pcm, pcm_ptr + this.channels * (frame_size - F2_5), redundant_audio, this.channels * F2_5,
                                         pcm, (pcm_ptr + this.channels * (frame_size - F2_5)), F2_5, this.channels, window, this.Fs);
            }
            if (redundancy != 0 && celt_to_silk != 0)
            {
                for (c = 0; c < this.channels; c++)
                {
                    for (i = 0; i < F2_5; i++)
                    {
                        pcm[this.channels * i + c + pcm_ptr] = redundant_audio[this.channels * i + c];
                    }
                }
                CodecHelpers.smooth_fade(redundant_audio, (this.channels * F2_5), pcm, (pcm_ptr + (this.channels * F2_5)),
                                         pcm, (pcm_ptr + (this.channels * F2_5)), F2_5, this.channels, window, this.Fs);
            }
            if (transition != 0)
            {
                if (audiosize >= F5)
                {
                    for (i = 0; i < this.channels * F2_5; i++)
                    {
                        pcm[pcm_ptr + i] = pcm_transition[i];
                    }
                    CodecHelpers.smooth_fade(pcm_transition, (this.channels * F2_5), pcm, (pcm_ptr + (this.channels * F2_5)),
                                             pcm, (pcm_ptr + (this.channels * F2_5)), F2_5,
                                             this.channels, window, this.Fs);
                }
                else
                {
                    /* Not enough time to do a clean transition, but we do it anyway
                     * This will not preserve amplitude perfectly and may introduce
                     * a bit of temporal aliasing, but it shouldn't be too bad and
                     * that's pretty much the best we can do. In any case, generating this
                     * transition is pretty silly in the first place */
                    CodecHelpers.smooth_fade(pcm_transition, 0, pcm, pcm_ptr,
                                             pcm, pcm_ptr, F2_5,
                                             this.channels, window, this.Fs);
                }
            }

            if (this.decode_gain != 0)
            {
                int gain;
                gain = Inlines.celt_exp2(Inlines.MULT16_16_P15(
                    ((short)(0.5 + (6.48814081e-4f) * (((int)1) << (25)))) /*Inlines.QCONST16(6.48814081e-4f, 25)*/,
                    this.decode_gain));
                for (i = pcm_ptr; i < pcm_ptr + (frame_size * this.channels); i++)
                {
                    int x;
                    x      = Inlines.MULT16_32_P16(pcm[i], gain);
                    pcm[i] = (short)Inlines.SATURATE(x, 32767);
                }
            }

            if (len <= 1)
            {
                this.rangeFinal = 0;
            }
            else
            {
                this.rangeFinal = dec.rng ^ redundant_rng;
            }

            this.prev_mode       = mode;
            this.prev_redundancy = (redundancy != 0 && celt_to_silk == 0) ? 1 : 0;

            return(celt_ret < 0 ? celt_ret : audiosize);
        }
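One detail worth spelling out: decode_gain is stored in Q8 dB (1/256 dB steps), and the constant 6.48814081e-4 equals log2(10)/(20*256), so the celt_exp2 call converts it into a linear amplitude factor. A floating-point sketch of that conversion (helper name is mine, not Concentus API; assumes System.Math):

        // Sketch: convert a Q8 dB gain (as used by decode_gain above) to a linear factor.
        // celt_exp2(log2(10)/(20*256) * g) == 10^(g / 256 / 20).
        internal static float DecodeGainToLinear(int gainQ8dB)
        {
            return (float)Math.Pow(10.0, gainQ8dB / (256.0 * 20.0));
        }

For example, a stored gain of 1536 (6 dB in Q8) scales the output samples by roughly 2.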
Example #3
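        /* Finishes the per-band bit allocation: bisects between the bits1/bits2 allocation
         * vectors, decides which top bands to skip, codes the intensity and dual-stereo
         * parameters, and splits each band's budget into fine-energy bits (ebits) and PVQ
         * bits, returning the number of coded bands. */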
        internal static int interp_bits2pulses(CeltMode m, int start, int end, int skip_start,
                                               int[] bits1, int[] bits2, int[] thresh, int[] cap, int total, out int _balance,
                                               int skip_rsv, ref int intensity, int intensity_rsv, ref int dual_stereo, int dual_stereo_rsv, int[] bits,
                                               int[] ebits, int[] fine_priority, int C, int LM, EntropyCoder ec, int encode, int prev, int signalBandwidth)
        {
            int psum;
            int lo, hi;
            int i, j;
            int logM;
            int stereo;
            int codedBands = -1;
            int alloc_floor;
            int left, percoeff;
            int done;
            int balance;


            alloc_floor = C << EntropyCoder.BITRES;
            stereo      = C > 1 ? 1 : 0;

            logM = LM << EntropyCoder.BITRES;
            lo   = 0;
            hi   = 1 << ALLOC_STEPS;
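            /* Bisection: find the highest interpolation point between the bits1 and bits2
             * allocation vectors whose (capped) total still fits in the bit budget. */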
            for (i = 0; i < ALLOC_STEPS; i++)
            {
                int mid = (lo + hi) >> 1;
                psum = 0;
                done = 0;
                for (j = end; j-- > start;)
                {
                    int tmp = bits1[j] + (mid * (int)bits2[j] >> ALLOC_STEPS);
                    if (tmp >= thresh[j] || done != 0)
                    {
                        done = 1;
                        /* Don't allocate more than we can actually use */
                        psum += Inlines.IMIN(tmp, cap[j]);
                    }
                    else
                    {
                        if (tmp >= alloc_floor)
                        {
                            psum += alloc_floor;
                        }
                    }
                }
                if (psum > total)
                {
                    hi = mid;
                }
                else
                {
                    lo = mid;
                }
            }
            psum = 0;
            /*printf ("interp bisection gave %d\n", lo);*/
            done = 0;
            for (j = end; j-- > start;)
            {
                int tmp = bits1[j] + (lo * bits2[j] >> ALLOC_STEPS);
                if (tmp < thresh[j] && done == 0)
                {
                    if (tmp >= alloc_floor)
                    {
                        tmp = alloc_floor;
                    }
                    else
                    {
                        tmp = 0;
                    }
                }
                else
                {
                    done = 1;
                }

                /* Don't allocate more than we can actually use */
                tmp     = Inlines.IMIN(tmp, cap[j]);
                bits[j] = tmp;
                psum   += tmp;
            }

            /* Decide which bands to skip, working backwards from the end. */
            for (codedBands = end; ; codedBands--)
            {
                int band_width;
                int band_bits;
                int rem;
                j = codedBands - 1;

                /* Never skip the first band, nor a band that has been boosted by
                 *  dynalloc.
                 * In the first case, we'd be coding a bit to signal we're going to waste
                 *  all the other bits.
                 * In the second case, we'd be coding a bit to redistribute all the bits
                 *  we just signaled should be concentrated in this band. */
                if (j <= skip_start)
                {
                    /* Give the bit we reserved to end skipping back. */
                    total += skip_rsv;
                    break;
                }

                /*Figure out how many left-over bits we would be adding to this band.
                *  This can include bits we've stolen back from higher, skipped bands.*/
                left       = total - psum;
                percoeff   = Inlines.celt_udiv(left, m.eBands[codedBands] - m.eBands[start]);
                left      -= (m.eBands[codedBands] - m.eBands[start]) * percoeff;
                rem        = Inlines.IMAX(left - (m.eBands[j] - m.eBands[start]), 0);
                band_width = m.eBands[codedBands] - m.eBands[j];
                band_bits  = (int)(bits[j] + percoeff * band_width + rem);

                /*Only code a skip decision if we're above the threshold for this band.
                 * Otherwise it is force-skipped.
                 * This ensures that we have enough bits to code the skip flag.*/
                if (band_bits >= Inlines.IMAX(thresh[j], alloc_floor + (1 << EntropyCoder.BITRES)))
                {
                    if (encode != 0)
                    {
                        /*This if() block is the only part of the allocation function that
                         * is not a mandatory part of the bitstream: any bands we choose to
                         * skip here must be explicitly signaled.*/
                        /*Choose a threshold with some hysteresis to keep bands from
                         * fluctuating in and out.*/
#if FUZZING
                        if ((new Random().Next() & 0x1) == 0)
#else
                        if (codedBands <= start + 2 || (band_bits > ((j < prev ? 7 : 9) * band_width << LM << EntropyCoder.BITRES) >> 4 && j <= signalBandwidth))
#endif
                        {
                            ec.enc_bit_logp(1, 1);
                            break;
                        }
                        ec.enc_bit_logp(0, 1);
                    }
                    else if (ec.dec_bit_logp(1) != 0)
                    {
                        break;
                    }
                    /*We used a bit to skip this band.*/
                    psum      += 1 << EntropyCoder.BITRES;
                    band_bits -= 1 << EntropyCoder.BITRES;
                }
                /*Reclaim the bits originally allocated to this band.*/
                psum -= bits[j] + intensity_rsv;
                if (intensity_rsv > 0)
                {
                    intensity_rsv = LOG2_FRAC_TABLE[j - start];
                }
                psum += intensity_rsv;
                if (band_bits >= alloc_floor)
                {
                    /*If we have enough for a fine energy bit per channel, use it.*/
                    psum   += alloc_floor;
                    bits[j] = alloc_floor;
                }
                else
                {
                    /*Otherwise this band gets nothing at all.*/
                    bits[j] = 0;
                }
            }

            Inlines.OpusAssert(codedBands > start);
            /* Code the intensity and dual stereo parameters. */
            if (intensity_rsv > 0)
            {
                if (encode != 0)
                {
                    intensity = Inlines.IMIN(intensity, codedBands);
                    ec.enc_uint((uint)(intensity - start), (uint)(codedBands + 1 - start));
                }
                else
                {
                    intensity = start + (int)ec.dec_uint((uint)(codedBands + 1 - start));
                }
            }
            else
            {
                intensity = 0;
            }

            if (intensity <= start)
            {
                total          += dual_stereo_rsv;
                dual_stereo_rsv = 0;
            }
            if (dual_stereo_rsv > 0)
            {
                if (encode != 0)
                {
                    ec.enc_bit_logp(dual_stereo, 1);
                }
                else
                {
                    dual_stereo = ec.dec_bit_logp(1);
                }
            }
            else
            {
                dual_stereo = 0;
            }

            /* Allocate the remaining bits */
            left     = total - psum;
            percoeff = Inlines.celt_udiv(left, m.eBands[codedBands] - m.eBands[start]);
            left    -= (m.eBands[codedBands] - m.eBands[start]) * percoeff;
            for (j = start; j < codedBands; j++)
            {
                bits[j] += ((int)percoeff * (m.eBands[j + 1] - m.eBands[j]));
            }
            for (j = start; j < codedBands; j++)
            {
                int tmp = (int)Inlines.IMIN(left, m.eBands[j + 1] - m.eBands[j]);
                bits[j] += tmp;
                left    -= tmp;
            }
            /*for (j=0;j<end;j++)printf("%d ", bits[j]);printf("\n");*/

            balance = 0;
            for (j = start; j < codedBands; j++)
            {
                int N0, N, den;
                int offset;
                int NClogN;
                int excess, bit;

                Inlines.OpusAssert(bits[j] >= 0);
                N0  = m.eBands[j + 1] - m.eBands[j];
                N   = N0 << LM;
                bit = (int)bits[j] + balance;

                if (N > 1)
                {
                    excess  = Inlines.MAX32(bit - cap[j], 0);
                    bits[j] = bit - excess;

                    /* Compensate for the extra DoF in stereo */
                    den = (C * N + ((C == 2 && N > 2 && (dual_stereo == 0) && j < intensity) ? 1 : 0));

                    NClogN = den * (m.logN[j] + logM);

                    /* Offset for the number of fine bits by log2(N)/2 + FINE_OFFSET
                     * compared to their "fair share" of total/N */
                    offset = (NClogN >> 1) - den * CeltConstants.FINE_OFFSET;

                    /* N=2 is the only point that doesn't match the curve */
                    if (N == 2)
                    {
                        offset += den << EntropyCoder.BITRES >> 2;
                    }

                    /* Changing the offset for allocating the second and third
                     *  fine energy bit */
                    if (bits[j] + offset < den * 2 << EntropyCoder.BITRES)
                    {
                        offset += NClogN >> 2;
                    }
                    else if (bits[j] + offset < den * 3 << EntropyCoder.BITRES)
                    {
                        offset += NClogN >> 3;
                    }

                    /* Divide with rounding */
                    ebits[j] = Inlines.IMAX(0, (bits[j] + offset + (den << (EntropyCoder.BITRES - 1))));
                    ebits[j] = Inlines.celt_udiv(ebits[j], den) >> EntropyCoder.BITRES;

                    /* Make sure not to bust */
                    if (C * ebits[j] > (bits[j] >> EntropyCoder.BITRES))
                    {
                        ebits[j] = bits[j] >> stereo >> EntropyCoder.BITRES;
                    }

                    /* More than that is useless because that's about as far as PVQ can go */
                    ebits[j] = Inlines.IMIN(ebits[j], CeltConstants.MAX_FINE_BITS);

                    /* If we rounded down or capped this band, make it a candidate for the
                     *  final fine energy pass */
                    fine_priority[j] = (ebits[j] * (den << EntropyCoder.BITRES) >= bits[j] + offset) ? 1 : 0;

                    /* Remove the allocated fine bits; the rest are assigned to PVQ */
                    bits[j] -= C * ebits[j] << EntropyCoder.BITRES;
                }
                else
                {
                    /* For N=1, all bits go to fine energy except for a single sign bit */
                    excess           = Inlines.MAX32(0, bit - (C << EntropyCoder.BITRES));
                    bits[j]          = bit - excess;
                    ebits[j]         = 0;
                    fine_priority[j] = 1;
                }

                /* Fine energy can't take advantage of the re-balancing in
                 *  quant_all_bands().
                 * Instead, do the re-balancing here.*/
                if (excess > 0)
                {
                    int extra_fine;
                    int extra_bits;
                    extra_fine       = Inlines.IMIN(excess >> (stereo + EntropyCoder.BITRES), CeltConstants.MAX_FINE_BITS - ebits[j]);
                    ebits[j]        += extra_fine;
                    extra_bits       = extra_fine * C << EntropyCoder.BITRES;
                    fine_priority[j] = (extra_bits >= excess - balance) ? 1 : 0;
                    excess          -= extra_bits;
                }
                balance = excess;

                Inlines.OpusAssert(bits[j] >= 0);
                Inlines.OpusAssert(ebits[j] >= 0);
            }

            /* Save any remaining bits over the cap for the rebalancing in
             *  quant_all_bands(). */
            _balance = balance;

            /* The skipped bands use all their bits for fine energy. */
            for (; j < end; j++)
            {
                ebits[j] = bits[j] >> stereo >> EntropyCoder.BITRES;
                Inlines.OpusAssert(C * ebits[j] << EntropyCoder.BITRES == bits[j]);
                bits[j]          = 0;
                fine_priority[j] = (ebits[j] < 1) ? 1 : 0;
            }

            return(codedBands);
        }
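The first loop in the function is a plain bisection over the interpolation factor between the bits1 and bits2 allocation vectors. A simplified standalone sketch of that idea (illustrative only; the name is mine, and the real loop also applies the thresh/cap and alloc_floor rules shown above):

        // Find the largest lo in [0, 1 << allocSteps] whose interpolated allocation
        // sum(bits1[j] + (lo * bits2[j] >> allocSteps)) still fits in the bit budget.
        internal static int BisectAllocation(int[] bits1, int[] bits2, int total, int allocSteps)
        {
            int lo = 0, hi = 1 << allocSteps;
            for (int step = 0; step < allocSteps; step++)
            {
                int mid  = (lo + hi) >> 1;
                int psum = 0;
                for (int j = 0; j < bits1.Length; j++)
                {
                    psum += bits1[j] + (mid * bits2[j] >> allocSteps);
                }
                if (psum > total)
                {
                    hi = mid;
                }
                else
                {
                    lo = mid;
                }
            }
            return lo; // lo / 2^allocSteps is the interpolation fraction that gets used
        }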
Example #4
        /* Decode a frame */
        internal static int silk_Decode( /* O    Returns error code                              */
            SilkDecoder psDec,           /* I/O  State                                           */
            DecControlState decControl,  /* I/O  Control Structure                               */
            int lostFlag,                /* I    0: no loss, 1 loss, 2 decode fec                */
            int newPacketFlag,           /* I    Indicates first decoder call for this packet    */
            EntropyCoder psRangeDec,     /* I/O  Compressor data structure                       */
            short[] samplesOut,          /* O    Decoded output speech vector                    */
            int samplesOut_ptr,
            out int nSamplesOut          /* O    Number of samples decoded                       */
            )
        {
            int           i, n, decode_only_middle = 0, ret = SilkError.SILK_NO_ERROR;
            int           LBRR_symbol;
            BoxedValueInt nSamplesOutDec = new BoxedValueInt();

            short[] samplesOut_tmp;
            int[]   samplesOut_tmp_ptrs = new int[2];
            short[] samplesOut1_tmp_storage1;
            short[] samplesOut1_tmp_storage2;
            short[] samplesOut2_tmp;
            int[]   MS_pred_Q13 = new int[] { 0, 0 };
            short[] resample_out;
            int     resample_out_ptr;

            SilkChannelDecoder[] channel_state = psDec.channel_state;
            int has_side;
            int stereo_to_mono;
            int delay_stack_alloc;

            nSamplesOut = 0;

            Inlines.OpusAssert(decControl.nChannelsInternal == 1 || decControl.nChannelsInternal == 2);

            /**********************************/
            /* Test if first frame in payload */
            /**********************************/
            if (newPacketFlag != 0)
            {
                for (n = 0; n < decControl.nChannelsInternal; n++)
                {
                    channel_state[n].nFramesDecoded = 0;  /* Used to count frames in packet */
                }
            }

            /* If Mono -> Stereo transition in bitstream: init state of second channel */
            if (decControl.nChannelsInternal > psDec.nChannelsInternal)
            {
                ret += channel_state[1].silk_init_decoder();
            }

            stereo_to_mono = (decControl.nChannelsInternal == 1 && psDec.nChannelsInternal == 2 &&
                              (decControl.internalSampleRate == 1000 * channel_state[0].fs_kHz)) ? 1 : 0;

            if (channel_state[0].nFramesDecoded == 0)
            {
                for (n = 0; n < decControl.nChannelsInternal; n++)
                {
                    int fs_kHz_dec;
                    if (decControl.payloadSize_ms == 0)
                    {
                        /* Assuming packet loss, use 10 ms */
                        channel_state[n].nFramesPerPacket = 1;
                        channel_state[n].nb_subfr         = 2;
                    }
                    else if (decControl.payloadSize_ms == 10)
                    {
                        channel_state[n].nFramesPerPacket = 1;
                        channel_state[n].nb_subfr         = 2;
                    }
                    else if (decControl.payloadSize_ms == 20)
                    {
                        channel_state[n].nFramesPerPacket = 1;
                        channel_state[n].nb_subfr         = 4;
                    }
                    else if (decControl.payloadSize_ms == 40)
                    {
                        channel_state[n].nFramesPerPacket = 2;
                        channel_state[n].nb_subfr         = 4;
                    }
                    else if (decControl.payloadSize_ms == 60)
                    {
                        channel_state[n].nFramesPerPacket = 3;
                        channel_state[n].nb_subfr         = 4;
                    }
                    else
                    {
                        Inlines.OpusAssert(false);
                        return(SilkError.SILK_DEC_INVALID_FRAME_SIZE);
                    }
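                    /* internalSampleRate is 8000, 12000 or 16000; >>10 then +1 maps it to 8, 12 or 16 (kHz) */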
                    fs_kHz_dec = (decControl.internalSampleRate >> 10) + 1;
                    if (fs_kHz_dec != 8 && fs_kHz_dec != 12 && fs_kHz_dec != 16)
                    {
                        Inlines.OpusAssert(false);
                        return(SilkError.SILK_DEC_INVALID_SAMPLING_FREQUENCY);
                    }
                    ret += channel_state[n].silk_decoder_set_fs(fs_kHz_dec, decControl.API_sampleRate);
                }
            }

            if (decControl.nChannelsAPI == 2 && decControl.nChannelsInternal == 2 && (psDec.nChannelsAPI == 1 || psDec.nChannelsInternal == 1))
            {
                Arrays.MemSetShort(psDec.sStereo.pred_prev_Q13, 0, 2);
                Arrays.MemSetShort(psDec.sStereo.sSide, 0, 2);
                channel_state[1].resampler_state.Assign(channel_state[0].resampler_state);
            }
            psDec.nChannelsAPI      = decControl.nChannelsAPI;
            psDec.nChannelsInternal = decControl.nChannelsInternal;

            if (decControl.API_sampleRate > (int)SilkConstants.MAX_API_FS_KHZ * 1000 || decControl.API_sampleRate < 8000)
            {
                ret = SilkError.SILK_DEC_INVALID_SAMPLING_FREQUENCY;
                return(ret);
            }

            if (lostFlag != DecoderAPIFlag.FLAG_PACKET_LOST && channel_state[0].nFramesDecoded == 0)
            {
                /* First decoder call for this payload */
                /* Decode VAD flags and LBRR flag */
                for (n = 0; n < decControl.nChannelsInternal; n++)
                {
                    for (i = 0; i < channel_state[n].nFramesPerPacket; i++)
                    {
                        channel_state[n].VAD_flags[i] = psRangeDec.dec_bit_logp(1);
                    }
                    channel_state[n].LBRR_flag = psRangeDec.dec_bit_logp(1);
                }
                /* Decode LBRR flags */
                for (n = 0; n < decControl.nChannelsInternal; n++)
                {
                    Arrays.MemSetInt(channel_state[n].LBRR_flags, 0, SilkConstants.MAX_FRAMES_PER_PACKET);
                    if (channel_state[n].LBRR_flag != 0)
                    {
                        if (channel_state[n].nFramesPerPacket == 1)
                        {
                            channel_state[n].LBRR_flags[0] = 1;
                        }
                        else
                        {
                            LBRR_symbol = psRangeDec.dec_icdf(Tables.silk_LBRR_flags_iCDF_ptr[channel_state[n].nFramesPerPacket - 2], 8) + 1;
                            for (i = 0; i < channel_state[n].nFramesPerPacket; i++)
                            {
                                channel_state[n].LBRR_flags[i] = Inlines.silk_RSHIFT(LBRR_symbol, i) & 1;
                            }
                        }
                    }
                }

                if (lostFlag == DecoderAPIFlag.FLAG_DECODE_NORMAL)
                {
                    /* Regular decoding: skip all LBRR data */
                    for (i = 0; i < channel_state[0].nFramesPerPacket; i++)
                    {
                        for (n = 0; n < decControl.nChannelsInternal; n++)
                        {
                            if (channel_state[n].LBRR_flags[i] != 0)
                            {
                                short[] pulses = new short[SilkConstants.MAX_FRAME_LENGTH];
                                int     condCoding;

                                if (decControl.nChannelsInternal == 2 && n == 0)
                                {
                                    Stereo.silk_stereo_decode_pred(psRangeDec, MS_pred_Q13);
                                    if (channel_state[1].LBRR_flags[i] == 0)
                                    {
                                        BoxedValueInt decodeOnlyMiddleBoxed = new BoxedValueInt(decode_only_middle);
                                        Stereo.silk_stereo_decode_mid_only(psRangeDec, decodeOnlyMiddleBoxed);
                                        decode_only_middle = decodeOnlyMiddleBoxed.Val;
                                    }
                                }
                                /* Use conditional coding if previous frame available */
                                if (i > 0 && (channel_state[n].LBRR_flags[i - 1] != 0))
                                {
                                    condCoding = SilkConstants.CODE_CONDITIONALLY;
                                }
                                else
                                {
                                    condCoding = SilkConstants.CODE_INDEPENDENTLY;
                                }
                                DecodeIndices.silk_decode_indices(channel_state[n], psRangeDec, i, 1, condCoding);
                                DecodePulses.silk_decode_pulses(psRangeDec, pulses, channel_state[n].indices.signalType,
                                                                channel_state[n].indices.quantOffsetType, channel_state[n].frame_length);
                            }
                        }
                    }
                }
            }

            /* Get MS predictor index */
            if (decControl.nChannelsInternal == 2)
            {
                if (lostFlag == DecoderAPIFlag.FLAG_DECODE_NORMAL ||
                    (lostFlag == DecoderAPIFlag.FLAG_DECODE_LBRR && channel_state[0].LBRR_flags[channel_state[0].nFramesDecoded] == 1))
                {
                    Stereo.silk_stereo_decode_pred(psRangeDec, MS_pred_Q13);
                    /* For LBRR data, decode mid-only flag only if side-channel's LBRR flag is false */
                    if ((lostFlag == DecoderAPIFlag.FLAG_DECODE_NORMAL && channel_state[1].VAD_flags[channel_state[0].nFramesDecoded] == 0) ||
                        (lostFlag == DecoderAPIFlag.FLAG_DECODE_LBRR && channel_state[1].LBRR_flags[channel_state[0].nFramesDecoded] == 0))
                    {
                        BoxedValueInt decodeOnlyMiddleBoxed = new BoxedValueInt(decode_only_middle);
                        Stereo.silk_stereo_decode_mid_only(psRangeDec, decodeOnlyMiddleBoxed);
                        decode_only_middle = decodeOnlyMiddleBoxed.Val;
                    }
                    else
                    {
                        decode_only_middle = 0;
                    }
                }
                else
                {
                    for (n = 0; n < 2; n++)
                    {
                        MS_pred_Q13[n] = psDec.sStereo.pred_prev_Q13[n];
                    }
                }
            }

            /* Reset side channel decoder prediction memory for first frame with side coding */
            if (decControl.nChannelsInternal == 2 && decode_only_middle == 0 && psDec.prev_decode_only_middle == 1)
            {
                Arrays.MemSetShort(psDec.channel_state[1].outBuf, 0, SilkConstants.MAX_FRAME_LENGTH + 2 * SilkConstants.MAX_SUB_FRAME_LENGTH);
                Arrays.MemSetInt(psDec.channel_state[1].sLPC_Q14_buf, 0, SilkConstants.MAX_LPC_ORDER);
                psDec.channel_state[1].lagPrev                 = 100;
                psDec.channel_state[1].LastGainIndex           = 10;
                psDec.channel_state[1].prevSignalType          = SilkConstants.TYPE_NO_VOICE_ACTIVITY;
                psDec.channel_state[1].first_frame_after_reset = 1;
            }

            /* Check if the temp buffer fits into the output PCM buffer. If it fits,
             * we can delay allocating the temp buffer until after the SILK peak stack
             * usage. We need to use a < and not a <= because of the two extra samples. */
            delay_stack_alloc = (decControl.internalSampleRate * decControl.nChannelsInternal
                                 < decControl.API_sampleRate * decControl.nChannelsAPI) ? 1 : 0;

            if (delay_stack_alloc != 0)
            {
                samplesOut_tmp         = samplesOut;
                samplesOut_tmp_ptrs[0] = samplesOut_ptr;
                samplesOut_tmp_ptrs[1] = samplesOut_ptr + channel_state[0].frame_length + 2;
            }
            else
            {
                samplesOut1_tmp_storage1 = new short[decControl.nChannelsInternal * (channel_state[0].frame_length + 2)];
                samplesOut_tmp           = samplesOut1_tmp_storage1;
                samplesOut_tmp_ptrs[0]   = 0;
                samplesOut_tmp_ptrs[1]   = channel_state[0].frame_length + 2;
            }

            if (lostFlag == DecoderAPIFlag.FLAG_DECODE_NORMAL)
            {
                has_side = (decode_only_middle == 0) ? 1 : 0;
            }
            else
            {
                has_side = (psDec.prev_decode_only_middle == 0 ||
                            (decControl.nChannelsInternal == 2 &&
                             lostFlag == DecoderAPIFlag.FLAG_DECODE_LBRR &&
                             channel_state[1].LBRR_flags[channel_state[1].nFramesDecoded] == 1)) ? 1 : 0;
            }
            /* Call decoder for one frame */
            for (n = 0; n < decControl.nChannelsInternal; n++)
            {
                if (n == 0 || (has_side != 0))
                {
                    int FrameIndex;
                    int condCoding;

                    FrameIndex = channel_state[0].nFramesDecoded - n;
                    /* Use independent coding if no previous frame available */
                    if (FrameIndex <= 0)
                    {
                        condCoding = SilkConstants.CODE_INDEPENDENTLY;
                    }
                    else if (lostFlag == DecoderAPIFlag.FLAG_DECODE_LBRR)
                    {
                        condCoding = (channel_state[n].LBRR_flags[FrameIndex - 1] != 0) ? SilkConstants.CODE_CONDITIONALLY : SilkConstants.CODE_INDEPENDENTLY;
                    }
                    else if (n > 0 && (psDec.prev_decode_only_middle != 0))
                    {
                        /* If we skipped a side frame in this packet, we don't
                         * need LTP scaling; the LTP state is well-defined. */
                        condCoding = SilkConstants.CODE_INDEPENDENTLY_NO_LTP_SCALING;
                    }
                    else
                    {
                        condCoding = SilkConstants.CODE_CONDITIONALLY;
                    }
                    ret += channel_state[n].silk_decode_frame(psRangeDec, samplesOut_tmp, samplesOut_tmp_ptrs[n] + 2, nSamplesOutDec, lostFlag, condCoding);
                }
                else
                {
                    Arrays.MemSetWithOffset <short>(samplesOut_tmp, 0, samplesOut_tmp_ptrs[n] + 2, nSamplesOutDec.Val);
                }
                channel_state[n].nFramesDecoded++;
            }

            if (decControl.nChannelsAPI == 2 && decControl.nChannelsInternal == 2)
            {
                /* Convert Mid/Side to Left/Right */
                Stereo.silk_stereo_MS_to_LR(psDec.sStereo, samplesOut_tmp, samplesOut_tmp_ptrs[0], samplesOut_tmp, samplesOut_tmp_ptrs[1], MS_pred_Q13, channel_state[0].fs_kHz, nSamplesOutDec.Val);
            }
            else
            {
                /* Buffering */
                Array.Copy(psDec.sStereo.sMid, 0, samplesOut_tmp, samplesOut_tmp_ptrs[0], 2);
                Array.Copy(samplesOut_tmp, samplesOut_tmp_ptrs[0] + nSamplesOutDec.Val, psDec.sStereo.sMid, 0, 2);
            }

            /* Number of output samples */
            nSamplesOut = Inlines.silk_DIV32(nSamplesOutDec.Val * decControl.API_sampleRate, Inlines.silk_SMULBB(channel_state[0].fs_kHz, 1000));

            /* Set up pointers to temp buffers */
            if (decControl.nChannelsAPI == 2)
            {
                samplesOut2_tmp  = new short[nSamplesOut];
                resample_out     = samplesOut2_tmp;
                resample_out_ptr = 0;
            }
            else
            {
                resample_out     = samplesOut;
                resample_out_ptr = samplesOut_ptr;
            }

            if (delay_stack_alloc != 0)
            {
                samplesOut1_tmp_storage2 = new short[decControl.nChannelsInternal * (channel_state[0].frame_length + 2)];
                Array.Copy(samplesOut, samplesOut_ptr, samplesOut1_tmp_storage2, 0, decControl.nChannelsInternal * (channel_state[0].frame_length + 2));
                samplesOut_tmp         = samplesOut1_tmp_storage2;
                samplesOut_tmp_ptrs[0] = 0;
                samplesOut_tmp_ptrs[1] = channel_state[0].frame_length + 2;
            }
            for (n = 0; n < Inlines.silk_min(decControl.nChannelsAPI, decControl.nChannelsInternal); n++)
            {
                /* Resample decoded signal to API_sampleRate */
                ret += Resampler.silk_resampler(channel_state[n].resampler_state, resample_out, resample_out_ptr, samplesOut_tmp, samplesOut_tmp_ptrs[n] + 1, nSamplesOutDec.Val);

                /* Interleave if stereo output and stereo stream */
                if (decControl.nChannelsAPI == 2)
                {
                    int nptr = samplesOut_ptr + n;
                    for (i = 0; i < nSamplesOut; i++)
                    {
                        samplesOut[nptr + 2 * i] = resample_out[resample_out_ptr + i];
                    }
                }
            }

            /* Create two channel output from mono stream */
            if (decControl.nChannelsAPI == 2 && decControl.nChannelsInternal == 1)
            {
                if (stereo_to_mono != 0)
                {
                    /* Resample right channel for newly collapsed stereo just in case
                     * we weren't doing collapsing when switching to mono */
                    ret += Resampler.silk_resampler(channel_state[1].resampler_state, resample_out, resample_out_ptr, samplesOut_tmp, samplesOut_tmp_ptrs[0] + 1, nSamplesOutDec.Val);

                    for (i = 0; i < nSamplesOut; i++)
                    {
                        samplesOut[samplesOut_ptr + 1 + 2 * i] = resample_out[resample_out_ptr + i];
                    }
                }
                else
                {
                    for (i = 0; i < nSamplesOut; i++)
                    {
                        samplesOut[samplesOut_ptr + 1 + 2 * i] = samplesOut[samplesOut_ptr + 2 * i];
                    }
                }
            }

            /* Export pitch lag, measured at 48 kHz sampling rate */
            if (channel_state[0].prevSignalType == SilkConstants.TYPE_VOICED)
            {
                int[] mult_tab = { 6, 4, 3 };
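                /* 8*6 == 12*4 == 16*3 == 48: scale lagPrev from fs_kHz up to 48 kHz */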
                decControl.prevPitchLag = channel_state[0].lagPrev * mult_tab[(channel_state[0].fs_kHz - 8) >> 2];
            }
            else
            {
                decControl.prevPitchLag = 0;
            }

            if (lostFlag == DecoderAPIFlag.FLAG_PACKET_LOST)
            {
                /* On packet loss, remove the gain clamping to prevent having the energy "bounce back"
                 * if we lose packets when the energy is going down */
                for (i = 0; i < psDec.nChannelsInternal; i++)
                {
                    psDec.channel_state[i].LastGainIndex = 10;
                }
            }
            else
            {
                psDec.prev_decode_only_middle = decode_only_middle;
            }

            return(ret);
        }
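The pitch-lag export at the end scales channel_state[0].lagPrev from the SILK internal rate to 48 kHz. A minimal standalone sketch of that mapping (helper name is mine):

        // lagPrev is measured at the SILK internal rate (8, 12 or 16 kHz);
        // the exported prevPitchLag is expressed at 48 kHz.
        internal static int PitchLagTo48kHz(int lagPrev, int fsKHz)
        {
            int[] multTab = { 6, 4, 3 };                 // 8*6 == 12*4 == 16*3 == 48
            return lagPrev * multTab[(fsKHz - 8) >> 2];  // fsKHz in {8, 12, 16} -> index 0, 1, 2
        }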