/**
 * Performs power compensation for one subband: mixes a scaled noise
 * spectrum into {@code sp} when the bitstream enabled compensation for
 * this channel's power group.
 *
 * @param ctx channel unit context holding per-channel gain/power data
 * @param chIndex index of the channel being processed
 * @param sp spectrum to modify in place
 * @param rngIndex start offset into the pseudo-random noise table
 * @param sb subband number
 */
public virtual void powerCompensation(ChannelUnitContext ctx, int chIndex, float[] sp, int rngIndex, int sb) {
    // In stereo mode the bitstream may request swapping the channels for
    // this subband; gain/power info is then read from the other channel.
    int srcCh = (ctx.unitType == CH_UNIT_STEREO && ctx.swapChannels[sb]) ? (chIndex ^ 1) : chIndex;

    if (ctx.channels[srcCh].powerLevs[subband_to_powgrp[sb]] == ATRAC3P_POWER_COMP_OFF) {
        return; // compensation disabled for this power group
    }

    // Generate the initial noise spectrum from the pseudo-random table
    // (table indices wrap at 1024 entries).
    float[] noise = new float[ATRAC3P_SUBBAND_SAMPLES];
    for (int i = 0; i < ATRAC3P_SUBBAND_SAMPLES; i++) {
        noise[i] = noise_tab[(rngIndex + i) & 0x3FF];
    }

    // Derive the gain-control value from current and previous gain data.
    AtracGainInfo gainNow = ctx.channels[srcCh].gainData[sb];
    AtracGainInfo gainPrev = ctx.channels[srcCh].gainDataPrev[sb];
    int gainLev = (gainNow.numPoints > 0) ? (6 - gainNow.levCode[0]) : 0;

    int gcv = 0;
    for (int i = 0; i < gainPrev.numPoints; i++) {
        gcv = max(gcv, gainLev - (gainPrev.levCode[i] - 6));
    }
    for (int i = 0; i < gainNow.numPoints; i++) {
        gcv = max(gcv, 6 - gainNow.levCode[i]);
    }

    float grpLev = pwc_levs[ctx.channels[srcCh].powerLevs[subband_to_powgrp[sb]]] / (1 << gcv);

    // Skip the lowest two quant units (frequencies 0...351 Hz) for subband 0.
    int firstQu = subband_to_qu[sb] + ((sb == 0) ? 2 : 0);
    for (int qu = firstQu; qu < subband_to_qu[sb + 1]; qu++) {
        int wordlen = ctx.channels[chIndex].quWordlen[qu];
        if (wordlen <= 0) {
            continue; // quant unit carries no data
        }

        // Per-quant-unit noise level: scale factor * mantissa scale,
        // normalized by the wordlength, times the group level.
        float quLev = ff_atrac3p_sf_tab[ctx.channels[chIndex].quSfIdx[qu]]
                      * ff_atrac3p_mant_tab[wordlen]
                      / (1 << wordlen) * grpLev;

        int dst = ff_atrac3p_qu_to_spec_pos[qu];
        int nsp = ff_atrac3p_qu_to_spec_pos[qu + 1] - dst;
        for (int i = 0; i < nsp; i++) {
            sp[dst + i] += noise[i] * quLev;
        }
    }
}
/**
 * Synthesizes tone components for one subband and overlap-adds them onto
 * the residual spectrum in {@code out}.
 *
 * Rebuilds the full amplitude envelopes for the two overlapping regions
 * from the truncated per-frame bitstream data, synthesizes each region's
 * waves, applies Hann windowing to wave signals not already faded by an
 * envelope, and finally adds both 128-sample regions into the output.
 *
 * @param ctx channel unit context holding tone/wave data
 * @param chNum index of the channel being processed
 * @param sb subband number
 * @param out destination buffer updated in place
 * @param outOffset start offset into {@code out}
 */
public virtual void generateTones(ChannelUnitContext ctx, int chNum, int sb, float[] @out, int outOffset) {
    WavesData tonesNow = ctx.channels[chNum].tonesInfoPrev[sb];
    WavesData tonesNext = ctx.channels[chNum].tonesInfo[sb];

    // Reconstruct the envelope start point of the current region from the
    // pending data of this frame (shifted by 32) or the previous frame.
    if (tonesNext.pendEnv.hasStartPoint && tonesNext.pendEnv.startPos < tonesNext.pendEnv.stopPos) {
        tonesNext.currEnv.hasStartPoint = true;
        tonesNext.currEnv.startPos = tonesNext.pendEnv.startPos + 32;
    } else if (tonesNow.pendEnv.hasStartPoint) {
        tonesNext.currEnv.hasStartPoint = true;
        tonesNext.currEnv.startPos = tonesNow.pendEnv.startPos;
    } else {
        tonesNext.currEnv.hasStartPoint = false;
        tonesNext.currEnv.startPos = 0;
    }

    // Reconstruct the envelope stop point; this reads the startPos that
    // was just written above, so the order of these two steps matters.
    if (tonesNow.pendEnv.hasStopPoint && tonesNow.pendEnv.stopPos >= tonesNext.currEnv.startPos) {
        tonesNext.currEnv.hasStopPoint = true;
        tonesNext.currEnv.stopPos = tonesNow.pendEnv.stopPos;
    } else if (tonesNext.pendEnv.hasStopPoint) {
        tonesNext.currEnv.hasStopPoint = true;
        tonesNext.currEnv.stopPos = tonesNext.pendEnv.stopPos + 32;
    } else {
        tonesNext.currEnv.hasStopPoint = false;
        tonesNext.currEnv.stopPos = 64;
    }

    // Is the visible part of each envelope non-zero?
    bool reg1EnvNonzero = tonesNow.currEnv.stopPos >= 32;
    bool reg2EnvNonzero = tonesNext.currEnv.startPos < 32;

    // Synthesize waves for both overlapping regions.
    float[] wavreg1 = new float[128];
    float[] wavreg2 = new float[128];
    if (tonesNow.numWavs > 0 && reg1EnvNonzero) {
        wavesSynth(ctx.wavesInfoPrev, tonesNow, tonesNow.currEnv,
                ctx.wavesInfoPrev.phaseShift[sb] && (chNum > 0), 128, wavreg1);
    }
    if (tonesNext.numWavs > 0 && reg2EnvNonzero) {
        wavesSynth(ctx.wavesInfo, tonesNext, tonesNext.currEnv,
                ctx.wavesInfo.phaseShift[sb] && (chNum > 0), 0, wavreg2);
    }

    // Hann windowing for wave signals that were not faded by an envelope.
    bool bothRegionsActive = tonesNow.numWavs > 0 && tonesNext.numWavs > 0
            && reg1EnvNonzero && reg2EnvNonzero;
    if (bothRegionsActive) {
        // Cross-fade: second half of the window on region 1, first half on region 2.
        vectorFmul(wavreg1, 0, wavreg1, 0, hann_window, 128, 128);
        vectorFmul(wavreg2, 0, wavreg2, 0, hann_window, 0, 128);
    } else {
        if (tonesNow.numWavs > 0 && !tonesNow.currEnv.hasStopPoint) {
            vectorFmul(wavreg1, 0, wavreg1, 0, hann_window, 128, 128);
        }
        if (tonesNext.numWavs > 0 && !tonesNext.currEnv.hasStartPoint) {
            vectorFmul(wavreg2, 0, wavreg2, 0, hann_window, 0, 128);
        }
    }

    // Overlap-add both regions onto the residual.
    for (int i = 0; i < 128; i++) {
        @out[outOffset + i] += wavreg1[i] + wavreg2[i];
    }
}