/// <summary>
/// Factory: creates codebook <paramref name="number"/> and loads its
/// definition from the setup-header packet.
/// </summary>
internal static VorbisCodebook Init(VorbisStreamDecoder vorbis, DataPacket packet, int number)
{
    var codebook = new VorbisCodebook { BookNum = number };
    codebook.Init(packet);
    return codebook;
}
/// <summary>
/// Decodes a single scalar value from the packet using this codebook's
/// Huffman tree.  Returns -1 when no more data is available or no codeword
/// matches (corrupt stream).
/// </summary>
internal int DecodeScalar(DataPacket packet)
{
    int bitCnt;

    // fast path: peek enough bits to index the prefix lookup table
    var bits = (int)packet.TryPeekBits(PrefixBitLength, out bitCnt);
    if (bitCnt == 0) return -1;     // no data left in the packet

    // try to get the value from the prefix list...
    var node = PrefixList[bits];
    if (node != null)
    {
        packet.SkipBits(node.Length);
        return node.Value;
    }

    // nope, not possible... run the overflow tree
    bits = (int)packet.TryPeekBits(MaxBits, out bitCnt);

    node = PrefixOverflowTree;
    // BUGFIX: the overflow tree may be null (every codeword fit in the prefix
    // table); a corrupt stream reaching this point would otherwise NRE.
    if (node == null) return -1;

    do
    {
        if (node.Bits == (bits & node.Mask))
        {
            packet.SkipBits(node.Length);
            return node.Value;
        }
    } while ((node = node.Next) != null);

    // no codeword matched the peeked bits
    return -1;
}
/// <summary>
/// Factory: reads one mode definition from the setup header and
/// pre-calculates its overlap window(s).
/// </summary>
internal static VorbisMode Init(VorbisStreamDecoder vorbis, DataPacket packet)
{
    var mode = new VorbisMode(vorbis);
    mode.BlockFlag = packet.ReadBit();
    mode.WindowType = (int)packet.ReadBits(16);
    mode.TransformType = (int)packet.ReadBits(16);
    var mapping = (int)packet.ReadBits(8);

    // only window/transform type 0 are defined, and the mapping must exist
    if (mode.WindowType != 0 || mode.TransformType != 0 || mapping >= vorbis.Maps.Length)
    {
        throw new InvalidDataException();
    }

    mode.Mapping = vorbis.Maps[mapping];
    mode.BlockSize = mode.BlockFlag ? vorbis.Block1Size : vorbis.Block0Size;

    // now pre-calc the window(s)...
    if (mode.BlockFlag)
    {
        // long block: four window shapes, all at the long block size
        mode._windows = new float[4][];
        for (var w = 0; w < mode._windows.Length; w++)
        {
            mode._windows[w] = new float[vorbis.Block1Size];
        }
    }
    else
    {
        // short block: a single window shape
        mode._windows = new float[1][];
        mode._windows[0] = new float[vorbis.Block0Size];
    }
    mode.CalcWindows();

    return mode;
}
/// <summary>
/// Factory: reads the time-domain transform type and instantiates the
/// matching implementation (only type 0 is defined by the spec).
/// </summary>
internal static VorbisTime Init(VorbisStreamDecoder vorbis, DataPacket packet)
{
    var type = (int)packet.ReadBits(16);

    VorbisTime time;
    if (type == 0)
    {
        time = new Time0(vorbis);
    }
    else
    {
        // unknown time type => corrupt or unsupported stream
        throw new InvalidDataException();
    }

    time.Init(packet);
    return time;
}
/// <summary>
/// Factory: reads the mapping type and instantiates the matching
/// implementation (only type 0 is defined by the spec).
/// </summary>
internal static VorbisMapping Init(VorbisStreamDecoder vorbis, DataPacket packet)
{
    var type = (int)packet.ReadBits(16);

    VorbisMapping mapping;
    if (type == 0)
    {
        mapping = new Mapping0(vorbis);
    }
    else
    {
        // unknown mapping type => corrupt or unsupported stream
        throw new InvalidDataException();
    }

    mapping.Init(packet);
    return mapping;
}
/// <summary>
/// Factory: reads the floor type and instantiates the matching
/// implementation (types 0 and 1 are defined by the spec).
/// </summary>
internal static VorbisFloor Init(VorbisStreamDecoder vorbis, DataPacket packet)
{
    var type = (int)packet.ReadBits(16);

    VorbisFloor floor;
    if (type == 0)
    {
        floor = new Floor0(vorbis);
    }
    else if (type == 1)
    {
        floor = new Floor1(vorbis);
    }
    else
    {
        // unknown floor type => corrupt or unsupported stream
        throw new InvalidDataException();
    }

    floor.Init(packet);
    return floor;
}
/// <summary>
/// Factory: reads the residue type and instantiates the matching
/// implementation (types 0, 1, and 2 are defined by the spec).
/// </summary>
internal static VorbisResidue Init(VorbisStreamDecoder vorbis, DataPacket packet)
{
    var type = (int)packet.ReadBits(16);

    VorbisResidue residue;
    if (type == 0)
    {
        residue = new Residue0(vorbis);
    }
    else if (type == 1)
    {
        residue = new Residue1(vorbis);
    }
    else if (type == 2)
    {
        residue = new Residue2(vorbis);
    }
    else
    {
        // unknown residue type => corrupt or unsupported stream
        throw new InvalidDataException();
    }

    residue.Init(packet);
    return residue;
}
/// <summary>
/// Reads this floor's data for a single channel from the packet and returns
/// it as a <c>PacketData</c> instance for later processing.
/// </summary>
internal abstract PacketData UnpackPacket(DataPacket packet, int blockSize, int channel);
// Reads and expands this codebook's VQ (vector quantization) lookup table.
void InitLookupTable(DataPacket packet)
{
    MapType = (int)packet.ReadBits(4);
    // map type 0: no lookup table at all
    if (MapType == 0) return;

    var minValue = Utils.ConvertFromVorbisFloat32(packet.ReadUInt32());
    var deltaValue = Utils.ConvertFromVorbisFloat32(packet.ReadUInt32());
    var valueBits = (int)packet.ReadBits(4) + 1;
    var sequence_p = packet.ReadBit();

    // the expanded table always has Entries * Dimensions cells
    var lookupValueCount = Entries * Dimensions;
    var lookupTable = new float[lookupValueCount];
    if (MapType == 1)
    {
        // map type 1 stores only lookup1_values() multiplicands (implicit lattice)
        lookupValueCount = lookup1_values();
    }

    var multiplicands = new uint[lookupValueCount];
    for (var i = 0; i < lookupValueCount; i++)
    {
        multiplicands[i] = (uint)packet.ReadBits(valueBits);
    }

    // now that we have the initial data read in, calculate the entry tree
    if (MapType == 1)
    {
        // map type 1: each dimension picks a multiplicand via the mixed-radix
        // digits (base lookupValueCount) of the entry number
        for (var idx = 0; idx < Entries; idx++)
        {
            var last = 0.0;
            var idxDiv = 1;
            for (var i = 0; i < Dimensions; i++)
            {
                var moff = (idx / idxDiv) % lookupValueCount;
                var value = (float)multiplicands[moff] * deltaValue + minValue + last;
                lookupTable[idx * Dimensions + i] = (float)value;
                // sequence_p turns each vector into a running sum of its dimensions
                if (sequence_p) last = value;
                idxDiv *= lookupValueCount;
            }
        }
    }
    else
    {
        // map type 2: one explicitly-stored multiplicand per table cell
        for (var idx = 0; idx < Entries; idx++)
        {
            var last = 0.0;
            var moff = idx * Dimensions;
            for (var i = 0; i < Dimensions; i++)
            {
                var value = multiplicands[moff] * deltaValue + minValue + last;
                lookupTable[idx * Dimensions + i] = (float)value;
                if (sequence_p) last = value;
                ++moff;
            }
        }
    }

    LookupTable = lookupTable;
}
// Reads the floor-1 posts (curve control points) for one channel into the
// channel's reusable packet-data instance.
internal override PacketData UnpackPacket(DataPacket packet, int blockSize, int channel)
{
    var data = _reusablePacketData[channel];
    data.BlockSize = blockSize;
    data.ForceEnergy = false;
    data.ForceNoEnergy = false;
    data.PostCount = 0;
    Array.Clear(data.Posts, 0, 64);

    // hoist ReadPosts to here since that's all we're doing...
    // first bit: does this channel carry audio this frame?
    if (packet.ReadBit())
    {
        var postCount = 2;
        // the first two posts are always coded directly
        data.Posts[0] = (int)packet.ReadBits(_yBits);
        data.Posts[1] = (int)packet.ReadBits(_yBits);

        for (int i = 0; i < _partitionClass.Length; i++)
        {
            var clsNum = _partitionClass[i];
            var cdim = _classDimensions[clsNum];
            var cbits = _classSubclasses[clsNum];
            var csub = (1 << cbits) - 1;    // mask selecting one subclass index
            var cval = 0U;
            if (cbits > 0)
            {
                // cval packs the per-dimension subclass book selectors;
                // DecodeScalar returns -1 on failure, which casts to uint.MaxValue
                if ((cval = (uint)_classMasterbooks[clsNum].DecodeScalar(packet)) == uint.MaxValue)
                {
                    // we read a bad value... bail
                    postCount = 0;
                    break;
                }
            }
            for (int j = 0; j < cdim; j++)
            {
                var book = _subclassBooks[clsNum][cval & csub];
                cval >>= cbits;
                if (book != null)
                {
                    if ((data.Posts[postCount] = book.DecodeScalar(packet)) == -1)
                    {
                        // we read a bad value... bail
                        postCount = 0;
                        i = _partitionClass.Length;     // also terminates the outer loop
                        break;
                    }
                }
                ++postCount;
            }
        }

        data.PostCount = postCount;
    }

    return data;
}
// Reads the floor-1 configuration: partition classes, class/subclass
// codebooks, the X coordinate list, and the precalculated
// neighbor and sort tables used during curve synthesis.
protected override void Init(DataPacket packet)
{
    _partitionClass = new int[(int)packet.ReadBits(5)];
    for (int i = 0; i < _partitionClass.Length; i++)
    {
        _partitionClass[i] = (int)packet.ReadBits(4);
    }

    var maximum_class = _partitionClass.Max();
    _classDimensions = new int[maximum_class + 1];
    _classSubclasses = new int[maximum_class + 1];
    _classMasterbooks = new VorbisCodebook[maximum_class + 1];
    _classMasterBookIndex = new int[maximum_class + 1];
    _subclassBooks = new VorbisCodebook[maximum_class + 1][];
    _subclassBookIndex = new int[maximum_class + 1][];
    for (int i = 0; i <= maximum_class; i++)
    {
        _classDimensions[i] = (int)packet.ReadBits(3) + 1;
        _classSubclasses[i] = (int)packet.ReadBits(2);
        if (_classSubclasses[i] > 0)
        {
            _classMasterBookIndex[i] = (int)packet.ReadBits(8);
            _classMasterbooks[i] = _vorbis.Books[_classMasterBookIndex[i]];
        }
        // one book slot per subclass combination; a stored book number of 0
        // means "no book" (bookNum becomes -1, leaving the slot null)
        _subclassBooks[i] = new VorbisCodebook[1 << _classSubclasses[i]];
        _subclassBookIndex[i] = new int[_subclassBooks[i].Length];
        for (int j = 0; j < _subclassBooks[i].Length; j++)
        {
            var bookNum = (int)packet.ReadBits(8) - 1;
            if (bookNum >= 0) _subclassBooks[i][j] = _vorbis.Books[bookNum];
            _subclassBookIndex[i][j] = bookNum;
        }
    }

    _multiplier = (int)packet.ReadBits(2);
    _range = _rangeLookup[_multiplier];
    _yBits = _yBitsLookup[_multiplier];
    ++_multiplier;

    var rangeBits = (int)packet.ReadBits(4);

    // the first two X values are always 0 and 2^rangeBits
    var xList = new List<int>();
    xList.Add(0);
    xList.Add(1 << rangeBits);
    for (int i = 0; i < _partitionClass.Length; i++)
    {
        var classNum = _partitionClass[i];
        for (int j = 0; j < _classDimensions[classNum]; j++)
        {
            xList.Add((int)packet.ReadBits(rangeBits));
        }
    }
    _xList = xList.ToArray();

    // precalc the low and high neighbors (and init the sort table)
    _lNeigh = new int[xList.Count];
    _hNeigh = new int[xList.Count];
    _sortIdx = new int[xList.Count];
    _sortIdx[0] = 0;
    _sortIdx[1] = 1;
    for (int i = 2; i < _lNeigh.Length; i++)
    {
        // low neighbor: largest earlier X below _xList[i];
        // high neighbor: smallest earlier X at or above it
        _lNeigh[i] = 0;
        _hNeigh[i] = 1;
        _sortIdx[i] = i;
        for (int j = 2; j < i; j++)
        {
            var temp = _xList[j];
            if (temp < _xList[i])
            {
                if (temp > _xList[_lNeigh[i]]) _lNeigh[i] = j;
            }
            else
            {
                if (temp < _xList[_hNeigh[i]]) _hNeigh[i] = j;
            }
        }
    }

    // precalc the sort table
    for (int i = 0; i < _sortIdx.Length - 1; i++)
    {
        for (int j = i + 1; j < _sortIdx.Length; j++)
        {
            // duplicate X values are forbidden by the spec
            if (_xList[i] == _xList[j]) throw new InvalidDataException();

            if (_xList[_sortIdx[i]] > _xList[_sortIdx[j]])
            {
                // swap the sort indexes
                var temp = _sortIdx[i];
                _sortIdx[i] = _sortIdx[j];
                _sortIdx[j] = temp;
            }
        }
    }

    // pre-create our packet data instances
    _reusablePacketData = new PacketData1[_vorbis._channels];
    for (int i = 0; i < _reusablePacketData.Length; i++)
    {
        _reusablePacketData[i] = new PacketData1();
    }
}
// Reads the residue configuration (range, partition size, cascade, books)
// and precalculates the classification decode map and caches.
protected override void Init(DataPacket packet)
{
    // this is pretty well stolen directly from libvorbis... BSD license
    _begin = (int)packet.ReadBits(24);
    _end = (int)packet.ReadBits(24);
    _partitionSize = (int)packet.ReadBits(24) + 1;
    _classifications = (int)packet.ReadBits(6) + 1;
    _classBook = _vorbis.Books[(int)packet.ReadBits(8)];

    // the cascade is a per-classification bitmap of which passes apply
    _cascade = new int[_classifications];
    var acc = 0;
    for (int i = 0; i < _classifications; i++)
    {
        var low_bits = (int)packet.ReadBits(3);
        if (packet.ReadBit())
        {
            // high 5 bits only present when the flag bit is set
            _cascade[i] = (int)packet.ReadBits(5) << 3 | low_bits;
        }
        else
        {
            _cascade[i] = low_bits;
        }
        acc += icount(_cascade[i]);     // total set bits == number of books to read
    }

    var bookNums = new int[acc];
    for (var i = 0; i < acc; i++)
    {
        bookNums[i] = (int)packet.ReadBits(8);
        // residue books must have a value mapping
        if (_vorbis.Books[bookNums[i]].MapType == 0) throw new InvalidDataException();
    }

    var entries = _classBook.Entries;
    var dim = _classBook.Dimensions;
    var partvals = 1;
    while (dim > 0)
    {
        // classifications^dimensions must fit in the class book's entry count
        partvals *= _classifications;
        if (partvals > entries) throw new InvalidDataException();
        --dim;
    }

    // now the lookups
    dim = _classBook.Dimensions;
    _books = new VorbisCodebook[_classifications][];

    acc = 0;
    var maxstage = 0;
    int stages;
    for (int j = 0; j < _classifications; j++)
    {
        stages = Utils.ilog(_cascade[j]);
        _books[j] = new VorbisCodebook[stages];
        if (stages > 0)
        {
            maxstage = Math.Max(maxstage, stages);
            for (int k = 0; k < stages; k++)
            {
                // only stages whose cascade bit is set get a book
                if ((_cascade[j] & (1 << k)) > 0)
                {
                    _books[j][k] = _vorbis.Books[bookNums[acc++]];
                }
            }
        }
    }
    _maxStages = maxstage;

    // decode map: expands a class-book entry into per-dimension classifications
    _decodeMap = new int[partvals][];
    for (int j = 0; j < partvals; j++)
    {
        var val = j;
        var mult = partvals / _classifications;
        _decodeMap[j] = new int[_classBook.Dimensions];
        for (int k = 0; k < _classBook.Dimensions; k++)
        {
            var deco = val / mult;
            val -= deco * mult;
            mult /= _classifications;
            _decodeMap[j][k] = deco;
        }
    }

    _entryCache = new int[_partitionSize];

    // per-channel cache of decoded partition words, sized for the worst case
    _partWordCache = new int[_vorbis._channels][][];
    var maxPartWords = ((_end - _begin) / _partitionSize + _classBook.Dimensions - 1) / _classBook.Dimensions;
    for (int ch = 0; ch < _vorbis._channels; ch++)
    {
        _partWordCache[ch] = new int[maxPartWords][];
    }
}
// Advances the current granule position by the decoded sample count, and
// resolves queued packets' positions once a trustworthy page granule
// position appears after a resync.
void UpdatePosition(int samplesDecoded, DataPacket packet)
{
    _samples += samplesDecoded;

    if (packet.IsResync)
    {
        // during a resync, we have to go through and watch for the next "marker"
        _currentPosition = -packet.PageGranulePosition;
        // _currentPosition will now be end of the page... wait for the value to change, then go back and repopulate the granule positions accordingly...
        _resyncQueue.Push(packet);
    }
    else
    {
        if (samplesDecoded > 0)
        {
            _currentPosition += samplesDecoded;
            packet.GranulePosition = _currentPosition;

            // a negative _currentPosition means we're still inside the resync'd page
            if (_currentPosition < 0)
            {
                if (packet.PageGranulePosition > -_currentPosition)
                {
                    // we now have a valid granuleposition... populate the queued packets' GranulePositions
                    var gp = _currentPosition - samplesDecoded;
                    while (_resyncQueue.Count > 0)
                    {
                        // walk the queue backwards, shifting each position
                        var pkt = _resyncQueue.Pop();
                        var temp = pkt.GranulePosition + gp;
                        pkt.GranulePosition = gp;
                        gp = temp;
                    }
                }
                else
                {
                    // still no marker; queue this packet too
                    packet.GranulePosition = -samplesDecoded;
                    _resyncQueue.Push(packet);
                }
            }
            else if (packet.IsEndOfStream && _currentPosition > packet.PageGranulePosition)
            {
                // the final page may declare fewer samples than we decoded; trim
                var diff = (int)(_currentPosition - packet.PageGranulePosition);
                if (diff >= 0)
                {
                    _preparedLength -= diff;
                    _currentPosition -= diff;
                }
                else
                {
                    // uh-oh. We're supposed to have more samples to this point...
                    _preparedLength = 0;
                }
                packet.GranulePosition = packet.PageGranulePosition;
                _eosFound = true;
            }
        }
    }
}
// Time type 0 carries no configuration data, so there is nothing to read.
protected override void Init(DataPacket packet) { }
// Event handler: remembers the first packet of a pending parameter change so
// the decoder can react when it reaches that packet.
void SetParametersChanging(object sender, ParameterChangeEventArgs e)
{
    var firstPacket = e.FirstPacket;
    _parameterChangePacket = firstPacket;
}
// Writes one partition's worth of decoded values, de-interleaving the single
// decode vector across all channels.  offset arrives in interleaved
// coordinates and is converted back to per-channel coordinates first.
// Returns true if the packet ran out of data (bad packet).
protected override bool WriteVectors(VorbisCodebook codebook, DataPacket packet, float[][] residue, int channel, int offset, int partitionSize)
{
    var chPtr = 0;

    offset /= _channels;
    for (int c = 0; c < partitionSize; )
    {
        var entry = codebook.DecodeScalar(packet);
        if (entry == -1)
        {
            // ran out of data / corrupt packet
            return true;
        }
        for (var d = 0; d < codebook.Dimensions; d++, c++)
        {
            residue[chPtr][offset] += codebook[entry, d];
            // round-robin through the channels; bump offset after each full pass
            if (++chPtr == _channels)
            {
                chPtr = 0;
                offset++;
            }
        }
    }
    return false;
}
/// <summary>
/// Reads the mapping configuration: submap count, channel coupling steps,
/// the per-channel submap multiplex, and each submap's floor/residue.
/// </summary>
protected override void Init(DataPacket packet)
{
    // number of submaps (1 unless the flag bit is set)
    var submapCount = 1;
    if (packet.ReadBit()) submapCount += (int)packet.ReadBits(4);

    // square polar mapping (channel coupling steps)
    var couplingSteps = 0;
    if (packet.ReadBit())
    {
        couplingSteps = (int)packet.ReadBits(8) + 1;
    }

    var couplingBits = Utils.ilog(_vorbis._channels - 1);
    CouplingSteps = new CouplingStep[couplingSteps];
    for (int j = 0; j < couplingSteps; j++)
    {
        var magnitude = (int)packet.ReadBits(couplingBits);
        var angle = (int)packet.ReadBits(couplingBits);
        // both channel indexes must be distinct and in range
        if (magnitude == angle || magnitude > _vorbis._channels - 1 || angle > _vorbis._channels - 1)
            throw new InvalidDataException();
        CouplingSteps[j] = new CouplingStep { Angle = angle, Magnitude = magnitude };
    }

    // reserved bits must be zero
    if (packet.ReadBits(2) != 0UL) throw new InvalidDataException();

    // channel multiplex: which submap each channel uses
    var mux = new int[_vorbis._channels];
    if (submapCount > 1)
    {
        // BUGFIX: iterate over mux, not ChannelSubmap — ChannelSubmap is not
        // allocated until the end of this method, so reading its Length here
        // would throw a NullReferenceException.
        for (int c = 0; c < mux.Length; c++)
        {
            mux[c] = (int)packet.ReadBits(4);
            if (mux[c] >= submapCount) throw new InvalidDataException();
        }
    }

    // submaps: each references a floor and a residue configuration
    Submaps = new Submap[submapCount];
    for (int j = 0; j < submapCount; j++)
    {
        packet.ReadBits(8); // unused placeholder

        var floorNum = (int)packet.ReadBits(8);
        if (floorNum >= _vorbis.Floors.Length) throw new InvalidDataException();

        var residueNum = (int)packet.ReadBits(8);
        if (residueNum >= _vorbis.Residues.Length) throw new InvalidDataException();

        // BUGFIX: look up the residue by residueNum, not floorNum — the
        // original indexed Residues with the floor number, ignoring the
        // residue number it had just read and validated.
        Submaps[j] = new Submap
        {
            Floor = _vorbis.Floors[floorNum],
            Residue = _vorbis.Residues[residueNum]
        };
    }

    // resolve the per-channel submap references
    ChannelSubmap = new Submap[_vorbis._channels];
    for (int c = 0; c < ChannelSubmap.Length; c++)
    {
        ChannelSubmap[c] = Submaps[mux[c]];
    }
}
// Type 2 residue is decoded with the type 0 logic over one interleaved
// vector big enough for every channel's samples; WriteVectors(...) knows
// the real channel count and de-interleaves the data accordingly.
internal override float[][] Decode(DataPacket packet, bool[] doNotDecode, int channels, int blockSize)
{
    _channels = channels;

    var interleavedBlockSize = blockSize * channels;
    return base.Decode(packet, doNotDecode, 1, interleavedBlockSize);
}
// Writes one partition's worth of decoded values sequentially into the
// channel's residue buffer.  Returns true on a bad/short packet.
protected override bool WriteVectors(VorbisCodebook codebook, DataPacket packet, float[][] residue, int channel, int offset, int partitionSize)
{
    var target = residue[channel];

    var written = 0;
    while (written < partitionSize)
    {
        var entry = codebook.DecodeScalar(packet);
        if (entry == -1)
        {
            // ran out of data / corrupt packet
            return true;
        }
        for (var dim = 0; dim < codebook.Dimensions; dim++, written++)
        {
            target[offset + written] += codebook[entry, dim];
        }
    }
    return false;
}
// Writes one partition's worth of decoded values.  Type 0 stores the data
// transposed: all entries are decoded first, then written dimension-major.
// Returns true on a bad/short packet.
protected virtual bool WriteVectors(VorbisCodebook codebook, DataPacket packet, float[][] residue, int channel, int offset, int partitionSize)
{
    var target = residue[channel];
    var entryCount = partitionSize / codebook.Dimensions;

    // decode every entry for this partition up front
    for (var e = 0; e < entryCount; e++)
    {
        var entry = codebook.DecodeScalar(packet);
        if (entry == -1)
        {
            // ran out of data / corrupt packet
            return true;
        }
        _entryCache[e] = entry;
    }

    // write the cached entries back transposed (dimension-major)
    for (var d = 0; d < codebook.Dimensions; d++)
    {
        for (var e = 0; e < entryCount; e++, offset++)
        {
            target[offset] += codebook[_entryCache[e], d];
        }
    }
    return false;
}
// Reads the codeword lengths for every entry and builds the Huffman decode
// structures (prefix lookup table + overflow list).  Ported from
// stb_vorbis's codebook setup.
void InitTree(DataPacket packet)
{
    bool sparse;
    int total = 0;

    if (packet.ReadBit())
    {
        // ordered: lengths are stored as run-lengths of entries per length value
        var len = (int)packet.ReadBits(5) + 1;
        for (var i = 0; i < Entries; )
        {
            var cnt = (int)packet.ReadBits(Utils.ilog(Entries - i));

            while (--cnt >= 0)
            {
                Lengths[i++] = len;
            }

            ++len;
        }
        total = 0;
        sparse = false;
    }
    else
    {
        // unordered: one length per entry, each optionally flagged unused (sparse)
        sparse = packet.ReadBit();
        for (var i = 0; i < Entries; i++)
        {
            if (!sparse || packet.ReadBit())
            {
                Lengths[i] = (int)packet.ReadBits(5) + 1;
                ++total;
            }
            else
            {
                // -1 marks the entry as unused
                Lengths[i] = -1;
            }
        }
    }
    MaxBits = Lengths.Max();

    int sortedCount = 0;
    int[] codewordLengths = null;
    if (sparse && total >= Entries >> 2)
    {
        // dense enough that treating the table as non-sparse is cheaper
        codewordLengths = new int[Entries];
        Array.Copy(Lengths, codewordLengths, Entries);

        sparse = false;
    }

    // compute size of sorted tables
    if (sparse)
    {
        sortedCount = total;
    }
    else
    {
        sortedCount = 0;
    }

    int sortedEntries = sortedCount;

    int[] values = null;
    int[] codewords = null;
    if (!sparse)
    {
        codewords = new int[Entries];
    }
    else if (sortedEntries != 0)
    {
        // sparse path: only the used entries get codeword/value storage
        codewordLengths = new int[sortedEntries];
        codewords = new int[sortedEntries];
        values = new int[sortedEntries];
    }

    if (!ComputeCodewords(sparse, sortedEntries, codewords, codewordLengths, len: Lengths, n: Entries, values: values)) throw new InvalidDataException();

    // build the fast prefix lookup table plus the overflow list for long codewords
    PrefixList = Huffman.BuildPrefixedLinkedList(values ?? Enumerable.Range(0, codewords.Length).ToArray(), codewordLengths ?? Lengths, codewords, out PrefixBitLength, out PrefixOverflowTree);
}
// Parses the Vorbis identification header ("\x01vorbis").  Returns false if
// the packet is not an identification header (packet left un-consumed).
bool ProcessStreamHeader(DataPacket packet)
{
    if (!packet.ReadBytes(7).SequenceEqual(new byte[] { 0x01, 0x76, 0x6f, 0x72, 0x62, 0x69, 0x73 }))
    {
        // don't mark the packet as done... it might be used elsewhere
        _glueBits += packet.Length * 8;
        return false;
    }

    if (!_pagesSeen.Contains((_lastPageSeen = packet.PageSequenceNumber))) _pagesSeen.Add(_lastPageSeen);

    _glueBits += 56;    // the 7 signature bytes just read

    var startPos = packet.BitsRead;

    if (packet.ReadInt32() != 0) throw new InvalidDataException("Only Vorbis stream version 0 is supported.");

    _channels = packet.ReadByte();
    _sampleRate = packet.ReadInt32();
    _upperBitrate = packet.ReadInt32();
    _nominalBitrate = packet.ReadInt32();
    _lowerBitrate = packet.ReadInt32();

    // block sizes are stored as log2 values
    Block0Size = 1 << (int)packet.ReadBits(4);
    Block1Size = 1 << (int)packet.ReadBits(4);

    if (_nominalBitrate == 0)
    {
        if (_upperBitrate > 0 && _lowerBitrate > 0)
        {
            // estimate the nominal bitrate from the declared bounds
            _nominalBitrate = (_upperBitrate + _lowerBitrate) / 2;
        }
    }

    _metaBits += packet.BitsRead - startPos + 8;
    _wasteHdrBits += 8 * packet.Length - packet.BitsRead;

    return true;
}
// Decodes one audio packet: reads the mode, unpacks floor data for each
// channel, then decodes and accumulates the residue per submap.  Returns
// false if the packet isn't a decodable audio packet.
bool UnpackPacket(DataPacket packet)
{
    // make sure we're on an audio packet (first bit must be 0)
    if (packet.ReadBit())
    {
        // we really can't do anything... count the bits as waste
        return false;
    }

    // get mode and prev/next flags
    var modeBits = _modeFieldBits;
    _mode = Modes[(int)packet.ReadBits(_modeFieldBits)];
    if (_mode.BlockFlag)
    {
        // long blocks also carry window-overlap flags for the neighboring blocks
        _prevFlag = packet.ReadBit();
        _nextFlag = packet.ReadBit();
        modeBits += 2;
    }
    else
    {
        _prevFlag = _nextFlag = false;
    }

    if (packet.IsShort) return false;

    var startBits = packet.BitsRead;

    var halfBlockSize = _mode.BlockSize / 2;

    // read the noise floor data (but don't decode yet)
    for (int i = 0; i < _channels; i++)
    {
        _floorData[i] = _mode.Mapping.ChannelSubmap[i].Floor.UnpackPacket(packet, _mode.BlockSize, i);
        _noExecuteChannel[i] = !_floorData[i].ExecuteChannel;

        // go ahead and clear the residue buffers
        Array.Clear(_residue[i], 0, halfBlockSize);
    }

    // make sure we handle no-energy channels correctly given the couplings...
    foreach (var step in _mode.Mapping.CouplingSteps)
    {
        if (_floorData[step.Angle].ExecuteChannel || _floorData[step.Magnitude].ExecuteChannel)
        {
            _floorData[step.Angle].ForceEnergy = true;
            _floorData[step.Magnitude].ForceEnergy = true;
        }
    }

    var floorBits = packet.BitsRead - startBits;
    startBits = packet.BitsRead;

    foreach (var subMap in _mode.Mapping.Submaps)
    {
        // channels not belonging to this submap must not decode residue energy
        for (int j = 0; j < _channels; j++)
        {
            if (_mode.Mapping.ChannelSubmap[j] != subMap)
            {
                _floorData[j].ForceNoEnergy = true;
            }
        }

        var rTemp = subMap.Residue.Decode(packet, _noExecuteChannel, _channels, _mode.BlockSize);
        for (int c = 0; c < _channels; c++)
        {
            // accumulate the decoded residue into the per-channel buffers
            var r = _residue[c];
            var rt = rTemp[c];
            for (int i = 0; i < halfBlockSize; i++)
            {
                r[i] += rt[i];
            }
        }
    }

    // update the bit-usage statistics
    _glueBits += 1;
    _modeBits += modeBits;
    _floorBits += floorBits;
    _resBits += packet.BitsRead - startBits;
    _wasteBits += 8 * packet.Length - packet.BitsRead;

    _packetCount += 1;

    return true;
}
// Reads the floor-0 (LSP) configuration and precalculates the bark and
// w-delta maps for both block sizes.
protected override void Init(DataPacket packet)
{
    // this is pretty well stolen directly from libvorbis... BSD license
    _order = (int)packet.ReadBits(8);
    _rate = (int)packet.ReadBits(16);
    _bark_map_size = (int)packet.ReadBits(16);
    _ampBits = (int)packet.ReadBits(6);
    _ampOfs = (int)packet.ReadBits(8);
    _books = new VorbisCodebook[(int)packet.ReadBits(4) + 1];

    if (_order < 1 || _rate < 1 || _bark_map_size < 1 || _books.Length == 0) throw new InvalidDataException();

    _ampDiv = (1 << _ampBits) - 1;

    for (int i = 0; i < _books.Length; i++)
    {
        var num = (int)packet.ReadBits(8);
        if (num < 0 || num >= _vorbis.Books.Length) throw new InvalidDataException();
        var book = _vorbis.Books[num];

        // floor books must have a value mapping and at least one dimension
        if (book.MapType == 0 || book.Dimensions < 1) throw new InvalidDataException();

        _books[i] = book;
    }
    _bookBits = Utils.ilog(_books.Length);

    // bark curves keyed by block size (half the block is the spectrum length)
    _barkMaps = new Dictionary<int, int[]>();
    _barkMaps[_vorbis.Block0Size] = SynthesizeBarkCurve(_vorbis.Block0Size / 2);
    _barkMaps[_vorbis.Block1Size] = SynthesizeBarkCurve(_vorbis.Block1Size / 2);

    _wMap = new Dictionary<int, float[]>();
    _wMap[_vorbis.Block0Size] = SynthesizeWDelMap(_vorbis.Block0Size / 2);
    _wMap[_vorbis.Block1Size] = SynthesizeWDelMap(_vorbis.Block1Size / 2);

    // one reusable packet-data instance per channel; Coeff holds the LSP coefficients
    _reusablePacketData = new PacketData0[_vorbis._channels];
    for (int i = 0; i < _reusablePacketData.Length; i++)
    {
        _reusablePacketData[i] = new PacketData0() { Coeff = new float[_order + 1] };
    }
}
/// <summary>
/// Loads this codebook's definition from the setup-header packet: sync
/// pattern, dimension/entry counts, codeword tree, and VQ lookup table.
/// </summary>
internal void Init(DataPacket packet)
{
    // every codebook starts with the 24-bit sync pattern 0x564342 ("BCV")
    var sync = packet.ReadBits(24);
    if (sync != 0x564342UL)
    {
        throw new InvalidDataException();
    }

    // get the counts
    Dimensions = (int)packet.ReadBits(16);
    Entries = (int)packet.ReadBits(24);

    // init the storage, then load the Huffman tree and lookup table
    Lengths = new int[Entries];

    InitTree(packet);
    InitLookupTable(packet);
}
// Decodes the residue for all channels, one cascade stage at a time; stage 0
// also decodes the per-partition classification words.  On a bad packet the
// loops are exited by clobbering the loop indices, and whatever was decoded
// so far is returned.
internal override float[][] Decode(DataPacket packet, bool[] doNotDecode, int channels, int blockSize)
{
    var residue = GetResidueBuffer(doNotDecode.Length);

    // this is pretty well stolen directly from libvorbis... BSD license
    var end = _end < blockSize / 2 ? _end : blockSize / 2;
    var n = end - _begin;

    if (n > 0 && doNotDecode.Contains(false))
    {
        var partVals = n / _partitionSize;

        var partWords = (partVals + _classBook.Dimensions - 1) / _classBook.Dimensions;
        for (int j = 0; j < channels; j++)
        {
            Array.Clear(_partWordCache[j], 0, partWords);
        }

        for (int s = 0; s < _maxStages; s++)
        {
            // i counts partitions, l counts classification words
            for (int i = 0, l = 0; i < partVals; l++)
            {
                if (s == 0)
                {
                    // first stage: decode the classification word for each channel
                    for (int j = 0; j < channels; j++)
                    {
                        var idx = _classBook.DecodeScalar(packet);
                        if (idx >= 0 && idx < _decodeMap.Length)
                        {
                            _partWordCache[j][l] = _decodeMap[idx];
                        }
                        else
                        {
                            // bad packet... exit now and try to use what we already have
                            i = partVals;
                            s = _maxStages;
                            break;
                        }
                    }
                }
                for (int k = 0; i < partVals && k < _classBook.Dimensions; k++, i++)
                {
                    var offset = _begin + i * _partitionSize;
                    for (int j = 0; j < channels; j++)
                    {
                        var idx = _partWordCache[j][l][k];
                        // only decode this partition if its cascade bit for this stage is set
                        if ((_cascade[idx] & (1 << s)) != 0)
                        {
                            var book = _books[idx][s];
                            if (book != null)
                            {
                                if (WriteVectors(book, packet, residue, j, offset, _partitionSize))
                                {
                                    // bad packet... exit now and try to use what we already have
                                    i = partVals;
                                    s = _maxStages;
                                    break;
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    return residue;
}
/// <summary>
/// Reads this component's configuration from a setup-header packet.
/// </summary>
protected abstract void Init(DataPacket packet);
/// <summary>
/// Decodes this residue's data for all channels from the packet and returns
/// the per-channel residue buffers.
/// </summary>
internal abstract float[][] Decode(DataPacket packet, bool[] doNotDecode, int channels, int blockSize);
// Reads the floor-0 amplitude and LSP coefficients for one channel.  On any
// decode failure the amplitude is zeroed so the channel renders silent.
internal override PacketData UnpackPacket(DataPacket packet, int blockSize, int channel)
{
    var data = _reusablePacketData[channel];
    data.BlockSize = blockSize;
    data.ForceEnergy = false;
    data.ForceNoEnergy = false;

    data.Amp = packet.ReadBits(_ampBits);
    if (data.Amp > 0f)
    {
        // this is pretty well stolen directly from libvorbis... BSD license
        Array.Clear(data.Coeff, 0, data.Coeff.Length);

        // rescale the raw amplitude into the configured range
        data.Amp = (float)(data.Amp / _ampDiv * _ampOfs);

        var bookNum = (uint)packet.ReadBits(_bookBits);
        if (bookNum >= _books.Length)
        {
            // we ran out of data or the packet is corrupt... 0 the floor and return
            data.Amp = 0;
            return data;
        }
        var book = _books[bookNum];

        // first, the book decode...
        for (int i = 0; i < _order; )
        {
            var entry = book.DecodeScalar(packet);
            if (entry == -1)
            {
                // we ran out of data or the packet is corrupt... 0 the floor and return
                data.Amp = 0;
                return data;
            }
            for (int j = 0; i < _order && j < book.Dimensions; j++, i++)
            {
                data.Coeff[i] = book[entry, j];
            }
        }

        // then, the "averaging": each group of Dimensions coefficients is
        // offset by the final value of the previous group
        var last = 0f;
        for (int j = 0; j < _order; )
        {
            for (int k = 0; j < _order && k < book.Dimensions; j++, k++)
            {
                data.Coeff[j] += last;
            }
            last = data.Coeff[j - 1];
        }
    }
    return data;
}
// Handles an in-band parameter change: re-reads whichever headers are
// present (identification / comment / book) and resets the decoder.  A full
// reset only happens if a new identification header was found.
void ProcessParameterChange(DataPacket packet)
{
    _parameterChangePacket = null;

    // try to do a stream header...
    var wasPeek = false;
    var doFullReset = false;
    if (ProcessStreamHeader(packet))
    {
        packet.Done();
        wasPeek = true;
        doFullReset = true;
        packet = _packetProvider.PeekNextPacket();
        if (packet == null) throw new InvalidDataException("Couldn't get next packet!");
    }

    // try to do a comment header...
    if (LoadComments(packet))
    {
        if (wasPeek)
        {
            // consume the packet that was only peeked above
            _packetProvider.GetNextPacket().Done();
        }
        else
        {
            packet.Done();
        }
        wasPeek = true;
        packet = _packetProvider.PeekNextPacket();
        if (packet == null) throw new InvalidDataException("Couldn't get next packet!");
    }

    // try to do a book header...
    if (LoadBooks(packet))
    {
        if (wasPeek)
        {
            _packetProvider.GetNextPacket().Done();
        }
        else
        {
            packet.Done();
        }
    }

    ResetDecoder(doFullReset);
}