/// <summary>
/// Writes the mode configuration at index <paramref name="i"/> into the
/// setup-header bit stream.
/// </summary>
/// <param name="buffer">Destination bit-packing buffer.</param>
/// <param name="codecSetup">Codec setup holding the mode parameter list.</param>
/// <param name="i">Index of the mode entry to serialize.</param>
private static void PackModes(EncodeBuffer buffer, CodecSetup codecSetup, int i)
{
    var mode = codecSetup.ModeParams[i];

    // Field widths follow the setup-header layout for a single mode entry.
    buffer.Write((uint)mode.BlockFlag, 1);
    buffer.Write((uint)mode.WindowType, 16);
    buffer.Write((uint)mode.TransformType, 16);
    buffer.Write((uint)mode.Mapping, 8);
}
/// <summary>
/// Writes a floor (type 1) configuration into the setup-header bit stream:
/// partition-class list, per-class dimension/sub-book data, then the post
/// list encoded with a bit width derived from the largest post position.
/// </summary>
/// <param name="buffer">Destination bit-packing buffer.</param>
/// <param name="floor">Floor configuration to serialize.</param>
private static void PackFloor(EncodeBuffer buffer, Floor floor)
{
    var count = 0;
    // PostList[1] holds the maximum post position for this floor's setup.
    var maxposit = floor.PostList[1];
    var maxclass = -1;

    // save out partitions
    buffer.Write((uint)floor.PartitionClass.Length, 5); // only 0 to 31 legal
    foreach (var partitionClass in floor.PartitionClass)
    {
        buffer.Write((uint)partitionClass, 4); // only 0 to 15 legal
        // Track the highest class number referenced so we only emit
        // configuration for classes that are actually used.
        if (maxclass < partitionClass)
        {
            maxclass = partitionClass;
        }
    }

    // save out partition classes
    for (var j = 0; j < maxclass + 1; j++)
    {
        buffer.Write((uint)(floor.ClassDimensions[j] - 1), 3); // 1 to 8
        buffer.Write((uint)floor.ClassSubs[j], 2); // 0 to 3
        // The master class book is only present when there are sub-classes.
        if (floor.ClassSubs[j] != 0)
        {
            buffer.Write((uint)floor.ClassBook[j], 8);
        }

        // 2^ClassSubs sub-book slots; +1 offsets the "unused" sentinel of -1.
        for (var k = 0; k < 1 << floor.ClassSubs[j]; k++)
        {
            buffer.Write((uint)(floor.ClassSubBook[j][k] + 1), 8);
        }
    }

    // save out the post list, only 1,2,3,4 legal now
    buffer.Write((uint)(floor.Mult - 1), 2);

    // maxposit cannot legally be less than 1; this is encode-side, we can assume our setup is OK
    buffer.Write((uint)Encoding.Log(maxposit - 1), 4);
    var rangebits = Encoding.Log(maxposit - 1);

    // Note: k deliberately persists across j iterations — each partition
    // consumes ClassDimensions posts, written from a flat running index.
    // PostList[0..1] are the endpoint posts, hence the +2 skip.
    for (int j = 0, k = 0; j < floor.PartitionClass.Length; j++)
    {
        count += floor.ClassDimensions[floor.PartitionClass[j]];
        for (; k < count; k++)
        {
            buffer.Write((uint)floor.PostList[k + 2], rangebits);
        }
    }
}
/// <summary>
/// Writes the identification header packet: packet type, signature string,
/// stream basics (channels, sample rate), bit-rate fields, and the two
/// block-size exponents followed by the framing bit.
/// </summary>
/// <param name="buffer">Destination bit-packing buffer.</param>
/// <param name="info">Stream-level configuration to serialize.</param>
private static void PackInfo(EncodeBuffer buffer, VorbisInfo info)
{
    var setup = info.CodecSetup;

    // Packet preamble: type 0x01 followed by the signature string.
    buffer.Write(0x01, 8);
    buffer.WriteString(VorbisString);

    // Stream basics: version 0, channel count, sample rate.
    buffer.Write(0x00, 32);
    buffer.Write((uint)info.Channels, 8);
    buffer.Write((uint)info.SampleRate, 32);

    // Bit-rate triple: only the nominal rate is used; upper/lower stay zero.
    buffer.Write(0, 32);
    buffer.Write((uint)info.BitRateNominal, 32);
    buffer.Write(0, 32);

    // Block sizes are stored as log2 exponents, then the framing bit closes the packet.
    buffer.Write((uint)Encoding.Log(setup.BlockSizes[0] - 1), 4);
    buffer.Write((uint)Encoding.Log(setup.BlockSizes[1] - 1), 4);
    buffer.Write(1, 1);
}
/// <summary>
/// Writes the comment (metadata) header packet: vendor string followed by
/// the length-prefixed user comments and the framing bit.
/// </summary>
/// <param name="buffer">Destination bit-packing buffer.</param>
/// <param name="vorbisComment">User comment collection to serialize.</param>
private static void PackComment(EncodeBuffer buffer, Comments vorbisComment)
{
    // Preamble
    buffer.Write(0x03, 8);
    buffer.WriteString(VorbisString);

    // Vendor
    buffer.Write((uint)VendorString.Length, 32);
    buffer.WriteString(VendorString);

    // Comments
    buffer.Write((uint)vorbisComment.UserComments.Count, 32);
    foreach (var comment in vorbisComment.UserComments)
    {
        if (!string.IsNullOrEmpty(comment))
        {
            // NOTE(review): comment.Length is the UTF-16 char count, but the
            // Vorbis comment header stores the UTF-8 *byte* length; the two
            // differ for non-ASCII comments. Confirm what encoding
            // WriteString emits and switch to a byte count if it is UTF-8.
            buffer.Write((uint)comment.Length, 32);
            buffer.WriteString(comment);
        }
        else
        {
            // Null/empty comments are written as a zero-length entry.
            buffer.Write(0, 32);
        }
    }

    // Framing bit closes the packet.
    buffer.Write(1, 1);
}
/// <summary>
/// Writes a residue configuration into the setup-header bit stream:
/// begin/end range, grouping, partition count, group book, the per-partition
/// second-stage bitmasks, and finally the referenced book list.
/// </summary>
/// <param name="buffer">Destination bit-packing buffer.</param>
/// <param name="residue">Residue configuration to serialize.</param>
private static void PackResidue(EncodeBuffer buffer, Residue residue)
{
    buffer.Write((uint)residue.Begin, 24);
    buffer.Write((uint)residue.End, 24);

    // residue vectors to group and code with a partitioned book,
    // then the number of possible partition choices.
    buffer.Write((uint)(residue.Grouping - 1), 24);
    buffer.Write((uint)(residue.Partitions - 1), 6);
    buffer.Write((uint)residue.GroupBook, 8); // group huffman book

    // SecondStages is a bitmask; as encoding progresses pass by pass, a
    // set bit indicates this partition class has bits to write that pass.
    var totalBooks = 0;
    for (var partition = 0; partition < residue.Partitions; partition++)
    {
        var stages = residue.SecondStages[partition];
        if (Encoding.Log(stages) > 3)
        {
            // Escape encoding for masks wider than 3 bits (a minor hack due
            // to not thinking ahead): low 3 bits, flag bit, high 5 bits.
            buffer.Write((uint)stages, 3);
            buffer.Write(1, 1);
            buffer.Write((uint)stages >> 3, 5);
        }
        else
        {
            buffer.Write((uint)stages, 4); // trailing zero doubles as the flag
        }

        totalBooks += Count(stages);
    }

    // One book reference per set bit accumulated above.
    for (var book = 0; book < totalBooks; book++)
    {
        buffer.Write((uint)residue.BookList[book], 8);
    }
}
/// <summary>
/// Encodes one audio packet into <paramref name="buffer"/>: packet type and
/// mode/window flags, per-channel floor curves, psychoacoustic
/// couple/quantize, then residue classification and forward coding per submap.
/// </summary>
/// <param name="pcm">Per-channel PCM input (channel count is taken from its length).</param>
/// <param name="pcmEnd">Number of valid samples per channel.</param>
/// <param name="buffer">Destination bit-packing buffer for the packet.</param>
/// <param name="mapping">Channel-to-submap/floor/residue mapping in use.</param>
/// <param name="work">Per-channel working residue vectors (mutated by coupling).</param>
/// <param name="floorPosts">Per-channel, per-blob floor post fits.</param>
/// <param name="psyLookup">Psychoacoustic lookup used to couple/quantize/normalize.</param>
/// <param name="gmdct">Per-channel MDCT spectra.</param>
private void Encode(
    float[][] pcm,
    int pcmEnd,
    EncodeBuffer buffer,
    Mapping mapping,
    int[][] work,
    int[][][] floorPosts,
    PsyLookup psyLookup,
    float[][] gmdct)
{
    var codecSetup = _vorbisInfo.CodecSetup;
    var channels = pcm.Length;
    var nonzero = new bool[channels];

    // The next phases are performed once for vbr-only and PACKETBLOB
    // times for bitrate managed modes:
    // 1) encode actual mode being used
    // 2) encode the floor for each channel, compute coded mask curve/res
    // 3) normalize and couple
    // 4) encode residue
    // 5) save packet bytes to the packetblob vector

    // iterate over the many masking curve fits we've created
    var coupleBundle = new int[channels][];
    var zerobundle = new bool[channels];
    // This path only uses the middle masking-curve fit of the blob range.
    const int k = PsyGlobal.PacketBlobs / 2;

    // start out our new packet blob with packet type and mode:
    // encode the packet type bit first
    buffer.Write(0, 1);

    // Encode the mode number: frame mode, then pre/post window size flags,
    // then dispatch.
    var modeBits = Encoding.Log(codecSetup.ModeParams.Count - 1);
    buffer.Write((uint)_currentWindow, modeBits);
    if (_currentWindow != 0)
    {
        // Non-zero (long) windows also flag the previous/next window sizes.
        buffer.Write((uint)_lastWindow, 1);
        buffer.Write((uint)_nextWindow, 1);
    }

    // encode floor, compute masking curve, sep out residue
    for (var i = 0; i < channels; i++)
    {
        var submap = mapping.ChannelMuxList[i];
        // nonzero[i] records whether this channel's floor produced audio energy.
        nonzero[i] = _lookups.FloorLookup[mapping.FloorSubMap[submap]].Encode(
            buffer,
            codecSetup.BookParams,
            codecSetup.FullBooks,
            floorPosts[i][k],
            work[i],
            pcmEnd,
            codecSetup.BlockSizes[_currentWindow] / 2);
    }

    // Quantize/couple: iteration is now based on the masking curve, not
    // prequant and coupling — only one prequant/coupling step. Incomplete
    // implementation that assumes the tree is all depth one, or no tree at all.
    psyLookup.CoupleQuantizeNormalize(
        k,
        codecSetup.PsyGlobalParam,
        mapping,
        gmdct,
        work,
        nonzero,
        codecSetup.PsyGlobalParam.SlidingLowPass[_currentWindow][k],
        channels);

    // classify and encode by submap
    for (var i = 0; i < mapping.SubMaps; i++)
    {
        var channelsInBundle = 0;
        var resNumber = mapping.ResidueSubMap[i];

        // Gather the channels that belong to this submap.
        for (var j = 0; j < channels; j++)
        {
            if (mapping.ChannelMuxList[j] == i)
            {
                zerobundle[channelsInBundle] = nonzero[j];
                coupleBundle[channelsInBundle++] = work[j];
            }
        }

        var residue = _lookups.ResidueLookup[resNumber];
        var classifications = residue.Class(
            coupleBundle,
            zerobundle,
            channelsInBundle);

        // Rebuild the channel bundle before the forward pass (same gather
        // as above, without touching zerobundle).
        channelsInBundle = 0;
        for (var j = 0; j < channels; j++)
        {
            if (mapping.ChannelMuxList[j] == i)
            {
                coupleBundle[channelsInBundle++] = work[j];
            }
        }

        residue.Forward(
            buffer,
            pcmEnd,
            coupleBundle,
            zerobundle,
            channelsInBundle,
            classifications);
    }
}
/// <summary>
/// Writes the full setup header packet: codebooks, time-transform
/// placeholders, floor/residue/mapping configurations, the mode list and the
/// closing framing bit.
/// </summary>
/// <param name="buffer">Destination bit-packing buffer.</param>
/// <param name="info">Stream configuration whose codec setup is serialized.</param>
private void PackBooks(EncodeBuffer buffer, VorbisInfo info)
{
    var setup = info.CodecSetup;

    // Packet preamble: type 0x05 followed by the signature string.
    buffer.Write(0x05, 8);
    buffer.WriteString(VorbisString);

    // Codebooks.
    buffer.Write((uint)(setup.BookParams.Count - 1), 8);
    foreach (var book in setup.BookParams)
    {
        PackStaticBook(buffer, book);
    }

    // Time-domain transforms: hook placeholders only.
    buffer.Write(0, 6);
    buffer.Write(0, 16);

    // Floors — only floor type 1 is emitted for now.
    buffer.Write((uint)(setup.FloorParams.Count - 1), 6);
    foreach (var floor in setup.FloorParams)
    {
        buffer.Write(1, 16);
        PackFloor(buffer, floor);
    }

    // Residues, each tagged with its residue type.
    buffer.Write((uint)(setup.ResidueParams.Count - 1), 6);
    foreach (var residue in setup.ResidueParams)
    {
        buffer.Write((uint)residue.ResidueType, 16);
        PackResidue(buffer, residue);
    }

    // Mappings — mapping type is always zero.
    buffer.Write((uint)(setup.MapParams.Count - 1), 6);
    foreach (var mapping in setup.MapParams)
    {
        buffer.Write(0, 16);
        PackMapping(buffer, info, mapping);
    }

    // Modes, then the framing bit closes the packet.
    buffer.Write((uint)(setup.ModeParams.Count - 1), 6);
    for (var mode = 0; mode < setup.ModeParams.Count; mode++)
    {
        PackModes(buffer, setup, mode);
    }

    buffer.Write(1, 1);
}
/// <summary>
/// Serializes a static codebook into the setup-header bit stream: sync
/// pattern and basic parameters, the codeword lengths (length-ordered or
/// unordered/sparse form) and, for mapped books, the dequantization values.
/// </summary>
/// <param name="buffer">Destination bit-packing buffer.</param>
/// <param name="book">Static codebook to serialize.</param>
/// <exception cref="InvalidOperationException">
/// Thrown for an unrecognized map type, or when a mapped book has a null
/// <c>QuantList</c>.
/// </exception>
private void PackStaticBook(EncodeBuffer buffer, IStaticCodeBook book)
{
    var ordered = false;

    // first the basic parameters: 24-bit sync pattern, dimensions, entry count
    buffer.Write(0x564342, 24);
    buffer.Write((uint)book.Dimensions, 16);
    buffer.Write((uint)book.LengthList.Length, 24);

    // pack the codewords. There are two packing types; length ordered and length random.
    int i;
    for (i = 1; i < book.LengthList.Length; i++)
    {
        // "ordered" requires nonzero, non-decreasing lengths throughout.
        if ((book.LengthList[i - 1] == 0) || (book.LengthList[i] < book.LengthList[i - 1]))
        {
            break;
        }
    }

    if (i == book.LengthList.Length)
    {
        ordered = true;
    }

    if (ordered)
    {
        // length ordered. We only need to say how many codewords of each
        // length; the actual codewords are generated deterministically.
        buffer.Write(1, 1);
        buffer.Write((uint)(book.LengthList[0] - 1), 5); // 1 to 32

        var count = 0;
        for (i = 1; i < book.LengthList.Length; i++)
        {
            var current = book.LengthList[i];
            var previous = book.LengthList[i - 1];
            if (current <= previous)
            {
                continue;
            }

            // Emit the running entry total at each step up in length; the
            // field width shrinks as fewer entries remain.
            for (var j = previous; j < current; j++)
            {
                buffer.Write((uint)(i - count), Encoding.Log(book.LengthList.Length - count));
                count = i;
            }
        }

        buffer.Write((uint)(i - count), Encoding.Log(book.LengthList.Length - count));
    }
    else
    {
        // length unordered. Again, we don't code the codeword itself, just
        // the length. This time, though, we have to encode each length.
        buffer.Write(0, 1);

        /* algorithmic mapping has use for 'unused entries', which we tag
         * here. The algorithmic mapping happens as usual, but the unused
         * entry has no codeword. */
        for (i = 0; i < book.LengthList.Length; i++)
        {
            if (book.LengthList[i] == 0)
            {
                break;
            }
        }

        if (i == book.LengthList.Length)
        {
            buffer.Write(0, 1); // no unused entries
            for (i = 0; i < book.LengthList.Length; i++)
            {
                buffer.Write((uint)(book.LengthList[i] - 1), 5);
            }
        }
        else
        {
            buffer.Write(1, 1); // we have unused entries; thus we tag
            for (i = 0; i < book.LengthList.Length; i++)
            {
                if (book.LengthList[i] == 0)
                {
                    buffer.Write(0, 1);
                }
                else
                {
                    buffer.Write(1, 1);
                    buffer.Write((uint)(book.LengthList[i] - 1), 5);
                }
            }
        }
    }

    buffer.Write((uint)book.MapType, 4);
    if (book.MapType == CodeBookMapType.None)
    {
        return;
    }

    // is the entry number the desired return value, or do we have a mapping? If we have a mapping, what type?
    if ((book.MapType != CodeBookMapType.Implicit)
        && (book.MapType != CodeBookMapType.Listed))
    {
        // FIX: the message previously lacked the '$' prefix, so the literal
        // text "{book.MapType}" was thrown instead of the actual value.
        throw new InvalidOperationException($"Unknown CodeBookMapType: {book.MapType}");
    }

    if (book.QuantList == null)
    {
        throw new InvalidOperationException("QuantList cannot be null");
    }

    // values that define the dequantization
    buffer.Write((uint)book.QuantMin, 32);
    buffer.Write((uint)book.QuantDelta, 32);
    buffer.Write((uint)(book.Quant - 1), 4);
    buffer.Write((uint)book.QuantSequenceP, 1);

    var quantVals = 0;
    switch (book.MapType)
    {
        case CodeBookMapType.Implicit:
            // a single column of (entries/dim) quantized values for building
            // a full value list algorithmically (square lattice)
            quantVals = book.GetQuantVals();
            break;
        case CodeBookMapType.Listed:
            // every value (entries * dim total) specified explicitly
            quantVals = book.LengthList.Length * book.Dimensions;
            break;
    }

    // quantized values, written as absolute magnitudes
    for (i = 0; i < quantVals; i++)
    {
        buffer.Write((uint)Math.Abs(book.QuantList[i]), book.Quant);
    }
}
/// <summary>
/// Writes a mapping configuration into the setup-header bit stream:
/// submap/coupling feature flags, coupling steps, channel mux list (when
/// multiple submaps exist) and the per-submap floor/residue references.
/// </summary>
/// <param name="buffer">Destination bit-packing buffer.</param>
/// <param name="info">Stream info supplying the channel count.</param>
/// <param name="mapping">Mapping configuration to serialize.</param>
private static void PackMapping(EncodeBuffer buffer, VorbisInfo info, Mapping mapping)
{
    /* another 'we meant to do it this way' hack... up to beta 4, we
     * packed 4 binary zeros here to signify one submapping in use. We
     * now redefine that to mean four bitflags that indicate use of
     * deeper features; bit0:submappings, bit1:coupling,
     * bit2,3:reserved. This is backward compatible with all actual uses
     * of the beta code. */
    var multipleSubMaps = mapping.SubMaps > 1;
    buffer.Write(multipleSubMaps ? 1u : 0u, 1);
    if (multipleSubMaps)
    {
        buffer.Write((uint)mapping.SubMaps - 1, 4);
    }

    var hasCoupling = mapping.CouplingSteps > 0;
    buffer.Write(hasCoupling ? 1u : 0u, 1);
    if (hasCoupling)
    {
        buffer.Write((uint)mapping.CouplingSteps - 1, 8);

        // Channel indices are packed in just enough bits for the channel count.
        var couplingBits = Encoding.Log(info.Channels - 1);
        for (var step = 0; step < mapping.CouplingSteps; step++)
        {
            buffer.Write((uint)mapping.CouplingMag[step], couplingBits);
            buffer.Write((uint)mapping.CouplingAng[step], couplingBits);
        }
    }

    buffer.Write(0, 2); // bits 2,3: reserved

    // The channel submappings are only written when there is more than one.
    if (multipleSubMaps)
    {
        for (var channel = 0; channel < info.Channels; channel++)
        {
            buffer.Write((uint)mapping.ChannelMuxList[channel], 4);
        }
    }

    for (var submap = 0; submap < mapping.SubMaps; submap++)
    {
        buffer.Write(0, 8); // time submap unused
        buffer.Write((uint)mapping.FloorSubMap[submap], 8);
        buffer.Write((uint)mapping.ResidueSubMap[submap], 8);
    }
}