// Section 2.4.1.3.19.3 // page 69, 70
// Encodes an (Offset, Length) pair into the 16-bit CopyToken representation.
// The split between offset bits (high) and length bits (low) depends on how far
// the current position is into the chunk, as computed by CopyTokenHelp.
// Offset is stored biased by 1, Length biased by 3.
private static CopyToken PackCopyToken(byte[] Data, DecompressionState state, UInt16 Offset, UInt16 Length)
{
    var help = CopyToken.CopyTokenHelp(state.DecompressedCurrent, state.DecompressedChunkStart);

    // High field: (Offset - 1) shifted into the top (16 - BitCount) ... wait, shifted
    // left by (16 - BitCount) so it occupies the offset bits above the length bits.
    ushort offsetBits = (ushort)((Offset - 1) << (16 - help.BitCount));

    // Low field: the biased length occupies the remaining low bits.
    ushort lengthBits = (ushort)(Length - 3);

    return new CopyToken((UInt16)(offsetBits | lengthBits));
}
// Section 2.4.1.3.19.4 // page 70
// Finds the longest backward match for the data at DecompressedCurrent within
// the current chunk. Returns (Offset, Length) for a usable match, or (0, 0)
// when the best match is shorter than the 3-byte CopyToken minimum.
private static MatchingResult Matching(byte[] Data, DecompressionState state, int DecompressedEnd)
{
    UInt16 bestLength = 0;
    int bestCandidate = 0;

    // Scan candidates from the byte just before the current position back to the
    // chunk start. Strict '>' means ties keep the nearest (first-found) candidate.
    for (int candidate = state.DecompressedCurrent - 1; candidate >= state.DecompressedChunkStart; --candidate)
    {
        int c = candidate;
        int d = state.DecompressedCurrent;
        UInt16 runLength = 0;
        while (d < DecompressedEnd && Data[d] == Data[c])
        {
            ++runLength;
            ++c;
            ++d;
        }
        if (runLength > bestLength)
        {
            bestLength = runLength;
            bestCandidate = candidate;
        }
    }

    // Matches shorter than 3 bytes cannot be encoded as a CopyToken.
    if (bestLength < 3)
    {
        return new MatchingResult(0, 0);
    }

    // Clamp to the maximum length encodable at this position in the chunk.
    ushort maximumLength = CopyToken.CopyTokenHelp(state.DecompressedCurrent, state.DecompressedChunkStart).MaximumLength;
    UInt16 length = Math.Min(bestLength, maximumLength);
    UInt16 offset = (UInt16)(state.DecompressedCurrent - bestCandidate);
    return new MatchingResult(offset, length);
}
// Reads one token sequence: a flag byte followed by up to 8 tokens.
// The flag byte's bits (mapped most-significant-first by GetTokenTypeAtIndex,
// i.e. 0 -> 7, 1 -> 6, ..., 7 -> 0) say whether each token is a CopyToken or a
// LiteralToken. The last sequence of a chunk may contain fewer than 8 tokens,
// which is why remainingBytes is checked before each token.
public TokenSequence(XlBinaryReader CompressedData, int remainingBytes)
{
    this.FlagByte = CompressedData.ReadByte();
    --remainingBytes;

    for (int i = 0; i < 8; i++)
    {
        // Use '<= 0' rather than '== 0': on a malformed stream a two-byte
        // CopyToken can push remainingBytes negative, and an equality check
        // would then never terminate this loop.
        if (remainingBytes <= 0)
        {
            break;
        }

        TokenType tokenType = GetTokenTypeAtIndex(i);
        if (tokenType == TokenType.CopyToken)
        {
            var token = new CopyToken(CompressedData);
            this._Tokens.Add(token);
            remainingBytes -= token.GetSizeInBytes();
        }
        else if (tokenType == TokenType.LiteralToken)
        {
            var token = new LiteralToken(CompressedData);
            this._Tokens.Add(token);
            remainingBytes -= token.GetSizeInBytes();
        }
        else
        {
            // Defensive: GetTokenTypeAtIndex should only yield the two values above.
            throw new InvalidOperationException($"Unexpected token type '{tokenType}' at token index {i}.");
        }
    }
}