/* Returns log2 of the size of the main ring buffer area.
 * At least lgwin + 1 bits are reserved so that a newly added block fits in
 * the ring buffer completely while lgwin bits of history remain available,
 * and at least read_block_size_bits + 1 bits because the copy tail length
 * must stay smaller than the ring-buffer size. */
private static unsafe int ComputeRbBits(BrotliEncoderParams *params_) {
    int windowBits = params_->lgwin;
    int blockBits = params_->lgblock;
    /* Equivalent to 1 + max(lgwin, lgblock). */
    int largerBits = windowBits > blockBits ? windowBits : blockBits;
    return largerBits + 1;
}
/* When searching for backward references and no matches have been seen for a
 * long time, some match lookups can be skipped. Unsuccessful match lookups
 * are very expensive, so this heuristic speeds up compression quite a lot:
 * at first, 8-byte strides are taken and every second byte is put into the
 * hasher; after 4x more literals, stride by 16 bytes and put every 4-th byte
 * into the hasher. Applied only to qualities 2 to 9. */
private static unsafe size_t LiteralSpreeLengthForSparseSearch(
    BrotliEncoderParams *params_) {
    /* Shorter spree threshold for the faster qualities (below 9). */
    if (params_->quality < 9) {
        return 64;
    }
    return 512;
}
/* Number of best candidates to evaluate when expanding a Zopfli chain.
 * Quality 11 evaluates more candidates for a denser search. */
private static unsafe size_t MaxZopfliCandidates(
    BrotliEncoderParams *params_) {
    if (params_->quality <= 10) {
        return 1;
    }
    return 5;
}
/* Maximum backward-reference length considered by the Zopfli search,
 * selected by quality level. */
private static unsafe size_t MaxZopfliLen(BrotliEncoderParams *params_) {
    if (params_->quality <= 10) {
        return MAX_ZOPFLI_LEN_QUALITY_10;
    }
    return MAX_ZOPFLI_LEN_QUALITY_11;
}