public abstract void CreateBackwardReferences( ushort *dictionary_hash, size_t num_bytes, size_t position, byte *ringbuffer, size_t ringbuffer_mask, BrotliEncoderParams *params_, HasherHandle hasher, int *dist_cache, size_t *last_insert_len, Command *commands, size_t *num_commands, size_t *num_literals);
public override unsafe void CreateBackwardReferences(ushort *dictionary_hash, size_t num_bytes, size_t position, byte *ringbuffer, size_t ringbuffer_mask, BrotliEncoderParams *params_, HasherHandle hasher, int *dist_cache, size_t *last_insert_len, Command *commands, size_t *num_commands, size_t *num_literals) { throw new InvalidOperationException(); }
/* This function writes bits into bytes in increasing addresses, and within
 * a byte least-significant-bit first.
 *
 * The function can write up to 56 bits in one go with WriteBits
 * Example: let's assume that 3 bits (Rs below) have been written already:
 *
 * BYTE-0     BYTE+1     BYTE+2
 *
 * 0000 0RRR  0000 0000  0000 0000
 *
 * Now, we could write 5 or less bits in MSB by just shifting by 3
 * and OR'ing to BYTE-0.
 *
 * For n bits, we take the last 5 bits, OR that with high bits in BYTE-0,
 * and locate the rest in BYTE+1, BYTE+2, etc. */
private static unsafe void BrotliWriteBits(size_t n_bits, ulong bits, size_t *pos, byte *array) {
    if (BROTLI_LITTLE_ENDIAN) {
        byte *p = &array[*pos >> 3];
        ulong v = *p;
        v |= bits << (int)(*pos & 7);
        *(ulong *)p = v; /* Set some bits. */
        *pos += n_bits;
    } else {
        /* implicit & 0xff is assumed for uint8_t arithmetics */
        byte *array_pos = &array[*pos >> 3];
        size_t bits_reserved_in_first_byte = (*pos & 7);
        size_t bits_left_to_write;
        bits <<= (int)bits_reserved_in_first_byte;
        *array_pos++ |= (byte)bits;
        for (bits_left_to_write = n_bits + bits_reserved_in_first_byte;
             bits_left_to_write >= 9;
             bits_left_to_write -= 8) {
            bits >>= 8;
            *array_pos++ = (byte)bits;
        }
        *array_pos = 0;
        *pos += n_bits;
    }
}
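/* Minimal safe sketch of the LSB-first packing described above (assumption:
 * illustrative BitWriterSketch type, not the library's API): bits go into
 * increasing byte addresses, least-significant bit of each byte first, written
 * here one bit at a time instead of the 56-bit fast path. */
using System;

static class BitWriterSketch
{
    static void WriteBits(byte[] buffer, ref int pos, int nBits, ulong bits)
    {
        for (int i = 0; i < nBits; i++)
        {
            if (((bits >> i) & 1) != 0)
                buffer[(pos + i) >> 3] |= (byte)(1 << ((pos + i) & 7));
        }
        pos += nBits;
    }

    static void Main()
    {
        var buf = new byte[8];
        int pos = 0;
        WriteBits(buf, ref pos, 3, 0b101);   /* occupies bits 0..2 of byte 0 */
        WriteBits(buf, ref pos, 5, 0b11010); /* occupies bits 3..7 of byte 0 */
        Console.WriteLine(Convert.ToString(buf[0], 2).PadLeft(8, '0')); /* 11010101 */
    }
}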
public static unsafe void InitBlockSplitter(
    ref MemoryManager m, BlockSplitterDistance *self, size_t alphabet_size,
    size_t min_block_size, double split_threshold, size_t num_symbols,
    BlockSplit *split, HistogramDistance **histograms, size_t *histograms_size) {
    size_t max_num_blocks = num_symbols / min_block_size + 1;
    /* We have to allocate one more histogram than the maximum number of block
     * types for the current histogram when the meta-block is too big. */
    size_t max_num_types = Math.Min(max_num_blocks, BROTLI_MAX_NUMBER_OF_BLOCK_TYPES + 1);
    self->alphabet_size_ = alphabet_size;
    self->min_block_size_ = min_block_size;
    self->split_threshold_ = split_threshold;
    self->num_blocks_ = 0;
    self->split_ = split;
    self->histograms_size_ = histograms_size;
    self->target_block_size_ = min_block_size;
    self->block_size_ = 0;
    self->curr_histogram_ix_ = 0;
    self->merge_last_count_ = 0;
    BrotliEnsureCapacity(ref m, sizeof(byte), (void **)&split->types, &split->types_alloc_size, max_num_blocks);
    BrotliEnsureCapacity(ref m, sizeof(uint), (void **)&split->lengths, &split->lengths_alloc_size, max_num_blocks);
    self->split_->num_blocks = max_num_blocks;
    *histograms_size = max_num_types;
    *histograms = (HistogramDistance *)BrotliAllocate(ref m, *histograms_size * sizeof(HistogramDistance));
    self->histograms_ = *histograms;
    /* Clear only current histogram. */
    HistogramDistance.HistogramClear(&self->histograms_[0]);
    self->last_histogram_ix_0 = self->last_histogram_ix_1 = 0;
}
private static unsafe extern void lbm_parse_config_line( [MarshalAs(UnmanagedType.LPStr)] string configLine, [MarshalAs(UnmanagedType.LPStr)] StringBuilder scope, [MarshalAs(UnmanagedType.LPStr)] StringBuilder option, [MarshalAs(UnmanagedType.LPStr)] StringBuilder value, size_t *matches, size_t *confLength);
private static unsafe size_t DecideMultiByteStatsLevel(size_t pos, size_t len, size_t mask, byte *data) { size_t *counts = stackalloc size_t[3]; memset(counts, 0, 3 * sizeof(size_t)); size_t max_utf8 = 1; /* should be 2, but 1 compresses better. */ size_t last_c = 0; size_t i; for (i = 0; i < len; ++i) { size_t c = data[(pos + i) & mask]; ++counts[UTF8Position(last_c, c, 2)]; last_c = c; } if (counts[2] < 500) { max_utf8 = 1; } if (counts[1] + counts[2] < 25) { max_utf8 = 0; } return(max_utf8); }
private static unsafe void BrotliCreateBackwardReferences( size_t num_bytes, size_t position, byte *ringbuffer, size_t ringbuffer_mask, BrotliEncoderParams *params_, HasherHandle hasher, int *dist_cache, size_t *last_insert_len, Command *commands, size_t *num_commands, size_t *num_literals) { switch (params_->hasher.type) { case 2: case 3: case 4: case 5: case 6: case 40: case 41: case 42: case 54: fixed(ushort *ksdh = kStaticDictionaryHash) kHashers[params_->hasher.type].CreateBackwardReferences(ksdh, num_bytes, position, ringbuffer, ringbuffer_mask, params_, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals); break; } }
private static unsafe double ShannonEntropy(uint *population, size_t size, size_t *total) { size_t sum = 0; double retval = 0; uint * population_end = population + size; size_t p; if ((size & 1) != 0) { p = *population++; sum += p; retval -= (double)p * FastLog2(p); } while (population < population_end) { p = *population++; sum += p; retval -= (double)p * FastLog2(p); p = *population++; sum += p; retval -= (double)p * FastLog2(p); } if (sum != 0) { retval += (double)sum * FastLog2(sum); } *total = sum; return(retval); }
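/* Sanity-check sketch for ShannonEntropy (assumption: illustrative code, using
 * Math.Log instead of the library's FastLog2): the return value is
 * sum(-count*log2(count)) + total*log2(total), i.e. the ideal total bit cost of
 * entropy-coding the histogram, and the out parameter receives the population sum. */
using System;

static class EntropySketch
{
    static double ShannonEntropy(uint[] population, out ulong total)
    {
        ulong sum = 0;
        double retval = 0;
        foreach (uint p in population)
        {
            if (p == 0) continue;            /* zero counts contribute nothing */
            sum += p;
            retval -= p * Math.Log(p, 2);
        }
        if (sum != 0) retval += sum * Math.Log(sum, 2);
        total = sum;
        return retval;
    }

    static void Main()
    {
        double bits = ShannonEntropy(new uint[] { 2, 2, 4 }, out ulong total);
        Console.WriteLine($"{bits} bits over {total} symbols"); /* ~12 bits over 8 symbols */
    }
}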
private static unsafe void StoreStaticCommandHuffmanTree( size_t *storage_ix, byte *storage) { BrotliWriteBits( 56, 0x92624416307003U, storage_ix, storage); BrotliWriteBits(3, 0x00000000U, storage_ix, storage); }
private static unsafe void EmitUncompressedMetaBlock(byte *input, size_t input_size, size_t *storage_ix, byte *storage) { BrotliStoreMetaBlockHeader(input_size, true, storage_ix, storage); *storage_ix = (*storage_ix + 7u) & ~7u; memcpy(&storage[*storage_ix >> 3], input, input_size); *storage_ix += input_size << 3; storage[*storage_ix >> 3] = 0; }
private static unsafe void BrotliCompressFragmentTwoPass( ref MemoryManager m, byte *input, size_t input_size, bool is_last, uint *command_buf, byte *literal_buf, int *table, size_t table_size, size_t *storage_ix, byte *storage) { CompressFragmentTwoPass.BrotliCompressFragmentTwoPass(ref m, input, input_size, is_last, command_buf, literal_buf, table, table_size, storage_ix, storage); }
public static void CPU_ZERO_S(size_t size, cpu_set_t *set) { size_t *s = (size_t *)set; for (size_t i = 0; i < size / SizeOf.size_t; i++) { s[i] = 0; } }
private static unsafe void RewindBitPosition(size_t new_storage_ix, size_t *storage_ix, byte *storage) { size_t bitpos = new_storage_ix & 7; size_t mask = (1u << (int)bitpos) - 1; storage[new_storage_ix >> 3] &= (byte)mask; *storage_ix = new_storage_ix; }
private static unsafe void BrotliEstimateBitCostsForLiterals(size_t pos, size_t len, size_t mask, byte *data, float *cost) {
    if (BrotliIsMostlyUTF8(data, pos, mask, len, kMinUTF8Ratio)) {
        EstimateBitCostsForLiteralsUTF8(pos, len, mask, data, cost);
        return;
    } else {
        size_t *histogram = stackalloc size_t[256];
        memset(histogram, 0, 256 * sizeof(size_t));
        size_t window_half = 2000;
        size_t in_window = Math.Min(window_half, len);

        /* Bootstrap histogram. */
        size_t i;
        for (i = 0; i < in_window; ++i) {
            ++histogram[data[(pos + i) & mask]];
        }

        /* Compute bit costs with sliding window. */
        for (i = 0; i < len; ++i) {
            size_t histo;
            if (i >= window_half) {
                /* Remove a byte in the past. */
                --histogram[data[(pos + i - window_half) & mask]];
                --in_window;
            }
            if (i + window_half < len) {
                /* Add a byte in the future. */
                ++histogram[data[(pos + i + window_half) & mask]];
                ++in_window;
            }
            histo = histogram[data[(pos + i) & mask]];
            if (histo == 0) {
                histo = 1;
            }
            {
                double lit_cost = FastLog2(in_window) - FastLog2(histo);
                lit_cost += 0.029;
                if (lit_cost < 1.0) {
                    lit_cost *= 0.5;
                    lit_cost += 0.5;
                }
                cost[i] = (float)lit_cost;
            }
        }
    }
}
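/* Managed, mask-free sketch of the sliding-window model above (assumption:
 * illustrative LiteralCostSketch type; no ring buffer and no UTF-8 path):
 * cost[i] is roughly log2(bytes in the window around i) minus log2(how often
 * data[i] occurs in that window), so rare bytes get a high estimated bit cost. */
using System;

static class LiteralCostSketch
{
    static float[] EstimateBitCosts(byte[] data, int windowHalf)
    {
        var histogram = new int[256];
        int len = data.Length;
        int inWindow = Math.Min(windowHalf, len);
        for (int i = 0; i < inWindow; ++i) ++histogram[data[i]];   /* bootstrap */

        var cost = new float[len];
        for (int i = 0; i < len; ++i)
        {
            if (i >= windowHalf) { --histogram[data[i - windowHalf]]; --inWindow; }
            if (i + windowHalf < len) { ++histogram[data[i + windowHalf]]; ++inWindow; }
            int histo = Math.Max(histogram[data[i]], 1);
            double litCost = Math.Log(inWindow, 2) - Math.Log(histo, 2) + 0.029;
            if (litCost < 1.0) litCost = litCost * 0.5 + 0.5;
            cost[i] = (float)litCost;
        }
        return cost;
    }

    static void Main()
    {
        byte[] data = System.Text.Encoding.ASCII.GetBytes("abracadabra");
        float[] cost = EstimateBitCosts(data, windowHalf: 4);
        Console.WriteLine(string.Join(" ", cost));
    }
}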
private static unsafe void BrotliCompressFragmentFast( ref MemoryManager m, byte *input, size_t input_size, bool is_last, int *table, size_t table_size, byte *cmd_depth, ushort *cmd_bits, size_t *cmd_code_numbits, byte *cmd_code, size_t *storage_ix, byte *storage) { CompressFragment.BrotliCompressFragmentFast(ref m, input, input_size, is_last, table, table_size, cmd_depth, cmd_bits, cmd_code_numbits, cmd_code, storage_ix, storage); }
/* Builds a command and distance prefix code (each 64 symbols) into "depth" and
 * "bits" based on "histogram" and stores it into the bit stream. */
private static unsafe void BuildAndStoreCommandPrefixCode(
    uint *histogram, byte *depth, ushort *bits, size_t *storage_ix, byte *storage) {
    /* Tree size for building a tree over 64 symbols is 2 * 64 + 1. */
    HuffmanTree *tree = stackalloc HuffmanTree[129];
    byte *cmd_depth = stackalloc byte[BROTLI_NUM_COMMAND_SYMBOLS];
    memset(cmd_depth, 0, BROTLI_NUM_COMMAND_SYMBOLS * sizeof(byte));
    ushort *cmd_bits = stackalloc ushort[64];

    BrotliCreateHuffmanTree(histogram, 64, 15, tree, depth);
    BrotliCreateHuffmanTree(&histogram[64], 64, 14, tree, &depth[64]);
    /* We have to jump through a few hoops here in order to compute
     * the command bits because the symbols are in a different order than in
     * the full alphabet. This looks complicated, but having the symbols
     * in this order in the command bits saves a few branches in the Emit*
     * functions. */
    memcpy(cmd_depth, depth + 24, 24);
    memcpy(cmd_depth + 24, depth, 8);
    memcpy(cmd_depth + 32, depth + 48, 8);
    memcpy(cmd_depth + 40, depth + 8, 8);
    memcpy(cmd_depth + 48, depth + 56, 8);
    memcpy(cmd_depth + 56, depth + 16, 8);
    BrotliConvertBitDepthsToSymbols(cmd_depth, 64, cmd_bits);
    memcpy(bits, cmd_bits + 24, 16);
    memcpy(bits + 8, cmd_bits + 40, 16);
    memcpy(bits + 16, cmd_bits + 56, 16);
    memcpy(bits + 24, cmd_bits, 48);
    memcpy(bits + 48, cmd_bits + 32, 16);
    memcpy(bits + 56, cmd_bits + 48, 16);
    BrotliConvertBitDepthsToSymbols(&depth[64], 64, &bits[64]);
    {
        /* Create the bit length array for the full command alphabet. */
        size_t i;
        memset(cmd_depth, 0, 64); /* only 64 first values were used */
        memcpy(cmd_depth, depth + 24, 8);
        memcpy(cmd_depth + 64, depth + 32, 8);
        memcpy(cmd_depth + 128, depth + 40, 8);
        memcpy(cmd_depth + 192, depth + 48, 8);
        memcpy(cmd_depth + 384, depth + 56, 8);
        for (i = 0; i < 8; ++i) {
            cmd_depth[128 + 8 * i] = depth[i];
            cmd_depth[256 + 8 * i] = depth[8 + i];
            cmd_depth[448 + 8 * i] = depth[16 + i];
        }
        BrotliStoreHuffmanTree(
            cmd_depth, BROTLI_NUM_COMMAND_SYMBOLS, tree, storage_ix, storage);
    }
    BrotliStoreHuffmanTree(&depth[64], 64, tree, storage_ix, storage);
}
public static void CPU_XOR_S(size_t setsize, cpu_set_t *destset, cpu_set_t *srcset1, cpu_set_t *srcset2) { size_t *dst = (size_t *)destset; size_t *src1 = (size_t *)srcset1; size_t *src2 = (size_t *)srcset2; for (size_t i = 0; i < setsize / SizeOf.size_t; i++) { dst[i] = src1[i] ^ src2[i]; } }
private static unsafe void EmitCopyLenLastDistance(size_t copylen, byte *depth, ushort *bits, uint *histo, size_t *storage_ix, byte *storage) { if (copylen < 12) { BrotliWriteBits(depth[copylen - 4], bits[copylen - 4], storage_ix, storage); ++histo[copylen - 4]; } else if (copylen < 72) { size_t tail = copylen - 8; uint nbits = Log2FloorNonZero(tail) - 1; size_t prefix = tail >> (int)nbits; size_t code = (nbits << 1) + prefix + 4; BrotliWriteBits(depth[code], bits[code], storage_ix, storage); BrotliWriteBits(nbits, tail - (prefix << (int)nbits), storage_ix, storage); ++histo[code]; } else if (copylen < 136) { size_t tail = copylen - 8; size_t code = (tail >> 5) + 30; BrotliWriteBits(depth[code], bits[code], storage_ix, storage); BrotliWriteBits(5, tail & 31, storage_ix, storage); BrotliWriteBits(depth[64], bits[64], storage_ix, storage); ++histo[code]; ++histo[64]; } else if (copylen < 2120) { size_t tail = copylen - 72; uint nbits = Log2FloorNonZero(tail); size_t code = nbits + 28; BrotliWriteBits(depth[code], bits[code], storage_ix, storage); BrotliWriteBits(nbits, tail - ((size_t)1 << (int)nbits), storage_ix, storage); BrotliWriteBits(depth[64], bits[64], storage_ix, storage); ++histo[code]; ++histo[64]; } else { BrotliWriteBits(depth[39], bits[39], storage_ix, storage); BrotliWriteBits(24, copylen - 2120, storage_ix, storage); BrotliWriteBits(depth[64], bits[64], storage_ix, storage); ++histo[47]; ++histo[64]; } }
private static unsafe void BrotliWriteHuffmanTreeRepetitions( byte previous_value, byte value, size_t repetitions, size_t *tree_size, byte *tree, byte *extra_bits_data) { if (previous_value != value) { tree[*tree_size] = value; extra_bits_data[*tree_size] = 0; ++(*tree_size); --repetitions; } if (repetitions == 7) { tree[*tree_size] = value; extra_bits_data[*tree_size] = 0; ++(*tree_size); --repetitions; } if (repetitions < 3) { size_t i; for (i = 0; i < repetitions; ++i) { tree[*tree_size] = value; extra_bits_data[*tree_size] = 0; ++(*tree_size); } } else { size_t start = *tree_size; repetitions -= 3; while (true) { tree[*tree_size] = BROTLI_REPEAT_PREVIOUS_CODE_LENGTH; extra_bits_data[*tree_size] = (byte)(repetitions & 0x3); ++(*tree_size); repetitions >>= 2; if (repetitions == 0) { break; } --repetitions; } Reverse(tree, start, *tree_size); Reverse(extra_bits_data, start, *tree_size); } }
private static unsafe void EmitLiterals(byte *input, size_t len, byte *depth, ushort *bits, size_t *storage_ix, byte *storage) { size_t j; for (j = 0; j < len; j++) { byte lit = input[j]; BrotliWriteBits(depth[lit], bits[lit], storage_ix, storage); } }
private static unsafe void EmitUncompressedMetaBlock(byte *begin, byte *end, size_t storage_ix_start, size_t *storage_ix, byte *storage) { size_t len = (size_t)(end - begin); RewindBitPosition(storage_ix_start, storage_ix, storage); BrotliStoreMetaBlockHeader(len, true, storage_ix, storage); *storage_ix = (*storage_ix + 7u) & ~7u; memcpy(&storage[*storage_ix >> 3], begin, len); *storage_ix += len << 3; storage[*storage_ix >> 3] = 0; }
public static bool CPU_EQUAL_S(size_t size, cpu_set_t *set1, cpu_set_t *set2) { size_t *s1 = (size_t *)set1; size_t *s2 = (size_t *)set2; for (size_t i = 0; i < size / SizeOf.size_t; i++) { if (s1[i] != s2[i]) { return(false); } } return(true); }
/* REQUIRES: nodes != NULL and len(nodes) >= num_bytes + 1 */
private static unsafe void BrotliZopfliCreateCommands(size_t num_bytes, size_t block_start, size_t max_backward_limit, ZopfliNode *nodes, int *dist_cache, size_t *last_insert_len, Command *commands, size_t *num_literals) {
    size_t pos = 0;
    uint offset = nodes[0].u.next;
    size_t i;
    for (i = 0; offset != uint.MaxValue; i++) {
        ZopfliNode *next = &nodes[pos + offset];
        size_t copy_length = ZopfliNodeCopyLength(next);
        size_t insert_length = next->insert_length;
        pos += insert_length;
        offset = next->u.next;
        if (i == 0) {
            insert_length += *last_insert_len;
            *last_insert_len = 0;
        }
        {
            size_t distance = ZopfliNodeCopyDistance(next);
            size_t len_code = ZopfliNodeLengthCode(next);
            size_t max_distance = Math.Min(block_start + pos, max_backward_limit);
            bool is_dictionary = (distance > max_distance);
            size_t dist_code = ZopfliNodeDistanceCode(next);

            InitCommand(
                &commands[i], insert_length, copy_length, len_code, dist_code);

            if (!is_dictionary && dist_code > 0) {
                dist_cache[3] = dist_cache[2];
                dist_cache[2] = dist_cache[1];
                dist_cache[1] = dist_cache[0];
                dist_cache[0] = (int)distance;
            }
        }
        *num_literals += insert_length;
        pos += copy_length;
    }
    *last_insert_len += num_bytes - pos;
}
private static unsafe void EmitDistance(size_t distance, byte *depth, ushort *bits, uint *histo, size_t *storage_ix, byte *storage) { size_t d = distance + 3; uint nbits = Log2FloorNonZero(d) - 1u; size_t prefix = (d >> (int)nbits) & 1; size_t offset = (2 + prefix) << (int)nbits; size_t distcode = 2 * (nbits - 1) + prefix + 80; BrotliWriteBits(depth[distcode], bits[distcode], storage_ix, storage); BrotliWriteBits(nbits, d - offset, storage_ix, storage); ++histo[distcode]; }
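/* Worked sketch of the distance bucketing in EmitDistance (assumption: standalone
 * illustration with hypothetical names, not library API): a distance is mapped to
 * a prefix symbol plus nbits extra bits, e.g. distance 100 -> d = 103 -> code 89
 * with 5 extra bits of value 7. */
using System;

static class DistanceCodeSketch
{
    static int Log2Floor(ulong v) { int r = -1; while (v != 0) { v >>= 1; r++; } return r; }

    static void Main()
    {
        ulong distance = 100;
        ulong d = distance + 3;
        int nbits = Log2Floor(d) - 1;
        ulong prefix = (d >> nbits) & 1;
        ulong offset = (2 + prefix) << nbits;
        ulong distcode = (ulong)(2 * (nbits - 1)) + prefix + 80;
        Console.WriteLine($"code={distcode} nbits={nbits} extra={d - offset}"); /* code=89 nbits=5 extra=7 */
    }
}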
public static int CPU_COUNT_S(size_t size, cpu_set_t *set) { size_t *s = (size_t *)set; size_t count = 0; for (size_t i = 0; i < size / SizeOf.size_t; i++) { size_t n = s[i]; while (n != 0) { count += n & 1; n >>= 1; } } return((int)count); }
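/* Usage sketch for the CPU_*_S helpers (assumption: illustrative managed stand-ins,
 * not the real cpu_set_t interop types): a ulong[] plays the role of the size_t
 * words inside cpu_set_t, and the methods mirror CPU_ZERO_S, CPU_XOR_S and
 * CPU_COUNT_S above. */
using System;

static class CpuSetSketch
{
    static void Zero(ulong[] set) { for (int i = 0; i < set.Length; i++) set[i] = 0; }

    static void Xor(ulong[] dst, ulong[] a, ulong[] b)
    {
        for (int i = 0; i < dst.Length; i++) dst[i] = a[i] ^ b[i];
    }

    static int Count(ulong[] set)
    {
        int count = 0;
        foreach (ulong word in set)
            for (ulong n = word; n != 0; n >>= 1) count += (int)(n & 1);
        return count;
    }

    static void Main()
    {
        var a = new ulong[2]; var b = new ulong[2]; var dst = new ulong[2];
        Zero(a); Zero(b);
        a[0] = 0b0111;                 /* CPUs 0-2 */
        b[0] = 0b0101;                 /* CPUs 0 and 2 */
        Xor(dst, a, b);                /* symmetric difference -> CPU 1 only */
        Console.WriteLine(Count(dst)); /* 1 */
    }
}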
private static unsafe void BrotliWriteHuffmanTreeRepetitionsZeros( size_t repetitions, size_t *tree_size, byte *tree, byte *extra_bits_data) { if (repetitions == 11) { tree[*tree_size] = 0; extra_bits_data[*tree_size] = 0; ++(*tree_size); --repetitions; } if (repetitions < 3) { size_t i; for (i = 0; i < repetitions; ++i) { tree[*tree_size] = 0; extra_bits_data[*tree_size] = 0; ++(*tree_size); } } else { size_t start = *tree_size; repetitions -= 3; while (true) { tree[*tree_size] = BROTLI_REPEAT_ZERO_CODE_LENGTH; extra_bits_data[*tree_size] = (byte)(repetitions & 0x7); ++(*tree_size); repetitions >>= 3; if (repetitions == 0) { break; } --repetitions; } Reverse(tree, start, *tree_size); Reverse(extra_bits_data, start, *tree_size); } }
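/* Illustrative trace of the repeat-zero loop above (assumption: standalone sketch,
 * not library code): a run of 20 zero code lengths becomes repeat-zero symbols
 * (BROTLI_REPEAT_ZERO_CODE_LENGTH) carrying 3 extra bits each. A decoder rebuilds
 * the run as 3 + extra for the first such symbol and 8 * (count - 2) + 3 + extra
 * for each following one, so the extras below reproduce the 20 zeros. */
using System;
using System.Collections.Generic;

static class RepeatZeroSketch
{
    static void Main()
    {
        ulong repetitions = 20 - 3;            /* the encoder subtracts 3 first */
        var extras = new List<ulong>();
        while (true)
        {
            extras.Add(repetitions & 0x7);     /* low 3 bits become the extra bits */
            repetitions >>= 3;
            if (repetitions == 0) break;
            --repetitions;
        }
        extras.Reverse();                      /* the encoder's Reverse() emits the high digit first */
        Console.WriteLine(string.Join(",", extras)); /* 1,1 */
    }
}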
public static unsafe void BuildAndStoreEntropyCodes(ref MemoryManager m, BlockEncoder *self, HistogramLiteral *histograms, size_t histograms_size, HuffmanTree *tree, size_t *storage_ix, byte *storage) { size_t alphabet_size = self->alphabet_size_; size_t table_size = histograms_size * alphabet_size; self->depths_ = (byte *)BrotliAllocate(ref m, table_size * sizeof(byte)); self->bits_ = (ushort *)BrotliAllocate(ref m, table_size * sizeof(ushort)); { size_t i; for (i = 0; i < histograms_size; ++i) { size_t ix = i * alphabet_size; BuildAndStoreHuffmanTree(&histograms[i].data_[0], alphabet_size, tree, &self->depths_[ix], &self->bits_[ix], storage_ix, storage); } } }
private static unsafe void BrotliCreateZopfliBackwardReferences( ref MemoryManager m, size_t num_bytes, size_t position, byte *ringbuffer, size_t ringbuffer_mask, BrotliEncoderParams *params_, HasherHandle hasher, int *dist_cache, size_t *last_insert_len, Command *commands, size_t *num_commands, size_t *num_literals) { size_t max_backward_limit = BROTLI_MAX_BACKWARD_LIMIT(params_->lgwin); ZopfliNode *nodes; nodes = (ZopfliNode *)BrotliAllocate(ref m, (num_bytes + 1) * sizeof(ZopfliNode)); BrotliInitZopfliNodes(nodes, num_bytes + 1); *num_commands += BrotliZopfliComputeShortestPath(ref m, num_bytes, position, ringbuffer, ringbuffer_mask, params_, max_backward_limit, dist_cache, hasher, nodes); BrotliZopfliCreateCommands(num_bytes, position, max_backward_limit, nodes, dist_cache, last_insert_len, commands, num_literals); BrotliFree(ref m, nodes); }
public static unsafe void BrotliCompressFragmentFast( ref MemoryManager m, byte *input, size_t input_size, bool is_last, int *table, size_t table_size, byte *cmd_depth, ushort *cmd_bits, size_t *cmd_code_numbits, byte *cmd_code, size_t *storage_ix, byte *storage) { size_t initial_storage_ix = *storage_ix; size_t table_bits = Log2FloorNonZero(table_size); if (input_size == 0) { BrotliWriteBits(1, 1, storage_ix, storage); /* islast */ BrotliWriteBits(1, 1, storage_ix, storage); /* isempty */ *storage_ix = (*storage_ix + 7u) & ~7u; return; } switch ((int)table_bits) { case 9: case 11: case 13: case 15: BrotliCompressFragmentFastImpl( ref m, input, input_size, is_last, table, table_bits, cmd_depth, cmd_bits, cmd_code_numbits, cmd_code, storage_ix, storage); break; } /* If output is larger than single uncompressed block, rewrite it. */ if (*storage_ix - initial_storage_ix > 31 + (input_size << 3)) { EmitUncompressedMetaBlock(input, input + input_size, initial_storage_ix, storage_ix, storage); } if (is_last) { BrotliWriteBits(1, 1, storage_ix, storage); /* islast */ BrotliWriteBits(1, 1, storage_ix, storage); /* isempty */ *storage_ix = (*storage_ix + 7u) & ~7u; } }
private static unsafe void EmitLongInsertLen(size_t insertlen, byte *depth, ushort *bits, uint *histo, size_t *storage_ix, byte *storage) { if (insertlen < 22594) { BrotliWriteBits(depth[62], bits[62], storage_ix, storage); BrotliWriteBits(14, insertlen - 6210, storage_ix, storage); ++histo[62]; } else { BrotliWriteBits(depth[63], bits[63], storage_ix, storage); BrotliWriteBits(24, insertlen - 22594, storage_ix, storage); ++histo[63]; } }
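/* Quick check of the thresholds above (assumption: standalone sketch): Brotli's
 * insert-length code 22 starts at 6210 with 14 extra bits and code 23 starts at
 * 22594 with 24 extra bits, so the first bucket ends exactly where the second begins. */
using System;

static class InsertLenSketch
{
    static void Main()
    {
        const int base22 = 6210, extraBits22 = 14;
        const int base23 = 22594;
        Console.WriteLine(base22 + (1 << extraBits22) == base23); /* True */
    }
}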