/// <summary>
/// Computes the cost of combining this histogram with <paramref name="b"/>, expressed
/// relative to this histogram's own bit cost (the accumulation is seeded with -BitCost).
/// </summary>
/// <returns>The relative combined cost.</returns>
public double AddThresh(Vp8LHistogram b, Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy, double costThreshold)
{
    // Seeding with -C(this) makes the returned value a delta against this histogram.
    this.GetCombinedHistogramEntropy(b, stats, bitsEntropy, costThreshold, costInitial: -this.BitCost, out double cost);
    return cost;
}
/// <summary>
/// Refreshes the cost of each original histogram, discards empty ones (marking their
/// symbol invalid) and deep-clones the remaining ones into <paramref name="histograms"/>,
/// assigning consecutive cluster ids.
/// </summary>
/// <returns>The number of histograms still in use.</returns>
private static int HistogramCopyAndAnalyze(List<Vp8LHistogram> origHistograms, List<Vp8LHistogram> histograms, ushort[] histogramSymbols)
{
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();
    int clusterId = 0;

    for (int i = 0; i < origHistograms.Count; i++)
    {
        Vp8LHistogram origHistogram = origHistograms[i];
        origHistogram.UpdateHistogramCost(stats, bitsEntropy);

        // Skip the histogram if it is completely empty, which can happen for tiles
        // with no information (when they are skipped because of LZ77).
        bool isEmpty = !origHistogram.IsUsed[0]
                       && !origHistogram.IsUsed[1]
                       && !origHistogram.IsUsed[2]
                       && !origHistogram.IsUsed[3]
                       && !origHistogram.IsUsed[4];

        if (isEmpty)
        {
            origHistograms[i] = null;
            histograms[i] = null;
            histogramSymbols[i] = InvalidHistogramSymbol;
        }
        else
        {
            histograms[i] = (Vp8LHistogram)origHistogram.DeepClone();
            histogramSymbols[i] = (ushort)clusterId++;
        }
    }

    return histogramSymbols.Count(h => h != InvalidHistogramSymbol);
}
/// <summary>
/// Update the cost diff and combo of a pair of histograms. This needs to be called when
/// the histograms have been merged with a third one.
/// </summary>
private static void HistoListUpdatePair(Vp8LHistogram h1, Vp8LHistogram h2, Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy, double threshold, HistogramPair pair)
{
    // The combined entropy only needs to be evaluated up to separateCost + threshold;
    // beyond that the pair cannot beat keeping the histograms apart.
    double separateCost = h1.BitCost + h2.BitCost;
    pair.CostCombo = 0.0d;
    h1.GetCombinedHistogramEntropy(h2, stats, bitsEntropy, separateCost + threshold, costInitial: pair.CostCombo, out double combinedCost);
    pair.CostCombo = combinedCost;
    pair.CostDiff = combinedCost - separateCost;
}
/// <summary>
/// Estimate how many bits the combined entropy of literals and distance approximately maps to.
/// </summary>
/// <returns>Estimated bits.</returns>
public double EstimateBits(Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy)
{
    uint notUsed = 0;

    // Per-channel symbol entropy; PopulationCost also refreshes IsUsed per channel.
    double bits = PopulationCost(this.Literal, this.NumCodes(), ref notUsed, ref this.IsUsed[0], stats, bitsEntropy);
    bits += PopulationCost(this.Red, WebpConstants.NumLiteralCodes, ref notUsed, ref this.IsUsed[1], stats, bitsEntropy);
    bits += PopulationCost(this.Blue, WebpConstants.NumLiteralCodes, ref notUsed, ref this.IsUsed[2], stats, bitsEntropy);
    bits += PopulationCost(this.Alpha, WebpConstants.NumLiteralCodes, ref notUsed, ref this.IsUsed[3], stats, bitsEntropy);
    bits += PopulationCost(this.Distance, WebpConstants.NumDistanceCodes, ref notUsed, ref this.IsUsed[4], stats, bitsEntropy);

    // Length and distance codes carry extra bits beyond the symbol entropy itself.
    bits += ExtraCost(this.Literal.AsSpan(WebpConstants.NumLiteralCodes), WebpConstants.NumLengthCodes);
    bits += ExtraCost(this.Distance, WebpConstants.NumDistanceCodes);

    return bits;
}
/// <summary>
/// Performs output = a + b, computing the cost C(a+b) - C(a) - C(b) while comparing
/// to the threshold value 'costThreshold'. The score returned is
/// Score = C(a+b) - C(a) - C(b), where C(a) + C(b) is known and fixed.
/// Since the previous score passed is 'costThreshold', we only need to compare
/// the partial cost against 'costThreshold + C(a) + C(b)' to possibly bail-out early.
/// </summary>
public double AddEval(Vp8LHistogram b, Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy, double costThreshold, Vp8LHistogram output)
{
    double sumCost = this.BitCost + b.BitCost;
    double budget = costThreshold + sumCost;

    // Only materialize the merged histogram when the combination stayed within budget.
    if (this.GetCombinedHistogramEntropy(b, stats, bitsEntropy, budget, costInitial: 0, out double cost))
    {
        this.Add(b, output);
        output.BitCost = cost;
        output.PaletteCodeBits = this.PaletteCodeBits;
    }

    return cost - sumCost;
}
/// <summary>
/// Computes the unrefined entropy of <paramref name="x"/> by walking it as a sequence
/// of equal-value runs, flushing a run into the streak statistics whenever the value changes.
/// </summary>
public void GetEntropyUnrefined(uint[] x, int length, Vp8LStreaks stats)
{
    this.Init();

    // Track the start index and value of the current run of equal entries.
    int runStart = 0;
    uint runValue = x[0];
    int idx = 1;
    for (; idx < length; idx++)
    {
        if (x[idx] != runValue)
        {
            // Value changed: flush the finished run (the callee advances runValue/runStart).
            this.GetEntropyUnrefined(x[idx], idx, ref runValue, ref runStart, stats);
        }
    }

    // Flush the trailing run that ends at the array boundary.
    this.GetEntropyUnrefined(0, idx, ref runValue, ref runStart, stats);

    this.Entropy += LosslessUtils.FastSLog2(this.Sum);
}
/// <summary>
/// Recomputes and caches the per-channel population costs (LiteralCost, RedCost, BlueCost),
/// the total BitCost, and the packed TrivialSymbol summary for this histogram.
/// PopulationCost also refreshes IsUsed per channel and reports each channel's
/// trivial symbol through its ref parameter.
/// </summary>
public void UpdateHistogramCost(Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy)
{
    uint alphaSym = 0, redSym = 0, blueSym = 0;
    uint notUsed = 0;

    double alphaCost = PopulationCost(this.Alpha, WebpConstants.NumLiteralCodes, ref alphaSym, ref this.IsUsed[3], stats, bitsEntropy);

    // Distance (like the length part of the literal channel below) carries extra bits
    // beyond the symbol entropy.
    double distanceCost = PopulationCost(this.Distance, WebpConstants.NumDistanceCodes, ref notUsed, ref this.IsUsed[4], stats, bitsEntropy) + ExtraCost(this.Distance, WebpConstants.NumDistanceCodes);

    int numCodes = this.NumCodes();
    this.LiteralCost = PopulationCost(this.Literal, numCodes, ref notUsed, ref this.IsUsed[0], stats, bitsEntropy) + ExtraCost(this.Literal.AsSpan(WebpConstants.NumLiteralCodes), WebpConstants.NumLengthCodes);
    this.RedCost = PopulationCost(this.Red, WebpConstants.NumLiteralCodes, ref redSym, ref this.IsUsed[1], stats, bitsEntropy);
    this.BlueCost = PopulationCost(this.Blue, WebpConstants.NumLiteralCodes, ref blueSym, ref this.IsUsed[2], stats, bitsEntropy);
    this.BitCost = this.LiteralCost + this.RedCost + this.BlueCost + alphaCost + distanceCost;

    // Pack alpha/red/blue trivial symbols into byte lanes 24/16/0 (the << 8 lane is
    // left empty). If any channel reported the NonTrivialSym sentinel, the OR check
    // marks the whole histogram as non-trivial.
    if ((alphaSym | redSym | blueSym) == NonTrivialSym)
    {
        this.TrivialSymbol = NonTrivialSym;
    }
    else
    {
        this.TrivialSymbol = (alphaSym << 24) | (redSym << 16) | (blueSym << 0);
    }
}
/// <summary>
/// Evaluates best possible backward references for specified quality. The input cacheBits to 'GetBackwardReferences'
/// sets the maximum cache bits to use (passing 0 implies disabling the local color cache).
/// The optimal cache bits is evaluated and set for the cacheBits parameter.
/// The return value is the pointer to the best of the two backward refs viz, refs[0] or refs[1].
/// </summary>
public static Vp8LBackwardRefs GetBackwardReferences(
    int width,
    int height,
    ReadOnlySpan<uint> bgra,
    int quality,
    int lz77TypesToTry,
    ref int cacheBits,
    MemoryAllocator memoryAllocator,
    Vp8LHashChain hashChain,
    Vp8LBackwardRefs best,
    Vp8LBackwardRefs worst)
{
    int lz77TypeBest = 0;
    double bitCostBest = -1;
    int cacheBitsInitial = cacheBits;
    Vp8LHashChain hashChainBox = null;
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();

    // Try each requested LZ77 variant; 'worst' is the scratch buffer and is swapped
    // with 'best' whenever a variant yields a lower estimated bit cost.
    for (int lz77Type = 1; lz77TypesToTry > 0; lz77TypesToTry &= ~lz77Type, lz77Type <<= 1)
    {
        int cacheBitsTmp = cacheBitsInitial;
        if ((lz77TypesToTry & lz77Type) == 0)
        {
            continue;
        }

        switch ((Vp8LLz77Type)lz77Type)
        {
            case Vp8LLz77Type.Lz77Rle:
                BackwardReferencesRle(width, height, bgra, 0, worst);
                break;
            case Vp8LLz77Type.Lz77Standard:
                // Compute LZ77 with no cache (0 bits), as the ideal LZ77 with a color cache is not that different in practice.
                BackwardReferencesLz77(width, height, bgra, 0, hashChain, worst);
                break;
            case Vp8LLz77Type.Lz77Box:
                // The box variant needs its own hash chain; it is disposed at the end.
                hashChainBox = new Vp8LHashChain(memoryAllocator, width * height);
                BackwardReferencesLz77Box(width, height, bgra, 0, hashChain, hashChainBox, worst);
                break;
        }

        // Next, try with a color cache and update the references.
        cacheBitsTmp = CalculateBestCacheSize(bgra, quality, worst, cacheBitsTmp);
        if (cacheBitsTmp > 0)
        {
            BackwardRefsWithLocalCache(bgra, cacheBitsTmp, worst);
        }

        // Keep the best backward references.
        var histo = new Vp8LHistogram(worst, cacheBitsTmp);
        double bitCost = histo.EstimateBits(stats, bitsEntropy);
        if (lz77TypeBest == 0 || bitCost < bitCostBest)
        {
            // Swap best <-> worst so the cheaper references survive the next iteration.
            Vp8LBackwardRefs tmp = worst;
            worst = best;
            best = tmp;
            bitCostBest = bitCost;
            cacheBits = cacheBitsTmp;
            lz77TypeBest = lz77Type;
        }
    }

    // Improve on simple LZ77 but only for high quality (TraceBackwards is costly).
    if ((lz77TypeBest == (int)Vp8LLz77Type.Lz77Standard || lz77TypeBest == (int)Vp8LLz77Type.Lz77Box) && quality >= 25)
    {
        Vp8LHashChain hashChainTmp = lz77TypeBest == (int)Vp8LLz77Type.Lz77Standard ? hashChain : hashChainBox;
        BackwardReferencesTraceBackwards(width, height, memoryAllocator, bgra, cacheBits, hashChainTmp, best, worst);
        var histo = new Vp8LHistogram(worst, cacheBits);
        double bitCostTrace = histo.EstimateBits(stats, bitsEntropy);
        if (bitCostTrace < bitCostBest)
        {
            best = worst;
        }
    }

    BackwardReferences2DLocality(width, best);

    hashChainBox?.Dispose();

    return(best);
}
/// <summary>
/// Evaluate optimal cache bits for the local color cache.
/// The input bestCacheBits sets the maximum cache bits to use (passing 0 implies disabling the local color cache).
/// The local color cache is also disabled for the lower (smaller then 25) quality.
/// All candidate cache sizes (0..cacheBitsMax) are evaluated in a single pass over the references.
/// </summary>
/// <returns>Best cache size.</returns>
private static int CalculateBestCacheSize(ReadOnlySpan<uint> bgra, int quality, Vp8LBackwardRefs refs, int bestCacheBits)
{
    int cacheBitsMax = quality <= 25 ? 0 : bestCacheBits;
    if (cacheBitsMax == 0)
    {
        // Local color cache is disabled.
        return(0);
    }

    double entropyMin = MaxEntropy;
    int pos = 0;
    var colorCache = new ColorCache[WebpConstants.MaxColorCacheBits + 1];
    var histos = new Vp8LHistogram[WebpConstants.MaxColorCacheBits + 1];

    // One histogram and one color cache per candidate cache size.
    for (int i = 0; i <= WebpConstants.MaxColorCacheBits; i++)
    {
        histos[i] = new Vp8LHistogram(paletteCodeBits: i);
        colorCache[i] = new ColorCache();
        colorCache[i].Init(i);
    }

    // Find the cacheBits giving the lowest entropy.
    for (int idx = 0; idx < refs.Refs.Count; idx++)
    {
        PixOrCopy v = refs.Refs[idx];
        if (v.IsLiteral())
        {
            uint pix = bgra[pos++];
            uint a = (pix >> 24) & 0xff;
            uint r = (pix >> 16) & 0xff;
            uint g = (pix >> 8) & 0xff;
            uint b = (pix >> 0) & 0xff;

            // The keys of the caches can be derived from the longest one.
            int key = ColorCache.HashPix(pix, 32 - cacheBitsMax);

            // Do not use the color cache for cacheBits = 0.
            ++histos[0].Blue[b];
            ++histos[0].Literal[g];
            ++histos[0].Red[r];
            ++histos[0].Alpha[a];

            // Deal with cacheBits > 0: shorter keys are prefixes of the longest key.
            for (int i = cacheBitsMax; i >= 1; --i, key >>= 1)
            {
                if (colorCache[i].Lookup(key) == pix)
                {
                    // Cache hit: counted as a cache-index literal symbol.
                    ++histos[i].Literal[WebpConstants.NumLiteralCodes + WebpConstants.NumLengthCodes + key];
                }
                else
                {
                    // Cache miss: insert and count the plain ARGB literals.
                    colorCache[i].Set((uint)key, pix);
                    ++histos[i].Blue[b];
                    ++histos[i].Literal[g];
                    ++histos[i].Red[r];
                    ++histos[i].Alpha[a];
                }
            }
        }
        else
        {
            // We should compute the contribution of the (distance, length)
            // histograms but those are the same independently from the cache size.
            // As those constant contributions are in the end added to the other
            // histogram contributions, we can ignore them, except for the length
            // prefix that is part of the literal_ histogram.
            int len = v.Len;

            // Seed with the complement of the first copied pixel so the first
            // iteration of the do-while below always inserts.
            uint bgraPrev = bgra[pos] ^ 0xffffffffu;

            int extraBits = 0, extraBitsValue = 0;
            int code = LosslessUtils.PrefixEncode(len, ref extraBits, ref extraBitsValue);
            for (int i = 0; i <= cacheBitsMax; i++)
            {
                ++histos[i].Literal[WebpConstants.NumLiteralCodes + code];
            }

            // Update the color caches.
            do
            {
                if (bgra[pos] != bgraPrev)
                {
                    // Efficiency: insert only if the color changes.
                    int key = ColorCache.HashPix(bgra[pos], 32 - cacheBitsMax);
                    for (int i = cacheBitsMax; i >= 1; --i, key >>= 1)
                    {
                        colorCache[i].Colors[key] = bgra[pos];
                    }

                    bgraPrev = bgra[pos];
                }

                pos++;
            }
            while (--len != 0);
        }
    }

    // Pick the cache size whose histogram has the lowest estimated entropy.
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();
    for (int i = 0; i <= cacheBitsMax; i++)
    {
        double entropy = histos[i].EstimateBits(stats, bitsEntropy);
        if (i == 0 || entropy < entropyMin)
        {
            entropyMin = entropy;
            bestCacheBits = i;
        }
    }

    return(bestCacheBits);
}
/// <summary>
/// Assigns every input histogram to its cheapest output cluster, writes the chosen
/// cluster index into <paramref name="symbols"/>, then rebuilds the output histograms
/// by accumulating their assigned inputs.
/// </summary>
private static void HistogramRemap(List<Vp8LHistogram> input, List<Vp8LHistogram> output, ushort[] symbols)
{
    int inSize = input.Count;
    int outSize = output.Count;
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();

    if (outSize > 1)
    {
        for (int i = 0; i < inSize; i++)
        {
            Vp8LHistogram candidate = input[i];
            if (candidate == null)
            {
                // Arbitrarily set to the previous value if unused to help future LZ77.
                symbols[i] = symbols[i - 1];
                continue;
            }

            // Probe every output cluster and keep the cheapest one.
            int bestOut = 0;
            double bestBits = double.MaxValue;
            for (int k = 0; k < outSize; k++)
            {
                double curBits = output[k].AddThresh(candidate, stats, bitsEntropy, bestBits);
                if (k == 0 || curBits < bestBits)
                {
                    bestBits = curBits;
                    bestOut = k;
                }
            }

            symbols[i] = (ushort)bestOut;
        }
    }
    else
    {
        // A single output cluster: everything maps to index 0.
        Array.Clear(symbols, 0, inSize);
    }

    // Recompute each output from scratch.
    int paletteCodeBits = output.First().PaletteCodeBits;
    output.Clear();
    for (int i = 0; i < outSize; i++)
    {
        output.Add(new Vp8LHistogram(paletteCodeBits));
    }

    // Fold every used input into its assigned output cluster.
    for (int i = 0; i < inSize; i++)
    {
        if (input[i] == null)
        {
            continue;
        }

        int idx = symbols[i];
        input[i].Add(output[idx], output[idx]);
    }
}
/// <summary>
/// Create a pair from indices "idx1" and "idx2" provided its cost is inferior to "threshold", a negative entropy.
/// </summary>
/// <returns>The cost of the pair, or 0 if it is superior to the threshold.</returns>
private static double HistoPriorityListPush(List<HistogramPair> histoList, int maxSize, List<Vp8LHistogram> histograms, int idx1, int idx2, double threshold, Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy)
{
    // The list is bounded; refuse new pairs once it is full.
    if (histoList.Count == maxSize)
    {
        return 0.0d;
    }

    // Normalize so that idx1 <= idx2.
    if (idx1 > idx2)
    {
        (idx1, idx2) = (idx2, idx1);
    }

    var pair = new HistogramPair
    {
        Idx1 = idx1,
        Idx2 = idx2
    };

    HistoListUpdatePair(histograms[idx1], histograms[idx2], stats, bitsEntropy, threshold, pair);

    // Do not even consider the pair if it does not improve the entropy.
    if (pair.CostDiff >= threshold)
    {
        return 0.0d;
    }

    histoList.Add(pair);
    HistoListUpdateHead(histoList, pair);
    return pair.CostDiff;
}
/// <summary>
/// Perform histogram aggregation using a stochastic approach.
/// Random pairs of histograms are probed, the best candidates are kept in a small
/// priority list, and the front pair is merged each outer iteration.
/// </summary>
/// <returns>true if a greedy approach needs to be performed afterwards, false otherwise.</returns>
private static bool HistogramCombineStochastic(List<Vp8LHistogram> histograms, int minClusterSize)
{
    uint seed = 1;
    int triesWithNoSuccess = 0;
    int numUsed = histograms.Count(h => h != null);
    int outerIters = numUsed;
    int numTriesNoSuccess = outerIters / 2;
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();

    if (numUsed < minClusterSize)
    {
        return(true);
    }

    // Priority list of histogram pairs. Its size impacts the quality of the compression and the speed:
    // the smaller the faster but the worse for the compression.
    var histoPriorityList = new List<HistogramPair>();
    int maxSize = 9;

    // Fill the initial mapping: mappings[j] is the index of the j-th non-null histogram.
    int[] mappings = new int[histograms.Count];
    for (int j = 0, iter = 0; iter < histograms.Count; iter++)
    {
        if (histograms[iter] == null)
        {
            continue;
        }

        mappings[j++] = iter;
    }

    // Collapse similar histograms.
    for (int iter = 0; iter < outerIters && numUsed >= minClusterSize && ++triesWithNoSuccess < numTriesNoSuccess; iter++)
    {
        double bestCost = histoPriorityList.Count == 0 ? 0.0d : histoPriorityList[0].CostDiff;
        int numTries = numUsed / 2;
        uint randRange = (uint)((numUsed - 1) * numUsed);

        // Pick random samples.
        for (int j = 0; numUsed >= 2 && j < numTries; j++)
        {
            // Choose two different histograms at random and try to combine them.
            uint tmp = MyRand(ref seed) % randRange;
            int idx1 = (int)(tmp / (numUsed - 1));
            int idx2 = (int)(tmp % (numUsed - 1));
            if (idx2 >= idx1)
            {
                // Shift idx2 past idx1 so the two indices are always distinct.
                idx2++;
            }

            idx1 = mappings[idx1];
            idx2 = mappings[idx2];

            // Calculate cost reduction on combination.
            double currCost = HistoPriorityListPush(histoPriorityList, maxSize, histograms, idx1, idx2, bestCost, stats, bitsEntropy);

            // Found a better pair?
            if (currCost < 0)
            {
                bestCost = currCost;

                // Stop probing once the priority list is full.
                if (histoPriorityList.Count == maxSize)
                {
                    break;
                }
            }
        }

        if (histoPriorityList.Count == 0)
        {
            continue;
        }

        // Get the best histograms.
        int bestIdx1 = histoPriorityList[0].Idx1;
        int bestIdx2 = histoPriorityList[0].Idx2;

        // Remove bestIdx2 from the compact mapping by shifting the tail left by one.
        int mappingIndex = Array.IndexOf(mappings, bestIdx2);
        Span<int> src = mappings.AsSpan(mappingIndex + 1, numUsed - mappingIndex - 1);
        Span<int> dst = mappings.AsSpan(mappingIndex);
        src.CopyTo(dst);

        // Merge the histograms and remove bestIdx2 from the list.
        HistogramAdd(histograms[bestIdx2], histograms[bestIdx1], histograms[bestIdx1]);
        histograms.ElementAt(bestIdx1).BitCost = histoPriorityList[0].CostCombo;
        histograms[bestIdx2] = null;
        numUsed--;

        // Fix up the remaining pairs so they no longer reference bestIdx2.
        for (int j = 0; j < histoPriorityList.Count;)
        {
            HistogramPair p = histoPriorityList[j];
            bool isIdx1Best = p.Idx1 == bestIdx1 || p.Idx1 == bestIdx2;
            bool isIdx2Best = p.Idx2 == bestIdx1 || p.Idx2 == bestIdx2;
            bool doEval = false;

            // The front pair could have been duplicated by a random pick so
            // check for it all the time nevertheless.
            if (isIdx1Best && isIdx2Best)
            {
                // Swap-remove: replace with the last element and retry index j.
                histoPriorityList[j] = histoPriorityList[histoPriorityList.Count - 1];
                histoPriorityList.RemoveAt(histoPriorityList.Count - 1);
                continue;
            }

            // Any pair containing one of the two best indices should only refer to
            // bestIdx1. Its cost should also be updated.
            if (isIdx1Best)
            {
                p.Idx1 = bestIdx1;
                doEval = true;
            }
            else if (isIdx2Best)
            {
                p.Idx2 = bestIdx1;
                doEval = true;
            }

            // Make sure the index order is respected.
            if (p.Idx1 > p.Idx2)
            {
                int tmp = p.Idx2;
                p.Idx2 = p.Idx1;
                p.Idx1 = tmp;
            }

            if (doEval)
            {
                // Re-evaluate the cost of an updated pair.
                HistoListUpdatePair(histograms[p.Idx1], histograms[p.Idx2], stats, bitsEntropy, 0.0d, p);

                // Drop the pair if it no longer improves the entropy.
                if (p.CostDiff >= 0.0d)
                {
                    histoPriorityList[j] = histoPriorityList[histoPriorityList.Count - 1];
                    histoPriorityList.RemoveAt(histoPriorityList.Count - 1);
                    continue;
                }
            }

            HistoListUpdateHead(histoPriorityList, p);
            j++;
        }

        triesWithNoSuccess = 0;
    }

    bool doGreedy = numUsed <= minClusterSize;

    return(doGreedy);
}
/// <summary>
/// Greedily merges the pair of histograms with the best (most negative) cost difference,
/// repeating until no remaining pair improves the overall entropy.
/// </summary>
private static void HistogramCombineGreedy(List<Vp8LHistogram> histograms)
{
    int histoSize = histograms.Count(h => h != null);

    // Priority list of histogram pairs.
    var histoPriorityList = new List<HistogramPair>();
    int maxSize = histoSize * histoSize;
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();

    // Seed the list with every pair of remaining histograms.
    for (int i = 0; i < histoSize; i++)
    {
        if (histograms[i] == null)
        {
            continue;
        }

        for (int j = i + 1; j < histoSize; j++)
        {
            if (histograms[j] == null)
            {
                continue;
            }

            HistoPriorityListPush(histoPriorityList, maxSize, histograms, i, j, 0.0d, stats, bitsEntropy);
        }
    }

    while (histoPriorityList.Count > 0)
    {
        // The head of the list holds the current best pair; merge idx2 into idx1.
        int idx1 = histoPriorityList[0].Idx1;
        int idx2 = histoPriorityList[0].Idx2;
        HistogramAdd(histograms[idx2], histograms[idx1], histograms[idx1]);
        histograms[idx1].BitCost = histoPriorityList[0].CostCombo;

        // Remove merged histogram.
        histograms[idx2] = null;

        // Remove pairs intersecting the just combined best pair.
        for (int i = 0; i < histoPriorityList.Count;)
        {
            HistogramPair p = histoPriorityList.ElementAt(i);
            if (p.Idx1 == idx1 || p.Idx2 == idx1 || p.Idx1 == idx2 || p.Idx2 == idx2)
            {
                // Replace item at pos i with the last one and shrinking the list.
                histoPriorityList[i] = histoPriorityList[histoPriorityList.Count - 1];
                histoPriorityList.RemoveAt(histoPriorityList.Count - 1);
            }
            else
            {
                HistoListUpdateHead(histoPriorityList, p);
                i++;
            }
        }

        // Push new pairs formed with combined histogram to the list.
        for (int i = 0; i < histoSize; i++)
        {
            if (i == idx1 || histograms[i] == null)
            {
                continue;
            }

            HistoPriorityListPush(histoPriorityList, maxSize, histograms, idx1, i, 0.0d, stats, bitsEntropy);
        }
    }
}
/// <summary>
/// Merges histograms that fall into the same entropy bin whenever the merge is estimated
/// to be cheaper than keeping them separate. The first histogram seen in each bin becomes
/// the merge target; merged indices are nulled and removed at the end, and
/// <paramref name="clusterMappings"/> records which cluster absorbed which.
/// </summary>
private static void HistogramCombineEntropyBin(
    List<Vp8LHistogram> histograms,
    ushort[] clusters,
    ushort[] clusterMappings,
    Vp8LHistogram curCombo,
    ushort[] binMap,
    int numBins,
    double combineCostFactor)
{
    var binInfo = new HistogramBinInfo[BinSize];
    for (int idx = 0; idx < numBins; idx++)
    {
        binInfo[idx].First = -1;
        binInfo[idx].NumCombineFailures = 0;
    }

    // By default, a cluster matches itself.
    for (int idx = 0; idx < histograms.Count; idx++)
    {
        clusterMappings[idx] = (ushort)idx;
    }

    var indicesToRemove = new List<int>();
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();
    for (int idx = 0; idx < histograms.Count; idx++)
    {
        if (histograms[idx] == null)
        {
            continue;
        }

        int binId = binMap[idx];
        int first = binInfo[binId].First;
        if (first == -1)
        {
            // First histogram seen for this bin becomes the merge target.
            binInfo[binId].First = (short)idx;
        }
        else
        {
            // Try to merge #idx into #first (both share the same binId)
            double bitCost = histograms[idx].BitCost;
            double bitCostThresh = -bitCost * combineCostFactor;

            // AddEval writes the merged histogram into curCombo when within threshold.
            double currCostDiff = histograms[first].AddEval(histograms[idx], stats, bitsEntropy, bitCostThresh, curCombo);
            if (currCostDiff < bitCostThresh)
            {
                // Try to merge two histograms only if the combo is a trivial one or
                // the two candidate histograms are already non-trivial.
                // For some images, 'tryCombine' turns out to be false for a lot of
                // histogram pairs. In that case, we fallback to combining
                // histograms as usual to avoid increasing the header size.
                bool tryCombine = curCombo.TrivialSymbol != NonTrivialSym || (histograms[idx].TrivialSymbol == NonTrivialSym && histograms[first].TrivialSymbol == NonTrivialSym);
                int maxCombineFailures = 32;
                if (tryCombine || binInfo[binId].NumCombineFailures >= maxCombineFailures)
                {
                    // Move the (better) merged histogram to its final slot.
                    // The old target becomes the scratch combo for the next merge.
                    Vp8LHistogram tmp = curCombo;
                    curCombo = histograms[first];
                    histograms[first] = tmp;

                    histograms[idx] = null;
                    indicesToRemove.Add(idx);
                    clusterMappings[clusters[idx]] = clusters[first];
                }
                else
                {
                    binInfo[binId].NumCombineFailures++;
                }
            }
        }
    }

    // Remove in descending index order so the smaller indices stay valid while removing.
    foreach (int index in indicesToRemove.OrderByDescending(i => i))
    {
        histograms.RemoveAt(index);
    }
}
/// <summary>
/// Folds the run [iPrev, i) of value valPrev into the entropy accumulators and the
/// Huffman streak statistics, then advances the run trackers to (val, i).
/// </summary>
private void GetEntropyUnrefined(uint val, int i, ref uint valPrev, ref int iPrev, Vp8LStreaks stats)
{
    int streak = i - iPrev;

    // Gather info for the bit entropy.
    if (valPrev != 0)
    {
        this.Sum += (uint)(valPrev * streak);
        this.NoneZeros += streak;
        this.NoneZeroCode = (uint)iPrev;
        this.Entropy -= LosslessUtils.FastSLog2(valPrev) * streak;
        if (this.MaxVal < valPrev)
        {
            this.MaxVal = valPrev;
        }
    }

    // Gather info for the Huffman cost: bucket by zero/non-zero value
    // and by short (<= 3) / long (> 3) streak length.
    int valueBucket = valPrev != 0 ? 1 : 0;
    int lengthBucket = streak > 3 ? 1 : 0;
    stats.Counts[valueBucket] += lengthBucket;
    stats.Streaks[valueBucket][lengthBucket] += streak;

    valPrev = val;
    iPrev = i;
}
/// <summary>
/// Get the symbol entropy for the distribution 'population'.
/// </summary>
private static double PopulationCost(uint[] population, int length, ref uint trivialSym, ref bool isUsed, Vp8LStreaks stats, Vp8LBitEntropy bitEntropy)
{
    bitEntropy.Init();
    stats.Clear();
    bitEntropy.BitsEntropyUnrefined(population, length, stats);

    // A distribution with exactly one non-zero symbol is "trivial"; report that symbol.
    bool hasSingleSymbol = bitEntropy.NoneZeros == 1;
    trivialSym = hasSingleSymbol ? bitEntropy.NoneZeroCode : NonTrivialSym;

    // The histogram is used if there is at least one non-zero streak.
    isUsed = stats.Streaks[1][0] != 0 || stats.Streaks[1][1] != 0;

    return bitEntropy.BitsEntropyRefine() + stats.FinalHuffmanCost();
}
/// <summary>
/// Computes the entropy of the element-wise sum of <paramref name="x"/> and
/// <paramref name="y"/>, short-circuiting when one or both histograms are unused,
/// or when the combined histogram is known to be trivial.
/// </summary>
private static double GetCombinedEntropy(uint[] x, uint[] y, int length, bool isXUsed, bool isYUsed, bool trivialAtEnd, Vp8LStreaks stats, Vp8LBitEntropy bitEntropy)
{
    stats.Clear();
    bitEntropy.Init();

    if (trivialAtEnd)
    {
        // This configuration is due to palettization that transforms an indexed
        // pixel into 0xff000000 | (pixel << 8) in BundleColorMap.
        // BitsEntropyRefine is 0 for histograms with only one non-zero value, so
        // only FinalHuffmanCost needs to be evaluated.
        // Account for the single non-zero value at index 0 or length-1 ...
        stats.Streaks[1][0] = 1;

        // ... and for the adjacent zero streak.
        stats.Counts[0] = 1;
        stats.Streaks[0][1] = length - 1;
        return stats.FinalHuffmanCost();
    }

    if (isXUsed && isYUsed)
    {
        bitEntropy.GetCombinedEntropyUnrefined(x, y, length, stats);
    }
    else if (isXUsed)
    {
        bitEntropy.GetEntropyUnrefined(x, length, stats);
    }
    else if (isYUsed)
    {
        bitEntropy.GetEntropyUnrefined(y, length, stats);
    }
    else
    {
        // Neither histogram is used: the whole range is a single zero streak.
        stats.Counts[0] = 1;
        stats.Streaks[0][length > 3 ? 1 : 0] = length;
        bitEntropy.Init();
    }

    return bitEntropy.BitsEntropyRefine() + stats.FinalHuffmanCost();
}
/// <summary>
/// Accumulates the combined entropy of this histogram and <paramref name="b"/> channel
/// by channel into <paramref name="cost"/> (starting from <paramref name="costInitial"/>),
/// bailing out and returning false as soon as the running cost exceeds
/// <paramref name="costThreshold"/>.
/// </summary>
/// <returns>true if the full combined cost stayed within the threshold; otherwise false.</returns>
public bool GetCombinedHistogramEntropy(Vp8LHistogram b, Vp8LStreaks stats, Vp8LBitEntropy bitEntropy, double costThreshold, double costInitial, out double cost)
{
    bool trivialAtEnd = false;
    cost = costInitial;

    cost += GetCombinedEntropy(this.Literal, b.Literal, this.NumCodes(), this.IsUsed[0], b.IsUsed[0], false, stats, bitEntropy);

    cost += ExtraCostCombined(this.Literal.AsSpan(WebpConstants.NumLiteralCodes), b.Literal.AsSpan(WebpConstants.NumLiteralCodes), WebpConstants.NumLengthCodes);

    if (cost > costThreshold)
    {
        return(false);
    }

    // When both histograms share the same trivial symbol, check whether its packed
    // A/R/B bytes are all 0 or 0xff; if so, the per-channel entropy can use the
    // cheaper trivial-at-end path.
    if (this.TrivialSymbol != NonTrivialSym && this.TrivialSymbol == b.TrivialSymbol)
    {
        // A, R and B are all 0 or 0xff.
        uint colorA = (this.TrivialSymbol >> 24) & 0xff;
        uint colorR = (this.TrivialSymbol >> 16) & 0xff;
        uint colorB = (this.TrivialSymbol >> 0) & 0xff;
        if ((colorA == 0 || colorA == 0xff) && (colorR == 0 || colorR == 0xff) && (colorB == 0 || colorB == 0xff))
        {
            trivialAtEnd = true;
        }
    }

    cost += GetCombinedEntropy(this.Red, b.Red, WebpConstants.NumLiteralCodes, this.IsUsed[1], b.IsUsed[1], trivialAtEnd, stats, bitEntropy);
    if (cost > costThreshold)
    {
        return(false);
    }

    cost += GetCombinedEntropy(this.Blue, b.Blue, WebpConstants.NumLiteralCodes, this.IsUsed[2], b.IsUsed[2], trivialAtEnd, stats, bitEntropy);
    if (cost > costThreshold)
    {
        return(false);
    }

    cost += GetCombinedEntropy(this.Alpha, b.Alpha, WebpConstants.NumLiteralCodes, this.IsUsed[3], b.IsUsed[3], trivialAtEnd, stats, bitEntropy);
    if (cost > costThreshold)
    {
        return(false);
    }

    cost += GetCombinedEntropy(this.Distance, b.Distance, WebpConstants.NumDistanceCodes, this.IsUsed[4], b.IsUsed[4], false, stats, bitEntropy);
    if (cost > costThreshold)
    {
        return(false);
    }

    cost += ExtraCostCombined(this.Distance, b.Distance, WebpConstants.NumDistanceCodes);
    if (cost > costThreshold)
    {
        return(false);
    }

    return(true);
}