/// <summary>
/// Computes the bit cost of each original histogram, drops empty ones and deep-clones
/// the rest into <paramref name="histograms"/>, assigning consecutive cluster ids.
/// </summary>
/// <returns>The number of histograms that remain in use.</returns>
private static int HistogramCopyAndAnalyze(List<Vp8LHistogram> origHistograms, List<Vp8LHistogram> histograms, ushort[] histogramSymbols)
{
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();
    int clusterId = 0;
    for (int i = 0; i < origHistograms.Count; i++)
    {
        Vp8LHistogram origHistogram = origHistograms[i];
        origHistogram.UpdateHistogramCost(stats, bitsEntropy);

        // A histogram with no used channel carries no information, which can happen
        // for tiles with no information (when they are skipped because of LZ77).
        bool anyChannelUsed = false;
        for (int channel = 0; channel < 5; channel++)
        {
            if (origHistogram.IsUsed[channel])
            {
                anyChannelUsed = true;
                break;
            }
        }

        if (anyChannelUsed)
        {
            histograms[i] = (Vp8LHistogram)origHistogram.DeepClone();
            histogramSymbols[i] = (ushort)clusterId++;
        }
        else
        {
            origHistograms[i] = null;
            histograms[i] = null;
            histogramSymbols[i] = InvalidHistogramSymbol;
        }
    }

    // Count the symbols that were actually assigned.
    int numUsed = 0;
    foreach (ushort symbol in histogramSymbols)
    {
        if (symbol != InvalidHistogramSymbol)
        {
            numUsed++;
        }
    }

    return numUsed;
}
/// <summary>
/// Evaluates the cost of merging <paramref name="b"/> into this histogram, expressed
/// relative to this histogram's own bit cost, against <paramref name="costThreshold"/>.
/// </summary>
/// <returns>The (possibly partially evaluated) combined cost delta.</returns>
public double AddThresh(Vp8LHistogram b, Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy, double costThreshold)
{
    // Start from -BitCost so the accumulated entropy yields a cost difference.
    double initialCost = -this.BitCost;

    // The boolean bail-out result is not needed here; the accumulated cost is returned as-is.
    this.GetCombinedHistogramEntropy(b, stats, bitsEntropy, costThreshold, initialCost, out double cost);
    return cost;
}
/// <summary>
/// Creates a pair from indices "idx1" and "idx2" provided its cost is inferior to "threshold", a negative entropy.
/// </summary>
/// <returns>The cost of the pair, or 0 if it is superior to threshold.</returns>
private static double HistoPriorityListPush(List<HistogramPair> histoList, int maxSize, List<Vp8LHistogram> histograms, int idx1, int idx2, double threshold, Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy)
{
    if (histoList.Count == maxSize)
    {
        return 0.0d;
    }

    // Normalize ordering so Idx1 always holds the smaller index.
    if (idx1 > idx2)
    {
        (idx1, idx2) = (idx2, idx1);
    }

    var pair = new HistogramPair
    {
        Idx1 = idx1,
        Idx2 = idx2
    };

    HistoListUpdatePair(histograms[idx1], histograms[idx2], stats, bitsEntropy, threshold, pair);

    // Do not even consider the pair if it does not improve the entropy.
    if (pair.CostDiff >= threshold)
    {
        return 0.0d;
    }

    histoList.Add(pair);
    HistoListUpdateHead(histoList, pair);
    return pair.CostDiff;
}
/// <summary>
/// Updates the cost diff and combo of a pair of histograms. This needs to be called when the histograms have been merged with a third one.
/// </summary>
private static void HistoListUpdatePair(Vp8LHistogram h1, Vp8LHistogram h2, Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy, double threshold, HistogramPair pair)
{
    double sumCost = h1.BitCost + h2.BitCost;
    pair.CostCombo = 0.0d;

    // The bail-out threshold for the combined entropy is the pair cost plus the caller's slack.
    h1.GetCombinedHistogramEntropy(h2, stats, bitsEntropy, sumCost + threshold, costInitial: pair.CostCombo, out double combinedCost);

    pair.CostCombo = combinedCost;
    pair.CostDiff = combinedCost - sumCost;
}
/// <summary>
/// Performs output = a + b, computing the cost C(a+b) - C(a) - C(b) while comparing
/// to the threshold value 'costThreshold'. The score returned is
/// Score = C(a+b) - C(a) - C(b), where C(a) + C(b) is known and fixed.
/// Since the previous score passed is 'costThreshold', we only need to compare
/// the partial cost against 'costThreshold + C(a) + C(b)' to possibly bail-out early.
/// </summary>
public double AddEval(Vp8LHistogram b, Vp8LStreaks stats, Vp8LBitEntropy bitsEntropy, double costThreshold, Vp8LHistogram output)
{
    double sumCost = this.BitCost + b.BitCost;
    double bailoutThreshold = costThreshold + sumCost;

    bool withinThreshold = this.GetCombinedHistogramEntropy(b, stats, bitsEntropy, bailoutThreshold, costInitial: 0, out double cost);
    if (withinThreshold)
    {
        // Only materialize the merged histogram when the combination did not bail out.
        this.Add(b, output);
        output.BitCost = cost;
        output.PaletteCodeBits = this.PaletteCodeBits;
    }

    return cost - sumCost;
}
/// <summary>
/// Initializes a new instance of the <see cref="Vp8LHistogram"/> class
/// as a deep copy of another histogram.
/// </summary>
/// <param name="other">The histogram to create an instance from.</param>
private Vp8LHistogram(Vp8LHistogram other)
    : this(other.PaletteCodeBits)
{
    // Copy all per-channel population counts and the channel-usage flags.
    other.Alpha.AsSpan().CopyTo(this.Alpha);
    other.Red.AsSpan().CopyTo(this.Red);
    other.Blue.AsSpan().CopyTo(this.Blue);
    other.Literal.AsSpan().CopyTo(this.Literal);
    other.Distance.AsSpan().CopyTo(this.Distance);
    other.IsUsed.AsSpan().CopyTo(this.IsUsed);

    // Copy the cached cost figures and symbol state.
    this.LiteralCost = other.LiteralCost;
    this.RedCost = other.RedCost;
    this.BlueCost = other.BlueCost;
    this.BitCost = other.BitCost;
    this.TrivialSymbol = other.TrivialSymbol;
    this.PaletteCodeBits = other.PaletteCodeBits;
}
/// <summary>
/// Builds this histogram's bit-estimate tables from the given backward references.
/// A temporary population-count histogram is filled first (similar to HistogramCreate,
/// but the distance is converted to a plane code), then converted into bit estimates.
/// </summary>
/// <param name="xSize">The image width, used for the distance-to-plane-code conversion.</param>
/// <param name="cacheBits">The color cache bits of the temporary histogram.</param>
/// <param name="backwardRefs">The backward references to accumulate.</param>
public void Build(int xSize, int cacheBits, Vp8LBackwardRefs backwardRefs)
{
    var histogram = new Vp8LHistogram(cacheBits);

    // Idiomatic foreach instead of a manually-driven enumerator; List<T>'s struct
    // enumerator is used either way, so behavior and allocation are unchanged.
    foreach (PixOrCopy pixOrCopy in backwardRefs.Refs)
    {
        histogram.AddSinglePixOrCopy(pixOrCopy, true, xSize);
    }

    ConvertPopulationCountTableToBitEstimates(histogram.NumCodes(), histogram.Literal, this.Literal);
    ConvertPopulationCountTableToBitEstimates(ValuesInBytes, histogram.Red, this.Red);
    ConvertPopulationCountTableToBitEstimates(ValuesInBytes, histogram.Blue, this.Blue);
    ConvertPopulationCountTableToBitEstimates(ValuesInBytes, histogram.Alpha, this.Alpha);
    ConvertPopulationCountTableToBitEstimates(WebpConstants.NumDistanceCodes, histogram.Distance, this.Distance);
}
/// <summary>
/// Accumulates the counts of this histogram and <paramref name="b"/> into <paramref name="output"/>,
/// merging the per-channel usage flags and the trivial symbol.
/// </summary>
public void Add(Vp8LHistogram b, Vp8LHistogram output)
{
    this.AddLiteral(b, output, this.NumCodes());
    this.AddRed(b, output, WebpConstants.NumLiteralCodes);
    this.AddBlue(b, output, WebpConstants.NumLiteralCodes);
    this.AddAlpha(b, output, WebpConstants.NumLiteralCodes);
    this.AddDistance(b, output, WebpConstants.NumDistanceCodes);

    for (int channel = 0; channel < 5; channel++)
    {
        output.IsUsed[channel] = this.IsUsed[channel] | b.IsUsed[channel];
    }

    // The merged histogram keeps a trivial symbol only when both sides agree on it.
    output.TrivialSymbol = this.TrivialSymbol == b.TrivialSymbol
        ? this.TrivialSymbol
        : NonTrivialSym;
}
/// <summary>
/// Merges the alpha channel counts of this histogram and <paramref name="b"/> into <paramref name="output"/>.
/// </summary>
private void AddAlpha(Vp8LHistogram b, Vp8LHistogram output, int size)
{
    bool thisUsed = this.IsUsed[3];
    bool otherUsed = b.IsUsed[3];

    // Only sum when both sides carry data; otherwise copy the used side, or clear.
    if (thisUsed && otherUsed)
    {
        AddVector(this.Alpha, b.Alpha, output.Alpha, size);
    }
    else if (thisUsed)
    {
        this.Alpha.AsSpan(0, size).CopyTo(output.Alpha);
    }
    else if (otherUsed)
    {
        b.Alpha.AsSpan(0, size).CopyTo(output.Alpha);
    }
    else
    {
        output.Alpha.AsSpan(0, size).Clear();
    }
}
/// <summary>
/// Merges the blue channel counts of this histogram and <paramref name="b"/> into <paramref name="output"/>.
/// </summary>
private void AddBlue(Vp8LHistogram b, Vp8LHistogram output, int size)
{
    bool thisUsed = this.IsUsed[2];
    bool otherUsed = b.IsUsed[2];

    // Only sum when both sides carry data; otherwise copy the used side, or clear.
    if (thisUsed && otherUsed)
    {
        AddVector(this.Blue, b.Blue, output.Blue, size);
    }
    else if (thisUsed)
    {
        this.Blue.AsSpan(0, size).CopyTo(output.Blue);
    }
    else if (otherUsed)
    {
        b.Blue.AsSpan(0, size).CopyTo(output.Blue);
    }
    else
    {
        output.Blue.AsSpan(0, size).Clear();
    }
}
/// <summary>
/// Merges the red channel counts of this histogram and <paramref name="b"/> into <paramref name="output"/>.
/// </summary>
private void AddRed(Vp8LHistogram b, Vp8LHistogram output, int size)
{
    bool thisUsed = this.IsUsed[1];
    bool otherUsed = b.IsUsed[1];

    // Only sum when both sides carry data; otherwise copy the used side, or clear.
    if (thisUsed && otherUsed)
    {
        AddVector(this.Red, b.Red, output.Red, size);
    }
    else if (thisUsed)
    {
        this.Red.AsSpan(0, size).CopyTo(output.Red);
    }
    else if (otherUsed)
    {
        b.Red.AsSpan(0, size).CopyTo(output.Red);
    }
    else
    {
        output.Red.AsSpan(0, size).Clear();
    }
}
/// <summary>
/// Merges the literal counts of this histogram and <paramref name="b"/> into <paramref name="output"/>.
/// </summary>
private void AddLiteral(Vp8LHistogram b, Vp8LHistogram output, int literalSize)
{
    bool thisUsed = this.IsUsed[0];
    bool otherUsed = b.IsUsed[0];

    // Only sum when both sides carry data; otherwise copy the used side, or clear.
    if (thisUsed && otherUsed)
    {
        AddVector(this.Literal, b.Literal, output.Literal, literalSize);
    }
    else if (thisUsed)
    {
        this.Literal.AsSpan(0, literalSize).CopyTo(output.Literal);
    }
    else if (otherUsed)
    {
        b.Literal.AsSpan(0, literalSize).CopyTo(output.Literal);
    }
    else
    {
        output.Literal.AsSpan(0, literalSize).Clear();
    }
}
/// <summary>
/// Merges the distance counts of this histogram and <paramref name="b"/> into <paramref name="output"/>.
/// </summary>
private void AddDistance(Vp8LHistogram b, Vp8LHistogram output, int size)
{
    bool thisUsed = this.IsUsed[4];
    bool otherUsed = b.IsUsed[4];

    // Only sum when both sides carry data; otherwise copy the used side, or clear.
    if (thisUsed && otherUsed)
    {
        AddVector(this.Distance, b.Distance, output.Distance, size);
    }
    else if (thisUsed)
    {
        this.Distance.AsSpan(0, size).CopyTo(output.Distance);
    }
    else if (otherUsed)
    {
        b.Distance.AsSpan(0, size).CopyTo(output.Distance);
    }
    else
    {
        output.Distance.AsSpan(0, size).Clear();
    }
}
/// <summary>
/// Evaluates best possible backward references for specified quality. The input cacheBits to 'GetBackwardReferences'
/// sets the maximum cache bits to use (passing 0 implies disabling the local color cache).
/// The optimal cache bits is evaluated and set for the cacheBits parameter.
/// The return value is the pointer to the best of the two backward refs viz, refs[0] or refs[1].
/// </summary>
public static Vp8LBackwardRefs GetBackwardReferences(
    int width,
    int height,
    ReadOnlySpan<uint> bgra,
    int quality,
    int lz77TypesToTry,
    ref int cacheBits,
    MemoryAllocator memoryAllocator,
    Vp8LHashChain hashChain,
    Vp8LBackwardRefs best,
    Vp8LBackwardRefs worst)
{
    int lz77TypeBest = 0;
    double bitCostBest = -1;
    int cacheBitsInitial = cacheBits;
    Vp8LHashChain hashChainBox = null;
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();

    // 'lz77TypesToTry' is a bit set; each iteration evaluates one variant and
    // clears its bit, walking 'lz77Type' through the powers of two.
    for (int lz77Type = 1; lz77TypesToTry > 0; lz77TypesToTry &= ~lz77Type, lz77Type <<= 1)
    {
        int cacheBitsTmp = cacheBitsInitial;
        if ((lz77TypesToTry & lz77Type) == 0)
        {
            continue;
        }

        // Produce candidate references into 'worst' for this LZ77 variant.
        switch ((Vp8LLz77Type)lz77Type)
        {
            case Vp8LLz77Type.Lz77Rle:
                BackwardReferencesRle(width, height, bgra, 0, worst);
                break;
            case Vp8LLz77Type.Lz77Standard:
                // Compute LZ77 with no cache (0 bits), as the ideal LZ77 with a color cache is not that different in practice.
                BackwardReferencesLz77(width, height, bgra, 0, hashChain, worst);
                break;
            case Vp8LLz77Type.Lz77Box:
                hashChainBox = new Vp8LHashChain(memoryAllocator, width * height);
                BackwardReferencesLz77Box(width, height, bgra, 0, hashChain, hashChainBox, worst);
                break;
        }

        // Next, try with a color cache and update the references.
        cacheBitsTmp = CalculateBestCacheSize(bgra, quality, worst, cacheBitsTmp);
        if (cacheBitsTmp > 0)
        {
            BackwardRefsWithLocalCache(bgra, cacheBitsTmp, worst);
        }

        // Keep the best backward references.
        var histo = new Vp8LHistogram(worst, cacheBitsTmp);
        double bitCost = histo.EstimateBits(stats, bitsEntropy);
        if (lz77TypeBest == 0 || bitCost < bitCostBest)
        {
            // 'worst' currently holds the winning candidate; swap the two buffers
            // so 'best' always names the cheapest references found so far.
            Vp8LBackwardRefs tmp = worst;
            worst = best;
            best = tmp;
            bitCostBest = bitCost;
            cacheBits = cacheBitsTmp;
            lz77TypeBest = lz77Type;
        }
    }

    // Improve on simple LZ77 but only for high quality (TraceBackwards is costly).
    if ((lz77TypeBest == (int)Vp8LLz77Type.Lz77Standard || lz77TypeBest == (int)Vp8LLz77Type.Lz77Box) && quality >= 25)
    {
        Vp8LHashChain hashChainTmp = lz77TypeBest == (int)Vp8LLz77Type.Lz77Standard ? hashChain : hashChainBox;
        BackwardReferencesTraceBackwards(width, height, memoryAllocator, bgra, cacheBits, hashChainTmp, best, worst);
        var histo = new Vp8LHistogram(worst, cacheBits);
        double bitCostTrace = histo.EstimateBits(stats, bitsEntropy);
        if (bitCostTrace < bitCostBest)
        {
            best = worst;
        }
    }

    BackwardReferences2DLocality(width, best);

    // The box hash chain is only allocated when Lz77Box was tried.
    hashChainBox?.Dispose();

    return (best);
}
/// <summary>
/// Evaluate optimal cache bits for the local color cache.
/// The input bestCacheBits sets the maximum cache bits to use (passing 0 implies disabling the local color cache).
/// The local color cache is also disabled for the lower (smaller than 25) quality.
/// </summary>
/// <returns>Best cache size.</returns>
private static int CalculateBestCacheSize(ReadOnlySpan<uint> bgra, int quality, Vp8LBackwardRefs refs, int bestCacheBits)
{
    int cacheBitsMax = quality <= 25 ? 0 : bestCacheBits;
    if (cacheBitsMax == 0)
    {
        // Local color cache is disabled.
        return (0);
    }

    double entropyMin = MaxEntropy;
    int pos = 0;

    // One candidate color cache and histogram per cache size 0..MaxColorCacheBits;
    // all sizes are evaluated in a single pass over the references.
    var colorCache = new ColorCache[WebpConstants.MaxColorCacheBits + 1];
    var histos = new Vp8LHistogram[WebpConstants.MaxColorCacheBits + 1];
    for (int i = 0; i <= WebpConstants.MaxColorCacheBits; i++)
    {
        histos[i] = new Vp8LHistogram(paletteCodeBits: i);
        colorCache[i] = new ColorCache();
        colorCache[i].Init(i);
    }

    // Find the cacheBits giving the lowest entropy.
    for (int idx = 0; idx < refs.Refs.Count; idx++)
    {
        PixOrCopy v = refs.Refs[idx];
        if (v.IsLiteral())
        {
            uint pix = bgra[pos++];
            uint a = (pix >> 24) & 0xff;
            uint r = (pix >> 16) & 0xff;
            uint g = (pix >> 8) & 0xff;
            uint b = (pix >> 0) & 0xff;

            // The keys of the caches can be derived from the longest one.
            int key = ColorCache.HashPix(pix, 32 - cacheBitsMax);

            // Do not use the color cache for cacheBits = 0.
            ++histos[0].Blue[b];
            ++histos[0].Literal[g];
            ++histos[0].Red[r];
            ++histos[0].Alpha[a];

            // Deal with cacheBits > 0.
            for (int i = cacheBitsMax; i >= 1; --i, key >>= 1)
            {
                if (colorCache[i].Lookup(key) == pix)
                {
                    // Cache hit: counted as a cache-index code in the literal histogram.
                    ++histos[i].Literal[WebpConstants.NumLiteralCodes + WebpConstants.NumLengthCodes + key];
                }
                else
                {
                    // Cache miss: insert the pixel and count the four channels literally.
                    colorCache[i].Set((uint)key, pix);
                    ++histos[i].Blue[b];
                    ++histos[i].Literal[g];
                    ++histos[i].Red[r];
                    ++histos[i].Alpha[a];
                }
            }
        }
        else
        {
            // We should compute the contribution of the (distance, length)
            // histograms but those are the same independently from the cache size.
            // As those constant contributions are in the end added to the other
            // histogram contributions, we can ignore them, except for the length
            // prefix that is part of the literal_ histogram.
            int len = v.Len;
            uint bgraPrev = bgra[pos] ^ 0xffffffffu;

            int extraBits = 0, extraBitsValue = 0;
            int code = LosslessUtils.PrefixEncode(len, ref extraBits, ref extraBitsValue);
            for (int i = 0; i <= cacheBitsMax; i++)
            {
                ++histos[i].Literal[WebpConstants.NumLiteralCodes + code];
            }

            // Update the color caches.
            do
            {
                if (bgra[pos] != bgraPrev)
                {
                    // Efficiency: insert only if the color changes.
                    int key = ColorCache.HashPix(bgra[pos], 32 - cacheBitsMax);
                    for (int i = cacheBitsMax; i >= 1; --i, key >>= 1)
                    {
                        colorCache[i].Colors[key] = bgra[pos];
                    }

                    bgraPrev = bgra[pos];
                }

                pos++;
            }
            while (--len != 0);
        }
    }

    // Pick the cache size whose histogram has the lowest estimated bit cost.
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();
    for (int i = 0; i <= cacheBitsMax; i++)
    {
        double entropy = histos[i].EstimateBits(stats, bitsEntropy);
        if (i == 0 || entropy < entropyMin)
        {
            entropyMin = entropy;
            bestCacheBits = i;
        }
    }

    return (bestCacheBits);
}
/// <summary>
/// Accumulates the counts of <paramref name="a"/> and <paramref name="b"/> into <paramref name="output"/>.
/// </summary>
private static void HistogramAdd(Vp8LHistogram a, Vp8LHistogram b, Vp8LHistogram output)
{
    // Vp8LHistogram.Add already resolves output.TrivialSymbol (keeping the symbol
    // only when both inputs agree, otherwise NonTrivialSym), so the previously
    // duplicated assignment here was redundant and has been removed.
    a.Add(b, output);
}
/// <summary>
/// Accumulates the combined entropy of this histogram merged with <paramref name="b"/> into
/// <paramref name="cost"/>, starting from <paramref name="costInitial"/> and bailing out as
/// soon as the running total exceeds <paramref name="costThreshold"/>.
/// </summary>
/// <param name="b">The histogram to combine with this one.</param>
/// <param name="stats">Scratch streak statistics, reused across calls.</param>
/// <param name="bitEntropy">Scratch bit entropy accumulator, reused across calls.</param>
/// <param name="costThreshold">Upper bound on the accumulated cost; evaluation stops early once exceeded.</param>
/// <param name="costInitial">Initial value for the accumulated cost.</param>
/// <param name="cost">The accumulated (possibly partial, when bailing out) combined cost.</param>
/// <returns>True if the full combined cost was computed without exceeding the threshold; otherwise false.</returns>
public bool GetCombinedHistogramEntropy(Vp8LHistogram b, Vp8LStreaks stats, Vp8LBitEntropy bitEntropy, double costThreshold, double costInitial, out double cost)
{
    bool trivialAtEnd = false;
    cost = costInitial;

    // Literal channel (which also holds the length codes) plus the extra cost of the length prefixes.
    cost += GetCombinedEntropy(this.Literal, b.Literal, this.NumCodes(), this.IsUsed[0], b.IsUsed[0], false, stats, bitEntropy);

    cost += ExtraCostCombined(this.Literal.AsSpan(WebpConstants.NumLiteralCodes), b.Literal.AsSpan(WebpConstants.NumLiteralCodes), WebpConstants.NumLengthCodes);

    if (cost > costThreshold)
    {
        return (false);
    }

    // When both histograms share the same trivial symbol, check whether its channels
    // qualify for the 'trivialAtEnd' special case passed to GetCombinedEntropy below.
    if (this.TrivialSymbol != NonTrivialSym && this.TrivialSymbol == b.TrivialSymbol)
    {
        // A, R and B are all 0 or 0xff.
        uint colorA = (this.TrivialSymbol >> 24) & 0xff;
        uint colorR = (this.TrivialSymbol >> 16) & 0xff;
        uint colorB = (this.TrivialSymbol >> 0) & 0xff;
        if ((colorA == 0 || colorA == 0xff) && (colorR == 0 || colorR == 0xff) && (colorB == 0 || colorB == 0xff))
        {
            trivialAtEnd = true;
        }
    }

    cost += GetCombinedEntropy(this.Red, b.Red, WebpConstants.NumLiteralCodes, this.IsUsed[1], b.IsUsed[1], trivialAtEnd, stats, bitEntropy);
    if (cost > costThreshold)
    {
        return (false);
    }

    cost += GetCombinedEntropy(this.Blue, b.Blue, WebpConstants.NumLiteralCodes, this.IsUsed[2], b.IsUsed[2], trivialAtEnd, stats, bitEntropy);
    if (cost > costThreshold)
    {
        return (false);
    }

    cost += GetCombinedEntropy(this.Alpha, b.Alpha, WebpConstants.NumLiteralCodes, this.IsUsed[3], b.IsUsed[3], trivialAtEnd, stats, bitEntropy);
    if (cost > costThreshold)
    {
        return (false);
    }

    // Distance channel plus the extra cost of the distance prefixes.
    cost += GetCombinedEntropy(this.Distance, b.Distance, WebpConstants.NumDistanceCodes, this.IsUsed[4], b.IsUsed[4], false, stats, bitEntropy);
    if (cost > costThreshold)
    {
        return (false);
    }

    cost += ExtraCostCombined(this.Distance, b.Distance, WebpConstants.NumDistanceCodes);
    if (cost > costThreshold)
    {
        return (false);
    }

    return (true);
}
/// <summary>
/// Collapses histograms that fall into the same entropy bin: each histogram is merged into
/// the first histogram seen for its bin whenever the merge lowers the bit cost enough,
/// updating the cluster mappings accordingly.
/// </summary>
private static void HistogramCombineEntropyBin(
    List<Vp8LHistogram> histograms,
    ushort[] clusters,
    ushort[] clusterMappings,
    Vp8LHistogram curCombo,
    ushort[] binMap,
    int numBins,
    double combineCostFactor)
{
    // Per-bin state: index of the first histogram in the bin (-1 = none yet)
    // and the number of rejected trivial-symbol merges so far.
    var binInfo = new HistogramBinInfo[BinSize];
    for (int idx = 0; idx < numBins; idx++)
    {
        binInfo[idx].First = -1;
        binInfo[idx].NumCombineFailures = 0;
    }

    // By default, a cluster matches itself.
    for (int idx = 0; idx < histograms.Count; idx++)
    {
        clusterMappings[idx] = (ushort)idx;
    }

    var indicesToRemove = new List<int>();
    var stats = new Vp8LStreaks();
    var bitsEntropy = new Vp8LBitEntropy();
    for (int idx = 0; idx < histograms.Count; idx++)
    {
        if (histograms[idx] == null)
        {
            continue;
        }

        int binId = binMap[idx];
        int first = binInfo[binId].First;
        if (first == -1)
        {
            // First histogram seen for this bin becomes the merge target.
            binInfo[binId].First = (short)idx;
        }
        else
        {
            // Try to merge #idx into #first (both share the same binId)
            double bitCost = histograms[idx].BitCost;
            double bitCostThresh = -bitCost * combineCostFactor;
            double currCostDiff = histograms[first].AddEval(histograms[idx], stats, bitsEntropy, bitCostThresh, curCombo);
            if (currCostDiff < bitCostThresh)
            {
                // Try to merge two histograms only if the combo is a trivial one or
                // the two candidate histograms are already non-trivial.
                // For some images, 'tryCombine' turns out to be false for a lot of
                // histogram pairs. In that case, we fallback to combining
                // histograms as usual to avoid increasing the header size.
                bool tryCombine = curCombo.TrivialSymbol != NonTrivialSym || (histograms[idx].TrivialSymbol == NonTrivialSym && histograms[first].TrivialSymbol == NonTrivialSym);
                int maxCombineFailures = 32;
                if (tryCombine || binInfo[binId].NumCombineFailures >= maxCombineFailures)
                {
                    // Move the (better) merged histogram to its final slot.
                    // 'curCombo' (holding the merged result) swaps places with the
                    // bin's first histogram and is reused as scratch for later merges.
                    Vp8LHistogram tmp = curCombo;
                    curCombo = histograms[first];
                    histograms[first] = tmp;

                    histograms[idx] = null;
                    indicesToRemove.Add(idx);
                    clusterMappings[clusters[idx]] = clusters[first];
                }
                else
                {
                    binInfo[binId].NumCombineFailures++;
                }
            }
        }
    }

    // Remove merged-away entries highest-index first so earlier indices stay valid.
    foreach (int index in indicesToRemove.OrderByDescending(i => i))
    {
        histograms.RemoveAt(index);
    }
}
/// <summary>
/// Builds the per-tile histograms from the backward references and clusters them:
/// an optional entropy-bin combine pass, a stochastic combine pass, an optional greedy
/// pass, and a final remap of the original histograms onto the surviving set.
/// </summary>
public static void GetHistoImageSymbols(int xSize, int ySize, Vp8LBackwardRefs refs, int quality, int histoBits, int cacheBits, List<Vp8LHistogram> imageHisto, Vp8LHistogram tmpHisto, ushort[] histogramSymbols)
{
    // One histogram per tile of 2^histoBits pixels; histoBits == 0 means a single histogram.
    int histoXSize = histoBits > 0 ? LosslessUtils.SubSampleSize(xSize, histoBits) : 1;
    int histoYSize = histoBits > 0 ? LosslessUtils.SubSampleSize(ySize, histoBits) : 1;
    int imageHistoRawSize = histoXSize * histoYSize;
    int entropyCombineNumBins = BinSize;
    ushort[] mapTmp = new ushort[imageHistoRawSize];
    ushort[] clusterMappings = new ushort[imageHistoRawSize];
    var origHisto = new List<Vp8LHistogram>(imageHistoRawSize);
    for (int i = 0; i < imageHistoRawSize; i++)
    {
        origHisto.Add(new Vp8LHistogram(cacheBits));
    }

    // Construct the histograms from the backward references.
    HistogramBuild(xSize, histoBits, refs, origHisto);

    // Copies the histograms and computes its bitCost. histogramSymbols is optimized.
    int numUsed = HistogramCopyAndAnalyze(origHisto, imageHisto, histogramSymbols);

    // Entropy-bin combining is only worthwhile with many used histograms and below max quality.
    bool entropyCombine = numUsed > entropyCombineNumBins * 2 && quality < 100;
    if (entropyCombine)
    {
        ushort[] binMap = mapTmp;
        int numClusters = numUsed;
        double combineCostFactor = GetCombineCostFactor(imageHistoRawSize, quality);
        HistogramAnalyzeEntropyBin(imageHisto, binMap);

        // Collapse histograms with similar entropy.
        HistogramCombineEntropyBin(imageHisto, histogramSymbols, clusterMappings, tmpHisto, binMap, entropyCombineNumBins, combineCostFactor);

        OptimizeHistogramSymbols(clusterMappings, numClusters, mapTmp, histogramSymbols);
    }

    float x = quality / 100.0f;

    // Cubic ramp between 1 and MaxHistoGreedy:
    int thresholdSize = (int)(1 + (x * x * x * (MaxHistoGreedy - 1)));
    bool doGreedy = HistogramCombineStochastic(imageHisto, thresholdSize);
    if (doGreedy)
    {
        RemoveEmptyHistograms(imageHisto);
        HistogramCombineGreedy(imageHisto);
    }

    // Find the optimal map from original histograms to the final ones.
    RemoveEmptyHistograms(imageHisto);
    HistogramRemap(origHisto, imageHisto, histogramSymbols);
}