// This allows us to influence the way in which libpng chooses the "best"
// filter for the current scanline. While the "minimum-sum-of-absolute-
// differences" metric is relatively fast and effective, there is some
// question as to whether it can be improved upon by trying to keep the
// filtered data going to zlib more consistent, hopefully resulting in
// better compression.
//
// heuristic_method: heuristic to use; DEFAULT is mapped to UNWEIGHTED,
//                   values >= LAST are rejected with a debug message.
// num_weights:      number of previous scanlines whose filter choices are
//                   weighted (forced to 0 when negative, when filter_weights
//                   is null, or when the heuristic is UNWEIGHTED).
// filter_weights:   per-scanline weights; a negative entry resets that slot
//                   to the neutral PNG.WEIGHT_FACTOR.
// filter_costs:     per-filter-type relative costs; null or negative entries
//                   reset to PNG.COST_FACTOR, entries in (0.0, 1.0) are
//                   ignored so previously configured costs are kept.
public void png_set_filter_heuristics(PNG_FILTER_HEURISTIC heuristic_method, int num_weights, double[] filter_weights, double[] filter_costs)
{
	if(heuristic_method>=PNG_FILTER_HEURISTIC.LAST)
	{
		Debug.WriteLine("Unknown filter heuristic method");
		return;
	}

	if(heuristic_method==PNG_FILTER_HEURISTIC.DEFAULT) heuristic_method=PNG_FILTER_HEURISTIC.UNWEIGHTED;

	if(num_weights<0||filter_weights==null||heuristic_method==PNG_FILTER_HEURISTIC.UNWEIGHTED) num_weights=0;

	// Never read more weights than the caller actually supplied.
	if(num_weights>filter_weights?.Length) num_weights=filter_weights.Length;

	// num_prev_filters is a byte; clamp instead of letting the cast below
	// silently truncate (e.g. 256 -> 0), which would leave the count
	// inconsistent with the array lengths.
	if(num_weights>byte.MaxValue) num_weights=byte.MaxValue;

	num_prev_filters=(byte)num_weights;
	this.heuristic_method=heuristic_method;

	if(num_weights>0)
	{
		// Reallocate when a previous call created shorter arrays; writing
		// num_weights entries into them would otherwise throw.
		if(prev_filters==null||prev_filters.Length<num_weights)
		{
			prev_filters=new byte[num_weights];

			// To make sure that the weighting starts out fairly
			for(int i=0; i<num_weights; i++) prev_filters[i]=255;
		}

		if(this.filter_weights==null||inv_filter_weights==null||this.filter_weights.Length<num_weights)
		{
			this.filter_weights=new ushort[num_weights];
			inv_filter_weights=new ushort[num_weights];
			for(int i=0; i<num_weights; i++) inv_filter_weights[i]=this.filter_weights[i]=PNG.WEIGHT_FACTOR;
		}

		for(int i=0; i<num_weights; i++)
		{
			if(filter_weights[i]<0.0)
			{ // negative weight: reset this slot to neutral
				inv_filter_weights[i]=this.filter_weights[i]=PNG.WEIGHT_FACTOR;
			}
			else
			{ // store the weight and its reciprocal, rounded to nearest
				inv_filter_weights[i]=(ushort)(PNG.WEIGHT_FACTOR*filter_weights[i]+0.5);
				this.filter_weights[i]=(ushort)(PNG.WEIGHT_FACTOR/filter_weights[i]+0.5);
			}
		}
	}

	// If, in the future, there are other filter methods, this would
	// need to be based on png_ptr->filter.
	if(this.filter_costs==null)
	{
		this.filter_costs=new ushort[(int)PNG_FILTER_VALUE.LAST];
		inv_filter_costs=new ushort[(int)PNG_FILTER_VALUE.LAST];
		for(int i=0; i<(int)PNG_FILTER_VALUE.LAST; i++) inv_filter_costs[i]=this.filter_costs[i]=PNG.COST_FACTOR;
	}

	// Here is where we set the relative costs of the different filters. We
	// should take the desired compression level into account when setting
	// the costs, so that Paeth, for instance, has a high relative cost at low
	// compression levels, while it has a lower relative cost at higher
	// compression settings. The filter types are in order of increasing
	// relative cost, so it would be possible to do this with an algorithm.
	for(int i=0; i<(int)PNG_FILTER_VALUE.LAST; i++)
	{
		// A short caller-supplied array is treated as "no cost given" for
		// the missing entries rather than throwing.
		if(filter_costs==null||i>=filter_costs.Length||filter_costs[i]<0.0)
		{
			inv_filter_costs[i]=this.filter_costs[i]=PNG.COST_FACTOR;
		}
		else if(filter_costs[i]>=1.0)
		{
			inv_filter_costs[i]=(ushort)(PNG.COST_FACTOR/filter_costs[i]+0.5);
			this.filter_costs[i]=(ushort)(PNG.COST_FACTOR*filter_costs[i]+0.5);
		}
		// Costs in (0.0, 1.0) are deliberately ignored: any previously
		// configured value for that filter type is kept (libpng behavior).
	}
}
// This allows us to influence the way in which libpng chooses the "best"
// filter for the current scanline. While the "minimum-sum-of-absolute-
// differences" metric is relatively fast and effective, there is some
// question as to whether it can be improved upon by trying to keep the
// filtered data going to zlib more consistent, hopefully resulting in
// better compression.
//
// heuristic_method: heuristic to use; DEFAULT maps to UNWEIGHTED, values
//                   >= LAST are rejected with a debug message.
// num_weights:      number of previous scanlines whose filter choices are
//                   weighted (forced to 0 when negative, when filter_weights
//                   is null, or when the heuristic is UNWEIGHTED).
// filter_weights:   per-scanline weights; negative entries reset to neutral.
// filter_costs:     per-filter-type relative costs; see notes in the body.
public void png_set_filter_heuristics(PNG_FILTER_HEURISTIC heuristic_method, int num_weights, double[] filter_weights, double[] filter_costs)
{
    // Reject anything outside the known heuristic range.
    if (heuristic_method >= PNG_FILTER_HEURISTIC.LAST)
    {
        Debug.WriteLine("Unknown filter heuristic method");
        return;
    }

    if (heuristic_method == PNG_FILTER_HEURISTIC.DEFAULT)
    {
        heuristic_method = PNG_FILTER_HEURISTIC.UNWEIGHTED;
    }

    // Weighting only makes sense with a weight table and a weighted heuristic.
    if (num_weights < 0 || filter_weights == null || heuristic_method == PNG_FILTER_HEURISTIC.UNWEIGHTED)
    {
        num_weights = 0;
    }

    // NOTE(review): the (byte) cast truncates num_weights > 255 (e.g. 256 -> 0),
    // leaving the count inconsistent with the array lengths — confirm callers
    // never pass such values.
    num_prev_filters = (byte)num_weights;
    this.heuristic_method = heuristic_method;

    if (num_weights > 0)
    {
        // NOTE(review): these arrays are allocated only when null; a later
        // call with a larger num_weights would index past the old, shorter
        // arrays — confirm the API is meant to be configured once.
        if (prev_filters == null)
        {
            prev_filters = new byte[num_weights];

            // To make sure that the weighting starts out fairly
            for (int i = 0; i < num_weights; i++)
            {
                prev_filters[i] = 255;
            }
        }

        if (this.filter_weights == null)
        {
            this.filter_weights = new ushort[num_weights];
            inv_filter_weights = new ushort[num_weights];
            // Start every slot at the neutral weight.
            for (int i = 0; i < num_weights; i++)
            {
                inv_filter_weights[i] = this.filter_weights[i] = PNG.WEIGHT_FACTOR;
            }
        }

        for (int i = 0; i < num_weights; i++)
        {
            if (filter_weights[i] < 0.0)
            {
                // Negative weight: reset this slot to neutral.
                inv_filter_weights[i] = this.filter_weights[i] = PNG.WEIGHT_FACTOR;
            }
            else
            {
                // Store the weight and its reciprocal, rounded to nearest.
                inv_filter_weights[i] = (ushort)(PNG.WEIGHT_FACTOR * filter_weights[i] + 0.5);
                this.filter_weights[i] = (ushort)(PNG.WEIGHT_FACTOR / filter_weights[i] + 0.5);
            }
        }
    }

    // If, in the future, there are other filter methods, this would
    // need to be based on png_ptr->filter.
    if (this.filter_costs == null)
    {
        this.filter_costs = new ushort[(int)PNG_FILTER_VALUE.LAST];
        inv_filter_costs = new ushort[(int)PNG_FILTER_VALUE.LAST];
        for (int i = 0; i < (int)PNG_FILTER_VALUE.LAST; i++)
        {
            inv_filter_costs[i] = this.filter_costs[i] = PNG.COST_FACTOR;
        }
    }

    // Here is where we set the relative costs of the different filters. We
    // should take the desired compression level into account when setting
    // the costs, so that Paeth, for instance, has a high relative cost at low
    // compression levels, while it has a lower relative cost at higher
    // compression settings. The filter types are in order of increasing
    // relative cost, so it would be possible to do this with an algorithm.
    for (int i = 0; i < (int)PNG_FILTER_VALUE.LAST; i++)
    {
        // NOTE(review): filter_costs is indexed up to PNG_FILTER_VALUE.LAST
        // with no length check — confirm callers always supply that many entries.
        if (filter_costs == null || filter_costs[i] < 0.0)
        {
            inv_filter_costs[i] = this.filter_costs[i] = PNG.COST_FACTOR;
        }
        else if (filter_costs[i] >= 1.0)
        {
            inv_filter_costs[i] = (ushort)(PNG.COST_FACTOR / filter_costs[i] + 0.5);
            this.filter_costs[i] = (ushort)(PNG.COST_FACTOR * filter_costs[i] + 0.5);
        }
        // Costs in (0.0, 1.0) fall through: any previously configured value
        // for that filter type is kept.
    }
}