/// <summary>
/// Builds the runtime argument for a quantized element-wise add layer from the
/// calibration statistics recorded in <paramref name="context"/>.
/// </summary>
/// <param name="layer">The quantized add layer to convert.</param>
/// <param name="context">Conversion context holding per-tensor quantization distributions.</param>
/// <returns>The populated <see cref="QuantizedAddLayerArgument"/>.</returns>
public QuantizedAddLayerArgument Convert(QuantizedAdd layer, ConvertContext context)
{
    // Global calibration ranges of both inputs and the output tensor.
    var inputARange = context.Quantization.Distributions[layer.InputA.Connection.From].Global;
    var inputBRange = context.Quantization.Distributions[layer.InputB.Connection.From].Global;
    var outputRange = context.Quantization.Distributions[layer.Output].Global;
    // 8-bit affine (scale, bias) parameters per tensor.
    (var sa, var ba) = inputARange.GetScaleBias(8);
    (var sb, var bb) = inputBRange.GetScaleBias(8);
    (var so, var bo) = outputRange.GetScaleBias(8);
    // NOTE(review): the multipliers are deliberately crossed — A uses B's scale and
    // B uses A's, presumably to bring both addends onto the common sa*sb scale
    // before the output rescale so/(sa*sb). Confirm against the kernel implementation.
    (var mulA, var shiftA) = Quantizer.ExtractValueAndShift(sb, 32, 32);
    (var mulB, var shiftB) = Quantizer.ExtractValueAndShift(sa, 32, 32);
    (var mulO, var shiftO) = Quantizer.ExtractValueAndShift(so / (sa * sb), 32, 32);
    return (new QuantizedAddLayerArgument
    {
        InputAOffset = (int)ba,
        InputAMul = (int)Math.Round(mulA),
        InputAShift = shiftA,
        InputBOffset = (int)bb,
        InputBMul = (int)Math.Round(mulB),
        InputBShift = shiftB,
        OutputOffset = (int)(-bo),
        OutputMul = (int)Math.Round(mulO),
        OutputShift = shiftO,
        Count = (uint)(layer.Output.Dimensions.GetSize())
    });
}
/// <summary>
/// Quantizes every <c>float</c> field of <paramref name="serverVariables"/> and of its
/// nested <c>speeds</c> member in place, using <c>Quantizer.Quantize(value, 100)</c>
/// (presumably rounding to two decimal places — confirm against Quantizer).
/// </summary>
/// <param name="serverVariables">Settings instance to quantize; updated via the ref parameter.</param>
public static void Quantize(ref PilotSettings serverVariables)
{
    // Box once so SetValue mutates a single copy even when these are value types.
    object settingsObject = serverVariables;
    object speedSettingsObject = serverVariables.speeds;

    // FIX: use the FieldInfo returned by GetFields() directly instead of a second
    // reflection lookup by name (the original called GetField(varibles[i].Name)
    // redundantly for every field).
    foreach (var field in typeof(PilotSettings).GetFields())
    {
        if (field.FieldType == typeof(float))
        {
            field.SetValue(settingsObject, Quantizer.Quantize((float)field.GetValue(settingsObject), 100));
        }
    }

    foreach (var field in typeof(PilotSettings.Speeds).GetFields())
    {
        if (field.FieldType == typeof(float))
        {
            field.SetValue(speedSettingsObject, Quantizer.Quantize((float)field.GetValue(speedSettingsObject), 100));
        }
    }

    // Unbox the mutated copies back into the caller's variable.
    serverVariables = (PilotSettings)settingsObject;
    serverVariables.speeds = (PilotSettings.Speeds)speedSettingsObject;
}
/// <summary>
/// Encodes the captures into a single packet and prepares for its transmission.
/// </summary>
/// <param name="id">Client Id</param>
/// <param name="screenImg">Screenshot</param>
/// <param name="logStream">Key &amp; Mouse event log stream</param>
/// <param name="isSingleCapture">Live stream or buffered stream</param>
public void EncodeCapture(int id, Bitmap screenImg, Stream logStream, bool isSingleCapture)
{
    // Shrink the screenshot first, then reduce its palette with the configured quantizer.
    Quantizer oq = this.GetQuantizer();//new OctreeQuantizer(this.CaptureQuantizePalette, this.CaptureQuantizeDepth);
    screenImg = oq.Quantize(ScreenSnap.ShrinkBitmap(screenImg, this.CaptureShrinkFactor));
    // Serialize the quantized image as PNG and hand the packet to the stream-aware overload.
    this.EncodeCapture(id, new CapturePacket(ScreenSnap.SnapshotToStream(screenImg, System.Drawing.Imaging.ImageFormat.Png), logStream), isSingleCapture);
}
/// <summary>
/// Builds the runtime argument for a quantized 2-D max-pool layer: geometry comes
/// from the layer itself, and the single output rescale (so / sa) from calibration.
/// </summary>
/// <param name="layer">The quantized max-pool layer to convert.</param>
/// <param name="context">Conversion context holding per-tensor quantization distributions.</param>
/// <returns>The populated <see cref="QuantizedMaxPool2dLayerArgument"/>.</returns>
public QuantizedMaxPool2dLayerArgument Convert(QuantizedMaxPool2d layer, ConvertContext context)
{
    var inputRange = context.Quantization.Distributions[layer.Input.Connection.From].Global;
    var outputRange = context.Quantization.Distributions[layer.Output].Global;
    (var sa, var ba) = inputRange.GetScaleBias(8);
    (var so, var bo) = outputRange.GetScaleBias(8);
    // Max-pool only reorders values, so a single input->output rescale suffices.
    (var mulO, var shiftO) = Quantizer.ExtractValueAndShift(so / sa, 32, 32);
    return (new QuantizedMaxPool2dLayerArgument
    {
        // Dimensions layout appears to be NCHW: [_, channels, height, width].
        InputWidth = (uint)layer.Input.Dimensions[3],
        InputHeight = (uint)layer.Input.Dimensions[2],
        InputChannels = (uint)layer.Input.Dimensions[1],
        OutputWidth = (uint)layer.Output.Dimensions[3],
        OutputHeight = (uint)layer.Output.Dimensions[2],
        OutputChannels = (uint)layer.Output.Dimensions[1],
        KernelWidth = (uint)layer.FilterWidth,
        KernelHeight = (uint)layer.FilterHeight,
        StrideWidth = (uint)layer.StrideWidth,
        StrideHeight = (uint)layer.StrideHeight,
        // Padding is recovered from the input/output geometry (dilation = 1).
        PaddingWidth = (uint)Layer.GetPadding(layer.Input.Dimensions[3], layer.Output.Dimensions[3], layer.StrideWidth, 1, layer.FilterWidth),
        PaddingHeight = (uint)Layer.GetPadding(layer.Input.Dimensions[2], layer.Output.Dimensions[2], layer.StrideHeight, 1, layer.FilterHeight)
    });
}
/// <summary>
/// Computes the 8-bit (scale, bias) pair for an input range and stores the
/// fixed-point form of the bias (24-bit value, shift limit 15) in the layer
/// config's ArgW / ShiftW fields.
/// </summary>
/// <param name="range">Calibrated value range of the input.</param>
/// <param name="config">Layer configuration to receive the bias registers.</param>
/// <returns>The (scale, bias) pair derived from <paramref name="range"/>.</returns>
private static (double scale, double bias) QuantizeInput(QuantizationRange range, K210ConvLayerConfig config)
{
    (var inputScale, var inputBias) = range.GetScaleBias(8);
    (var biasValue, var biasShift) = Quantizer.ExtractValueAndShift(inputBias, 24, 15);
    config.ArgW = (int)Math.Round(biasValue);
    config.ShiftW = biasShift;
    return (inputScale, inputBias);
}
/// <summary>
/// Writes to the specified stream.
/// </summary>
/// <param name="writer">The writer.</param>
/// <param name="animation">The animation.</param>
/// <returns>True if it writes successfully, false otherwise.</returns>
public override bool Write(BinaryWriter writer, Animation animation)
{
    // Quantize only the first frame here; LoadAnimation quantizes the remaining frames.
    var Quantized = Quantizer.Quantize(animation[0], Quality);
    LoadAnimation(animation, Quantized);
    WriteToFile(writer);
    return (true);
}
/// <summary>
/// Writes to the specified stream.
/// </summary>
/// <param name="stream">The stream.</param>
/// <param name="image">The image.</param>
/// <returns>True if it writes successfully, false otherwise.</returns>
public override bool Write(BinaryWriter stream, Image image)
{
    // Reduce the image to a palette at the configured quality, then serialize.
    var Quantized = Quantizer.Quantize(image, Quality);
    LoadImage(image, Quantized);
    WriteToFile(stream);
    return (true);
}
/// <summary> Creates a ROIScaler object. The Quantizer is the source of data to
/// scale.
///
/// <p>The ROI Scaler creates a ROIMaskGenerator depending on what ROI
/// information is in the ParameterList. If only rectangular ROI are used,
/// the fast mask generator for rectangular ROI can be used.</p>
///
/// </summary>
/// <param name="src">The source of data to scale
///
/// </param>
/// <param name="pl">The parameter list (or options).
///
/// </param>
/// <param name="encSpec">The encoder specifications for addition of roi specs
///
/// </param>
/// <exception cref="IllegalArgumentException">If an error occurs while parsing
/// the options in 'pl'
///
/// </exception>
public static ROIScaler createInstance(Quantizer src, ParameterList pl, EncoderSpecs encSpec)
{
    System.Collections.ArrayList roiVector = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
    ROIMaskGenerator maskGen = null;

    // Check parameters
    pl.checkList(OPT_PREFIX, CSJ2K.j2k.util.ParameterList.toNameArray(pinfo));

    // Get parameters and check if there are any ROIs specified
    System.String roiopt = pl.getParameter("Rroi");
    if (roiopt == null)
    {
        // No ROIs specified! Create ROIScaler with no mask generator
        return (new ROIScaler(src, null, false, -1, false, encSpec));
    }

    // Check if the lowest resolution levels should belong to the ROI
    int sLev = pl.getIntParameter("Rstart_level");
    // Check if the ROIs are block-aligned
    bool useBlockAligned = pl.getBooleanParameter("Ralign");
    // Check if generic mask generation is specified
    bool onlyRect = !pl.getBooleanParameter("Rno_rect");

    // Parse the ROIs
    parseROIs(roiopt, src.NumComps, roiVector);
    ROI[] roiArray = new ROI[roiVector.Count];
    roiVector.CopyTo(roiArray);

    // If onlyRect has been forced, check if there are any non-rectangular
    // ROIs specified. Currently, only the presence of circular ROIs will
    // make this false
    if (onlyRect)
    {
        for (int i = roiArray.Length - 1; i >= 0; i--)
        {
            if (!roiArray[i].rect)
            {
                onlyRect = false;
                break;
            }
        }
    }

    if (onlyRect)
    {
        // It's possible to use the fast ROI mask generation when only
        // rectangular ROIs are specified.
        maskGen = new RectROIMaskGenerator(roiArray, src.NumComps);
    }
    else
    {
        // It's necessary to use the generic mask generation
        maskGen = new ArbROIMaskGenerator(roiArray, src.NumComps, src);
    }
    return (new ROIScaler(src, maskGen, true, sLev, useBlockAligned, encSpec));
}
/// <summary>
/// Computes CLD descriptor data for every loaded image, quantizes the colour
/// values with the quantizer selected in the UI, and writes one feature file
/// per image through <paramref name="fsw"/>.
/// </summary>
/// <param name="cldHandler">Handler that extracts CLD colour information from a bitmap.</param>
/// <param name="fsw">Writer used to persist the per-image feature vectors.</param>
private void writeDCDInfo(CLDHandler cldHandler, FileSystemWriter fsw)
{
    foreach (KeyValuePair<string, Bitmap> image in images)
    {
        cldHandler.calcDescriptorInfo(image.Value);

        // Derive the bare image name ("C:\dir\img.png" -> "img"); note this keeps
        // only the part before the FIRST dot, matching the original behavior.
        string[] pathParts = image.Key.Split('\\');
        string imageNameAndExt = pathParts[pathParts.Length - 1];
        string imageName = imageNameAndExt.Split('.').ElementAt(0);

        List<int[]> rgbList = cldHandler.RGBValues;
        Quantizer q = null;
        if (Reduce16PositionsCheck.Checked)
        {
            rgbList = cldHandler.getReduce64To16Cells(rgbList);
        }

        // Select the quantizer and bin count from the checked radio button.
        if (hsl_15_rb.Checked) { q = getQuantizer("HSV_QUANTIZER"); q.process_cld(Quantizer.BINS.BINS_15, rgbList); }
        else if (hsl_27_rb.Checked) { q = getQuantizer("HSV_QUANTIZER"); q.process_cld(Quantizer.BINS.BINS_27, rgbList); }
        else if (hsl_48_rb.Checked) { q = getQuantizer("HSV_QUANTIZER"); q.process_cld(Quantizer.BINS.BINS_48, rgbList); }
        else if (rgb_27_rb.Checked) { q = getQuantizer("COLOUR_27_QUANTIZER"); q.process_cld(Quantizer.BINS.BINS_27, rgbList); }
        else if (rgb_64_rb.Checked) { q = getQuantizer("COLOUR_27_QUANTIZER"); q.process_cld(Quantizer.BINS.BINS_64, rgbList); }
        else if (lab_x_rb.Checked) { q = getQuantizer("RGB_HISTOGRAM_QUANTIZER"); }

        // FIX: the original dereferenced q unconditionally, throwing a bare
        // NullReferenceException when no quantizer radio button was checked.
        if (q == null)
        {
            throw new InvalidOperationException("No quantizer selected - check one of the quantizer radio buttons before running CLD.");
        }

        fsw.writeFile(imageName, q.getList(Quantizer.FEATURE_VECTOR.POSITION));
    }
    // close the connection
    fsw.closeFile();
    MessageBox.Show("CLD DONE!");
}
/// <summary>
/// Quantizes <paramref name="image"/> using the quantizer (and optional ditherer)
/// configured in <paramref name="settings"/>.
/// </summary>
/// <param name="image">Source bitmap to quantize.</param>
/// <param name="settings">Quantization settings: quantizer, optional ditherer.</param>
/// <returns>The per-pixel palette indices and the resulting palette.</returns>
public static (IEnumerable<int> indeces, IList<Color> palette) Quantize(Bitmap image, QuantizeImageSettings settings)
{
    Setup(image, settings);
    var pixelColors = Decompose(image);
    // The ditherer takes precedence when present; otherwise (or when it yields
    // null) the plain quantizer produces the indices.
    var ditherer = settings.Ditherer;
    var paletteIndices = (ditherer != null ? ditherer.Process(pixelColors) : null)
                         ?? settings.Quantizer.Process(pixelColors);
    return (paletteIndices, settings.Quantizer.GetPalette());
}
/// <summary>
/// Smoke test: trains centroids on a generated circle trajectory and derives an
/// observation sequence from it. No assertions — passes if no exception is thrown.
/// </summary>
public void CircleTest()
{
    var circle = Circle();
    var quantizer = new Quantizer(8); // 8 quantization levels/centroids
    quantizer.trainCenteroids(circle);
    var sequence = quantizer.getObservationSequence(circle);
}
/// <summary>
/// Builds the argument for a requantize layer: a lookup table mapping values
/// quantized against the input range onto the output range.
/// </summary>
/// <param name="layer">The requantize layer to convert.</param>
/// <param name="context">Conversion context holding per-tensor quantization distributions.</param>
/// <returns>The populated <see cref="RequantizeLayerArgument"/>.</returns>
public RequantizeLayerArgument Convert(Requantize layer, ConvertContext context)
{
    var inputRange = context.Quantization.Distributions[layer.Input.Connection.From].Global;
    var outputRange = context.Quantization.Distributions[layer.Output].Global;
    var argument = new RequantizeLayerArgument();
    argument.Count = (uint)layer.Input.Dimensions.GetSize();
    argument.Table = Quantizer.GetRequantizeTable(inputRange, outputRange);
    return argument;
}
/// <summary>
/// Encodes a GDI image to a JPEG2000 codestream in memory, assembling the full
/// CSJ2K encoder pipeline: tiler -> component transform -> converter -> forward
/// wavelet -> quantizer -> ROI scaler -> entropy coder -> rate allocator.
/// </summary>
/// <param name="jpgImage">Source image to encode.</param>
/// <returns>The encoded codestream bytes.</returns>
public static byte[] EncodeJPEG(Image jpgImage)
{
    Tiler imgtiler;
    ForwCompTransf fctransf;
    ImgDataConverter converter;
    EncoderSpecs encSpec;
    ForwardWT dwt;
    Quantizer quant;
    ROIScaler rois;
    EntropyCoder ecoder;
    PostCompRateAllocator ralloc;
    HeaderEncoder headenc;
    CodestreamWriter bwriter;
    // No target rate: effectively lossless/unconstrained allocation.
    float rate = Single.MaxValue;
    ImgReaderGDI imgsrc = new ImgReaderGDI(jpgImage);
    // Single tile covering the whole image (no tiling offsets).
    imgtiler = new Tiler(imgsrc, 0, 0, 0, 0, jpgImage.Width, jpgImage.Height);
    int ntiles = imgtiler.getNumTiles();
    encSpec = new EncoderSpecs(ntiles, 3, imgsrc, pl);
    fctransf = new ForwCompTransf(imgtiler, encSpec);
    converter = new ImgDataConverter(fctransf);
    dwt = ForwardWT.createInstance(converter, pl, encSpec);
    quant = Quantizer.createInstance(dwt, encSpec);
    rois = ROIScaler.createInstance(quant, pl, encSpec);
    ecoder = EntropyCoder.createInstance(rois, pl, encSpec.cblks, encSpec.pss, encSpec.bms, encSpec.mqrs, encSpec.rts, encSpec.css, encSpec.sss, encSpec.lcs, encSpec.tts);
    using (MemoryStream stream = new MemoryStream())
    {
        bwriter = new FileCodestreamWriter(stream, Int32.MaxValue);
        ralloc = PostCompRateAllocator.createInstance(ecoder, pl, rate, bwriter, encSpec);
        headenc = new HeaderEncoder(imgsrc, new bool[3], dwt, imgtiler, encSpec, rois, ralloc, pl);
        ralloc.HeaderEncoder = headenc;
        // The main header is encoded twice on purpose: once before the allocator
        // is initialized, then reset and re-encoded — presumably so the allocator
        // sees the final header size. TODO(review): confirm against CSJ2K docs.
        headenc.encodeMainHeader();
        ralloc.initialize();
        headenc.reset();
        headenc.encodeMainHeader();
        bwriter.commitBitstreamHeader(headenc);
        ralloc.runAndWrite();
        bwriter.close();
        return (stream.ToArray());
    }
}
/// <summary>
/// Generates documentation screenshots: runs the same keypointed test image
/// through four quantizer implementations (simple threshold, adaptive threshold,
/// warp-only, morphological) and saves each result as a PNG.
/// Not a real test — no assertions; it exists to produce images.
/// </summary>
public void GenerateImagesForDoc()
{
    //var testData0500 = TestDataFactory.Data.Single(x => x.ImageKey == "0500");
    //var testData0413 = TestDataFactory.Data.Single(x => x.ImageKey == "0413");
    // Local source image with hand-picked keypoints (corner markers).
    var image = new Mat(@"C:\Users\Winkler\Desktop\orig.png", LoadImageType.AnyColor);
    var keypoints = new List<Point> { new Point(140, 116), new Point(477, 120), new Point(163, 370), new Point(447, 369) };
    var testData0500 = new { Keypoints = keypoints, Image = image };
    var configMock = TestHelper.GetFakeConfig();
    //var quantizer = new Quantizer(configMock.Object);

    // simple threshold
    IQuantizer quantizer = new SimpleThresholdQuantizer { Threshold = 220 };
    quantizer.Keypoints = testData0500.Keypoints;
    var quantizedImage = quantizer.Quantize(testData0500.Image);
    //TestHelper.Show(quantizedImage);
    TestHelper.Save(quantizedImage, "threshold_simple.png");

    // adaptive threshold
    quantizer = new Quantizer(configMock.Object) { ThresholdBlockSize = 17, ThresholdConstant = 6 };
    quantizer.Keypoints = testData0500.Keypoints;
    quantizedImage = quantizer.Quantize(testData0500.Image);
    //TestHelper.Show(quantizedImage);
    TestHelper.Save(quantizedImage, "threshold_adaptive.png");

    // warp
    quantizer = new WarpOnlyQuantizer();
    quantizer.Keypoints = testData0500.Keypoints;
    quantizedImage = quantizer.Quantize(testData0500.Image);
    //TestHelper.Show(quantizedImage);
    TestHelper.Save(quantizedImage, "warp_result.png");

    // morphological
    quantizer = new MorphologyQuantizer(configMock.Object) { ThresholdBlockSize = 17, ThresholdConstant = 6 };
    quantizer.Keypoints = testData0500.Keypoints;
    quantizedImage = quantizer.Quantize(testData0500.Image);
    //TestHelper.Show(quantizedImage);
    TestHelper.Save(quantizedImage, "opening_result.png");
}
/// <summary>
/// Verifies that Quantize maps a value to a bucket index given thresholds
/// 0.0 / 0.1 / 0.3: values below the first threshold map to 0, and values at or
/// above threshold k map to k + 1 (values past the last threshold saturate at 3).
/// </summary>
public void TestQuantize()
{
    Quantizer q = new Quantizer(0.0, 0.1, 0.3);
    Assert.AreEqual(0, q.Quantize(-0.1));   // below all thresholds
    Assert.AreEqual(1, q.Quantize(0.0));    // exactly at the first threshold
    Assert.AreEqual(1, q.Quantize(0.03));
    Assert.AreEqual(2, q.Quantize(0.1));    // exactly at the second threshold
    Assert.AreEqual(2, q.Quantize(0.13));
    Assert.AreEqual(3, q.Quantize(0.3));    // exactly at the last threshold
    Assert.AreEqual(3, q.Quantize(0.33));
    Assert.AreEqual(3, q.Quantize(1000.0)); // saturates at the top bucket
}
/// <summary>
/// Quantizes convolution weights to <paramref name="weightsBits"/> bits. With
/// CHANNEL_WISE defined, each output channel is first rescaled toward the global
/// range (mutating the weight buffer in place) so a single (scale, bias) pair can
/// be shared, and the per-channel scale factors are returned; otherwise a single
/// global scale is used for all channels.
/// </summary>
/// <param name="isConv2d">True for conv2d (channels on dim 0), false for e.g. depthwise (dim 1).</param>
/// <param name="weights">Weight tensor; its dense buffer is modified in place.</param>
/// <param name="config">Layer config receiving quantized weights and bias registers (ArgX/ShiftX).</param>
/// <param name="weightsBits">Bit width of the quantized weights.</param>
/// <returns>Per-channel scales and the shared bias.</returns>
public static (double[] scale, double bias) QuantizeWeights(bool isConv2d, Tensor<float> weights, K210ConvLayerConfig config, int weightsBits)
{
#if CHANNEL_WISE
    var kernels = weights.ToDenseTensor().Buffer.Span;
    var channels = weights.Dimensions[isConv2d ? 0 : 1];
    var channelSize = weights.Dimensions.GetSize() / channels;
    var totalRange = Quantizer.GetRange(kernels);
    var scales = new double[channels];
    for (int i = 0; i < channels; i++)
    {
        double s;
        var buffer = kernels.Slice(i * channelSize, channelSize);
        var range = Quantizer.GetRange(buffer);
        // Candidate scales to stretch this channel's range to the global range;
        // a negative candidate means the corresponding bound has opposite sign,
        // so fall back to the other one.
        var s1 = totalRange.Max / range.Max;
        var s2 = totalRange.Min / range.Min;
        s = (s1 < 0 || s2 < 0) ? Math.Max(s1, s2) : Math.Min(s1, s2);
        Debug.Assert(s > 0);
        // Rescale this channel in place so all channels share one quantization grid.
        for (int j = 0; j < buffer.Length; j++)
        {
            buffer[j] = (float)(buffer[j] * s);
        }
        scales[i] = s;
    }
    (var scale, var bias) = Quantizer.GetRange(kernels).GetScaleBias(weightsBits);
    // Fixed-point bias: 24-bit value, shift limited to 15.
    (var mul, var shift) = Quantizer.ExtractValueAndShift(bias, 24, 15);
    config.Weights = Quantizer.Quantize(kernels, scale, bias, weightsBits);
    config.ArgX = (int)Math.Round(mul);
    config.ShiftX = shift;
    // Fold the shared scale into the per-channel factors for the caller.
    for (int i = 0; i < scales.Length; i++)
    {
        scales[i] *= scale;
    }
    return (scales, bias);
#else
    var buffer = weights.ToDenseTensor().Buffer.Span;
    (var scale, var bias) = GetRange(buffer).GetScaleBias();
    (var mul, var shift) = ExtractValueAndShift(bias, 24, 15);
    config.Weights = Quantize(buffer, scale, bias);
    config.ArgX = (int)Math.Round(mul);
    config.ShiftX = shift;
    // Same global scale reported for every output channel.
    return (Enumerable.Repeat(scale, weights.Dimensions[0]).ToArray(), bias);
#endif
}
/// <summary> Constructor of the ROI scaler, takes a Quantizer as source of data to
/// scale.
///
/// </summary>
/// <param name="src">The quantizer that is the source of data.
///
/// </param>
/// <param name="mg">The mask generator that will be used for all components
///
/// </param>
/// <param name="roi">Flag indicating whether there are rois specified.
///
/// </param>
/// <param name="sLev">The resolution levels that belong entirely to ROI
///
/// </param>
/// <param name="uba">Flag indicating whether block aligning is used.
///
/// </param>
/// <param name="encSpec">The encoder specifications for addition of roi specs
///
/// </param>
public ROIScaler(Quantizer src, ROIMaskGenerator mg, bool roi, int sLev, bool uba, EncoderSpecs encSpec)
    : base(src)
{
    this.src = src;
    this.roi = roi;
    this.useStartLevel = sLev;
    if (roi)
    {
        // If there is no ROI, no need to do this
        this.mg = mg;
        roiMask = new DataBlkInt();
        calcMaxMagBits(encSpec);
        blockAligned = uba;
    }
}
/// <summary>
/// Round-trip demo: runs the test data through a DCT, quantizes and
/// inverse-quantizes the luminance coefficients in place, then inverts the DCT,
/// printing each stage. No assertions — output is inspected manually.
/// </summary>
public static void TestDCT_Quantize()
{
    Dct dct = new Dct();
    print("DCT Test");
    var d = dct.Go(testdata);
    print("Quantized dct");
    Quantizer.QuantizeLuminance(ref d); // mutates d in place
    d.PrintArray();
    print("Quantized Inverse");
    Quantizer.InverseQuantizeLuminance(ref d);
    print("Dct Inverse");
    dct.GoBack(d).PrintArray();
}
/// <summary>
/// Encodes <c>bmpCore</c> as PNG into a throwaway in-memory stream, selecting the
/// octree or palette quantizer according to <c>UseOctreeQuantizer</c>.
/// </summary>
public void PngCore()
{
    using (MemoryStream outputBuffer = new MemoryStream())
    {
        Quantizer<Rgba32> selectedQuantizer;
        if (this.UseOctreeQuantizer)
        {
            selectedQuantizer = new OctreeQuantizer<Rgba32>();
        }
        else
        {
            selectedQuantizer = new PaletteQuantizer<Rgba32>();
        }
        PngEncoderOptions options = new PngEncoderOptions { Quantizer = selectedQuantizer };
        this.bmpCore.SaveAsPng(outputBuffer, options);
    }
}
/// <summary>
/// Creates a new <see cref="IQuantizer"/> implementation matching the given
/// <see cref="Quantizer"/> enum value.
/// </summary>
/// <param name="mode">The quantizer kind to instantiate.</param>
/// <returns>A fresh quantizer instance.</returns>
/// <exception cref="NotImplementedException">Thrown for unmapped enum values.</exception>
public static IQuantizer GetInstance(this Quantizer mode)
{
    switch (mode)
    {
        case Quantizer.Octree:
            return new OctreeQuantizer();
        case Quantizer.Palette:
            return new PaletteQuantizer();
        case Quantizer.Wu:
            return new WuQuantizer();
        default:
            throw new NotImplementedException();
    }
}
/// <summary>
/// Loads the animation.
/// </summary>
/// <param name="animation">The animation.</param>
/// <param name="quantizedImage">The quantized image of the first frame (frames 1..n are quantized here).</param>
private void LoadAnimation(Animation animation, QuantizedImage quantizedImage)
{
    var TempImage = animation[0];
    var TransparencyIndex = quantizedImage.TransparentIndex;
    Header = new FileHeader();
    // The screen descriptor is derived from the first frame only.
    ScreenDescriptor = new LogicalScreenDescriptor(TempImage, TransparencyIndex, BitDepth);
    Frames.Add(new Frame(TempImage, quantizedImage, BitDepth, animation.Delay));
    if (animation.Count > 1)
    {
        // Looping metadata is only emitted for multi-frame animations.
        AppExtension = new ApplicationExtension(animation.RepeatCount, animation.Count);
        for (int x = 1; x < animation.Count; ++x)
        {
            // Remaining frames are quantized on the fly, reusing the parameter variable.
            quantizedImage = Quantizer.Quantize(animation[x], Quality);
            TempImage = animation[x];
            Frames.Add(new Frame(TempImage, quantizedImage, BitDepth, animation.Delay));
        }
    }
}
/// <summary>
/// Handles the "Load image" button: lets the user pick a file, shows it in the
/// preview box, and wires up a fresh ImageStore/drawer pair with the currently
/// selected quantizer and ditherer.
/// </summary>
private void ButtonLoadImage_Click(object sender, EventArgs e)
{
    if (OpenFileDialogImageLoader.ShowDialog() == DialogResult.OK)
    {
        // Disable saving until the new quantization run completes.
        ButtonSaveImage.Enabled = false;
        Quantizer quantizer = GetQuantizer();
        IDitherer ditherer = GetDitherer();
        ProgressBarQuantization.Value = 0;
        ImagePath = OpenFileDialogImageLoader.FileName;
        LabelPath.Text = ImagePath;
        var image = new Bitmap(ImagePath);
        PictureBoxLoadedImage.Image = image;
        imageStore = new ImageStore(image, quantizer, ditherer);
        drawer = GetDrawer(imageStore);
        // Progress/completion callbacks for the asynchronous processing.
        imageStore.InitFinished += AfterInit;
        drawer.ProgressUpdate += ProgressUpdate;
    }
}
/// <summary>
/// Returns a palette-reduced copy of <paramref name="image"/> at the requested
/// quality, or the original image unchanged for <see cref="Quality.Inherit"/>.
/// </summary>
/// <param name="image">Source image.</param>
/// <param name="quality">Target colour depth; defaults to 8 bpp.</param>
public static Image Quantize(Image image, Quality quality = Quality.Bpp8)
{
    if (quality == Quality.Inherit)
    {
        return (image);
    }
    // OctreeQuantizer(maxColors, maxColorBits); grayscale uses its own quantizer.
    Quantizer quantizer = quality switch
    {
        Quality.Grayscale => new GrayscaleQuantizer(),
        Quality.Bpp1 => new OctreeQuantizer(1, 1),
        Quality.Bpp2 => new OctreeQuantizer(3, 2),
        Quality.Bpp4 => new OctreeQuantizer(15, 4),
        Quality.Bpp8 => new OctreeQuantizer(255, 8),
        _ => new OctreeQuantizer(255, 8)
    };
    return (quantizer.Quantize(image));
}
}
/// <summary>
/// Writes <paramref name="image"/> to <paramref name="stream"/> as GIF, first
/// reducing it to the requested colour depth unless quality is
/// <see cref="Quality.Inherit"/> (which saves the image as-is).
/// </summary>
/// <param name="image">Source image.</param>
/// <param name="stream">Destination stream; not closed by this method.</param>
/// <param name="quality">Target colour depth; defaults to 8 bpp.</param>
public static void CreateGifStream(Image image, Stream stream, Quality quality = Quality.Bpp8)
{
    if (quality == Quality.Inherit)
    {
        image.Save(stream, ImageFormat.Gif);
    }
    else
    {
        // OctreeQuantizer(maxColors, maxColorBits); grayscale uses its own quantizer.
        Quantizer quantizer = quality switch
        {
            Quality.Grayscale => new GrayscaleQuantizer(),
            Quality.Bpp1 => new OctreeQuantizer(1, 1),
            Quality.Bpp2 => new OctreeQuantizer(3, 2),
            Quality.Bpp4 => new OctreeQuantizer(15, 4),
            Quality.Bpp8 => new OctreeQuantizer(255, 8),
            _ => new OctreeQuantizer(255, 8)
        };
        // Dispose the intermediate bitmap as soon as it has been written out.
        using (Bitmap result = quantizer.Quantize(image))
            result.Save(stream, ImageFormat.Gif);
    }
}
/// <summary>
/// See <see cref="BaseColorDitherer.OnProcessPixel"/> for more details.
/// Applies ordered dithering: adds the matrix threshold at this pixel's position
/// to each colour channel, then writes either a palette index (indexed target)
/// or the raw colour.
/// </summary>
protected override Boolean OnProcessPixel(Pixel sourcePixel, Pixel targetPixel)
{
    // reads the source pixel
    Color oldColor = SourceBuffer.GetColorFromPixel(sourcePixel);

    // converts alpha to solid color
    oldColor = QuantizationHelper.ConvertAlpha(oldColor);

    // retrieves matrix coordinates (the threshold matrix tiles across the image)
    Int32 x = targetPixel.X % MatrixWidth;
    Int32 y = targetPixel.Y % MatrixHeight;

    // determines the threshold
    Int32 threshold = Convert.ToInt32(CachedMatrix[x, y]);

    // only process dithering if threshold is substantial; a zero threshold
    // leaves the target pixel untouched
    if (threshold > 0)
    {
        // perturb each channel and clamp back into the 0..255 range
        Int32 red = GetClampedColorElement(oldColor.R + threshold);
        Int32 green = GetClampedColorElement(oldColor.G + threshold);
        Int32 blue = GetClampedColorElement(oldColor.B + threshold);
        Color newColor = Color.FromArgb(255, red, green, blue);
        if (TargetBuffer.IsIndexed)
        {
            // indexed target: map the perturbed colour to its nearest palette entry
            Byte newPixelIndex = (Byte)Quantizer.GetPaletteIndex(newColor, targetPixel.X, targetPixel.Y);
            targetPixel.Index = newPixelIndex;
        }
        else
        {
            targetPixel.Color = newColor;
        }
    }

    // signals the caller that the pixel was processed
    return (true);
}
/// <summary>
/// Creates a classifier, storing the filter and quantizer it will use.
/// </summary>
/// <param name="filter">Filter applied by this classifier.</param>
/// <param name="quantizer">Quantizer applied by this classifier.</param>
public Classifier(Filter filter, Quantizer quantizer)
{
    m_filter = filter;
    m_quantizer = quantizer;
}
/// <summary>
/// Lets the user pick a CSV file, quantizes its first column (15 levels,
/// tolerance 1e-3) against the column's empirical distribution, and renders the
/// density curve plus quantization borders/codes into the OxyPlot model.
/// NOTE(review): this is `async void`, so exceptions thrown after the first
/// await are unobservable — consider returning Task if no UI binding requires void.
/// </summary>
public async void InitPlot( )
{
    var reader = new CsvGridReader( 1024, ';' );
    OpenFileDialog openFileDialog = new OpenFileDialog { Filter = "Text files|*.csv", ValidateNames = true };
    var column = 0; // only the first CSV column is plotted
    var fileName = openFileDialog.ShowDialog( ) == true ? openFileDialog.FileName : null;
    if ( fileName == null )
    {
        return; // user cancelled the dialog
    }
    // Read the grid off the UI thread.
    var grid = await Task<IGrid>.Factory.StartNew( ( ) => reader.Read( fileName, false, false ) );
    // Find the column's min/max to bound the quantizer and the plot.
    double left = double.MaxValue, right = double.MinValue;
    for ( int i = 0; i < grid.RowCount; ++i )
    {
        var value = grid.GetValue( i, column );
        left = left < value ? left : value;
        right = right > value ? right : value;
    }
    var quantizer = new Quantizer( left, right );
    var empirical = new EmpiricalDistribution( grid, column );
    // 15 quantization levels with 1e-3 precision, computed off the UI thread.
    var q = await Task<IQuantization>.Factory.StartNew( ( ) => quantizer.Quantize( 15, 1e-3, empirical ) );
    // Baseline (y = 0) across the data range.
    var zero = new LineSeries { Color = OxyColor.FromRgb( 0, 0, 0 ), StrokeThickness = 1 };
    zero.Points.Add( new DataPoint( left, 0 ) );
    zero.Points.Add( new DataPoint( right, 0 ) );
    plot.Series.Add( zero );
    // Empirical density curve.
    var func = new FunctionSeries( x => empirical.Density( x ), left, right, 1e-2 );
    plot.Series.Add( func );
    // Vertical dashed lines: black for interval borders, grey for code values.
    foreach ( var border in q.Borders )
    {
        var line = new LineSeries { LineStyle = LineStyle.Dash, Color = OxyColor.FromRgb( 0, 0, 0 ), StrokeThickness = 1 };
        line.Points.Add( new DataPoint( border, 3e-1 ) );
        line.Points.Add( new DataPoint( border, -3e-2 ) );
        plot.Series.Add( line );
    }
    foreach ( var code in q.Codes )
    {
        var line = new LineSeries { LineStyle = LineStyle.Dash, Color = OxyColor.FromRgb( 140, 140, 140 ), StrokeThickness = 0.5 };
        line.Points.Add( new DataPoint( code, 3e-1 ) );
        line.Points.Add( new DataPoint( code, -3e-2 ) );
        plot.Series.Add( line );
    }
    // Mark each code on the density curve.
    var codes = from code in q.Codes select new ScatterPoint( code, empirical.Density( code ) );
    var points = new ScatterSeries { MarkerType = MarkerType.Circle, MarkerStroke = OxyColor.FromRgb( 2, 133, 230 )/*( 255, 0, 0 )*/, MarkerFill = OxyColor.FromRgb( 2, 133, 230 )/*( 255, 115, 41 )*/ };
    points.Points.AddRange( codes );
    plot.Series.Add( points );
    PlotView.Model = plot;
}
/// <summary>
/// Fills the 16 hardware activation segments (config.ActConfigs) for a K210
/// conv2d layer. For trivial activations (linear/relu/relu6) a fixed segment
/// start table is used and only segment 10 rescales by 1/postMul; for LeakyRelu
/// a piecewise-linear approximation is built around the quantized zero point.
/// </summary>
/// <param name="layer">Layer whose activation is being configured.</param>
/// <param name="postMul">Post-multiplier applied upstream (2^upshift); segments divide it back out.</param>
/// <param name="range">Output calibration range.</param>
/// <param name="beforeActRange">Pre-activation range (currently unused here).</param>
/// <param name="config">Layer config receiving the 16 activation segments.</param>
private static void QuantizeActivation(K210Conv2d layer, double postMul, QuantizationRange range, QuantizationRange beforeActRange, K210ConvLayerConfig config)
{
    if (layer.NonTrivialActivation == null)
    {
        // Only activations the fixed segment table can represent are allowed.
        switch (layer.FusedActivationFunction)
        {
            case ActivationFunctionType.Linear:
            case ActivationFunctionType.Relu:
            case ActivationFunctionType.Relu6:
                break;
            default:
                throw new NotSupportedException($"Activation of {layer.FusedActivationFunction} is not supported.");
        }
        // Hardware segment start points (presumably sign-extended fixed-point X
        // coordinates — verify against the K210 datasheet).
        var starts = new ulong[] { 0x800000000, 0xf7d4cf4b8, 0xf8ed5a20c, 0xfa05e4f60, 0xfb2e05baa, 0xfc46908fe, 0xfd5f1b652, 0xfe77a63a6, 0xff9fc6ff0, 0xfffd4a9b7, 0, 0x7FFFFFFF0, 0x7FFFFFFF1, 0x7FFFFFFF2, 0x7FFFFFFF3, 0x7FFFFFFF4 };
        for (int i = 0; i < starts.Length; i++)
        {
            var param = config.ActConfigs[i] = new K210LayerActConfig();
            param.StartX = starts[i];
            if (i == 10)
            {
                // The segment starting at x = 0 undoes the upstream postMul.
                (var mul, var shift) = Quantizer.ExtractValueAndShift(1 / postMul, 16, 20);
                param.Mul = (int)Math.Round(mul);
                param.Shift = shift;
            }
        }
    }
    else if (layer.NonTrivialActivation is LeakyRelu leakyRelu)
    {
        (var scale, var bias) = range.GetScaleBias(8);
        // Quantized zero point on the postMul-scaled axis.
        var zero = (long)(Quantizer.Quantize(0, scale, bias) * postMul);
        // 14 intermediate output levels between 0 and the zero offset.
        var yTable = Generator.IntegerStep(0, (int)-bias, 15).Take(14).ToArray();
        for (int i = 0; i < 16; i++)
        {
            var param = config.ActConfigs[i] = new K210LayerActConfig();
            if (i == 0)
            {
                param.StartX = 0x800000000; // most negative segment start
            }
            else if (i == 15)
            {
                // Final segment: identity slope (1/postMul) from the zero point up.
                (var mul, var shift) = Quantizer.ExtractValueAndShift(1 / postMul, 16, 20);
                param.StartX = (ulong)zero;
                param.Mul = (int)Math.Round(mul);
                param.Shift = shift;
                param.Add = (byte)(-bias);
            }
            else
            {
                // f(x) = (1 - slope) * zero + x * slope
                // f(x1) - f(x0) = (x1 - x0) * slope
                // x0 = zero - (zero - y0) / slope
                var add = (byte)yTable[i - 1];
                var y0 = add * postMul;
                var x0 = zero - (zero - y0) / leakyRelu.Slope;
                (var mul, var shift) = Quantizer.ExtractValueAndShift(1 / postMul * leakyRelu.Slope, 16, 20);
                param.StartX = (ulong)(long)Math.Floor(x0);
                param.Mul = (int)Math.Round(mul);
                param.Shift = shift;
                param.Add = add;
            }
        }
    }
    else
    {
        throw new NotSupportedException($"Activation of {layer.NonTrivialActivation.GetType().Name} is not supported.");
    }
}
/// <summary>
/// Quantizes each grid column in parallel (within the column's observed min/max
/// and its per-column error budget), then compresses the grid with the resulting
/// quantizations and writes the archive to <paramref name="outName"/>.
/// </summary>
/// <param name="grid">Data grid to compress.</param>
/// <param name="compressor">Compressor consuming the quantized columns.</param>
/// <param name="errors">Allowed quantization error per column.</param>
/// <param name="outName">Output archive path.</param>
/// <param name="progressBar">View model updated with status and progress.</param>
/// <returns>Statistics covering borders, distributions and the compression result.</returns>
private CompressionStats Compress( IGrid grid, ICompressor compressor, double[] errors, string outName, ProgressViewModel progressBar )
{
    double[] leftBorders = new double[grid.ColumnCount];
    double[] rightBorders = new double[grid.ColumnCount];
    var qs = new IQuantization[grid.ColumnCount];
    var distrs = new IDistribution[grid.ColumnCount];
    progressBar.Status = "Quantizing columns...";
    // One task per column; each writes only its own slots, so only the progress
    // update genuinely needs the lock.
    Parallel.For( 0, grid.ColumnCount, column =>
    {
        var distr = new EmpiricalDistribution( grid, column );
        // Scan the column for its min/max.
        leftBorders[column] = double.MaxValue;
        rightBorders[column] = double.MinValue;
        for ( int row = 0; row < grid.RowCount; ++row )
        {
            double value = grid.GetValue( row, column );
            leftBorders[column] = leftBorders[column] < value ? leftBorders[column] : value;
            rightBorders[column] = rightBorders[column] > value ? rightBorders[column] : value;
        }
        var quantizer = new Quantizer( leftBorders[column], rightBorders[column] );
        var quantization = quantizer.Quantize( errors[column], distr );
        lock ( _lockGuard )
        {
            progressBar.Progress += 1.0 / ( grid.ColumnCount + 1 );
            distrs[column] = distr;
            qs[column] = quantization;
        }
    } );
    var quantizations = new List<IQuantization>( qs );
    var distributions = new List<IDistribution>( distrs );
    progressBar.Status = "Writing archive...";
    progressBar.Progress = ( double )grid.ColumnCount / ( grid.ColumnCount + 1 );
    ICompressionResult result;
    using ( var stream = new FileOutputStream( outName ) )
    {
        result = compressor.Compress( grid, quantizations, stream );
    }
    progressBar.Progress = 1.0;
    progressBar.TryClose( );
    return new CompressionStats { CompressionResult = result, Distributions = distributions, LeftBorders = leftBorders, RightBorders = rightBorders, Quantizations = quantizations };
}
/// <summary> The constructor of the arbitrary mask generator
///
/// </summary>
/// <param name="rois">The ROI info.
///
/// </param>
/// <param name="nrc">The number of components
///
/// </param>
/// <param name="src">The quantizer module
///
/// </param>
public ArbROIMaskGenerator(ROI[] rois, int nrc, Quantizer src)
    : base(rois, nrc)
{
    // One (lazily filled) mask array per component.
    roiMask = new int[nrc][];
    this.src = src;
}
/// <summary>
/// Quantizes a frame's raw pixel bytes into IndexedPixels and ColorTable using
/// the configured quantization algorithm. Two-pass quantizers (neural, median
/// cut, most-used) and fixed-palette quantizers (grayscale, palette) are cached
/// in GlobalQuantizer when the colour table is shared across frames; octree is
/// rebuilt per frame. Finally records whether the table contains the
/// transparent colour.
/// </summary>
/// <param name="pixels">Raw pixel data of the current frame.</param>
private void ReadPixels(byte[] pixels)
{
    if (QuantizationType == ColorQuantizationType.Neural)
    {
        #region Neural
        // Build (or reuse) the palette; only the first frame builds it when a
        // global colour table is in use.
        if (GlobalQuantizer == null || !UseGlobalColorTable)
        {
            GlobalQuantizer = new NeuralQuantizer(SamplingFactor, MaximumNumberColor) { MaxColors = MaximumNumberColor, TransparentColor = !IsFirstFrame || UseGlobalColorTable || UseFullTransparency ? TransparentColor : null };
            GlobalQuantizer.FirstPass(pixels);
            ColorTable = GlobalQuantizer.GetPalette();
        }
        //Indexes the pixels to the color table.
        IndexedPixels = GlobalQuantizer.SecondPass(pixels);
        #endregion
    }
    else if (QuantizationType == ColorQuantizationType.Octree)
    {
        #region Octree
        // Octree builds a fresh palette for every frame (no global caching).
        var quantizer = new OctreeQuantizer { MaxColors = MaximumNumberColor, TransparentColor = !IsFirstFrame || UseGlobalColorTable || UseFullTransparency ? TransparentColor : null };
        IndexedPixels = quantizer.Quantize(pixels);
        ColorTable = quantizer.ColorTable;
        #endregion
    }
    else if (QuantizationType == ColorQuantizationType.MedianCut)
    {
        #region Median cut
        if (GlobalQuantizer == null || !UseGlobalColorTable)
        {
            GlobalQuantizer = new MedianCutQuantizer { MaxColors = MaximumNumberColor, TransparentColor = !IsFirstFrame || UseGlobalColorTable || UseFullTransparency ? TransparentColor : null };
            GlobalQuantizer.FirstPass(pixels);
            ColorTable = GlobalQuantizer.GetPalette();
        }
        //Indexes the pixels to the color table.
        IndexedPixels = GlobalQuantizer.SecondPass(pixels);
        #endregion
    }
    else if (QuantizationType == ColorQuantizationType.Grayscale)
    {
        #region Grayscale
        //This quantizer uses a fixed palette (generated during object instantiation), so most calculations are called one time.
        if (GlobalQuantizer == null)
        {
            //Since the color table does not change among frames, it can be stored globally.
            UseGlobalColorTable = true;
            var transparent = !IsFirstFrame || UseGlobalColorTable || UseFullTransparency ? TransparentColor : null;
            GlobalQuantizer = new GrayscaleQuantizer(transparent, MaximumNumberColor) { MaxColors = MaximumNumberColor, TransparentColor = transparent };
            ColorTable = GlobalQuantizer.GetPalette();
        }
        //Each frame still needs to be quantized.
        IndexedPixels = GlobalQuantizer.SecondPass(pixels);
        #endregion
    }
    else if (QuantizationType == ColorQuantizationType.MostUsed)
    {
        #region Most used colors
        if (GlobalQuantizer == null || !UseGlobalColorTable)
        {
            GlobalQuantizer = new MostUsedQuantizer { MaxColors = MaximumNumberColor, TransparentColor = !IsFirstFrame || UseGlobalColorTable || UseFullTransparency ? TransparentColor : null };
            GlobalQuantizer.FirstPass(pixels);
            ColorTable = GlobalQuantizer.GetPalette();
        }
        //Indexes the pixels to the color table.
        IndexedPixels = GlobalQuantizer.SecondPass(pixels);
        #endregion
    }
    else
    {
        #region Palette
        //This quantizer uses a fixed palette (generated during object instantiation), so it will be only called once.
        if (GlobalQuantizer == null)
        {
            //Since the color table does not change among frames, it can be stored globally.
            UseGlobalColorTable = true;
            var transparent = !IsFirstFrame || UseGlobalColorTable || UseFullTransparency ? TransparentColor : null;
            //TODO: Pass the palette.
            //Default palettes: Windows, etc.
            //User submitted > Presets > Generate palette based on first frame.
            GlobalQuantizer = new PaletteQuantizer(new ArrayList()) { MaxColors = MaximumNumberColor, TransparentColor = transparent };
            ColorTable = GlobalQuantizer.GetPalette();
        }
        //Each frame still needs to be quantized.
        IndexedPixels = GlobalQuantizer.SecondPass(pixels);
        #endregion
    }
    //I need to signal the other method that I'll need transparency.
    ColorTableHasTransparency = TransparentColor.HasValue && ColorTable.Contains(TransparentColor.Value);
}
/// <summary>
/// Fills the per-channel batch-norm registers (Mul/Shift/Add) that fold the
/// convolution bias and the output rescale into one multiply-add, working on a
/// 2^10-upshifted axis to preserve precision, then configures the activation
/// segments that divide the upshift back out.
/// </summary>
/// <param name="layer">Layer being configured.</param>
/// <param name="bias">Per-output-channel convolution bias.</param>
/// <param name="range">Output calibration range (global or per-channel).</param>
/// <param name="beforeActRange">Pre-activation range, forwarded to activation quantization.</param>
/// <param name="scale">Per-channel weight scales from QuantizeWeights.</param>
/// <param name="config">Layer config receiving the BN entries.</param>
private static void QuantizeBiasAndOutput(K210Conv2d layer, Tensor<float> bias, ChannelwiseRange range, ChannelwiseRange beforeActRange, double[] scale, K210ConvLayerConfig config)
{
    var upshift = 10;
    var postMul = Math.Pow(2, upshift); // extra headroom removed later by the activation
    if (layer.IsChannelwiseOutput)
    {
        // Each channel has its own output (scale, bias).
        for (int i = 0; i < config.BNConfigs.Length; i++)
        {
            (var so, var bo) = range.Channels[i].GetScaleBias(8);
            var b = bias[i];
            var scomb = so * postMul / scale[i]; // combined rescale for this channel
            (var mul, var shift) = Quantizer.ExtractValueAndShift(scomb, 22, 15);
            config.BNConfigs[i] = new K210LayerBNConfig { Mul = (int)Math.Round(mul), Shift = shift, Add = (int)Math.Round((b * so - bo) * postMul) };
        }
    }
    else
    {
        (var so, var bo) = range.Global.GetScaleBias(8);
#if CHANNEL_WISE
        for (int i = 0; i < config.BNConfigs.Length; i++)
        {
            var b = bias[i];
            var scomb = so * postMul / scale[i];
            (var mul, var shift) = Quantizer.ExtractValueAndShift(scomb, 22, 15);
            config.BNConfigs[i] = new K210LayerBNConfig { Mul = (int)Math.Round(mul), Shift = shift, Add = (int)Math.Round((b * so - bo) * postMul) };
        }
#else
        // NOTE(review): this branch redeclares `postMul`, which would not compile
        // if CHANNEL_WISE were ever undefined — dead code to be confirmed/cleaned.
        var scomb = so / scale[0];
        (var mul, var shift) = ExtractValueAndShift(scomb, 22, 255);
        var upscale = shift - 15;
        Debug.Assert(upscale >= 0);
        var postMul = Math.Round(mul) / mul * Math.Pow(2, upscale);
        for (int i = 0; i < config.BNConfigs.Length; i++)
        {
            var b = bias[i];
            config.BNConfigs[i] = new K210LayerBNConfig { Mul = (int)Math.Round(mul), Shift = 15, Add = (int)Math.Round((b * so - bo) * postMul) };
        }
#endif
    }
    QuantizeActivation(layer, postMul, range.Global, beforeActRange.Global, config);
}