Example No. 1
        /// <summary>
        /// Apply sharpening to the given UMat.
        /// </summary>
        /// <param name="img">The source UMat.</param>
        /// <param name="sigma">Sigma value for sharpening.</param>
        /// <param name="amount">Amount of sharpening required.</param>
        /// <returns>
        /// Sharpened UMat image.
        /// </returns>
        /// <remarks>
        /// Used for image sharpening.
        /// </remarks>
        public static UMat getSharpened(UMat img, double sigma, double amount)
        {


            UMat dblImg     = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat dblBlurImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat outImg     = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            int k = 2 * (int)Math.Round(3.0 * sigma) + 1;

            CvInvoke.GaussianBlur(dblImg, dblBlurImg, new Size(k, k), sigma, sigma);
            CvInvoke.AddWeighted(dblImg, 1.0 + amount, dblBlurImg, -amount, 0, outImg);
            dblImg.Dispose();
            dblBlurImg.Dispose();
            img.Dispose();
            return(outImg);
        }
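
The AddWeighted call above is a classic unsharp mask: out = (1 + amount) * src - amount * GaussianBlur(src). Below is a minimal, hypothetical call site; the file names, the sigma/amount values and the Imread/GetUMat plumbing are illustrative assumptions, not code from the original project. Note that getSharpened disposes its input and returns a 64-bit float image, so the sketch converts the result back to 8-bit before saving.

            //Hypothetical usage sketch for getSharpened; names and values are examples only.
            using (Mat bgr = CvInvoke.Imread("input.png", Emgu.CV.CvEnum.ImreadModes.Color))
            using (UMat display = new UMat())
            {
                UMat source    = bgr.GetUMat(Emgu.CV.CvEnum.AccessType.Read);
                UMat sharpened = getSharpened(source, 1.5, 0.6);   //getSharpened disposes `source` itself
                sharpened.ConvertTo(display, Emgu.CV.CvEnum.DepthType.Cv8U);
                CvInvoke.Imwrite("sharpened.png", display);
                sharpened.Dispose();
            }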
        public VectorOfUMat ForwardFt(UMat img)
        {
            ///outputs a quadrant-rearranged {real, imaginary} UMat vector
            ///FT stuff, reference: https://docs.opencv.org/master/d8/d01/tutorial_discrete_fourier_transform.html
            //convert image to 32-bit float because spatial-frequency-domain values are way bigger than spatial-domain values.
            img.ConvertTo(img, DepthType.Cv32F);

            //create imaginary image of zeros of same depthType and size as image representing real plane
            UMat zeros = new UMat(img.Size, img.Depth, 1);

            zeros.SetTo(new MCvScalar(0));

            //Dft accepts 2-channel images, so we use Merge to merge our 2 1-channel images into a single 2-channel image.
            //Merge accepts object arrays, so we create a VectorOfUMat of our 2 images to feed into Merge.
            VectorOfUMat vec = new VectorOfUMat(img, zeros);            //img will be at 0 index of vector

            using (UMat cImg = new UMat()) {
                CvInvoke.Merge(vec, cImg);
                zeros.Dispose();                              // TODO: fix this bad programming and other instances of it.
                CvInvoke.Dft(cImg, cImg, DxtType.Forward, 0); //use back the same image memory
                SwitchQuadrants(cImg);
                CvInvoke.Split(cImg, vec);
            }

            //make the 2-channel array into 2 1-channel arrays
            return(vec);            //[0] index contains the real values and [1] index the imaginary values
        }
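
A minimal usage sketch for ForwardFt follows; the `input` UMat is an illustrative assumption. Since the method returns the real and imaginary planes, CartToPolar converts them into the magnitude and phase spectra one would normally display.

            //Hypothetical usage; `input` is an illustrative 1-channel UMat.
            using (VectorOfUMat reIm = ForwardFt(input))
            using (UMat mag = new UMat())
            using (UMat phase = new UMat())
            {
                CvInvoke.CartToPolar(reIm[0], reIm[1], mag, phase);   //magnitude and phase spectra
                //mag can now be log-scaled and normalized for display
            }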
Example No. 3
        public static UMat getContrastAdjusted(ref UMat img, double cont1, double cont2)
        {
            UMat dblImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat outImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            //outImg = (UMat)ev*dblImg;
            CvInvoke.AddWeighted(dblImg, cont1, dblImg, 0, cont1 * (-128) + cont2 + 128, outImg);
            //CvInvoke.cvConvertScale(dblImg, outImg, ev,0);
            dblImg.Dispose();
            img.Dispose();
            return(outImg);
        }
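
For reference, the AddWeighted call above evaluates, per pixel, out = cont1·x + (cont1·(−128) + cont2 + 128) = cont1·(x − 128) + 128 + cont2, i.e. a contrast gain cont1 pivoted around mid-gray 128 plus a brightness offset cont2. With cont1 = 1.2 and cont2 = 10, for example, a pixel at 128 maps to 138 while a pixel at 200 maps to 1.2·(200 − 128) + 138 = 224.4.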
Example No. 4
        public static UMat getExposureCorrected(ref UMat img, double ev)
        {
            UMat dblImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat outImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            //outImg = (UMat)ev*dblImg;
            CvInvoke.AddWeighted(dblImg, ev, dblImg, 0, 0, outImg);
            //CvInvoke.cvConvertScale(dblImg, outImg, ev,0);
            dblImg.Dispose();
            img.Dispose();
            return(outImg);
        }
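
The exposure variant is simply a linear gain, out = ev·x: ev = 1 leaves the image unchanged and ev = 2 doubles every pixel value, with any clipping only happening when the 64-bit float result is later converted back to an 8-bit image.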
Example No. 5
 private void Button3_Click(object sender, EventArgs e)
 {
     imgGray = img.Convert <Gray, Byte>();
     //Blur the grayscale image
     CvInvoke.GaussianBlur(imgGray, imgGray, new Size(5, 5), 0);
     // apply adaptive threshold
     CvInvoke.AdaptiveThreshold(imgGray, imgGray, 255, Emgu.CV.CvEnum.AdaptiveThresholdType.GaussianC, Emgu.CV.CvEnum.ThresholdType.BinaryInv, 75, 10);
     CvInvoke.Canny(imgGray, cannyImage, 75, 200);
     cannyImage.ConvertTo(imgGray, Emgu.CV.CvEnum.DepthType.Default, -1, 0);
     Emgu.CV.Util.VectorOfVectorOfPoint vector = new Emgu.CV.Util.VectorOfVectorOfPoint();
     CvInvoke.FindContours(cannyImage, vector, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
     CvInvoke.DrawContours(img, vector, -1, new MCvScalar(240, 0, 159), 3);
     MessageBox.Show("Question Part Detected");
     sheetDetectImage.Image = img;
 }
Example No. 6
        public static UMat getColorAdjusted(ref UMat img, double redshift, double greenshift, double blueshift)
        {
            double shift;
            UMat   dblImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            var colors = new VectorOfUMat(3);

            CvInvoke.Split(img, colors);
            shift = (1 + redshift) > 0 ? (1 + redshift) : 0;
            CvInvoke.AddWeighted(colors[2], shift, colors[2], 0, 0, colors[2]);
            shift = (1 + greenshift) > 0 ? (1 + greenshift) : 0;
            CvInvoke.AddWeighted(colors[1], shift, colors[1], 0, 0, colors[1]);
            shift = (1 + blueshift) > 0 ? (1 + blueshift) : 0;
            CvInvoke.AddWeighted(colors[0], shift, colors[0], 0, 0, colors[0]);
            CvInvoke.Merge(colors, dblImg);
            img.Dispose();
            return(dblImg);
        }
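
A note on the channel math above: the split follows OpenCV's BGR order, so colors[2] is the red plane, colors[1] the green plane and colors[0] the blue plane, and each plane is scaled by max(1 + shift, 0), e.g. a gain of 1.25 for a shift of +0.25 and 0.75 for −0.25. If the source image is 8-bit, AddWeighted saturates, so boosted channels clip at 255.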
Example No. 7
        /// <summary>
        /// Apply non-local means (NL-means) denoising to the given UMat.
        /// </summary>
        /// <param name="img">The source UMat.</param>
        /// <param name="h">Filter strength parameter.</param>
        /// <param name="hcolor">Filter strength parameter for the color components.</param>
        /// <returns>
        /// Filtered UMat image.
        /// </returns>
        /// <remarks>
        /// Used for image denoising with NL-means filtering.
        /// </remarks>
        public static UMat getN1MeanFiltered(ref UMat img, float h, float hcolor)
        {
            UMat tmp = new UMat();

            img.ConvertTo(tmp, DepthType.Cv8U);
            UMat outImg = new UMat(tmp.Rows, tmp.Cols, tmp.Depth, tmp.NumberOfChannels);

            CvInvoke.FastNlMeansDenoisingColored(tmp, outImg, h, hcolor);
            img.Dispose();
            tmp.Dispose();
            return(outImg);
        }
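
FastNlMeansDenoisingColored is OpenCV's colored non-local means denoiser, which is why the input is first converted to 8-bit (the function expects an 8-bit 3-channel image). As a rough guideline, OpenCV's own defaults for h and hColor are 3; values around 10 remove noticeably more noise at the cost of fine detail.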
Example No. 8
        public void ProcessAddPhDiffNInverseT(
            double rDist,
            double sensorPxWidth,
            double sensorPxHeight,
            double wavelength,
            UMat magFoT,
            UMat phFoT,
            Rectangle ctrRoi,
            Point ctrRoiMaxMagLoc,
            UMat magFo,
            UMat phFo,
            ref UMat unitPhDiffDM
            )
        {
            ///Based on a bunch of parameters, calculates the phase difference at each point
            ///between the aperture (hologram) plane and the image plane. It adds this phase
            ///difference to each plane wave (point on the Fourier spectrum), turning the Fourier
            ///spectrum into one at the image plane, and inverse-DFTs to get the predicted magnitude
            ///and phase of light at the image plane, as magFo and phFo.

            //if unassigned, generate unit phase difference matrix
            if (unitPhDiffDM == null)
            {
                unitPhDiffDM = GetUnitPhDiffDoubleM(phFoT.Size, phFoT.Size, sensorPxWidth, sensorPxHeight, wavelength);
            }
            else
            {
                //if new phase transform image is wider or higher than current
                //(giant) unitPhDiff matrix, dispose current giant matrix and
                //generate a bigger one
                if (phFoT.Rows > unitPhDiffDM.Rows || phFoT.Cols > unitPhDiffDM.Cols)
                {
                    int biggerRows = Math.Max(phFoT.Rows, unitPhDiffDM.Rows);
                    int biggerCols = Math.Max(phFoT.Cols, unitPhDiffDM.Cols);
                    unitPhDiffDM.Dispose();
                    unitPhDiffDM = GetUnitPhDiffDoubleM(new Size(biggerCols, biggerRows), phFoT.Size,
                                                        sensorPxWidth, sensorPxHeight, wavelength);
                }
            }

            //Calculate phase difference matrix to add to sinewaves at particular (x,y)s.
            //phDiffM = rDist * unitPhDiffM, but first select the right region on unitPhDiffM.
            Point     unitPhDiff0HzLoc = new Point(unitPhDiffDM.Cols / 2, unitPhDiffDM.Rows / 2);
            Rectangle unitPhDiffCtrRoi = new Rectangle(unitPhDiff0HzLoc - (Size)ctrRoiMaxMagLoc, ctrRoi.Size);

            //get ROI on the (giant) unitPhDiffM and multiply rDist by only that part.
            //the output goes into this.phDiffFM.
            UMat phDiffFM = this.phDiffFM;

            using (UMat unitPhDiffDMCtr = new UMat(unitPhDiffDM, unitPhDiffCtrRoi)) {
                unitPhDiffDMCtr.ConvertTo(phDiffFM, DepthType.Cv32F, alpha: rDist);                //convert phDiffDM to float to make same type as
                //phFoT, we add them together later
            }

            //if video mode is off, add phase difference to a copy of the first-order-only image,
            //because we don't want to modify the input and feed the output back to the input as
            //this function is called again when one of the parameters changes
            UMat phFoT2Mod;             //phFoT to modify

            if (videoMode)
            {
                phFoT2Mod = phFoT;
            }
            else
            {
                phFoT2Mod = magFo;                //we're gonna update it later anyway
                phFoT.CopyTo(phFoT2Mod);
            }
            using (UMat phFoT2ModCtr = new UMat(phFoT2Mod, ctrRoi)) {
                CvInvoke.Add(phFoT2ModCtr, phDiffFM, phFoT2ModCtr, dtype: DepthType.Cv32F);
            }

            //convert magnitude and phase Transform parts at the image plane
            //to real and imaginary parts and Inverse (fourier) Transform to
            //get magnitude and phase at the image plane (which is what we want!)
            UMat reFoT = magFo, imFoT = phFo;                      //we're gonna update it later anyway

            CvInvoke.PolarToCart(magFoT, phFoT2Mod, reFoT, imFoT); //reusing memory here.
            InverseT(reFoT, imFoT, magFo, phFo);                   //reusing memory here as well.
        }
Example No. 9
        public void ProcessForwardT(UMat inImg, UMat outMagT, UMat outPhT, bool zeroPad = false, bool switchQuadrants = true)
        {
            ///Accepts a 1-channel image and updates outMagT and outPhT with the
            ///transform's magnitude and phase, because I can't think of why you
            ///would want to look at the real and imaginary transforms.
            ///Also can't think of how you can get complex-valued images.
            ///Quadrant rearranging doesn't support odd rows or cols.
            ///T stuff, reference: https://docs.opencv.org/master/d8/d01/tutorial_discrete_fourier_transform.html
            //convert image to 32-bit float because spatial-frequency-domain
            //values are way bigger than spatial-domain values.
            UMat re = outMagT;            //32-bit float real image; reuse outMagT's memory since it's going to be updated anyway

            inImg.ConvertTo(re, DepthType.Cv32F);
            if (zeroPad)
            {
                //zero pad for faster dft
                ZeroPadImage(re);
            }
            //create imaginary image of zeros of same depthType
            //and size as image representing real plane
            UMat im = outPhT;                                           //imaginary

            im.Create(re.Rows, re.Cols, re.Depth, re.NumberOfChannels); //allocate memory so you can set it to zero array
            //if memory hasn't already been allocated for it
            im.SetTo(new MCvScalar(0));

            /// Quick excerpt about VectorOfUMat:
            /// vec.Dispose()/vec.Clear() only disposes the underlying objects of those
            /// UMats whose first reference lives in the vector itself, e.g.
            /// VectorOfUMat vec = new VectorOfUMat(new UMat(), new UMat());
            /// vec.Push(someUMat.Clone());
            /// then vec.Dispose()/vec.Clear() actually disposes all the objects referenced
            /// by the UMats in vec. In that case, if you did:
            /// VectorOfUMat vec = new VectorOfUMat(inImg.Clone(), inImg.Clone());
            /// UMat one = vec[0];
            /// one.Dispose();
            /// one.Dispose() actually does nothing.
            /// Otherwise, if
            /// UMat one = new UMat();
            /// UMat two = new UMat();
            /// VectorOfUMat vec = new VectorOfUMat(one);
            /// vec.Push(two);
            /// calling vec.Dispose() doesn't dispose the objects;
            /// you have to call one.Dispose() and two.Dispose() yourself.
            /// Note: no matter whether the UMat was first stored in a vector
            /// node or not, calling vec[index].Dispose() does NOTHING.
            /// The situation is the same for vec.Clear(), except Clear() doesn't
            /// dispose vec itself; it only disposes the objects the UMats in
            /// it reference.
            //Dft accepts 2-channel images, so we use Merge to merge
            //our 2 1-channel images into a single 2-channel image.
            //Merge accepts object arrays, so we create a VectorOfUMat
            //of our 2 images to feed into Merge.

            VectorOfUMat vec  = this.vec;
            UMat         cImg = this.img32f2c;

            vec.Push(re);
            vec.Push(im);            //vec[0] = re, vec[1] = im
            CvInvoke.Merge(vec, cImg);
            CvInvoke.Dft(cImg, cImg, DxtType.Forward, 0);            //use back the same memory
            //switch quadrants while images are still combined
            if (switchQuadrants)
            {
                SwitchQuadrants(cImg);
            }
            //make the 2-channel array into 2 1-channel arrays
            CvInvoke.Split(cImg, vec); //vec[0] is reT, vec[1] is imT, they are new objects.
            CvInvoke.CartToPolar(vec[0], vec[1], outMagT, outPhT);
            vec.Clear();               //dispose reT and imT. TODO: find a way to avoid allocating memory for reT and imT.
        }
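
ZeroPadImage, called from ProcessForwardT above, is not shown in this listing. The sketch below shows what such a helper typically does (pad the image with zeros on the bottom/right up to OpenCV's optimal DFT size); the method name, the in-place behaviour and the border value are assumptions, not the project's actual implementation.

        //Sketch only: pad a 1-channel float image with zeros to the nearest optimal DFT size.
        //The real ZeroPadImage used above is not shown here; this is an assumed equivalent.
        private static void ZeroPadImageSketch(UMat img)
        {
            int optRows = CvInvoke.GetOptimalDFTSize(img.Rows);
            int optCols = CvInvoke.GetOptimalDFTSize(img.Cols);
            using (UMat padded = new UMat())
            {
                CvInvoke.CopyMakeBorder(img, padded, 0, optRows - img.Rows, 0, optCols - img.Cols,
                                        BorderType.Constant, new MCvScalar(0));
                padded.CopyTo(img);            //CopyTo reallocates img to the padded size
            }
        }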
Example No. 10
        public static double[,] GetImageHistogramEfficient(UMat image, Image <Gray, byte> mask = null)
        {
            if (uniformBins == null)
            {
                uniformBins = new int[(int)Math.Pow(2, numNeighbors)];
                for (int i = 0; i < uniformBins.Length; i++)
                {
                    int bin = GetPatternNum(i);
                    uniformBins[i] = bin;
                }
            }

            //int numNeighbors = 12, radius = 2;
            //int numNeighbors = 8, radius = 1;
            int width = image.Cols, height = image.Rows;

            UMat orig = new UMat();

            image.ConvertTo(orig, Emgu.CV.CvEnum.DepthType.Cv32F);
            UMat[] neighbors = new UMat[numNeighbors];
            UMat   patterns  = new UMat(height, width, Emgu.CV.CvEnum.DepthType.Cv32F, 1);

            patterns.SetTo(new MCvScalar(0));
            UMat mean = new UMat(height, width, Emgu.CV.CvEnum.DepthType.Cv32F, 1);

            mean.SetTo(new MCvScalar(0));
            for (int i = 0; i < numNeighbors; i++)
            {
                UMat           img    = new UMat(height, width, Emgu.CV.CvEnum.DepthType.Cv32F, 1);
                Matrix <float> filter = new Matrix <float>(2 * radius + 1, 2 * radius + 1);
                filter.SetZero();

                float x = (float)radius * (float)Math.Cos(2.0 * Math.PI * i / (double)numNeighbors);
                float y = (float)radius * (float)Math.Sin(2.0 * Math.PI * i / (double)numNeighbors);

                // relative indices
                int fx = (int)Math.Floor(x);
                int fy = (int)Math.Floor(y);
                int cx = (int)Math.Ceiling(x);
                int cy = (int)Math.Ceiling(y);

                // fractional part
                float ty = y - fy;
                float tx = x - fx;

                // set interpolation weights
                float w1 = (1 - tx) * (1 - ty);
                float w2 = tx * (1 - ty);
                float w3 = (1 - tx) * ty;
                float w4 = tx * ty;

                filter[fy + radius, fx + radius] = w1;
                if (cx != fx)
                {
                    filter[fy + radius, cx + radius] = w2;
                }
                if (cy != fy)
                {
                    filter[cy + radius, fx + radius] = w3;
                }
                if (cx != fx && cy != fy)
                {
                    filter[cy + radius, cx + radius] = w4;
                }

                CvInvoke.Filter2D(orig, img, filter.ToUMat(), new Point(radius, radius), 0, Emgu.CV.CvEnum.BorderType.Isolated);
                CvInvoke.Subtract(img, orig, img);

                neighbors[i] = img;

                UMat imgThresh = new UMat(height, width, Emgu.CV.CvEnum.DepthType.Cv32F, 1);
                CvInvoke.Threshold(img, imgThresh, 0, (double)(1 << i), Emgu.CV.CvEnum.ThresholdType.Binary);
                CvInvoke.Add(patterns, imgThresh, patterns);
                imgThresh.Dispose();

                CvInvoke.AddWeighted(mean, 1.0, img, 1.0 / numNeighbors, 0, mean);

                filter.Dispose();
            }

            UMat variances = new UMat(height, width, Emgu.CV.CvEnum.DepthType.Cv32F, 1);

            variances.SetTo(new MCvScalar(0));
            for (int i = 0; i < numNeighbors; i++)
            {
                UMat img = neighbors[i];
                CvInvoke.Subtract(img, mean, img);
                CvInvoke.Multiply(img, img, img);
                CvInvoke.AddWeighted(variances, 1.0, img, 1.0 / numNeighbors, 0, variances);
            }

            Image <Gray, float> patternImg = patterns.ToImage <Gray, float>();
            Image <Gray, float> varImg     = variances.ToImage <Gray, float>();

            double[,] histogram = new double[numNeighbors + 2, NUM_VAR_BINS];
            int[,] counters     = new int[numNeighbors + 2, NUM_VAR_BINS];
            //double[,] histogram = new double[(int)Math.Pow(2, numNeighbors), 1];
            //int[,] counters = new int[(int)Math.Pow(2, numNeighbors), 1];
            byte[,,] maskData = mask == null ? null : mask.Data;
            Parallel.For(0, width * height, (int i) =>
            {
                int y = i / width;
                int x = i % width;

                if (mask == null || maskData[y, x, 0] > 0)
                {
                    int pattern     = (int)Math.Round(patternImg.Data[y, x, 0]);
                    double variance = varImg.Data[y, x, 0];
                    if (double.IsNaN(variance))
                    {
                        variance = 0;
                    }

                    int LBPBin = uniformBins[pattern];
                    //int LBPBin = pattern;
                    //int LBPBin = GetPatternNum(pattern);
                    //int VARBin = 0;
                    int VARBin = GetBin(variance, varBinCuts);
                    Interlocked.Increment(ref counters[LBPBin, VARBin]);
                }
            });
            for (int i = 0; i < counters.GetLength(0); i++)
            {
                for (int j = 0; j < counters.GetLength(1); j++)
                {
                    histogram[i, j] = counters[i, j];
                }
            }

            patternImg.Dispose();
            varImg.Dispose();
            variances.Dispose();
            mean.Dispose();
            patterns.Dispose();
            foreach (UMat neighbor in neighbors)
            {
                neighbor.Dispose();
            }
            orig.Dispose();

            return(NormalizeHistogram(histogram));
        }
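
For reference, here is a scalar sketch of the local binary pattern that the vectorized code above accumulates into `patterns`; it assumes a plain float[,] grayscale array, uses nearest-neighbour sampling instead of the bilinear interpolation done with Filter2D, and skips border handling.

        //Scalar reference for the LBP value computed above (sketch only). Bit i is set when
        //the i-th circular neighbour is brighter than the centre pixel, matching the
        //"neighbour - centre > 0" Threshold used in the UMat version.
        static int LocalBinaryPatternSketch(float[,] gray, int cy, int cx, int radius, int numNeighbors)
        {
            int pattern = 0;
            for (int i = 0; i < numNeighbors; i++)
            {
                double angle = 2.0 * Math.PI * i / numNeighbors;
                int ny = cy + (int)Math.Round(radius * Math.Sin(angle));
                int nx = cx + (int)Math.Round(radius * Math.Cos(angle));
                if (gray[ny, nx] - gray[cy, cx] > 0)
                {
                    pattern |= 1 << i;
                }
            }
            return pattern;
        }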
Example No. 11
        private void processVideo(object sender, DoWorkEventArgs e)
        {
            CvInvoke.UseOpenCL    = true;
            CvInvoke.UseOptimized = true;

            //determine amount of maps already present in the world
            int     mapCount = 0;
            NbtFile map      = null;

            try {
                map      = new NbtFile(Path.Join(WorldFolderPath, "\\data\\idcounts.dat"));
                mapCount = Int32.Parse(map.RootTag.Get <NbtCompound>("data").Get <NbtInt>("map").Value.ToString()) + 1;
            } catch { }
            Debug.Write("MapCount:" + mapCount + "\n");

            //start videocapture
            VideoCapture video = new VideoCapture(VideoFilePath);

            //get framerate
            framerate = video.GetCaptureProperty(CapProp.Fps);
            //get framecount
            frames = video.GetCaptureProperty(CapProp.FrameCount);

            //calculate the reduced frame count at the target framerate
            reducedFrames = Math.Floor((frames / framerate) * TargetFrameRate);

            if (map != null)
            {
                map.RootTag.Get <NbtCompound>("data").Get <NbtInt>("map").Value = mapCount + ((int)reducedFrames) * 15;
                map.SaveToFile(Path.Join(WorldFolderPath, "\\data\\idcounts.dat"), NbtCompression.None);
            }

            //create Preset for map data
            NbtCompound preset = new NbtCompound("")
            {
                new NbtCompound("data")
                {
                    new NbtString("dimension", "minecraft:overworld"),
                    new NbtLong("xCenter", 128),
                    new NbtLong("zCenter", 128),
                    new NbtByte("scale", 3),
                    new NbtByte("locked", 1),
                    new NbtByte("unlimitedTracking", 0),
                    new NbtByte("trackingPosition", 0),
                    new NbtByteArray("colors")
                },
                new NbtInt("DataVersion", 2584)
            };

            //create path to output folder
            string mapOutputFolder = Path.Join(WorldFolderPath, "/data");

            UMat ones = new UMat(1, 3, DepthType.Cv8U, 1, UMat.Usage.AllocateDeviceMemory);

            ones.SetTo(new MCvScalar(1));


            UMat calculation   = new UMat(new Size(640, 384), DepthType.Cv32S, 3, UMat.Usage.AllocateDeviceMemory);
            UMat singleChannel = new UMat(new Size(640, 384), DepthType.Cv32S, 1, UMat.Usage.AllocateDeviceMemory);

            //keeps the lowest color distance found so far per pixel
            UMat lowestDiversion = new UMat(new Size(640, 384), DepthType.Cv32S, 1, UMat.Usage.AllocateDeviceMemory);
            //boolean mask: where the current palette color is a closer match
            UMat lessDiversion = new UMat(new Size(640, 384), DepthType.Cv8U, 1, UMat.Usage.AllocateDeviceMemory);
            //stores the chosen block (palette index) per pixel
            UMat blocks = new UMat(new Size(640, 384), DepthType.Cv8U, 1, UMat.Usage.AllocateDeviceMemory);


            while (frame < reducedFrames)
            {
                //calculate position in video and set to next frame
                position = frame / reducedFrames;
                video.SetCaptureProperty(CapProp.PosFrames, Math.Round(position * frames));

                var watch = System.Diagnostics.Stopwatch.StartNew();

                //get video frame
                if (!video.Read(singleFrame))
                {
                    break;
                }

                //resize to minecraft compatible resolution
                CvInvoke.Resize(singleFrame, singleFrame, new Size(640, 384));
                singleFrame.ConvertTo(singleFrame, DepthType.Cv32F);

                //display current Frame to user
                if (PreviewPicture.Image != null)
                {
                    PreviewPicture.Image.Dispose();
                }
                PreviewPicture.Image = singleFrame.ToBitmap();

                lowestDiversion.SetTo(new MCvScalar(255));
                lessDiversion.SetTo(new MCvScalar(0));
                blocks.SetTo(Colors.minecraftColors[Colors.minecraftColors.Length - 1]);

                for (int i = 0; i < Colors.minecraftColors.Length; i++)
                {
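                    //squared per-channel difference to palette color i; Transform with the
                    //1x3 "ones" matrix sums the three channels, so after Sqrt, singleChannel
                    //holds each pixel's Euclidean distance to that palette color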
                    calculation = singleFrame - Colors.minecraftColors[i];
                    CvInvoke.Multiply(calculation, calculation, calculation);
                    CvInvoke.Transform(calculation, singleChannel, ones);

                    CvInvoke.Sqrt(singleChannel, singleChannel);
                    singleChannel.ConvertTo(singleChannel, DepthType.Cv32S);

                    CvInvoke.Compare(singleChannel, lowestDiversion, lessDiversion, CmpType.LessThan);

                    singleChannel.CopyTo(lowestDiversion, lessDiversion);

                    blocks.SetTo(new MCvScalar(i + 4), lessDiversion);
                }

                for (int y = 0; y < 3; y++)
                {
                    for (int x = 0; x < 5; x++)
                    {
                        UMat output = new UMat(blocks, new Rectangle(128 * x, 128 * y, 128, 128));
                        preset.Get <NbtCompound>("data").Get <NbtByteArray>("colors").Value = output.Bytes;

                        NbtFile file = new NbtFile(preset);
                        file.SaveToFile(Path.Join(mapOutputFolder, String.Concat("map_", mapCount + (frame * 15) + (y * 5 + x), ".dat")), NbtCompression.None);
                    }
                }

                watch.Stop();
                elapsedTime = watch.ElapsedMilliseconds;
                Debug.Write("Took:" + elapsedTime + "\n");

                System.GC.Collect();

                //send progress update to ui and console
                worker.ReportProgress((int)Math.Round(position * 100), elapsedTime * (reducedFrames - frame));
                Debug.Write(frame + "/" + reducedFrames + "-" + position + "\n");

                //increase framecount
                frame++;
            }

            worker.ReportProgress(100, 0.0);

            //pass the values to display to the user
            e.Result = new convertResult((int)frame - 1, mapCount);
        }