Example No. 1
        /*
         * /// <summary>
         * /// Raises the show debug toggle event.
         * /// </summary>
         * public void OnShowDebugToggle()
         * {
         *  Debug.Log("User clicked [Debug] button.");
         *  m_debug = !m_debug;
         *  Debug.Log("Debug mode: " + m_debug + ".");
         * }
         */

        /// <summary>
        /// Raises the save button event.
        /// </summary>
        public void OnSaveButton()
        {
            Debug.Log("User clicked [Save] button.");

            if (model != null)
            {
                // Cleaning old files.
                string[] filePaths = Directory.GetFiles(Application.temporaryCachePath);
                foreach (string filePath in filePaths)
                {
                    File.SetAttributes(filePath, FileAttributes.Normal);
                    File.Delete(filePath);
                }

                // save the train data.
                model.save(Application.temporaryCachePath + "/traindata.yml");

                // save the preprocessed faces.
                for (int i = 0; i < m_numPersons; ++i)
                {
                    Imgcodecs.imwrite(Application.temporaryCachePath + "/preprocessedface" + i + ".jpg", preprocessedFaces [m_latestFaces [i]]);
                }
            }
            else
            {
                Debug.Log("save failure. train data does not exist.");
            }
        }
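A minimal sketch of the matching load path, assuming the same model field and file layout as the snippet above; the read call mirrors the standard OpenCV FaceRecognizer API and is an assumption about the concrete recognizer class used here.

        /// <summary>
        /// Sketch: reloads previously saved train data, if present (assumes the model field above).
        /// </summary>
        public void OnLoadButton()
        {
            string trainDataPath = Application.temporaryCachePath + "/traindata.yml";
            if (model != null && File.Exists(trainDataPath))
            {
                // Assumption: FaceRecognizer.read() restores the state written by model.save().
                model.read(trainDataPath);
                Debug.Log("Loaded train data from: " + trainDataPath);
            }
            else
            {
                Debug.Log("load failure. train data does not exist.");
            }
        }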
Example No. 2
    // function that is triggered on the click of the snap button
    public void saveButtonClicked()
    {
        // acquire the latest frame from the camera

        // establish the save directory and file path
        string outpath = Application.persistentDataPath + "/3dImages/";

        Directory.CreateDirectory(outpath);
        string filePath = outpath + "image"; // outpath already ends with "/"

        // post-process the captured images

        Mat filteredRGB;

        Debug.Log("Improving the quality of the RGB Image");
        ProcessRBGImage(color, out filteredRGB);

        // Watermark the depth and color image
        Debug.Log("Applying watermarks");
        Mat watermarkedDepth;
        Mat watermarkedColor;

        // write the resulting images to disk

        Debug.Log("writing color to " + filePath + ".png");
        Imgcodecs.imwrite(filePath + ".png", color);
        Debug.Log("writing depth to " + filePath + "_depth.png");
        Imgcodecs.imwrite(filePath + "_depth.png", depth);


        // Open the directory where the images were saved in Explorer
        Debug.Log("explorer.exe" + " /n, /e, " + outpath.Replace('/', '\\'));
        System.Diagnostics.Process.Start("explorer.exe", "/n, /e, " + outpath.Replace('/', '\\'));
        gameObject.SetActive(false);
    }
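The frame acquisition and watermark steps above are only stubbed out by comments (watermarkedDepth and watermarkedColor are declared but never written). A minimal sketch of one way the watermark step could be filled in, assuming Imgproc.putText stamping onto a copy of the input Mat; ApplyWatermark is a hypothetical helper, and the color and depth fields come from the snippet above.

    // Hypothetical helper: stamps a text watermark onto a copy of the input Mat.
    private Mat ApplyWatermark(Mat input, string text)
    {
        Mat watermarked = input.clone();
        Imgproc.putText(watermarked, text, new Point(10, watermarked.rows() - 10),
            Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        return watermarked;
    }

    // Possible usage inside saveButtonClicked():
    //   watermarkedColor = ApplyWatermark(filteredRGB, "sample watermark");
    //   watermarkedDepth = ApplyWatermark(depth, "sample watermark");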
Example No. 3
        /// <summary>
        /// Raises the render image event.
        /// </summary>
        /// <param name="source">Source.</param>
        /// <param name="destination">Destination.</param>
        void OnRenderImage(RenderTexture source, RenderTexture destination)
        {
            if (captureFlag)
            {
                //            Debug.Log ("source.width " + source.width + "source.height " + source.height);

                Mat       cameraMat = new Mat(source.height, source.width, CvType.CV_8UC4);
                Texture2D texture   = new Texture2D(cameraMat.width(), cameraMat.height(), TextureFormat.ARGB32, false);

                Utils.textureToTexture2D(source, texture);
                Utils.texture2DToMat(texture, cameraMat);

                Imgproc.cvtColor(cameraMat, cameraMat, Imgproc.COLOR_RGBA2BGRA);

                Imgproc.rectangle(cameraMat, new Point(0, 0), new Point(cameraMat.width(), cameraMat.height()), new Scalar(0, 0, 255, 255), 3);
                Imgproc.putText(cameraMat, "SavePath:", new Point(5, cameraMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(0, 0, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(cameraMat, savePath, new Point(5, cameraMat.rows() - 8), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 0, Imgproc.LINE_AA, false);

                Imgcodecs.imwrite(savePath, cameraMat);

                savePathInputField.text = savePath;
                Debug.Log("savePath: " + savePath);

                captureFlag = false;
            }

            Graphics.Blit(source, destination);
        }
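OnRenderImage above only performs the capture when captureFlag is set. A minimal sketch of a UI handler that arms it, assuming the captureFlag and savePath fields from the snippet and a PNG under persistentDataPath (the file name is hypothetical).

        /// <summary>
        /// Sketch: arms the capture performed by OnRenderImage on the next rendered frame.
        /// </summary>
        public void OnCaptureButtonClick()
        {
            savePath = Path.Combine(Application.persistentDataPath, "capture.png");
            captureFlag = true;
        }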
Example No. 4
    // Writes the snapped color and depth frames to disk and opens the output folder in Explorer.
    public void OnShot(Frame3DGPU frame3Dgpu)
    {
        if (frame3Dgpu == null)
        {
            return;
        }
        if (frame3Dgpu.snapped)
        {
            frame3Dgpu.snapped = false;
            Debug.Log("writing color to " + filePath + ".png");
            Mat colorFlipped = new Mat();
            Core.flip(frame3Dgpu.waterMarkedColor, colorFlipped, 1);
            Mat rgb = new Mat();
            Imgproc.cvtColor(colorFlipped, rgb, Imgproc.COLOR_RGB2BGR);
            Imgcodecs.imwrite(filePath + ".png", rgb);

            Mat depthFlipped = new Mat();
            Core.flip(frame3Dgpu.waterMarkedDepth, depthFlipped, 1);
            Debug.Log("writing depth to " + filePath + "_depth.png");
            Imgcodecs.imwrite(filePath + "_depth.png", depthFlipped);

            // Open the directory where the images were saved in Explorer
            Debug.Log("explorer.exe" + " /n, /e, " + outpath.Replace('/', '\\'));
            System.Diagnostics.Process.Start("explorer.exe", "/n, /e, " + outpath.Replace('/', '\\'));
            ShowWindow(GetActiveWindow(), 2);
        }
    }
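ShowWindow and GetActiveWindow in the snippet above are not Unity APIs; they presumably come from user32.dll P/Invoke declarations along the lines of this sketch (Windows-only).

#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
    [System.Runtime.InteropServices.DllImport("user32.dll")]
    private static extern System.IntPtr GetActiveWindow();

    // nCmdShow = 2 (SW_SHOWMINIMIZED) matches the ShowWindow(GetActiveWindow(), 2) call above.
    [System.Runtime.InteropServices.DllImport("user32.dll")]
    private static extern bool ShowWindow(System.IntPtr hWnd, int nCmdShow);
#endif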
Example No. 5
        /// <summary>
        /// Raises the render image event.
        /// </summary>
        /// <param name="source">Source.</param>
        /// <param name="destination">Destination.</param>
        void OnRenderImage(RenderTexture source, RenderTexture destination)
        {
            if (captureFlag)
            {
                //            Debug.Log ("source.width " + source.width + "source.height " + source.height);

                Mat       cameraMat = new Mat(source.height, source.width, CvType.CV_8UC4);
                Texture2D texture   = new Texture2D(cameraMat.width(), cameraMat.height(), TextureFormat.ARGB32, false);

                Utils.textureToTexture2D(source, texture);
                Utils.texture2DToMat(texture, cameraMat);

                Imgproc.cvtColor(cameraMat, cameraMat, Imgproc.COLOR_RGBA2BGRA);

                Imgproc.rectangle(cameraMat, new Point(0, 0), new Point(cameraMat.width(), cameraMat.height()), new Scalar(0, 0, 255, 255), 3);
                Imgproc.putText(cameraMat, "W:" + cameraMat.width() + " H:" + cameraMat.height() + " SO:" + Screen.orientation, new Point(5, cameraMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                // Please set your savePath before capturing.
                Imgcodecs.imwrite(savePath, cameraMat);


                captureFlag = false;
            }

            Graphics.Blit(source, destination);
        }
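The capture above writes to savePath, which the original comment asks the user to set. A minimal sketch of one way to initialise it, assuming a PNG under persistentDataPath (the file name is chosen here for illustration).

        void Start()
        {
            savePath = Path.Combine(Application.persistentDataPath, "ScreenShot.png");
        }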
Example No. 6
        /// <summary>
        /// Raises the save button click event.
        /// </summary>
        public void OnSaveButtonClick()
        {
            string saveDirectoryPath = Path.Combine(Application.persistentDataPath, "HoloLensArUcoCameraCalibrationExample");

            if (!Directory.Exists(saveDirectoryPath))
            {
                Directory.CreateDirectory(saveDirectoryPath);
            }

            string calibratonDirectoryName         = "camera_parameters" + bgrMat.width() + "x" + bgrMat.height();
            string saveCalibratonFileDirectoryPath = Path.Combine(saveDirectoryPath, calibratonDirectoryName);

            // Clean up old files.
            if (Directory.Exists(saveCalibratonFileDirectoryPath))
            {
                DirectoryInfo directoryInfo = new DirectoryInfo(saveCalibratonFileDirectoryPath);
                foreach (FileInfo fileInfo in directoryInfo.GetFiles())
                {
                    if ((fileInfo.Attributes & FileAttributes.ReadOnly) == FileAttributes.ReadOnly)
                    {
                        fileInfo.Attributes = FileAttributes.Normal;
                    }
                }
                if ((directoryInfo.Attributes & FileAttributes.ReadOnly) == FileAttributes.ReadOnly)
                {
                    directoryInfo.Attributes = FileAttributes.Directory;
                }
                directoryInfo.Delete(true);
            }
            Directory.CreateDirectory(saveCalibratonFileDirectoryPath);

            // save the calibration file.
            string           savePath   = Path.Combine(saveCalibratonFileDirectoryPath, calibratonDirectoryName + ".xml");
            int              frameCount = (markerType == MarkerType.ChArUcoBoard) ? allCorners.Count : imagePoints.Count;
            CameraParameters param      = new CameraParameters(frameCount, bgrMat.width(), bgrMat.height(), calibrationFlags, camMatrix, distCoeffs, repErr);
            XmlSerializer    serializer = new XmlSerializer(typeof(CameraParameters));

            using (var stream = new FileStream(savePath, FileMode.Create))
            {
                serializer.Serialize(stream, param);
            }

            // save the calibration images.
#if UNITY_WEBGL && !UNITY_EDITOR
            string   format            = "jpg";
            MatOfInt compressionParams = new MatOfInt(Imgcodecs.IMWRITE_JPEG_QUALITY, 100);
#else
            string   format            = "png";
            MatOfInt compressionParams = new MatOfInt(Imgcodecs.IMWRITE_PNG_COMPRESSION, 0);
#endif
            for (int i = 0; i < allImgs.Count; ++i)
            {
                Imgcodecs.imwrite(Path.Combine(saveCalibratonFileDirectoryPath, calibratonDirectoryName + "_" + i.ToString("00") + "." + format), allImgs[i], compressionParams);
            }

            savePathInputField.text = savePath;
            Debug.Log("Saved the CameraParameters to disk in XML file format.");
            Debug.Log("savePath: " + savePath);
        }
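The calibration data above is written with XmlSerializer. A minimal sketch of reading it back, assuming the same CameraParameters type and an XML file produced by OnSaveButtonClick.

        /// <summary>
        /// Sketch: deserializes a CameraParameters XML file written by OnSaveButtonClick.
        /// </summary>
        private CameraParameters LoadCameraParameters(string path)
        {
            XmlSerializer serializer = new XmlSerializer(typeof(CameraParameters));
            using (var stream = new FileStream(path, FileMode.Open, FileAccess.Read))
            {
                return (CameraParameters)serializer.Deserialize(stream);
            }
        }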
Example No. 7
        /// <summary>
        /// Raises the save button click event.
        /// </summary>
        public void OnSaveButtonClick()
        {
            if (patternRawImage.texture != null)
            {
                Texture2D patternTexture = (Texture2D)patternRawImage.texture;
                Mat       patternMat     = new Mat(patternRect.size(), CvType.CV_8UC3);
                Utils.texture2DToMat(patternTexture, patternMat);
                Imgproc.cvtColor(patternMat, patternMat, Imgproc.COLOR_RGB2BGR);

                string savePath = Application.persistentDataPath;
                Debug.Log("savePath " + savePath);

                Imgcodecs.imwrite(savePath + "/patternImg.jpg", patternMat);

                SceneManager.LoadScene("WebCamTextureMarkerLessARExample");
            }
        }
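The pattern image above is converted to BGR before imwrite. A minimal sketch of loading it back into a Texture2D in the next scene, assuming OpenCVForUnity's Imgcodecs.imread and Utils.matToTexture2D as used elsewhere in these examples.

        /// <summary>
        /// Sketch: reloads the saved pattern image and converts it back to a Texture2D.
        /// </summary>
        private Texture2D LoadPatternTexture()
        {
            Mat patternMat = Imgcodecs.imread(Application.persistentDataPath + "/patternImg.jpg");
            if (patternMat.empty())
            {
                return null;
            }
            // imread returns BGR channel order; convert back to RGB before creating the texture.
            Imgproc.cvtColor(patternMat, patternMat, Imgproc.COLOR_BGR2RGB);
            Texture2D patternTexture = new Texture2D(patternMat.cols(), patternMat.rows(), TextureFormat.RGB24, false);
            Utils.matToTexture2D(patternMat, patternTexture);
            return patternTexture;
        }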
Example No. 8
        public void takePhoto()
        {
            snapshotCount = 40;
            Debug.Log("TAKE PHOTO");
            Texture2D tex = new Texture2D(Screen.width, Screen.height, TextureFormat.RGBA32, false, true);

            drawFlag = true;
            InvokeRepeating("pauseForPhoto", 0.001f, 0.001f);

            Imgproc.resize(rgbMat, textureInstance, new Size(Screen.width, Screen.height));
            Debug.Log("texture is" + textureInstance.width() + ", " + textureInstance.height());
            Debug.Log("tex is" + tex.width + ", " + tex.height);
            Utils.fastMatToTexture2D(textureInstance, tex);

            //write to singleton
            ImageManager.instance.photo = tex;

            //write image
            Imgcodecs.imwrite("Assets/snapshot.jpeg", textureInstance);
        }
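Writing to "Assets/snapshot.jpeg" above only resolves inside the Unity Editor. A minimal sketch of a build-safe alternative, assuming the same Mat is passed in; note that imwrite stores channels in BGR order, so an RGB Mat may need an extra cvtColor first.

        // Sketch: build-safe variant of the snapshot write above.
        private void WriteSnapshot(Mat snapshotMat)
        {
            string snapshotPath = Path.Combine(Application.persistentDataPath, "snapshot.jpg");
            // Assumption: snapshotMat is already in BGR order; otherwise convert with Imgproc.cvtColor.
            Imgcodecs.imwrite(snapshotPath, snapshotMat);
            Debug.Log("Snapshot written to: " + snapshotPath);
        }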
Example No. 9
        private void SaveMarkerImg()
        {
            // save the markerImg.
            string saveDirectoryPath = Path.Combine(Application.persistentDataPath, "ArUcoCreateMarkerExample");
            string savePath          = "";

            #if UNITY_WEBGL && !UNITY_EDITOR
            string   format            = "jpg";
            MatOfInt compressionParams = new MatOfInt(Imgcodecs.CV_IMWRITE_JPEG_QUALITY, 100);
            #else
            string   format            = "png";
            MatOfInt compressionParams = new MatOfInt(Imgcodecs.CV_IMWRITE_PNG_COMPRESSION, 0);
            #endif
            switch (markerType)
            {
            default:
            case MarkerType.CanonicalMarker:
                savePath = Path.Combine(saveDirectoryPath, "CanonicalMarker-d" + (int)dictionaryId + "-i" + (int)markerId + "-sp" + markerSize + "-bb" + borderBits + "." + format);
                break;

            case MarkerType.GridBoard:
                savePath = Path.Combine(saveDirectoryPath, "GridBoard-mx" + gridBoradMarkersX + "-my" + gridBoradMarkersY + "-d" + (int)dictionaryId + "-os" + markerSize + "-bb" + borderBits + "." + format);
                break;

            case MarkerType.ChArUcoBoard:
                savePath = Path.Combine(saveDirectoryPath, "ChArUcoBoard-mx" + chArUcoBoradMarkersX + "-my" + chArUcoBoradMarkersY + "-d" + (int)dictionaryId + "-os" + markerSize + "-bb" + borderBits + "." + format);
                break;
            }

            if (!Directory.Exists(saveDirectoryPath))
            {
                Directory.CreateDirectory(saveDirectoryPath);
            }

            Imgcodecs.imwrite(savePath, markerImg, compressionParams);

            savePathInputField.text = savePath;
            Debug.Log("savePath: " + savePath);
        }
Example No. 10
        // Used to save pictures of faces as JPGs in the parent folder of the project, for training the Eigen Face Recognizer.

        public bool outRectToFile(Mat frame, ref int fileCounter)
        {
            Mat    localCpy = new Mat();
            int    counter  = 0;
            String myFile;

            foreach (OpenCVForUnity.Rect face in faceSquares.toArray())
            {
                myFile = "face" + fileCounter + ".jpg";
                fileCounter++;
                counter++;
                //localCpy = new Mat (new Size (face.width, face.height))
                localCpy = new Mat(frame, face);
                Imgcodecs.imwrite(myFile, localCpy);
            }
            if (counter == 0)
            {
                return(false);
            }
            else
            {
                return(true);
            }
        }
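A minimal usage sketch for outRectToFile above, assuming it is called once per processed frame with a file counter persisted across calls.

        // Sketch: collects face crops across frames; the counter keeps file names unique.
        private int m_fileCounter = 0;

        private void CollectTrainingFaces(Mat frame)
        {
            if (!outRectToFile(frame, ref m_fileCounter))
            {
                Debug.Log("No faces detected in this frame; nothing written.");
            }
        }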
Example No. 11
        public async Task <List <DetectedObject> > DetectAsync(CameraFrame frame)
        {
            if (frame == null)
            {
                throw new ArgumentNullException(nameof(frame));
            }
            Imgcodecs.imwrite(Application.persistentDataPath + "/testB.jpg", frame.Mat);

            try
            {
                Debug.Log($"Enter PredictImageAsync with mat {frame.Mat}");
                var imageWidth  = frame.Width;
                var imageHeight = frame.Height;

                Mat rgb = new Mat(imageWidth, imageHeight, CvType.CV_8UC3);
                if (frame.Format == ColorFormat.Grayscale)
                {
                    Imgproc.cvtColor(frame.Mat, rgb, Imgproc.COLOR_GRAY2RGB);
                    Debug.Log($"Converted gray2rgb to {rgb}");
                }
                else
                {
                    frame.Mat.copyTo(rgb);
                }

                //Mat rgba = new Mat();
                //Imgproc.cvtColor(rgb, rgba, Imgproc.COLOR_RGB2RGBA);

                float newHeight = 416.0f / imageWidth * imageHeight;
                Mat   resized   = new Mat(416, 416, CvType.CV_8UC3);
                Imgproc.resize(rgb, resized, new Size(416, newHeight), 0.5, 0.5, Imgproc.INTER_LINEAR);
                //Imgproc.resize(rgb, resized, new Size(targetWidth, targetHeight), 0.5, 0.5, Imgproc.INTER_LINEAR);
                Debug.Log($"Resized {resized}");

                Mat resizedBorder = new Mat();
                Core.copyMakeBorder(resized, resizedBorder, 0, (int)(416 - newHeight), 0, 0, Core.BORDER_CONSTANT, new Scalar(0, 0, 0));

                /*Mat rgba = new Mat();
                 * Imgproc.cvtColor(resizedBorder, rgba, Imgproc.COLOR_RGB2RGBA);*/

                Texture2D texture = new Texture2D(416, 416, TextureFormat.RGB24, false);
                Utils.matToTexture2D(resizedBorder, texture, true);
                //texture.Apply();
                Color32[] pixels32 = texture.GetPixels32();

                byte[] encodeArrayToJPG = ImageConversion.EncodeArrayToJPG(pixels32, GraphicsFormat.R8G8B8A8_UInt, 416, 416);
                File.WriteAllBytes(Application.persistentDataPath + "/testA.jpg", encodeArrayToJPG);

                using (var tensor = TransformInput(pixels32, ImageNetSettings.imageWidth, ImageNetSettings.imageHeight))
                {
                    var inputs = new Dictionary <string, Tensor>();
                    inputs.Add(ModelSettings.ModelInput, tensor);
                    //yield return StartCoroutine(worker.StartManualSchedule(inputs));
                    //var output = engine.Execute(inputs).PeekOutput();
                    var output  = engine.Execute(inputs).PeekOutput(ModelSettings.ModelOutput);
                    var results = outputParser.ParseOutputs(output, MINIMUM_CONFIDENCE);
                    var boxes   = outputParser.FilterBoundingBoxes(results, 10, MINIMUM_CONFIDENCE);
                    foreach (var box in boxes)
                    {
                        Debug.Log($"{box.tagName}, {box.probability}, {box.boundingBox.left},{box.boundingBox.top},{box.boundingBox.width},{box.boundingBox.height},");
                    }

                    List <DetectedObject> detectedObjects = boxes.Select(prediction => CreateDetectedObject(frame, prediction, (int)newHeight)).ToList();
                    int count = 0;
                    foreach (var detectedObject in detectedObjects)
                    {
                        count++;
                        Mat clone = frame.Mat.clone();
                        Imgproc.rectangle(clone, detectedObject.Rect.tl(), detectedObject.Rect.br(), new Scalar(255, 255, 255), 10, 1, 0);
                        Imgcodecs.imwrite(Application.persistentDataPath + "/clone-" + count + ".jpg", clone);
                    }
                }
            }
            catch (Exception e)
            {
                Debug.LogException(e);
                throw;
            }

            return(new List <DetectedObject>());
        }
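A minimal usage sketch for DetectAsync above, assuming an async caller that already holds a CameraFrame instance; the detector field name is hypothetical.

        // Sketch: awaits the detector and logs how many objects came back.
        private async Task RunDetectionAsync(CameraFrame frame)
        {
            List<DetectedObject> detections = await detector.DetectAsync(frame);
            Debug.Log("Detected objects: " + detections.Count);
        }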
Example No. 12
    private void DisplayImage(string path)
    {
        if (System.IO.File.Exists(path))
        {
            path = Application.persistentDataPath + "/" + imageName + ".jpg";
            byte[]    bytes   = System.IO.File.ReadAllBytes(path);
            Texture2D texture = new Texture2D(1, 1);
            texture.LoadImage(bytes);
            unalteredScene = new Texture2D(1, 1);
            unalteredScene.LoadImage(bytes);
            Texture2D orgTexture = new Texture2D(1, 1);
            orgTexture.LoadImage(bytes);

            float aspectRatio = (float)texture.width / texture.height;


            if (Screen.orientation == ScreenOrientation.Portrait)
            {
                int width = (int)Math.Round(480 * aspectRatio);
                texture        = ScaleTexture(texture, texture.width, texture.height);
                unalteredScene = ScaleTexture(texture, texture.width, texture.height);
                orgTexture     = ScaleTexture(orgTexture, texture.width, texture.height);
            }
            else
            {
                int height = (int)Math.Round(480 / aspectRatio);
                texture        = ScaleTexture(texture, texture.width, texture.height);
                unalteredScene = ScaleTexture(texture, texture.width, texture.height);
                orgTexture     = ScaleTexture(orgTexture, texture.width, texture.height);
            }

            //OPENCV Color Picker
            Mat OrgTextureMat = new Mat(orgTexture.height, orgTexture.width, CvType.CV_8UC4);
            Utils.texture2DToMat(orgTexture, OrgTextureMat);


            Mat samples    = OrgTextureMat.reshape(1, OrgTextureMat.cols() * OrgTextureMat.rows());
            Mat samples32f = new Mat();
            samples.convertTo(samples32f, CvType.CV_32F, 1.0 / 255.0);

            Mat          labels   = new Mat();
            TermCriteria criteria = new TermCriteria(TermCriteria.COUNT, 100, 1);
            centers = new Mat();
            Core.kmeans(samples32f, 10, labels, criteria, 1, Core.KMEANS_PP_CENTERS, centers);

            //centers.convertTo(centers, CvType.CV_8UC1, 255.0);
            //centers.reshape(3);

            Debug.Log(centers);
            Debug.Log(centers.rows());
            Debug.Log(centers.cols());

            texture = changeColor(texture, centers);
            //OPENCV
            Mat TextureMat     = new Mat(texture.height, texture.width, CvType.CV_8UC4);
            Mat CopyTextureMat = new Mat(texture.height, texture.width, CvType.CV_8UC4);

            Imgproc.cvtColor(CopyTextureMat, CopyTextureMat, Imgproc.COLOR_RGB2BGRA);

            Utils.texture2DToMat(texture, TextureMat);
            Utils.texture2DToMat(texture, CopyTextureMat);
            Imgproc.cvtColor(CopyTextureMat, CopyTextureMat, Imgproc.COLOR_RGB2BGRA);
            Imgcodecs.imwrite("D:\\Thesis\\Outputs\\ColorChange_" + imageName + ".jpg", CopyTextureMat);

            //Mat refImg = new Mat(texture.height, texture.width, CvType.CV_8UC4);
            Mat Canvas = new Mat(texture.height, texture.width, CvType.CV_8UC4);
            //int[] radius = new int[] { 8, 4, 2 };

            if (resultMan.getSelectedPalette() == 0)
            {
                pre1886ColorChangedTexture = texture;
            }
            else if (resultMan.getSelectedPalette() == 1)
            {
                post1886ColorChangedTexture = texture;
            }
            else if (resultMan.getSelectedPalette() == 2)
            {
                finalYearsColorChangedTexture = texture;
            }

            // ZMQ BRUSH STROKE ALGORITHM
            byte[] colorChangedPic = texture.EncodeToJPG();
            string image64         = Convert.ToBase64String(colorChangedPic);

            // Copy this structure to add in change color
            clientSocket = new ClientSocketScript(image64, "GetBrushStrokes", resultMan.getBrushStrokeIndex(), resultMan.getBrushValues());
            clientSocket.Start();
            while (clientSocket.getResponse() == null)
            {
                clientSocket.Update();
            }


            bytes = Convert.FromBase64String(clientSocket.getResponse());
            clientSocket.Stop();
            texture.LoadImage(bytes);

            /*Mat grayMat = new Mat(TextureMat.rows(), TextureMat.cols(), CvType.CV_8UC1);
             * Mat gradientx = new Mat(TextureMat.rows(), TextureMat.cols(), CvType.CV_8UC1);
             * Mat gradienty = new Mat(TextureMat.rows(), TextureMat.cols(), CvType.CV_8UC1);
             *
             * Mat rgbaMat = new Mat(texture.height, texture.width, CvType.CV_8UC4);
             * Utils.texture2DToMat(texture, rgbaMat);
             *
             * Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
             * Imgproc.Scharr(grayMat, gradientx, rgbaMat.depth(), 1, 0, 1 / 15.36);
             * Imgproc.Scharr(grayMat, gradienty, rgbaMat.depth(), 0, 1, 1 / 15.36);
             *
             * //int gradient_smoothing_radius = Math.Round(Math.Max(rgbaMat.dims) / 50);
             * //chosen stroke scale: 2
             * //chosen gradient smoothing radius: 16
             * Imgproc.GaussianBlur(gradientx, gradientx, new Size(2 * 16 + 1, 2 * 16 + 1), 0);
             * Imgproc.GaussianBlur(gradienty, gradienty, new Size(2 * 16 + 1, 2 * 16 + 1), 0);
             *
             * Imgproc.medianBlur(rgbaMat, rgbaMat, 11);
             *
             * List<int> gridx = new List<int>();
             * List<int> gridy = new List<int>();
             * int index = 0;
             * System.Random rnd = new System.Random();
             *
             * //new grid
             * for (int i = 0; i < texture.width; i += 3)
             * {
             *  for (int j = 0; j < texture.height; j += 3)
             *  {
             *      int x = rnd.Next(-1, 2) + i;
             *      int y = rnd.Next(-1, 2) + j;
             *
             *      gridy.Add(y % texture.height);
             *      gridx.Add(x % texture.width);
             *      index++;
             *  }
             * }
             * //shuffle grid
             * int n = gridy.Count;
             * while (n > 1)
             * {
             *  n--;
             *  int k = rnd.Next(n + 1);
             *  int temp = gridy[k];
             *  gridy[k] = gridy[n];
             *  gridy[n] = temp;
             *
             *  temp = gridx[k];
             *  gridx[k] = gridx[n];
             *  gridx[n] = temp;
             * }
             *
             * int batch_size = 10000;
             * Debug.Log(gridx.Count + " " + gridy.Count);
             * List<Color32> pixels = new List<Color32>();
             * List<Color32> orgPixels = new List<Color32>();
             * double shortest = 50, longest = 0, angleShort = 0;
             * for (int h = 0; h < index - 1; h += batch_size)
             * {
             *  pixels = new List<Color32>();
             *  orgPixels = new List<Color32>();
             *  int endpoint = h + batch_size;
             *  if (endpoint > index - 1)
             *      endpoint = index - 1;
             *  //get the color from the texture
             *  for (int px = h; px < endpoint; px++)
             *  {
             *      Color32 cpixel = texture.GetPixel(gridx[px], (gridy[px] - (texture.height - 1)) * (-1));
             *      pixels.Add(cpixel);
             *      cpixel = orgTexture.GetPixel(gridx[px], (gridy[px] - (orgTexture.height - 1)) * (-1));
             *      orgPixels.Add(cpixel);
             *  }
             *  int cindex = 0;
             *  for (int px = h; px < endpoint; px++)
             *  {
             *      int x = gridx[px],
             *          y = gridy[px];
             *
             *      //get color
             *      Color32 cpixel;
             *      //use color of pixel
             *      int cprob = rnd.Next(1, 11);
             *
             *      //if(cprob <= 10) {
             *      Color a, b;
             *      a = orgPixels[cindex];
             *      b = pixels[cindex];
             *      //here lol
             *      cpixel = (a + b) / 2;
             *      //cpixel = a;
             *      //} else {
             *      //if (cprob > 5) {
             *      List<Color32> c_palette = color_palette;
             *      c_palette.Remove(pixels[cindex]);
             *      cprob = 0;
             *      cprob = rnd.Next(0, c_palette.Count - 1);
             *      a = c_palette[cprob];
             *      b = cpixel;
             *      //cpixel = (a  + b )/2;
             *      //cpixel = b*1.1f;
             *
             *
             *      //cpixel = a;
             *
             *      //}
             *      cindex++;
             *      //get angle
             *      double length = Math.Round(2 + 2 * Math.Sqrt(Math.Sqrt(gradienty.get(y, x)[0] * gradienty.get(y, x)[0] + gradientx.get(y, x)[0] * gradientx.get(y, x)[0])));
             *      double angle = (180 / Math.PI) * (Math.Atan2(gradienty.get(y, x)[0], gradientx.get(y, x)[0])) + 90;
             *      double lengthb = 1;
             *      if (length < shortest)
             *      {
             *          shortest = length;
             *          angleShort = angle;
             *      }
             *      if (length > longest)
             *          longest = length;
             *      if (length > 2 && angle != 90)
             *      {
             *          length /= 3;
             *      }
             *      else
             *      {
             *          angle += 80;
             *          length = 10;
             *          lengthb = 2;
             *          float H, S, V;
             *          Color.RGBToHSV(b, out H, out S, out V);
             *          float sat = cprob * 0.01f;
             *          sat += 1;
             *          S *= sat;
             *          cpixel = Color.HSVToRGB(H, S, V);
             *      }
             *      Imgproc.ellipse(rgbaMat, new Point(x, y), new Size(length, lengthb), angle, 0, 360, new Scalar(cpixel.r, cpixel.g, cpixel.b), -1, Imgproc.LINE_AA);
             *  }
             *  //Imgcodecs.imwrite("D:\\Thesis\\Outputs\\PredecessorBrushStroke" + h + ".jpg", rgbaMat);
             * }
             * Debug.Log("Longest : " + longest);
             * Debug.Log("Shortest : " + shortest);
             * Debug.Log("Angle : " + angleShort);
             * Utils.matToTexture2D(rgbaMat, texture);*/


            //Imgproc.cvtColor(Canvas, Canvas, Imgproc.COLOR_RGB2BGRA);
            //Utils.matToTexture2D(Canvas, texture);

            //texture = changeColor(texture, centers);

            Imgproc.cvtColor(Canvas, Canvas, Imgproc.COLOR_RGB2BGRA);
            Utils.texture2DToMat(texture, Canvas);
            Imgproc.cvtColor(Canvas, Canvas, Imgproc.COLOR_RGB2BGRA);

            //Imgcodecs.imwrite("D:\\Thesis\\Outputs\\OldBrushStroke_" + imageName + "_Final.jpg", Canvas);
            //Imgcodecs.imwrite("D:\\Thesis\\Outputs\\OldBrushStroke_" + imageName +"_ColorChanged.jpg", Canvas);

            texture.Apply();

            Sprite sprite = Sprite.Create(texture, new UnityEngine.Rect(0, 0, texture.width, texture.height), new Vector2(0.5f, 0.5f), 100);

            string imageNameToBeRendered = "Image" + resultMan.getBrushStrokeIndex().ToString() + "_";

            if (resultMan.getSelectedPalette() == 0)
            {
                imageNameToBeRendered += "Pre";
            }
            else if (resultMan.getSelectedPalette() == 1)
            {
                imageNameToBeRendered += "Post";
            }
            else if (resultMan.getSelectedPalette() == 2)
            {
                imageNameToBeRendered += "Final";
            }

            for (int i = 0; i < images.Length; i++)
            {
                if (imageNameToBeRendered.Contains(images[i].name))
                {
                    images[i].sprite  = sprite;
                    images[i].enabled = true;
                    images[i].GetComponentInChildren <ImageParameters>().setBrushStrokeOpacities(resultMan.getBrushValues());
                    images[i].GetComponentInChildren <ImageParameters>().setColorPaletteSelected(resultMan.getSelectedPalette());
                    images[i].GetComponentInChildren <ImageParameters>().setBrushStrokeIndex(resultMan.getBrushStrokeIndex());
                    resultMan.setCurrentDisplayedImageName(images[i].name);
                }
            }

            //screenshot = sprite;
            arcamera.SetActive(false);

            resultMan.setIsRendered(true);
            resultMan.resetColorPaletteButtons();

            /*Mat grad_x = new Mat();
             * Mat grad_y = new Mat();
             * Mat abs_grad_x = new Mat();
             * Mat abs_grad_y = new Mat();
             * Mat dst = new Mat();
             * Mat final = new Mat();
             * Mat sobelMat = new Mat(texture.height, texture.width, CvType.CV_8UC4);
             *
             *
             * Utils.texture2DToMat(texture, sobelMat);
             *
             * Imgproc.GaussianBlur(sobelMat, sobelMat, new Size(3, 3), 0, 0);
             * Imgproc.cvtColor(sobelMat, dst, Imgproc.COLOR_RGBA2GRAY);
             *
             * Imgproc.Sobel(dst, grad_x, CvType.CV_16S, 1, 0, 3, 1, 0);
             * Core.convertScaleAbs(grad_x, abs_grad_x);
             *
             * Imgproc.Sobel(dst, grad_y, CvType.CV_16S, 0, 1, 3, 1, 0);
             * Core.convertScaleAbs(grad_y, abs_grad_y);
             *
             * Core.addWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 0, final);
             *
             * Imgcodecs.imwrite("D:\\Thesis\\Outputs\\Final_Edge.jpg", final);*/
        }
    }
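DisplayImage above relies on a ScaleTexture helper that is not shown. A minimal sketch of one possible implementation using a temporary RenderTexture; the name and signature are assumptions inferred from the calls above.

    // Sketch: GPU-resizes a Texture2D to the requested dimensions via Graphics.Blit.
    private Texture2D ScaleTexture(Texture2D source, int targetWidth, int targetHeight)
    {
        RenderTexture rt = RenderTexture.GetTemporary(targetWidth, targetHeight);
        RenderTexture previous = RenderTexture.active;

        Graphics.Blit(source, rt);
        RenderTexture.active = rt;

        Texture2D result = new Texture2D(targetWidth, targetHeight, TextureFormat.RGBA32, false);
        result.ReadPixels(new UnityEngine.Rect(0, 0, targetWidth, targetHeight), 0, 0);
        result.Apply();

        RenderTexture.active = previous;
        RenderTexture.ReleaseTemporary(rt);
        return result;
    }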