Example 1
    void sharpenImage(Frame3DGPU f)
    {
        computeShader.SetInt("image_width", f.colorWidth);
        computeShader.SetInt("image_height", f.colorHeight);

        // set uniform values on the shader
        computeShader.SetInt("blur_r", (int)blurR);

        if (f.snapped)
        {
            //if (depthThreshold > 3) {   // different bias, since a non-linear factor is applied to the depth
            //    depthThreshold += 3f;
            //    depthThreshold *= 6.0f;
            //}                           // hard-coded bias; should be checked
            //else
            float tempDepthThreshold = depthThreshold;
            tempDepthThreshold += 9.4f;             // hand-tuned linear offset :/
            tempDepthThreshold *= 3.6f;
            computeShader.SetFloat("depth_threshold", tempDepthThreshold);
            computeShader.SetTexture(0, "in_color_texture", f.postprocessedRGBImage);
            var tempDepth = new Mat();
            Core.bitwise_not(f.refinedDepth, tempDepth);
            f.refinedDepthTex2D = (Texture2D)Util.toTexture(tempDepth, TextureFormat.R8, true, -1);
            computeShader.SetTexture(0, "in_depth_texture", f.refinedDepthTex2D);
        }
        else
        {
            computeShader.SetFloat("depth_threshold", depthThreshold);
            computeShader.SetTexture(0, "in_color_texture", f.rgbImage);
            computeShader.SetTexture(0, "in_depth_texture", f.depthImage);
        }
        computeShader.SetTexture(0, "out_color_texture", f.filteredImage);
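        // Dispatch with the shader's (presumed) 8x8 thread groups; this assumes colorWidth and
        // colorHeight are multiples of 8, otherwise the right/bottom edge pixels are not processed.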
        computeShader.Dispatch(0, f.colorWidth / 8, f.colorHeight / 8, 1);
    }
Example 2
    // Called with the latest frame; when a snapshot was taken, write the color and depth images to disk and open the output folder
    public void OnShot(Frame3DGPU frame3Dgpu)
    {
        if (frame3Dgpu == null)
        {
            return;
        }
        if (frame3Dgpu.snapped)
        {
            frame3Dgpu.snapped = false;
            Debug.Log("writing color to " + filePath + ".png");
            Mat colorFlipped = new Mat();
            Core.flip(frame3Dgpu.waterMarkedColor, colorFlipped, 1);
            Mat bgr = new Mat();
            Imgproc.cvtColor(colorFlipped, bgr, Imgproc.COLOR_RGB2BGR);             // imwrite expects BGR channel order
            Imgcodecs.imwrite(filePath + ".png", bgr);

            Mat depthFlipped = new Mat();
            Core.flip(frame3Dgpu.waterMarkedDepth, depthFlipped, 1);
            Debug.Log("writing depth to " + filePath + "_depth.png");
            Imgcodecs.imwrite(filePath + "_depth.png", depthFlipped);

            // Open the directory where the images are saved in Explorer
            Debug.Log("explorer.exe" + " /n, /e, " + outpath.Replace('/', '\\'));
            System.Diagnostics.Process.Start("explorer.exe", "/n, /e, " + outpath.Replace('/', '\\'));
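            // 2 == SW_SHOWMINIMIZED: minimize the current window, presumably so the Explorer window comes to the front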
            ShowWindow(GetActiveWindow(), 2);
        }
    }
Example 3
    // Update is called once per frame
    void Update()
    {
        if (depth != null && color != null && frame3Dgpu == null)
        {
            // Only one instance of frame3Dgpu is created; it is passed to other components by reference
            // to avoid GC overhead.
            frame3Dgpu                                 = new Frame3DGPU();
            frame3Dgpu.colorHeight                     = color.height;
            frame3Dgpu.colorWidth                      = color.width;
            frame3Dgpu.depthHeight                     = depth.height;
            frame3Dgpu.depthWidth                      = depth.width;
            frame3Dgpu.depthImage                      = (Texture2D)depth;
            frame3Dgpu.rgbImage                        = (Texture2D)color;
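            // output target for the compute shader; enableRandomWrite must be set before Create()
            // so the texture can be bound as a random-write (UAV) target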
            frame3Dgpu.filteredImage                   = new RenderTexture(frame3Dgpu.colorWidth, frame3Dgpu.colorHeight, 0, RenderTextureFormat.ARGB32);
            frame3Dgpu.filteredImage.filterMode        = FilterMode.Point;
            frame3Dgpu.filteredImage.enableRandomWrite = true;
            frame3Dgpu.filteredImage.Create();

            frame3Dgpu.recoloredImages    = new List <RenderTexture>();
            frame3Dgpu.filteredImageTex2D = new Texture2D(frame3Dgpu.colorWidth, frame3Dgpu.colorHeight, TextureFormat.RGB24, false);
            frame3Dgpu.refinedDepthTex2D  = new Texture2D(frame3Dgpu.depthWidth, frame3Dgpu.depthHeight, TextureFormat.R8, false);

            frame3Dgpu.snapped = false;

            frame3Dgpu.refinedDepth     = new Mat();
            frame3Dgpu.waterMarkedColor = new Mat();
            frame3Dgpu.waterMarkedDepth = new Mat();

            frame3DgpuBinding.Invoke(frame3Dgpu);
        }
        Resources.UnloadUnusedAssets();
    }
Example 4
    void domainTransferDepthImage(Frame3DGPU f)
    {
        //Utils.setDebugMode(true);
        Debug.Log("Applying EdgeCleanup to Depth");
        // convert from texture to mat
        Mat rgbMat = new Mat();

        Core.flip(Util.toMat(f.postprocessedRGBImage, CvType.CV_8UC3), rgbMat, -1);
        Mat depthMat = Util.toMat(f.depthImage, CvType.CV_16UC1);

        Mat gray = new Mat();

        Imgproc.cvtColor(rgbMat, gray, Imgproc.COLOR_RGB2GRAY);             // rgbMat is CV_8UC3, so use the 3-channel conversion
        Mat sobelX = new Mat();
        Mat sobelY = new Mat();

        Imgproc.Sobel(gray, sobelX, CvType.CV_16S, 1, 0, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);
        Imgproc.Sobel(gray, sobelY, CvType.CV_16S, 0, 1, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);

        Mat depthMat8bit = new Mat();

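        // Scale the 16-bit depth into 8 bits (0.03 maps depth values up to ~8500 units into 0-255),
        // then invert so that nearer (smaller) depth values become brighter.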
        depthMat.convertTo(depthMat8bit, CvType.CV_8UC1, 0.03f);
        Core.bitwise_not(depthMat8bit, depthMat8bit);
        //Imgproc.equalizeHist(depthMat8bit, depthMat8bit);

        Mat depthFlipped = new Mat();

        Core.flip(depthMat8bit, depthFlipped, -1);

        Mat cannyRslt = new Mat();

        Imgproc.Canny(sobelX, sobelY, cannyRslt, cannyThreshold1, cannyThreshold2, true);

        //Imgcodecs.imwrite("C:/Users/SIGLab/AppData/LocalLow/Intel/Photo3D/3dImages/" + "depth.png", cannyRslt);

        // 415: incomplete depth near the right/bottom border; crop the valid region
        // and pad it back out to the full frame with a replicated border.
        Mat cropped = depthFlipped.submat(0, 690, 0, 1190);

        Core.copyMakeBorder(cropped, depthFlipped, 0, 720 - 690, 0, 1280 - 1190, Core.BORDER_REPLICATE | Core.BORDER_ISOLATED);


        Mat laplacianRslt = new Mat();

        Imgproc.Laplacian(gray, laplacianRslt, CvType.CV_32F, 5, .1, 0);

        // Edge-aware domain transform filtering: smooth the flipped depth image using the Canny edge image as the guide.
        Ximgproc.dtFilter(cannyRslt, depthFlipped, f.refinedDepth, sigmaSpacial, sigmaColor, Ximgproc.DTF_NC, dtIter);

        // Not working in built players; can't figure out why
        List <Mat> matList  = new List <Mat>();
        Mat        depthLUT = Util.toMat(depthRescale, CvType.CV_8UC3);

        Core.split(depthLUT, matList);
        Mat temp = new Mat();

        f.refinedDepth.convertTo(temp, CvType.CV_8UC1);
        Core.LUT(temp, matList[0], f.refinedDepth);
        //Utils.setDebugMode(false);
    }
Example 5
    void filterImage(Frame3DGPU f)
    {
        // prefer postprocessed image

        // color, brightness, contrast and saturation processing go here
//		image = image * Mathf.Pow(2, exposure);
//		image = image + new Scalar(brightness + 0.2, brightness + 0.2, brightness + 0.2);
//		image = (image * (contrast / 127.0f + 1)) - new Scalar(contrast, contrast, contrast);
        // blur processing
        sharpenImage(f);
        // Resizing is removed for now; it will be added back if speed turns out to be insufficient.
    }
Example 6
    void postprocessRGBImage(Frame3DGPU f)
    {
        if (f.snapped)
        {
            var rgbMat          = Util.toMat(f.rgbImage, CvType.CV_8UC3);
            var rgbProcessedMat = new Mat();

            //Photo.denoise_TVL1(mats, f.postprocessedRGBImage, 1, 2);
            Photo.fastNlMeansDenoisingColored(rgbMat, rgbProcessedMat, h, hColor, templateWindowSize, searchWindowSize);

            if (f.postprocessedRGBImage)
            {
                Texture.DestroyImmediate(f.postprocessedRGBImage);
                f.postprocessedRGBImage = null;
            }

            f.postprocessedRGBImage = (Texture2D)Util.toTexture(rgbProcessedMat, TextureFormat.RGB24);
        }
    }
Example 7
 public void OnFrame3DGPU(Frame3DGPU inFrame)
 {
     frame3dgpu = inFrame;
 }
Example 8
    public bool filterImage(Frame3DGPU f, float brightness, float contrast, float strength)
    {
        if (f.colorWidth == 0 || f.colorHeight == 0)
        {
            return(false);
        }
        // if recoloredImages has not been grown to this index yet, allocate the missing entries here
        if (f.recoloredImages.Count <= index)
        {
            var add_count = index + 1 - f.recoloredImages.Count;
            for (int i = 0; i < add_count; i++)
            {
                f.recoloredImages.Add(new RenderTexture(f.colorWidth, f.colorHeight, 0, RenderTextureFormat.ARGB32));
                f.recoloredImages[f.recoloredImages.Count - 1].filterMode        = FilterMode.Point;
                f.recoloredImages[f.recoloredImages.Count - 1].enableRandomWrite = true;
                f.recoloredImages[f.recoloredImages.Count - 1].Create();
            }
        }

        //////////////////////////////////////////////////////////////
        // Modify the LUT to include brightness and contrast

        Mat LUTMat = Util.toMat(LUT, CvType.CV_8UC3);

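        // Brightness is an additive offset, contrast a linear stretch: x' = x * (contrast / 127 + 1) - contrast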
        LUTMat       = LUTMat + new Scalar(brightness + 0.2, brightness + 0.2, brightness + 0.2);
        LUTMat       = (LUTMat * (contrast / 127.0f + 1)) - new Scalar(contrast, contrast, contrast);
        modifiyedLUT = (Texture2D)Util.toTexture(LUTMat, TextureFormat.RGB24);

        Mat LUTMat2 = Util.toMat(LUT2, CvType.CV_8UC3);

        LUTMat2       = LUTMat2 + new Scalar(brightness + 0.2, brightness + 0.2, brightness + 0.2);
        LUTMat2       = (LUTMat2 * (contrast / 127.0f + 1)) - new Scalar(contrast, contrast, contrast);
        modifiyedLUT2 = (Texture2D)Util.toTexture(LUTMat2, TextureFormat.RGB24);

        //////////////////////////////////////////////////////////////

        computeShader.SetInt("image_width", f.colorWidth);
        computeShader.SetInt("image_height", f.colorHeight);

        // set uniform values on the shader
        computeShader.SetInt("map_width", LUT.width - 1);
        computeShader.SetFloat("filter_strength", strength);

        computeShader.SetTexture(1, "in_color_map", modifiyedLUT);
        computeShader.SetTexture(1, "in_color_map2", modifiyedLUT2);
        computeShader.SetTexture(1, "in_color_texture", f.filteredImage);
        if (f.snapped)
        {
            // I believe depth threshold is already provided. Hopefully...
            // for color LUT processing
            computeShader.SetFloat("depth_threshold", 1.5f * 26.0f);
            var tempDepth = new Mat();
            Core.bitwise_not(f.refinedDepth, tempDepth);             // invert the refined depth before uploading
            f.refinedDepthTex2D = (Texture2D)Util.toTexture(tempDepth, TextureFormat.R8, true, -1);
            computeShader.SetTexture(1, "in_depth_texture", f.refinedDepthTex2D);
            computeShader.SetTexture(1, "out_color_texture", f.recoloredImages[index]);
            computeShader.Dispatch(1, f.colorWidth / 8, f.colorHeight / 8, 1);

            // for HSV LUT processing
            computeShader.SetTexture(2, "in_hsv_map", hsvLUT);
            computeShader.SetTexture(2, "in_hsv_map2", hsvLUT2);
//			computeShader.SetTexture(2, "in_color_texture", f.filteredImage); // used when RGB LUT not processed
            computeShader.SetTexture(2, "in_depth_texture", f.refinedDepthTex2D);
            computeShader.SetTexture(2, "out_color_texture", f.recoloredImages[index]);             // in/out
            computeShader.Dispatch(2, f.colorWidth / 8, f.colorHeight / 8, 1);
        }
        else
        {
            // for color LUT processing
            computeShader.SetFloat("depth_threshold", 1.5f);
            computeShader.SetTexture(1, "in_depth_texture", f.depthImage);
            computeShader.SetTexture(1, "out_color_texture", f.recoloredImages[index]);
            computeShader.Dispatch(1, f.colorWidth / 8, f.colorHeight / 8, 1);

            // for HSV LUT processing
            computeShader.SetTexture(2, "in_hsv_map", hsvLUT);
            computeShader.SetTexture(2, "in_hsv_map2", hsvLUT2);
//			computeShader.SetTexture(2, "in_color_texture", f.filteredImage); // used when RGB LUT not processed
            computeShader.SetTexture(2, "in_depth_texture", f.depthImage);
            computeShader.SetTexture(2, "out_color_texture", f.recoloredImages[index]);                 // in/out
            computeShader.Dispatch(2, f.colorWidth / 8, f.colorHeight / 8, 1);
        }


        return(true);
    }
Example 9
    // apply the watermark to the color and depth images
    void WaterMarkFrame3D(Frame3DGPU f, Texture2D waterMarkColor, Texture2D waterMarkDepth)
    {
        // converting from RenderTexture to OpenCV Mat via Texture2D
        if (lut != null)
        {
            RenderTexture.active = frame3Dgpu.recoloredImages[lut.index];
        }
        else
        {
            RenderTexture.active = frame3Dgpu.filteredImage;
        }

        f.filteredImageTex2D.ReadPixels(new UnityEngine.Rect(0, 0, frame3Dgpu.filteredImage.width, frame3Dgpu.filteredImage.height), 0, 0);
        f.filteredImageTex2D.Apply();

        Mat filteredImage = Util.toMat(f.filteredImageTex2D, CvType.CV_8UC3);

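        // Size(0, 0) with fx = fy = 2 doubles the image in both dimensions before placing the watermark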
        Imgproc.resize(filteredImage, filteredImage, new Size(0, 0), 2, 2);
        Mat roi = filteredImage.submat(
            filteredImage.rows() - waterMarkColor.height - 70,
            filteredImage.rows() - 70,
            (filteredImage.cols() / 2) - (waterMarkColor.width / 2),
            (filteredImage.cols() / 2) - (waterMarkColor.width / 2) + waterMarkColor.width);

        Mat img2gray     = new Mat();
        Mat waterMarkMat = Util.toMat(waterMarkColor, CvType.CV_8UC3, true, -1);

        Imgproc.cvtColor(waterMarkMat, img2gray, Imgproc.COLOR_BGR2GRAY);
        Mat mask    = new Mat();
        Mat maskInv = new Mat();

        Imgproc.threshold(img2gray, mask, 10, 255, Imgproc.THRESH_BINARY);
        Core.bitwise_not(mask, maskInv);

        Mat bg = new Mat();

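        // Composite the watermark: keep ROI pixels where the mask is black (background) and
        // watermark pixels where it is white (foreground), then add the two together.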
        Core.bitwise_and(roi, roi, bg, maskInv);
        Mat fg = new Mat();

        Core.bitwise_and(waterMarkMat, waterMarkMat, fg, mask);

        Mat rslt = new Mat();

        Core.add(bg, fg, rslt);
        rslt.copyTo(roi);
        f.waterMarkedColor = filteredImage;

        Debug.Log("Applying Watermark to depth image");
        // apply the watermark to the depth image as well

        Mat refinedDepth = new Mat();

        Core.flip(f.refinedDepth, refinedDepth, -1);

        Imgproc.resize(refinedDepth, refinedDepth, new Size(0, 0), 2, 2);
        roi = refinedDepth.submat(
            refinedDepth.rows() - waterMarkColor.height - 70,
            refinedDepth.rows() - 70,
            (refinedDepth.cols() / 2) - (waterMarkColor.width / 2),
            (refinedDepth.cols() / 2) - (waterMarkColor.width / 2) + waterMarkColor.width);


        Mat filledMask = mask.clone();

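        // Fill holes inside the watermark mask: flood-fill the background from the corner,
        // invert it to isolate the enclosed holes, and OR them back into the mask.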
        Imgproc.floodFill(filledMask, new Mat(), new Point(0, 0), new Scalar(255));
        Mat filledMaskInv = new Mat();

        Core.bitwise_not(filledMask, filledMaskInv);
        Core.bitwise_or(mask, filledMaskInv, mask);

        //Imgcodecs.imwrite("C:/Users/SIGLab/AppData/LocalLow/Intel/Photo3D/3dImages/" + "filledMask.png", mask);

        bg = new Mat();
        //Imgproc.dilate(mask, mask, new Mat(3,3, CvType.CV_8UC1), new Point(0,0));
        //Imgproc.blur(mask, mask, new Size(2,2), new Point(1,1));
        Core.bitwise_not(mask, maskInv);
        //Core.bitwise_and(roi, roi, bg, maskInv);
        rslt = new Mat();
        Core.add(roi, mask, rslt);
        rslt.copyTo(roi);
        refinedDepth.copyTo(f.waterMarkedDepth);
    }