예제 #1
1
                /// <summary>
                /// Applies the <see cref="Closing"/> operator (a dilation
                /// followed by an erosion, both using this instance's
                /// structuring element) to the given <see cref="Matrix"/>.
                /// </summary>
                /// <param name="src">
                /// The input <see cref="Matrix"/> to be closed.
                /// </param>
                /// <returns> The closed <see cref="Matrix"/>. </returns>
                public Matrix Execute (Matrix src)
                {
                        Matrix dilated = new Dilation (this.se).Execute (src);

                        return new Erosion (this.se).Execute (dilated);
                }
예제 #2
0
                /// <summary>
                /// Demo routine: for each image name listed in "img.txt",
                /// loads the grayscale JPEG, thresholds it at 100, dilates it
                /// with a 3x3 square structuring element, and writes the
                /// result into "dilation/" both as text and as a PNG.
                /// </summary>
                private static void DemoDilation()
                {
                        StructuringElement se = StructuringElement.CreateSquare(3);
                        Dilation dil = new Dilation(se);

                        using (StreamReader sr = new StreamReader("img.txt"))
                        {
                                String line;

                                while ((line = sr.ReadLine()) != null)
                                {
                                        string source = string.Format("grayscale_img/{0}.jpg", line);

                                        // Bitmaps wrap unmanaged GDI+ handles; dispose each
                                        // one so they do not leak across loop iterations.
                                        using (Bitmap bmp = Bitmap.FromFile(source) as Bitmap)
                                        {
                                                Matrix img_matrix = Matrix.FromBitmap(bmp);
                                                Matrix thres_matrix = img_matrix < 100;
                                                Matrix result = dil.Execute(thres_matrix);

                                                string dest = string.Format("dilation/{0}.txt", line);
                                                using (StreamWriter sw = new StreamWriter(dest, false))
                                                {
                                                        sw.Write(result.ToString());
                                                }
                                                using (Bitmap png = result.ToBitmap())
                                                {
                                                        png.Save(string.Format("dilation/{0}.png", line));
                                                }
                                        }
                                }
                        }
                }
예제 #3
0
        //public void ApplyGaussianBlur(int size=131,double X=15,double Y=13)
        //{
        //    Image<Gray, float> imgGray = _ImageInput.Convert<Gray, float>();
        //    Image<Gray, float> imgGaussian = new Image<Gray, float>(_ImageInput.Width,_ImageInput.Height);
        //    Mat gaussian = new Mat();
        //    CvInvoke.cvSmooth(imgGray, imgGaussian, SMOOTH_TYPE.CV_GAUSSIAN, size, size, X, Y);
        //    imageBox1.Image = imgGaussian;
        //}
        /// <summary>
        /// Menu handler: runs the dilation operator on the current input
        /// image and shows the result in <c>imageBox1</c>.
        /// </summary>
        private void dilationToolStripMenuItem_Click(object sender, EventArgs e)
        {
            var dilation = new Dilation();
            dilation.setDilationInput(_ImageInput);

            imageBox1.Image = dilation.ApplyDilation();
        }
예제 #4
0
        /// <summary>
        /// Demo routine: for each image name listed in "img.txt", loads the
        /// grayscale JPEG, thresholds it at 100, dilates it with a 3x3 square
        /// structuring element, and writes the result into "dilation/" both
        /// as text and as a PNG.
        /// </summary>
        private static void DemoDilation()
        {
            StructuringElement se  = StructuringElement.CreateSquare(3);
            Dilation           dil = new Dilation(se);

            using (StreamReader sr = new StreamReader("img.txt"))
            {
                String line;

                while ((line = sr.ReadLine()) != null)
                {
                    string source = string.Format("grayscale_img/{0}.jpg", line);

                    // Bitmaps wrap unmanaged GDI+ handles; dispose each one
                    // so they do not leak across loop iterations.
                    using (Bitmap bmp = Bitmap.FromFile(source) as Bitmap)
                    {
                        Matrix img_matrix   = Matrix.FromBitmap(bmp);
                        Matrix thres_matrix = img_matrix < 100;
                        Matrix result       = dil.Execute(thres_matrix);

                        string dest = string.Format("dilation/{0}.txt", line);
                        using (StreamWriter sw = new StreamWriter(dest, false))
                        {
                            sw.Write(result.ToString());
                        }
                        using (Bitmap png = result.ToBitmap())
                        {
                            png.Save(string.Format("dilation/{0}.png", line));
                        }
                    }
                }
            }
        }
예제 #5
0
        /// <summary>
        /// Applies the <see cref="Closing"/> operator (a dilation followed by
        /// an erosion, both built from this instance's structuring element)
        /// to the given <see cref="Matrix"/>.
        /// </summary>
        /// <param name="src">
        /// The input <see cref="Matrix"/> to be closed.
        /// </param>
        /// <returns> The closed <see cref="Matrix"/>. </returns>
        public Matrix Execute(Matrix src)
        {
            Matrix dilated = new Dilation(this.se).Execute(src);

            return new Erosion(this.se).Execute(dilated);
        }
예제 #6
0
        /// <summary>
        /// Button handler: applies a dilation with a fixed 3x3 X-shaped
        /// kernel (ones at the corners and center, anchor at 1,1) to the
        /// current image.
        /// </summary>
        private void btn_edit_dilation_Click(object sender, EventArgs e)
        {
            int[, ] kernel =
            {
                { 1, 0, 1 },
                { 0, 1, 0 },
                { 1, 0, 1 }
            };

            Dilation dil = new Dilation();
            dil.setParameters(kernel, 1, 1);

            apply_effect(dil);
        }
예제 #7
0
        /// <summary>
        /// Menu handler for the dilation ("расширение") command: snapshots
        /// the picture box image, keeps the previous image in
        /// <c>old_image</c>, and runs the dilation filter on the background
        /// worker.
        /// </summary>
        private void  асширениеToolStripMenuItem_Click(object sender, EventArgs e)
        {
            old_image = image;
            image     = new Bitmap(pictureBox1.Image);

            backgroundWorker1.RunWorkerAsync(new Dilation(mWidth, mHeight, mMatrix));
        }
예제 #8
0
        /// <summary>
        /// Menu handler for dilation: refuses to run until a structuring
        /// core has been configured, then launches the filter on the
        /// background worker.
        /// </summary>
        private void dilationрасширениеToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // Guard: a zero-sized core means the user has not set one yet.
            if (mmCore.coreSize == 0)
            {
                MessageBox.Show("Set core first", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }

            Filters filter = new Dilation(mmCore);
            backgroundWorker1.RunWorkerAsync(filter);
        }
예제 #9
0
 /// <summary>
 /// Menu handler for dilation: uses the user-supplied kernel when one is
 /// available, otherwise falls back to the filter's default constructor.
 /// </summary>
 private void расширениеToolStripMenuItem_Click(object sender, EventArgs e)
 {
     Dilation filter = kernel != null ? new Dilation(kernel) : new Dilation();

     backgroundWorker1.RunWorkerAsync(filter);
 }
        /// <summary>
        /// Iterative morphological reconstruction: erodes the input once,
        /// then repeatedly applies a masked dilation until the image stops
        /// changing between iterations.
        /// </summary>
        /// <returns>The converged (stable) image from the iteration.</returns>
        public byte[,] Reconstruct()
        {
            // Erosion without a mask
            MorphologicalFilter f1 = new Erosion(this.Ekernel, 1);
            var image = f1.Morph(this.Input, MorphologicalFilter.UseMask.No);

            // lastImage starts all-zero so the loop runs at least once for
            // any non-trivial eroded image.
            byte[,] lastImage          = new byte[this.Input.GetLength(0), this.Input.GetLength(1)];
            // NOTE(review): the mask travels through global state; Morph
            // presumably reads GlobalMask when masking is enabled -- confirm
            // against HelperFunctions before refactoring this.
            HelperFunctions.GlobalMask = this.Input;
            while (!IsEqual(image, lastImage))
            {
                lastImage = image;
                // Dilation with a mask
                MorphologicalFilter f2 = new Dilation(this.Dkernel, 1);
                image = f2.Morph(image, this._e);
            }

            return(lastImage);
        }
예제 #11
0
        /// <summary>
        /// Menu handler: dilates the current image with a 3x3 cross-shaped
        /// structuring element and displays the result in the picture box.
        /// </summary>
        private void dirationToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // Cross-shaped 3x3 structuring element.
            bool[,] mask =
            {
                { false, true,  false },
                { true,  true,  true  },
                { false, true,  false }
            };

            MathMorf dil = new Dilation();
            //backgroundWorker2.RunWorkerAsync(dil);
            Bitmap resultImage = dil.procImg(image, mask, 3, 3 /*,backgroundWorker2*/);

            pictureBox1.Image = resultImage;
            pictureBox1.Refresh();
        }
예제 #12
0
        /// <summary>
        /// Pre-processes a frame: crops a 10% border, converts to grayscale,
        /// applies dilation + erosion, extracts Canny edges, binarizes, and
        /// finally centers the result on a 200x200 canvas.
        /// </summary>
        /// <param name="bitmap">The source frame; neither modified nor disposed.</param>
        /// <returns>A new 200x200 32bpp ARGB bitmap with the processed image.</returns>
        private Bitmap processBitmap(Bitmap bitmap)
        {
            Bitmap bitmapToProcess = bitmap.Clone(new Rectangle(bitmap.Width / 10, bitmap.Height / 10,
                                                                bitmap.Width / 10 * 8, bitmap.Height / 10 * 8), bitmap.PixelFormat);

            // Each filter stage returns a new Bitmap; dispose the previous
            // stage so the intermediate GDI+ bitmaps do not leak.
            Bitmap stage = bitmapToProcess;
            bitmapToProcess = Grayscale.CommonAlgorithms.BT709.Apply(stage);
            stage.Dispose();

            stage = bitmapToProcess;
            bitmapToProcess = new Dilation().Apply(stage);
            stage.Dispose();

            stage = bitmapToProcess;
            bitmapToProcess = new Erosion().Apply(stage);
            stage.Dispose();

            stage = bitmapToProcess;
            bitmapToProcess = new CannyEdgeDetector().Apply(stage);
            stage.Dispose();

            stage = bitmapToProcess;
            bitmapToProcess = new Threshold(1).Apply(stage);
            stage.Dispose();

            using (Bitmap bmp = new Bitmap(200, 200))
            using (bitmapToProcess)
            {
                using (Graphics g2 = Graphics.FromImage(bmp))
                {
                    g2.DrawImage(bitmapToProcess, 20, 20, 160, 160);
                }

                // The clone is independent of bmp, so bmp itself can be freed.
                return bmp.Clone(new Rectangle(0, 0, 200, 200), PixelFormat.Format32bppArgb);
            }
        }
예제 #13
0
        /// <summary>
        /// Menu handler: dilates the displayed image with the square
        /// structuring element and replaces the image with the result.
        /// </summary>
        private void squareDilateToolStripMenuItem_Click(object sender, EventArgs e)
        {
            var filter = new Dilation(square);
            Image = filter.Apply(Image);
        }
예제 #14
0
        /// <summary>
        /// Menu handler for the dilation ("расширение") command: runs the
        /// filter, built from the configured structuring element, on the
        /// background worker.
        /// </summary>
        private void расширениеToolStripMenuItem_Click(object sender, EventArgs e)
        {
            backgroundWorker1.RunWorkerAsync(new Dilation(StructElem));
        }
예제 #15
0
        /// <summary>
        /// This is the method that actually does the work: reads the input
        /// image and parameters, configures the selected morphological
        /// filter mode, applies it to the image's filter list, and outputs
        /// both the image and the chosen filter.
        /// </summary>
        /// <param name="DA">The DA object is used to retrieve from inputs and store in outputs.</param>
        protected override void SolveInstance(IGH_DataAccess DA)
        {
            IGH_Goo goo   = null;
            Image   image = new Image();

            // Bail out early when no image is supplied or it cannot be converted.
            if (!DA.GetData(0, ref goo))
            {
                return;
            }
            if (!goo.TryGetImage(ref image))
            {
                return;
            }

            int mode = 0;
            DA.GetData(1, ref mode);

            bool valueA = false;
            DA.GetData(2, ref valueA);

            int valueB = 1;
            DA.GetData(3, ref valueB);

            int valueC = 1;
            DA.GetData(4, ref valueC);

            Filter filter = new Filter();

            switch ((FilterModes)mode)
            {
            case FilterModes.Closing:
                SetDefaultFilterParameters();
                filter = new Closing();
                image.Filters.Add(new Closing());
                break;

            case FilterModes.Dilation:
                SetDefaultFilterParameters();
                filter = new Dilation();
                image.Filters.Add(new Dilation());
                break;

            case FilterModes.DilationBinary:
                SetDefaultFilterParameters();
                filter = new DilationBinary();
                image.Filters.Add(new DilationBinary());
                break;

            case FilterModes.Erosion:
                SetDefaultFilterParameters();
                filter = new Erosion();
                image.Filters.Add(new Erosion());
                break;

            case FilterModes.ErosionBinary:
                SetDefaultFilterParameters();
                filter = new ErosionBinary();
                image.Filters.Add(new ErosionBinary());
                break;

            case FilterModes.HatBottom:
                SetDefaultFilterParameters();
                filter = new HatBottom();
                image.Filters.Add(new HatBottom());
                break;

            case FilterModes.HatTop:
                SetDefaultFilterParameters();
                filter = new HatTop();
                image.Filters.Add(new HatTop());
                break;

            case FilterModes.Opening:
                SetDefaultFilterParameters();
                filter = new Opening();
                image.Filters.Add(new Opening());
                break;

            case FilterModes.Skeletonization:
                SetDefaultFilterParameters();
                filter = new Skeletonization();
                image.Filters.Add(new Skeletonization());
                break;

            case FilterModes.SkeletonizationZhangSuen:
                SetDefaultFilterParameters();
                filter = new SkeletonizationZhangSuen();
                image.Filters.Add(new SkeletonizationZhangSuen());
                break;

            case FilterModes.HorizontalBands:
                SetParameter(2, "B", "Borders", "Process gaps");
                SetParameter(3, "G", "Gap", "The pixel gap size");
                SetParameter(4);
                filter = new BandsHorizontal(valueB, valueA);
                image.Filters.Add(new BandsHorizontal(valueB, valueA));
                break;

            case FilterModes.VerticalBands:
                SetParameter(2, "B", "Borders", "Process gaps");
                SetParameter(3, "G", "Gap", "The pixel gap size");
                SetParameter(4);
                filter = new BandsVertical(valueB, valueA);
                image.Filters.Add(new BandsVertical(valueB, valueA));
                break;

            case FilterModes.FillHoles:
                SetParameter(2, "B", "Borders", "Process gaps");
                SetParameter(3, "W", "Width", "The pixel threshold");
                SetParameter(4, "H", "Height", "The pixel threshold");
                filter = new FillHoles(valueC, valueB, valueA);
                image.Filters.Add(new FillHoles(valueC, valueB, valueA));
                break;
            }

            message = ((FilterModes)mode).ToString();
            UpdateMessage();

            DA.SetData(0, image);
            DA.SetData(1, filter);
        }

        /// <summary>
        /// Applies the no-argument SetParameter configuration to inputs 2-4;
        /// shared by every filter mode that takes no extra parameters.
        /// </summary>
        private void SetDefaultFilterParameters()
        {
            SetParameter(2);
            SetParameter(3);
            SetParameter(4);
        }
예제 #16
0
 /// <summary>
 /// Creates an opening operator by building its two component filters,
 /// a dilation and an erosion, from the same kernel.
 /// </summary>
 /// <param name="w">Kernel width, forwarded to both filters.</param>
 /// <param name="h">Kernel height, forwarded to both filters.</param>
 /// <param name="k">The structuring-element kernel values.</param>
 public Opening(int w, int h, int[,] k)
 {
     dilationFilter = new Dilation(w, h, k);
     erosionFilter  = new Erosion(w, h, k);
 }
예제 #17
0
 /// <summary>
 /// Creates a closing operator by building its two component filters,
 /// a dilation and an erosion, from the same kernel.
 /// </summary>
 /// <param name="w">Kernel width, forwarded to both filters.</param>
 /// <param name="h">Kernel height, forwarded to both filters.</param>
 /// <param name="k">The structuring-element kernel values.</param>
 public Closings(int w, int h, int[,] k)
 {
     dilationFilter = new Dilation(w, h, k);
     erosionFilter  = new Erosion(w, h, k);
 }
        /// <summary>
        /// Builds the AR/MR render pipeline on the VR camera: a live-mesh
        /// depth/normal culling pass (with dilation, down-sampling, block
        /// noise and soft-edge compositing against the see-through cameras),
        /// a shadow cast/receive pair, deferred point/spot lighting, and a
        /// separate depth-writing camera for alpha objects.
        /// </summary>
        /// <param name="_dualCameraLRT">Left-eye see-through camera image.</param>
        /// <param name="_dualCameraRRT">Right-eye see-through camera image.</param>
        public void InitRenderSystem(CopyCameraImage _dualCameraLRT, CopyCameraImage _dualCameraRRT)
        {
            //Set reconstruct mesh to ScanLiveMeshLayer if exist
            Material reconMeshMat = new Material(Shader.Find("Unlit/Color"));

            reconMeshMat.color = Color.white * 0.5f;
            MeshRenderer[] reconMeshes = SRWorkControl.Instance.GetReconstructStaticMeshes();
            if (reconMeshes != null)
            {
                foreach (MeshRenderer r in reconMeshes)
                {
                    r.gameObject.layer = ScanLiveMeshLayer;
                    r.gameObject.GetComponent <MeshRenderer>().sharedMaterial = reconMeshMat;
                    r.GetComponent <MeshFilter>().mesh.RecalculateNormals();//must RecalculateNormals since original normal is wrong
                }
            }

            //-------------------------------------------------------------------------------------------------
            //Add culling processor
            //-------------------------------------------------------------------------------------------------
            _renderLivemeshDepthN               = ARRender.Instance.VRCamera.gameObject.AddComponent <CameraRenderRT>();
            _renderLivemeshDepthN.RTType        = CameraRenderRT.RTTYPE.FLOAT4;
            _renderLivemeshDepthN.RenderShader  = ScanLiveMeshShaderDepthNormal;
            _renderLivemeshDepthN.cullMask      = 1 << ScanLiveMeshLayer;
            _renderLivemeshDepthN.clearColor    = Color.white;
            _renderLivemeshDepthN.blurHMaterial = new Material(blurHMaterial);
            _renderLivemeshDepthN.blurVMaterial = new Material(blurVMaterial);
            _renderLivemeshDepthN.BlurFactor    = 3f;// 2.7f;//0 turn off blur
            _renderLivemeshDepthN.Init();

            //VRCamera.cullingMask = VRCamera.cullingMask & ~(1 << ScanLiveMeshLayer);//Not render 'LiveMesh' in vr camera
            ARRender.Instance.VRCamera.cullingMask     = MyHelpLayer.RemoveMaskLayer(ARRender.Instance.VRCamera.cullingMask, ScanLiveMeshLayer);
            ARRender.Instance.VRCamera.clearFlags      = CameraClearFlags.Color;
            ARRender.Instance.VRCamera.backgroundColor = new Color(0, 0, 0, 0);

            //Add dilation for later pass 'DownSample' not sample outter edge's black color.
            Dilation dilation = ARRender.Instance.VRCamera.gameObject.AddComponent <Dilation>();

            dilation.dilateCount         = 2;
            dilation.dilateMaskShader    = dilateMaskShader;
            dilation.genBinaryMaskShader = genBinaryMaskShader;
            dilation.refineGBufferShader = refineGBufferShader;

            //Add down sample for matching the low resolution see through camera.
            RTDownSample downsample = _createCameraFilter <RTDownSample>() as RTDownSample;

            downsample.DownScale = 3;

            //Render block pattern for simulate noise for matching low resolution see through camera.
            CamFilterDrawBlocks drawblocks = _createCameraFilter <CamFilterDrawBlocks>() as CamFilterDrawBlocks;

            drawblocks.Fade = 0.003f;

            CopyCameraDepthColor vrdepth = ARRender.Instance.VRCamera.gameObject.AddComponent <CopyCameraDepthColor>();

            vrdepth.CopyCameraDepthColorMaterial = CopyCameraDepthColorMaterial;

#if USE_MERGE_DEPTH
            MergeDepth mergeDepth = ARRender.Instance.VRCamera.gameObject.AddComponent <MergeDepth>();
            mergeDepth.cameraRenderRT       = _renderLivemeshDepthN.RT;
            mergeDepth.mergeDepthMat        = MergeDepthMat;
            mergeDepth.copyCameraColorDepth = vrdepth;
#endif

            SoftEdgeWeight softEdgeWeight = ARRender.Instance.VRCamera.gameObject.AddComponent <SoftEdgeWeight>();
            softEdgeWeight.renderDepth            = _renderLivemeshDepthN;
            softEdgeWeight.SoftEdgeWeightMaterial = softEdgeWeightMaterial;
            softEdgeWeight.factor = 2f;

            RenderWithARDepth renderWithARDepth = ARRender.Instance.VRCamera.gameObject.AddComponent <RenderWithARDepth>();
            renderWithARDepth.RenderWithARDepthMaterial = RenderWithARDepthMaterial;
            renderWithARDepth.MRDepthNormal             = _renderLivemeshDepthN.RT;
            renderWithARDepth.VRCamera       = vrdepth;
            renderWithARDepth.softCullLength = 0.0001f;
            renderWithARDepth.glowAmount     = 0.018f;
            renderWithARDepth.coefAmount     = 0.062f;
            renderWithARDepth.seeThroughL    = _dualCameraLRT;
            renderWithARDepth.seeThroughR    = _dualCameraRRT;
            renderWithARDepth.softEdgeWeight = softEdgeWeight;
            renderWithARDepth.CullingBaise   = 0.000017f;//1.7e-05

            //-------------------------------------------------------------------------------------------------
            //Add shadow processor
            //-------------------------------------------------------------------------------------------------
            ARRender.Instance.shadowCastDirLight.transform.position = ARRender.Instance.VRCamera.transform.position;

            ShadowRecieve shadowRecieve = ARRender.Instance.VRCamera.gameObject.AddComponent <ShadowRecieve>();
            shadowRecieve.ScreenMapScale   = 0.5f;
            shadowRecieve.shadowLight      = ARRender.Instance.shadowCastDirLight;
            shadowRecieve.screenShadowBlit = screenShadowBlit;
            // shadowRecieve.blurMaterial = shadowBlur;
            shadowRecieve.blurHMaterial = new Material(blurHMaterial);
            shadowRecieve.blurVMaterial = new Material(blurVMaterial);
            shadowRecieve.BlurFactor    = 3;
            shadowRecieve.ShadowFactor  = 10;
            shadowRecieve.VRDepthColor  = vrdepth;//set VR scene depth for screen shadow culling (no need recieve mesh's depth)

            shadowRecieve.cameraDepthN = _renderLivemeshDepthN.RT;
            //shadowRecieve.recieveShadowLayer = (1 << ScanLiveMeshLayer);

            GameObject shadowCastCameraObj = Instantiate(shadowCastCameraPrefab);
            shadowCastCameraObj.transform.parent = ARRender.Instance.shadowCastDirLight.transform;
            ShadowCastCamera shadowCastCamera = shadowCastCameraObj.GetComponent <ShadowCastCamera>();
            shadowCastCamera.GetComponent <Camera>().orthographicSize = 4; //4 is enough
            shadowCastCamera.shadowMapSize = 400;

            //cast shadow for default layer
            shadowCastCamera.shadowRenderMask = shadowCastMask;

            //shadowRecieve.shadowColor = Color.white * 50f / 255f;
            shadowRecieve.bais           = 0;
            shadowRecieve.shadowBlitBais = 0;

            //-------------------------------------------------------------------------------------------------
            //Add point light processor
            //-------------------------------------------------------------------------------------------------
            deferredLightMap = ARRender.Instance.VRCamera.gameObject.AddComponent <DeferredLightMap>();
            deferredLightMap.DeferredPointLightMapMaterial = DeferredPointLightMapMaterial;
            deferredLightMap.DeferredSpotLightMapMaterial  = DeferredSpotLightMapMaterial;
            deferredLightMap.BlitLightMapMaterial          = BlitLightMapMaterial;
            deferredLightMap.LightMapFactor = 0.2f;

            //deferredLightMap.mergeDepth = mergeDepth;
            deferredLightMap.cameraRenderDepthNormal = _renderLivemeshDepthN.RT; //not use merge depth, since game objects use unity lighting.

            deferredLightMap.TurnOffUnityLight = false;                          //Dont' turn off unity light, because current time my deferred lighting is not concern normal, and I need unity lighting to render object.


            //-------------------------------------------------------------------------------------------------
            //write Depth Camera and render alpha object
            //-------------------------------------------------------------------------------------------------
            GameObject writeDepthCameraRoot = new GameObject(ARRender.Instance.VRCamera.name + "_writeDepth +" + _writeDepthCamLOrder);
            writeDepthCameraRoot.transform.parent = null;
            Vive.Plugin.SR.ViveSR_HMDCameraShifter shifter = writeDepthCameraRoot.AddComponent <Vive.Plugin.SR.ViveSR_HMDCameraShifter>();
            writeDepthCameraRoot.transform.position = Vector3.zero;
            writeDepthCameraRoot.transform.rotation = Quaternion.identity;

            GameObject writeDepthCameraObj = Instantiate(writeDepthCameraPrefab);
            writeDepthCameraObj.transform.parent        = writeDepthCameraRoot.transform;
            writeDepthCameraObj.transform.localPosition = Vector3.zero;
            writeDepthCameraObj.transform.localRotation = Quaternion.identity;
            _writeDepthCamera       = writeDepthCameraObj.GetComponent <Camera>();
            _writeDepthCamera.depth = ARRender.Instance.VRCamera.depth + _writeDepthCamLOrder;
            _writeDepthCamera.name  = _writeDepthCamera.name + " +" + _writeDepthCamLOrder;
            _writeDepth             = _writeDepthCamera.GetComponent <WriteDepthPerspective>();
            _writeDepth.mainCam     = ARRender.Instance.VRCamera;
            _writeDepth.addShiftR   = 0.00145f;  //0.00807f;
            _writeDepth.addShiftUP  = -0.00005f; // -0.0001f;
            _writeDepth.Init(0);
            //_writeDepth.IsRenderRightEye = null;
            //shifter.TargetCamera = _writeDepthCamera;
            bool setTargetCam = MyReflection.SetMemberVariable(shifter, "TargetCamera", _writeDepthCamera);
            if (!setTargetCam)
            {
                Debug.LogError("shifter.TargetCamera set fail..._writeDepthCamera");
            }
#if USE_MERGE_DEPTH
            _writeDepth.mergeDepth = mergeDepth;
#else
            // FIX: was 'renderLiveMeshDepthN', an undeclared identifier -- the
            // field used throughout this method is _renderLivemeshDepthN, so the
            // original failed to compile when USE_MERGE_DEPTH was undefined.
            _writeDepth.cameraDepth = _renderLivemeshDepthN.RT;
#endif

            ARRender.Instance.shadowCastDirLight.shadowBias = 0.02f;
        }
예제 #19
0
        /// <summary>
        /// Menu handler: starts the dilation filter (built from the current
        /// mask) on the background worker.
        /// </summary>
        private void dilationToolStripMenuItem_Click(object sender, EventArgs e)
        {
            backgroundWorker1.RunWorkerAsync(new Dilation(mask));
        }