Example #1
        /** Gets the outer bounding box. */
        public double[] GetBoundingBox(ModelParams para = ModelParams.bounding_box1)
        {
            HTuple boundingBox = GetParamValue(para);

            double[] arry = boundingBox.ToDArr();// returns 2 points; the 2 points can be converted into a box
            return(arry);
        }
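The comment notes that the two returned points can be converted into a box. A minimal sketch of how the six values could be consumed, assuming (not confirmed by the snippet) that the array holds the two opposite corners as [minX, minY, minZ, maxX, maxY, maxZ]; the helper is hypothetical and not part of the original class:

        // Hypothetical helper, assuming box = [minX, minY, minZ, maxX, maxY, maxZ].
        public static double BoundingBoxVolume(double[] box)
        {
            double dx = box[3] - box[0];   // extent along X
            double dy = box[4] - box[1];   // extent along Y
            double dz = box[5] - box[2];   // extent along Z
            return dx * dy * dz;           // volume of the axis-aligned box
        }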
Example #2
        public double[] GetCoordZ(ModelParams para = ModelParams.point_coord_z)
        {
            HTuple Z = GetParamValue(para);

            double[] arry = Z.ToDArr();
            return(arry);
        }
Example #3
        public double[] GetCoordY(ModelParams para = ModelParams.point_coord_y)
        {
            HTuple Y = GetParamValue(para);

            double[] arry = Y.ToDArr();
            return(arry);
        }
Example #4
        /** Gets the coordinates of the point cloud. */
        public double[] GetCoordX(ModelParams para = ModelParams.point_coord_x)
        {
            HTuple X = GetParamValue(para);

            double[] arry = X.ToDArr();
            return(arry);
        }
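GetCoordX, GetCoordY, and GetCoordZ return parallel coordinate arrays. A minimal sketch of zipping them into per-point triples, assuming all three arrays have the same length; the helper is hypothetical and not part of the original class:

        // Hypothetical helper: combines the parallel X/Y/Z arrays into [x, y, z] triples.
        public static double[][] ToPointList(double[] x, double[] y, double[] z)
        {
            double[][] points = new double[x.Length][];
            for (int i = 0; i < x.Length; i++)
            {
                points[i] = new double[] { x[i], y[i], z[i] };
            }
            return points;
        }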
Example #5
        public void ProcessStep(HImage imageGrabbed)
        {
            currentIVData.Image = imageGrabbed;

            int imageChannels = imageGrabbed.CountChannels();

            HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

            if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
            {
                rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                rgbImg = rImg.Compose3(gImg, bImg);
            }

            HImage grayImage = new HImage();

            if (rgbImg == null)
            {
                return;
            }

            imageGrabbed = rgbImg.CopyImage();

            lock (this.ImageToBeSaved)
            {
                this.ImageToBeSaved = imageGrabbed.CopyImage();
            }

            HImage ho_R;
            HImage ho_G;
            HImage ho_B;

            ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

            HTuple RAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);

            currentIVData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
            currentIVData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
            currentIVData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
            currentIVData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

            UpdateHistogramForImage();
        }
Example #6
        public override void SaveConfig()
        {
            all_parm = OffsetXY + "_" + AngleRange + "_" + EnableRotateCenter + "_" + CalibDataFileName + "_";

            all_parm += XMaxDeviation + "_" + YMaxDeviation + "_";
            all_parm += CenterRow + "_" + CenterColumn + "_";
            all_parm += DistanceMax + "_";

            if (in_pixel_row != null)
            {
                int length = in_pixel_row.Length;

                all_parm += length + "_";

                for (int i = 0; i < length; i++)
                {
                    all_parm += in_pixel_column.ToDArr()[i] + "_" + in_pixel_row.ToDArr()[i] + "_" + in_world_x.ToDArr()[i] + "_" + in_world_y.ToDArr()[i] + "_";
                }
            }

            all_parm = all_parm.Remove(all_parm.LastIndexOf("_")); // string.Remove returns a new string, so assign the result back
        }
Example #7
        /// <summary>
        /// Tested.
        /// Computes the camera's uv->xy coordinate transform for software-triggered capture (e.g. the "large" camera).
        /// Requires the camera/light/trigger to have been set to a reasonable state beforehand.
        /// [x]   =   [ H1  H2  H3  ]    [u]
        /// [y]   =   [ H4  H5  H6  ] *  [v]
        ///                              [1]
        /// </summary>
        /// <param name="matUV2XY">uv->xy transform matrix; the 2*3 matrix is stored row-wise as a 1*6 array. Passed by ref, so the array needs an initial value.</param>
        /// <param name="cam">Camera control class; the exposure time must be set appropriately. For software-triggered capture an exposure time of about 100 ms is recommended.</param>
        /// <param name="modelID">NCC template to match.</param>
        /// <param name="score_thresh">Match score; must be greater than 0.5.</param>
        /// <param name="axisPara">Motion-axis information (xy interpolation), including axis speed and acceleration.</param>
        /// <param name="initPoint">Initial point; only its xy information is used. When the stage reaches the initial point, the field of view must contain a complete, matchable model.</param>
        /// <param name="lightInd">Light source id used for software-triggered capture; the light's timing must be configured appropriately in advance.</param>
        /// <param name="axisInd">x and y axis numbers.</param>
        /// <param name="xyRange">Range of the random moves around the initial point when computing the transform, in mm.</param>
        /// <param name="nPoints">Number of random points to visit.</param>
        /// <param name="lightDelay">Delay of the light for software-triggered capture, in us; a typical value is 10 us.</param>
        /// <param name="timeOut">Timeout for a single move, in ms.</param>
        /// <param name="winID">(For debugging) display window ID.</param>
        /// <returns>Whether the operation succeeded.</returns>
        public bool calibUV2XY
        (
            int camInd,
            ref HTuple matUV2XY,
            Model model,
            double xyRange,
            int nPoints = 20
        )

        {
            //if (model.ReadModel(CalibrUV2XYModelPath) == false)
            //{
            //    HTUi.PopError("Failed to load the calibration template!");
            //    return false;
            //}
            string errMsg = "";

            if (_station.operation.MultiAxisMove(_station.AxisXYZ, new double[] { _xUv2xy, _yUv2xy, _zUv2xy }, true, out errMsg) != 0)
            {
                HTUi.PopError("Unable to move to the calibration point!");
                return(false);
            }
            Thread.Sleep(200);
            //  App.obj_light.FlashMultiLight(LightUseFor.ScanPoint1st);

            //if(_station.operation.SWPosTrig(_station.AxisX,out errMsg)!=0)
            //{
            //    HTUi.PopError(errMsg);
            //    return false;
            //}

            Thread.Sleep(10);
            if (image != null)
            {
                image.Dispose();
            }
            HOperatorSet.GenEmptyObj(out image);

            if (_station.operation.CaputreOneImage(_station.CamereDev[camInd], "Halcon", out image, out errMsg) != 0)
            {
                HTUi.PopError("Image capture failed: " + errMsg);
                return(false);
            }

            //3. match
            HTuple  u, v, angle;
            bool    status     = matchModel(camInd, ref image, model, out u, out v, out angle);
            HObject showRegion = new HObject();

            showRegion.Dispose();
            HOperatorSet.GenCrossContourXld(out showRegion, u, v, 512, 0);
            _station._rtUi._fm.ShowImage(_station._rtUi._fm.htWindowCalibration, image, showRegion);
            //image.Dispose();
            if (!status)
            {
                HTUi.PopError("Failed to acquire the image at the initial match position");
                return(false);
            }
            HTuple xArr = new HTuple(), yArr = new HTuple(), uArr = new HTuple(), vArr = new HTuple();
            Random rand = new Random();

            //4. for ... snap , match, add <u,v,x,y>
            for (int i = 0; i < nPoints; i++)
            {
                DateTime t1 = DateTime.Now;
                // NextDouble() returns a double >= 0.0 and < 1.0
                double x = (rand.NextDouble() - 0.5) * xyRange + _xUv2xy;
                double y = (rand.NextDouble() - 0.5) * xyRange + _yUv2xy;

                if (_station.operation.MultiAxisMove(new string[] { _station.AxisXYZ[0], _station.AxisXYZ[1], }, new double[] { x, y }, true, out errMsg) != 0)
                {
                    HTUi.PopError("Unable to move to the calibration point!");
                    return(false);
                }

                Thread.Sleep(200);

                //if (_station.operation.SWPosTrig(_station.AxisX, out errMsg) != 0)
                //{
                //    HTUi.PopError(errMsg);
                //    return false;
                //}

                Thread.Sleep(10);
                if (image != null)
                {
                    image.Dispose();
                }
                HOperatorSet.GenEmptyObj(out image);

                if (_station.operation.CaputreOneImage(_station.CamereDev[camInd], "Halcon", out image, out errMsg) != 0)
                {
                    HTUi.PopError("Image capture failed: " + errMsg);
                    return(false);
                }

                if (matchModel(camInd, ref image, model, out u, out v, out angle)) //found something
                {
                    xArr.Append(x); yArr.Append(y); uArr.Append(u); vArr.Append(v);
                }
                showRegion.Dispose();
                HOperatorSet.GenCrossContourXld(out showRegion, u, v, 512, 0);
                _station._rtUi._fm.ShowImage(_station._rtUi._fm.htWindowCalibration, image, showRegion);
                //image.Dispose();
            }
            if (xArr.Length < 10)
            {
                HTUi.PopError("Not enough valid points");
                return(false);
            }
            //5. least squares estimation
            Matrix<double> In = Matrix<double>.Build.Dense(3, xArr.Length, 1.0); // row 2 stays 1.0 (the homogeneous coordinate)

            Matrix<double> Out = Matrix<double>.Build.Dense(2, xArr.Length);

            Out.SetRow(0, xArr.ToDArr());
            Out.SetRow(1, yArr.ToDArr());
            In.SetRow(0, uArr.ToDArr());
            In.SetRow(1, vArr.ToDArr());
            Matrix<double> A = vec2Mat(In, Out);

            //6. move to center of uv space
            double[] aArr = A.ToRowWiseArray(); // needs to be tested
            _station._calibrationUV2XYParameter = string.Join(",", aArr.ToArray());
            //parse
            if (matUV2XY == null)
            {
                matUV2XY = new HTuple();
            }
            for (int i = 0; i < 6; i++)
            {
                matUV2XY.Append(aArr[i]);
            }
            return(true);
        }
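The doc comment defines the estimated transform as a 2*3 matrix [H1..H6] stored row-wise in a 1*6 array. A minimal sketch of applying it to map a pixel (u, v) to world coordinates (x, y), assuming matUV2XY holds the six values in that order; the helper is hypothetical and not part of the original class:

        // Hypothetical helper: applies the row-wise 2x3 affine matrix [H1..H6] estimated by calibUV2XY.
        //   x = H1*u + H2*v + H3
        //   y = H4*u + H5*v + H6
        public static void MapUV2XY(double[] h, double u, double v, out double x, out double y)
        {
            x = h[0] * u + h[1] * v + h[2];
            y = h[3] * u + h[4] * v + h[5];
        }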
Example #8
        public void ProcessStep(HImage imageGrabbed)
        {
            HTuple startTime = null;
            HTuple endTime   = null;

            HOperatorSet.CountSeconds(out startTime);
            HOperatorSet.CountSeconds(out endTime);
            currentOnCameraFFCData = new OnCameraFFCData();
            try
            {
                int imageChannels = imageGrabbed.CountChannels();

                HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

                if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
                {
                    rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                    rgbImg = rImg.Compose3(gImg, bImg);
                }

                HImage grayImage = new HImage();
                if (rgbImg == null)
                {
                    return;
                }

                imageGrabbed = rgbImg.CopyImage();

                lock (this.ImageToBeSaved)
                {
                    this.ImageToBeSaved = imageGrabbed.CopyImage();
                }
                // Feed the image to the script
                imageGrabbed = imageGrabbed.CropDomain();
                currentOnCameraFFCData.Image = imageGrabbed;

                if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
                {
                    HImage ho_R;
                    HImage ho_G;
                    HImage ho_B;

                    ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

                    HTuple RAOIGrayValues  = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayValues  = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayValues  = Globals.GetGrayValuesOfLine(ho_B);
                    HTuple IRAOIGrayValues = Globals.GetGrayValuesOfLine(irImg);

                    currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
                    currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
                    currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
                    currentOnCameraFFCData.GrayValues.MPlaneVals = IRAOIGrayValues.ToDArr();
                }
                else if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
                {
                    HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
                    currentOnCameraFFCData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();
                }
                HOperatorSet.CountSeconds(out endTime);
                currentOnCameraFFCData.TimeTaken = (endTime - startTime).D * 1000;
            }
            catch (Exception ex)
            {
                string errorMessage = "Exception occurred during On-Camera FFC step. ";
                errorMessage = errorMessage + " Error Message: " + ex.Message;
                MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Example #9
        /// <summary>
        /// Performs the PRNU.
        /// </summary>
        /// <remarks></remarks>
        private void PerformPRNU()
        {
            try
            {
                this.btnFFC_FPN.Enabled  = false;
                this.btnFFC_PRNU.Enabled = false;
                this.lblStatus.Text      = "Performing PRNU...";
                this.lblStatus.BackColor = Color.Yellow;
                this.prnuDone            = false;
                Application.DoEvents();
                string buffer;
                string command;
                bool   bfcStatus = true;

                HImage imageCropped;

                currentOnCameraFFCData = new OnCameraFFCData();

                HImage imageGrabbed = new HImage();
                HImage ho_RGB       = new HImage();
                HImage ho_R         = new HImage();
                HImage ho_G         = new HImage();
                HImage ho_B         = new HImage();
                HImage ho_M         = new HImage();

                lock (this.ImageToBeSaved)
                {
                    imageGrabbed = this.ImageToBeSaved.CopyImage();
                }

                ho_R   = imageGrabbed.Decompose3(out ho_G, out ho_B);
                ho_RGB = ho_R.Compose3(ho_G, ho_B);

                int presentChannel = ho_RGB.CountChannels();


                if (presentChannel == Properties.Settings.Default.NumberOfChannelsInColorImage)
                {
                    lock (this.ImageToBeSaved)
                    {
                        ho_RGB = this.ImageToBeSaved.CopyImage();
                    }

                    ho_R = ho_RGB.Decompose3(out ho_G, out ho_B);

                    HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);


                    currentOnCameraFFCData.Image = ho_RGB;
                    currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
                    currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
                    currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();

                    bool lightIntensityOptimum = true;

                    imageCropped = ho_RGB.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FFCROIRectangleHeight,
                                                   (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FFCROIRectangleWidth,
                                                   Properties.Settings.Default.FFCROIRectangleWidth * 2,
                                                   (this.CameraAcquisition.CurrentImageHeight / 2));

                    ho_R = imageCropped.Decompose3(out ho_G, out ho_B);

                    RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
                    GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
                    BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);

                    if (Properties.Settings.Default.CheckForImproperCalibrationTarget)
                    {
                        int      minGrayValue;
                        int      maxGrayValue;
                        double[] tempLineProfileR = RAOIGrayVals.ToDArr();

                        double[] tempLineProfileG = GAOIGrayVals.ToDArr();

                        double[] tempLineProfileB = BAOIGrayVals.ToDArr();

                        lightIntensityOptimum &=
                            this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileR, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset) &&
                            this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileG, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset) &&
                            this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileB, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset);
                    }

                    if (!lightIntensityOptimum)
                    {
                        this.RefreshImageWindow(ho_RGB);

                        this.lblStatus.Text      = Properties.Settings.Default.PRNULightIntensityNotOptimumMessage;
                        this.lblStatus.BackColor = Color.Red;
                        this.btnFFC_FPN.Enabled  = true;
                        this.btnFFC_PRNU.Enabled = true;
                        Application.DoEvents();
                        return;
                    }
                }
                else if (presentChannel == Properties.Settings.Default.NumberOfChannelsInMonoImage)
                {
                    lock (this.ImageToBeSaved)
                    {
                        ho_M = this.ImageToBeSaved.CopyImage();
                    }

                    HTuple MAOIGrayVals = Globals.GetGrayValuesOfLine(ho_M);


                    currentOnCameraFFCData.Image = ho_M;
                    currentOnCameraFFCData.GrayValues.MPlaneVals = MAOIGrayVals.ToDArr();

                    bool lightIntensityOptimum = true;

                    if (Properties.Settings.Default.CheckForImproperCalibrationTarget)
                    {
                        int      minGrayValue;
                        int      maxGrayValue;
                        double[] tempLineProfileM = MAOIGrayVals.ToDArr();

                        lightIntensityOptimum &=
                            this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileM, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset);
                    }

                    if (!lightIntensityOptimum)
                    {
                        this.RefreshImageWindow(ho_M);

                        this.lblStatus.Text      = Properties.Settings.Default.PRNULightIntensityNotOptimumMessage;
                        this.lblStatus.BackColor = Color.Red;
                        this.btnFFC_FPN.Enabled  = true;
                        this.btnFFC_PRNU.Enabled = true;
                        Application.DoEvents();
                        return;
                    }
                }

                command   = Globals.e2vUC4CommandToDoOnCameraBFC + System.Environment.NewLine;
                bfcStatus = Globals.ExecuteSerialCommand(
                    (uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                    command,
                    out buffer,
                    5000,
                    1000);

                command    = Globals.e2vUC4CommandToSaveFFCToUserBank1 + System.Environment.NewLine;
                bfcStatus &= Globals.ExecuteSerialCommand(
                    (uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                    command,
                    out buffer,
                    1000,
                    2000);

                command    = Globals.e2vUC4CommandToSaveALLSettingsToUserBank1 + System.Environment.NewLine;
                bfcStatus &= Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                                                          command,
                                                          out buffer,
                                                          1000,
                                                          2000);

                if (bfcStatus)
                {
                    lblStatus.Text      = "On Camera Bright Field Correction Done";
                    lblStatus.BackColor = Color.LimeGreen;
                }
                else
                {
                    lblStatus.Text      = "On Camera Bright Field Correction not done properly";
                    lblStatus.BackColor = Color.Red;
                }
                this.prnuDone            = bfcStatus;
                this.btnFFC_FPN.Enabled  = true;
                this.btnFFC_PRNU.Enabled = true;

                if (this.fpnDone && this.prnuDone)
                {
                    this.CameraAcquisition.CurrentCameraSetupProperties.LastOnCameraFFCSavingDate = "";
                    this.CameraAcquisition.CurrentCameraSetupProperties.LastOnCameraFFCSavingDate = DateTime.Now.Day.ToString() + "-" +
                                                                                                    DateTime.Now.Month.ToString() + "-" + DateTime.Now.Year.ToString() + " " +
                                                                                                    DateTime.Now.Hour.ToString() + ":" + DateTime.Now.Minute.ToString() + ":" +
                                                                                                    DateTime.Now.Second.ToString();
                }
            }
            catch (System.Exception ex)
            {
                lblStatus.Text           = "On Camera Bright Field Correction not done properly";
                lblStatus.BackColor      = Color.Red;
                this.btnFFC_FPN.Enabled  = true;
                this.btnFFC_PRNU.Enabled = true;
                this.prnuDone            = false;
                MessageBox.Show("Exception occurred while performing PRNU.\r\n Error: " + ex.Message,
                                "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Example #10
        /// <summary>
        /// Performs the FPN.
        /// </summary>
        /// <remarks></remarks>
        private void PerformFPN()
        {
            this.btnFFC_FPN.Enabled  = false;
            this.btnFFC_PRNU.Enabled = false;
            this.lblStatus.Text      = "Performing FPN...";
            this.lblStatus.BackColor = Color.Yellow;
            this.fpnDone             = false;
            Application.DoEvents();
            string buffer;
            string command;
            bool   dfcStatus = true;

            currentOnCameraFFCData = new OnCameraFFCData();

            HImage imageGrabbed = new HImage();
            HImage ho_RGB       = new HImage();
            HImage ho_R         = new HImage();
            HImage ho_G         = new HImage();
            HImage ho_B         = new HImage();
            HImage ho_M         = new HImage();

            try
            {
                lock (this.ImageToBeSaved)
                {
                    imageGrabbed = this.ImageToBeSaved.CopyImage();
                }

                ho_R   = imageGrabbed.Decompose3(out ho_G, out ho_B);
                ho_RGB = ho_R.Compose3(ho_G, ho_B);

                int presentChannel = ho_RGB.CountChannels();

                if (presentChannel == Properties.Settings.Default.NumberOfChannelsInColorImage)
                {
                    //ho_R = ho_RGB.Decompose3(out ho_G, out ho_B);

                    HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);


                    currentOnCameraFFCData.Image = ho_RGB;
                    currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
                    currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
                    currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();

                    HTuple maxGrayValueInCenterLineR = RAOIGrayVals.TupleMax();
                    HTuple maxGrayValueInCenterLineG = GAOIGrayVals.TupleMax();
                    HTuple maxGrayValueInCenterLineB = BAOIGrayVals.TupleMax();
                    if ((maxGrayValueInCenterLineR.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel) ||
                        (maxGrayValueInCenterLineG.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel) ||
                        (maxGrayValueInCenterLineB.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel))
                    {
                        this.RefreshImageWindow(ho_RGB);

                        this.lblStatus.Text      = Properties.Settings.Default.FPNLightIntensityNotOptimumMessage;
                        this.lblStatus.BackColor = Color.Red;
                        this.btnFFC_FPN.Enabled  = true;
                        this.btnFFC_PRNU.Enabled = true;
                        //Application.DoEvents();
                        return;
                    }
                }
                else if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
                {
                    lock (this.ImageToBeSaved)
                    {
                        ho_M = this.ImageToBeSaved.CopyImage();
                    }

                    HTuple MAOIGrayVals = Globals.GetGrayValuesOfLine(ho_M);

                    currentOnCameraFFCData.Image = ho_M;
                    currentOnCameraFFCData.GrayValues.MPlaneVals = MAOIGrayVals.ToDArr();

                    HTuple maxGrayValueInCenterLineM = MAOIGrayVals.TupleMax();
                    if (maxGrayValueInCenterLineM.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel)
                    {
                        this.RefreshImageWindow(ho_M);
                        this.lblStatus.Text      = Properties.Settings.Default.FPNLightIntensityNotOptimumMessage;
                        this.lblStatus.BackColor = Color.Red;
                        this.btnFFC_FPN.Enabled  = true;
                        this.btnFFC_PRNU.Enabled = true;
                        //Application.DoEvents();
                        return;
                    }
                }

                command   = Globals.e2vUC4CommandToDoOnCameraDFC + System.Environment.NewLine;
                dfcStatus = Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                                                         command,
                                                         out buffer,
                                                         2000,
                                                         1000);

                command    = Globals.e2vUC4CommandToSaveFFCToUserBank1 + System.Environment.NewLine;
                dfcStatus &= Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                                                          command,
                                                          out buffer,
                                                          1000,
                                                          2000);

                command    = Globals.e2vUC4CommandToSaveALLSettingsToUserBank1 + System.Environment.NewLine;
                dfcStatus &= Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                                                          command,
                                                          out buffer,
                                                          1000,
                                                          2000);

                if (dfcStatus)
                {
                    lblStatus.Text      = "On Camera Dark Field Correction Done";
                    lblStatus.BackColor = Color.LimeGreen;
                }
                else
                {
                    lblStatus.Text      = "On Camera Dark Field Correction not done properly";
                    lblStatus.BackColor = Color.Red;
                }
                this.fpnDone             = dfcStatus;
                this.btnFFC_FPN.Enabled  = true;
                this.btnFFC_PRNU.Enabled = true;
            }
            catch (System.Exception ex)
            {
                lblStatus.Text           = "On Camera Dark Field Correction not done properly";
                lblStatus.BackColor      = Color.Red;
                this.btnFFC_FPN.Enabled  = true;
                this.btnFFC_PRNU.Enabled = true;
                this.fpnDone             = false;
                MessageBox.Show("Exception occurred while performing FPN.\r\n Error: " + ex.Message,
                                "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Example #11
        public void ProcessStep(HImage imageGrabbed)
        {
            HTuple startTime = null;
            HTuple endTime   = null;

            HOperatorSet.CountSeconds(out startTime);
            HOperatorSet.CountSeconds(out endTime);
            currentLSAData = new LightStickAlignmentData();
            HImage imageCropped = new HImage();

            try
            {
                int imageChannels = imageGrabbed.CountChannels();

                HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

                if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
                {
                    rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                    rgbImg = rImg.Compose3(gImg, bImg);
                }

                HImage grayImage = new HImage();
                if (rgbImg == null)
                {
                    return;
                }

                imageGrabbed = rgbImg.CopyImage();

                int presentImageNoOfChannels = imageGrabbed.CountChannels();

                lock (this.ImageToBeSaved)
                {
                    this.ImageToBeSaved = imageGrabbed.CopyImage();
                }
                // Feed the image to the script
                imageGrabbed         = imageGrabbed.CropDomain();
                currentLSAData.Image = imageGrabbed;

                if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
                {
                    HImage ho_R;
                    HImage ho_G;
                    HImage ho_B;

                    ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

                    HTuple RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayValues = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayValues = Globals.GetGrayValuesOfLine(ho_B);
                    HTuple IRAOIGrayVals  = Globals.GetGrayValuesOfLine(irImg);

                    currentLSAData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
                    currentLSAData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
                    currentLSAData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
                    currentLSAData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

                    imageCropped = imageGrabbed.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.LICROIRectangleHeight,
                                                         (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.LICROIRectangleWidth,
                                                         Properties.Settings.Default.LICROIRectangleWidth * 2,
                                                         (this.CameraAcquisition.CurrentImageHeight / 2));

                    ho_R = imageCropped.Decompose3(out ho_G, out ho_B);

                    RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);

                    // Clipping the pixels at Left & Right of the line
                    RAOIGrayValues = RAOIGrayValues.TupleSelectRange(
                        this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                        (Properties.Settings.Default.LICROIRectangleWidth * 2) - this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

                    // Update the labels showing the results of light uniformity check
                    int RminGrayValue;
                    int RmaxGrayValue;

                    // Check whether the gray values are within the defined range in the RED plane
                    bool RPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(RAOIGrayValues.ToDArr(), out RminGrayValue, out RmaxGrayValue);

                    currentLSAData.MinGrayValue = RminGrayValue;
                    currentLSAData.MaxGrayValue = RmaxGrayValue;
                    currentLSAData.Status       = RPlaneOptimum;
                }
                else if (this.cameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
                {
                    HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
                    currentLSAData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();

                    // Clipping the pixels at Left & Right of the line
                    AOIGrayValues = AOIGrayValues.TupleSelectRange(
                        this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                        this.cameraAcquisition.CurrentImageWidth - this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

                    // Update the labels showing the results of light uniformity check
                    int minGrayValue;
                    int maxGrayValue;

                    // Check whether the gray values are within the defined range in the MONO plane
                    bool MonoPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(AOIGrayValues.ToDArr(), out minGrayValue, out maxGrayValue);

                    currentLSAData.MinGrayValue = minGrayValue;
                    currentLSAData.MaxGrayValue = maxGrayValue;
                    currentLSAData.Status       = MonoPlaneOptimum;
                }
                HOperatorSet.CountSeconds(out endTime);
                currentLSAData.TimeTaken = (endTime - startTime).D * 1000;

                this.BeginInvoke(new Action(UpdateStepUI));
            }
            catch (Exception ex)
            {
                string errorMessage = "Exception occurred during light intensity check. ";
                errorMessage = errorMessage + " Error Message: " + ex.Message;
                //MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);

                this.BeginInvoke(new Action(UpdateStepUI));

                //lblStatus.Text = errorMessage;
                //lblStatus.BackColor = Color.Red;
            }
        }
Example #12
        public void ProcessStep(HImage imageGrabbed)
        {
            currentFMCData = new FocusMagnificationCheckData();
            lock (this.ImageToBeSaved)
            {
                this.ImageToBeSaved = imageGrabbed.CopyImage();
            }

            bool calibrationTargetContrastOK = true;

            // Feed the image to the script
            imageGrabbed         = imageGrabbed.CropDomain();
            currentFMCData.Image = imageGrabbed;

            int imageChannels = imageGrabbed.CountChannels();

            HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

            if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
            {
                rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                rgbImg = rImg.Compose3(gImg, bImg);
            }

            HImage grayImage = new HImage();

            if (rgbImg == null)
            {
                return;
            }

            imageGrabbed = rgbImg.CopyImage();

            #region IntensityCheckForCheckingCalibrationTarget
            // Check the minimum and maximum light intensity to determine whether the correct calibration target
            // has been placed
            grayImage = imageGrabbed.Rgb1ToGray();

            HImage ho_R;
            HImage ho_G;
            HImage ho_B;

            ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

            HTuple RAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);

            currentFMCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
            currentFMCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
            currentFMCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
            currentFMCData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

            int RPlaneMinValue;
            int RPlaneMaxValue;

            RAOIGrayVals = RAOIGrayVals.TupleSelectRange(this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                                                         this.CameraAcquisition.CurrentImageWidth - this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

            calibrationTargetContrastOK = this.CameraAcquisition.IsLightIntensityLevelOptimum(GAOIGrayVals.ToDArr(), out RPlaneMinValue, out RPlaneMaxValue);
            calibrationTargetContrastOK = (RPlaneMaxValue - RPlaneMinValue) > Properties.Settings.Default.FocusPatternCheckReferenceGrayValue;

            #endregion

            #region FindPatternArea
            //Finds the pattern area
            HImage reducedImage = Globals.FindPatternArea(grayImage);
            if (reducedImage == null)
            {
                SetFocusStatusMessage("Image is too dark or Incorrect Pattern", Color.Red);
                currentFMCData.FocusPercentage         = "";
                currentFMCData.MagnificationPercentage = "";
                currentFMCData.PixelResolution         = "";
                //this.BeginInvoke(new Action<FocusMagnificationCheckData>(UpdateFocusMagnificationStepUI), currentFMCData);
                return;
            }
            #endregion

            #region Focus Learning

            hv_focus = new HTuple();
            HTuple hv_currentSharpness = new HTuple();
            HImage imageCropped        = new HImage();

            //Indicates that execution has entered the focus learning stage
            //(For both successful and failed learning)
            bool focusLearningDone = false;

            if (this.focusLearningStarted)
            {
                #region FocusLearningStage
                focusLearningDone = true;
                if (!this.focusLearningOver)
                {
                    HTuple hv_Scale;
                    HTuple hv_Subsampling = 1;
                    hv_Scale = 1.0 / hv_Subsampling;

                    int grabbedImageWidth, grabbedImageHeight;

                    imageGrabbed.GetImageSize(out grabbedImageWidth, out grabbedImageHeight);

                    // Crop the image before learning
                    imageCropped = imageGrabbed.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
                                                         (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
                                                         Properties.Settings.Default.FocusROIRectangleWidth * 2,
                                                         (this.CameraAcquisition.CurrentImageHeight / 2));

                    int croppedImageWidth, croppedImageHeight;

                    imageCropped.GetImageSize(out croppedImageWidth, out croppedImageHeight);

                    int noOfSamples = croppedImageHeight / Properties.Settings.Default.NoOfSamplesUsedInFocusStep;

                    for (int i = 0; i < Properties.Settings.Default.NoOfSamplesUsedInFocusStep; i++)
                    {
                        try
                        {
                            int    row             = i * noOfSamples;
                            HImage newCroppedImage = imageCropped.CopyImage().CropPart(row, 0, croppedImageWidth, noOfSamples);
                            //newCroppedImage.WriteImage("bmp", 0, "D:\\imageCropped" + i.ToString());

                            // Function Call for sharpness Measurement
                            hv_currentSharpness     = MeasureSharpness(newCroppedImage, hv_Scale);
                            hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
                        }
                        catch (Exception ex)
                        {
                            // Sharpness measurement failed for this sample; skip it and continue with the next one.
                        }
                    }

                    currentFMCData.FocusPercentage = "";
                    SetFocusStatusMessage("Learning...", Color.Yellow);
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                }
                else
                {
                    hv_MaxAutoCorrelation = new HTuple();
                    HTuple hv_Change = new HTuple();
                    if (hv_AutoCorrelationTuple.TupleLength() > 0)
                    {
                        hv_MaxAutoCorrelation = hv_AutoCorrelationTuple.TupleMax();
                        hv_Change             = ((hv_AutoCorrelationTuple.TupleMax() - hv_AutoCorrelationTuple.TupleMin()) / hv_AutoCorrelationTuple.TupleMax()) * 100;

                        if (hv_MaxAutoCorrelation.D <= 0.0 || hv_Change.D < Properties.Settings.Default.MinimumFocusLearningRangeRequired)
                        {
                            currentFMCData.FocusPercentage = "";
                            SetFocusStatusMessage("Focus Learning not done properly. Range of focus learning is not enough.", Color.Orange);
                            this.focusLearningOver = false;
                            UpdateLabelMessage(currentFMCData.FocusPercentage);
                        }
                        else
                        {
                            this.CameraAcquisition.CurrentCameraSetupProperties.FocusMaxAutoCorrelationValue = hv_MaxAutoCorrelation.D;
                        }
                    }
                    else
                    {
                        currentFMCData.FocusPercentage = "";
                        SetFocusStatusMessage("Focus Learning not done properly. Sharpness measurement failed", Color.Orange);
                        this.focusLearningOver = false;
                        UpdateLabelMessage(currentFMCData.FocusPercentage);
                    }
                    this.focusLearningStarted = false;
                }
                #endregion
            }
            else if (this.focusLearningOver)
            {
                #region FocusTestingPhase
                if (!calibrationTargetContrastOK)
                {
                    currentFMCData.FocusPercentage         = "";
                    currentFMCData.MagnificationPercentage = "";
                    currentFMCData.PixelResolution         = "";
                    SetFocusStatusMessage("Incorrect Pattern. Not enough contrast", Color.Red);
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                    return;
                }

                HTuple hv_Scale;
                HTuple hv_Subsampling = 1;

                hv_Scale = 1.0 / hv_Subsampling;
                // Crop the image before learning
                imageCropped = imageGrabbed.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
                                                     (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
                                                     Properties.Settings.Default.FocusROIRectangleWidth * 2,
                                                     (this.CameraAcquisition.CurrentImageHeight / 2));

                // Function Call for sharpness Measurement
                hv_currentSharpness = MeasureSharpness(imageCropped, hv_Scale);
                if (hv_currentSharpness > hv_MaxAutoCorrelation + 2)
                {
                    SetFocusStatusMessage("Current sharpness is more than learnt sharpness. Insert valid calibration doc or Re-do focus learning !!", Color.Orange);
                    currentFMCData.PixelResolution         = "";
                    currentFMCData.MagnificationPercentage = "";
                    currentFMCData.FocusPercentage         = "";
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                    return;
                }
                else if (hv_currentSharpness > hv_MaxAutoCorrelation)
                {
                    hv_MaxAutoCorrelation   = hv_currentSharpness;
                    hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
                }
                hv_focus = 100 - (((hv_MaxAutoCorrelation - hv_currentSharpness) / hv_MaxAutoCorrelation) * 100);
                hv_focus = hv_focus.TupleRound();
                currentFMCData.FocusPercentage = hv_focus.ToString();
                if (hv_focus > 100)
                {
                    // Not Focused
                    currentFMCData.FocusPercentage = "";
                    SetFocusStatusMessage("Focus learning not done properly", Color.Red);
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                }
                if (hv_focus >= 95 && hv_focus <= 100)
                {
                    // Focused
                    SetFocusStatusMessage("Focused", Color.LimeGreen);
                }
                else if (hv_focus > 70 && hv_focus < 95)
                {
                    //// Fine Tuning is required
                    SetFocusStatusMessage("Fine Tuning is required", Color.Yellow);
                }
                else
                {
                    // Not Focused
                    SetFocusStatusMessage("Not focused", Color.Red);
                }
                #endregion
            }
            else if (!focusLearningOver && !focusLearningDone)
            {
                if (hv_MaxAutoCorrelation == null)
                {
                    SetFocusStatusMessage("Focus learning not done", Color.Yellow);
                }
            }

            UpdateLabelMessage(currentFMCData.FocusPercentage);
            #endregion
        }
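In the focus-testing branch, the score is computed as hv_focus = 100 - ((max - current) / max) * 100, i.e. the current sharpness expressed as a percentage of the learned maximum sharpness. A minimal sketch of the same arithmetic, as a hypothetical standalone helper:

        // Hypothetical helper: current sharpness as a percentage of the learned maximum,
        // rounded the same way as hv_focus.TupleRound() in the snippet above.
        public static int FocusPercentage(double currentSharpness, double maxAutoCorrelation)
        {
            double focus = 100.0 - (((maxAutoCorrelation - currentSharpness) / maxAutoCorrelation) * 100.0);
            return (int)Math.Round(focus);
        }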
Example #13
        public ResultFinShell DealFinShell(ParFinShell par, Hashtable htResult)
        {
            #region Declarations
            HTuple         num_Obj = 0;
            ResultFinShell result  = new ResultFinShell();

            HTuple width  = null;
            HTuple height = null;

            ImageAll imageAll = null;

            HObject ho_Image             = null;
            HObject ho_RegionReduced     = null;
            HObject ho_ResultPreprocess  = null;
            HObject ho_RegionOuter       = null;
            HObject ho_RegionInnerOuter  = null;
            HObject ho_RegionInner       = null;
            HObject ho_RegionIntersected = null;
            HObject ho_RegionConnected   = null;
            HObject ho_RegionFillUp      = null;
            HObject ho_RegionOpening     = null;
            HObject ho_RegionClosing     = null;

            HObject ho_RegionWidthSelected  = null;
            HObject ho_RegionHeightSelected = null;
            HObject ho_RegionAreaSelected   = null;

            HObject ho_RegionUnion1 = null;
            HObject ho_RegionUnion2 = null;

            HObject ho_RegionFinShell = null;

            // Outer contour along the tangent direction of the XLD outline
            HObject ho_xldToCalHeight                  = null;
            HObject ho_xldToCalWidth                   = null;
            HObject ho_ho_RegionFinShellSelect         = null;
            HObject ho_ho_RegionFinShellSelectContours = null;

            HTuple hv_disMin      = null;
            double HeightCalUpper = 0;

            HTuple WidthCalRow = null, WidthCalCol = null;

            HTuple hv_FinShellIntersectRow = null;
            HTuple hv_FinShellIntersectCol = null;
            HTuple hv_FinShellCircleRadius = null;

            HTuple FinShellWidth = null;

            HOperatorSet.GenEmptyObj(out ho_Image);
            HOperatorSet.GenEmptyObj(out ho_RegionReduced);
            HOperatorSet.GenEmptyObj(out ho_ResultPreprocess);
            HOperatorSet.GenEmptyObj(out ho_RegionOuter);
            HOperatorSet.GenEmptyObj(out ho_RegionInnerOuter);
            HOperatorSet.GenEmptyObj(out ho_RegionInner);
            HOperatorSet.GenEmptyObj(out ho_RegionIntersected);
            HOperatorSet.GenEmptyObj(out ho_RegionConnected);
            HOperatorSet.GenEmptyObj(out ho_RegionFillUp);
            HOperatorSet.GenEmptyObj(out ho_RegionOpening);
            HOperatorSet.GenEmptyObj(out ho_RegionClosing);
            HOperatorSet.GenEmptyObj(out ho_RegionWidthSelected);
            HOperatorSet.GenEmptyObj(out ho_RegionHeightSelected);
            HOperatorSet.GenEmptyObj(out ho_RegionAreaSelected);
            HOperatorSet.GenEmptyObj(out ho_RegionUnion1);
            HOperatorSet.GenEmptyObj(out ho_RegionUnion2);
            HOperatorSet.GenEmptyObj(out ho_RegionFinShell);

            HOperatorSet.GenEmptyObj(out ho_xldToCalHeight);
            HOperatorSet.GenEmptyObj(out ho_xldToCalWidth);
            HOperatorSet.GenEmptyObj(out ho_ho_RegionFinShellSelect);
            HOperatorSet.GenEmptyObj(out ho_ho_RegionFinShellSelectContours);

            HTuple hv_Area      = null;
            HTuple hv_CenterRow = null;
            HTuple hv_CenterCol = null;

            HTuple hv_Row     = null;
            HTuple hv_Column  = null;
            HTuple hv_Row2    = null;
            HTuple hv_Column2 = null;
            HTuple hv_Phi     = null;
            HTuple hv_Width   = null;
            HTuple hv_Height  = null;
            #endregion Declarations

            try
            {
                #region Basic function call
                //if (BasicImageProcess(par, result, htResult, out ho_RegionReduced, out ho_Image, out width, out height))
                //{

                //}
                //else
                //{
                //    return result;
                //}
                #endregion Basic function call

                // Get the image preprocessing result; the image is binarized before further processing
                ho_ResultPreprocess = result.g_ResultPreProcess.ImageResult.Ho_Image;

                #region Actual contour
                //string nameStdEdge = par.NameCellActualEdge;
                string nameStdEdge = "C9";
                /***** Get the smoothed contour of the current part *****/
                ResultRaisedEdge resultRaise = (ResultRaisedEdge)htResult[nameStdEdge];
                if (resultRaise == null)
                {
                    result.LevelError_e = LevelError_enum.OK;
                    result.Annotation   = par.NameCell.ToString() + ", failed to obtain the smoothed contour of the current part";
                    result.SetDefault();
                    return(result);
                }
                HObject ho_StdEdge = ((ImageAll)resultRaise.HtResultImage["C9FunRaisedEdge.RegionRemainingRemovedSmoothed"]).Ho_Image;

                #endregion Actual contour

                // The following is the chipping-inspection algorithm
                // Get the image preprocessing result; the image is binarized before further processing
                ho_ResultPreprocess = result.g_ResultPreProcess.ImageResult.Ho_Image;
                // Generate parallel ROI regions from the contour smoothed in the residue check
                ho_RegionOuter.Dispose();
                ho_RegionInner.Dispose();
                ho_RegionInnerOuter.Dispose();
                GenParalRegionFromXld(ho_StdEdge, out ho_RegionOuter, out ho_RegionInnerOuter, out ho_RegionInner, par.Width_Paral, par.OuterStdShift_Paral, par.InnerStdShift_Paral);


                HObject ho_PreRegion = result.g_ResultPreProcess.resultBinary.RegionResult.Ho_Image;
                ho_RegionIntersected.Dispose();
                if (par.WorkingRegion == "Outer")// residue inspection: the ROI is on the outer side of the edge
                {
                    HOperatorSet.Intersection(ho_PreRegion, ho_RegionOuter, out ho_RegionIntersected);
                }
                else if (par.WorkingRegion == "Inner")// chipping inspection: the ROI is on the inner side of the edge
                {
                    HOperatorSet.Intersection(ho_ResultPreprocess, ho_RegionInner, out ho_RegionIntersected);
                }
                else
                {
                    return(result);
                }
                HOperatorSet.CountObj(ho_RegionIntersected, out num_Obj);
                if (num_Obj == 0)
                {
                    result.LevelError_e = LevelError_enum.OK;
                    result.Annotation   = par.NameCell.ToString() + ", no anomalies within the ROI";
                    result.SetDefault();
                    return(result);
                }
                ho_RegionConnected.Dispose();
                HOperatorSet.Connection(ho_RegionIntersected, out ho_RegionConnected);
                ho_RegionFillUp.Dispose();
                HOperatorSet.FillUp(ho_RegionConnected, out ho_RegionFillUp);

                #region Opening and closing operations
                ho_RegionOpening.Dispose();

                if (par.OpeningCircle != 0)
                {
                    HOperatorSet.OpeningCircle(ho_RegionFillUp, out ho_RegionOpening, par.OpeningCircle);
                }
                else
                {
                    ho_RegionOpening = ho_RegionFillUp.Clone();
                }
                HOperatorSet.CountObj(ho_RegionOpening, out num_Obj);
                if (num_Obj == 0)
                {
                    result.LevelError_e = LevelError_enum.OK;
                    result.Annotation   = par.NameCell.ToString() + ", anomalous points disappeared after the opening operation";
                    result.SetDefault();
                    return(result);
                }

                ho_RegionClosing.Dispose();
                if (par.ClosingCircle != 0)
                {
                    HOperatorSet.ClosingCircle(ho_RegionOpening, out ho_RegionClosing, par.ClosingCircle);
                }
                else
                {
                    ho_RegionClosing = ho_RegionOpening.Clone();
                }
                #endregion Opening and closing operations

                // Pick out all shell regions whose width/height/area exceed the thresholds
                ho_RegionWidthSelected.Dispose();
                ho_RegionHeightSelected.Dispose();
                ho_RegionAreaSelected.Dispose();
                HOperatorSet.SelectShape(ho_RegionClosing, out ho_RegionWidthSelected, "width", "and", par.MinWidth, par.MaxWidth);
                HOperatorSet.SelectShape(ho_RegionWidthSelected, out ho_RegionHeightSelected, "height", "and", par.MinHeight, par.MaxHeight);
                HOperatorSet.SelectShape(ho_RegionHeightSelected, out ho_RegionAreaSelected, "area", "and", par.MinArea, par.MaxArea);
                ho_RegionUnion1.Dispose();
                //HOperatorSet.Union2(ho_RegionWidthSelected, ho_RegionHeightSelected, out ho_RegionUnion1);
                ho_RegionUnion2.Dispose();
                //HOperatorSet.Union2(ho_RegionUnion1, ho_RegionAreaSelected, out ho_RegionUnion2);
                HOperatorSet.CountObj(ho_RegionAreaSelected, out num_Obj);
                if (num_Obj == 0)
                {
                    result.LevelError_e = LevelError_enum.OK;
                    result.Annotation   = par.NameCell.ToString() + ", anomalous points disappeared after region feature selection";
                    result.SetDefault();
                    return(result);
                }
                // Extracted shell (Shell) OR residue (Fin) regions
                ho_RegionFinShell.Dispose();
                ho_RegionFinShell = ho_RegionAreaSelected.Clone();
                //HOperatorSet.Connection(ho_RegionUnion2, out ho_RegionFinShell);

                // Blob analysis on the extracted residue or chipping regions
                HOperatorSet.AreaCenter(ho_RegionFinShell, out hv_Area, out hv_CenterRow, out hv_CenterCol);
                double[] dblHeightArry = new double[hv_CenterRow.Length];// array holding the height values
                double[] dblWidthArry  = new double[hv_CenterRow.Length];

                #region Compute the enclosing shape
                switch (par.SmallestSurround_e)
                {
                case SmallestSurround_enum.Rect2:
                    HOperatorSet.SmallestRectangle2(ho_RegionFinShell, out hv_Row, out hv_Column, out hv_Phi, out hv_Width, out hv_Height);

                    break;

                case SmallestSurround_enum.Circle:
                    HOperatorSet.SmallestCircle(ho_RegionFinShell, out hv_Row, out hv_Column, out hv_Width);
                    break;

                case SmallestSurround_enum.TanLineRect:
                    if (par.WorkingRegion == "Outer")    //Residue detection: the ROI lies outside the reference edge
                    {
                        HeightCalUpper = par.OuterStdShift_Paral + par.Width_Paral * 1.5;
                        HOperatorSet.GenParallelContourXld(ho_StdEdge, out ho_xldToCalHeight, "regression_normal", HeightCalUpper);
                        HOperatorSet.GenParallelContourXld(ho_StdEdge, out ho_xldToCalWidth, "regression_normal", par.OuterStdShift_Paral);
                    }
                    else    //Chipping detection: the ROI lies inside the reference edge
                    {
                        HeightCalUpper = par.InnerStdShift_Paral + par.Width_Paral * 1.5;
                        HOperatorSet.GenParallelContourXld(ho_StdEdge, out ho_xldToCalHeight, "regression_normal", -HeightCalUpper);
                        HOperatorSet.GenParallelContourXld(ho_StdEdge, out ho_xldToCalWidth, "regression_normal", -par.InnerStdShift_Paral);
                    }
                    for (int j = 0; j < hv_CenterRow.Length; j++)
                    {
                        try
                        {
                            HOperatorSet.SelectObj(ho_RegionFinShell, out ho_ho_RegionFinShellSelect, j + 1);
                            HOperatorSet.GenContourRegionXld(ho_ho_RegionFinShellSelect, out ho_ho_RegionFinShellSelectContours, "border");


                            //Compute an XLD parallel to the reference xld for the residue region
                            //HObject ho_ContoursIntersected = null;
                            //HObject ho_TestImage = null;
                            //HObject ho_TestImageResult = null;
                            //HOperatorSet.GenImageConst(out ho_TestImage, "byte", width, height);
                            //HOperatorSet.PaintXld(ho_xldToCalHeight, ho_TestImage, out ho_TestImageResult, 255);
                            //HOperatorSet.PaintXld(ho_ho_RegionFinShellSelectContours, ho_TestImageResult, out ho_TestImageResult, 255);
                            //HOperatorSet.WriteImage(ho_TestImageResult, "bmp", 128, "E:\\DOC\\德龙\\0427测试\\XKY4.27\\TestImage"+j.ToString());



                            //Compute the residue height
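                            //ho_xldToCalHeight is the reference edge shifted by HeightCalUpper, so the
                            //defect height equals that shift minus the minimum distance between the
                            //shifted contour and the defect border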
                            HOperatorSet.DistanceCcMin(ho_xldToCalHeight, ho_ho_RegionFinShellSelectContours, "fast_point_to_segment", out hv_disMin);
                            dblHeightArry[j] = Math.Round(HeightCalUpper - hv_disMin.D, 1);


                            HOperatorSet.IntersectionContoursXld(ho_xldToCalWidth, ho_ho_RegionFinShellSelectContours, "mutual", out hv_FinShellIntersectRow, out hv_FinShellIntersectCol, out hv_FinShellCircleRadius);

                            double   dblFinShellWidth = 0;
                            HTuple   hv_WidthTemp;
                            double[] IntersectPointRow = hv_FinShellIntersectRow.DArr;
                            double[] IntersectPointCol = hv_FinShellIntersectCol.DArr;
                            //Find the distance between the two farthest intersection points on the edge
                            for (int k = 0; k < hv_FinShellIntersectRow.Length - 1; k++)
                            {
                                for (int l = k + 1; l < hv_FinShellIntersectRow.Length; l++)
                                {
                                    HOperatorSet.DistancePp(IntersectPointRow[k], IntersectPointCol[k], IntersectPointRow[l], IntersectPointCol[l], out hv_WidthTemp);
                                    if (hv_WidthTemp.D > dblFinShellWidth)
                                    {
                                        dblFinShellWidth = hv_WidthTemp.D;
                                    }
                                }
                            }

                            //HOperatorSet.GenContourPolygonXld(out ho_ContoursIntersected, hv_FinShellIntersectRow, hv_FinShellIntersectCol);
                            //HOperatorSet.LengthXld(ho_ContoursIntersected, out FinShellWidth);

                            //Compute the residue width
                            dblWidthArry[j] = Math.Round(dblFinShellWidth, 1);
                        }
                        catch
                        {
                        }
                    }
                    break;

                default:    //default: smallest axis-aligned rectangle (rectangle1)
                    HOperatorSet.SmallestRectangle1(ho_RegionFinShell, out hv_Row, out hv_Column, out hv_Row2, out hv_Column2);
                    break;
                }



                #endregion Compute the bounding envelope

                double x = 0;
                double y = 0;

                for (int j = 0; j < hv_CenterRow.Length; j++)
                {
                    #region Output coordinate type
                    switch (par.TypeOutCoord)
                    {
                    case "面积中心":
                        x = Math.Round(hv_CenterCol.ToDArr()[j], 0);
                        y = Math.Round(hv_CenterRow.ToDArr()[j], 0);
                        break;

                    case "包络中心":
                        if (par.SmallestSurround_e == SmallestSurround_enum.Rect1)
                        {
                            x = Math.Round((hv_Column.ToDArr()[j] + hv_Column2.ToDArr()[j]) / 2, 3);
                            y = Math.Round((hv_Row.ToDArr()[j] + hv_Row2.ToDArr()[j]) / 2, 3);
                        }
                        else
                        {
                            x = Math.Round(hv_Column.ToDArr()[j], 3);
                            y = Math.Round(hv_Row.ToDArr()[j], 3);
                        }
                        break;
                    }
                    #endregion Output coordinate type


                    result.X_L.Add(x);
                    result.Y_L.Add(y);
                    result.Area_L.Add(hv_Area.IArr[j]);//area of the region
                    result.Rectangularity_L.Add(0);
                    result.Circularity_L.Add(0);
                    #region Collect envelope results
                    switch (par.SmallestSurround_e)
                    {
                    case SmallestSurround_enum.Rect2:
                        result.R_L.Add(Math.Round(hv_Phi.DArr[j], 5));         //angle
                        result.Height_L.Add(Math.Round(hv_Height.DArr[j], 3)); //rectangle length and width (half-lengths)
                        result.Width_L.Add(Math.Round(hv_Width.DArr[j], 3));
                        result.Radius_L.Add(0);
                        break;

                    case SmallestSurround_enum.Circle:
                        result.Radius_L.Add(Math.Round(hv_Width.DArr[j], 3));
                        result.R_L.Add(0);
                        result.Height_L.Add(0);
                        result.Width_L.Add(0);
                        break;

                    case SmallestSurround_enum.TanLineRect:     //tangent rectangle, i.e. a rectangle oriented parallel to the tangent of the xld
                        result.Radius_L.Add(0);
                        result.R_L.Add(0);
                        result.Height_L.Add(dblHeightArry[j]);
                        result.Width_L.Add(dblWidthArry[j]);
                        break;

                    default:    //default: smallest axis-aligned rectangle (rectangle1)
                        result.R_L.Add(0);
                        double rectWidth = (hv_Column2.ToDArr()[j] - hv_Column.ToDArr()[j]) / 2.0;//half-width of rectangle1 (avoids integer truncation)
                        result.Width_L.Add(Math.Round(rectWidth, 3));
                        double rectHeight = (hv_Row2.ToDArr()[j] - hv_Row.ToDArr()[j]) / 2.0;//half-height of rectangle1
                        result.Height_L.Add(Math.Round(rectHeight, 3));
                        result.Radius_L.Add(0);
                        break;
                    }
                    #endregion Collect envelope results
                }

                //Number of regions
                result.Num = result.X_L.Count;
                if (result.Num == 0)
                {
                    result.LevelError_e = LevelError_enum.OK;
                    result.Annotation   = par.NameCell.ToString() + "FinShell个数为0";
                    result.SetDefault();
                }
                //Add display output
                AddDisplay(par.g_ParOutput, result);

                return(result);
            }
            catch (Exception ex)
            {
                //Swallow the exception; the partially filled result is returned after the finally block runs
                return(result);
            }
            finally
            {
                //Comprehensive post-processing of the result
                SetComprehensiveResult(result, par, htResult, false);

                //Calibration
                DealCalibResult(par, result, htResult);

                #region Record elapsed time
                WriteRunTime(stopWatch, NameClass, par, result);
                #endregion Record elapsed time

                #region Record intermediate objects

                RecordHoject(par, result, NameClass, "RegionReduced", ho_RegionReduced);
                RecordHoject(par, result, NameClass, "RegionInner", ho_RegionInner);
                RecordHoject(par, result, NameClass, "RegionOuter", ho_RegionOuter);
                RecordHoject(par, result, NameClass, "RegionInnerOuter", ho_RegionInnerOuter);

                RecordHoject(par, result, NameClass, "RegionIntersected", ho_RegionIntersected);
                RecordHoject(par, result, NameClass, "RegionConnected", ho_RegionConnected);
                RecordHoject(par, result, NameClass, "RegionFillUp", ho_RegionFillUp);

                RecordHoject(par, result, NameClass, "RegionOpening", ho_RegionOpening);
                RecordHoject(par, result, NameClass, "RegionClosing", ho_RegionClosing);
                RecordHoject(par, result, NameClass, "RegionWidthSelected", ho_RegionWidthSelected);

                RecordHoject(par, result, NameClass, "RegionHeightSelected", ho_RegionHeightSelected);
                RecordHoject(par, result, NameClass, "RegionAreaSelected", ho_RegionAreaSelected);
                RecordHoject(par, result, NameClass, "RegionUnion1", ho_RegionUnion1);

                RecordHoject(par, result, NameClass, "RegionUnion2", ho_RegionUnion2);
                RecordHoject(par, result, NameClass, "RegionFinShell", ho_RegionFinShell);
                #endregion Record intermediate objects

                ho_xldToCalHeight.Dispose();
                ho_xldToCalWidth.Dispose();
                ho_ho_RegionFinShellSelect.Dispose();
                ho_ho_RegionFinShellSelectContours.Dispose();
            }
        }
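
The width measurement in the TanLineRect branch above is an O(n²) scan over the intersection points returned by IntersectionContoursXld. A minimal standalone sketch of that step is shown below; the helper name MaxPairwiseDistance is hypothetical (not part of the original class) and it assumes only the HalconDotNet HOperatorSet.DistancePp operator already used above.

        // Hypothetical helper: largest point-to-point distance among a set of points,
        // i.e. the defect width measured along the reference contour.
        private static double MaxPairwiseDistance(HTuple rows, HTuple cols)
        {
            double maxDist = 0;
            double[] r = rows.ToDArr();
            double[] c = cols.ToDArr();
            for (int k = 0; k < r.Length - 1; k++)
            {
                for (int l = k + 1; l < r.Length; l++)
                {
                    HTuple dist;
                    HOperatorSet.DistancePp(r[k], c[k], r[l], c[l], out dist);
                    if (dist.D > maxDist)
                    {
                        maxDist = dist.D;
                    }
                }
            }
            return maxDist;
        }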
Ejemplo n.º 14
0
        public void DealSmoothing(ParRaisedEdgeSmooth parRaisedEdgeSmooth, HObject ho_ImageBinary, HObject ho_RegionBinary, ResultRaisedEdge result, Hashtable htResult)
        {
            ParSmooth par = parRaisedEdgeSmooth.g_ParSmooth;

            #region Declarations
            HObject ho_ROI              = null;
            HObject ho_FilterROI        = null;
            HObject ho_FilterInverseROI = null;
            // Local control variables
            HTuple hv_Width = null, hv_Height = null;
            HTuple hv_RowInit = new HTuple(), hv_ColInit = new HTuple();
            HTuple hv_Row1 = null, hv_Col1 = null;
            HTuple hv_Row2 = null, hv_Col2 = null;
            HTuple hv_AreaDilation = new HTuple(), hv_RowDilation = new HTuple();
            HTuple hv_ColumnDilation = new HTuple(), hv_NumberSmooth = new HTuple();
            HTuple hv_RowS = new HTuple();
            HTuple hv_ColS = new HTuple(), hv_InvertedCol = new HTuple();
            HTuple hv_InvertedRow = new HTuple(), hv_ConcatCol = new HTuple();
            HTuple hv_ConcatRow = new HTuple(), hv_NumberRegionDilation1 = new HTuple();

            // Local iconic variables
            //How to pass the image and the region in
            HObject ho_ImageReduced;
            HObject ho_Region, ho_RegionDilation;
            HObject ho_rectErosion1, ho_BorderOrigin;
            HObject ho_ConnectedRegionDilation = null;
            HObject ho_BinImage = null;
            HObject ho_ImageBinReduced = null, ho_BorderOriginal = null, ho_SmoothedBoaderOriginal = null;
            HObject ho_SelectedSmoothedContours = null, ho_ObjectSelected = null;
            HObject ho_ContourConcat = null, ho_RegionParall = null, ho_ImageParall = null;
            HObject ho_RegionError = null, ho_ConnectedRegions = null, ho_RegionErosionError = null;
            HObject ho_RegionDilationError = null;

            //Objects used for defect extraction
            HObject ho_BorderFull           = null;
            HObject ho_SmoothedContoursFull = null;
            HObject ho_ParaFull             = null;
            HObject ho_ParaRegionFull       = null;
            HObject ho_RegionROIFull        = null;
            HObject ho_ErrorFull            = null;
            HObject ho_FilterErrorFull      = null;//in chipping extraction, the defects left after the step-face chipping has been cut away
            HObject ho_ErrorRegioinFull     = null;
            HObject ho_ErrorConnectFull     = null;
            HObject ho_ErrorSelectFull      = null;
            HObject ho_ErrorXldFull         = null;

            HObject ho_SmoothedContoursFullLongest = null;
            HOperatorSet.GenEmptyObj(out ho_SmoothedContoursFullLongest);

            HOperatorSet.GenEmptyObj(out ho_ImageReduced);
            HOperatorSet.GenEmptyObj(out ho_Region);
            HOperatorSet.GenEmptyObj(out ho_RegionDilation);
            HOperatorSet.GenEmptyObj(out ho_rectErosion1);
            HOperatorSet.GenEmptyObj(out ho_BorderOrigin);
            HOperatorSet.GenEmptyObj(out ho_ConnectedRegionDilation);
            HOperatorSet.GenEmptyObj(out ho_BinImage);
            HOperatorSet.GenEmptyObj(out ho_ImageBinReduced);
            HOperatorSet.GenEmptyObj(out ho_BorderOriginal);
            HOperatorSet.GenEmptyObj(out ho_SmoothedBoaderOriginal);
            HOperatorSet.GenEmptyObj(out ho_SelectedSmoothedContours);
            HOperatorSet.GenEmptyObj(out ho_ObjectSelected);
            HOperatorSet.GenEmptyObj(out ho_ContourConcat);
            HOperatorSet.GenEmptyObj(out ho_RegionParall);
            HOperatorSet.GenEmptyObj(out ho_ImageParall);
            HOperatorSet.GenEmptyObj(out ho_RegionError);
            HOperatorSet.GenEmptyObj(out ho_ConnectedRegions);
            HOperatorSet.GenEmptyObj(out ho_RegionErosionError);
            HOperatorSet.GenEmptyObj(out ho_RegionDilationError);
            HOperatorSet.GenEmptyObj(out ho_BorderFull);
            HOperatorSet.GenEmptyObj(out ho_SmoothedContoursFull);


            HOperatorSet.GenEmptyObj(out ho_FilterInverseROI);
            HOperatorSet.GenEmptyObj(out ho_FilterErrorFull);
            HOperatorSet.GenEmptyObj(out ho_ParaFull);
            HOperatorSet.GenEmptyObj(out ho_ParaRegionFull);
            #endregion Declarations

            try
            {
                int smooth = (par.SmoothValue / 2) * 2 + 1;//the smoothing coefficient must be an odd number
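                //e.g. SmoothValue = 8 and SmoothValue = 9 both yield smooth = 9 (integer division rounds down, *2+1 forces an odd value)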

                HOperatorSet.GetImageSize(ho_ImageBinary, out hv_Width, out hv_Height);
                //Build the ROI
                FunROI funCirROI = new FunROI();
                bool   blResult  = false;
                string anno      = "";
                ho_ROI = funCirROI.CreateOneROI(parRaisedEdgeSmooth.g_ParROI.g_ParROIExcute_L[0], htResult, out blResult, out anno);


                //Erode the ROI to remove unwanted contour borders
                HOperatorSet.ErosionCircle(ho_ROI, out ho_rectErosion1, 20);
                HOperatorSet.ReduceDomain(ho_ImageBinary, ho_rectErosion1, out ho_ImageReduced);

                //Crop via the ROI
                HOperatorSet.Intersection(ho_RegionBinary, ho_ROI, out ho_RegionDilation);

                //Extract contours from the binarized image
                HOperatorSet.ThresholdSubPix(ho_ImageReduced, out ho_BorderOrigin, 100);

                //Iterate up to par.Num times (the loop breaks early once no defect regions remain)
                for (int i = 0; i < par.Num; i++)
                {
                    hv_Row1 = new HTuple();
                    hv_Col1 = new HTuple();
                    hv_Row2 = new HTuple();
                    hv_Col2 = new HTuple();

                    //Remove noise points outside the main body
                    ho_ConnectedRegionDilation.Dispose();
                    HOperatorSet.Connection(ho_RegionDilation, out ho_ConnectedRegionDilation);
                    HOperatorSet.AreaCenter(ho_ConnectedRegionDilation, out hv_AreaDilation, out hv_RowDilation, out hv_ColumnDilation);
                    ho_RegionDilation.Dispose();

                    //Keep the main body
                    HOperatorSet.SelectShape(ho_ConnectedRegionDilation, out ho_RegionDilation, "area", "and", 50000, int.MaxValue);
                    //Binarized image of the main body
                    ho_BinImage.Dispose();
                    HOperatorSet.RegionToBin(ho_RegionDilation, out ho_BinImage, 255, 0, hv_Width, hv_Height);

                    //Reduce the binarized image to the eroded ROI
                    ho_ImageBinReduced.Dispose();
                    HOperatorSet.ReduceDomain(ho_BinImage, ho_rectErosion1, out ho_ImageBinReduced);

                    //***************************
                    //Generate a border with a directional gradient: contour processing uses the smoothed border together with a shifted copy of the original border
                    //Generate the inner border
                    ho_BorderOriginal.Dispose();
                    HOperatorSet.ThresholdSubPix(ho_ImageBinReduced, out ho_BorderOriginal, 100);

                    //Corner points are not arcs
                    ho_SmoothedBoaderOriginal.Dispose();
                    HOperatorSet.SmoothContoursXld(ho_BorderOriginal, out ho_SmoothedBoaderOriginal, smooth);

                    //Filter the contours
                    ho_SelectedSmoothedContours.Dispose();
                    HOperatorSet.SelectContoursXld(ho_SmoothedBoaderOriginal, out ho_SelectedSmoothedContours, "contour_length", par.SelectAreaLow, 99999999, -0.5, 0.5);
                    HOperatorSet.CountObj(ho_SelectedSmoothedContours, out hv_NumberSmooth);
                    if ((int)(new HTuple(hv_NumberSmooth.TupleGreater(1))) != 0)
                    {
                        int numSmooth = hv_NumberSmooth.ToIArr()[0] - 1;
                        for (int j = 0; j < numSmooth; j++)
                        {
                            ho_ObjectSelected.Dispose();
                            HOperatorSet.SelectObj(ho_SelectedSmoothedContours, out ho_ObjectSelected, j + 1);
                            HOperatorSet.GetContourXld(ho_ObjectSelected, out hv_RowS, out hv_ColS);
                            HOperatorSet.TupleConcat(hv_RowInit, hv_RowS, out hv_RowInit);
                            HOperatorSet.TupleConcat(hv_ColInit, hv_ColS, out hv_ColInit);
                        }
                    }
                    else
                    {
                        HOperatorSet.GetContourXld(ho_SelectedSmoothedContours, out hv_RowInit, out hv_ColInit);
                    }

                    //Generate the points of the parallel contours
                    HTuple[] rowCol = null;
                    GenParallPoint(par.Position, par.DefectType, (int)par.GapIn, (int)par.GapOut, hv_RowInit, hv_ColInit, out rowCol);
                    hv_Row1 = rowCol[0];
                    hv_Col1 = rowCol[1];
                    hv_Row2 = rowCol[2];
                    hv_Col2 = rowCol[3];
                    //Generate the region enclosed by the parallel contours
                    HOperatorSet.TupleInverse(hv_Col2, out hv_InvertedCol);
                    HOperatorSet.TupleInverse(hv_Row2, out hv_InvertedRow);

                    HOperatorSet.TupleConcat(hv_Col1, hv_InvertedCol, out hv_ConcatCol);
                    HOperatorSet.TupleConcat(hv_Row1, hv_InvertedRow, out hv_ConcatRow);

                    //select_contours_xld (SmoothedContours, SelectedSmoothedContours, 'contour_length', 50, 99999999, -0.5, 0.5)

                    //HOperatorSet.SelectContoursXld(ho_ContourConcat, out ho_ContourConcat, "contour_length", par.ErrorAreaValue, 99999999, -0.5, 0.5);

                    if (hv_ConcatRow.Length == 0)
                    {
                        result.LevelError_e = LevelError_enum.Error;
                        result.Annotation   = par.NameCell.ToString() + "hv_ConcatRow的数量为0";
                        result.SetDefault();
                        return;
                    }

                    ho_ContourConcat.Dispose();
                    HOperatorSet.GenContourPolygonXld(out ho_ContourConcat, hv_ConcatRow, hv_ConcatCol);

                    //Generate the parallel-contour region
                    ho_RegionParall.Dispose();
                    HOperatorSet.GenRegionContourXld(ho_ContourConcat, out ho_RegionParall, "filled");

                    ho_ImageParall.Dispose();
                    HOperatorSet.ReduceDomain(ho_BinImage, ho_RegionParall, out ho_ImageParall);
                    ho_RegionError.Dispose();
                    HOperatorSet.Threshold(ho_ImageParall, out ho_RegionError, 100, 255);
                    ho_ConnectedRegions.Dispose();
                    HOperatorSet.Connection(ho_RegionError, out ho_ConnectedRegions);

                    //The inner border of the contour is first shifted outward by 1 pixel, then dilated closer (gaps of at most 1 pixel are ignored)
                    ho_RegionErosionError.Dispose();
                    HOperatorSet.SelectShape(ho_ConnectedRegions, out ho_RegionErosionError, "area", "and", par.ErrorAreaValue, 99999999);

                    //Dilate the defects
                    ho_RegionDilationError.Dispose();
                    HOperatorSet.DilationCircle(ho_RegionErosionError, out ho_RegionDilationError, par.DilationDefectValue);

                    //Remove the residue (subtract the dilated defects from the main region)
                    HOperatorSet.Difference(ho_RegionDilation, ho_RegionDilationError, out ho_RegionDilation);

                    HOperatorSet.CountObj(ho_RegionDilationError, out hv_NumberRegionDilation1);
                    if ((int)(new HTuple(hv_NumberRegionDilation1.TupleEqual(0))) != 0)
                    {
                        break;
                    }
                }

                //Full image
                ho_BinImage.Dispose();
                HOperatorSet.RegionToBin(ho_RegionDilation, out ho_BinImage, 255, 0, hv_Width, hv_Height);

                ho_ImageBinReduced.Dispose();
                HOperatorSet.ReduceDomain(ho_BinImage, ho_rectErosion1, out ho_ImageBinReduced);

                //Full contour
                ho_BorderFull.Dispose();
                HOperatorSet.ThresholdSubPix(ho_ImageBinReduced, out ho_BorderFull, 100);

                ho_SmoothedContoursFull.Dispose();
                HOperatorSet.SmoothContoursXld(ho_BorderFull, out ho_SmoothedContoursFull, smooth);

                HTuple hv_ho_SmoothedContoursFullNumber = null;
                HOperatorSet.CountObj(ho_SmoothedContoursFull, out hv_ho_SmoothedContoursFullNumber);//count the smoothed xld contours

                //Return if the full contour is empty
                if (hv_ho_SmoothedContoursFullNumber == 0)
                {
                    result.LevelError_e = LevelError_enum.Error;
                    result.Annotation   = par.NameCell.ToString() + "ho_SmoothedContoursFull的数量为0";
                    result.SetDefault();
                    return;
                }
                //Compute the length of each full-contour segment
                HTuple hv_SmoothedContoursFullLength = null;
                HOperatorSet.LengthXld(ho_SmoothedContoursFull, out hv_SmoothedContoursFullLength);
                //Keep the longest segment of the full contour and discard the small interfering segments
                HOperatorSet.SelectObj(ho_SmoothedContoursFull, out ho_SmoothedContoursFullLongest, (((hv_SmoothedContoursFullLength.TupleSortIndex()
                                                                                                       )).TupleSelect((new HTuple(hv_SmoothedContoursFullLength.TupleLength())) - 1)) + 1);


                HTuple[] rowColFull = null;
                HOperatorSet.GetContourXld(ho_SmoothedContoursFullLongest, out hv_RowInit, out hv_ColInit);
                GenParallPoint(par.Position, par.DefectType, 0, (int)par.GapOut, hv_RowInit, hv_ColInit, out rowColFull);
                hv_Row1 = rowColFull[0];
                hv_Col1 = rowColFull[1];
                hv_Row2 = rowColFull[2];
                hv_Col2 = rowColFull[3];
                //Generate the region enclosed by the parallel contours
                HOperatorSet.TupleInverse(hv_Col2, out hv_InvertedCol);
                HOperatorSet.TupleInverse(hv_Row2, out hv_InvertedRow);

                HOperatorSet.TupleConcat(hv_Col1, hv_InvertedCol, out hv_ConcatCol);
                HOperatorSet.TupleConcat(hv_Row1, hv_InvertedRow, out hv_ConcatRow);

                ho_ContourConcat.Dispose();
                HOperatorSet.GenContourPolygonXld(out ho_ContourConcat, hv_ConcatRow, hv_ConcatCol);
                HOperatorSet.GenRegionContourXld(ho_ContourConcat, out ho_RegionROIFull, "filled");
                HOperatorSet.ReduceDomain(ho_ImageBinary, ho_RegionROIFull, out ho_ErrorFull);

                ho_FilterErrorFull.Dispose();
                ho_FilterInverseROI.Dispose();
                if (par.DefectType == "Shell" && parRaisedEdgeSmooth.g_ParROI.g_ParROIExcute_L.Count > 1)
                {
                    ho_FilterROI = funCirROI.CreateOneROI(parRaisedEdgeSmooth.g_ParROI.g_ParROIExcute_L[1], htResult, out blResult, out anno);
                    HOperatorSet.Complement(ho_FilterROI, out ho_FilterInverseROI);
                    HOperatorSet.ReduceDomain(ho_ErrorFull, ho_FilterInverseROI, out ho_FilterErrorFull);
                }
                else
                {
                    ho_FilterErrorFull = ho_ErrorFull.Clone();
                }

                HOperatorSet.Threshold(ho_FilterErrorFull, out ho_ErrorRegioinFull, 100, 255);
                HOperatorSet.Connection(ho_ErrorRegioinFull, out ho_ErrorConnectFull);

                HTuple numError = 0;
                HTuple rowFull  = null;
                HTuple colFull  = null;
                HTuple disMin   = null;
                HTuple disMax   = null;

                HTuple disMinSort      = null;
                HTuple disMinSortIndex = null;
                HTuple areaError       = null;
                HTuple rowError        = null;
                HTuple colError        = null;
                HOperatorSet.CountObj(ho_ErrorConnectFull, out numError);
                HObject ho_ErrorSelectedPart;
                for (int i = 0; i < numError; i++)
                {
                    HOperatorSet.SelectObj(ho_ErrorConnectFull, out ho_ErrorSelectedPart, i + 1);
                    HOperatorSet.GenContourRegionXld(ho_ErrorSelectedPart, out ho_ErrorXldFull, "border");
                    HOperatorSet.GetContourXld(ho_ErrorXldFull, out rowFull, out colFull);
                    HOperatorSet.DistancePc(ho_SmoothedContoursFullLongest, rowFull, colFull, out disMin, out disMax);
                    HOperatorSet.TupleSort(disMin, out disMinSort);
                    HOperatorSet.TupleSortIndex(disMin, out disMinSortIndex);
                    //for (int j = 0; j < disMax.Length; j++)
                    //{
                    if (disMinSort[disMinSort.Length - 1] > par.NormalTh)
                    {
                        HOperatorSet.AreaCenter(ho_ErrorSelectedPart, out areaError, out rowError, out colError);
                        result.X_L.Add(colFull[disMinSortIndex[disMinSort.Length - 1].I]);
                        result.Y_L.Add(rowFull[disMinSortIndex[disMinSort.Length - 1].I]);
                        result.R_L.Add(0);
                        double dist = disMinSort[disMinSort.Length - 1];
                        result.Height_L.Add(Math.Round(dist, 1));
                        result.Area_L.Add(areaError.ToDArr()[0]);
                        break;
                    }
                    //}
                }

                //Number of regions
                result.Num = result.X_L.Count;
                if (result.Num == 0)
                {
                    //If the previous result was Error, keep the Error state even if this pass is OK
                    if (result.LevelError_e == LevelError_enum.Error)
                    {
                        result.SetDefault();
                    }
                    else
                    {
                        result.LevelError_e = LevelError_enum.OK;
                        result.Annotation   = par.NameCell.ToString() + "缺陷个数为0";
                        result.SetDefault();
                    }
                }

                //Add display output
                AddDisplay(par.g_ParOutput, result);
            }
            catch (Exception ex)
            {
                result.LevelError_e = LevelError_enum.Error;
                result.Annotation   = par.NameCell.ToString() + ex.Message.ToString();
                result.SetDefault();
                Log.L_I.WriteError(NameClass, ex);
            }
            finally
            {
                #region Record intermediate objects
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_ImageBinary", ho_ImageBinary);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_ROI", ho_ROI);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_FilterROI", ho_FilterROI);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_rectErosion1", ho_rectErosion1);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_ImageReduced", ho_ImageReduced);

                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_Region", ho_Region);

                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_RegionDilation", ho_RegionDilation);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_BorderOrigin", ho_BorderOrigin);

                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_ConnectedRegionDilation", ho_ConnectedRegionDilation);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_BinImage", ho_BinImage);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_BorderOriginal", ho_BorderOriginal);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_SmoothedBoaderOriginal", ho_SmoothedBoaderOriginal);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_SelectedSmoothedContours", ho_SelectedSmoothedContours);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_ObjectSelected", ho_ObjectSelected);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_ContourConcat", ho_ContourConcat);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_RegionParall", ho_RegionParall);


                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_FilterErrorFull", ho_FilterErrorFull);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_ImageParall", ho_ImageParall);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_RegionError", ho_RegionError);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_ConnectedRegions", ho_ConnectedRegions);

                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_RegionErosionError", ho_RegionErosionError);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_RegionDilationError", ho_RegionDilationError);

                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_BorderFull", ho_BorderFull);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_SmoothedContoursFull", ho_SmoothedContoursFull);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_SmoothedContoursFullLongest", ho_SmoothedContoursFullLongest);

                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_RegionROIFull", ho_RegionROIFull);
                RecordHoject(parRaisedEdgeSmooth, result, NameClass, "ho_ErrorConnectFull", ho_ErrorConnectFull);
                #endregion Record intermediate objects
            }
        }
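
The "keep the longest contour" step in DealSmoothing packs the sort-and-select logic into a single expression. A minimal sketch of the same idea as a separate helper is shown below; the name SelectLongestContour is hypothetical, and only the HALCON operators already used above (LengthXld, TupleSortIndex, SelectObj) are assumed.

        // Hypothetical helper: select the longest XLD contour from a contour array.
        private static void SelectLongestContour(HObject contours, out HObject longest)
        {
            HTuple lengths, sortedIndices;
            HOperatorSet.LengthXld(contours, out lengths);           // length of every contour
            HOperatorSet.TupleSortIndex(lengths, out sortedIndices); // ascending sort, 0-based indices
            // select_obj expects a 1-based index, so take the last (largest) sorted index + 1
            HOperatorSet.SelectObj(contours, out longest,
                sortedIndices.TupleSelect(lengths.Length - 1) + 1);
        }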
Ejemplo n.º 15
0
        public void ProcessStep(HImage imageGrabbed)
        {
            try
            {
                HTuple startTime = null;
                HTuple endTime   = null;
                HOperatorSet.CountSeconds(out startTime);
                int imageChannels = imageGrabbed.CountChannels();

                HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

                if (imageChannels == 6)
                {
                    rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                    rgbImg = rImg.Compose3(gImg, bImg);
                }

                HImage grayImage = new HImage();
                if (rgbImg == null)
                {
                    return;
                }

                imageGrabbed = rgbImg.CopyImage();

                int presentImageNoOfChannels = imageGrabbed.CountChannels();

                if (presentImageNoOfChannels == Properties.Settings.Default.NumberOfChannelsInColorImage)
                {
                    lock (this.ImageToBeSaved)
                    {
                        this.ImageToBeSaved = imageGrabbed.CopyImage();
                    }
                    if (whitebalancingStarted)
                    {
                        wbIterationCompletedEvent.Reset();
                    }
                    imageGrabbedEvent.Set();

                    HImage ho_R;
                    HImage ho_G;
                    HImage ho_B;

                    ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

                    HTuple RAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_B);
                    HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);

                    currentWBData.Image = imageGrabbed;
                    currentWBData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
                    currentWBData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
                    currentWBData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
                    currentWBData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

                    HImage ho_GrayImage;
                    ho_GrayImage = imageGrabbed.Rgb1ToGray();

                    HRegion whiteRegion = ho_GrayImage.Threshold(
                        Properties.Settings.Default.MinThresholdInDeterminingGain,
                        Properties.Settings.Default.MaxThresholdInDeterminingGain);
                    whiteRegion = whiteRegion.FillUp();
                    whiteRegion = whiteRegion.ErosionRectangle1(20, 20);
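                    // erode to keep only the interior of the bright reference region, away from its edges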

                    double rClipValue = 15.0;
                    double gClipValue = 15.0;
                    double bClipValue = 15.0;
                    //CalculateOptimumClipValue(whiteRegion, ho_R, out rClipValue);
                    //CalculateOptimumClipValue(whiteRegion, ho_G, out gClipValue);
                    //CalculateOptimumClipValue(whiteRegion, ho_B, out bClipValue);

                    double rMin, rMax, rRange;
                    double gMin, gMax, gRange;
                    double bMin, bMax, bRange;

                    ho_R.MinMaxGray(whiteRegion, rClipValue, out rMin, out rMax,
                                    out rRange);
                    ho_G.MinMaxGray(whiteRegion, gClipValue, out gMin, out gMax,
                                    out gRange);
                    ho_B.MinMaxGray(whiteRegion, bClipValue, out bMin, out bMax,
                                    out bRange);

                    double RGDiff = rMax - gMax;
                    double GBDiff = gMax - bMax;
                    double BRDiff = bMax - rMax;

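                    // White-balance error: the largest of the signed channel differences (R-G, G-B, B-R)
                    // of the bright-region maxima, as a percentage of the reference gray level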
                    currentWBData.ErrorLevel = (Math.Max(RGDiff, Math.Max(GBDiff, BRDiff)) / this.CameraAcquisition.CurrentCameraProperties.BrightRegionReferenceGrayLevel) * 100;
                    currentWBData.RedMax     = rMax;
                    currentWBData.GreenMax   = gMax;
                    currentWBData.BlueMax    = bMax;


                    HOperatorSet.CountSeconds(out endTime);
                    currentWBData.TimeTaken = (endTime - startTime).D;

                    UpdateControlUI();

                    wbIterationCompletedEvent.WaitOne();
                }
            }
            catch (Exception ex)
            {
                string errorMessage = "Exception occurred during white balancing step. ";
                errorMessage = errorMessage + " Error Message: " + ex.Message;
                MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
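
ProcessStep above only measures how far apart the per-channel bright-region maxima are (ErrorLevel); how those maxima are turned into gain corrections is not shown. The sketch below is one plausible way to derive per-channel gains from them; the helper, its formula, and the targetGray parameter are assumptions for illustration, not part of the original tool.

        // Sketch only (assumed, not from the original code): per-channel white-balance
        // gains that would pull each channel's bright-region maximum toward targetGray.
        private static void ComputeChannelGains(double rMax, double gMax, double bMax, double targetGray,
                                                out double rGain, out double gGain, out double bGain)
        {
            // Guard against division by zero on an all-dark region.
            rGain = rMax > 0 ? targetGray / rMax : 1.0;
            gGain = gMax > 0 ? targetGray / gMax : 1.0;
            bGain = bMax > 0 ? targetGray / bMax : 1.0;
        }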