Esempio n. 1
0
        /// <summary>
        /// Computes the absolute gray-value histogram of the first image in
        /// HObjList, restricted to the axis-aligned rectangle
        /// rectangle1 = [row1, col1, row2, col2].
        /// </summary>
        /// <param name="rectangle1">Rectangle tuple [row1, col1, row2, col2].</param>
        /// <returns>The absolute histogram, or null when no image is available,
        /// the input is invalid, or a HALCON error occurs.</returns>
        HTuple GetGrayHisto(HTuple rectangle1)
        {
            if (HObjList == null || HObjList.Count < 1)
            {
                return null;
            }
            // Guard against a missing/short rectangle tuple; the original code
            // relied on the catch-all below to swallow the resulting exception.
            if (rectangle1 == null || rectangle1.Length < 4)
            {
                return null;
            }

            HImage hv_image = HObjList[0].HObj as HImage;
            if (hv_image == null)
            {
                return null;
            }

            HImage imgTmp = null;
            HRegion region = null;
            try
            {
                // Reduce color images to a single gray channel first.
                if (hv_image.CountChannels() == 3)
                {
                    imgTmp = hv_image.Rgb1ToGray();
                }
                else
                {
                    imgTmp = hv_image.Clone();
                }

                region = new HRegion();
                region.GenRectangle1(rectangle1[0].D, rectangle1[1], rectangle1[2], rectangle1[3]);

                HTuple hv_RelativeHisto;
                return imgTmp.GrayHisto(region, out hv_RelativeHisto);
            }
            catch (Exception)
            {
                // Best-effort: any HALCON failure yields "no histogram".
                return null;
            }
            finally
            {
                // BUG FIX: HALCON objects wrap native memory; dispose the
                // temporaries deterministically instead of leaking them.
                if (region != null)
                {
                    region.Dispose();
                }
                if (imgTmp != null)
                {
                    imgTmp.Dispose();
                }
            }
        }
Esempio n. 2
0
        /// <summary>
        /// Displays a grabbed multi-channel frame: channels 1-3 are recomposed
        /// into an RGB image, channel 4 (IR) is shown in a separate window.
        /// </summary>
        /// <param name="image">The grabbed image to display.</param>
        private void UpdateImageWindow(HImage image)
        {
            if (this.currentImage == null)
            {
                return;
            }
            if (this.InvokeRequired)
            {
                // Marshal to the UI thread; the re-invoked call does the work.
                Action<HImage> a = new Action<HImage>(UpdateImageWindow);
                this.BeginInvoke(a, image);
                // BUG FIX: return here. Previously execution also fell through
                // on the calling thread, so the image was processed twice and
                // the UI controls were touched from a non-UI thread.
                return;
            }

            HImage rgbImage = null;
            HImage rImage   = null;
            HImage gImage   = null;
            HImage bImage   = null;
            HImage irImage  = null;

            // Only images with more channels than a plain color image carry
            // the extra IR plane handled here.
            if (image.CountChannels() > Properties.Settings.Default.NumberOfChannelsInColorImage)
            {
                rImage   = image.Decompose4(out gImage, out bImage, out irImage);
                rgbImage = rImage.Compose3(gImage, bImage);

                if (rgbImage != null)
                {
                    this.hwcRGBImage.DisplayImage(rgbImage);
                }

                if (irImage != null)
                {
                    this.hwcIRImage.DisplayImage(irImage);
                }
            }
        }
        /// <summary>
        /// Shows imageObj in this control. Validates the image, caches it in
        /// imageUsed, and resets the visible image part and zoom only when the
        /// image dimensions changed since the last displayed image.
        /// </summary>
        /// <param name="imageObj">The HALCON object to display; ignored unless
        /// it is a non-null HImage.</param>
        public void DisplayImage(HObject imageObj)
        {
            if (imageObj == null) return;

            HImage obj = imageObj as HImage;
            if (obj != null)
            {
                bool isImageValid = false;
                try
                {
                    // CountChannels throws on an invalid/uninitialized image;
                    // any image with at least one channel is displayable.
                    int channelCount = obj.CountChannels();
                    isImageValid = channelCount > 0;
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                }
                if (isImageValid)
                {
                    int h;
                    int w;
                    string s;  // pixel type from GetImagePointer1; not used here
                    obj.GetImagePointer1(out s, out w, out h);
                    imageUsed = obj;
                    if (imageHeight == h && imageWidth == w)
                    {
                        // Same size as the previous image: repaint in place and
                        // keep the current view (no part/zoom reset).
                        Repaint();
                        if (ImageChanged != null)
                            ImageChanged(this);
                        return;
                    }
                    HRegion domain = obj.GetDomain();
                    HTuple r;
                    HTuple c;
                    int area = domain.AreaCenter(out r, out c);


                    // Reset the view only when the domain covers the full image
                    // (area == w*h), i.e. the image is not domain-reduced.
                    if (area == (w * h))
                    {
                        if ((h != imageHeight) || (w != imageWidth))
                        {
                            imageHeight = h;
                            imageWidth = w;
                            SetImagePart(0, 0, h, w);
                            ZoomImage(0, 0, 1);
                        }
                    }
                }
            }
            Repaint();
            if (ImageChanged != null)
                ImageChanged(this);
        }
Esempio n. 4
0
        //private bool ConvertRGBBitmap(HImage image, out Bitmap bitmap)
        //{
        //    bitmap = null;
        //    if (image == null || !image.IsInitialized())
        //    {
        //        return false;
        //    }

        //    HTuple hred, hgreen, hblue, type, width, height;
        //    HOperatorSet.GetImagePointer3(image, out hred, out hgreen, out hblue, out type, out width, out height);
        //    if (type != "byte")
        //    {
        //        return false;
        //    }

        //    bitmap = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
        //    Rectangle rect = new Rectangle(0, 0, width, height);
        //    BitmapData bitmapData = bitmap.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
        //    unsafe
        //    {
        //        byte* bptr = (byte*)bitmapData.Scan0;
        //        byte* r = ((byte*)hred.I);
        //        byte* g = ((byte*)hgreen.I);
        //        byte* b = ((byte*)hblue.I);
        //        ParallelLoopResult result = Parallel.For(0, width * height, (i) =>
        //        {
        //            bptr[i * 4 + 0] = (b)[i];
        //            bptr[i * 4 + 1] = (g)[i];
        //            bptr[i * 4 + 2] = (r)[i];
        //            bptr[i * 4 + 3] = 255;
        //        });
        //    }
        //    bitmap.UnlockBits(bitmapData);

        //    return true;
        //}


        /// <summary>
        /// Converts a HALCON image to a GDI+ Bitmap. Only single-channel
        /// (gray) images are supported; the RGB path is disabled (see the
        /// commented-out ConvertRGBBitmap above) and throws instead.
        /// </summary>
        /// <param name="image">Source HALCON image; may be null or uninitialized.</param>
        /// <param name="bitmap">The converted bitmap, or null when the image is
        /// null/uninitialized.</param>
        /// <exception cref="Exception">Thrown for any channel count other than 1.</exception>
        public static void HImageToBitmap(HImage image, out Bitmap bitmap)
        {
            bitmap = null;
            // BUG FIX: guard against a null reference before calling
            // IsInitialized(); the original threw NullReferenceException here.
            if (image == null || !image.IsInitialized())
            {
                return;
            }

            if (image.CountChannels() == 1)
            {
                ConvertGrayBitmap(image, out bitmap);
            }
            else
            {
                // The 3-channel branch and the fallback both reported the same
                // unsupported-format error; merged into one branch.
                //ConvertRGBBitmap(image, out bitmap);
                throw new Exception("不支持的图像格式");
            }
        }
        /// <summary>
        /// Processes one grabbed IR+RGB frame: splits it into RGB and IR
        /// planes, stores a copy for saving, extracts a gray-value line
        /// profile per plane and refreshes the histogram display.
        /// </summary>
        /// <param name="imageGrabbed">The frame just acquired from the camera.</param>
        public void ProcessStep(HImage imageGrabbed)
        {
            currentIVData.Image = imageGrabbed;

            int imageChannels = imageGrabbed.CountChannels();

            HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

            if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
            {
                rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                rgbImg = rImg.Compose3(gImg, bImg);
            }

            // Frames without the expected IR+RGB channel count are ignored.
            // (BUG FIX: removed the unused 'grayImage' allocation that leaked
            // an HImage on every call.)
            if (rgbImg == null)
            {
                return;
            }

            imageGrabbed = rgbImg.CopyImage();

            // NOTE(review): locking on a field that is reassigned inside the
            // lock means concurrent callers can end up locking different
            // objects; a dedicated readonly lock object would be safer —
            // confirm against the other users of ImageToBeSaved.
            lock (this.ImageToBeSaved)
            {
                this.ImageToBeSaved = imageGrabbed.CopyImage();
            }

            HImage ho_G;
            HImage ho_B;
            HImage ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

            // Line profiles for each plane; irImg is non-null here because
            // rgbImg (and thus irImg) was only set in the decompose branch.
            HTuple RAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);

            currentIVData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
            currentIVData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
            currentIVData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
            currentIVData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

            UpdateHistogramForImage();
        }
Esempio n. 6
0
        /// <summary>
        /// Computes the absolute gray-value histogram of an image. When
        /// rectangle1 = [row1, col1, row2, col2] is supplied, the histogram is
        /// restricted to that region; otherwise the full image is used.
        /// </summary>
        /// <param name="image">Source image; may be null.</param>
        /// <param name="rectangle1">Optional rectangle tuple [row1, col1, row2, col2].</param>
        /// <returns>The absolute histogram, or null on a null image or any
        /// HALCON error (the error is logged).</returns>
        public static HTuple getGrayHisto(HImage image, HTuple rectangle1 = null)
        {
            if (image == null)
            {
                return null;
            }

            HImage imgTmp = null;
            HRegion region = null;
            try
            {
                // Reduce color images to a single gray channel first.
                if (image.CountChannels() == 3)
                {
                    imgTmp = image.Rgb1ToGray();
                }
                else
                {
                    imgTmp = image.Clone();
                }

                region = new HRegion();
                if (rectangle1 == null)
                {
                    // No rectangle given: cover the whole image.
                    // GetImageSize returns (width, height).
                    HTuple col, row;
                    imgTmp.GetImageSize(out col, out row);
                    region.GenRectangle1(0, 0, row - 1, col - 1);
                }
                else
                {
                    region.GenRectangle1(rectangle1[0].D, rectangle1[1], rectangle1[2], rectangle1[3]);
                }

                HTuple hv_RelativeHisto;
                return imgTmp.GrayHisto(region, out hv_RelativeHisto);
            }
            catch (Exception exc)
            {
                Wells.FrmType.frm_Log.Log("获取灰度直方图出错:" + exc.Message, 2);
                return null;
            }
            finally
            {
                // BUG FIX: dispose the temporary HALCON objects (native
                // memory) instead of leaking them on every call.
                if (region != null)
                {
                    region.Dispose();
                }
                if (imgTmp != null)
                {
                    imgTmp.Dispose();
                }
            }
        }
        /// <summary>
        /// Performs the PRNU step, i.e. the on-camera bright field correction
        /// (BFC).
        /// </summary>
        /// <remarks>
        /// Copies the last saved image, checks that the illumination level is
        /// suitable for a bright-field reference, then issues the camera's
        /// serial commands to run the correction and persist it (and all
        /// settings) to user bank 1. The status label and both FFC buttons are
        /// updated in every outcome, including on exception.
        /// </remarks>
        private void PerformPRNU()
        {
            try
            {
                // Disable the FFC buttons and show a busy status while running.
                this.btnFFC_FPN.Enabled  = false;
                this.btnFFC_PRNU.Enabled = false;
                this.lblStatus.Text      = "Performing PRNU...";
                this.lblStatus.BackColor = Color.Yellow;
                this.prnuDone            = false;
                Application.DoEvents();
                string buffer;
                string command;
                bool   bfcStatus = true;

                HImage imageCropped;

                currentOnCameraFFCData = new OnCameraFFCData();

                HImage imageGrabbed = new HImage();
                HImage ho_RGB       = new HImage();
                HImage ho_R         = new HImage();
                HImage ho_G         = new HImage();
                HImage ho_B         = new HImage();
                HImage ho_M         = new HImage();

                // Work on a copy so the producer can keep replacing ImageToBeSaved.
                lock (this.ImageToBeSaved)
                {
                    imageGrabbed = this.ImageToBeSaved.CopyImage();
                }

                // Recompose the first three planes; the channel count of the
                // result decides the color vs. mono path below.
                ho_R   = imageGrabbed.Decompose3(out ho_G, out ho_B);
                ho_RGB = ho_R.Compose3(ho_G, ho_B);

                int presentChannel = ho_RGB.CountChannels();


                if (presentChannel == Properties.Settings.Default.NumberOfChannelsInColorImage)
                {
                    // Color path: profile the R/G/B planes of the full image.
                    lock (this.ImageToBeSaved)
                    {
                        ho_RGB = this.ImageToBeSaved.CopyImage();
                    }

                    ho_R = ho_RGB.Decompose3(out ho_G, out ho_B);

                    HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);


                    currentOnCameraFFCData.Image = ho_RGB;
                    currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
                    currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
                    currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();

                    bool lightIntensityOptimum = true;

                    // Re-measure the profiles on a crop around the image center
                    // before running the light-intensity check.
                    imageCropped = ho_RGB.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FFCROIRectangleHeight,
                                                   (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FFCROIRectangleWidth,
                                                   Properties.Settings.Default.FFCROIRectangleWidth * 2,
                                                   (this.CameraAcquisition.CurrentImageHeight / 2));

                    ho_R = imageCropped.Decompose3(out ho_G, out ho_B);

                    RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
                    GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
                    BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);

                    if (Properties.Settings.Default.CheckForImproperCalibrationTarget)
                    {
                        int      minGrayValue;
                        int      maxGrayValue;
                        double[] tempLineProfileR = RAOIGrayVals.ToDArr();

                        double[] tempLineProfileG = GAOIGrayVals.ToDArr();

                        double[] tempLineProfileB = BAOIGrayVals.ToDArr();

                        // All three planes must pass the intensity check; the
                        // min/max out-values are not used afterwards.
                        lightIntensityOptimum &=
                            this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileR, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset) &&
                            this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileG, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset) &&
                            this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileB, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset);
                    }

                    if (!lightIntensityOptimum)
                    {
                        // Abort: illumination unsuitable; restore the UI state.
                        this.RefreshImageWindow(ho_RGB);

                        this.lblStatus.Text      = Properties.Settings.Default.PRNULightIntensityNotOptimumMessage;
                        this.lblStatus.BackColor = Color.Red;
                        this.btnFFC_FPN.Enabled  = true;
                        this.btnFFC_PRNU.Enabled = true;
                        Application.DoEvents();
                        return;
                    }
                }
                else if (presentChannel == Properties.Settings.Default.NumberOfChannelsInMonoImage)
                {
                    // Mono path: same intensity check on the single (M) plane.
                    lock (this.ImageToBeSaved)
                    {
                        ho_M = this.ImageToBeSaved.CopyImage();
                    }

                    HTuple MAOIGrayVals = Globals.GetGrayValuesOfLine(ho_M);


                    currentOnCameraFFCData.Image = ho_M;
                    currentOnCameraFFCData.GrayValues.MPlaneVals = MAOIGrayVals.ToDArr();

                    bool lightIntensityOptimum = true;

                    if (Properties.Settings.Default.CheckForImproperCalibrationTarget)
                    {
                        int      minGrayValue;
                        int      maxGrayValue;
                        double[] tempLineProfileM = MAOIGrayVals.ToDArr();

                        lightIntensityOptimum &=
                            this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileM, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset);
                    }

                    if (!lightIntensityOptimum)
                    {
                        this.RefreshImageWindow(ho_M);

                        this.lblStatus.Text      = Properties.Settings.Default.PRNULightIntensityNotOptimumMessage;
                        this.lblStatus.BackColor = Color.Red;
                        this.btnFFC_FPN.Enabled  = true;
                        this.btnFFC_PRNU.Enabled = true;
                        Application.DoEvents();
                        return;
                    }
                }

                // Run the on-camera BFC, then persist the FFC data and all
                // settings to user bank 1 via the e2v UC4 serial commands.
                // All three commands must succeed for bfcStatus to stay true.
                command   = Globals.e2vUC4CommandToDoOnCameraBFC + System.Environment.NewLine;
                bfcStatus = Globals.ExecuteSerialCommand(
                    (uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                    command,
                    out buffer,
                    5000,
                    1000);

                command    = Globals.e2vUC4CommandToSaveFFCToUserBank1 + System.Environment.NewLine;
                bfcStatus &= Globals.ExecuteSerialCommand(
                    (uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                    command,
                    out buffer,
                    1000,
                    2000);

                command    = Globals.e2vUC4CommandToSaveALLSettingsToUserBank1 + System.Environment.NewLine;
                bfcStatus &= Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                                                          command,
                                                          out buffer,
                                                          1000,
                                                          2000);

                if (bfcStatus)
                {
                    lblStatus.Text      = "On Camera Bright Field Correction Done";
                    lblStatus.BackColor = Color.LimeGreen;
                }
                else
                {
                    lblStatus.Text      = "On Camera Bright Field Correction not done properly";
                    lblStatus.BackColor = Color.Red;
                }
                this.prnuDone            = bfcStatus;
                this.btnFFC_FPN.Enabled  = true;
                this.btnFFC_PRNU.Enabled = true;

                // Record the FFC timestamp ("d-M-yyyy H:m:s") once both the
                // FPN and PRNU steps have succeeded.
                if (this.fpnDone && this.prnuDone)
                {
                    this.CameraAcquisition.CurrentCameraSetupProperties.LastOnCameraFFCSavingDate = "";
                    this.CameraAcquisition.CurrentCameraSetupProperties.LastOnCameraFFCSavingDate = DateTime.Now.Day.ToString() + "-" +
                                                                                                    DateTime.Now.Month.ToString() + "-" + DateTime.Now.Year.ToString() + " " +
                                                                                                    DateTime.Now.Hour.ToString() + ":" + DateTime.Now.Minute.ToString() + ":" +
                                                                                                    DateTime.Now.Second.ToString();
                }
            }
            catch (System.Exception ex)
            {
                // Any failure: reset the UI, mark PRNU as not done, and report.
                lblStatus.Text           = "On Camera Bright Field Correction not done properly";
                lblStatus.BackColor      = Color.Red;
                this.btnFFC_FPN.Enabled  = true;
                this.btnFFC_PRNU.Enabled = true;
                this.prnuDone            = false;
                MessageBox.Show("Exception occurred while performing PRNU.\r\n Error: " + ex.Message,
                                "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
        /// <summary>
        /// Performs the FPN step, i.e. the on-camera dark field correction
        /// (DFC).
        /// </summary>
        /// <remarks>
        /// Copies the last saved image, verifies the scene is dark enough
        /// (max gray value at or below the configured limit), then issues the
        /// camera's serial commands to run the correction and persist it (and
        /// all settings) to user bank 1. The status label and both FFC buttons
        /// are updated in every outcome, including on exception.
        /// </remarks>
        private void PerformFPN()
        {
            // Disable the FFC buttons and show a busy status while running.
            this.btnFFC_FPN.Enabled  = false;
            this.btnFFC_PRNU.Enabled = false;
            this.lblStatus.Text      = "Performing FPN...";
            this.lblStatus.BackColor = Color.Yellow;
            this.fpnDone             = false;
            Application.DoEvents();
            string buffer;
            string command;
            bool   dfcStatus = true;

            currentOnCameraFFCData = new OnCameraFFCData();

            HImage imageGrabbed = new HImage();
            HImage ho_RGB       = new HImage();
            HImage ho_R         = new HImage();
            HImage ho_G         = new HImage();
            HImage ho_B         = new HImage();
            HImage ho_M         = new HImage();

            try
            {
                // Work on a copy so the producer can keep replacing ImageToBeSaved.
                lock (this.ImageToBeSaved)
                {
                    imageGrabbed = this.ImageToBeSaved.CopyImage();
                }

                // Recompose the first three planes; the channel count of the
                // result decides the color vs. mono path below.
                ho_R   = imageGrabbed.Decompose3(out ho_G, out ho_B);
                ho_RGB = ho_R.Compose3(ho_G, ho_B);

                int presentChannel = ho_RGB.CountChannels();

                if (presentChannel == Properties.Settings.Default.NumberOfChannelsInColorImage)
                {
                    //ho_R = ho_RGB.Decompose3(out ho_G, out ho_B);

                    HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);


                    currentOnCameraFFCData.Image = ho_RGB;
                    currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
                    currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
                    currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();

                    // Dark-field precondition: every plane's brightest pixel on
                    // the profile line must stay within the configured limit.
                    HTuple maxGrayValueInCenterLineR = RAOIGrayVals.TupleMax();
                    HTuple maxGrayValueInCenterLineG = GAOIGrayVals.TupleMax();
                    HTuple maxGrayValueInCenterLineB = BAOIGrayVals.TupleMax();
                    if ((maxGrayValueInCenterLineR.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel) ||
                        (maxGrayValueInCenterLineG.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel) ||
                        (maxGrayValueInCenterLineB.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel))
                    {
                        // Abort: scene too bright; restore the UI state.
                        this.RefreshImageWindow(ho_RGB);

                        this.lblStatus.Text      = Properties.Settings.Default.FPNLightIntensityNotOptimumMessage;
                        this.lblStatus.BackColor = Color.Red;
                        this.btnFFC_FPN.Enabled  = true;
                        this.btnFFC_PRNU.Enabled = true;
                        //Application.DoEvents();
                        return;
                    }
                }
                // NOTE(review): unlike the color branch (and PerformPRNU), this
                // checks CurrentNumberOfChannels rather than presentChannel —
                // confirm this asymmetry is intentional.
                else if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
                {
                    // Mono path: same darkness check on the single (M) plane.
                    lock (this.ImageToBeSaved)
                    {
                        ho_M = this.ImageToBeSaved.CopyImage();
                    }

                    HTuple MAOIGrayVals = Globals.GetGrayValuesOfLine(ho_M);

                    currentOnCameraFFCData.Image = ho_M;
                    currentOnCameraFFCData.GrayValues.MPlaneVals = MAOIGrayVals.ToDArr();

                    HTuple maxGrayValueInCenterLineM = MAOIGrayVals.TupleMax();
                    if (maxGrayValueInCenterLineM.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel)
                    {
                        this.RefreshImageWindow(ho_M);
                        this.lblStatus.Text      = Properties.Settings.Default.FPNLightIntensityNotOptimumMessage;
                        this.lblStatus.BackColor = Color.Red;
                        this.btnFFC_FPN.Enabled  = true;
                        this.btnFFC_PRNU.Enabled = true;
                        //Application.DoEvents();
                        return;
                    }
                }

                // Run the on-camera DFC, then persist the FFC data and all
                // settings to user bank 1 via the e2v UC4 serial commands.
                // All three commands must succeed for dfcStatus to stay true.
                command   = Globals.e2vUC4CommandToDoOnCameraDFC + System.Environment.NewLine;
                dfcStatus = Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                                                         command,
                                                         out buffer,
                                                         2000,
                                                         1000);

                command    = Globals.e2vUC4CommandToSaveFFCToUserBank1 + System.Environment.NewLine;
                dfcStatus &= Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                                                          command,
                                                          out buffer,
                                                          1000,
                                                          2000);

                command    = Globals.e2vUC4CommandToSaveALLSettingsToUserBank1 + System.Environment.NewLine;
                dfcStatus &= Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
                                                          command,
                                                          out buffer,
                                                          1000,
                                                          2000);

                if (dfcStatus)
                {
                    lblStatus.Text      = "On Camera Dark Field Correction Done";
                    lblStatus.BackColor = Color.LimeGreen;
                }
                else
                {
                    lblStatus.Text      = "On Camera Dark Field Correction not done properly";
                    lblStatus.BackColor = Color.Red;
                }
                this.fpnDone             = dfcStatus;
                this.btnFFC_FPN.Enabled  = true;
                this.btnFFC_PRNU.Enabled = true;
            }
            catch (System.Exception ex)
            {
                // Any failure: reset the UI, mark FPN as not done, and report.
                lblStatus.Text           = "On Camera Dark Field Correction not done properly";
                lblStatus.BackColor      = Color.Red;
                this.btnFFC_FPN.Enabled  = true;
                this.btnFFC_PRNU.Enabled = true;
                this.fpnDone             = false;
                MessageBox.Show("Exception occurred while performing FPN.\r\n Error: " + ex.Message,
                                "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Esempio n. 9
0
        /*******************************************************************/
        // Mouse-move handler for the HALCON view port. Builds a status string
        // of the form "height*width|row*col|gray value(s)" for the pixel under
        // the cursor and raises it via TriggerShowMessageEvent; while a mouse
        // button is pressed it either forwards the move to the active ROI or
        // pans the displayed image by the drag delta.
        private void mouseMoved(object sender, HMouseEventArgs e)
        {
            if (inMeasureLine)
            {
                return;
            }
            double motionX, motionY;

#if NativeCode
            if (Wrapper.ShowUnit.IsEmpty(viewPort.HalconWindow))
            {
                return;
            }
#else
#endif


            double currX, currY;
            HTuple currX1 = 0, currY1 = 0;
            try
            {
#if NativeCode
                // Native path: position and message come from the wrapper.
                string message = Wrapper.ShowUnit.GetPixMessage(viewPort.HalconWindow, out currX1, out currY1);
#else
                // Managed path: requires the first display object to be an image.
                if (HObjList.Count < 1 || HObjList[0].HObj == null || (HObjList[0].HObj is HImage) == false)
                {
                    return;
                }
                int state;
                // Sub-pixel mouse position in image coordinates (row=currY, col=currX).
                viewPort.HalconWindow.GetMpositionSubPix(out currY, out currX, out state);
                HImage hv_image = HObjList[0].HObj as HImage;

                string str_value = "";
                string str_position = "";
                bool   _isXOut = true, _isYOut = true;
                int    channel_count;
                string str_imgSize = string.Format("{0}*{1}", imageHeight, imageWidth);
                channel_count = hv_image.CountChannels();

                str_position = string.Format("|{0:F0}*{1:F0}", currY, currX);
                _isXOut      = (currX < 0 || currX >= imageWidth);
                _isYOut      = (currY < 0 || currY >= imageHeight);

                // Append gray value(s) only while the cursor is inside the image.
                if (!_isXOut && !_isYOut)
                {
                    if ((int)channel_count == 1)
                    {
                        double grayVal;
                        grayVal   = hv_image.GetGrayval((int)currY, (int)currX);
                        str_value = String.Format("|{0}", grayVal);
                    }
                    else if ((int)channel_count == 3)
                    {
                        double grayValRed, grayValGreen, grayValBlue;

                        HImage _RedChannel, _GreenChannel, _BlueChannel;

                        // Read the pixel channel by channel (HALCON channels are 1-based).
                        _RedChannel   = hv_image.AccessChannel(1);
                        _GreenChannel = hv_image.AccessChannel(2);
                        _BlueChannel  = hv_image.AccessChannel(3);

                        grayValRed   = _RedChannel.GetGrayval((int)currY, (int)currX);
                        grayValGreen = _GreenChannel.GetGrayval((int)currY, (int)currX);
                        grayValBlue  = _BlueChannel.GetGrayval((int)currY, (int)currX);
                        str_value    = String.Format("| R:{0}, G:{1}, B:{2})", grayValRed, grayValGreen, grayValBlue);
                    }
                    else
                    {
                        // Other channel counts: show size and position only.
                        str_value = "";
                    }
                }
                string message = str_imgSize + str_position + str_value;
#endif
                if (message.Length > 0)
                {
                    MousePosMessage = message;
                    TriggerShowMessageEvent(new ShowMessageEventArgs(message));
                }
                else
                {
                    return;
                }
                // Everything below only applies while dragging with a pressed button.
                if (!mousePressed)
                {
                    return;
                }
                if (currX1.Length != 1 || currY1.Length != 1)
                {
                    return;
                }
#if NativeCode
                currX = currX1;
                currY = currY1;
#endif
                if (roiManager != null &&
                    (roiManager.ActiveRoiIdx != -1) && (showMode == ShowMode.IncludeROI))
                {
                    // An ROI is active: let the ROI manager handle the drag.
                    roiManager.MouseMoveAction(currX, currY);
                }
                else
                {
                    //qDebug()<<"xx.D():"<<xx.;
                    // No active ROI: pan the image by the delta since the press,
                    // then rebase the drag origin.
                    motionX = ((currX - startX));
                    motionY = ((currY - startY));

                    if (((int)motionX != 0) || ((int)motionY != 0))
                    {
                        moveImage(motionX, motionY);
                        startX = currX - motionX;
                        startY = currY - motionY;
                    }
                }
            }
            catch (HOperatorException)
            {
                // HALCON errors during tracking are ignored (best-effort UI).
                return;
            }
            catch (Exception)
            {
                return;
            }
        }
Esempio n. 10
0
        /// <summary>
        /// Runs one light-stick-alignment (LSA) check on a grabbed frame: extracts the
        /// RGB part of the combined IR+RGB image, records gray-value line profiles for
        /// each plane, and checks whether the light intensity in a centered ROI is
        /// within the configured range. Results and elapsed time are stored in
        /// <c>currentLSAData</c> and the UI is refreshed via <c>UpdateStepUI</c>.
        /// </summary>
        /// <param name="imageGrabbed">Frame from the camera; processing only proceeds
        /// when its channel count equals <c>NumberOfChannelsInIRAndRGBImage</c>.</param>
        public void ProcessStep(HImage imageGrabbed)
        {
            HTuple startTime = null;
            HTuple endTime   = null;

            // endTime is primed here as well so TimeTaken is well-defined on early exits.
            HOperatorSet.CountSeconds(out startTime);
            HOperatorSet.CountSeconds(out endTime);
            currentLSAData = new LightStickAlignmentData();
            HImage imageCropped = new HImage();

            try
            {
                int imageChannels = imageGrabbed.CountChannels();

                HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

                // Split the combined image into R/G/B/IR planes and rebuild a 3-channel RGB image.
                if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
                {
                    rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                    rgbImg = rImg.Compose3(gImg, bImg);
                }

                HImage grayImage = new HImage();
                // Unexpected channel count: no RGB image was produced, nothing to process.
                if (rgbImg == null)
                {
                    return;
                }

                imageGrabbed = rgbImg.CopyImage();

                int presentImageNoOfChannels = imageGrabbed.CountChannels();

                // Keep an independent copy for the save-image feature; CopyImage detaches
                // it from further mutation of imageGrabbed below.
                lock (this.ImageToBeSaved)
                {
                    this.ImageToBeSaved = imageGrabbed.CopyImage();
                }
                // Feed the image to the script
                imageGrabbed         = imageGrabbed.CropDomain();
                currentLSAData.Image = imageGrabbed;

                if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
                {
                    HImage ho_R;
                    HImage ho_G;
                    HImage ho_B;

                    ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

                    // Gray-value profiles along a line for each color plane plus the IR plane.
                    HTuple RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayValues = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayValues = Globals.GetGrayValuesOfLine(ho_B);
                    HTuple IRAOIGrayVals  = Globals.GetGrayValuesOfLine(irImg);

                    currentLSAData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
                    currentLSAData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
                    currentLSAData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
                    currentLSAData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

                    // Crop a centered ROI sized by the LICROI settings.
                    // NOTE(review): assumes CropPart arguments are (row, column, width, height) — confirm.
                    imageCropped = imageGrabbed.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.LICROIRectangleHeight,
                                                         (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.LICROIRectangleWidth,
                                                         Properties.Settings.Default.LICROIRectangleWidth * 2,
                                                         (this.CameraAcquisition.CurrentImageHeight / 2));

                    ho_R = imageCropped.Decompose3(out ho_G, out ho_B);

                    RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);

                    // Clipping the pixels at Left & Right of the line
                    RAOIGrayValues = RAOIGrayValues.TupleSelectRange(
                        this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                        (Properties.Settings.Default.LICROIRectangleWidth * 2) - this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

                    // Update the labels showing the results of light uniformity check
                    int RminGrayValue;
                    int RmaxGrayValue;

                    // Checking whether the gray values is within the defined range in RED plane
                    bool RPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(RAOIGrayValues.ToDArr(), out RminGrayValue, out RmaxGrayValue);

                    currentLSAData.MinGrayValue = RminGrayValue;
                    currentLSAData.MaxGrayValue = RmaxGrayValue;
                    currentLSAData.Status       = RPlaneOptimum;
                }
                // NOTE(review): this branch looks unreachable — when imageChannels is not
                // the IR+RGB count, rgbImg stays null and the method returns above. Confirm intent.
                else if (this.cameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
                {
                    HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
                    currentLSAData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();

                    // Clipping the pixels at Left & Right of the line
                    AOIGrayValues = AOIGrayValues.TupleSelectRange(
                        this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                        this.cameraAcquisition.CurrentImageWidth - this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

                    // Update the labels showing the results of light uniformity check
                    int minGrayValue;
                    int maxGrayValue;

                    // Checking whether the gray values is within the defined range in MONO plane
                    bool MonoPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(AOIGrayValues.ToDArr(), out minGrayValue, out maxGrayValue);

                    currentLSAData.MinGrayValue = minGrayValue;
                    currentLSAData.MaxGrayValue = maxGrayValue;
                    currentLSAData.Status       = MonoPlaneOptimum;
                }
                // Record the elapsed time in milliseconds.
                HOperatorSet.CountSeconds(out endTime);
                currentLSAData.TimeTaken = (endTime - startTime).D * 1000;

                this.BeginInvoke(new Action(UpdateStepUI));
            }
            catch (Exception ex)
            {
                // Failures are reported via the UI update only; no rethrow.
                string errorMessage = "Exception occurred during light intensity check. ";
                errorMessage = errorMessage + " Error Message: " + ex.Message;
                //MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);

                this.BeginInvoke(new Action(UpdateStepUI));

                //lblStatus.Text = errorMessage;
                //lblStatus.BackColor = Color.Red;
            }
        }
Esempio n. 11
0
        /// <summary>
        /// Create a bitmap from a HImage. Image data is copied -> new bitmap is independent of HImage lifetime.
        /// Supports 1-channel (gray) and 3-channel (RGB) images only.
        /// </summary>
        /// <param name="ho_Image">input HImage (1 or 3 channels)</param>
        /// <returns>output new System.Drawing.Imaging.Bitmap</returns>
        /// <exception cref="Exception">wraps any failure during conversion</exception>
        public static Bitmap HImage2Bitmap(HImage ho_Image)
        {
            int    iWidth, iHeight, iNumChannels;
            IntPtr ip_R, ip_G, ip_B, ip_Data;
            String sType;
            // null return object
            Bitmap bitmap = null;

            try
            {
                //
                // Note that pixel data is stored differently in a System.Drawing.Bitmap:
                // a) Stride:
                // stride is the width, rounded up to a multiple of 4 (padding)
                // Size of data array HALCON: heigth*width, Bitmap: heigth*stride
                // compare: https://msdn.microsoft.com/en-us/library/zy1a2d14%28v=vs.110%29.aspx
                // b) RGB data storage:
                // Bitmap: one array, alternating red/green/blue (HALCON: three arrays)
                //
                // get the number of channels to run different conversion method
                iNumChannels = ho_Image.CountChannels();
                if (iNumChannels != 1 && iNumChannels != 3)
                {
                    throw new Exception("Conversion of HImage to Bitmap failed. Number of channels of the HImage is: " +
                                        iNumChannels + ". Conversion rule exists only for images with 1 or 3 channels");
                }
                if (iNumChannels == 1)
                {
                    //
                    // 1) Get the image pointer
                    ip_Data = ho_Image.GetImagePointer1(out sType, out iWidth, out iHeight);
                    //
                    // 2) Calculate the stride
                    int iStride = CalculateBitmapStride(iWidth, iNumChannels);
                    //
                    // 3) Create a new gray Bitmap object, allocating the necessary (managed) memory
                    bitmap = new Bitmap(iWidth, iHeight, PixelFormat.Format8bppIndexed);
                    // note for high performance, image can be copied by reference (see HImage2BitmapByReference)
                    //
                    // 4) Copy the image data directly into the bitmap data object
                    CopyBytesIntoBitmap(ref bitmap, ip_Data, iWidth, iStride);
                    //
                    // 5) Adjust color palette to grayscale (linearized grayscale)
                    bitmap.Palette = CreateGrayColorPalette(bitmap);
                }
                if (iNumChannels == 3)
                {
                    //
                    // 1) Calculate the stride
                    ho_Image.GetImagePointer3(out ip_R, out ip_G, out ip_B, out sType, out iWidth, out iHeight);
                    int iStride = CalculateBitmapStride(iWidth, iNumChannels);
                    //
                    // 2) Create interleaved image in HALCON
                    HImage ho_ImageInterleaved = ho_Image.InterleaveChannels("rgb", iStride, 0);
                    try
                    {
                        //
                        // 3) Create a new RGB Bitmap object, allocating the necessary (managed) memory
                        bitmap = new Bitmap(iWidth, iHeight, PixelFormat.Format24bppRgb);
                        // note for high performance, image can be copied by reference (see HImage2BitmapByReference)
                        //
                        // 4) Copy bytes; the interleaved image is already padded to iStride,
                        // so one row is iStride bytes wide
                        int iWidthIntlvd, iHeightIntlvd;
                        ip_Data = ho_ImageInterleaved.GetImagePointer1(out sType, out iWidthIntlvd, out iHeightIntlvd);
                        CopyBytesIntoBitmap(ref bitmap, ip_Data, iStride, iStride);
                    }
                    finally
                    {
                        //
                        // 5) Free temp HALCON image.
                        // FIX: done in a finally block so the native image no longer leaks
                        // when an exception is thrown during bitmap creation or copying.
                        ho_ImageInterleaved.Dispose();
                    }
                }
            }
            catch (Exception ex)
            {
                throw new Exception("Conversion of HImage to Bitmap failed.", ex);
            }
            return(bitmap);
        }
Esempio n. 12
0
        /// <summary>
        /// Measures the sharpness of an image via autocorrelation in the frequency
        /// domain: the zoomed gray image is correlated with itself through an FFT,
        /// and the correlation next to the origin yields a blurness estimate that
        /// is mapped to a sharpness score.
        /// </summary>
        /// <param name="currentImage">The current image; RGB images are converted to gray first.</param>
        /// <param name="ScaleVal">The scale val (zoom factor applied before the FFT).</param>
        /// <returns>Sharpness score rounded to two decimals; empty tuple if measurement failed.</returns>
        public HTuple MeasureSharpness(HImage currentImage, HTuple ScaleVal)
        {
            HTuple hv_Sharpness = new HTuple();

            try
            {
                // Work on a single-channel image.
                if (currentImage.CountChannels() == 3)
                {
                    currentImage = currentImage.Rgb1ToGray();
                }
                // NOTE: a GetImagePointer1 call whose outputs (pointer/type/width/height)
                // were never used was removed here as dead code.

                HImage ho_ImageZoomed;
                HImage ho_ImageFFT;
                HImage ho_ImageCorrelation;
                HImage ho_ImageFFTInv;

                HTuple hv_ZoomedWidth;
                HTuple hv_ZoomedHeight;

                // Row/column coordinates of the four pixels adjacent (with wrap-around)
                // to the origin of the autocorrelation image.
                // NOTE(review): new HTuple(4) creates a one-element tuple; the indexed
                // assignments below rely on HTuple indexer semantics — confirm.
                HTuple IRow = new HTuple(4);

                HTuple ICol = new HTuple(4);
                HTuple hv_SumCorrelation;

                ho_ImageZoomed = currentImage.ZoomImageFactor(ScaleVal, ScaleVal, "constant");
                ho_ImageZoomed.GetImageSize(out hv_ZoomedWidth, out hv_ZoomedHeight);
                // Autocorrelation via the real-valued FFT: to frequency domain,
                // correlate with itself, transform back.
                ho_ImageFFT         = ho_ImageZoomed.RftGeneric("to_freq", "none", "complex", hv_ZoomedWidth);
                ho_ImageCorrelation = ho_ImageFFT.CorrelationFft(ho_ImageFFT);
                ho_ImageFFTInv      = ho_ImageCorrelation.RftGeneric("from_freq", "n", "real", hv_ZoomedWidth);

                IRow[0] = 0;
                IRow[1] = 1;
                IRow[2] = hv_ZoomedHeight - 1;
                IRow[3] = hv_ZoomedHeight - 2;

                ICol[0] = 1;
                ICol[1] = 0;
                ICol[2] = hv_ZoomedWidth - 2;
                ICol[3] = hv_ZoomedWidth - 1;

                // Sample the correlation values next to the origin.
                hv_SumCorrelation = ho_ImageFFTInv.GetGrayval(IRow, ICol);

                HTuple hv_Mean;
                HTuple hv_Deviation;

                hv_Mean = ho_ImageZoomed.Intensity(ho_ImageZoomed, out hv_Deviation);

                HTuple hv_Blurness;

                // Normalized autocorrelation coefficient: high for blurred images.
                hv_Blurness = ((hv_SumCorrelation / (hv_ZoomedWidth * hv_ZoomedHeight)) - (hv_Mean * hv_Mean)) / (hv_Deviation * hv_Deviation);

                hv_Sharpness = 1000.0 - ((hv_Blurness.TupleMin()) * 1000.0);

                // Round the score to two decimal places.
                hv_Sharpness = hv_Sharpness * 100;
                hv_Sharpness = hv_Sharpness.TupleRound();
                hv_Sharpness = hv_Sharpness.TupleReal();
                hv_Sharpness = hv_Sharpness / 100;
            }
            catch (Exception ex)
            {
                // Report the failure on the focus status label; caller receives an empty tuple.
                SetFocusStatusMessage(ex.Message, Color.Pink);
            }

            return(hv_Sharpness);
        }
Esempio n. 13
0
        /// <summary>
        /// Runs one focus/magnification check step: verifies the calibration target
        /// has enough contrast, locates the pattern area, and then — depending on the
        /// learning state flags — either accumulates sharpness samples (learning),
        /// evaluates current sharpness against the learnt maximum (testing), or
        /// reports that learning has not been done. Results are published through
        /// <c>currentFMCData</c>, the focus status label and <c>UpdateLabelMessage</c>.
        /// </summary>
        /// <param name="imageGrabbed">Frame from the camera; processing only proceeds
        /// when its channel count equals <c>NumberOfChannelsInIRAndRGBImage</c>.</param>
        public void ProcessStep(HImage imageGrabbed)
        {
            currentFMCData = new FocusMagnificationCheckData();
            // Keep an independent copy for the save-image feature.
            lock (this.ImageToBeSaved)
            {
                this.ImageToBeSaved = imageGrabbed.CopyImage();
            }

            bool calibrationTargetContrastOK = true;

            // Feed the image to the script
            imageGrabbed         = imageGrabbed.CropDomain();
            currentFMCData.Image = imageGrabbed;

            int imageChannels = imageGrabbed.CountChannels();

            HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

            // Split the combined image into R/G/B/IR planes and rebuild a 3-channel RGB image.
            if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
            {
                rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                rgbImg = rImg.Compose3(gImg, bImg);
            }

            HImage grayImage = new HImage();

            // Unexpected channel count: no RGB image was produced, nothing to process.
            if (rgbImg == null)
            {
                return;
            }

            imageGrabbed = rgbImg.CopyImage();

            #region IntensityCheckForCheckingCalibrationTarget
            // Check the minimum and maximum light intensity to determine whether correct calibration target
            // has been placed
            grayImage = imageGrabbed.Rgb1ToGray();

            HImage ho_R;
            HImage ho_G;
            HImage ho_B;

            ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

            // Gray-value profiles along a line for each color plane plus the IR plane.
            HTuple RAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);

            currentFMCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
            currentFMCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
            currentFMCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
            currentFMCData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

            int RPlaneMinValue;
            int RPlaneMaxValue;

            // NOTE(review): the clipped RAOIGrayVals result is never used afterwards — confirm intent.
            RAOIGrayVals = RAOIGrayVals.TupleSelectRange(this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                                                         this.CameraAcquisition.CurrentImageWidth - this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

            // NOTE(review): the boolean result of IsLightIntensityLevelOptimum is immediately
            // overwritten below; only its min/max out-values are used for the contrast test. Confirm.
            calibrationTargetContrastOK = this.CameraAcquisition.IsLightIntensityLevelOptimum(GAOIGrayVals.ToDArr(), out RPlaneMinValue, out RPlaneMaxValue);
            calibrationTargetContrastOK = (RPlaneMaxValue - RPlaneMinValue) > Properties.Settings.Default.FocusPatternCheckReferenceGrayValue;

            #endregion

            #region FindPatternArea
            //Finds the pattern area
            HImage reducedImage = Globals.FindPatternArea(grayImage);
            if (reducedImage == null)
            {
                // Pattern not found: clear all results and stop this step.
                SetFocusStatusMessage("Image is too dark or Incorrect Pattern", Color.Red);
                currentFMCData.FocusPercentage         = "";
                currentFMCData.MagnificationPercentage = "";
                currentFMCData.PixelResolution         = "";
                //this.BeginInvoke(new Action<FocusMagnificationCheckData>(UpdateFocusMagnificationStepUI), currentFMCData);
                return;
            }
            #endregion

            #region Focus Learning

            hv_focus = new HTuple();
            HTuple hv_currentSharpness = new HTuple();
            HImage imageCropped        = new HImage();

            //Indicates that execution has entered the focus learning stage
            //(For both successful and failed learning)
            bool focusLearningDone = false;

            if (this.focusLearningStarted)
            {
                #region FocusLearningStage
                focusLearningDone = true;
                if (!this.focusLearningOver)
                {
                    // Accumulate sharpness samples from horizontal strips of a centered crop.
                    HTuple hv_Scale;
                    HTuple hv_Subsampling = 1;
                    hv_Scale = 1.0 / hv_Subsampling;

                    int grabbedImageWidth, grabbedImageHeight;

                    imageGrabbed.GetImageSize(out grabbedImageWidth, out grabbedImageHeight);

                    // Crop the image before learning
                    imageCropped = imageGrabbed.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
                                                         (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
                                                         Properties.Settings.Default.FocusROIRectangleWidth * 2,
                                                         (this.CameraAcquisition.CurrentImageHeight / 2));

                    int croppedImageWidth, croppedImageHeight;

                    imageCropped.GetImageSize(out croppedImageWidth, out croppedImageHeight);

                    // Height of one sample strip.
                    int noOfSamples = croppedImageHeight / Properties.Settings.Default.NoOfSamplesUsedInFocusStep;

                    for (int i = 0; i < Properties.Settings.Default.NoOfSamplesUsedInFocusStep; i++)
                    {
                        try
                        {
                            int    row             = i * noOfSamples;
                            HImage newCroppedImage = imageCropped.CopyImage().CropPart(row, 0, croppedImageWidth, noOfSamples);
                            //newCroppedImage.WriteImage("bmp", 0, "D:\\imageCropped" + i.ToString());

                            // Function Call for sharpness Measurement
                            hv_currentSharpness     = MeasureSharpness(newCroppedImage, hv_Scale);
                            hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
                        }
                        catch (Exception ex)
                        {
                            // Best-effort: a failed strip is skipped; remaining strips still contribute.
                        }
                    }

                    currentFMCData.FocusPercentage = "";
                    SetFocusStatusMessage("Learning...", Color.Yellow);
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                }
                else
                {
                    // Learning finished: validate the collected samples and store the maximum.
                    hv_MaxAutoCorrelation = new HTuple();
                    HTuple hv_Change = new HTuple();
                    if (hv_AutoCorrelationTuple.TupleLength() > 0)
                    {
                        hv_MaxAutoCorrelation = hv_AutoCorrelationTuple.TupleMax();
                        // Relative spread (%) of the collected sharpness values.
                        hv_Change             = ((hv_AutoCorrelationTuple.TupleMax() - hv_AutoCorrelationTuple.TupleMin()) / hv_AutoCorrelationTuple.TupleMax()) * 100;

                        if (hv_MaxAutoCorrelation.D <= 0.0 || hv_Change.D < Properties.Settings.Default.MinimumFocusLearningRangeRequired)
                        {
                            // Not enough variation in the learning samples: reject the learning run.
                            currentFMCData.FocusPercentage = "";
                            SetFocusStatusMessage("Focus Learning not done properly. Range of focus learning is not enough.", Color.Orange);
                            this.focusLearningOver = false;
                            UpdateLabelMessage(currentFMCData.FocusPercentage);
                        }
                        else
                        {
                            // Persist the learnt maximum for subsequent focus testing.
                            this.CameraAcquisition.CurrentCameraSetupProperties.FocusMaxAutoCorrelationValue = hv_MaxAutoCorrelation.D;
                        }
                    }
                    else
                    {
                        currentFMCData.FocusPercentage = "";
                        SetFocusStatusMessage("Focus Learning not done properly. Sharpness measurement failed", Color.Orange);
                        this.focusLearningOver = false;
                        UpdateLabelMessage(currentFMCData.FocusPercentage);
                    }
                    this.focusLearningStarted = false;
                }
                #endregion
            }
            else if (this.focusLearningOver)
            {
                #region FocusTestingPhase
                if (!calibrationTargetContrastOK)
                {
                    currentFMCData.FocusPercentage         = "";
                    currentFMCData.MagnificationPercentage = "";
                    currentFMCData.PixelResolution         = "";
                    SetFocusStatusMessage("Incorrect Pattern. Not enough contrast", Color.Red);
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                    return;
                }

                HTuple hv_Scale;
                HTuple hv_Subsampling = 1;

                hv_Scale = 1.0 / hv_Subsampling;
                // Crop the image before learning
                imageCropped = imageGrabbed.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
                                                     (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
                                                     Properties.Settings.Default.FocusROIRectangleWidth * 2,
                                                     (this.CameraAcquisition.CurrentImageHeight / 2));

                // Function Call for sharpness Measurement
                hv_currentSharpness = MeasureSharpness(imageCropped, hv_Scale);
                // A sharpness well above the learnt maximum indicates an invalid target or stale learning.
                if (hv_currentSharpness > hv_MaxAutoCorrelation + 2)
                {
                    SetFocusStatusMessage("Current sharpness is more than learnt sharpness. Insert valid calibration doc or Re-do focus learning !!", Color.Orange);
                    currentFMCData.PixelResolution         = "";
                    currentFMCData.MagnificationPercentage = "";
                    currentFMCData.FocusPercentage         = "";
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                    return;
                }
                else if (hv_currentSharpness > hv_MaxAutoCorrelation)
                {
                    // Slightly above the learnt maximum: adopt it as the new maximum.
                    hv_MaxAutoCorrelation   = hv_currentSharpness;
                    hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
                }
                // Focus percentage: 100 means current sharpness equals the learnt maximum.
                hv_focus = 100 - (((hv_MaxAutoCorrelation - hv_currentSharpness) / hv_MaxAutoCorrelation) * 100);
                hv_focus = hv_focus.TupleRound();
                currentFMCData.FocusPercentage = hv_focus.ToString();
                if (hv_focus > 100)
                {
                    // Not Focused
                    currentFMCData.FocusPercentage = "";
                    SetFocusStatusMessage("Focus learning not done properly", Color.Red);
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                }
                if (hv_focus >= 95 && hv_focus <= 100)
                {
                    // Focused
                    SetFocusStatusMessage("Focused", Color.LimeGreen);
                }
                else if (hv_focus > 70 && hv_focus < 95)
                {
                    //// Fine Tuning is required
                    SetFocusStatusMessage("Fine Tuning is required", Color.Yellow);
                }
                else
                {
                    // Not Focused
                    SetFocusStatusMessage("Not focused", Color.Red);
                }
                #endregion
            }
            else if (!focusLearningOver && !focusLearningDone)
            {
                // Neither learning nor testing is active: prompt the user to learn first.
                if (hv_MaxAutoCorrelation == null)
                {
                    SetFocusStatusMessage("Focus learning not done", Color.Yellow);
                }
            }

            UpdateLabelMessage(currentFMCData.FocusPercentage);
            #endregion
        }
Esempio n. 14
0
        /// <summary>
        /// Create a bitmap from a HALCON HImage by reference (no pixel copy).
        /// Make sure to keep the interleaved HImage alive as long as the bitmap is used:
        /// the Bitmap points directly into HALCON-owned memory.
        /// In case of a 1-channel image, the bitmap padding must be 0 (width divisible by 4).
        /// </summary>
        /// <param name="ho_Image">input HImage with 1 or 3 channels</param>
        /// <param name="ho_ImageInterleaved">out: interleaved HALCON image backing the bitmap
        /// in the 3-channel case; an empty HImage in the 1-channel case. Caller must keep it
        /// alive (and eventually dispose it) while the bitmap is in use.</param>
        /// <returns>new Bitmap sharing memory with the HImage</returns>
        /// <exception cref="Exception">wraps any failure, incl. unsupported channel counts
        /// and non-zero padding in the 1-channel case</exception>
        public static Bitmap HImage2BitmapByReference(HImage ho_Image, out HImage ho_ImageInterleaved)
        {
            int    iWidth, iHeight, iNumChannels;
            IntPtr ip_Gray;
            String sType;
            // null return objects
            Bitmap bitmap = null;

            try
            {
                ho_ImageInterleaved = new HImage();
                //
                // Note that pixel data is stored differently in System.Drawing.Bitmap
                iNumChannels = ho_Image.CountChannels();
                if (iNumChannels == 1)
                {
                    //
                    // 1) Get the image pointer
                    ip_Gray = ho_Image.GetImagePointer1(out sType, out iWidth, out iHeight);
                    //
                    // 2) Calculate the stride
                    int iPadding = CalculateBitmapPadding(iWidth, iNumChannels);
                    if (iPadding > 0)
                    {
                        // By-reference sharing requires HALCON rows and Bitmap rows to coincide,
                        // which is only true when no padding bytes are needed.
                        throw new Exception("Conversion of HImage to Bitmap failed. " +
                                            " Padding (=width modulo 4) of Bitmap not zero (mandatory to copy by reference). " +
                                            "To solve, please use HImage2Bitmap");
                    }
                    //
                    // 3) Create a new gray Bitmap object, copy by reference.
                    // keep in mind that the bitmap object's validity relies on the HImage lifetime
                    bitmap = new Bitmap(iWidth, iHeight, iWidth, PixelFormat.Format8bppIndexed, ip_Gray);
                    //
                    // 4) Adjust palette to grayscale (linearized grayscale)
                    bitmap.Palette = CreateGrayColorPalette(bitmap);
                }
                else if (iNumChannels == 3)
                {
                    //
                    // 1) Get the image stride
                    ho_Image.GetImagePointer1(out sType, out iWidth, out iHeight);
                    int iStride = CalculateBitmapStride(iWidth, iNumChannels);
                    //
                    // 2) Create an interleaved HALCON image using operator interleave_channels;
                    // this replaces the empty placeholder assigned to the out parameter above
                    ho_ImageInterleaved = ho_Image.InterleaveChannels("rgb", iStride, 0);
                    int iWidthIntlvd, iHeightIndlvd;
                    ip_Gray = ho_ImageInterleaved.GetImagePointer1(out sType, out iWidthIntlvd, out iHeightIndlvd);
                    //
                    // 3) Create a new gray Bitmap object, copy by reference.
                    // keep in mind that the bitmap object's validity relies on the HImage lifetime
                    bitmap = new Bitmap(iWidth, iHeight, iStride, PixelFormat.Format24bppRgb, ip_Gray);
                }
                else if (iNumChannels == 4)
                {
                    // 4-channel by-reference conversion is intentionally unsupported.
                    throw new NotImplementedException();
                }
                else
                {
                    throw new Exception("Conversion of HImage to Bitmap failed. Number of channels in HImage is: " +
                                        iNumChannels + ". Direct conversion by reference only possible with images of 1,3 or 4 channels.");
                }
            }
            catch (Exception ex)
            {
                throw new Exception("Conversion of HImage to Bitmap failed.", ex);
            }
            return(bitmap);
        }
Esempio n. 15
0
        /// <summary>
        /// Runs one on-camera flat-field-correction (FFC) step: extracts the RGB part
        /// of the combined IR+RGB frame, records gray-value line profiles per plane,
        /// and stores the results plus elapsed time in <c>currentOnCameraFFCData</c>.
        /// </summary>
        /// <param name="imageGrabbed">Frame from the camera; processing only proceeds
        /// when its channel count equals <c>NumberOfChannelsInIRAndRGBImage</c>.</param>
        public void ProcessStep(HImage imageGrabbed)
        {
            HTuple startTime = null;
            HTuple endTime   = null;

            // endTime is primed here as well so TimeTaken is well-defined on early exits.
            HOperatorSet.CountSeconds(out startTime);
            HOperatorSet.CountSeconds(out endTime);
            currentOnCameraFFCData = new OnCameraFFCData();
            try
            {
                int imageChannels = imageGrabbed.CountChannels();

                HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

                // Split the combined image into R/G/B/IR planes and rebuild a 3-channel RGB image.
                if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
                {
                    rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                    rgbImg = rImg.Compose3(gImg, bImg);
                }

                HImage grayImage = new HImage();
                // Unexpected channel count: no RGB image was produced, nothing to do.
                if (rgbImg == null)
                {
                    return;
                }

                imageGrabbed = rgbImg.CopyImage();

                // Keep an independent copy for the save-image feature.
                lock (this.ImageToBeSaved)
                {
                    this.ImageToBeSaved = imageGrabbed.CopyImage();
                }
                // Feed the image to the script
                imageGrabbed = imageGrabbed.CropDomain();
                currentOnCameraFFCData.Image = imageGrabbed;

                if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
                {
                    HImage ho_R;
                    HImage ho_G;
                    HImage ho_B;

                    ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

                    // Gray-value profiles along a line for each color plane plus the IR plane.
                    HTuple RAOIGrayValues  = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayValues  = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayValues  = Globals.GetGrayValuesOfLine(ho_B);
                    HTuple IRAOIGrayValues = Globals.GetGrayValuesOfLine(irImg);

                    currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
                    currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
                    currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
                    currentOnCameraFFCData.GrayValues.MPlaneVals = IRAOIGrayValues.ToDArr();
                }
                // NOTE(review): this branch looks unreachable — when the frame does not have
                // the IR+RGB channel count, rgbImg stays null and the method returns above. Confirm.
                else if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
                {
                    HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
                    currentOnCameraFFCData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();
                }
                // Record the elapsed time in milliseconds.
                HOperatorSet.CountSeconds(out endTime);
                currentOnCameraFFCData.TimeTaken = (endTime - startTime).D * 1000;
            }
            catch (Exception ex)
            {
                string errorMessage = "Exception occurred during On-Camera FFC step. ";
                errorMessage = errorMessage + " Error Message: " + ex.Message;
                MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Esempio n. 16
0
 /// <summary>
 /// Returns the number of channels in the supplied HALCON image.
 /// </summary>
 /// <param name="hImage">Image whose channel count is queried.</param>
 /// <returns>The channel count reported by <c>CountChannels</c>.</returns>
 public static int GetImageChannel(HImage hImage)
 {
     int channelCount = hImage.CountChannels();
     return channelCount;
 }
Esempio n. 17
0
        /// <summary>
        /// Runs one white-balancing iteration on the grabbed frame: splits off the
        /// RGB planes, samples line-profile gray values per plane, measures the
        /// per-channel maxima inside the bright (white) region, publishes the
        /// resulting error level to the UI, and then blocks until the current WB
        /// iteration is acknowledged via <c>wbIterationCompletedEvent</c>.
        /// </summary>
        /// <param name="imageGrabbed">Latest frame from the camera; expected to
        /// carry combined IR + RGB planes so it can be decomposed into four
        /// channels.</param>
        public void ProcessStep(HImage imageGrabbed)
        {
            try
            {
                HTuple startTime = null;
                HTuple endTime   = null;
                HOperatorSet.CountSeconds(out startTime);
                int imageChannels = imageGrabbed.CountChannels();

                HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;

                // Use the shared setting instead of a hard-coded 6 so this stays
                // consistent with the other IR+RGB decomposition paths in this file.
                if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
                {
                    rImg   = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
                    rgbImg = rImg.Compose3(gImg, bImg);
                }

                // Nothing to do unless the frame really contained the IR+RGB planes.
                if (rgbImg == null)
                {
                    return;
                }

                imageGrabbed = rgbImg.CopyImage();

                int presentImageNoOfChannels = imageGrabbed.CountChannels();

                if (presentImageNoOfChannels == Properties.Settings.Default.NumberOfChannelsInColorImage)
                {
                    // NOTE(review): locking on a field that is reassigned inside the
                    // lock means concurrent callers can end up locking different
                    // objects; a dedicated readonly gate object would be safer.
                    // Kept as-is to match the rest of the file.
                    lock (this.ImageToBeSaved)
                    {
                        this.ImageToBeSaved = imageGrabbed.CopyImage();
                    }
                    if (whitebalancingStarted)
                    {
                        wbIterationCompletedEvent.Reset();
                    }
                    imageGrabbedEvent.Set();

                    HImage ho_R;
                    HImage ho_G;
                    HImage ho_B;

                    ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);

                    // Line-profile gray values per plane; the IR plane comes from the
                    // original 4-channel decomposition above.
                    HTuple RAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_R);
                    HTuple GAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_G);
                    HTuple BAOIGrayVals  = Globals.GetGrayValuesOfLine(ho_B);
                    HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);

                    currentWBData.Image = imageGrabbed;
                    currentWBData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
                    currentWBData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
                    currentWBData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
                    currentWBData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

                    HImage ho_GrayImage;
                    ho_GrayImage = imageGrabbed.Rgb1ToGray();

                    // Isolate the bright (white) reference area; erode to stay clear
                    // of its edges before measuring per-channel extrema.
                    HRegion whiteRegion = ho_GrayImage.Threshold(
                        Properties.Settings.Default.MinThresholdInDeterminingGain,
                        Properties.Settings.Default.MaxThresholdInDeterminingGain);
                    whiteRegion = whiteRegion.FillUp();
                    whiteRegion = whiteRegion.ErosionRectangle1(20, 20);

                    // Fixed percentile clip for MinMaxGray; the adaptive computation
                    // below is intentionally disabled.
                    double rClipValue = 15.0;
                    double gClipValue = 15.0;
                    double bClipValue = 15.0;
                    //CalculateOptimumClipValue(whiteRegion, ho_R, out rClipValue);
                    //CalculateOptimumClipValue(whiteRegion, ho_G, out gClipValue);
                    //CalculateOptimumClipValue(whiteRegion, ho_B, out bClipValue);

                    double rMin, rMax, rRange;
                    double gMin, gMax, gRange;
                    double bMin, bMax, bRange;

                    ho_R.MinMaxGray(whiteRegion, rClipValue, out rMin, out rMax,
                                    out rRange);
                    ho_G.MinMaxGray(whiteRegion, gClipValue, out gMin, out gMax,
                                    out gRange);
                    ho_B.MinMaxGray(whiteRegion, bClipValue, out bMin, out bMax,
                                    out bRange);

                    // Error level = largest pairwise channel-max imbalance, as a
                    // percentage of the bright-region reference gray level.
                    double RGDiff = rMax - gMax;
                    double GBDiff = gMax - bMax;
                    double BRDiff = bMax - rMax;

                    currentWBData.ErrorLevel = (Math.Max(RGDiff, Math.Max(GBDiff, BRDiff)) / this.CameraAcquisition.CurrentCameraProperties.BrightRegionReferenceGrayLevel) * 100;
                    currentWBData.RedMax     = rMax;
                    currentWBData.GreenMax   = gMax;
                    currentWBData.BlueMax    = bMax;


                    HOperatorSet.CountSeconds(out endTime);
                    // NOTE(review): stored in seconds here, while the FFC step above
                    // multiplies by 1000 (ms) — confirm which unit TimeTaken expects.
                    currentWBData.TimeTaken = (endTime - startTime).D;

                    UpdateControlUI();

                    // Block until the UI/controller acknowledges this WB iteration.
                    wbIterationCompletedEvent.WaitOne();
                }
            }
            catch (Exception ex)
            {
                string errorMessage = "Exception occurred during white balancing step. ";
                errorMessage = errorMessage + " Error Message: " + ex.Message;
                MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }