/// <summary>
/// Processes one grabbed frame for the image-viewing step: splits a combined
/// IR+RGB frame into colour planes, stores a copy for saving, records the
/// centre-line gray profiles of all four planes and refreshes the histogram.
/// </summary>
/// <param name="imageGrabbed">Frame delivered by the acquisition engine.</param>
public void ProcessStep(HImage imageGrabbed)
{
    currentIVData.Image = imageGrabbed;

    int imageChannels = imageGrabbed.CountChannels();

    HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
    if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
    {
        // Frame carries R, G, B and IR planes; rebuild a pure RGB image from the first three.
        rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
        rgbImg = rImg.Compose3(gImg, bImg);
    }

    // Any other channel layout is not handled by this step.
    if (rgbImg == null)
    {
        return;
    }

    imageGrabbed = rgbImg.CopyImage();

    // NOTE(review): locking on ImageToBeSaved while reassigning it means later
    // lockers may synchronize on the old instance — a dedicated readonly lock
    // object would be safer, but that is a class-level change; confirm first.
    lock (this.ImageToBeSaved)
    {
        this.ImageToBeSaved = imageGrabbed.CopyImage();
    }

    // Gray-value profile along the measurement line for each plane (incl. IR).
    HImage ho_R;
    HImage ho_G;
    HImage ho_B;
    ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
    HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
    HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
    HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);
    HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);

    currentIVData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
    currentIVData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
    currentIVData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
    currentIVData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

    UpdateHistogramForImage();
}
/// <summary>
/// Rebuilds the RGB line-profile chart from the most recently saved image.
/// Marshals itself onto the UI thread when invoked from a worker thread.
/// </summary>
private void UpdateHistogramForImage()
{
    if (this.InvokeRequired)
    {
        // Re-enter on the UI thread; chartViewer may only be touched there.
        Action a = new Action(UpdateHistogramForImage);
        this.BeginInvoke(a);
    }
    else
    {
        // Copy under the same lock the producers of ImageToBeSaved use, so we
        // never read the field while another thread is replacing it.
        HImage currentImage;
        lock (this.ImageToBeSaved)
        {
            currentImage = this.ImageToBeSaved.CopyImage();
        }

        HImage rImg, gImg, bImg;
        rImg = currentImage.Decompose3(out gImg, out bImg);
        HTuple RGrayValues = Globals.GetGrayValuesOfPixel(rImg);
        HTuple GGrayValues = Globals.GetGrayValuesOfPixel(gImg);
        HTuple BGrayValues = Globals.GetGrayValuesOfPixel(bImg);

        double[] redData;
        double[] greenData;
        double[] blueData;

        // ChartDirector chart sized to the viewer control.
        XYChart c = new XYChart(chartViewer.Width - 10, chartViewer.Height - 10, 0xffdddd, 0x000000, 1);
        c.setPlotArea(50, 10, chartViewer.Width - 100, chartViewer.Height - 60, 0xffffff, -1, -1);

        redData = RGrayValues;
        greenData = GGrayValues;
        blueData = BGrayValues;

        // Add a line chart layer using the given data
        c.addSplineLayer(redData, 0xFF0000);
        c.addSplineLayer(greenData, 0x00FF00);
        c.addSplineLayer(blueData, 0x0000FF);
        // Fixed 8-bit gray-level axis.
        c.xAxis().setLinearScale(0, 255, 255);
        chartViewer.Chart = c;
    }
}
/// <summary>
/// Processes one grabbed frame for the On-Camera FFC step: extracts the RGB
/// planes from a combined IR+RGB frame, stores a copy for saving, and records
/// the centre-line gray profiles plus the elapsed processing time.
/// </summary>
/// <param name="imageGrabbed">Frame delivered by the acquisition engine.</param>
public void ProcessStep(HImage imageGrabbed)
{
    HTuple startTime = null;
    HTuple endTime = null;
    HOperatorSet.CountSeconds(out startTime);
    currentOnCameraFFCData = new OnCameraFFCData();
    try
    {
        int imageChannels = imageGrabbed.CountChannels();

        HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            // Frame carries R, G, B and IR planes; rebuild a pure RGB image.
            rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
            rgbImg = rImg.Compose3(gImg, bImg);
        }

        // Any other channel layout is not handled by this step.
        if (rgbImg == null)
        {
            return;
        }

        imageGrabbed = rgbImg.CopyImage();

        lock (this.ImageToBeSaved)
        {
            this.ImageToBeSaved = imageGrabbed.CopyImage();
        }

        // Feed the image to the script
        imageGrabbed = imageGrabbed.CropDomain();
        currentOnCameraFFCData.Image = imageGrabbed;

        if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            // Colour camera: profile all three colour planes plus the IR plane.
            HImage ho_R;
            HImage ho_G;
            HImage ho_B;
            ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayValues = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayValues = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayValues = Globals.GetGrayValuesOfLine(irImg);
            currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
            currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
            currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
            currentOnCameraFFCData.GrayValues.MPlaneVals = IRAOIGrayValues.ToDArr();
        }
        else if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
        {
            // Mono camera: single plane goes into the M slot.
            HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
            currentOnCameraFFCData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();
        }

        // Elapsed time in milliseconds.
        HOperatorSet.CountSeconds(out endTime);
        currentOnCameraFFCData.TimeTaken = (endTime - startTime).D * 1000;
    }
    catch (Exception ex)
    {
        string errorMessage = "Exception occurred during On-Camera FFC step. ";
        errorMessage = errorMessage + " Error Message: " + ex.Message;
        MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Performs the PRNU (bright-field) correction: validates that the current
/// light level is usable, then sends the on-camera BFC and save-to-user-bank
/// serial commands, updating the status label and the prnuDone flag.
/// </summary>
/// <remarks>Runs on the UI thread; uses Application.DoEvents to repaint.</remarks>
private void PerformPRNU()
{
    try
    {
        // Disable both FFC buttons while the correction is in progress.
        this.btnFFC_FPN.Enabled = false;
        this.btnFFC_PRNU.Enabled = false;
        this.lblStatus.Text = "Performing PRNU...";
        this.lblStatus.BackColor = Color.Yellow;
        this.prnuDone = false;
        Application.DoEvents();

        string buffer;
        string command;
        bool bfcStatus = true;
        HImage imageCropped;
        currentOnCameraFFCData = new OnCameraFFCData();
        HImage imageGrabbed = new HImage();
        HImage ho_RGB = new HImage();
        HImage ho_R = new HImage();
        HImage ho_G = new HImage();
        HImage ho_B = new HImage();
        HImage ho_M = new HImage();

        // Snapshot the last saved frame under the shared lock.
        lock (this.ImageToBeSaved)
        {
            imageGrabbed = this.ImageToBeSaved.CopyImage();
        }

        // Decompose/recompose to normalise the frame to 3 channels, then use
        // the channel count to branch between colour and mono handling.
        ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
        ho_RGB = ho_R.Compose3(ho_G, ho_B);
        int presentChannel = ho_RGB.CountChannels();

        if (presentChannel == Properties.Settings.Default.NumberOfChannelsInColorImage)
        {
            // Re-copy the saved image (fresh snapshot) for the colour path.
            lock (this.ImageToBeSaved)
            {
                ho_RGB = this.ImageToBeSaved.CopyImage();
            }
            ho_R = ho_RGB.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);
            currentOnCameraFFCData.Image = ho_RGB;
            currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
            currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
            currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();

            bool lightIntensityOptimum = true;
            // Crop a centred ROI (FFC rectangle) and re-profile it; the light
            // check below runs on the cropped region only.
            imageCropped = ho_RGB.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FFCROIRectangleHeight,
                (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FFCROIRectangleWidth,
                Properties.Settings.Default.FFCROIRectangleWidth * 2,
                (this.CameraAcquisition.CurrentImageHeight / 2));
            ho_R = imageCropped.Decompose3(out ho_G, out ho_B);
            RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
            GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
            BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);

            if (Properties.Settings.Default.CheckForImproperCalibrationTarget)
            {
                // All three planes must pass the intensity-window check.
                int minGrayValue;
                int maxGrayValue;
                double[] tempLineProfileR = RAOIGrayVals.ToDArr();
                double[] tempLineProfileG = GAOIGrayVals.ToDArr();
                double[] tempLineProfileB = BAOIGrayVals.ToDArr();
                lightIntensityOptimum &=
                    this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileR, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset) &&
                    this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileG, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset) &&
                    this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileB, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset);
            }

            if (!lightIntensityOptimum)
            {
                // Abort: lighting unsuitable for bright-field correction.
                this.RefreshImageWindow(ho_RGB);
                this.lblStatus.Text = Properties.Settings.Default.PRNULightIntensityNotOptimumMessage;
                this.lblStatus.BackColor = Color.Red;
                this.btnFFC_FPN.Enabled = true;
                this.btnFFC_PRNU.Enabled = true;
                Application.DoEvents();
                return;
            }
        }
        else if (presentChannel == Properties.Settings.Default.NumberOfChannelsInMonoImage)
        {
            // Mono path: single plane, same intensity gate.
            lock (this.ImageToBeSaved)
            {
                ho_M = this.ImageToBeSaved.CopyImage();
            }
            HTuple MAOIGrayVals = Globals.GetGrayValuesOfLine(ho_M);
            currentOnCameraFFCData.Image = ho_M;
            currentOnCameraFFCData.GrayValues.MPlaneVals = MAOIGrayVals.ToDArr();

            bool lightIntensityOptimum = true;
            if (Properties.Settings.Default.CheckForImproperCalibrationTarget)
            {
                int minGrayValue;
                int maxGrayValue;
                double[] tempLineProfileM = MAOIGrayVals.ToDArr();
                lightIntensityOptimum &= this.CameraAcquisition.IsLightIntensityLevelOptimum(tempLineProfileM, out minGrayValue, out maxGrayValue, Properties.Settings.Default.PRNUMinimumGrayLevelOffset);
            }

            if (!lightIntensityOptimum)
            {
                this.RefreshImageWindow(ho_M);
                this.lblStatus.Text = Properties.Settings.Default.PRNULightIntensityNotOptimumMessage;
                this.lblStatus.BackColor = Color.Red;
                this.btnFFC_FPN.Enabled = true;
                this.btnFFC_PRNU.Enabled = true;
                Application.DoEvents();
                return;
            }
        }

        // Issue the on-camera BFC, then persist FFC and all settings to user
        // bank 1. Each command must succeed for bfcStatus to remain true.
        command = Globals.e2vUC4CommandToDoOnCameraBFC + System.Environment.NewLine;
        bfcStatus = Globals.ExecuteSerialCommand(
            (uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
            command, out buffer, 5000, 1000);
        command = Globals.e2vUC4CommandToSaveFFCToUserBank1 + System.Environment.NewLine;
        bfcStatus &= Globals.ExecuteSerialCommand(
            (uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
            command, out buffer, 1000, 2000);
        command = Globals.e2vUC4CommandToSaveALLSettingsToUserBank1 + System.Environment.NewLine;
        bfcStatus &= Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
            command, out buffer, 1000, 2000);

        if (bfcStatus)
        {
            lblStatus.Text = "On Camera Bright Field Correction Done";
            lblStatus.BackColor = Color.LimeGreen;
        }
        else
        {
            lblStatus.Text = "On Camera Bright Field Correction not done properly";
            lblStatus.BackColor = Color.Red;
        }

        this.prnuDone = bfcStatus;
        this.btnFFC_FPN.Enabled = true;
        this.btnFFC_PRNU.Enabled = true;

        // Once both corrections succeeded, stamp the saving date (d-M-yyyy H:m:s).
        if (this.fpnDone && this.prnuDone)
        {
            this.CameraAcquisition.CurrentCameraSetupProperties.LastOnCameraFFCSavingDate = "";
            this.CameraAcquisition.CurrentCameraSetupProperties.LastOnCameraFFCSavingDate =
                DateTime.Now.Day.ToString() + "-" + DateTime.Now.Month.ToString() + "-" + DateTime.Now.Year.ToString() + " " +
                DateTime.Now.Hour.ToString() + ":" + DateTime.Now.Minute.ToString() + ":" + DateTime.Now.Second.ToString();
        }
    }
    catch (System.Exception ex)
    {
        // Any failure: restore UI state, clear the flag and inform the user.
        lblStatus.Text = "On Camera Bright Field Correction not done properly";
        lblStatus.BackColor = Color.Red;
        this.btnFFC_FPN.Enabled = true;
        this.btnFFC_PRNU.Enabled = true;
        this.prnuDone = false;
        MessageBox.Show("Exception occurred while performing PRNU.\r\n Error: " + ex.Message,
            "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Performs the FPN (dark-field) correction: verifies the scene is dark
/// enough, then sends the on-camera DFC and save-to-user-bank serial
/// commands, updating the status label and the fpnDone flag.
/// </summary>
/// <remarks>Runs on the UI thread; uses Application.DoEvents to repaint.</remarks>
private void PerformFPN()
{
    // Disable both FFC buttons while the correction is in progress.
    this.btnFFC_FPN.Enabled = false;
    this.btnFFC_PRNU.Enabled = false;
    this.lblStatus.Text = "Performing FPN...";
    this.lblStatus.BackColor = Color.Yellow;
    this.fpnDone = false;
    Application.DoEvents();

    string buffer;
    string command;
    bool dfcStatus = true;
    currentOnCameraFFCData = new OnCameraFFCData();
    HImage imageGrabbed = new HImage();
    HImage ho_RGB = new HImage();
    HImage ho_R = new HImage();
    HImage ho_G = new HImage();
    HImage ho_B = new HImage();
    HImage ho_M = new HImage();
    try
    {
        // Snapshot the last saved frame under the shared lock.
        lock (this.ImageToBeSaved)
        {
            imageGrabbed = this.ImageToBeSaved.CopyImage();
        }

        // Decompose/recompose to normalise to 3 channels, then branch on the
        // channel count between colour and mono handling.
        ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
        ho_RGB = ho_R.Compose3(ho_G, ho_B);
        int presentChannel = ho_RGB.CountChannels();

        if (presentChannel == Properties.Settings.Default.NumberOfChannelsInColorImage)
        {
            //ho_R = ho_RGB.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);
            currentOnCameraFFCData.Image = ho_RGB;
            currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
            currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
            currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();

            // Dark-field correction needs a dark scene: reject the frame if any
            // plane's centre-line maximum exceeds the configured ceiling.
            HTuple maxGrayValueInCenterLineR = RAOIGrayVals.TupleMax();
            HTuple maxGrayValueInCenterLineG = GAOIGrayVals.TupleMax();
            HTuple maxGrayValueInCenterLineB = BAOIGrayVals.TupleMax();
            if ((maxGrayValueInCenterLineR.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel) ||
                (maxGrayValueInCenterLineG.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel) ||
                (maxGrayValueInCenterLineB.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel))
            {
                this.RefreshImageWindow(ho_RGB);
                this.lblStatus.Text = Properties.Settings.Default.FPNLightIntensityNotOptimumMessage;
                this.lblStatus.BackColor = Color.Red;
                this.btnFFC_FPN.Enabled = true;
                this.btnFFC_PRNU.Enabled = true;
                //Application.DoEvents();
                return;
            }
        }
        else if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
        {
            // Mono path: same darkness gate on the single plane.
            lock (this.ImageToBeSaved)
            {
                ho_M = this.ImageToBeSaved.CopyImage();
            }
            HTuple MAOIGrayVals = Globals.GetGrayValuesOfLine(ho_M);
            currentOnCameraFFCData.Image = ho_M;
            currentOnCameraFFCData.GrayValues.MPlaneVals = MAOIGrayVals.ToDArr();
            HTuple maxGrayValueInCenterLineM = MAOIGrayVals.TupleMax();
            if (maxGrayValueInCenterLineM.I > Properties.Settings.Default.DarkFieldCorrectionMaximumGrayLevel)
            {
                this.RefreshImageWindow(ho_M);
                this.lblStatus.Text = Properties.Settings.Default.FPNLightIntensityNotOptimumMessage;
                this.lblStatus.BackColor = Color.Red;
                this.btnFFC_FPN.Enabled = true;
                this.btnFFC_PRNU.Enabled = true;
                //Application.DoEvents();
                return;
            }
        }

        // Issue the on-camera DFC, then persist FFC and all settings to user
        // bank 1. Each command must succeed for dfcStatus to remain true.
        command = Globals.e2vUC4CommandToDoOnCameraDFC + System.Environment.NewLine;
        dfcStatus = Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
            command, out buffer, 2000, 1000);
        command = Globals.e2vUC4CommandToSaveFFCToUserBank1 + System.Environment.NewLine;
        dfcStatus &= Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
            command, out buffer, 1000, 2000);
        command = Globals.e2vUC4CommandToSaveALLSettingsToUserBank1 + System.Environment.NewLine;
        dfcStatus &= Globals.ExecuteSerialCommand((uint)this.CameraAcquisition.CurrentCameraProperties.SerialPortIndex,
            command, out buffer, 1000, 2000);

        if (dfcStatus)
        {
            lblStatus.Text = "On Camera Dark Field Correction Done";
            lblStatus.BackColor = Color.LimeGreen;
        }
        else
        {
            lblStatus.Text = "On Camera Dark Field Correction not done properly";
            lblStatus.BackColor = Color.Red;
        }

        this.fpnDone = dfcStatus;
        this.btnFFC_FPN.Enabled = true;
        this.btnFFC_PRNU.Enabled = true;
    }
    catch (System.Exception ex)
    {
        // Any failure: restore UI state, clear the flag and inform the user.
        lblStatus.Text = "On Camera Dark Field Correction not done properly";
        lblStatus.BackColor = Color.Red;
        this.btnFFC_FPN.Enabled = true;
        this.btnFFC_PRNU.Enabled = true;
        this.fpnDone = false;
        MessageBox.Show("Exception occurred while performing FPN.\r\n Error: " + ex.Message,
            "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Processes one grabbed frame for the light-stick alignment step: extracts
/// the RGB planes, records gray profiles, runs the light-uniformity check on
/// a centred ROI (colour) or the whole line (mono), and schedules a UI update.
/// </summary>
/// <param name="imageGrabbed">Frame delivered by the acquisition engine.</param>
public void ProcessStep(HImage imageGrabbed)
{
    HTuple startTime = null;
    HTuple endTime = null;
    HOperatorSet.CountSeconds(out startTime);
    currentLSAData = new LightStickAlignmentData();
    HImage imageCropped = new HImage();
    try
    {
        int imageChannels = imageGrabbed.CountChannels();

        HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            // Frame carries R, G, B and IR planes; rebuild a pure RGB image.
            rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
            rgbImg = rImg.Compose3(gImg, bImg);
        }

        // Any other channel layout is not handled by this step.
        if (rgbImg == null)
        {
            return;
        }

        imageGrabbed = rgbImg.CopyImage();

        lock (this.ImageToBeSaved)
        {
            this.ImageToBeSaved = imageGrabbed.CopyImage();
        }

        // Feed the image to the script
        imageGrabbed = imageGrabbed.CropDomain();
        currentLSAData.Image = imageGrabbed;

        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            HImage ho_R;
            HImage ho_G;
            HImage ho_B;
            ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayValues = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayValues = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);
            currentLSAData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
            currentLSAData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
            currentLSAData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
            currentLSAData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

            // Uniformity is judged on a centred ROI (LIC rectangle) of the red plane.
            imageCropped = imageGrabbed.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.LICROIRectangleHeight,
                (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.LICROIRectangleWidth,
                Properties.Settings.Default.LICROIRectangleWidth * 2,
                (this.CameraAcquisition.CurrentImageHeight / 2));
            ho_R = imageCropped.Decompose3(out ho_G, out ho_B);
            RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);

            // Clipping the pixels at Left & Right of the line
            RAOIGrayValues = RAOIGrayValues.TupleSelectRange(
                this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                (Properties.Settings.Default.LICROIRectangleWidth * 2) - this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

            // Update the labels showing the results of light uniformity check
            int RminGrayValue;
            int RmaxGrayValue;

            // Checking whether the gray values is within the defined range in RED plane
            bool RPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(RAOIGrayValues.ToDArr(), out RminGrayValue, out RmaxGrayValue);
            currentLSAData.MinGrayValue = RminGrayValue;
            currentLSAData.MaxGrayValue = RmaxGrayValue;
            currentLSAData.Status = RPlaneOptimum;
        }
        else if (this.cameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
        {
            HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
            currentLSAData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();

            // Clipping the pixels at Left & Right of the line
            AOIGrayValues = AOIGrayValues.TupleSelectRange(
                this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                this.cameraAcquisition.CurrentImageWidth - this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

            // Update the labels showing the results of light uniformity check
            int minGrayValue;
            int maxGrayValue;

            // Checking whether the gray values is within the defined range in MONO plane
            bool MonoPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(AOIGrayValues.ToDArr(), out minGrayValue, out maxGrayValue);
            currentLSAData.MinGrayValue = minGrayValue;
            currentLSAData.MaxGrayValue = maxGrayValue;
            currentLSAData.Status = MonoPlaneOptimum;
        }

        // Elapsed time in milliseconds; UI update marshalled to the UI thread.
        HOperatorSet.CountSeconds(out endTime);
        currentLSAData.TimeTaken = (endTime - startTime).D * 1000;
        this.BeginInvoke(new Action(UpdateStepUI));
    }
    catch (Exception ex)
    {
        // Best-effort: the step is retried on the next frame, so the error is
        // only surfaced through the (unchanged) UI refresh.
        string errorMessage = "Exception occurred during light intensity check. ";
        errorMessage = errorMessage + " Error Message: " + ex.Message;
        //MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
        this.BeginInvoke(new Action(UpdateStepUI));
        //lblStatus.Text = errorMessage;
        //lblStatus.BackColor = Color.Red;
    }
}
/// <summary>
/// Processes one grabbed frame for the focus/magnification step. Depending on
/// the focusLearningStarted/focusLearningOver flags this either accumulates
/// sharpness samples (learning), evaluates the current sharpness against the
/// learnt maximum (testing), or reports that learning has not been done.
/// </summary>
/// <param name="imageGrabbed">Frame delivered by the acquisition engine.</param>
public void ProcessStep(HImage imageGrabbed)
{
    currentFMCData = new FocusMagnificationCheckData();

    lock (this.ImageToBeSaved)
    {
        this.ImageToBeSaved = imageGrabbed.CopyImage();
    }

    bool calibrationTargetContrastOK = true;

    // Feed the image to the script
    imageGrabbed = imageGrabbed.CropDomain();
    currentFMCData.Image = imageGrabbed;

    int imageChannels = imageGrabbed.CountChannels();
    HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
    if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
    {
        // Frame carries R, G, B and IR planes; rebuild a pure RGB image.
        rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
        rgbImg = rImg.Compose3(gImg, bImg);
    }
    HImage grayImage = new HImage();
    // Any other channel layout is not handled by this step.
    if (rgbImg == null)
    {
        return;
    }
    imageGrabbed = rgbImg.CopyImage();

    #region IntensityCheckForCheckingCalibrationTarget
    // Check the minimum and maximum light intensity to determine whether correct calibration target
    // has been placed
    grayImage = imageGrabbed.Rgb1ToGray();
    HImage ho_R;
    HImage ho_G;
    HImage ho_B;
    ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
    HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
    HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
    HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);
    HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);
    currentFMCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
    currentFMCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
    currentFMCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
    currentFMCData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

    int RPlaneMinValue;
    int RPlaneMaxValue;
    // NOTE(review): the clipped R profile computed here is never used — the
    // contrast check below runs on the *green* profile's min/max. Confirm
    // whether GAOIGrayVals is intentional or should be RAOIGrayVals.
    RAOIGrayVals = RAOIGrayVals.TupleSelectRange(this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
        this.CameraAcquisition.CurrentImageWidth - this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);
    // First call is only for its out-params; the boolean result is overwritten
    // by the contrast comparison on the next line.
    calibrationTargetContrastOK = this.CameraAcquisition.IsLightIntensityLevelOptimum(GAOIGrayVals.ToDArr(), out RPlaneMinValue, out RPlaneMaxValue);
    calibrationTargetContrastOK = (RPlaneMaxValue - RPlaneMinValue) > Properties.Settings.Default.FocusPatternCheckReferenceGrayValue;
    #endregion

    #region FindPatternArea
    //Finds the pattern area
    HImage reducedImage = Globals.FindPatternArea(grayImage);
    if (reducedImage == null)
    {
        SetFocusStatusMessage("Image is too dark or Incorrect Pattern", Color.Red);
        currentFMCData.FocusPercentage = "";
        currentFMCData.MagnificationPercentage = "";
        currentFMCData.PixelResolution = "";
        //this.BeginInvoke(new Action<FocusMagnificationCheckData>(UpdateFocusMagnificationStepUI), currentFMCData);
        return;
    }
    #endregion

    #region Focus Learning
    hv_focus = new HTuple();
    HTuple hv_currentSharpness = new HTuple();
    HImage imageCropped = new HImage();

    //Indicates that execution has entered the focus learning stage
    //(For both successful and failed learning)
    bool focusLearningDone = false;

    if (this.focusLearningStarted)
    {
        #region FocusLearningStage
        focusLearningDone = true;
        if (!this.focusLearningOver)
        {
            // Learning pass: slice a centred ROI into horizontal bands and
            // accumulate one sharpness sample per band.
            HTuple hv_Scale;
            HTuple hv_Subsampling = 1;
            hv_Scale = 1.0 / hv_Subsampling;
            int grabbedImageWidth, grabbedImageHeight;
            imageGrabbed.GetImageSize(out grabbedImageWidth, out grabbedImageHeight);

            // Crop the image before learning
            imageCropped = imageGrabbed.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
                (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
                Properties.Settings.Default.FocusROIRectangleWidth * 2,
                (this.CameraAcquisition.CurrentImageHeight / 2));
            int croppedImageWidth, croppedImageHeight;
            imageCropped.GetImageSize(out croppedImageWidth, out croppedImageHeight);

            int noOfSamples = croppedImageHeight / Properties.Settings.Default.NoOfSamplesUsedInFocusStep;
            for (int i = 0; i < Properties.Settings.Default.NoOfSamplesUsedInFocusStep; i++)
            {
                try
                {
                    int row = i * noOfSamples;
                    HImage newCroppedImage = imageCropped.CopyImage().CropPart(row, 0, croppedImageWidth, noOfSamples);
                    //newCroppedImage.WriteImage("bmp", 0, "D:\\imageCropped" + i.ToString());

                    // Function Call for sharpness Measurement
                    hv_currentSharpness = MeasureSharpness(newCroppedImage, hv_Scale);
                    hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
                }
                catch (Exception ex)
                {
                    // NOTE(review): a failed band is silently skipped; the
                    // exception (and 'ex') is discarded — confirm this is the
                    // intended best-effort behaviour.
                }
            }
            currentFMCData.FocusPercentage = "";
            SetFocusStatusMessage("Learning...", Color.Yellow);
            UpdateLabelMessage(currentFMCData.FocusPercentage);
        }
        else
        {
            // Learning finished: validate that the accumulated samples span a
            // wide enough sharpness range before accepting the maximum.
            hv_MaxAutoCorrelation = new HTuple();
            HTuple hv_Change = new HTuple();
            if (hv_AutoCorrelationTuple.TupleLength() > 0)
            {
                hv_MaxAutoCorrelation = hv_AutoCorrelationTuple.TupleMax();
                // Percentage spread between best and worst learnt sharpness.
                hv_Change = ((hv_AutoCorrelationTuple.TupleMax() - hv_AutoCorrelationTuple.TupleMin()) / hv_AutoCorrelationTuple.TupleMax()) * 100;

                if (hv_MaxAutoCorrelation.D <= 0.0 || hv_Change.D < Properties.Settings.Default.MinimumFocusLearningRangeRequired)
                {
                    currentFMCData.FocusPercentage = "";
                    SetFocusStatusMessage("Focus Learning not done properly. Range of focus learning is not enough.", Color.Orange);
                    this.focusLearningOver = false;
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                }
                else
                {
                    this.CameraAcquisition.CurrentCameraSetupProperties.FocusMaxAutoCorrelationValue = hv_MaxAutoCorrelation.D;
                }
            }
            else
            {
                currentFMCData.FocusPercentage = "";
                SetFocusStatusMessage("Focus Learning not done properly. Sharpness measurement failed", Color.Orange);
                this.focusLearningOver = false;
                UpdateLabelMessage(currentFMCData.FocusPercentage);
            }
            this.focusLearningStarted = false;
        }
        #endregion
    }
    else if (this.focusLearningOver)
    {
        #region FocusTestingPhase
        // Testing pass: require adequate contrast before trusting sharpness.
        if (!calibrationTargetContrastOK)
        {
            currentFMCData.FocusPercentage = "";
            currentFMCData.MagnificationPercentage = "";
            currentFMCData.PixelResolution = "";
            SetFocusStatusMessage("Incorrect Pattern. Not enough contrast", Color.Red);
            UpdateLabelMessage(currentFMCData.FocusPercentage);
            return;
        }

        HTuple hv_Scale;
        HTuple hv_Subsampling = 1;
        hv_Scale = 1.0 / hv_Subsampling;

        // Crop the image before learning
        imageCropped = imageGrabbed.CropPart((this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
            (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
            Properties.Settings.Default.FocusROIRectangleWidth * 2,
            (this.CameraAcquisition.CurrentImageHeight / 2));

        // Function Call for sharpness Measurement
        hv_currentSharpness = MeasureSharpness(imageCropped, hv_Scale);

        if (hv_currentSharpness > hv_MaxAutoCorrelation + 2)
        {
            // Sharpness well above the learnt maximum: the learnt reference is
            // suspect (wrong target or stale learning).
            SetFocusStatusMessage("Current sharpness is more than learnt sharpness. Insert valid calibration doc or Re-do focus learning !!", Color.Orange);
            currentFMCData.PixelResolution = "";
            currentFMCData.MagnificationPercentage = "";
            currentFMCData.FocusPercentage = "";
            UpdateLabelMessage(currentFMCData.FocusPercentage);
            return;
        }
        else if (hv_currentSharpness > hv_MaxAutoCorrelation)
        {
            // Slightly sharper than the learnt maximum: adopt it as the new reference.
            hv_MaxAutoCorrelation = hv_currentSharpness;
            hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
        }

        // Focus percentage: 100 means current sharpness equals the learnt maximum.
        hv_focus = 100 - (((hv_MaxAutoCorrelation - hv_currentSharpness) / hv_MaxAutoCorrelation) * 100);
        hv_focus = hv_focus.TupleRound();
        currentFMCData.FocusPercentage = hv_focus.ToString();
        if (hv_focus > 100)
        {
            // Not Focused
            currentFMCData.FocusPercentage = "";
            SetFocusStatusMessage("Focus learning not done properly", Color.Red);
            UpdateLabelMessage(currentFMCData.FocusPercentage);
        }
        if (hv_focus >= 95 && hv_focus <= 100)
        {
            // Focused
            SetFocusStatusMessage("Focused", Color.LimeGreen);
        }
        else if (hv_focus > 70 && hv_focus < 95)
        {
            //// Fine Tuning is required
            SetFocusStatusMessage("Fine Tuning is required", Color.Yellow);
        }
        else
        {
            // Not Focused
            SetFocusStatusMessage("Not focused", Color.Red);
        }
        #endregion
    }
    else if (!focusLearningOver && !focusLearningDone)
    {
        // Neither learning nor testing: prompt the operator to learn first.
        if (hv_MaxAutoCorrelation == null)
        {
            SetFocusStatusMessage("Focus learning not done", Color.Yellow);
        }
    }
    UpdateLabelMessage(currentFMCData.FocusPercentage);
    #endregion
}
/// <summary>
/// Processes one grabbed frame for the white-balance step: extracts the RGB
/// planes, records gray profiles, measures the per-plane maxima inside the
/// bright (white) region and derives the white-balance error level, then
/// blocks until the current WB iteration is acknowledged.
/// </summary>
/// <param name="imageGrabbed">Frame delivered by the acquisition engine.</param>
public void ProcessStep(HImage imageGrabbed)
{
    try
    {
        HTuple startTime = null;
        HTuple endTime = null;
        HOperatorSet.CountSeconds(out startTime);

        int imageChannels = imageGrabbed.CountChannels();

        HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
        // Use the shared setting (as the sibling ProcessStep methods do) rather
        // than the magic number 6 for the IR+RGB channel count.
        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
            rgbImg = rImg.Compose3(gImg, bImg);
        }

        // Any other channel layout is not handled by this step.
        if (rgbImg == null)
        {
            return;
        }

        imageGrabbed = rgbImg.CopyImage();
        int presentImageNoOfChannels = imageGrabbed.CountChannels();
        if (presentImageNoOfChannels == Properties.Settings.Default.NumberOfChannelsInColorImage)
        {
            lock (this.ImageToBeSaved)
            {
                this.ImageToBeSaved = imageGrabbed.CopyImage();
            }

            // Signal the WB worker that a fresh frame is available; when a WB
            // run is active, mark the iteration as in progress.
            if (whitebalancingStarted)
            {
                wbIterationCompletedEvent.Reset();
            }
            imageGrabbedEvent.Set();

            HImage ho_R;
            HImage ho_G;
            HImage ho_B;
            ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);
            currentWBData.Image = imageGrabbed;
            currentWBData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
            currentWBData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
            currentWBData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
            currentWBData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

            // Segment the bright (white) region used for gain measurement and
            // shrink it to avoid edge pixels.
            HImage ho_GrayImage;
            ho_GrayImage = imageGrabbed.Rgb1ToGray();
            HRegion whiteRegion = ho_GrayImage.Threshold(
                Properties.Settings.Default.MinThresholdInDeterminingGain,
                Properties.Settings.Default.MaxThresholdInDeterminingGain);
            whiteRegion = whiteRegion.FillUp();
            whiteRegion = whiteRegion.ErosionRectangle1(20, 20);

            // Fixed clip percentage for MinMaxGray; the adaptive computation
            // below was disabled intentionally — keep for reference.
            double rClipValue = 15.0;
            double gClipValue = 15.0;
            double bClipValue = 15.0;
            //CalculateOptimumClipValue(whiteRegion, ho_R, out rClipValue);
            //CalculateOptimumClipValue(whiteRegion, ho_G, out gClipValue);
            //CalculateOptimumClipValue(whiteRegion, ho_B, out bClipValue);

            double rMin, rMax, rRange;
            double gMin, gMax, gRange;
            double bMin, bMax, bRange;
            ho_R.MinMaxGray(whiteRegion, rClipValue, out rMin, out rMax, out rRange);
            ho_G.MinMaxGray(whiteRegion, gClipValue, out gMin, out gMax, out gRange);
            ho_B.MinMaxGray(whiteRegion, bClipValue, out bMin, out bMax, out bRange);

            // Error level = worst pairwise plane-maximum difference relative to
            // the reference bright gray level, as a percentage.
            double RGDiff = rMax - gMax;
            double GBDiff = gMax - bMax;
            double BRDiff = bMax - rMax;
            currentWBData.ErrorLevel = (Math.Max(RGDiff, Math.Max(GBDiff, BRDiff)) / this.CameraAcquisition.CurrentCameraProperties.BrightRegionReferenceGrayLevel) * 100;
            currentWBData.RedMax = rMax;
            currentWBData.GreenMax = gMax;
            currentWBData.BlueMax = bMax;

            // NOTE(review): TimeTaken here is in seconds, while sibling steps
            // multiply by 1000 for milliseconds — confirm which unit consumers expect.
            HOperatorSet.CountSeconds(out endTime);
            currentWBData.TimeTaken = (endTime - startTime).D;

            UpdateControlUI();

            // Block until the WB controller acknowledges this iteration.
            wbIterationCompletedEvent.WaitOne();
        }
    }
    catch (Exception ex)
    {
        string errorMessage = "Exception occurred during white balancing step. ";
        errorMessage = errorMessage + " Error Message: " + ex.Message;
        MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}