private void SaveImage(string files, HImage ngImage)
{
    if (ngImage == null || !ngImage.IsInitialized())
    {
        Util.WriteLog(this.GetType(), "异常图像数据丢失"); // "abnormal image data lost"
        Util.Notify("异常图像数据丢失");
        return;
    }

    // Copy before handing off to the worker task: the caller may dispose ngImage.
    HImage imgSave = ngImage.CopyImage();
    Task.Run(() =>
    {
        try
        {
            FileInfo fi = new FileInfo(files);
            if (!fi.Directory.Exists)
            {
                fi.Directory.Create();
            }
            imgSave.WriteImage("png", 0, files);
        }
        catch (Exception ex)
        {
            Util.WriteLog(this.GetType(), ex);
            Util.Notify(string.Format("相机{0}异常图像保存异常", settingIndex)); // "camera {0}: NG-image save failed"
        }
        finally
        {
            imgSave.Dispose(); // release unmanaged HALCON memory even if WriteImage throws
        }
    });
}
private void CoreOnNewImage(HImage hImage, Guid tmpSessionId)
{
    lock (OnNewImageLock)
    {
        double startTime = HSystem.CountSeconds();

        if (lastGrabImg[0] != null)
        {
            lastGrabImg[0].Dispose();
        }
        lastGrabImg[0] = hImage.CopyImage();

        HImage rotateImage = hImage.RotateImage(new HTuple(rotazione[0]), "constant");
        //HImage rotateImage = rotateImage_.MirrorImage("column");
        //rotateImage_.Dispose();
        hImage.Dispose();

        cts = new CancellationTokenSource(Properties.Settings.Default.TimeoutAlgoritmo);

        ArrayList iconicVarList;
        ElaborateResult result;
        ElaborateImage(rotateImage, cts, out iconicVarList, out result);

        double tAnalisi = (HSystem.CountSeconds() - startTime) * 1000.0;
        result.ElapsedTime = tAnalisi;

        RaiseNewImageToDisplayEvent(iconicVarList, result);
    }
}
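// A minimal timing sketch of the pattern used in CoreOnNewImage above (the
// variable names here are illustrative, not from the source): HSystem.CountSeconds
// returns wall-clock seconds as a double, so elapsed milliseconds are
// (end - start) * 1000.0.
double t0 = HSystem.CountSeconds();
// ... run the processing to be measured ...
double elapsedMs = (HSystem.CountSeconds() - t0) * 1000.0;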
private void SaveImage(string files, HImage ngImage)
{
    if (ngImage == null || !ngImage.IsInitialized())
    {
        Util.WriteLog(this.GetType(), "异常图像数据丢失"); // "abnormal image data lost"
        Util.Notify("异常图像数据丢失");
        return;
    }

    HImage imgSave = ngImage.CopyImage();
    Task.Run(() =>
    {
        try
        {
            FileInfo fi = new FileInfo(files);
            if (!fi.Directory.Exists)
            {
                fi.Directory.Create();
            }

            // Keep the NG-image folder within the configured file count before writing.
            Common.FileAct.FileManger.DeleteOverflowFile(Path.GetDirectoryName(files), IniStatus.Instance.NgImageCount);
            imgSave.WriteImage("png", 0, files);
        }
        catch (Exception ex)
        {
            Util.WriteLog(this.GetType(), ex);
            Util.Notify(string.Format("相机{0}异常图像保存异常", settingIndex)); // "camera {0}: NG-image save failed"
        }
        finally
        {
            imgSave.Dispose();
        }
    });
}
protected HImage RotateImage(HImage img)
{
    HImage img1 = null;
    switch (ImageAngle)
    {
        case ImageAngle.角度0:   // 0 degrees: plain copy
            img1 = img.CopyImage();
            break;
        case ImageAngle.角度90:  // 90 degrees
            img1 = img.RotateImage(90.0, "constant");
            break;
        case ImageAngle.角度180: // 180 degrees
            img1 = img.RotateImage(180.0, "constant");
            break;
        case ImageAngle.角度270: // 270 degrees
            img1 = img.RotateImage(270.0, "constant");
            break;
        default:
            // Unknown angle: img1 stays null, so callers must check the result.
            break;
    }
    return img1;
}
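// A hedged alternative sketch: the switch above can be collapsed into a
// table-driven lookup. "angleLookup" and "RotateImageViaLookup" are illustrative
// names, not from the source; requires System.Collections.Generic. The 0-degree
// case keeps CopyImage to match the original behavior.
private static readonly Dictionary<ImageAngle, double> angleLookup = new Dictionary<ImageAngle, double>
{
    { ImageAngle.角度0, 0.0 }, { ImageAngle.角度90, 90.0 },
    { ImageAngle.角度180, 180.0 }, { ImageAngle.角度270, 270.0 },
};

protected HImage RotateImageViaLookup(HImage img)
{
    double angle;
    if (!angleLookup.TryGetValue(ImageAngle, out angle))
    {
        return null; // mirrors the original default branch
    }
    return angle == 0.0 ? img.CopyImage() : img.RotateImage(angle, "constant");
}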
private void SaveImage(string files, HImage ngImage)
{
    if (ngImage == null || !ngImage.IsInitialized())
    {
        return;
    }

    HImage imgSave = ngImage.CopyImage();
    Task.Run(() =>
    {
        try
        {
            FileInfo fi = new FileInfo(files);
            if (!fi.Directory.Exists)
            {
                fi.Directory.Create();
            }
            imgSave.WriteImage("png", 0, files);
        }
        catch (Exception ex)
        {
            // Surface save failures in the log rather than swallowing them.
            Util.WriteLog(this.GetType(), ex);
        }
        finally
        {
            imgSave.Dispose();
        }
    });
}
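// The three SaveImage variants above differ only in logging and in the optional
// overflow cleanup. A hedged consolidation sketch under that assumption:
// "SaveImageAsync" and the "pruneDirectory" flag are illustrative, not from the
// source; the helpers (Util, FileManger, IniStatus, settingIndex) are.
private void SaveImageAsync(string path, HImage ngImage, bool pruneDirectory)
{
    if (ngImage == null || !ngImage.IsInitialized())
    {
        Util.WriteLog(this.GetType(), "异常图像数据丢失"); // "abnormal image data lost"
        Util.Notify("异常图像数据丢失");
        return;
    }

    HImage imgSave = ngImage.CopyImage(); // copy before the caller disposes the original
    Task.Run(() =>
    {
        try
        {
            FileInfo fi = new FileInfo(path);
            if (!fi.Directory.Exists)
            {
                fi.Directory.Create();
            }
            if (pruneDirectory)
            {
                Common.FileAct.FileManger.DeleteOverflowFile(
                    Path.GetDirectoryName(path), IniStatus.Instance.NgImageCount);
            }
            imgSave.WriteImage("png", 0, path);
        }
        catch (Exception ex)
        {
            Util.WriteLog(this.GetType(), ex);
            Util.Notify(string.Format("相机{0}异常图像保存异常", settingIndex)); // "camera {0}: NG-image save failed"
        }
        finally
        {
            imgSave.Dispose(); // dispose even when WriteImage throws
        }
    });
}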
private void RunSingleCamera()
{
    GlobalData globalData = GlobalData.GetIstance();
    globalData.LastTryGrabLivello = DateTime.Now;

    HImage imgGrabTmp = AcquisitionTask();
    if (imgGrabTmp != null && imgGrabTmp.IsInitialized())
    {
        sessionId = Guid.NewGuid();
        globalData.LastGrabLivello = DateTime.Now;
        globalData.NumGrabLivello++;

        lock (lastResultLock)
        {
            lastResult = null;
        }

        CoreOnNewImage(imgGrabTmp, sessionId);

        // Fan the frame out to the adjustment (regolazioni) view, serializing
        // handler invocations on a task chain (see the sketch after this method).
        EventHandler<HImage> ev = OnNewImageForRegolazioni;
        if (ev != null)
        {
            HImage imgRegolazioni = imgGrabTmp.CopyImage();
            Action action = () => { ev(this, imgRegolazioni); };
            if (this.taskNewImageForRegolazioni == null)
            {
                this.taskNewImageForRegolazioni = Task.Run(action);
            }
            else
            {
                this.taskNewImageForRegolazioni = this.taskNewImageForRegolazioni.ContinueWith(k => action());
            }
        }

        imgGrabTmp.Dispose();
    }
    //else
    //{
    //    // Re-arm the camera if it has not grabbed anything for 10 seconds.
    //    if (DateTime.Now - globalData.LastGrabLivello > TimeSpan.FromSeconds(10))
    //    {
    //        GrabImageStart(0);
    //        globalData.LastGrabImageStartLivello = DateTime.Now;
    //    }
    //    Thread.Sleep(1);
    //}
}
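// Minimal standalone sketch of the callback-serialization pattern used in
// RunSingleCamera above: each work item is chained onto the previous Task so
// handlers run one at a time, in order, off the acquisition thread.
// "callbackChain" and "QueueCallback" are illustrative names, not from the source.
private Task callbackChain;

private void QueueCallback(Action action)
{
    callbackChain = (callbackChain == null)
        ? Task.Run(action)
        : callbackChain.ContinueWith(_ => action());
}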
private void UpdateUIAfterProcessingStep(HImage grabbedImage)
{
    // Nothing to update until the first image has been assigned elsewhere.
    if (this.currentImage == null)
    {
        return;
    }

    currentImage.Dispose(); // release the previous frame before replacing it
    currentImage = grabbedImage.CopyImage();
    currentCameraStep.ProcessStep(currentImage);
    UpdateImageWindow(currentImage);
    UpdateChartUI(currentCameraStep);
}
public void ProcessStep(HImage imageGrabbed)
{
    currentIVData.Image = imageGrabbed;

    // A combined IR+RGB frame arrives as one multi-channel image: split it and
    // rebuild the RGB working image from the first three planes.
    int imageChannels = imageGrabbed.CountChannels();
    HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
    if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
    {
        rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
        rgbImg = rImg.Compose3(gImg, bImg);
    }
    if (rgbImg == null)
    {
        return;
    }
    imageGrabbed = rgbImg.CopyImage();

    lock (this.ImageToBeSaved)
    {
        this.ImageToBeSaved = imageGrabbed.CopyImage();
    }

    // Per-plane gray-value profiles along the measurement line.
    HImage ho_G;
    HImage ho_B;
    HImage ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
    HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
    HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
    HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);
    HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);
    currentIVData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
    currentIVData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
    currentIVData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
    currentIVData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

    UpdateHistogramForImage();
}
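// The "lock (this.ImageToBeSaved)" pattern above (and in the other ProcessStep
// methods below) locks the very object it reassigns, so two threads can end up
// holding "the same" lock on different instances. A hedged sketch of the usual
// fix; "imageToBeSavedLock" and "PublishImageToBeSaved" are illustrative names:
private readonly object imageToBeSavedLock = new object();

private void PublishImageToBeSaved(HImage image)
{
    lock (imageToBeSavedLock) // a dedicated lock object that is never reassigned
    {
        this.ImageToBeSaved = image.CopyImage();
    }
}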
/// <summary>
/// Converts a bitmap to a HALCON image.
/// </summary>
/// <param name="from">A bitmap to be converted to an HImage.</param>
/// <returns>An HImage converted from a Windows Bitmap.</returns>
/// <example>HImage imageFromBitmap = myBitmap.ToHimage();</example>
public static HImage ToHimage(this Bitmap from)
{
    Contract.Requires(from != null);

    Rectangle rectangle = new Rectangle(0, 0, from.Width, from.Height);
    HImage interleavedHalconImage = new HImage();

    // Convert the 24-bit bitmap to a 32-bit bitmap so the row width (Stride)
    // is divisible by four; otherwise the conversion can come out skewed.
    Bitmap image32 = new Bitmap(from.Width, from.Height, PixelFormat.Format32bppRgb);
    image32.SetResolution(from.HorizontalResolution, from.VerticalResolution);
    using (Graphics g = Graphics.FromImage(image32))
    {
        g.DrawImage(from, new Point(0, 0));
    }

    // Obtain the pointer to the pixel data.
    BitmapData bitmapData = image32.LockBits(rectangle, ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
    IntPtr pointerToPixels = bitmapData.Scan0;

    // Create the HALCON image from the interleaved BGRX buffer.
    interleavedHalconImage.GenImageInterleaved(pointerToPixels, "bgrx",
        from.Width, from.Height, -1, "byte", from.Width, from.Height, 0, 0, -1, 0);

    // Don't forget to unlock the bits again. ;-)
    image32.UnlockBits(bitmapData);

    // Copy so the returned image owns its memory, then release the temporaries.
    HImage outputHalconImage = interleavedHalconImage.CopyImage();
    interleavedHalconImage.Dispose();
    image32.Dispose();
    GC.Collect();
    GC.WaitForPendingFinalizers();

    return outputHalconImage;
}
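// A usage sketch for ToHimage; the file name is illustrative. The returned
// HImage wraps unmanaged HALCON memory, so it is disposed explicitly.
using (Bitmap bmp = new Bitmap("input.bmp"))
{
    HImage halconImage = bmp.ToHimage();
    HImage gray = halconImage.Rgb1ToGray(); // continue processing with HALCON operators
    gray.Dispose();
    halconImage.Dispose();
}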
public void ProcessStep(HImage imageGrabbed)
{
    HTuple startTime = null;
    HTuple endTime = null;
    HOperatorSet.CountSeconds(out startTime);
    currentOnCameraFFCData = new OnCameraFFCData();
    try
    {
        int imageChannels = imageGrabbed.CountChannels();
        HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
            rgbImg = rImg.Compose3(gImg, bImg);
        }
        if (rgbImg == null)
        {
            return;
        }
        imageGrabbed = rgbImg.CopyImage();

        lock (this.ImageToBeSaved)
        {
            this.ImageToBeSaved = imageGrabbed.CopyImage();
        }

        // Feed the image to the script.
        imageGrabbed = imageGrabbed.CropDomain();
        currentOnCameraFFCData.Image = imageGrabbed;

        if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            HImage ho_G;
            HImage ho_B;
            HImage ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayValues = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayValues = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayValues = Globals.GetGrayValuesOfLine(irImg);
            currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
            currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
            currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
            currentOnCameraFFCData.GrayValues.MPlaneVals = IRAOIGrayValues.ToDArr();
        }
        else if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
        {
            HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
            currentOnCameraFFCData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();
        }

        HOperatorSet.CountSeconds(out endTime);
        currentOnCameraFFCData.TimeTaken = (endTime - startTime).D * 1000;
    }
    catch (Exception ex)
    {
        string errorMessage = "Exception occurred during On-Camera FFC step."
            + " Error Message: " + ex.Message;
        MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
public void ProcessStep(HImage imageGrabbed)
{
    HTuple startTime = null;
    HTuple endTime = null;
    HOperatorSet.CountSeconds(out startTime);
    currentLSAData = new LightStickAlignmentData();
    HImage imageCropped = new HImage();
    try
    {
        int imageChannels = imageGrabbed.CountChannels();
        HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
            rgbImg = rImg.Compose3(gImg, bImg);
        }
        if (rgbImg == null)
        {
            return;
        }
        imageGrabbed = rgbImg.CopyImage();

        lock (this.ImageToBeSaved)
        {
            this.ImageToBeSaved = imageGrabbed.CopyImage();
        }

        // Feed the image to the script.
        imageGrabbed = imageGrabbed.CropDomain();
        currentLSAData.Image = imageGrabbed;

        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            HImage ho_G;
            HImage ho_B;
            HImage ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayValues = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayValues = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);
            currentLSAData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
            currentLSAData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
            currentLSAData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
            currentLSAData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

            // Crop a centered ROI, then profile the red plane within it.
            imageCropped = imageGrabbed.CropPart(
                (this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.LICROIRectangleHeight,
                (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.LICROIRectangleWidth,
                Properties.Settings.Default.LICROIRectangleWidth * 2,
                (this.CameraAcquisition.CurrentImageHeight / 2));
            ho_R = imageCropped.Decompose3(out ho_G, out ho_B);
            RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);

            // Clip the pixels at the left and right ends of the line.
            RAOIGrayValues = RAOIGrayValues.TupleSelectRange(
                this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                (Properties.Settings.Default.LICROIRectangleWidth * 2) -
                this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

            // Check whether the gray values are within the defined range in the RED plane.
            int RminGrayValue;
            int RmaxGrayValue;
            bool RPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(
                RAOIGrayValues.ToDArr(), out RminGrayValue, out RmaxGrayValue);
            currentLSAData.MinGrayValue = RminGrayValue;
            currentLSAData.MaxGrayValue = RmaxGrayValue;
            currentLSAData.Status = RPlaneOptimum;
        }
        else if (this.cameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
        {
            HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
            currentLSAData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();

            // Clip the pixels at the left and right ends of the line.
            AOIGrayValues = AOIGrayValues.TupleSelectRange(
                this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                this.cameraAcquisition.CurrentImageWidth -
                this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

            // Check whether the gray values are within the defined range in the MONO plane.
            int minGrayValue;
            int maxGrayValue;
            bool MonoPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(
                AOIGrayValues.ToDArr(), out minGrayValue, out maxGrayValue);
            currentLSAData.MinGrayValue = minGrayValue;
            currentLSAData.MaxGrayValue = maxGrayValue;
            currentLSAData.Status = MonoPlaneOptimum;
        }

        HOperatorSet.CountSeconds(out endTime);
        currentLSAData.TimeTaken = (endTime - startTime).D * 1000;
        this.BeginInvoke(new Action(UpdateStepUI));
    }
    catch (Exception ex)
    {
        string errorMessage = "Exception occurred during light intensity check."
            + " Error Message: " + ex.Message;
        //MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
        this.BeginInvoke(new Action(UpdateStepUI));
        //lblStatus.Text = errorMessage;
        //lblStatus.BackColor = Color.Red;
    }
}
public void ProcessStep(HImage imageGrabbed)
{
    currentFMCData = new FocusMagnificationCheckData();

    lock (this.ImageToBeSaved)
    {
        this.ImageToBeSaved = imageGrabbed.CopyImage();
    }

    bool calibrationTargetContrastOK = true;

    // Feed the image to the script.
    imageGrabbed = imageGrabbed.CropDomain();
    currentFMCData.Image = imageGrabbed;

    int imageChannels = imageGrabbed.CountChannels();
    HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
    if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
    {
        rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
        rgbImg = rImg.Compose3(gImg, bImg);
    }
    if (rgbImg == null)
    {
        return;
    }
    imageGrabbed = rgbImg.CopyImage();

    #region IntensityCheckForCheckingCalibrationTarget
    // Check the minimum and maximum light intensity to determine whether the
    // correct calibration target has been placed.
    HImage grayImage = imageGrabbed.Rgb1ToGray();
    HImage ho_G;
    HImage ho_B;
    HImage ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
    HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
    HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
    HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);
    HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);
    currentFMCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
    currentFMCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
    currentFMCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
    currentFMCData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

    int RPlaneMinValue;
    int RPlaneMaxValue;
    RAOIGrayVals = RAOIGrayVals.TupleSelectRange(
        this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
        this.CameraAcquisition.CurrentImageWidth -
        this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);
    // Note: the optimum-level check is fed the green-plane profile; its result is
    // then recomputed from the spread of the returned min/max values.
    calibrationTargetContrastOK = this.CameraAcquisition.IsLightIntensityLevelOptimum(
        GAOIGrayVals.ToDArr(), out RPlaneMinValue, out RPlaneMaxValue);
    calibrationTargetContrastOK =
        (RPlaneMaxValue - RPlaneMinValue) > Properties.Settings.Default.FocusPatternCheckReferenceGrayValue;
    #endregion

    #region FindPatternArea
    // Find the pattern area.
    HImage reducedImage = Globals.FindPatternArea(grayImage);
    if (reducedImage == null)
    {
        SetFocusStatusMessage("Image is too dark or Incorrect Pattern", Color.Red);
        currentFMCData.FocusPercentage = "";
        currentFMCData.MagnificationPercentage = "";
        currentFMCData.PixelResolution = "";
        //this.BeginInvoke(new Action<FocusMagnificationCheckData>(UpdateFocusMagnificationStepUI), currentFMCData);
        return;
    }
    #endregion

    #region Focus Learning
    hv_focus = new HTuple();
    HTuple hv_currentSharpness = new HTuple();
    HImage imageCropped = new HImage();
    // Indicates that execution has entered the focus-learning stage
    // (for both successful and failed learning).
    bool focusLearningDone = false;

    if (this.focusLearningStarted)
    {
        #region FocusLearningStage
        focusLearningDone = true;
        if (!this.focusLearningOver)
        {
            HTuple hv_Subsampling = 1;
            HTuple hv_Scale = 1.0 / hv_Subsampling;
            int grabbedImageWidth, grabbedImageHeight;
            imageGrabbed.GetImageSize(out grabbedImageWidth, out grabbedImageHeight);

            // Crop the image before learning.
            imageCropped = imageGrabbed.CropPart(
                (this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
                (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
                Properties.Settings.Default.FocusROIRectangleWidth * 2,
                (this.CameraAcquisition.CurrentImageHeight / 2));
            int croppedImageWidth, croppedImageHeight;
            imageCropped.GetImageSize(out croppedImageWidth, out croppedImageHeight);

            // Measure the sharpness of each horizontal sample strip and collect the results.
            int noOfSamples = croppedImageHeight / Properties.Settings.Default.NoOfSamplesUsedInFocusStep;
            for (int i = 0; i < Properties.Settings.Default.NoOfSamplesUsedInFocusStep; i++)
            {
                try
                {
                    int row = i * noOfSamples;
                    HImage newCroppedImage = imageCropped.CopyImage().CropPart(row, 0, croppedImageWidth, noOfSamples);
                    //newCroppedImage.WriteImage("bmp", 0, "D:\\imageCropped" + i.ToString());

                    hv_currentSharpness = MeasureSharpness(newCroppedImage, hv_Scale);
                    hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
                }
                catch (Exception)
                {
                    // Skip sample strips whose crop or sharpness measurement fails.
                }
            }
            currentFMCData.FocusPercentage = "";
            SetFocusStatusMessage("Learning...", Color.Yellow);
            UpdateLabelMessage(currentFMCData.FocusPercentage);
        }
        else
        {
            hv_MaxAutoCorrelation = new HTuple();
            HTuple hv_Change = new HTuple();
            if (hv_AutoCorrelationTuple.TupleLength() > 0)
            {
                hv_MaxAutoCorrelation = hv_AutoCorrelationTuple.TupleMax();
                hv_Change = ((hv_AutoCorrelationTuple.TupleMax() - hv_AutoCorrelationTuple.TupleMin()) /
                             hv_AutoCorrelationTuple.TupleMax()) * 100;
                if (hv_MaxAutoCorrelation.D <= 0.0 ||
                    hv_Change.D < Properties.Settings.Default.MinimumFocusLearningRangeRequired)
                {
                    currentFMCData.FocusPercentage = "";
                    SetFocusStatusMessage("Focus learning not done properly. Range of focus learning is not enough.", Color.Orange);
                    this.focusLearningOver = false;
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                }
                else
                {
                    this.CameraAcquisition.CurrentCameraSetupProperties.FocusMaxAutoCorrelationValue = hv_MaxAutoCorrelation.D;
                }
            }
            else
            {
                currentFMCData.FocusPercentage = "";
                SetFocusStatusMessage("Focus learning not done properly. Sharpness measurement failed.", Color.Orange);
                this.focusLearningOver = false;
                UpdateLabelMessage(currentFMCData.FocusPercentage);
            }
            this.focusLearningStarted = false;
        }
        #endregion
    }
    else if (this.focusLearningOver)
    {
        #region FocusTestingPhase
        if (!calibrationTargetContrastOK)
        {
            currentFMCData.FocusPercentage = "";
            currentFMCData.MagnificationPercentage = "";
            currentFMCData.PixelResolution = "";
            SetFocusStatusMessage("Incorrect Pattern. Not enough contrast", Color.Red);
            UpdateLabelMessage(currentFMCData.FocusPercentage);
            return;
        }

        HTuple hv_Subsampling = 1;
        HTuple hv_Scale = 1.0 / hv_Subsampling;

        // Crop the image before measuring.
        imageCropped = imageGrabbed.CropPart(
            (this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
            (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
            Properties.Settings.Default.FocusROIRectangleWidth * 2,
            (this.CameraAcquisition.CurrentImageHeight / 2));

        // Measure the sharpness of the cropped region.
        hv_currentSharpness = MeasureSharpness(imageCropped, hv_Scale);
        if (hv_currentSharpness > hv_MaxAutoCorrelation + 2)
        {
            SetFocusStatusMessage("Current sharpness is more than learnt sharpness. Insert a valid calibration doc or re-do focus learning!", Color.Orange);
            currentFMCData.PixelResolution = "";
            currentFMCData.MagnificationPercentage = "";
            currentFMCData.FocusPercentage = "";
            UpdateLabelMessage(currentFMCData.FocusPercentage);
            return;
        }
        else if (hv_currentSharpness > hv_MaxAutoCorrelation)
        {
            hv_MaxAutoCorrelation = hv_currentSharpness;
            hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
        }

        // Focus score: current sharpness relative to the learnt maximum, in percent.
        hv_focus = 100 - (((hv_MaxAutoCorrelation - hv_currentSharpness) / hv_MaxAutoCorrelation) * 100);
        hv_focus = hv_focus.TupleRound();
        currentFMCData.FocusPercentage = hv_focus.ToString();
        if (hv_focus > 100)
        {
            // Not focused.
            currentFMCData.FocusPercentage = "";
            SetFocusStatusMessage("Focus learning not done properly", Color.Red);
            UpdateLabelMessage(currentFMCData.FocusPercentage);
        }
        if (hv_focus >= 95 && hv_focus <= 100)
        {
            // Focused.
            SetFocusStatusMessage("Focused", Color.LimeGreen);
        }
        else if (hv_focus > 70 && hv_focus < 95)
        {
            // Fine tuning is required.
            SetFocusStatusMessage("Fine Tuning is required", Color.Yellow);
        }
        else
        {
            // Not focused.
            SetFocusStatusMessage("Not focused", Color.Red);
        }
        #endregion
    }
    else if (!focusLearningOver && !focusLearningDone)
    {
        if (hv_MaxAutoCorrelation == null)
        {
            SetFocusStatusMessage("Focus learning not done", Color.Yellow);
        }
    }
    UpdateLabelMessage(currentFMCData.FocusPercentage);
    #endregion
}
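// Worked example of the focus score used above (values are illustrative): the
// score is 100 - ((max - current) / max) * 100, i.e. current/max as a percentage.
// With a learnt maximum auto-correlation of 0.80 and a current sharpness of 0.72:
// focus = 100 - ((0.80 - 0.72) / 0.80) * 100 = 90, which falls in the
// "Fine Tuning is required" band (70 < focus < 95).
double maxSharpness = 0.80, currentSharpness = 0.72;
double focus = 100.0 - ((maxSharpness - currentSharpness) / maxSharpness) * 100.0; // 90.0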
public void ProcessStep(HImage imageGrabbed)
{
    try
    {
        HTuple startTime = null;
        HTuple endTime = null;
        HOperatorSet.CountSeconds(out startTime);

        int imageChannels = imageGrabbed.CountChannels();
        HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
        // Note: the other steps compare against Settings.Default.NumberOfChannelsInIRAndRGBImage;
        // this step hard-codes 6.
        if (imageChannels == 6)
        {
            rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
            rgbImg = rImg.Compose3(gImg, bImg);
        }
        if (rgbImg == null)
        {
            return;
        }
        imageGrabbed = rgbImg.CopyImage();

        int presentImageNoOfChannels = imageGrabbed.CountChannels();
        if (presentImageNoOfChannels == Properties.Settings.Default.NumberOfChannelsInColorImage)
        {
            lock (this.ImageToBeSaved)
            {
                this.ImageToBeSaved = imageGrabbed.CopyImage();
            }

            if (whitebalancingStarted)
            {
                wbIterationCompletedEvent.Reset();
            }
            imageGrabbedEvent.Set();

            HImage ho_G;
            HImage ho_B;
            HImage ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);
            currentWBData.Image = imageGrabbed;
            currentWBData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
            currentWBData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
            currentWBData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
            currentWBData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

            // Segment the bright (white) region used to judge the channel balance.
            HImage ho_GrayImage = imageGrabbed.Rgb1ToGray();
            HRegion whiteRegion = ho_GrayImage.Threshold(
                Properties.Settings.Default.MinThresholdInDeterminingGain,
                Properties.Settings.Default.MaxThresholdInDeterminingGain);
            whiteRegion = whiteRegion.FillUp();
            whiteRegion = whiteRegion.ErosionRectangle1(20, 20);

            double rClipValue = 15.0;
            double gClipValue = 15.0;
            double bClipValue = 15.0;
            //CalculateOptimumClipValue(whiteRegion, ho_R, out rClipValue);
            //CalculateOptimumClipValue(whiteRegion, ho_G, out gClipValue);
            //CalculateOptimumClipValue(whiteRegion, ho_B, out bClipValue);

            double rMin, rMax, rRange;
            double gMin, gMax, gRange;
            double bMin, bMax, bRange;
            ho_R.MinMaxGray(whiteRegion, rClipValue, out rMin, out rMax, out rRange);
            ho_G.MinMaxGray(whiteRegion, gClipValue, out gMin, out gMax, out gRange);
            ho_B.MinMaxGray(whiteRegion, bClipValue, out bMin, out bMax, out bRange);

            // Error level: the largest pairwise channel difference, as a percentage
            // of the reference gray level for bright regions.
            double RGDiff = rMax - gMax;
            double GBDiff = gMax - bMax;
            double BRDiff = bMax - rMax;
            currentWBData.ErrorLevel =
                (Math.Max(RGDiff, Math.Max(GBDiff, BRDiff)) /
                 this.CameraAcquisition.CurrentCameraProperties.BrightRegionReferenceGrayLevel) * 100;
            currentWBData.RedMax = rMax;
            currentWBData.GreenMax = gMax;
            currentWBData.BlueMax = bMax;

            HOperatorSet.CountSeconds(out endTime);
            currentWBData.TimeTaken = (endTime - startTime).D;

            UpdateControlUI();
            wbIterationCompletedEvent.WaitOne();
        }
    }
    catch (Exception ex)
    {
        string errorMessage = "Exception occurred during white balancing step."
            + " Error Message: " + ex.Message;
        MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
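// Worked numeric sketch of the white-balance error level computed above (values
// are illustrative): with rMax = 210, gMax = 200, bMax = 195 and a bright-region
// reference gray level of 200, the largest pairwise difference is rMax - gMax = 10,
// so errorLevel = 10 / 200 * 100 = 5 (percent).
double rMaxEx = 210, gMaxEx = 200, bMaxEx = 195, reference = 200.0;
double maxDiff = Math.Max(rMaxEx - gMaxEx, Math.Max(gMaxEx - bMaxEx, bMaxEx - rMaxEx));
double errorLevel = maxDiff / reference * 100.0; // 5.0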