public HRegion Extract(HImage image)
{
    var domain = image.GetDomain();
    var offsetRow1 = domain.GetRow1();
    var offsetColumn1 = domain.GetColumn1();
    var croppedImage = image.CropDomain();

    var swThresholdImageFilter = new NotifyStopwatch("DynThresholdCroppedRegionExtractor.ThresholdImageFilter");
    HImage thresholdImage = ThresholdImageFilter.Process(croppedImage);
    swThresholdImageFilter.Dispose();

    var swDynThreshold = new NotifyStopwatch("DynThresholdCroppedRegionExtractor.DynThreshold");
    HRegion region = croppedImage.DynThreshold(thresholdImage, Offset, LightDark.ToHalconString());
    swDynThreshold.Dispose();

    // Translate the region back into the coordinate frame of the original (uncropped) image.
    var movedRegion = region.MoveRegion(offsetRow1, offsetColumn1);

    croppedImage.Dispose();
    thresholdImage.Dispose();
    region.Dispose();
    domain.Dispose();
    return movedRegion;
}
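// Example usage of the extractor above (a minimal sketch: the parameterless constructor and the
// image path are assumptions for illustration, and ThresholdImageFilter, Offset and LightDark are
// assumed to be configured on the instance):
var extractor = new DynThresholdCroppedRegionExtractor();
HImage plateImage = new HImage("plate.tif");
HRegion extracted = extractor.Extract(plateImage);   // returned in original image coordinates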
public static void WriteImageOfTiffLzwOfCropDomain(this HImage image, string fileName, double background = 0)
{
    // Note: the `background` parameter is currently unused.
    var cropDomain = image.CropDomain();
    cropDomain.WriteImageOfTiffLzw(fileName);
    cropDomain.Dispose();
}
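// Example usage of the extension above (a sketch; the input image and output path are illustrative):
HImage inspected = new HImage("part.tif");
inspected.WriteImageOfTiffLzwOfCropDomain(@"C:\cache\part_cropped.tif");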
public HRegion Extract(HImage image)
{
    var domain = image.GetDomain();
    var offsetRow1 = domain.GetRow1();
    var offsetColumn1 = domain.GetColumn1();
    var croppedImage = image.CropDomain();
    var croppedRegion = RegionExtractor.Extract(croppedImage);
    var movedRegion = croppedRegion.MoveRegion(offsetRow1, offsetColumn1);
    croppedImage.Dispose();
    croppedRegion.Dispose();
    domain.Dispose();
    return movedRegion;
}
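// Both Extract methods above follow the same crop-then-restore pattern: crop the image to its
// domain, run the extractor on the smaller cropped image, then move the result back by the
// domain's top-left offset. A minimal generic sketch of that pattern (GetRow1/GetColumn1 are
// assumed to be the same helper extensions used above; requires using System and HalconDotNet):
static HRegion ExtractOnCroppedDomain(HImage image, Func<HImage, HRegion> extract)
{
    HRegion domain = image.GetDomain();
    var row1 = domain.GetRow1();                 // top-left corner of the domain (assumed helper)
    var column1 = domain.GetColumn1();           // (assumed helper)
    HImage cropped = image.CropDomain();         // work only on the cropped part
    HRegion croppedResult = extract(cropped);    // any extractor operating on the cropped image
    HRegion result = croppedResult.MoveRegion(row1, column1); // back to full-image coordinates
    cropped.Dispose();
    croppedResult.Dispose();
    domain.Dispose();
    return result;
}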
public void ProcessStep(HImage imageGrabbed)
{
    HTuple startTime = null;
    HTuple endTime = null;
    HOperatorSet.CountSeconds(out startTime);
    HOperatorSet.CountSeconds(out endTime);
    currentOnCameraFFCData = new OnCameraFFCData();
    try
    {
        int imageChannels = imageGrabbed.CountChannels();
        HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
            rgbImg = rImg.Compose3(gImg, bImg);
        }
        if (rgbImg == null)
        {
            return;
        }
        imageGrabbed = rgbImg.CopyImage();
        lock (this.ImageToBeSaved)
        {
            this.ImageToBeSaved = imageGrabbed.CopyImage();
        }

        // Feed the image to the script
        imageGrabbed = imageGrabbed.CropDomain();
        currentOnCameraFFCData.Image = imageGrabbed;

        if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            HImage ho_G;
            HImage ho_B;
            HImage ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayValues = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayValues = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayValues = Globals.GetGrayValuesOfLine(irImg);
            currentOnCameraFFCData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
            currentOnCameraFFCData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
            currentOnCameraFFCData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
            currentOnCameraFFCData.GrayValues.MPlaneVals = IRAOIGrayValues.ToDArr();
        }
        else if (this.CameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
        {
            HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
            currentOnCameraFFCData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();
        }

        HOperatorSet.CountSeconds(out endTime);
        currentOnCameraFFCData.TimeTaken = (endTime - startTime).D * 1000;
    }
    catch (Exception ex)
    {
        string errorMessage = "Exception occurred during On-Camera FFC step. Error Message: " + ex.Message;
        MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
public bool FindDataCode(HImage img, HRegion region, bool train = false)
{
    if (!region.IsInitialized())
    {
        region = img.GetDomain();
    }
    if (!train)
    {
        Display(0, region);
    }

    HImage reduceImg = img.ReduceDomain(region);
    HImage searchImg = reduceImg.CropDomain();
    reduceImg.Dispose();

    int width, height;
    searchImg.GetImageSize(out width, out height);
    HImage zoomImg = searchImg.ZoomImageFactor(DateCodeZoomImg, DateCodeZoomImg, "constant");
    DataCodeContour.Dispose();
    searchImg.Dispose();

    HTuple ResultHandles, DecodedDataStrings;
    if (train)
    {
        DataCodeContour = CodeReaderHand.FindDataCode2d(zoomImg, "train", "all", out ResultHandles, out DecodedDataStrings);
    }
    else
    {
        DataCodeContour = CodeReaderHand.FindDataCode2d(zoomImg, "stop_after_result_num", 1, out ResultHandles, out DecodedDataStrings);
    }

    if (DecodedDataStrings.Length > 0)
    {
        DataCodeString = DecodedDataStrings[0].S;

        // Map the contour from the zoomed, cropped image back into the original image coordinates.
        HHomMat2D mat = new HHomMat2D();
        HHomMat2D scalMat = mat.HomMat2dScale(1 / DateCodeZoomImg, 1 / DateCodeZoomImg, 0.0, 0.0);
        double row, col;
        region.AreaCenter(out row, out col);
        HHomMat2D tranMat = scalMat.HomMat2dTranslate(row - height / 2.0, col - width / 2.0);
        HXLDCont tranDataCodeContour = tranMat.AffineTransContourXld(DataCodeContour);
        DataCodeContour.Dispose();
        DataCodeContour = tranDataCodeContour;

        if (!train)
        {
            Display(2);
            Display(3);
        }
    }
    else
    {
        if (UseBrighten)
        {
            // Retry once on a brightened copy of the zoomed image.
            HImage brightenImg = zoomImg.ScaleImage(UseBrightenValue, 0.0);
            DataCodeContour = CodeReaderHand.FindDataCode2d(brightenImg, "stop_after_result_num", 1, out ResultHandles, out DecodedDataStrings);
            if (DecodedDataStrings.Length > 0)
            {
                DataCodeString = DecodedDataStrings[0].S;
                HHomMat2D mat = new HHomMat2D();
                HHomMat2D scalMat = mat.HomMat2dScale(1 / DateCodeZoomImg, 1 / DateCodeZoomImg, 0.0, 0.0);
                double row, col;
                region.AreaCenter(out row, out col);
                HHomMat2D tranMat = scalMat.HomMat2dTranslate(row - height / 2.0, col - width / 2.0);
                HXLDCont tranDataCodeContour = tranMat.AffineTransContourXld(DataCodeContour);
                DataCodeContour.Dispose();
                DataCodeContour = tranDataCodeContour;
                if (!train)
                {
                    Display(2);
                    Display(3);
                }
                return true;
            }
        }
        DataCodeString = string.Empty;
        if (!train)
        {
            Display(4);
        }
        return false;
    }
    return true;
}
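// Example call (a minimal sketch; the hosting reader object, its configured CodeReaderHand, and
// the image path are assumptions for illustration):
HImage labelImage = new HImage("label.tif");
HRegion searchRegion = new HRegion();   // left uninitialized, so the whole image domain is searched
if (reader.FindDataCode(labelImage, searchRegion))
{
    Console.WriteLine("Decoded: " + reader.DataCodeString);
}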
public void ProcessStep(HImage imageGrabbed)
{
    HTuple startTime = null;
    HTuple endTime = null;
    HOperatorSet.CountSeconds(out startTime);
    HOperatorSet.CountSeconds(out endTime);
    currentLSAData = new LightStickAlignmentData();
    HImage imageCropped = new HImage();
    try
    {
        int imageChannels = imageGrabbed.CountChannels();
        HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
            rgbImg = rImg.Compose3(gImg, bImg);
        }
        if (rgbImg == null)
        {
            return;
        }
        imageGrabbed = rgbImg.CopyImage();
        int presentImageNoOfChannels = imageGrabbed.CountChannels();
        lock (this.ImageToBeSaved)
        {
            this.ImageToBeSaved = imageGrabbed.CopyImage();
        }

        // Feed the image to the script
        imageGrabbed = imageGrabbed.CropDomain();
        currentLSAData.Image = imageGrabbed;

        if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
        {
            HImage ho_G;
            HImage ho_B;
            HImage ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
            HTuple RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);
            HTuple GAOIGrayValues = Globals.GetGrayValuesOfLine(ho_G);
            HTuple BAOIGrayValues = Globals.GetGrayValuesOfLine(ho_B);
            HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);
            currentLSAData.GrayValues.RPlaneVals = RAOIGrayValues.ToDArr();
            currentLSAData.GrayValues.GPlaneVals = GAOIGrayValues.ToDArr();
            currentLSAData.GrayValues.BPlaneVals = BAOIGrayValues.ToDArr();
            currentLSAData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

            imageCropped = imageGrabbed.CropPart(
                (this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.LICROIRectangleHeight,
                (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.LICROIRectangleWidth,
                Properties.Settings.Default.LICROIRectangleWidth * 2,
                (this.CameraAcquisition.CurrentImageHeight / 2));
            ho_R = imageCropped.Decompose3(out ho_G, out ho_B);
            RAOIGrayValues = Globals.GetGrayValuesOfLine(ho_R);

            // Clip the pixels at the left and right ends of the line
            RAOIGrayValues = RAOIGrayValues.TupleSelectRange(
                this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                (Properties.Settings.Default.LICROIRectangleWidth * 2) - this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

            // Update the labels showing the results of the light uniformity check
            int RminGrayValue;
            int RmaxGrayValue;

            // Check whether the gray values are within the defined range in the RED plane
            bool RPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(
                RAOIGrayValues.ToDArr(), out RminGrayValue, out RmaxGrayValue);
            currentLSAData.MinGrayValue = RminGrayValue;
            currentLSAData.MaxGrayValue = RmaxGrayValue;
            currentLSAData.Status = RPlaneOptimum;
        }
        else if (this.cameraAcquisition.CurrentNumberOfChannels == Properties.Settings.Default.NumberOfChannelsInMonoImage)
        {
            HTuple AOIGrayValues = Globals.GetGrayValuesOfLine(imageGrabbed);
            currentLSAData.GrayValues.MPlaneVals = AOIGrayValues.ToDArr();

            // Clip the pixels at the left and right ends of the line
            AOIGrayValues = AOIGrayValues.TupleSelectRange(
                this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
                this.cameraAcquisition.CurrentImageWidth - this.cameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);

            // Update the labels showing the results of the light uniformity check
            int minGrayValue;
            int maxGrayValue;

            // Check whether the gray values are within the defined range in the MONO plane
            bool MonoPlaneOptimum = this.cameraAcquisition.IsLightIntensityLevelOptimum(
                AOIGrayValues.ToDArr(), out minGrayValue, out maxGrayValue);
            currentLSAData.MinGrayValue = minGrayValue;
            currentLSAData.MaxGrayValue = maxGrayValue;
            currentLSAData.Status = MonoPlaneOptimum;
        }

        HOperatorSet.CountSeconds(out endTime);
        currentLSAData.TimeTaken = (endTime - startTime).D * 1000;
        this.BeginInvoke(new Action(UpdateStepUI));
    }
    catch (Exception ex)
    {
        string errorMessage = "Exception occurred during light intensity check. Error Message: " + ex.Message;
        //MessageBox.Show(errorMessage, "Camera Setup Tool", MessageBoxButtons.OK, MessageBoxIcon.Error);
        this.BeginInvoke(new Action(UpdateStepUI));
        //lblStatus.Text = errorMessage;
        //lblStatus.BackColor = Color.Red;
    }
}
public void ProcessStep(HImage imageGrabbed)
{
    currentFMCData = new FocusMagnificationCheckData();
    lock (this.ImageToBeSaved)
    {
        this.ImageToBeSaved = imageGrabbed.CopyImage();
    }
    bool calibrationTargetContrastOK = true;

    // Feed the image to the script
    imageGrabbed = imageGrabbed.CropDomain();
    currentFMCData.Image = imageGrabbed;

    int imageChannels = imageGrabbed.CountChannels();
    HImage rImg = null, gImg = null, bImg = null, rgbImg = null, irImg = null;
    if (imageChannels == Properties.Settings.Default.NumberOfChannelsInIRAndRGBImage)
    {
        rImg = imageGrabbed.Decompose4(out gImg, out bImg, out irImg);
        rgbImg = rImg.Compose3(gImg, bImg);
    }
    HImage grayImage = new HImage();
    if (rgbImg == null)
    {
        return;
    }
    imageGrabbed = rgbImg.CopyImage();

    #region IntensityCheckForCheckingCalibrationTarget
    // Check the minimum and maximum light intensity to determine whether the correct
    // calibration target has been placed.
    grayImage = imageGrabbed.Rgb1ToGray();
    HImage ho_G;
    HImage ho_B;
    HImage ho_R = imageGrabbed.Decompose3(out ho_G, out ho_B);
    HTuple RAOIGrayVals = Globals.GetGrayValuesOfLine(ho_R);
    HTuple GAOIGrayVals = Globals.GetGrayValuesOfLine(ho_G);
    HTuple BAOIGrayVals = Globals.GetGrayValuesOfLine(ho_B);
    HTuple IRAOIGrayVals = Globals.GetGrayValuesOfLine(irImg);
    currentFMCData.GrayValues.RPlaneVals = RAOIGrayVals.ToDArr();
    currentFMCData.GrayValues.GPlaneVals = GAOIGrayVals.ToDArr();
    currentFMCData.GrayValues.BPlaneVals = BAOIGrayVals.ToDArr();
    currentFMCData.GrayValues.MPlaneVals = IRAOIGrayVals.ToDArr();

    int RPlaneMinValue;
    int RPlaneMaxValue;
    RAOIGrayVals = RAOIGrayVals.TupleSelectRange(
        this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtLeft,
        this.CameraAcquisition.CurrentImageWidth - this.CameraAcquisition.CurrentCameraProperties.NumberOfPixelsToBeClippedAtRight);
    calibrationTargetContrastOK = this.CameraAcquisition.IsLightIntensityLevelOptimum(
        GAOIGrayVals.ToDArr(), out RPlaneMinValue, out RPlaneMaxValue);
    calibrationTargetContrastOK = (RPlaneMaxValue - RPlaneMinValue) > Properties.Settings.Default.FocusPatternCheckReferenceGrayValue;
    #endregion

    #region FindPatternArea
    // Find the pattern area
    HImage reducedImage = Globals.FindPatternArea(grayImage);
    if (reducedImage == null)
    {
        SetFocusStatusMessage("Image is too dark or incorrect pattern", Color.Red);
        currentFMCData.FocusPercentage = "";
        currentFMCData.MagnificationPercentage = "";
        currentFMCData.PixelResolution = "";
        //this.BeginInvoke(new Action<FocusMagnificationCheckData>(UpdateFocusMagnificationStepUI), currentFMCData);
        return;
    }
    #endregion

    #region Focus Learning
    hv_focus = new HTuple();
    HTuple hv_currentSharpness = new HTuple();
    HImage imageCropped = new HImage();

    // Indicates that execution has entered the focus learning stage
    // (for both successful and failed learning).
    bool focusLearningDone = false;
    if (this.focusLearningStarted)
    {
        #region FocusLearningStage
        focusLearningDone = true;
        if (!this.focusLearningOver)
        {
            HTuple hv_Subsampling = 1;
            HTuple hv_Scale = 1.0 / hv_Subsampling;
            int grabbedImageWidth, grabbedImageHeight;
            imageGrabbed.GetImageSize(out grabbedImageWidth, out grabbedImageHeight);

            // Crop the image before learning
            imageCropped = imageGrabbed.CropPart(
                (this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
                (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
                Properties.Settings.Default.FocusROIRectangleWidth * 2,
                (this.CameraAcquisition.CurrentImageHeight / 2));
            int croppedImageWidth, croppedImageHeight;
            imageCropped.GetImageSize(out croppedImageWidth, out croppedImageHeight);

            int noOfSamples = croppedImageHeight / Properties.Settings.Default.NoOfSamplesUsedInFocusStep;
            for (int i = 0; i < Properties.Settings.Default.NoOfSamplesUsedInFocusStep; i++)
            {
                try
                {
                    int row = i * noOfSamples;
                    HImage newCroppedImage = imageCropped.CopyImage().CropPart(row, 0, croppedImageWidth, noOfSamples);
                    //newCroppedImage.WriteImage("bmp", 0, "D:\\imageCropped" + i.ToString());

                    // Function call for sharpness measurement
                    hv_currentSharpness = MeasureSharpness(newCroppedImage, hv_Scale);
                    hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
                }
                catch (Exception)
                {
                    // Ignore a failed sample and continue with the next one.
                }
            }
            currentFMCData.FocusPercentage = "";
            SetFocusStatusMessage("Learning...", Color.Yellow);
            UpdateLabelMessage(currentFMCData.FocusPercentage);
        }
        else
        {
            hv_MaxAutoCorrelation = new HTuple();
            HTuple hv_Change = new HTuple();
            if (hv_AutoCorrelationTuple.TupleLength() > 0)
            {
                hv_MaxAutoCorrelation = hv_AutoCorrelationTuple.TupleMax();
                hv_Change = ((hv_AutoCorrelationTuple.TupleMax() - hv_AutoCorrelationTuple.TupleMin()) / hv_AutoCorrelationTuple.TupleMax()) * 100;
                if (hv_MaxAutoCorrelation.D <= 0.0 || hv_Change.D < Properties.Settings.Default.MinimumFocusLearningRangeRequired)
                {
                    currentFMCData.FocusPercentage = "";
                    SetFocusStatusMessage("Focus learning not done properly. Range of focus learning is not enough.", Color.Orange);
                    this.focusLearningOver = false;
                    UpdateLabelMessage(currentFMCData.FocusPercentage);
                }
                else
                {
                    this.CameraAcquisition.CurrentCameraSetupProperties.FocusMaxAutoCorrelationValue = hv_MaxAutoCorrelation.D;
                }
            }
            else
            {
                currentFMCData.FocusPercentage = "";
                SetFocusStatusMessage("Focus learning not done properly. Sharpness measurement failed.", Color.Orange);
                this.focusLearningOver = false;
                UpdateLabelMessage(currentFMCData.FocusPercentage);
            }
            this.focusLearningStarted = false;
        }
        #endregion
    }
    else if (this.focusLearningOver)
    {
        #region FocusTestingPhase
        if (!calibrationTargetContrastOK)
        {
            currentFMCData.FocusPercentage = "";
            currentFMCData.MagnificationPercentage = "";
            currentFMCData.PixelResolution = "";
            SetFocusStatusMessage("Incorrect pattern. Not enough contrast", Color.Red);
            UpdateLabelMessage(currentFMCData.FocusPercentage);
            return;
        }

        HTuple hv_Subsampling = 1;
        HTuple hv_Scale = 1.0 / hv_Subsampling;

        // Crop the image before measuring
        imageCropped = imageGrabbed.CropPart(
            (this.CameraAcquisition.CurrentImageHeight / 2) - Properties.Settings.Default.FocusROIRectangleHeight,
            (this.CameraAcquisition.CurrentImageWidth / 2) - Properties.Settings.Default.FocusROIRectangleWidth,
            Properties.Settings.Default.FocusROIRectangleWidth * 2,
            (this.CameraAcquisition.CurrentImageHeight / 2));

        // Function call for sharpness measurement
        hv_currentSharpness = MeasureSharpness(imageCropped, hv_Scale);

        if (hv_currentSharpness > hv_MaxAutoCorrelation + 2)
        {
            SetFocusStatusMessage("Current sharpness is more than learnt sharpness. Insert valid calibration doc or re-do focus learning!", Color.Orange);
            currentFMCData.PixelResolution = "";
            currentFMCData.MagnificationPercentage = "";
            currentFMCData.FocusPercentage = "";
            UpdateLabelMessage(currentFMCData.FocusPercentage);
            return;
        }
        else if (hv_currentSharpness > hv_MaxAutoCorrelation)
        {
            hv_MaxAutoCorrelation = hv_currentSharpness;
            hv_AutoCorrelationTuple = hv_AutoCorrelationTuple.TupleConcat(hv_currentSharpness);
        }

        // Focus percentage: how close the current sharpness is to the learnt maximum.
        hv_focus = 100 - (((hv_MaxAutoCorrelation - hv_currentSharpness) / hv_MaxAutoCorrelation) * 100);
        hv_focus = hv_focus.TupleRound();
        currentFMCData.FocusPercentage = hv_focus.ToString();

        if (hv_focus > 100)
        {
            // Not focused
            currentFMCData.FocusPercentage = "";
            SetFocusStatusMessage("Focus learning not done properly", Color.Red);
            UpdateLabelMessage(currentFMCData.FocusPercentage);
        }
        if (hv_focus >= 95 && hv_focus <= 100)
        {
            // Focused
            SetFocusStatusMessage("Focused", Color.LimeGreen);
        }
        else if (hv_focus > 70 && hv_focus < 95)
        {
            // Fine tuning is required
            SetFocusStatusMessage("Fine tuning is required", Color.Yellow);
        }
        else
        {
            // Not focused
            SetFocusStatusMessage("Not focused", Color.Red);
        }
        #endregion
    }
    else if (!focusLearningOver && !focusLearningDone)
    {
        if (hv_MaxAutoCorrelation == null)
        {
            SetFocusStatusMessage("Focus learning not done", Color.Yellow);
        }
    }
    UpdateLabelMessage(currentFMCData.FocusPercentage);
    #endregion
}
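// The focus percentage above simplifies to round(100 * currentSharpness / maxLearntSharpness).
// A minimal worked example with illustrative numbers (the values are assumptions, not measurements):
double maxLearntSharpness = 0.82;   // best autocorrelation recorded during focus learning
double currentSharpness = 0.78;     // autocorrelation measured on the current frame
double focus = Math.Round(100 - ((maxLearntSharpness - currentSharpness) / maxLearntSharpness) * 100);
// focus == 95, which falls in the 95..100 band and would be reported as "Focused".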
private EdgeSearchingResult CropDomain(HImage image, EdgeSearchingDefinition definition)
{
    var swSearchEdge = new NotifyStopwatch("SearchEdge: " + definition.Name);
    var esr = new EdgeSearchingResult
    {
        Definition = definition.DeepClone(),
        Name = definition.Name
    };

    var rectImage = image.ChangeDomainForRoiRectangle(definition.Line, definition.ROIWidth);
    var rectDomain = rectImage.GetDomain();
    var rectDomainRect1 = rectDomain.GetSmallestRectangle1();
    var rectCroppedImage = rectImage.CropDomain();
    if (definition.Domain_SaveCacheImageEnabled)
    {
        rectCroppedImage.WriteImageOfTiffLzw(
            _cacheImageDir + "\\SearchEdges_" + definition.Name + "_1_Domain_Cropped.tif");
    }

    // RegionExtractor
    HRegion roiDomain;
    if (esr.Definition.RegionExtractor != null)
    {
        var croppedRoiDomain = esr.Definition.RegionExtractor.Extract(rectCroppedImage);
        roiDomain = croppedRoiDomain.MoveRegion(rectDomainRect1.Row1, rectDomainRect1.Column1);
    }
    else
    {
        roiDomain = rectDomain;
    }
    var roiDomainRect1 = roiDomain.GetSmallestRectangle1();
    HImage roiCroppedImage = image.CropRectangle1(roiDomainRect1);

    // ImageFilter
    HImage filterImage = null;
    if (esr.Definition.ImageFilter != null)
    {
        var sw = new NotifyStopwatch("ImageFilter");
        filterImage = esr.Definition.ImageFilter.Process(roiCroppedImage);
        sw.Dispose();
        if (definition.ImageFilter_SaveCacheImageEnabled)
        {
            var cropDomain = filterImage.CropDomain();
            cropDomain.WriteImageOfTiffLzw(_cacheImageDir + "\\SearchEdges_" + definition.Name + "_3_ImageFilter_Cropped.tif");
            cropDomain.Dispose();
            // var paintedImage = filterImage.PaintGrayOffset(image, offsetY, offsetX);
            // paintedImage.WriteImageOfJpeg(_cacheImageDir + "\\SearchEdges_" + definition.Name +
            //     "_3_ImageFilter_PaintGrayOffset.jpg");
            // paintedImage.Dispose();
        }
    }
    else
    {
        filterImage = roiCroppedImage;
    }

    // LineExtractor: search in cropped coordinates, then translate the result back.
    Line offsetLine = new Line(
        esr.Definition.Line.X1 - roiDomainRect1.Column1,
        esr.Definition.Line.Y1 - roiDomainRect1.Row1,
        esr.Definition.Line.X2 - roiDomainRect1.Column1,
        esr.Definition.Line.Y2 - roiDomainRect1.Row1);
    var line = esr.Definition.LineExtractor.FindLine(filterImage, offsetLine);
    var translatedLine = new Line(
        line.X1 + roiDomainRect1.Column1,
        line.Y1 + roiDomainRect1.Row1,
        line.X2 + roiDomainRect1.Column1,
        line.Y2 + roiDomainRect1.Row1);
    esr.EdgeLine = translatedLine;
    if (line.IsEmpty)
    {
        esr.IsNotFound = true;
        Debug.WriteLine("Edge not found: " + esr.Name);
    }
    swSearchEdge.Dispose();
    return esr;
}
private EdgeSearchingResult NoCropDomain(HImage image, EdgeSearchingDefinition definition)
{
    var swSearchEdge = new NotifyStopwatch("SearchEdge: " + definition.Name);
    var esr = new EdgeSearchingResult
    {
        Definition = definition.DeepClone(),
        Name = definition.Name
    };
    if (esr.Definition.ImageFilter_Disabled)
    {
        esr.Definition.ImageFilter = null;
    }
    if (esr.Definition.RegionExtractor_Disabled)
    {
        esr.Definition.RegionExtractor = null;
    }

    var rectImage = HDevelopExport.Singletone.ChangeDomainForRectangle(image, definition.Line, definition.ROIWidth);
    if (definition.Domain_SaveCacheImageEnabled)
    {
        rectImage.WriteImageOfTiffLzwOfCropDomain(
            _cacheImageDir + "\\SearchEdges_" + definition.Name + "_1_Domain_Cropped.tif");
    }

    // RegionExtractor
    HImage roiImage = null;
    if (esr.Definition.RegionExtractor != null)
    {
        var rectDomain = rectImage.GetDomain();
        HRegion roiDomain;
        if (!esr.Definition.RegionExtractor_CropDomainEnabled)
        {
            var swRegionExtractor = new NotifyStopwatch("EdgeInspector.RegionExtractor: " + definition.Name);
            roiDomain = esr.Definition.RegionExtractor.Extract(rectImage);
            swRegionExtractor.Dispose();
            if (definition.RegionExtractor_SaveCacheImageEnabled)
            {
                rectImage.WriteImageOfTiffLzwOfCropDomain(roiDomain,
                    _cacheImageDir + "\\SearchEdges_" + definition.Name + "_2_ROI.tif");
            }
            roiImage = rectImage.ReduceDomain(roiDomain);
            rectImage.Dispose();
        }
        else
        {
            // The crop-domain variant is not implemented; the code below the throw is an
            // unreachable sketch kept from the original source.
            throw new NotImplementedException();
            var domainOffsetRow1 = rectDomain.GetRow1();
            var domainOffsetColumn1 = rectDomain.GetColumn1();
            var croppedRectImage = rectImage.CropDomain();
            var croppedRoiDomain = esr.Definition.RegionExtractor.Extract(croppedRectImage);
            roiDomain = croppedRoiDomain.MoveRegion(domainOffsetRow1, domainOffsetColumn1);
        }
    }
    else
    {
        roiImage = rectImage;
    }

    // ImageFilter
    HImage filterImage = null;
    if (esr.Definition.ImageFilter != null)
    {
        if (!esr.Definition.ImageFilter_CropDomainEnabled)
        {
            var swImageFilter = new NotifyStopwatch("EdgeInspector.ImageFilter: " + definition.Name);
            filterImage = esr.Definition.ImageFilter.Process(roiImage);
            swImageFilter.Dispose();
            roiImage.Dispose();
            if (definition.ImageFilter_SaveCacheImageEnabled)
            {
                var filterImageDomain = filterImage.GetDomain();
                var offsetRow = filterImageDomain.GetRow1();
                var offsetColumn = filterImageDomain.GetColumn1();
                var cropDomain = filterImage.CropDomain();
                cropDomain.WriteImageOfTiffLzw(_cacheImageDir + "\\SearchEdges_" + definition.Name + "_3_ImageFilter_Cropped.tif");
                var paintedImage = cropDomain.PaintGrayOffset(image, offsetRow, offsetColumn);
                paintedImage.WriteImageOfJpeg(_cacheImageDir + "\\SearchEdges_" + definition.Name + "_3_ImageFilter_PaintGrayOffset.jpg");
                cropDomain.Dispose();
                paintedImage.Dispose();
            }
        }
        else
        {
            // The crop-domain variant is not implemented; the code below the throw is an
            // unreachable sketch kept from the original source.
            throw new NotImplementedException();
            var roiDomain = roiImage.GetDomain();
            int offsetX = roiDomain.GetColumn1();
            int offsetY = roiDomain.GetRow1();
            var croppedImage = roiImage.CropDomain();
            var sw = new NotifyStopwatch("ImageFilter");
            filterImage = esr.Definition.ImageFilter.Process(croppedImage);
            sw.Dispose();
            if (definition.ImageFilter_SaveCacheImageEnabled)
            {
                var cropDomain = filterImage.CropDomain();
                cropDomain.WriteImageOfTiffLzw(_cacheImageDir + "\\SearchEdges_" + definition.Name + "_3_ImageFilter_Cropped.tif");
                cropDomain.Dispose();
                var paintedImage = filterImage.PaintGrayOffset(image, offsetY, offsetX);
                paintedImage.WriteImageOfJpeg(_cacheImageDir + "\\SearchEdges_" + definition.Name + "_3_ImageFilter_PaintGrayOffset.jpg");
                paintedImage.Dispose();
            }
            /* Line offsetLine = new Line(esd.Line.X1 - offsetX,
             *     esd.Line.Y1 - offsetY,
             *     esd.Line.X2 - offsetX,
             *     esd.Line.Y2 - offsetY);
             *
             * var line = esd.LineExtractor.FindLine(filterImage, offsetLine);
             *
             * var translatedLine = new Line(line.X1 + offsetX,
             *     line.Y1 + offsetY,
             *     line.X2 + offsetX,
             *     line.Y2 + offsetY);
             *
             * esr.EdgeLine = translatedLine;
             *
             * if (line.IsEmpty)
             * {
             *     esr.IsNotFound = true;
             *     Debug.WriteLine("Edge not found: " + esr.Name);
             * } */
        }
    }
    else
    {
        filterImage = roiImage;
    }

    var swLineExtractor = new NotifyStopwatch("EdgeInspector.LineExtractor: " + definition.Name);
    var line = definition.LineExtractor.FindLine(filterImage, definition.Line);
    swLineExtractor.Dispose();
    if (line.IsEmpty)
    {
        esr.IsNotFound = true;
        Debug.WriteLine("Edge not found: " + esr.Name);
    }
    esr.EdgeLine = line;
    swSearchEdge.Dispose();
    return esr;
}