/// <summary>
/// Callback invoked when the TND read request delivers a boolean value.
/// Detaches this instance's listener, suggests the request go inactive,
/// stores the value, and marks the data value complete.
/// </summary>
/// <param name="valueFromTND">Boolean value reported by the TND read request.</param>
public void HandleBoolValue(bool valueFromTND)
{
    // Resolve the definition once instead of casting twice.
    ReadValueFromTNDDefinition definition = (ReadValueFromTNDDefinition)Definition();
    definition.TNDReadRequest.RemoveValueListener(mBoolDelegate);
    definition.TNDReadRequest.SuggestGoingInActive();

    mDataValueInstance.SetValue(valueFromTND);
    mDataValueInstance.SetIsComplete();

    TestExecution().LogMessageWithTimeFromTrigger("TND Read " + Name + " completed (bool)");
}
/// <summary>
/// Computes the absolute distance between two reference points, in pixels and
/// (via the PixelsPerUnit divisor) in converted units, then publishes both results.
/// A published value of -1 indicates the computation could not be performed.
/// </summary>
public override void DoWork()
{
    DateTime startTime = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);

    // -1 is the sentinel published when no valid result could be computed.
    double result_pixels = -1;
    double result = -1;

    if (mReferencePoint1.GetValueAsDouble() < 0 || mReferencePoint2.GetValueAsDouble() < 0)
    {
        // Negative reference points mean the upstream steps never produced valid values.
        // Changed from LogMessage("ERROR: ...") to LogErrorWithTimeFromTrigger for
        // consistency with the other analysis steps' error reporting in this file.
        TestExecution().LogErrorWithTimeFromTrigger("Reference point(s) for '" + Name + "' do not have valid values. ref1=" + mReferencePoint1.GetValueAsDouble() + " ref2=" + mReferencePoint2.GetValueAsDouble());
    }
    else if (mEnsure1Before2 != null && mEnsure1Before2.ValueAsBoolean() && mReferencePoint1.GetValueAsDouble() > mReferencePoint2.GetValueAsDouble())
    {
        // Optional ordering constraint: point 1 must not lie past point 2.
        TestExecution().LogErrorWithTimeFromTrigger("Reference point(s) for '" + Name + "' are out of order (1 > 2). ref1=" + mReferencePoint1.GetValueAsDouble() + " ref2=" + mReferencePoint2.GetValueAsDouble());
    }
    else
    {
        try
        {
            result_pixels = Math.Abs(mReferencePoint2.GetValueAsDouble() - mReferencePoint1.GetValueAsDouble());
            if (mPixelsPerUnit.ValueAsDecimal() == 0)
            {
                // Guard the division below.
                TestExecution().LogErrorWithTimeFromTrigger("PixelsPerUnit for '" + Name + "' is zero (0). Unable to use it for conversion.");
            }
            else
            {
                result = result_pixels / mPixelsPerUnit.ValueAsDecimal();
            }
        }
        catch (Exception e)
        {
            TestExecution().LogErrorWithTimeFromTrigger("Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
        }
    } // end main block ("else" after all initial setup error checks)

    // Results are published even on error (-1) so downstream steps are not blocked.
    mDistance.SetValue(result);
    mDistance.SetIsComplete();
    if (mDistance_pixels != null)
    {
        mDistance_pixels.SetValue(result_pixels);
        mDistance_pixels.SetIsComplete();
    }

    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    TestExecution().LogMessageWithTimeFromTrigger(Name + " computed distance of " + result);
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + " | took " + computeTime.TotalMilliseconds + "ms");
}
/// <summary>
/// Derives a conversion factor from the pixel span between two reference points
/// and a known physical distance (|ref2 - ref1| / KnownDistance), then publishes it.
/// A published value of -1 indicates the computation could not be performed.
/// </summary>
public override void DoWork()
{
    DateTime begunAt = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + begunAt + Environment.NewLine);

    double conversionFactor = -1;
    double point1 = mReferencePoint1.GetValueAsDouble();
    double point2 = mReferencePoint2.GetValueAsDouble();
    var knownDistance = mKnownDistance.ValueAsDecimal();

    if (point1 < 0 || point2 < 0)
    {
        // Negative reference points mean upstream steps never produced valid values.
        TestExecution().LogErrorWithTimeFromTrigger("Reference point(s) for '" + Name + "' do not have valid values. ref1=" + point1 + " ref2=" + point2);
    }
    else if (knownDistance == 0)
    {
        // Guard the division below.
        TestExecution().LogErrorWithTimeFromTrigger("KnownDistance for '" + Name + "' is zero (0). Unable to use it for conversion.");
    }
    else
    {
        try
        {
            conversionFactor = Math.Abs(point2 - point1) / knownDistance;
        }
        catch (Exception e)
        {
            TestExecution().LogErrorWithTimeFromTrigger("Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
        }
    } // end main block ("else" after all initial setup error checks)

    // Publish even on failure (-1) so downstream steps are not blocked.
    mConversionFactor.SetValue(conversionFactor);
    mConversionFactor.SetIsComplete();

    DateTime finishedAt = DateTime.Now;
    TimeSpan elapsed = finishedAt - begunAt;
    TestExecution().LogMessageWithTimeFromTrigger(Name + " computed conversion factor of " + conversionFactor);
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + finishedAt + " | took " + elapsed.TotalMilliseconds + "ms");
}
/// <summary>
/// Copies the source value into the destination value, converting according to the
/// destination's declared type. The copy is skipped when the (optional) prerequisite
/// evaluates false. The destination is marked complete in all cases so downstream
/// steps are never blocked waiting on it.
/// </summary>
public override void DoWork()
{
    DateTime startTime = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);
    try
    {
        if (mPrerequisite != null && !mPrerequisite.ValueAsBoolean())
        {
            TestExecution().LogMessageWithTimeFromTrigger(Name + ": prerequisites not met. Skipping.");
        }
        else
        {
            // The destination's declared type drives the conversion; the source is
            // coerced through the matching ValueAs* accessor.
            switch (mDestinationValue.Type)
            {
                case DataType.Boolean:
                    mDestinationValue.SetValue(mSourceValue.ValueAsBoolean());
                    break;
                case DataType.DecimalNumber:
                    mDestinationValue.SetValue(mSourceValue.ValueAsDecimal());
                    break;
                case DataType.IntegerNumber:
                    mDestinationValue.SetValue(mSourceValue.ValueAsLong());
                    break;
                case DataType.NotDefined:
                    // (A commented-out fallback that retried the copy keyed on the
                    // SOURCE type used to live here; removed as dead code.)
                    TestExecution().LogErrorWithTimeFromTrigger(Name + " can't copy value since the destination doesn't have its type defined.");
                    break;
                default:
                    TestExecution().LogErrorWithTimeFromTrigger(Name + " can't copy value since the destination is an unsupported type.");
                    break;
            }
        }
    }
    catch (Exception e)
    {
        // Changed from LogMessageWithTimeFromTrigger("ERROR: ...") to
        // LogErrorWithTimeFromTrigger for consistency with this method's other
        // error paths.
        TestExecution().LogErrorWithTimeFromTrigger("Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
    }
    // Marked complete even on error/skip so downstream steps are not blocked.
    mDestinationValue.SetIsComplete();
    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    mIsComplete = true;
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + " | took " + computeTime.TotalMilliseconds + "ms");
}
// Bytes per pixel, fixed by PixelFormat.Format32bppArgb (B,G,R,A byte order);
// see http://www.bobpowell.net/lockingbits.htm
public static readonly int PIXEL_BYTE_WIDTH = 4;

/// <summary>
/// Finds the bounding rectangle of the color-matched region containing the start
/// point, by growing four EdgeSearch scans (top/bottom/left/right) outward from it.
/// Publishes the four bounds (-1 when no rectangle was found) and optionally
/// auto-saves the source/marked images.
/// </summary>
public override void DoWork()
{
    DateTime startTime = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);
    int startX = (int)mStartPoint_X.ValueAsLong();
    int startY = (int)mStartPoint_Y.ValueAsLong();
    // -1 = "not found" sentinels published to the bound outputs below.
    int leftEdge = -1;
    int rightEdge = -1;
    int topEdge = -1;
    int bottomEdge = -1;
    if (mPrerequisite != null && !mPrerequisite.ValueAsBoolean())
    {
        TestExecution().LogMessageWithTimeFromTrigger(Name + ": prerequisites not met. Skipping.");
    }
    else if (mSourceImage.Bitmap == null)
    {
        TestExecution().LogErrorWithTimeFromTrigger("source image for '" + Name + "' does not exist.");
    }
    else if (startX < 0 || startX >= mSourceImage.Bitmap.Width || startY < 0 || startY >= mSourceImage.Bitmap.Height)
    {
        TestExecution().LogErrorWithTimeFromTrigger("Start position for '" + Name + "' isn't within the image bounds; start=" + startX + "," + startY + "; image size=" + mSourceImage.Bitmap.Width + "x" + mSourceImage.Bitmap.Height);
    }
    else
    {
        int stepSize = (int)mStepSize.ValueAsLong();
        bool detailedSearchAtEnd = mDetailedSearch.ValueAsBoolean();
        // sourceBitmap/markedBitmap and the *BitmapData/stride variables below are
        // fields (declared elsewhere in this class), not locals.
        sourceBitmap = SourceImage.Bitmap;
        if (mImageToMark != null && mImageToMark.Bitmap != null)
        {
            markedBitmap = mImageToMark.Bitmap;
        }
        try
        {
            // Lock both bitmaps for direct pixel access; unlocked in finally.
            sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PIXEL_FORMAT);
            if (markedBitmap != null)
            {
                markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PIXEL_FORMAT);
            }
            sourceStride = sourceBitmapData.Stride;
            sourceStrideOffset = sourceStride - (sourceBitmapData.Width * PIXEL_BYTE_WIDTH);
            unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
            {
                byte *sourcePointer;
                byte *markedPointer;
                sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                sourcePointer += (startY * sourceStride) + (startX * PIXEL_BYTE_WIDTH); // adjust to current point
                // Bytes are laid out B,G,R,A; alpha is ignored for the match test.
                Color theColor = Color.FromArgb(sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                if (!mColorMatchDefinition.Matches(theColor))
                {
                    TestExecution().LogErrorWithTimeFromTrigger(Name + " start position isn't within the match color; start=" + startX + "," + startY + " color=" + theColor);
                }
                else
                {
                    // Grow (or lazily create) the shared visited-pixel record to at
                    // least the bitmap's size.
                    if (mFindBoundingRectangleDefinition.SearchRecord.GetLength(0) < sourceBitmap.Width || mFindBoundingRectangleDefinition.SearchRecord.GetLength(1) < sourceBitmap.Height)
                    {
                        mFindBoundingRectangleDefinition.SearchRecord = new short[sourceBitmap.Width, sourceBitmap.Height];
                        mFindBoundingRectangleDefinition.LastMarkerUsed = 0;
                    }
                    // Marker counter wrapped; wipe the record so stale markers can't
                    // collide with new ones.
                    if (mFindBoundingRectangleDefinition.LastMarkerUsed == int.MaxValue)
                    {
                        for (int x = 0; x < mFindBoundingRectangleDefinition.SearchRecord.GetLength(0); x++)
                        {
                            for (int y = 0; y < mFindBoundingRectangleDefinition.SearchRecord.GetLength(1); y++)
                            {
                                mFindBoundingRectangleDefinition.SearchRecord[x, y] = 0;
                            }
                        }
                        mFindBoundingRectangleDefinition.LastMarkerUsed = 0;
                    }
                    mFindBoundingRectangleDefinition.LastMarkerUsed++;
                    // Four edge scans growing away from the start point; each is
                    // clamped to the image and shares the visited-pixel record.
                    EdgeSearch topEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.X, startY, -1 * stepSize, 0, startX, 0, sourceBitmap.Width - 1, mFindBoundingRectangleDefinition.SearchRecord, mFindBoundingRectangleDefinition.LastMarkerUsed);
                    EdgeSearch bottomEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.X, startY, +1 * stepSize, sourceBitmap.Height - 1, startX, 0, sourceBitmap.Width - 1, mFindBoundingRectangleDefinition.SearchRecord, mFindBoundingRectangleDefinition.LastMarkerUsed);
                    EdgeSearch leftEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.Y, startX, -1 * stepSize, 0, startY, 0, sourceBitmap.Height - 1, mFindBoundingRectangleDefinition.SearchRecord, mFindBoundingRectangleDefinition.LastMarkerUsed);
                    EdgeSearch rightEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.Y, startX, +1 * stepSize, sourceBitmap.Width - 1, startY, 0, sourceBitmap.Height - 1, mFindBoundingRectangleDefinition.SearchRecord, mFindBoundingRectangleDefinition.LastMarkerUsed);
                    // Cross-link the searches so each can consult the perpendicular
                    // extents while it advances.
                    topEdgeSearch.minSideEdge = leftEdgeSearch;
                    topEdgeSearch.maxSideEdge = rightEdgeSearch;
                    bottomEdgeSearch.minSideEdge = leftEdgeSearch;
                    bottomEdgeSearch.maxSideEdge = rightEdgeSearch;
                    leftEdgeSearch.minSideEdge = topEdgeSearch;
                    leftEdgeSearch.maxSideEdge = bottomEdgeSearch;
                    rightEdgeSearch.minSideEdge = topEdgeSearch;
                    rightEdgeSearch.maxSideEdge = bottomEdgeSearch;
                    // Round-robin the four searches until all report done.
                    while (!(topEdgeSearch.Done() && bottomEdgeSearch.Done() && leftEdgeSearch.Done() && rightEdgeSearch.Done()))
                    {
                        if (!topEdgeSearch.Done()) { topEdgeSearch.TestLine(); }
                        if (!bottomEdgeSearch.Done()) { bottomEdgeSearch.TestLine(); }
                        if (!leftEdgeSearch.Done()) { leftEdgeSearch.TestLine(); }
                        if (!rightEdgeSearch.Done()) { rightEdgeSearch.TestLine(); }
                    }
                    if (detailedSearchAtEnd)
                    {
                        // Unimplemented refinement pass.
                        //topEdgeSearch.mStep
                    }
                    leftEdge = leftEdgeSearch.lastPosWhereObjectSeen;
                    rightEdge = rightEdgeSearch.lastPosWhereObjectSeen;
                    topEdge = topEdgeSearch.lastPosWhereObjectSeen;
                    bottomEdge = bottomEdgeSearch.lastPosWhereObjectSeen;
                    /* TODO: rectangle decoration? force user to use ROI?
                     * mResultantRay.SetStartX(centerX);
                     * mResultantRay.SetStartY(centerY);
                     * mResultantRay.SetEndX((int)(centerX + outerRadius * Math.Cos(overallRad)));
                     * mResultantRay.SetEndY((int)(centerY + outerRadius * Math.Sin(overallRad)));
                     * mResultantRay.SetIsComplete();
                     */
                }
            } // end unsafe block
        }
        catch (Exception e)
        {
            TestExecution().LogMessageWithTimeFromTrigger("ERROR: Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
        }
        finally
        {
            sourceBitmap.UnlockBits(sourceBitmapData);
            if (markedBitmap != null)
            {
                markedBitmap.UnlockBits(markedBitmapData);
            }
        }
    } // end main block ("else" after all initial setup error checks)
    // Bounds are published even on failure (-1) so downstream steps are not blocked.
    mLeftBound.SetValue(leftEdge);
    mLeftBound.SetIsComplete();
    mRightBound.SetValue(rightEdge);
    mRightBound.SetIsComplete();
    mTopBound.SetValue(topEdge);
    mTopBound.SetIsComplete();
    mBottomBound.SetValue(bottomEdge);
    mBottomBound.SetIsComplete();
    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    TestExecution().LogMessageWithTimeFromTrigger(Name + " found bounding rectangle; left=" + leftEdge + " right=" + rightEdge + " top=" + topEdge + " bottom=" + bottomEdge);
    if (mAutoSave)
    {
        try
        {
            // NOTE(review): cast to FindRadialLineDefinition looks copy/pasted from
            // another analysis class; confirm Definition() really returns a
            // FindRadialLineDefinition here, otherwise AutoSave throws
            // InvalidCastException.
            string filePath = ((FindRadialLineDefinition)Definition()).AutoSavePath;
            mSourceImage.Save(filePath, Name, true);
            if (mImageToMark != null)
            {
                mImageToMark.Save(filePath, Name, "_marked_" + leftEdge + "_" + rightEdge + "_" + topEdge + "_" + bottomEdge);
            }
            TestExecution().LogMessageWithTimeFromTrigger("Snapshot saved");
        }
        catch (ArgumentException e)
        {
            Project().Window().logMessage("ERROR: " + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger(e.Message);
        }
        catch (Exception e)
        {
            Project().Window().logMessage("ERROR: Unable to AutoSave snapshot from " + Name + ". Ensure path valid and disk not full. Low-level message=" + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger("Unable to AutoSave snapshot from " + Name + ". Ensure path valid and disk not full.");
        }
    }
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + " | took " + computeTime.TotalMilliseconds + "ms");
}
/// <summary>
/// Poses a Yes/No question to the operator via a message box, substituting referenced
/// data values into the '|'-delimited query message. Publishes 1 for Yes, 0 for No,
/// and -1 when the query was skipped, disabled, misconfigured, or failed. The
/// execution timeout timer is paused while the dialog is open.
/// </summary>
public override void DoWork()
{
    DateTime begunAt = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + begunAt + Environment.NewLine);

    long answer = -1;
    try
    {
        if (mEnabled == null)
        {
            TestExecution().LogErrorWithTimeFromTrigger("Enabled isn't defined in " + Name);
        }
        else if (mPrerequisite != null && !mPrerequisite.ValueAsBoolean())
        {
            TestExecution().LogMessageWithTimeFromTrigger(Name + ": prerequisites not met. Skipping.");
        }
        else if (!mEnabled.ValueAsBoolean())
        {
            TestExecution().LogMessageWithTimeFromTrigger(Name + " disabled. Skipped entry.");
        }
        else
        {
            // The query message alternates literal text and value slots:
            // "text|text|...|text" — one referenced value fills each '|' position.
            string[] pieces = mOperatorQueryDefinition.QueryMessage.Split(new char[] { '|' });
            int referencedCount = mOperatorQueryDefinition.mValuesToReference.Count;
            if (pieces.GetLength(0) != referencedCount + 1)
            {
                TestExecution().LogErrorWithTimeFromTrigger(Name + " has a mismatch between the QueryMessage and ValuesToReference. " + referencedCount + " values were provided, but " + pieces.GetLength(0) + " were expected.");
            }
            else
            {
                string prompt = string.Empty;
                for (int i = 0; i < referencedCount; i++)
                {
                    prompt += pieces[i] + TestExecution().DataValueRegistry.GetObject(mOperatorQueryDefinition.mValuesToReference[i].Name).Value;
                }
                prompt += pieces[pieces.GetLength(0) - 1];

                // Don't let the modal dialog trip the execution timeout.
                TestSequence().StopExecutionTimeoutTimer();
                answer = (MessageBox.Show(prompt, "Operator Query", MessageBoxButtons.YesNo) == DialogResult.Yes) ? 1 : 0;
            }
        }
    }
    catch (Exception e)
    {
        TestExecution().LogErrorWithTimeFromTrigger("Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
    }
    finally
    {
        // Always resume the timeout timer, even when it was never stopped.
        TestSequence().StartExecutionTimeoutTimer();
    }

    mOperatorAnswer.SetValue(answer);
    mOperatorAnswer.SetIsComplete();

    DateTime finishedAt = DateTime.Now;
    TimeSpan elapsed = finishedAt - begunAt;
    TestExecution().LogMessageWithTimeFromTrigger(Name + " answer was " + answer);
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + finishedAt + " | took " + elapsed.TotalMilliseconds + "ms");
}
// public const string AnalysisType = "Color Present Fails";
// public override string Type() { return AnalysisType; }

/// <summary>
/// Sums the pixel-to-pixel brightness variation inside the ROI, scanning along the
/// X axis and/or Y axis as configured. Only variations above the configured
/// threshold contribute to the total; those pixels are optionally painted with the
/// mark color into the marked-image copy. Publishes the accumulated total.
/// </summary>
public override void DoWork()
{
    //if (!mSourceImage.IsComplete() || !AreExplicitDependenciesComplete()) return;
    TestExecution().LogMessageWithTimeFromTrigger("IntensityVariation " + Name + " started");
    Bitmap sourceBitmap = SourceImage.Bitmap;
    Bitmap markedBitmap = null;
    if (mMarkedImage != null && sourceBitmap != null)
    {
        // Work on a copy so markings never alter the source image.
        mMarkedImage.SetImage(new Bitmap(sourceBitmap));
        markedBitmap = mMarkedImage.Bitmap;
        TestExecution().LogMessageWithTimeFromTrigger("Created copy of image for markings");
    }
    long resultValue = 0;
    if (sourceBitmap != null)
    {
        // for LockBits see http://www.bobpowell.net/lockingbits.htm & http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
        BitmapData sourceBitmapData = null;
        BitmapData markedBitmapData = null;
        try
        {
            sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
            if (markedBitmap != null)
            {
                markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
            }
            const int pixelByteWidth = 4; // determined by PixelFormat.Format32bppArgb
            int stride = sourceBitmapData.Stride;
            int strideOffset = stride - (sourceBitmapData.Width * pixelByteWidth);
            Color color;
            int pixel1Intensity;
            int pixel2Intensity;
            long variation = 0;
            long threshhold = mVariationThreshhold.ValueAsLong();
            // (-1,-1) is the "no more points" sentinel produced by the ROI iterators.
            Point lastPoint = new Point(-1, -1);
            Point currentPoint = new Point(-1, -1);
            if (mTestXAxis)
            {
                TestExecution().LogMessageWithTimeFromTrigger("IntensityVariation " + Name + " testing X Axis");
                mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);
                unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
                {
                    byte *sourcePointer;
                    byte *markedPointer;
                    sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                    sourcePointer += (currentPoint.Y * stride) + (currentPoint.X * pixelByteWidth); // adjust to current point
                    color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]); // Array index 0 is blue, 1 is green, 2 is red, 3 is alpha
                    pixel1Intensity = (int)(color.GetBrightness() * 100); // brightness scaled to 0..100
                    lastPoint.X = currentPoint.X;
                    lastPoint.Y = currentPoint.Y;
                    mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
                    while (currentPoint.X != -1 && currentPoint.Y != -1)
                    {
                        sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                        sourcePointer += (currentPoint.Y * stride) + (currentPoint.X * pixelByteWidth); // adjust to current point
                        color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]); // Array index 0 is blue, 1 is green, 2 is red, 3 is alpha
                        pixel2Intensity = (int)(color.GetBrightness() * 100);
                        // Only compare neighbors on the same row; a row change is not a
                        // meaningful adjacency.
                        if (currentPoint.Y == lastPoint.Y)
                        {
                            variation = Math.Abs(pixel2Intensity - pixel1Intensity);
                            if (variation > threshhold)
                            {
                                resultValue += variation;
                                if (mMarkedImage != null)
                                {
                                    // NOTE(review): marked image addressed with the
                                    // SOURCE stride; assumes both bitmaps share
                                    // dimensions/format — confirm.
                                    markedPointer = (byte *)markedBitmapData.Scan0;
                                    markedPointer += (currentPoint.Y * stride) + (currentPoint.X * pixelByteWidth);
                                    markedPointer[3] = mMarkColor.A;
                                    markedPointer[2] = mMarkColor.R;
                                    markedPointer[1] = mMarkColor.G;
                                    markedPointer[0] = mMarkColor.B;
                                }
                            }
                        }
                        pixel1Intensity = pixel2Intensity;
                        lastPoint.X = currentPoint.X;
                        lastPoint.Y = currentPoint.Y;
                        mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
                    }
                } // end unsafe block
            }
            if (mTestYAxis)
            {
                TestExecution().LogMessageWithTimeFromTrigger("IntensityVariation " + Name + " testing Y Axis");
                mROI.GetFirstPointOnYAxis(mSourceImage, ref currentPoint);
                unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
                {
                    byte *sourcePointer;
                    byte *markedPointer;
                    sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                    sourcePointer += (currentPoint.Y * stride) + (currentPoint.X * pixelByteWidth); // adjust to current point
                    color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]); // Array index 0 is blue, 1 is green, 2 is red, 3 is alpha
                    pixel1Intensity = (int)(color.GetBrightness() * 100);
                    lastPoint.X = currentPoint.X;
                    lastPoint.Y = currentPoint.Y;
                    mROI.GetNextPointOnYAxis(mSourceImage, ref currentPoint);
                    while (currentPoint.X != -1 && currentPoint.Y != -1)
                    {
                        sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                        sourcePointer += (currentPoint.Y * stride) + (currentPoint.X * pixelByteWidth); // adjust to current point
                        color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]); // Array index 0 is blue, 1 is green, 2 is red, 3 is alpha
                        pixel2Intensity = (int)(color.GetBrightness() * 100);
                        // Only compare neighbors in the same column.
                        if (currentPoint.X == lastPoint.X)
                        {
                            variation = Math.Abs(pixel2Intensity - pixel1Intensity);
                            if (variation > threshhold)
                            {
                                resultValue += variation;
                                if (mMarkedImage != null)
                                {
                                    markedPointer = (byte *)markedBitmapData.Scan0;
                                    markedPointer += (currentPoint.Y * stride) + (currentPoint.X * pixelByteWidth);
                                    markedPointer[3] = mMarkColor.A;
                                    markedPointer[2] = mMarkColor.R;
                                    markedPointer[1] = mMarkColor.G;
                                    markedPointer[0] = mMarkColor.B;
                                }
                            }
                        }
                        pixel1Intensity = pixel2Intensity;
                        lastPoint.X = currentPoint.X;
                        lastPoint.Y = currentPoint.Y;
                        mROI.GetNextPointOnYAxis(mSourceImage, ref currentPoint);
                    }
                } // end unsafe block
            }
        }
        finally
        {
            sourceBitmap.UnlockBits(sourceBitmapData);
            if (markedBitmap != null)
            {
                markedBitmap.UnlockBits(markedBitmapData);
            }
        }
    }
    mResult.SetValue(resultValue);
    mResult.SetIsComplete();
    if (mMarkedImage != null)
    {
        mMarkedImage.SetIsComplete();
    }
    TestExecution().LogMessageWithTimeFromTrigger("IntensityVariation " + Name + " completed");
}
// Bytes per pixel, fixed by PixelFormat.Format32bppArgb (B,G,R,A byte order);
// see http://www.bobpowell.net/lockingbits.htm
public static readonly int PIXEL_BYTE_WIDTH = 4;

/// <summary>
/// Scans the ROI on a coarse grid for a pixel matching the target color, then grows
/// four EdgeSearch scans from each hit to measure the blob. A blob is accepted only
/// when its width/height fall within the configured min/max limits (derived either
/// from expected size +/- allowed variation, or explicit min/max overrides).
/// Publishes the accepted blob's bounds, or -1 on all four outputs when none found.
/// </summary>
public override void DoWork()
{
    DateTime startTime = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);
    // -1 = "not found" sentinels; leftEdge also doubles as the scan loops' stop flag.
    int leftEdge = -1;
    int rightEdge = -1;
    int topEdge = -1;
    int bottomEdge = -1;
    int minObjectHeight = -1;
    int maxObjectHeight = -1;
    int minObjectWidth = -1;
    int maxObjectWidth = -1;
    // Size limits: expected size +/- percentage variation first, then explicit
    // min/max values override whichever of those were also supplied.
    double allowedObjectSizeVariation = 0;
    if (mAllowedSizeVariation != null)
    {
        allowedObjectSizeVariation = mAllowedSizeVariation.ValueAsDecimal() / 100.0;
    }
    if (mExpectedObjectHeight != null)
    {
        minObjectHeight = (int)(mExpectedObjectHeight.ValueAsDecimal() * (1 - allowedObjectSizeVariation));
        maxObjectHeight = (int)(mExpectedObjectHeight.ValueAsDecimal() * (1 + allowedObjectSizeVariation));
    }
    if (mExpectedObjectWidth != null)
    {
        minObjectWidth = (int)(mExpectedObjectWidth.ValueAsDecimal() * (1 - allowedObjectSizeVariation));
        maxObjectWidth = (int)(mExpectedObjectWidth.ValueAsDecimal() * (1 + allowedObjectSizeVariation));
    }
    if (mMinObjectHeight != null)
    {
        minObjectHeight = (int)mMinObjectHeight.ValueAsLong();
    }
    if (mMinObjectWidth != null)
    {
        minObjectWidth = (int)mMinObjectWidth.ValueAsLong();
    }
    if (mMaxObjectHeight != null)
    {
        maxObjectHeight = (int)mMaxObjectHeight.ValueAsLong();
    }
    if (mMaxObjectWidth != null)
    {
        maxObjectWidth = (int)mMaxObjectWidth.ValueAsLong();
    }
    if (minObjectHeight < 0)
    {
        TestExecution().LogErrorWithTimeFromTrigger("A minimum height for the object hasn't been defined within '" + Name + "'.");
    }
    else if (maxObjectHeight < 0)
    {
        TestExecution().LogErrorWithTimeFromTrigger("A maximum height for the object hasn't been defined within '" + Name + "'.");
    }
    else if (minObjectWidth < 0)
    {
        TestExecution().LogErrorWithTimeFromTrigger("A minimum width for the object hasn't been defined within '" + Name + "'.");
    }
    else if (maxObjectWidth < 0)
    {
        TestExecution().LogErrorWithTimeFromTrigger("A maximum width for the object hasn't been defined within '" + Name + "'.");
    }
    else if (mPrerequisite != null && !mPrerequisite.ValueAsBoolean())
    {
        TestExecution().LogMessageWithTimeFromTrigger(Name + ": prerequisites not met. Skipping.");
    }
    else if (mSourceImage.Bitmap == null)
    {
        TestExecution().LogErrorWithTimeFromTrigger("Source image for '" + Name + "' does not exist.");
    }
    else
    {
        // Coarse grid step: 30% of the minimum object size, so no acceptable blob
        // can be skipped over entirely.
        int searchXStep = Math.Max(1, (int)(minObjectWidth * 0.3));
        int searchYStep = Math.Max(1, (int)(minObjectHeight * 0.3));
        int startX = (int)mROI.Left + searchXStep;
        int startY = (int)mROI.Top + searchYStep;
        if (startX < 0 || startX >= mSourceImage.Bitmap.Width || startY < 0 || startY >= mSourceImage.Bitmap.Height)
        {
            TestExecution().LogErrorWithTimeFromTrigger("Start position for '" + Name + "' isn't within the image bounds; start=" + startX + "," + startY + "; image size=" + mSourceImage.Bitmap.Width + "x" + mSourceImage.Bitmap.Height);
        }
        else
        {
            int stepSize = (int)mStepSize.ValueAsLong();
            bool detailedSearchAtEnd = mDetailedSearch.ValueAsBoolean();
            // sourceBitmap/markedBitmap and the *BitmapData/stride variables below
            // are fields (declared elsewhere in this class), not locals.
            sourceBitmap = SourceImage.Bitmap;
            if (mImageMarkingEnabled.ValueAsBoolean() && mImageToMark != null && mImageToMark.Bitmap != null)
            {
                markedBitmap = mImageToMark.Bitmap;
            }
            // TODO: replace LockBits implementation with array pointer
            try
            {
                sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PIXEL_FORMAT);
                if (markedBitmap != null)
                {
                    markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PIXEL_FORMAT);
                }
                sourceStride = sourceBitmapData.Stride;
                sourceStrideOffset = sourceStride - (sourceBitmapData.Width * PIXEL_BYTE_WIDTH);
                unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
                {
                    byte *sourcePointer;
                    byte *markedPointer;
                    // Grow (or lazily create) the shared visited-pixel record to at
                    // least the bitmap's size.
                    if (mFindBlobOfSizeAndColorDefinition.SearchRecord.GetLength(0) < sourceBitmap.Width || mFindBlobOfSizeAndColorDefinition.SearchRecord.GetLength(1) < sourceBitmap.Height)
                    {
                        mFindBlobOfSizeAndColorDefinition.SearchRecord = new short[sourceBitmap.Width, sourceBitmap.Height];
                        mFindBlobOfSizeAndColorDefinition.LastMarkerUsed = 0;
                    }
                    // Marker counter about to wrap; re-initialize the whole record.
                    if (mFindBlobOfSizeAndColorDefinition.LastMarkerUsed == short.MaxValue)
                    {
                        short initialValue = short.MinValue + 1; // we don't use short.MinValue since that is a special case (see ClearSearchRecordArea(); before switching from int to short, we were initializing to 0 here and -1 in ClearSearchRecordArea())
                        for (int x = 0; x < mFindBlobOfSizeAndColorDefinition.SearchRecord.GetLength(0); x++)
                        {
                            for (int y = 0; y < mFindBlobOfSizeAndColorDefinition.SearchRecord.GetLength(1); y++)
                            {
                                mFindBlobOfSizeAndColorDefinition.SearchRecord[x, y] = initialValue;
                            }
                        }
                        mFindBlobOfSizeAndColorDefinition.LastMarkerUsed = 0;
                    }
                    mFindBlobOfSizeAndColorDefinition.LastMarkerUsed++;
                    // Coarse scan of the ROI; stops as soon as a blob is accepted
                    // (leftEdge >= 0 ends both loops).
                    for (int x = startX; x < ROI.Right && leftEdge < 0; x += searchXStep)
                    {
                        for (int y = startY; y < ROI.Bottom && leftEdge < 0; y += searchYStep)
                        {
                            if (markedBitmap != null)
                            {
                                // Paint each probed grid point lime for diagnostics.
                                // NOTE(review): marked image addressed with the SOURCE
                                // stride; assumes both bitmaps share dimensions/format.
                                markedPointer = (byte *)markedBitmapData.Scan0; // init to first byte of image
                                markedPointer += (y * sourceStride) + (x * PIXEL_BYTE_WIDTH); // adjust to current point
                                markedPointer[3] = Color.Lime.A;
                                markedPointer[2] = Color.Lime.R;
                                markedPointer[1] = Color.Lime.G;
                                markedPointer[0] = Color.Lime.B;
                            }
                            bool failed = false;
                            sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                            sourcePointer += (y * sourceStride) + (x * PIXEL_BYTE_WIDTH); // adjust to current point
                            // Bytes are laid out B,G,R,A; alpha is ignored for matching.
                            Color theColor = Color.FromArgb(sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                            if (mColorMatchDefinition.Matches(theColor))
                            {
                                TestExecution().LogMessageWithTimeFromTrigger(Name + ": found match at " + x + "," + y + "; beginning search of area");
                                // Four edge scans growing away from the hit, clamped to
                                // the ROI, sharing the visited-pixel record.
                                EdgeSearch topEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.X, y, -1 * stepSize, mROI.Top, x, mROI.Left, mROI.Right, mFindBlobOfSizeAndColorDefinition.SearchRecord, mFindBlobOfSizeAndColorDefinition.LastMarkerUsed);
                                EdgeSearch bottomEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.X, y, +1 * stepSize, mROI.Bottom, x, mROI.Left, mROI.Right, mFindBlobOfSizeAndColorDefinition.SearchRecord, mFindBlobOfSizeAndColorDefinition.LastMarkerUsed);
                                EdgeSearch leftEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.Y, x, -1 * stepSize, mROI.Left, y, mROI.Top, mROI.Bottom, mFindBlobOfSizeAndColorDefinition.SearchRecord, mFindBlobOfSizeAndColorDefinition.LastMarkerUsed);
                                EdgeSearch rightEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.Y, x, +1 * stepSize, mROI.Right, y, mROI.Top, mROI.Bottom, mFindBlobOfSizeAndColorDefinition.SearchRecord, mFindBlobOfSizeAndColorDefinition.LastMarkerUsed);
                                // Cross-link the searches and set their size limits.
                                topEdgeSearch.minSideEdge = leftEdgeSearch;
                                topEdgeSearch.maxSideEdge = rightEdgeSearch;
                                topEdgeSearch.opposingEdge = bottomEdgeSearch;
                                bottomEdgeSearch.minSideEdge = leftEdgeSearch;
                                bottomEdgeSearch.maxSideEdge = rightEdgeSearch;
                                bottomEdgeSearch.opposingEdge = topEdgeSearch;
                                leftEdgeSearch.minSideEdge = topEdgeSearch;
                                leftEdgeSearch.maxSideEdge = bottomEdgeSearch;
                                leftEdgeSearch.opposingEdge = rightEdgeSearch;
                                rightEdgeSearch.minSideEdge = topEdgeSearch;
                                rightEdgeSearch.maxSideEdge = bottomEdgeSearch;
                                rightEdgeSearch.opposingEdge = leftEdgeSearch;
                                topEdgeSearch.maxSize = maxObjectHeight;
                                bottomEdgeSearch.maxSize = maxObjectHeight;
                                leftEdgeSearch.maxSize = maxObjectWidth;
                                rightEdgeSearch.maxSize = maxObjectWidth;
                                // Round-robin the searches, bailing early when the blob
                                // grows too big or runs into the ROI boundary.
                                do
                                {
                                    if (!topEdgeSearch.Done()) { topEdgeSearch.TestLine(); }
                                    if (!bottomEdgeSearch.Done()) { bottomEdgeSearch.TestLine(); }
                                    if (!leftEdgeSearch.Done()) { leftEdgeSearch.TestLine(); }
                                    if (!rightEdgeSearch.Done()) { rightEdgeSearch.TestLine(); }
                                    if (bottomEdgeSearch.lastPosWhereObjectSeen - topEdgeSearch.lastPosWhereObjectSeen > maxObjectHeight)
                                    {
                                        TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because y-axis size exceeded; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                        failed = true;
                                        break;
                                    }
                                    if (rightEdgeSearch.lastPosWhereObjectSeen - leftEdgeSearch.lastPosWhereObjectSeen > maxObjectWidth)
                                    {
                                        TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because x-axis size exceeded; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                        failed = true;
                                        break;
                                    }
                                    if (rightEdgeSearch.lastPosWhereObjectSeen == mROI.Right)
                                    {
                                        TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because ran into right edge of ROI; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                        failed = true;
                                        break;
                                    }
                                    if (leftEdgeSearch.lastPosWhereObjectSeen == mROI.Left)
                                    {
                                        TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because ran into left edge of ROI; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                        failed = true;
                                        break;
                                    }
                                    if (topEdgeSearch.lastPosWhereObjectSeen == mROI.Top)
                                    {
                                        TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because ran into top edge of ROI; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                        failed = true;
                                        break;
                                    }
                                    if (bottomEdgeSearch.lastPosWhereObjectSeen == mROI.Bottom)
                                    {
                                        TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because ran into bottom edge of ROI; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                        failed = true;
                                        break;
                                    }
                                } while (!(topEdgeSearch.Done() && bottomEdgeSearch.Done() && leftEdgeSearch.Done() && rightEdgeSearch.Done()));
                                if (detailedSearchAtEnd)
                                {
                                    // Unimplemented refinement pass.
                                    //topEdgeSearch.mStep
                                    //TODO: finish
                                    //TODO: recheck if object too big
                                }
                                if (leftEdgeSearch.abort || rightEdgeSearch.abort || topEdgeSearch.abort || bottomEdgeSearch.abort)
                                {
                                    TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because an edge search aborted (probably ran into an already searched pixel); top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                    failed = true;
                                }
                                if (bottomEdgeSearch.lastPosWhereObjectSeen - topEdgeSearch.lastPosWhereObjectSeen < minObjectHeight)
                                {
                                    TestExecution().LogMessageWithTimeFromTrigger(Name + ": excluding object since size too small on y-axis; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                    failed = true;
                                    // if the object/blob was too small, then if we re-check one of the pixels during a future search we don't want to abort on the assumption the object must be too big
                                    // ...this issue came up in Head Rest's Weld Orrient 9/18/08...in certain cases we would first find a small blob to the left of the weld, but would abort because it was too small...it's edges would be close to, but below the search color boundary. Then we would find the main chunk of the light, which would wrap partially around the small blob (to the right and below)....during it's not-so-smart-but-fast search it would retest a pixel of the small blob and immediately abort...mistakenly assuming it bumped into a previous "too large" blob.
                                    // ...it could bump into a previous "too small" blob because of the way to search within the entire bounding rectangle...as a simplified method of catching "U" or "Z" shaped blobs (ie ones that double back)
                                    ClearSearchRecordArea(leftEdgeSearch.lastPosWhereObjectSeen, rightEdgeSearch.lastPosWhereObjectSeen, topEdgeSearch.lastPosWhereObjectSeen, bottomEdgeSearch.lastPosWhereObjectSeen);
                                }
                                else if (rightEdgeSearch.lastPosWhereObjectSeen - leftEdgeSearch.lastPosWhereObjectSeen < minObjectWidth)
                                {
                                    TestExecution().LogMessageWithTimeFromTrigger(Name + ": excluding object since size too small on x-axis; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                    failed = true;
                                    // if the object/blob was too small, then if we re-check one of the pixels during a future search we don't want to abort on the assumption the object must be too big
                                    // ...this issue came up in Head Rest's Weld Orrient 9/18/08...in certain cases we would first find a small blob to the left of the weld, but would abort because it was too small...it's edges would be close to, but below the search color boundary. Then we would find the main chunk of the light, which would wrap partially around the small blob (to the right and below)....during it's not-so-smart-but-fast search it would retest a pixel of the small blob and immediately abort...mistakenly assuming it bumped into a previous "too large" blob.
                                    // ...it could bump into a previous "too small" blob because of the way to search within the entire bounding rectangle...as a simplified method of catching "U" or "Z" shaped blobs (ie ones that double back)
                                    ClearSearchRecordArea(leftEdgeSearch.lastPosWhereObjectSeen, rightEdgeSearch.lastPosWhereObjectSeen, topEdgeSearch.lastPosWhereObjectSeen, bottomEdgeSearch.lastPosWhereObjectSeen);
                                }
                                if (!failed)
                                {
                                    TestExecution().LogMessageWithTimeFromTrigger(Name + ": selected object bounded by: top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                    leftEdge = leftEdgeSearch.lastPosWhereObjectSeen;
                                    rightEdge = rightEdgeSearch.lastPosWhereObjectSeen;
                                    topEdge = topEdgeSearch.lastPosWhereObjectSeen;
                                    bottomEdge = bottomEdgeSearch.lastPosWhereObjectSeen;
                                }
                            }
                        }
                    }
                } // end unsafe block
            }
            catch (Exception e)
            {
                TestExecution().LogMessageWithTimeFromTrigger("ERROR: Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
            }
            finally
            {
                sourceBitmap.UnlockBits(sourceBitmapData);
                if (markedBitmap != null)
                {
                    markedBitmap.UnlockBits(markedBitmapData);
                }
            }
        }
    } // end main block ("else" after all initial setup error checks)
    // Bounds are published even on failure (-1) so downstream steps are not blocked.
    mLeftBound.SetValue(leftEdge);
    mLeftBound.SetIsComplete();
    mRightBound.SetValue(rightEdge);
    mRightBound.SetIsComplete();
    mTopBound.SetValue(topEdge);
    mTopBound.SetIsComplete();
    mBottomBound.SetValue(bottomEdge);
    mBottomBound.SetIsComplete();
    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    if (leftEdge < 0)
    {
        TestExecution().LogMessageWithTimeFromTrigger(Name + " FAILED TO FIND BLOB");
    }
    if (mAutoSave)
    {
        try
        {
            // NOTE(review): cast to FindRadialLineDefinition looks copy/pasted from
            // another analysis class; confirm Definition() really returns a
            // FindRadialLineDefinition here, otherwise AutoSave throws
            // InvalidCastException.
            string filePath = ((FindRadialLineDefinition)Definition()).AutoSavePath;
            mSourceImage.Save(filePath, Name, true);
            if (mImageToMark != null)
            {
                mImageToMark.Save(filePath, Name, "_marked_" + leftEdge + "_" + rightEdge + "_" + topEdge + "_" + bottomEdge);
            }
            TestExecution().LogMessageWithTimeFromTrigger("Snapshot saved");
        }
        catch (ArgumentException e)
        {
            Project().Window().logMessage("ERROR: " + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger(e.Message);
        }
        catch (Exception e)
        {
            Project().Window().logMessage("ERROR: Unable to AutoSave snapshot from " + Name + ". Ensure path valid and disk not full. Low-level message=" + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger("Unable to AutoSave snapshot from " + Name + ". Ensure path valid and disk not full.");
        }
    }
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + " | took " + computeTime.TotalMilliseconds + "ms");
}
public override void DoWork()
{
    // NOTE: this code was adapted from FindObjectCenterOnLine; one difference is that it
    // searched until it found the object, then quit. We search the entire path.
    //
    // Walks the configured search line pixel by pixel, records the first and last pixels
    // whose color matches mSearchColorDefinition, and reports the midpoint of that span as
    // the object center. Results are (-1,-1) when validation fails or nothing matched.
    DateTime startTime = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);
    int resultX = -1;
    int resultY = -1;
    if (mSourceImage.Bitmap == null)
    {
        TestExecution().LogErrorWithTimeFromTrigger("source image for '" + Name + "' does not exist.");
    }
    else if (mSearchPath == null)
    {
        TestExecution().LogErrorWithTimeFromTrigger("search line for '" + Name + "' isn't defined.");
    }
    else if (mSearchPath.StartX == null || mSearchPath.StartY == null || mSearchPath.EndX == null || mSearchPath.EndY == null)
    {
        TestExecution().LogErrorWithTimeFromTrigger("search line '" + mSearchPath.Name + "' for '" + Name + "' isn't fully defined.");
    }
    else if (mSearchPath.StartX.ValueAsLong() < 0 || mSearchPath.StartX.ValueAsLong() >= mSourceImage.Bitmap.Width ||
             mSearchPath.StartY.ValueAsLong() < 0 || mSearchPath.StartY.ValueAsLong() >= mSourceImage.Bitmap.Height)
    {
        TestExecution().LogErrorWithTimeFromTrigger("The search line start point for '" + Name + "' isn't valid: " + mSearchPath.StartX.ValueAsLong() + "," + mSearchPath.StartY.ValueAsLong());
    }
    else if (mSearchPath.EndX.ValueAsLong() < 0 || mSearchPath.EndX.ValueAsLong() >= mSourceImage.Bitmap.Width ||
             mSearchPath.EndY.ValueAsLong() < 0 || mSearchPath.EndY.ValueAsLong() >= mSourceImage.Bitmap.Height)
    {
        TestExecution().LogErrorWithTimeFromTrigger("The search line end point for '" + Name + "' isn't valid: " + mSearchPath.EndX.ValueAsLong() + "," + mSearchPath.EndY.ValueAsLong());
    }
    else if (Math.Abs(mSearchPath.StartX.ValueAsLong() - mSearchPath.EndX.ValueAsLong()) < 1 &&
             Math.Abs(mSearchPath.StartY.ValueAsLong() - mSearchPath.EndY.ValueAsLong()) < 1)
    {
        TestExecution().LogErrorWithTimeFromTrigger("Search path is too small.");
    }
    else
    {
        long width = Math.Abs(mSearchPath.StartX.ValueAsLong() - mSearchPath.EndX.ValueAsLong()) + 1;
        long height = Math.Abs(mSearchPath.StartY.ValueAsLong() - mSearchPath.EndY.ValueAsLong()) + 1;
        double run;
        double rise;
        double angle;
        double sineOfAngle = -1;
        double cosineOfAngle = -1;
        LineType lineType;
        long length;
        startX = mSearchPath.StartX.ValueAsLong();
        startY = mSearchPath.StartY.ValueAsLong();
        endX = mSearchPath.EndX.ValueAsLong();
        endY = mSearchPath.EndY.ValueAsLong();
        if (mSearchPath.StartY.ValueAsLong() == mSearchPath.EndY.ValueAsLong()) // horizontal line (no Y deviation)
        {
            lineType = LineType.Horizontal;
            length = width;
            ySearchChange = 0;
            if (mSearchPath.StartX.ValueAsLong() < mSearchPath.EndX.ValueAsLong())
            {
                xSearchChange = 1;
            }
            else
            {
                xSearchChange = -1;
            }
        }
        else if (mSearchPath.StartX.ValueAsLong() == mSearchPath.EndX.ValueAsLong()) // vertical line (no X deviation)
        {
            lineType = LineType.Vertical;
            length = height;
            xSearchChange = 0;
            if (mSearchPath.StartY.ValueAsLong() < mSearchPath.EndY.ValueAsLong()) // line is down
            {
                ySearchChange = 1;
            }
            else // line is up
            {
                ySearchChange = -1;
            }
        }
        else // slanted line
        {
            run = mSearchPath.EndX.ValueAsLong() - mSearchPath.StartX.ValueAsLong();
            rise = mSearchPath.EndY.ValueAsLong() - mSearchPath.StartY.ValueAsLong();
            // BUGFIX: Math.Atan(rise / run) loses the sign of 'run' (the result always lies in
            // quadrant I/IV, so the cosine is always positive), which made the walk head AWAY
            // from the end point whenever EndX < StartX and abort at the range check below.
            // Math.Atan2 preserves the full quadrant information.
            angle = Math.Atan2(rise, run);
            sineOfAngle = Math.Sin(angle);
            cosineOfAngle = Math.Cos(angle);
            lineType = LineType.Slanted;
            length = (long)Math.Sqrt(height * height + width * width);
        }
        leftEdgeOfSearch = Math.Max(0, Math.Min(startX, endX));
        rightEdgeOfSearch = Math.Min(mSourceImage.Bitmap.Width, Math.Max(startX, endX));
        topEdgeOfSearch = Math.Max(0, Math.Min(startY, endY));
        bottomEdgeOfSearch = Math.Min(mSourceImage.Bitmap.Height, Math.Max(startY, endY));
        x = (int)startX;
        y = (int)startY;
        TestExecution().LogMessage(Name + " starting at " + x + "," + y);
        abort = false;
        Point firstPoint = new Point(-1, -1);
        Point lastPoint = new Point(-1, -1);
        for (int searchIndex = 0; searchIndex <= length && !abort; searchIndex++)
        {
            switch (lineType)
            {
                case LineType.Horizontal:
                    x = (int)(startX + (searchIndex * xSearchChange));
                    break;
                case LineType.Vertical:
                    y = (int)(startY + (searchIndex * ySearchChange));
                    break;
                case LineType.Slanted:
                    x = (int)(startX + Math.Round(searchIndex * cosineOfAngle));
                    y = (int)(startY + Math.Round(searchIndex * sineOfAngle));
                    break;
            }
            if (x < leftEdgeOfSearch || x > rightEdgeOfSearch || y < topEdgeOfSearch || y > bottomEdgeOfSearch)
            {
                TestExecution().LogErrorWithTimeFromTrigger(Name + " aborting at " + x + "," + y + ". Out of range from path for some reason.");
                abort = true;
            }
            else
            {
                pixelColor = mSourceImage.GetColor(x, y);
                if (mSearchColorDefinition.Matches(pixelColor))
                {
                    TestExecution().LogMessage(Name + " found color match at " + x + "," + y);
                    if (firstPoint.X < 0)
                    {
                        firstPoint.X = x;
                        firstPoint.Y = y;
                    }
                    lastPoint.X = x;
                    lastPoint.Y = y;
                }
                // non-matching pixels are simply skipped
            } // end if for x,y verification
        } // end search loop
        // Midpoint of the first and last match; stays -1 when both points are still (-1,-1).
        resultX = (firstPoint.X + lastPoint.X) / 2;
        resultY = (firstPoint.Y + lastPoint.Y) / 2;
    } // end main block ("else" after all initial setup error checks)
    mResultX.SetValue(resultX);
    mResultY.SetValue(resultY);
    mResultX.SetIsComplete();
    mResultY.SetIsComplete();
    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    TestExecution().LogMessage(Name + " computed object center at " + resultX + "," + resultY);
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + " | took " + computeTime.TotalMilliseconds + "ms");
}
public override void DoWork()
{
    // Counts the pixels inside the ROI (clamped to the image bounds) whose color matches
    // mColorMatcher. Optionally paints each matching pixel magenta on the marked image.
    // Result: mMatchCount is published through mResult.
    DateTime startTime = DateTime.Now;
    Bitmap sourceBitmap = SourceImage.Bitmap;
    Bitmap markedBitmap = null;
    TestExecution().LogMessageWithTimeFromTrigger("ColorMatchCount " + Name + " started");
    // NOTE(review): historically all ROI were taken at 640x480 and would need scaling to the
    // actual image dimensions — the clamping below only bounds the ROI, it does not scale it.
    if (mCreateMarkedImage && mImageToMark != null && mImageToMark.Bitmap != null)
    {
        markedBitmap = mImageToMark.Bitmap;
    }
    mMatchCount = 0;
    if (sourceBitmap != null)
    {
        // for LockBits see http://www.bobpowell.net/lockingbits.htm
        BitmapData sourceBitmapData = null;
        BitmapData markedBitmapData = null;
        try
        {
            sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
            if (markedBitmap != null)
            {
                markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
            }
            const int pixelByteWidth = 4; // determined by PixelFormat.Format32bppArgb
            int stride = sourceBitmapData.Stride;
            // Clamp the ROI so the scan never leaves the bitmap.
            int bottom = Math.Min(sourceBitmap.Height - 1, ROI.Bottom);
            int top = Math.Max(0, ROI.Top);
            int left = Math.Max(0, ROI.Left);
            int right = Math.Min(sourceBitmap.Width - 1, ROI.Right);
            Color color;
            unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp
            {
                byte *sourcePointer;
                byte *markedPointer;
                for (int j = top; j <= bottom; j++)
                {
                    sourcePointer = (byte *)sourceBitmapData.Scan0;          // first byte of image
                    sourcePointer += (j * stride) + (left * pixelByteWidth); // first byte of this ROI row
                    for (int i = left; i <= right; i++)
                    {
                        // Byte layout: index 0 is blue, 1 is green, 2 is red, 3 is alpha.
                        color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                        if (mColorMatcher.Matches(color))
                        {
                            mMatchCount++;
                            if (markedBitmap != null)
                            {
                                markedPointer = (byte *)markedBitmapData.Scan0;
                                // BUGFIX: index into the marked bitmap with ITS OWN stride — the
                                // source stride may differ if the bitmaps' widths differ.
                                markedPointer += (j * markedBitmapData.Stride) + (i * pixelByteWidth);
                                markedPointer[3] = Color.Magenta.A;
                                markedPointer[2] = Color.Magenta.R;
                                markedPointer[1] = Color.Magenta.G;
                                markedPointer[0] = Color.Magenta.B;
                            }
                        }
                        sourcePointer += pixelByteWidth; // advance to the next pixel to the right
                    }
                }
            } // end unsafe block
        }
        finally
        {
            // BUGFIX: guard the unlocks — if LockBits threw, the data objects are still null
            // and UnlockBits(null) would itself throw out of the finally block.
            if (sourceBitmapData != null)
            {
                sourceBitmap.UnlockBits(sourceBitmapData);
            }
            if (markedBitmap != null && markedBitmapData != null)
            {
                markedBitmap.UnlockBits(markedBitmapData);
            }
        }
    }
    mResult.SetValue(mMatchCount);
    mResult.SetIsComplete();
    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    TestExecution().LogMessageWithTimeFromTrigger(Name + " took " + computeTime.TotalMilliseconds + "ms");
}
public override void DoWork()
{
    // Finds a corner by walking along a "following edge" while probing sideways for a
    // "target edge": it steps searchDistFromFollowingEdge pixels away from the following
    // edge (so slight non-parallelism doesn't cause false hits), probes up to searchLength
    // pixels, then re-acquires the following edge and updates the slope estimate. When the
    // target edge is hit it backs up backupDistance pixels, re-finds the following edge and
    // extrapolates the corner from the accumulated slope. Results are (-1,-1) on failure.
    DateTime startTime = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);
    int resultX = -1;
    int resultY = -1;
    if (mSourceImage.Bitmap == null)
    {
        TestExecution().LogMessage("ERROR: source image does not exist.");
    }
    else if (mStartX.ValueAsLong() < 0 || mStartX.ValueAsLong() >= mSourceImage.Bitmap.Width ||
             mStartY.ValueAsLong() < 0 || mStartY.ValueAsLong() >= mSourceImage.Bitmap.Height)
    {
        TestExecution().LogMessage("ERROR: The search start point isn't valid: " + mStartX.ValueAsLong() + "," + mStartY.ValueAsLong());
    }
    else
    {
        // BUGFIX: initialize 'abort' once, up front. It used to be reset to false right
        // before the search loop, which silently erased the aborts requested by the two
        // direction switches and by the start-position validation below, letting the
        // search run with an undefined direction or a bad start point.
        abort = false;
        switch (mSearchDirection)
        {
            case Direction.Left: xSearchChange = -1; ySearchChange = 0; break;
            case Direction.Right: xSearchChange = 1; ySearchChange = 0; break;
            case Direction.Up: xSearchChange = 0; ySearchChange = -1; break;
            case Direction.Down: xSearchChange = 0; ySearchChange = 1; break;
            case Direction.NotDefined:
                TestExecution().LogMessage("ERROR: Search direction not defined.");
                abort = true;
                break;
            default:
                TestExecution().LogMessage("ERROR: Unsupported Search direction; direction=" + mSearchDirection);
                abort = true;
                break;
        }
        switch (mTargetEdgeDirection)
        {
            case Direction.Left: xStepAwayChange = -1; yStepAwayChange = 0; break;
            case Direction.Right: xStepAwayChange = 1; yStepAwayChange = 0; break;
            case Direction.Up: xStepAwayChange = 0; yStepAwayChange = -1; break;
            case Direction.Down: xStepAwayChange = 0; yStepAwayChange = 1; break;
            case Direction.NotDefined:
                TestExecution().LogMessage("ERROR: Target Edge Direction not defined.");
                abort = true;
                break;
            default:
                TestExecution().LogMessage("ERROR: Unsupported Target Edge Direction; direction=" + mTargetEdgeDirection);
                abort = true;
                break;
        }
        x = (int)mStartX.ValueAsLong();
        y = (int)mStartY.ValueAsLong();
        pixelColor = mSourceImage.Bitmap.GetPixel(x, y);
        if (!mFollowingEdgeColorDefinition.Matches(pixelColor))
        {
            TestExecution().LogMessage("ERROR: Start position isn't on the following edge.");
            abort = true;
            // TODO: try to find it by searching in the opposite direction of the target edge for about 5-10 pixels.
        }
        lastXOnFollowingEdge = x;
        lastYOnFollowingEdge = y;
        // Stay roughly 3/4 of the target edge's width away from the following edge (capped at 10 px)
        // so we don't accidentally run into the following edge if it isn't exactly parallel.
        searchDistFromFollowingEdge = Math.Min(10, (int)(mTargetEdgeWidth.ValueAsLong() * 0.75));
        searchLength = searchDistFromFollowingEdge;
        maxSearchLength = searchLength; // this will be updated with the slope
        int searchIndex = 0;
        TestExecution().LogMessage("Starting at " + x + "," + y + " searchLength=" + searchLength + " searchDistFromFollowingEdge=" + searchDistFromFollowingEdge + " xStepAwayChange=" + xStepAwayChange + " yStepAwayChange=" + yStepAwayChange + " xSearchChange=" + xSearchChange + " ySearchChange=" + ySearchChange);
        foundTarget = false;
        while (!foundTarget && !abort && x >= 0 && x < mSourceImage.Bitmap.Width && y >= 0 && y < mSourceImage.Bitmap.Height)
        {
            // Move away from the following edge and one pixel along it to start probing for
            // the target edge.
            x = lastXOnFollowingEdge + xSearchChange + (xStepAwayChange * searchDistFromFollowingEdge);
            y = lastYOnFollowingEdge + ySearchChange + (yStepAwayChange * searchDistFromFollowingEdge);
            // Probe up to searchLength pixels along the following edge for the target edge.
            searchIndex = 0;
            int consecutiveUnexpectedColors = 0;
            int totalUnexpectedColors = 0;
            while (!foundTarget && !abort && searchIndex < searchLength && x >= 0 && x < mSourceImage.Bitmap.Width && y >= 0 && y < mSourceImage.Bitmap.Height)
            {
                pixelColor = SourceImage.Bitmap.GetPixel(x, y);
                if (mTargetEdgeColorDefinition.Matches(pixelColor))
                {
                    consecutiveUnexpectedColors = 0;
                    TestExecution().LogMessage("Found target at x=" + x + " y=" + y);
                    foundTarget = true;
                }
                else if (mFollowingEdgeColorDefinition.Matches(pixelColor))
                {
                    // NOTE: must test this after the target edge color since these may be the exact same color defs!
                    TestExecution().LogMessage("ERROR: Unexpectedly ran into Following Edge.");
                    abort = true;
                    // TODO: does it also match the background? if so, throw an error about definition overlap
                    // TODO: handle differently if slope computed and we were near following edge?
                }
                else
                {
                    if (!mSearchBackgroundColorDefinition.Matches(pixelColor))
                    {
                        TestExecution().LogMessage("WARNING: Ran into unexpected color at " + x + "," + y + ".");
                        totalUnexpectedColors++;
                        consecutiveUnexpectedColors++;
                    }
                    else
                    {
                        consecutiveUnexpectedColors = 0;
                    }
                    // Tolerate a little noise, but give up after a short run of strangeness
                    // or too much of it overall.
                    if (consecutiveUnexpectedColors > 2 || totalUnexpectedColors > 10)
                    {
                        abort = true;
                        TestExecution().LogMessage("ERROR: aborting due to too many unexpected colors; consecutive=" + consecutiveUnexpectedColors + " total=" + totalUnexpectedColors);
                    }
                    else
                    {
                        x += xSearchChange;
                        y += ySearchChange;
                        searchIndex++;
                    }
                }
            }
            if (!foundTarget && !abort)
            {
                TestExecution().LogMessage("Finished search stint at x=" + x + " y=" + y + "; search length=" + searchLength + " max=" + maxSearchLength);
                ReFindFollowingEdge();
                if (foundFollowingEdge)
                {
                    ComputeSlope();
                }
            }
        }
        if (foundTarget)
        {
            const int backupDistance = 5; // back away 5 pixels from the target edge, re-find the following edge, and estimate the corner from there
            x -= xSearchChange * backupDistance;
            y -= ySearchChange * backupDistance;
            ReFindFollowingEdge();
            if (foundFollowingEdge)
            {
                ComputeSlope();
            }
            else
            {
                TestExecution().LogMessage("WARNING: couldn't find Following Edge " + backupDistance + " pixel away from Target Edge");
            }
            if (numSummedSlopes > 0)
            {
                // Extrapolate the corner using the averaged slope of the following edge.
                resultX = (int)(lastXOnFollowingEdge + backupDistance * xSearchChange + bigSlope * backupDistance * xStepAwayChange);
                resultY = (int)(lastYOnFollowingEdge + backupDistance * ySearchChange + bigSlope * backupDistance * yStepAwayChange);
            }
            else
            {
                resultX = lastXOnFollowingEdge + backupDistance * xSearchChange;
                resultY = lastYOnFollowingEdge + backupDistance * ySearchChange;
            }
        }
        else
        {
            TestExecution().LogMessage("ERROR: Couldn't find Target Edge. x=" + x + " y=" + y);
            abort = true;
        }
    }
    mResultX.SetValue(resultX);
    mResultY.SetValue(resultY);
    mResultX.SetIsComplete();
    mResultY.SetIsComplete();
    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    TestExecution().LogMessage("Corner at x=" + resultX + " y=" + resultY);
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + " | took " + computeTime.TotalMilliseconds + "ms");
}
public override void DoWork()
{
    // Scores the source image against a trained per-pixel gray-value window (min/max
    // pattern bitmaps). Pixels whose gray value falls outside the (sloppiness-widened)
    // window add to the score, weighted so tight windows and large deviations count more.
    // Pixels whose trained window exceeds the variation threshold are ignored for scoring
    // and painted yellow on the marked image; scoring pixels are painted mMarkColor.
    TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " started");
    Bitmap sourceBitmap = SourceImage.Bitmap;
    Bitmap markedBitmap = null;
    PatternMatchOfGrayValueDefinition theDef = (PatternMatchOfGrayValueDefinition)Definition();
    if (theDef.mPatternMinValues == null || theDef.mPatternMaxValues == null)
    {
        theDef.LoadPatterns(false);
    }
    Bitmap patternMinValues = theDef.mPatternMinValues;
    Bitmap patternMaxValues = theDef.mPatternMaxValues;
    if (patternMinValues == null || patternMaxValues == null)
    {
        throw new ArgumentException("Pattern to match isn't defined.");
    }
    if (mMarkedImage != null && sourceBitmap != null)
    {
        mMarkedImage.SetImage(new Bitmap(sourceBitmap));
        markedBitmap = mMarkedImage.Bitmap;
        TestExecution().LogMessageWithTimeFromTrigger("Created copy of image for markings");
    }
    long score = 0;
    if (sourceBitmap != null)
    {
        // for LockBits see http://www.bobpowell.net/lockingbits.htm
        BitmapData sourceBitmapData = null;
        BitmapData markedBitmapData = null;
        BitmapData patternMinValuesBitmapData = null;
        BitmapData patternMaxValuesBitmapData = null;
        try
        {
            sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayValueDefinition.TRAINING_PIXEL_FORMAT);
            patternMinValuesBitmapData = patternMinValues.LockBits(new Rectangle(0, 0, patternMinValues.Width, patternMinValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayValueDefinition.PATTERN_PIXEL_FORMAT);
            patternMaxValuesBitmapData = patternMaxValues.LockBits(new Rectangle(0, 0, patternMaxValues.Width, patternMaxValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayValueDefinition.PATTERN_PIXEL_FORMAT);
            if (markedBitmap != null)
            {
                markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
            }
            const int markedPixelByteWidth = 4; // determined by PixelFormat.Format32bppArgb used for the marked bitmap
            int sourceStride = sourceBitmapData.Stride;
            int patternStride = patternMinValuesBitmapData.Stride;
            Color color;
            int grayValue;
            long variation = 0;
            long patternWindow = 0;
            long threshhold = mVariationThreshhold.ValueAsLong();
            double sloppiness = mSloppiness.ValueAsDecimal() / 100.0;
            long minWindow = Math.Max(1, mMinWindow.ValueAsLong());
            double brightPixelFactor = mBrightPixelFactor.ValueAsDecimal();
            double darkPixelFactor = mDarkPixelFactor.ValueAsDecimal();
            bool needToMark = false;
            long scoreChange = 0;
            Point currentPoint = new Point(-1, -1);
            TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " testing X Axis");
            mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);
            unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp
            {
                byte *sourcePointer;
                byte *markedPointer;
                byte *patternMinValuesPointer;
                byte *patternMaxValuesPointer;
                while (currentPoint.X != -1 && currentPoint.Y != -1)
                {
                    sourcePointer = (byte *)sourceBitmapData.Scan0; // first byte of image
                    sourcePointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PatternMatchOfGrayValueDefinition.TRAINING_PIXEL_BYTE_WIDTH); // adjust to current point
                    // Byte layout: index 0 is blue, 1 is green, 2 is red, 3 is alpha.
                    color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                    // Standard luminance weights (eye is most sensitive to green, least to blue);
                    // see http://www.bobpowell.net/grayscale.htm
                    grayValue = (int)(0.3 * color.R + 0.59 * color.G + 0.11 * color.B);
                    patternMinValuesPointer = (byte *)patternMinValuesBitmapData.Scan0;
                    patternMinValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayValueDefinition.PATTERN_PIXEL_BYTE_WIDTH);
                    patternMaxValuesPointer = (byte *)patternMaxValuesBitmapData.Scan0;
                    patternMaxValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayValueDefinition.PATTERN_PIXEL_BYTE_WIDTH);
                    patternWindow = patternMaxValuesPointer[0] - patternMinValuesPointer[0]; // give tight windows more weight in the score
                    patternWindow = Math.Max(minWindow, patternWindow); // ensure minWindow>0 to prevent divide-by-0
                    if (patternWindow > threshhold)
                    {
                        // Window too wide to be meaningful: skip scoring, just flag it yellow.
                        // BUGFIX: only touch the marked bitmap when it exists — this branch used
                        // to dereference markedBitmapData unconditionally and threw a
                        // NullReferenceException whenever no marked image was configured. Also
                        // index with the marked bitmap's own stride, not the source stride.
                        if (markedBitmapData != null)
                        {
                            markedPointer = (byte *)markedBitmapData.Scan0;
                            markedPointer += (currentPoint.Y * markedBitmapData.Stride) + (currentPoint.X * markedPixelByteWidth);
                            markedPointer[3] = Color.Yellow.A;
                            markedPointer[2] = Color.Yellow.R;
                            markedPointer[1] = Color.Yellow.G;
                            markedPointer[0] = Color.Yellow.B;
                        }
                    }
                    else
                    {
                        if (grayValue < patternMinValuesPointer[0] - sloppiness * patternWindow)
                        {
                            variation = patternMinValuesPointer[0] - grayValue;
                            scoreChange = (long)(variation * ((variation / (patternWindow / 2)) + 1) * darkPixelFactor);
                            score += scoreChange;
                            needToMark = true;
                            TestExecution().LogMessage("Pattern Match score event: " + currentPoint.X + "," + currentPoint.Y + " dark spot score=" + scoreChange + " gray=" + grayValue + " min=" + patternMinValuesPointer[0] + " max=" + patternMaxValuesPointer[0] + " window=" + patternWindow + " var=" + variation);
                        }
                        else if (grayValue > patternMaxValuesPointer[0] + sloppiness * patternWindow)
                        {
                            variation = grayValue - patternMaxValuesPointer[0];
                            scoreChange = (long)(variation * ((variation / (patternWindow / 2)) + 1) * brightPixelFactor);
                            score += scoreChange;
                            needToMark = true;
                            TestExecution().LogMessage("Pattern Match score event: " + currentPoint.X + "," + currentPoint.Y + " bright spot score=" + scoreChange + " gray=" + grayValue + " min=" + patternMinValuesPointer[0] + " max=" + patternMaxValuesPointer[0] + " window=" + patternWindow + " var=" + variation);
                        }
                        else
                        {
                            needToMark = false;
                        }
                        if (needToMark && markedBitmapData != null)
                        {
                            markedPointer = (byte *)markedBitmapData.Scan0;
                            // BUGFIX: use the marked bitmap's own stride (see note above).
                            markedPointer += (currentPoint.Y * markedBitmapData.Stride) + (currentPoint.X * markedPixelByteWidth);
                            markedPointer[3] = mMarkColor.A;
                            markedPointer[2] = mMarkColor.R;
                            markedPointer[1] = mMarkColor.G;
                            markedPointer[0] = mMarkColor.B;
                        }
                    }
                    mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
                }
            } // end unsafe block
        }
        finally
        {
            // BUGFIX: guard the unlocks — if a LockBits call threw, later data objects are
            // still null and UnlockBits(null) would itself throw out of the finally block.
            if (sourceBitmapData != null) { sourceBitmap.UnlockBits(sourceBitmapData); }
            if (patternMinValuesBitmapData != null) { patternMinValues.UnlockBits(patternMinValuesBitmapData); }
            if (patternMaxValuesBitmapData != null) { patternMaxValues.UnlockBits(patternMaxValuesBitmapData); }
            if (markedBitmap != null && markedBitmapData != null) { markedBitmap.UnlockBits(markedBitmapData); }
        }
    }
    mResult.SetValue(score);
    mResult.SetIsComplete();
    if (mMarkedImage != null)
    {
        mMarkedImage.SetIsComplete();
    }
    TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " completed; score=" + score);
}
public static readonly int PIXEL_BYTE_WIDTH = 4; // determined by PixelFormat.Format32bppArgb; http://www.bobpowell.net/lockingbits.htm

public override void DoWork()
{
    /* TODO: OPTIMIZATIONS:
     * - compute surfaceNoiseLevel based on image analysis
     * - make debug output to log optional
     * - surface/transition decorations (biggest problem is that there can be a variable number...only for first edge to start?)
     * - for marked image, save decorations...don't copy/paint_on image
     */
    // Finds the brightest spot in the ROI: every pixel whose gray value meets the
    // brightness threshold contributes its X and Y coordinates to two ValueGroupers;
    // the center of the biggest group (with neighbors) on each axis is the result.
    // Results are -1 when no source image or no qualifying group is found.
    DateTime startTime = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);
    int resultX = -1;
    int resultY = -1;
    if (mSourceImage.Bitmap == null)
    {
        TestExecution().LogMessage("ERROR: source image for '" + Name + "' does not exist.");
    }
    else
    {
        Bitmap sourceBitmap = SourceImage.Bitmap;
        BitmapData sourceBitmapData = null;
        try
        {
            sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PIXEL_FORMAT);
            int sourceStride = sourceBitmapData.Stride;
            int brightnessThreshold = (int)mBrightnessThreshold.ValueAsLong();
            Point currentPoint = new Point(-1, -1);
            mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);
            // NOTE(review): the groupers are ranged 0-255 but pixel X/Y coordinates are added
            // below — confirm the upper bound is adequate for images larger than 256px.
            ValueGrouper xGrouper = new ValueGrouper(0, 255, 50);
            ValueGrouper yGrouper = new ValueGrouper(0, 255, 50);
            unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp
            {
                byte *sourcePointer;
                while (currentPoint.X != -1 && currentPoint.Y != -1)
                {
                    sourcePointer = (byte *)sourceBitmapData.Scan0; // first byte of image
                    sourcePointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PIXEL_BYTE_WIDTH); // adjust to current point
                    // Standard luminance weights (eye is most sensitive to green, least to blue);
                    // byte layout: [2]=red, [1]=green, [0]=blue. See http://www.bobpowell.net/grayscale.htm
                    pixelGrayValue = (int)(0.3 * sourcePointer[2] + 0.59 * sourcePointer[1] + 0.11 * sourcePointer[0]);
                    if (pixelGrayValue >= brightnessThreshold)
                    {
                        xGrouper.AddValue(currentPoint.X);
                        yGrouper.AddValue(currentPoint.Y);
                    }
                    mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
                }
                TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] finished analyzing pixels");
            } // end unsafe block
            for (int z = 0; z < xGrouper.NumGroups; z++)
            {
                ValueGrouper.GroupStats groupStats = xGrouper.GetGroup(z);
                TestExecution().LogMessage(groupStats.start + " " + groupStats.end + " " + groupStats.count + " " + groupStats.Average());
            }
            for (int z = 0; z < yGrouper.NumGroups; z++)
            {
                ValueGrouper.GroupStats groupStats = yGrouper.GetGroup(z);
                TestExecution().LogMessage(groupStats.start + " " + groupStats.end + " " + groupStats.count + " " + groupStats.Average());
            }
            ValueGrouper.GroupStats biggestXGroup = xGrouper.BiggestGroupWithNeighbors();
            if (biggestXGroup != null)
            {
                resultX = biggestXGroup.Average();
            }
            ValueGrouper.GroupStats biggestYGroup = yGrouper.BiggestGroupWithNeighbors();
            // BUGFIX: this used to test biggestXGroup (copy/paste), which could dereference a
            // null biggestYGroup — and could skip a valid Y result when only X was null.
            if (biggestYGroup != null)
            {
                resultY = biggestYGroup.Average();
            }
        }
        catch (Exception e)
        {
            TestExecution().LogMessageWithTimeFromTrigger("ERROR: Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
        }
        finally
        {
            // BUGFIX: guard the unlock — if LockBits threw, sourceBitmapData is still null and
            // UnlockBits(null) would throw out of the finally block, masking the real error.
            if (sourceBitmapData != null)
            {
                sourceBitmap.UnlockBits(sourceBitmapData);
            }
        }
    } // end main block ("else" after all initial setup error checks)
    mBrightSpot_X.SetValue(resultX);
    mBrightSpot_Y.SetValue(resultY);
    mBrightSpot_X.SetIsComplete();
    mBrightSpot_Y.SetIsComplete();
    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    TestExecution().LogMessageWithTimeFromTrigger(Name + " computed bright spot at " + resultX + "," + resultY);
    if (mAutoSave)
    {
        try
        {
            string filePath = ((FindBrightestSpotDefinition)Definition()).AutoSavePath;
            mSourceImage.Save(filePath, Name, true);
            TestExecution().LogMessageWithTimeFromTrigger("Snapshot saved");
        }
        catch (ArgumentException e)
        {
            Project().Window().logMessage("ERROR: " + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger(e.Message);
        }
        catch (Exception e)
        {
            Project().Window().logMessage("ERROR: Unable to AutoSave snapshot from " + Name + ". Ensure path valid and disk not full. Low-level message=" + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger("Unable to AutoSave snapshot from " + Name + ". Ensure path valid and disk not full.");
        }
    }
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + " | took " + computeTime.TotalMilliseconds + "ms");
}
// Runs one gray-variation pattern-match pass over the source image.
// For each ROI pixel it accumulates the gray-level transition over a sliding
// window of PixelsPerTest pixels and compares that sum against trained
// per-pixel min/max pattern bitmaps — first walking the ROI along the Y axis,
// then along the X axis — scoring deviations via TestPixel(). Publishes the
// score in mResult and optionally auto-saves snapshots.
// NOTE(review): 'score', 'markedBitmapData', 'sourceStride', 'threshhold',
// 'sloppiness', 'minWindow', 'scoreThreshold', 'brightPixelFactor',
// 'darkPixelFactor' and 'mFirstAxisScores' are assigned here without local
// declarations, so they appear to be fields shared with TestPixel() — confirm.
public override void DoWork() {
    // Learning/collection mode: archive the image into the learning folder and skip the test.
    if (mCollectImages) {
        try {
            mSourceImage.Save(((PatternMatchOfGrayVariationDefinition)Definition()).LearningPath, Name, true);
            TestExecution().LogMessageWithTimeFromTrigger(Name + " collected image in learning folder. Skipping test.");
        } catch (ArgumentException e) {
            Project().Window().logMessage("ERROR: " + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger(e.Message);
        } catch (Exception e) {
            string errMsg = "Unable to save collected image. Ensure path valid and disk not full.";
            Project().Window().logMessage("ERROR: " + errMsg + " Low-level message=" + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger(errMsg);
        }
        // NOTE(review): 'score' has not been reset in this run (it is zeroed only
        // further below), so this publishes the field's previous value — confirm intended.
        mResult.SetValue(score);
        mResult.SetIsComplete();
        return;
    }
    TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " started");
    /*
     * if (Definition(.ScoreFilter != null)
     * {
     *     mScoreFilter = testExecution.GetScoreFilter(theDefinition.ScoreFilter.Name);
     * }
     */
    Bitmap sourceBitmap = SourceImage.Bitmap;
    Bitmap markedBitmap = null;
    PatternMatchOfGrayVariationDefinition theDef = (PatternMatchOfGrayVariationDefinition)Definition();
    // Lazily load the trained pattern bitmaps on first use.
    if (theDef.mPatternMinDownValues == null || theDef.mPatternMaxDownValues == null) {
        theDef.LoadPatterns(false);
    }
    Bitmap patternMinDownValues = theDef.mPatternMinDownValues;
    Bitmap patternMaxDownValues = theDef.mPatternMaxDownValues;
    Bitmap patternMinUpValues = theDef.mPatternMinUpValues;
    Bitmap patternMaxUpValues = theDef.mPatternMaxUpValues;
    Bitmap patternMinRightValues = theDef.mPatternMinRightValues;
    Bitmap patternMaxRightValues = theDef.mPatternMaxRightValues;
    Bitmap patternMinLeftValues = theDef.mPatternMinLeftValues;
    Bitmap patternMaxLeftValues = theDef.mPatternMaxLeftValues;
    if (patternMinDownValues == null || patternMaxDownValues == null) {
        throw new ArgumentException("Pattern to match isn't defined.");
    }
    // Make a copy of the source for markings when a marked-image output is configured.
    if (mMarkedImage != null && sourceBitmap != null) {
        mMarkedImage.SetImage(new Bitmap(sourceBitmap));
        markedBitmap = mMarkedImage.Bitmap;
        TestExecution().LogMessageWithTimeFromTrigger("Created copy of image for markings");
    }
    score = 0;
    if (sourceBitmap != null) {
        // for LockBits see http://www.bobpowell.net/lockingbits.htm &
        // http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
        BitmapData sourceBitmapData = null;
        BitmapData patternMinDownValuesBitmapData = null;
        BitmapData patternMaxDownValuesBitmapData = null;
        BitmapData patternMinUpValuesBitmapData = null;
        BitmapData patternMaxUpValuesBitmapData = null;
        BitmapData patternMinRightValuesBitmapData = null;
        BitmapData patternMaxRightValuesBitmapData = null;
        BitmapData patternMinLeftValuesBitmapData = null;
        BitmapData patternMaxLeftValuesBitmapData = null;
        if (mScoreFilter != null) {
            mScoreFilter.SetImageSize(mSourceImage.Bitmap.Width, mSourceImage.Bitmap.Height);
        }
        try {
            // Lock the source and all eight directional pattern bitmaps for raw pixel access.
            sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_FORMAT);
            patternMinDownValuesBitmapData = patternMinDownValues.LockBits(new Rectangle(0, 0, patternMinDownValues.Width, patternMinDownValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternMaxDownValuesBitmapData = patternMaxDownValues.LockBits(new Rectangle(0, 0, patternMaxDownValues.Width, patternMaxDownValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternMinUpValuesBitmapData = patternMinUpValues.LockBits(new Rectangle(0, 0, patternMinUpValues.Width, patternMinUpValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternMaxUpValuesBitmapData = patternMaxUpValues.LockBits(new Rectangle(0, 0, patternMaxUpValues.Width, patternMaxUpValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternMinRightValuesBitmapData = patternMinRightValues.LockBits(new Rectangle(0, 0, patternMinRightValues.Width, patternMinRightValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternMaxRightValuesBitmapData = patternMaxRightValues.LockBits(new Rectangle(0, 0, patternMaxRightValues.Width, patternMaxRightValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternMinLeftValuesBitmapData = patternMinLeftValues.LockBits(new Rectangle(0, 0, patternMinLeftValues.Width, patternMinLeftValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternMaxLeftValuesBitmapData = patternMaxLeftValues.LockBits(new Rectangle(0, 0, patternMaxLeftValues.Width, patternMaxLeftValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            if (markedBitmap != null) {
                markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
            }
            sourceStride = sourceBitmapData.Stride;
            // Stride offsets = padding bytes at the end of each bitmap row (computed but not used below).
            int sourceStrideOffset = sourceStride - (sourceBitmapData.Width * PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH);
            int patternStride = patternMinDownValuesBitmapData.Stride;
            int patternStrideOffset = patternStride - (patternMinDownValuesBitmapData.Width * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH);
            //Color color;
            int grayValue;
            int grayValue2;
            // Cache test parameters (these look like fields consumed by TestPixel()).
            threshhold = mVariationThreshhold.ValueAsLong();
            sloppiness = mSloppiness.ValueAsDecimal() / 100.0;
            minWindow = Math.Max(1, mMinWindow.ValueAsLong());
            scoreThreshold = mScoreThreshold.ValueAsLong();
            brightPixelFactor = mBrightPixelFactor.ValueAsDecimal();
            darkPixelFactor = mDarkPixelFactor.ValueAsDecimal();
            int varSum;
            int minVarForThisPixel;
            int maxVarForThisPixel;
            Point currentPoint = new Point(-1, -1);
            int lastX = -1;
            int lastY = -1;
            // Sliding window of the last PixelsPerTest gray-variation values.
            int[] variationArray = new int[PatternMatchOfGrayVariationDefinition.PixelsPerTest];
            int positionsUntested = 0;
            mFirstAxisScores = new long[sourceBitmap.Width, sourceBitmap.Height];
            unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
            {
                byte *sourcePointer;
                byte *sourcePointer2;
                byte *patternMinValuesPointer;
                byte *patternMaxValuesPointer;
                // ---- Pass 1: walk the ROI column-by-column (Y axis). ----
                TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " testing Y Axis");
                mROI.GetFirstPointOnYAxis(mSourceImage, ref currentPoint);
                // (-1,-1) is the ROI iterator's end-of-points sentinel.
                while (currentPoint.X != -1 && currentPoint.Y != -1) {
                    sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                    sourcePointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH); // adjust to current point
                    //color = Color.FromArgb(sourcePointer[3], , , ); // Array index 0 is blue, 1 is green, 2 is red, 0 is alpha
                    // Luminance weights (0.3*R + 0.59*G + 0.11*B) reflect the eye's color sensitivity;
                    // see http://www.bobpowell.net/grayscale.htm
                    grayValue = (int)(0.3 * sourcePointer[2] + 0.59 * sourcePointer[1] + 0.11 * sourcePointer[0]);
                    // check pixel above
                    sourcePointer2 = sourcePointer - sourceStride; // TODO: ensure y>0
                    grayValue2 = (int)(0.3 * sourcePointer2[2] + 0.59 * sourcePointer2[1] + 0.11 * sourcePointer2[0]);
                    varSum = grayValue - grayValue2; // NOTE: using '=' to init varSum for this pixel
                    // Starting a new column (or a gap in the ROI): reset the sliding window.
                    if (currentPoint.X != lastX || currentPoint.Y != lastY + 1) {
                        // init variationArray
                        for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest; ++i) {
                            variationArray[i] = PatternMatchOfGrayVariationDefinition.VALUE_NOT_DEFINED;
                        }
                        if (positionsUntested > 0) {
                            // TODO: if this isn't 0, then mark untested pixels a certain color?
                            // this should only happen when the ROI is less than PixelsPerTest high at a particular X value
                            TestExecution().LogMessageWithTimeFromTrigger("WARNING: " + positionsUntested + " pixels were not tested above " + lastX + "," + lastY);
                        }
                        positionsUntested = 0;
                    }
                    // shift variationArray
                    for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest - 1; ++i) {
                        variationArray[i] = variationArray[i + 1];
                    }
                    // store most recent value
                    variationArray[PatternMatchOfGrayVariationDefinition.PixelsPerTest - 1] = varSum;
                    if (variationArray[0] == PatternMatchOfGrayVariationDefinition.VALUE_NOT_DEFINED) {
                        // Window not yet full: remember how many pixels still owe a test.
                        positionsUntested++;
                    } else {
                        int variationSum = 0;
                        // compute sum variation over X pixel transitions
                        for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest; ++i) {
                            variationSum += variationArray[i];
                        }
                        variationSum = Math.Max(-127, Math.Min(128, variationSum)); // make sure we stay within 1 byte (0..255)
                        // test pixel against the trained "down" min/max pattern (values stored biased by +127)
                        patternMinValuesPointer = (byte *)patternMinDownValuesBitmapData.Scan0; // init to first byte of image
                        patternMinValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                        minVarForThisPixel = patternMinValuesPointer[0] - 127;
                        patternMaxValuesPointer = (byte *)patternMaxDownValuesBitmapData.Scan0; // init to first byte of image
                        patternMaxValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                        maxVarForThisPixel = patternMaxValuesPointer[0] - 127;
                        TestPixel(currentPoint.X, currentPoint.Y, variationSum, minVarForThisPixel, maxVarForThisPixel, true);
                        if (positionsUntested > 0) {
                            // if we missed testing a pixel above us (because it was near an ROI or image top edge where there weren't pixels above it to compute from), we test them here computing in the opposite direction (up values vs down values)
                            // current pixel - PixelsPerTest = -variationSum
                            int testPositionY = currentPoint.Y - PatternMatchOfGrayVariationDefinition.PixelsPerTest;
                            if (testPositionY < 0) {
                                throw new ArgumentException("Fatal logic error in test 93420rf");
                            }
                            patternMinValuesPointer = (byte *)patternMinUpValuesBitmapData.Scan0; // init to first byte of image
                            patternMinValuesPointer += (testPositionY * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                            minVarForThisPixel = patternMinValuesPointer[0] - 127;
                            patternMaxValuesPointer = (byte *)patternMaxUpValuesBitmapData.Scan0; // init to first byte of image
                            patternMaxValuesPointer += (testPositionY * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                            maxVarForThisPixel = patternMaxValuesPointer[0] - 127;
                            TestPixel(currentPoint.X, testPositionY, -variationSum, minVarForThisPixel, maxVarForThisPixel, true);
                            positionsUntested--;
                        }
                    }
                    lastX = currentPoint.X;
                    lastY = currentPoint.Y;
                    mROI.GetNextPointOnYAxis(mSourceImage, ref currentPoint);
                }
                // ---- Pass 2: walk the ROI row-by-row (X axis). ----
                TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " testing X Axis");
                mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);
                while (currentPoint.X != -1 && currentPoint.Y != -1) {
                    sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                    sourcePointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH); // adjust to current point
                    //color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]); // Array index 0 is blue, 1 is green, 2 is red, 0 is alpha
                    grayValue = (int)(0.3 * sourcePointer[2] + 0.59 * sourcePointer[1] + 0.11 * sourcePointer[0]);
                    // check pixel behind (one pixel to the left)
                    sourcePointer2 = sourcePointer - PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH; // TODO: ensure y>0
                    grayValue2 = (int)(0.3 * sourcePointer2[2] + 0.59 * sourcePointer2[1] + 0.11 * sourcePointer2[0]);
                    varSum = grayValue - grayValue2; // NOTE: using '=' to init varSum for this pixel
                    if (currentPoint.Y != lastY || currentPoint.X != lastX + 1) {
                        // init variationArray
                        for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest; ++i) {
                            variationArray[i] = PatternMatchOfGrayVariationDefinition.VALUE_NOT_DEFINED;
                        }
                        if (positionsUntested > 0) {
                            // TODO: if this isn't 0, then mark untested pixels a certain color?
                            // this should only happen when the ROI is less than PixelsPerTest high at a particular X value
                            TestExecution().LogMessageWithTimeFromTrigger("WARNING: " + positionsUntested + " pixels were not tested behind " + lastX + "," + lastY);
                        }
                        positionsUntested = 0;
                    }
                    // shift variationArray
                    for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest - 1; ++i) {
                        variationArray[i] = variationArray[i + 1];
                    }
                    // store most recent value
                    variationArray[PatternMatchOfGrayVariationDefinition.PixelsPerTest - 1] = varSum;
                    if (variationArray[0] == PatternMatchOfGrayVariationDefinition.VALUE_NOT_DEFINED) {
                        positionsUntested++;
                    } else {
                        int variationSum = 0;
                        // compute sum variation over X pixel transitions
                        for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest; ++i) {
                            variationSum += variationArray[i];
                        }
                        variationSum = Math.Max(-127, Math.Min(128, variationSum)); // make sure we stay within 1 byte (0..255)
                        // test pixel
                        // NOTE(review): this X-axis pass reads the *Down* pattern bitmaps even though
                        // the Right/Left pattern bitmaps are locked above and otherwise unused — looks
                        // like patternMinRightValuesBitmapData/patternMaxRightValuesBitmapData were
                        // intended here; confirm before changing.
                        patternMinValuesPointer = (byte *)patternMinDownValuesBitmapData.Scan0; // init to first byte of image
                        patternMinValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                        minVarForThisPixel = patternMinValuesPointer[0] - 127;
                        patternMaxValuesPointer = (byte *)patternMaxDownValuesBitmapData.Scan0; // init to first byte of image
                        patternMaxValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                        maxVarForThisPixel = patternMaxValuesPointer[0] - 127;
                        TestPixel(currentPoint.X, currentPoint.Y, variationSum, minVarForThisPixel, maxVarForThisPixel, false);
                        if (positionsUntested > 0) {
                            // if we missed testing a pixel behind us (because it was near an ROI or image left edge where there weren't pixels behind it to compute from), we test them here computing in the opposite direction (left values vs right values)
                            // current pixel - PixelsPerTest = -variationSum
                            int testPositionX = currentPoint.X - PatternMatchOfGrayVariationDefinition.PixelsPerTest;
                            if (testPositionX < 0) {
                                throw new ArgumentException("Fatal logic error in test 93430rf");
                            }
                            // NOTE(review): uses the *Up* pattern bitmaps here although the comment above
                            // says "left values vs right values" — patternMinLeftValuesBitmapData /
                            // patternMaxLeftValuesBitmapData appear intended; confirm.
                            patternMinValuesPointer = (byte *)patternMinUpValuesBitmapData.Scan0; // init to first byte of image
                            patternMinValuesPointer += (currentPoint.Y * patternStride) + (testPositionX * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                            minVarForThisPixel = patternMinValuesPointer[0] - 127;
                            patternMaxValuesPointer = (byte *)patternMaxUpValuesBitmapData.Scan0; // init to first byte of image
                            patternMaxValuesPointer += (currentPoint.Y * patternStride) + (testPositionX * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                            maxVarForThisPixel = patternMaxValuesPointer[0] - 127;
                            TestPixel(testPositionX, currentPoint.Y, -variationSum, minVarForThisPixel, maxVarForThisPixel, false);
                            positionsUntested--;
                        }
                    }
                    lastX = currentPoint.X;
                    lastY = currentPoint.Y;
                    mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
                }
            } // end unsafe block
        } catch (Exception e) {
            TestExecution().LogMessageWithTimeFromTrigger("ERROR: Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
        } finally {
            // Always release the per-run score grid and every locked bitmap.
            mFirstAxisScores = null;
            sourceBitmap.UnlockBits(sourceBitmapData);
            patternMinDownValues.UnlockBits(patternMinDownValuesBitmapData);
            patternMaxDownValues.UnlockBits(patternMaxDownValuesBitmapData);
            patternMinUpValues.UnlockBits(patternMinUpValuesBitmapData);
            patternMaxUpValues.UnlockBits(patternMaxUpValuesBitmapData);
            patternMinRightValues.UnlockBits(patternMinRightValuesBitmapData);
            patternMaxRightValues.UnlockBits(patternMaxRightValuesBitmapData);
            patternMinLeftValues.UnlockBits(patternMinLeftValuesBitmapData);
            patternMaxLeftValues.UnlockBits(patternMaxLeftValuesBitmapData);
            if (markedBitmap != null) {
                markedBitmap.UnlockBits(markedBitmapData);
            }
        }
    } // end main block ("else" after all initial setup error checks)
    // NOTE(review): mScoreFilter is null-checked earlier but dereferenced
    // unconditionally here and in the auto-save condition below — possible
    // NullReferenceException when no score filter is configured; confirm.
    if (mMarkedImage != null && mScoreFilter.Score > 0) {
        mScoreFilter.MarkImage(mMarkedImage.Bitmap, Color.Red);
    }
    mResult.SetValue(score);
    mResult.SetIsComplete();
    if (mMarkedImage != null) {
        mMarkedImage.SetIsComplete();
    }
    string msg = "PatternMatch " + Name + " completed; score=" + score;
    TestExecution().LogMessageWithTimeFromTrigger(msg);
    TestExecution().LogSummaryMessage(msg);
    // Auto-save the snapshot (and marked image) when the score crosses the configured limits.
    if (score >= mAutoSaveOnScore || mScoreFilter.Score >= mAutoSaveOnCellScore) {
        try {
            string filePath = ((PatternMatchOfGrayVariationDefinition)Definition()).AutoSavePath;
            mSourceImage.Save(filePath, Name, true);
            if (mMarkedImage != null) {
                mMarkedImage.Save(filePath, Name, "_marked_" + score + "_" + mScoreFilter.Score);
            }
            TestExecution().LogMessageWithTimeFromTrigger("Snapshot saved");
        } catch (ArgumentException e) {
            Project().Window().logMessage("ERROR: " + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger(e.Message);
        } catch (Exception e) {
            Project().Window().logMessage("ERROR: Unable to AutoSave snapshot from " + Name + ". Ensure path valid and disk not full. Low-level message=" + e.Message);
            TestExecution().LogErrorWithTimeFromTrigger("Unable to AutoSave snapshot from " + Name + ". Ensure path valid and disk not full.");
        }
    }
}
// NOTE(review): these commented-out lines mention "Color Present Fails", which looks
// like a copy/paste leftover from another analysis class — confirm before reviving.
// public const string AnalysisType = "Color Present Fails";
// public override string Type() { return AnalysisType; }

// Counts the ROI pixels whose color is accepted by mColorMatcher, optionally
// painting each matching pixel with mMarkColor on mImageToMark, and publishes
// the count in mResult.
public override void DoWork() {
    TestExecution().LogMessageWithTimeFromTrigger("ColorMatchCount " + Name + " started");
    DateTime startTime = DateTime.Now;
    mMatchCount = 0;
    if (mSourceImage.Bitmap != null) {
        // 'if (true)' hard-selects the simple GetColor/SetColor path; the unsafe
        // LockBits fast path in the else branch is currently disabled dead code,
        // apparently kept for comparison/benchmarking.
        if (true) {
            Point currentPoint = new Point(-1, -1);
            mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);
            Color color;
            // (-1,-1) is the ROI iterator's end-of-points sentinel.
            while (currentPoint.X != -1 && currentPoint.Y != -1) {
                color = mSourceImage.GetColor(currentPoint.X, currentPoint.Y);
                if (mColorMatcher.Matches(color)) {
                    mMatchCount++;
                    if (mImageToMark != null && mImageToMark.Bitmap != null) {
                        mImageToMark.SetColor(currentPoint.X, currentPoint.Y, mMarkColor);
                    }
                }
                mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
            }
        } else {
            // Disabled alternative: raw pixel access via LockBits.
            Bitmap sourceBitmap = SourceImage.Bitmap;
            Bitmap markedBitmap = null;
            if (mCreateMarkedImage && mImageToMark != null && mImageToMark.Bitmap != null) {
                markedBitmap = mImageToMark.Bitmap;
            }
            // for LockBits see http://www.bobpowell.net/lockingbits.htm &
            // http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
            BitmapData sourceBitmapData = null;
            BitmapData markedBitmapData = null;
            try {
                sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
                if (markedBitmap != null) {
                    markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
                }
                const int pixelByteWidth = 4; // determined by PixelFormat.Format32bppArgb
                int stride = sourceBitmapData.Stride;
                int strideOffset = stride - (sourceBitmapData.Width * pixelByteWidth);
                Point currentPoint = new Point(-1, -1);
                mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);
                unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
                {
                    byte *sourcePointer;
                    byte *markedPointer;
                    Color color;
                    while (currentPoint.X != -1 && currentPoint.Y != -1) {
                        sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                        sourcePointer += (currentPoint.Y * stride) + (currentPoint.X * pixelByteWidth); // adjust to current point
                        color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]); // Array index 0 is blue, 1 is green, 2 is red, 3 is alpha
                        if (mColorMatcher.Matches(color)) {
                            mMatchCount++;
                            if (markedBitmap != null) {
                                markedPointer = (byte *)markedBitmapData.Scan0;
                                markedPointer += (currentPoint.Y * stride) + (currentPoint.X * pixelByteWidth);
                                markedPointer[3] = mMarkColor.A;
                                markedPointer[2] = mMarkColor.R;
                                markedPointer[1] = mMarkColor.G;
                                markedPointer[0] = mMarkColor.B;
                            }
                        }
                        mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
                    }
                } // end unsafe block
            } finally {
                // Always release the locked bitmaps, even if matching throws.
                sourceBitmap.UnlockBits(sourceBitmapData);
                if (markedBitmap != null) {
                    markedBitmap.UnlockBits(markedBitmapData);
                }
            }
        }
    }
    mResult.SetValue(mMatchCount);
    mResult.SetIsComplete();
    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    TestExecution().LogMessageWithTimeFromTrigger(Name + " took " + computeTime.TotalMilliseconds + "ms");
    //MessageBox.Show("done in color count for " + Name);
}
// NOTE(review): these commented-out lines mention "Color Present Fails", which looks
// like a copy/paste leftover from another analysis class — confirm before reviving.
// public const string AnalysisType = "Color Present Fails";
// public override string Type() { return AnalysisType; }

// Adds the two input values (as longs) and publishes the sum as this
// analysis' result, marking the result complete.
public override void DoWork() {
    long sum = mValue1.ValueAsLong() + mValue2.ValueAsLong();
    mResult.SetValue(sum);
    mResult.SetIsComplete();
}
// NOTE(review): these commented-out lines mention "Color Present Fails", which looks
// like a copy/paste leftover from another analysis class — confirm before reviving.
// public const string AnalysisType = "Color Present Fails";
// public override string Type() { return AnalysisType; }

// Scores the source image against a trained average-gray-variation pattern.
// For each ROI pixel (walked along the X axis) it clamps the vertical
// gray-level transition (pixel minus the pixel above) and compares it against
// per-pixel min/max bounds read from the trained pattern bitmaps, accumulating
// a penalty score for out-of-window pixels; publishes the score in mResult and
// optionally paints offending pixels into mMarkedImage.
public override void DoWork() {
    TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " started");
    /*
     * if (Definition(.ScoreFilter != null)
     * {
     *     mScoreFilter = testExecution.GetScoreFilter(theDefinition.ScoreFilter.Name);
     * }
     */
    Bitmap sourceBitmap = SourceImage.Bitmap;
    Bitmap markedBitmap = null;
    PatternMatchOfAvgGrayVariationDefinition theDef = (PatternMatchOfAvgGrayVariationDefinition)Definition();
    // Lazily load the trained pattern bitmaps on first use.
    if (theDef.mPatternAvgValues == null) {
        theDef.LoadPatterns(false);
    }
    Bitmap patternAvgValues = theDef.mPatternAvgValues;
    Bitmap patternStdDevValues = theDef.mPatternStdDevValues;
    Bitmap patternMinValues = theDef.mPatternMinValues;
    Bitmap patternMaxValues = theDef.mPatternMaxValues;
    if (patternAvgValues == null || patternStdDevValues == null || patternMinValues == null || patternMaxValues == null) {
        throw new ArgumentException("Pattern to match isn't defined.");
    }
    // Make a copy of the source for markings when a marked-image output is configured.
    if (mMarkedImage != null && sourceBitmap != null) {
        mMarkedImage.SetImage(new Bitmap(sourceBitmap));
        markedBitmap = mMarkedImage.Bitmap;
        TestExecution().LogMessageWithTimeFromTrigger("Created copy of image for markings");
    }
    long score = 0;
    if (sourceBitmap != null) {
        // for LockBits see http://www.bobpowell.net/lockingbits.htm &
        // http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
        BitmapData sourceBitmapData = null;
        BitmapData markedBitmapData = null;
        BitmapData patternAvgValuesBitmapData = null;
        BitmapData patternStdDevValuesBitmapData = null;
        BitmapData patternMinValuesBitmapData = null;
        BitmapData patternMaxValuesBitmapData = null;
        if (mScoreFilter != null) {
            mScoreFilter.SetImageSize(mSourceImage.Bitmap.Width, mSourceImage.Bitmap.Height);
        }
        try {
            // Lock the source and the four trained pattern bitmaps for raw pixel access.
            sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PatternMatchOfAvgGrayVariationDefinition.TRAINING_PIXEL_FORMAT);
            patternAvgValuesBitmapData = patternAvgValues.LockBits(new Rectangle(0, 0, patternAvgValues.Width, patternAvgValues.Height), ImageLockMode.ReadOnly, PatternMatchOfAvgGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternStdDevValuesBitmapData = patternStdDevValues.LockBits(new Rectangle(0, 0, patternStdDevValues.Width, patternStdDevValues.Height), ImageLockMode.ReadOnly, PatternMatchOfAvgGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternMinValuesBitmapData = patternMinValues.LockBits(new Rectangle(0, 0, patternMinValues.Width, patternMinValues.Height), ImageLockMode.ReadOnly, PatternMatchOfAvgGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            patternMaxValuesBitmapData = patternMaxValues.LockBits(new Rectangle(0, 0, patternMaxValues.Width, patternMaxValues.Height), ImageLockMode.ReadOnly, PatternMatchOfAvgGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
            if (markedBitmap != null) {
                markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
            }
            int sourceStride = sourceBitmapData.Stride;
            // Stride offsets = padding bytes at the end of each bitmap row (computed but not used below).
            int sourceStrideOffset = sourceStride - (sourceBitmapData.Width * PatternMatchOfAvgGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH);
            int patternStride = patternAvgValuesBitmapData.Stride;
            int patternStrideOffset = patternStride - (patternAvgValuesBitmapData.Width * PatternMatchOfAvgGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH);
            int grayValue;
            int grayValue2;
            long variation = 0;
            long patternWindow = 0;
            // Cache test parameters once before the pixel loop.
            long threshhold = mVariationThreshhold.ValueAsLong();
            double sloppiness = mSloppiness.ValueAsDecimal() / 100.0;
            long minWindow = Math.Max(1, mMinWindow.ValueAsLong());
            double brightPixelFactor = mBrightPixelFactor.ValueAsDecimal();
            double darkPixelFactor = mDarkPixelFactor.ValueAsDecimal();
            bool needToMark = false;
            long scoreChange = 0;
            int testPixelVariation;
            int minVarForThisPixel;
            int maxVarForThisPixel;
            Point currentPoint = new Point(-1, -1);
            TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " testing X Axis");
            mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);
            unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
            {
                byte *sourcePointer;
                byte *sourcePointer2;
                byte *markedPointer;
                byte *patternAvgValuesPointer;
                byte *patternStdDevValuesPointer;
                byte *patternMinValuesPointer;
                byte *patternMaxValuesPointer;
                // (-1,-1) is the ROI iterator's end-of-points sentinel.
                while (currentPoint.X != -1 && currentPoint.Y != -1) {
                    // -999 sentinels: visible in DEEP ANALYSIS logging if never overwritten.
                    scoreChange = -999;
                    variation = -999;
                    sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                    sourcePointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PatternMatchOfAvgGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH); // adjust to current point
                    // Luminance weights (0.3*R + 0.59*G + 0.11*B) reflect the eye's color sensitivity;
                    // see http://www.bobpowell.net/grayscale.htm
                    grayValue = (int)(0.3 * sourcePointer[2] + 0.59 * sourcePointer[1] + 0.11 * sourcePointer[0]);
                    // check pixel above
                    sourcePointer2 = sourcePointer - sourceStride;
                    grayValue2 = (int)(0.3 * sourcePointer2[2] + 0.59 * sourcePointer2[1] + 0.11 * sourcePointer2[0]);
                    testPixelVariation = grayValue - grayValue2; // NOTE: using '=' to init varSum for this pixel
                    testPixelVariation = Math.Max(-127, Math.Min(128, testPixelVariation)); // make sure we stay within 1 byte (0..255)
                    // Pattern values are stored biased by +127 in the pattern bitmaps.
                    patternAvgValuesPointer = (byte *)patternAvgValuesBitmapData.Scan0; // init to first byte of image
                    patternAvgValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfAvgGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                    patternStdDevValuesPointer = (byte *)patternStdDevValuesBitmapData.Scan0; // init to first byte of image
                    patternStdDevValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfAvgGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                    patternMinValuesPointer = (byte *)patternMinValuesBitmapData.Scan0; // init to first byte of image
                    patternMinValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfAvgGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                    minVarForThisPixel = patternMinValuesPointer[0] - 127;
                    patternMaxValuesPointer = (byte *)patternMaxValuesBitmapData.Scan0; // init to first byte of image
                    patternMaxValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfAvgGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                    maxVarForThisPixel = patternMaxValuesPointer[0] - 127;
                    patternWindow = maxVarForThisPixel - minVarForThisPixel; // give tight windows more weight in the score
                    patternWindow = Math.Max(minWindow, patternWindow); // ensure minWindow>0 to prevent divideBy0
                    if (patternWindow > threshhold) {
                        // Pattern too variable at this pixel to judge: skip scoring and mark yellow.
                        // NOTE(review): markedBitmapData is dereferenced here WITHOUT a null check,
                        // unlike the needToMark branch below which guards on mMarkedImage != null.
                        // If no marked image is configured this will fault — confirm.
                        scoreChange = 0;
                        markedPointer = (byte *)markedBitmapData.Scan0;
                        markedPointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PatternMatchOfAvgGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH);
                        markedPointer[3] = Color.Yellow.A;
                        markedPointer[2] = Color.Yellow.R;
                        markedPointer[1] = Color.Yellow.G;
                        markedPointer[0] = Color.Yellow.B;
                    } else {
                        if (testPixelVariation < minVarForThisPixel - sloppiness * patternWindow) {
                            // Darker transition than the trained pattern allows.
                            variation = minVarForThisPixel - testPixelVariation;
                            //scoreChange = (long)(((variation / patternWindow) + 1) * darkPixelFactor);
                            scoreChange = (long)(variation * ((variation / (patternWindow / 2)) + 1) * darkPixelFactor);
                            score += scoreChange;
                            needToMark = true;
                            TestExecution().LogMessage("Pattern Match score event: " + currentPoint.X + "," + currentPoint.Y + " dark spot score=" + scoreChange + " var=" + testPixelVariation + " min=" + minVarForThisPixel + " max=" + maxVarForThisPixel + " window=" + patternWindow + " var=" + variation);
                            if (mScoreFilter != null) {
                                mScoreFilter.ProcessScore(currentPoint.X, currentPoint.Y, scoreChange);
                            }
                        } else if (testPixelVariation > maxVarForThisPixel + sloppiness * patternWindow) {
                            // Brighter transition than the trained pattern allows.
                            variation = testPixelVariation - maxVarForThisPixel;
                            //scoreChange = (long)(((variation / patternWindow) + 1) * brightPixelFactor);
                            scoreChange = (long)(variation * ((variation / (patternWindow / 2)) + 1) * brightPixelFactor);
                            score += scoreChange;
                            needToMark = true;
                            TestExecution().LogMessage("Pattern Match score event: " + currentPoint.X + "," + currentPoint.Y + " bright spot score=" + scoreChange + " var=" + testPixelVariation + " min=" + minVarForThisPixel + " max=" + maxVarForThisPixel + " window=" + patternWindow + " var=" + variation);
                            if (mScoreFilter != null) {
                                mScoreFilter.ProcessScore(currentPoint.X, currentPoint.Y, scoreChange);
                            }
                        } else {
                            // Within the allowed window: no score, no mark.
                            variation = 0;
                            scoreChange = 0;
                            needToMark = false;
                        }
                        if (needToMark && mMarkedImage != null) {
                            markedPointer = (byte *)markedBitmapData.Scan0;
                            markedPointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PatternMatchOfAvgGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH);
                            markedPointer[3] = mMarkColor.A;
                            markedPointer[2] = mMarkColor.R;
                            markedPointer[1] = mMarkColor.G;
                            markedPointer[0] = mMarkColor.B;
                        }
                    }
                    // Optional verbose tracing for a configured rectangular region.
                    if (mDeepAnalysisEnabled && currentPoint.X >= mDeepAnalysisLeft && currentPoint.X <= mDeepAnalysisRight && currentPoint.Y >= mDeepAnalysisTop && currentPoint.Y <= mDeepAnalysisBottom) {
                        string message = "DEEP ANALYSIS: '" + Name + "' " + currentPoint.X + "," + currentPoint.Y + " ";
                        if (patternWindow > threshhold) {
                            message += "PATTERN WINDOW > THRESHOLD;";
                        }
                        message += " score change=" + scoreChange + " var=" + testPixelVariation + " min=" + minVarForThisPixel + " max=" + maxVarForThisPixel + " window=" + patternWindow + " slop=" + (sloppiness * patternWindow) + " marked=" + needToMark ;
                        TestExecution().LogMessage(message);
                    }
                    mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
                }
            } // end unsafe block
        } finally {
            // No catch clause: exceptions propagate to the caller, but all
            // locked bitmaps are always released here.
            sourceBitmap.UnlockBits(sourceBitmapData);
            patternAvgValues.UnlockBits(patternAvgValuesBitmapData);
            patternStdDevValues.UnlockBits(patternStdDevValuesBitmapData);
            patternMinValues.UnlockBits(patternMinValuesBitmapData);
            patternMaxValues.UnlockBits(patternMaxValuesBitmapData);
            if (markedBitmap != null) {
                markedBitmap.UnlockBits(markedBitmapData);
            }
        }
    }
    mResult.SetValue(score);
    mResult.SetIsComplete();
    if (mMarkedImage != null) {
        mMarkedImage.SetIsComplete();
    }
    TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " completed; score=" + score);
}
public override void DoWork()
{
    // Walks a line of pixels (start point plus rise/run step) through the source
    // image until the configured number of consecutive color matches is seen.
    // Outputs: mResultX/mResultY = first pixel of the winning run (-1 = never
    // searched, -2 = last pixel examined was a mismatch); mSearchEndX/mSearchEndY
    // = last pixel actually visited (-1,-1 when there is no image).
    TestExecution().LogMessageWithTimeFromTrigger("FindColorOnLine " + Name + " started");

    int foundX = -1;
    int foundY = -1;

    if (mSourceImage.Bitmap == null)
    {
        // Nothing to search — mark the search-end outputs as invalid.
        mSearchEndX.SetValue(-1);
        mSearchEndY.SetValue(-1);
    }
    else
    {
        int px = (int)mStartX.ValueAsLong();
        int py = (int)mStartY.ValueAsLong();
        int stepY = (int)mSlopeRise.ValueAsLong();
        int stepX = (int)mSlopeRun.ValueAsLong();
        int matchRun = 0;

        TestExecution().LogMessageWithTimeFromTrigger(Name + " starting searching at " + px + "," + py);

        while (px >= 0 && px < mSourceImage.Bitmap.Width && py >= 0 && py < mSourceImage.Bitmap.Height)
        {
            bool finished = false;
            Color pixelColor = SourceImage.Bitmap.GetPixel(px, py);
            if (mColorMatcher.Matches(pixelColor))
            {
                if (matchRun == 0)
                {
                    // Remember where the current run of matches began.
                    foundX = px;
                    foundY = py;
                    TestExecution().LogMessageWithTimeFromTrigger(Name + " found 1st match at " + px + "," + py);
                }
                matchRun++;
                if (matchRun >= mRequiredConsecutivePixels.ValueAsLong())
                {
                    TestExecution().LogMessageWithTimeFromTrigger(Name + " found last consecutive match at " + px + "," + py);
                    finished = true;
                }
            }
            else
            {
                // Run broken: -2,-2 distinguishes "saw a mismatch" from "never searched" (-1,-1).
                matchRun = 0;
                foundX = -2;
                foundY = -2;
            }

            // Record the last pixel examined — including on the final iteration.
            mSearchEndX.SetValue(px);
            mSearchEndY.SetValue(py);
            px += stepX;
            py += stepY;

            if (finished)
            {
                break;
            }
        }
    }

    mResultX.SetValue(foundX);
    mResultY.SetValue(foundY);
    mResultX.SetIsComplete();
    mResultY.SetIsComplete();
    mSearchEndX.SetIsComplete();
    mSearchEndY.SetIsComplete();
    TestExecution().LogMessageWithTimeFromTrigger("FindColorOnLine " + Name + " completed; result=" + foundX + "," + foundY);
}
public override void DoWork()
{
    // Searches along a configured line (mSearchPath) in the chosen direction for an
    // object: a run of object-colored pixels preceded and followed by the required
    // number of consecutive background-colored pixels. Reports the midpoint of the
    // object's two edges via mResultX/mResultY (-1,-1 when not found).
    DateTime startTime = DateTime.Now;
    TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);

    int objectStartEdgeX = -1;
    int objectStartEdgeY = -1;
    int objectEndEdgeX = -1;
    int objectEndEdgeY = -1;
    int resultX = -1;
    int resultY = -1;
    long requiredConsecMatches = mRequiredConsecutiveColorMatches.ValueAsLong();
    int consecutiveMatches = 0;
    int firstMatch_x = -1;
    int firstMatch_y = -1;

    if (mSourceImage.Bitmap == null)
    {
        TestExecution().LogMessage("ERROR: source image for '" + Name + "' does not exist.");
    }
    else if (mSearchPath == null)
    {
        TestExecution().LogMessage("ERROR: search line for '" + Name + "' isn't defined.");
    }
    else if (mSearchPath.StartX == null || mSearchPath.StartY == null || mSearchPath.EndX == null || mSearchPath.EndY == null)
    {
        TestExecution().LogMessage("ERROR: search line '" + mSearchPath.Name + "' for '" + Name + "' isn't fully defined.");
    }
    else if (mSearchPath.StartX.ValueAsLong() < 0 || mSearchPath.StartX.ValueAsLong() >= mSourceImage.Bitmap.Width ||
             mSearchPath.StartY.ValueAsLong() < 0 || mSearchPath.StartY.ValueAsLong() >= mSourceImage.Bitmap.Height)
    {
        TestExecution().LogMessage("ERROR: The search line start point for '" + Name + "' isn't valid: " + mSearchPath.StartX.ValueAsLong() + "," + mSearchPath.StartY.ValueAsLong());
    }
    else if (mSearchPath.EndX.ValueAsLong() < 0 || mSearchPath.EndX.ValueAsLong() >= mSourceImage.Bitmap.Width ||
             mSearchPath.EndY.ValueAsLong() < 0 || mSearchPath.EndY.ValueAsLong() >= mSourceImage.Bitmap.Height)
    {
        TestExecution().LogMessage("ERROR: The search line end point for '" + Name + "' isn't valid: " + mSearchPath.EndX.ValueAsLong() + "," + mSearchPath.EndY.ValueAsLong());
    }
    else
    {
        // BUG FIX: 'abort' was previously reset to false AFTER the direction switch,
        // which clobbered the abort requested by the NotDefined/default cases and let
        // the search loop run anyway (with stale start/end coordinates). It is now
        // initialized here, before the switch, and never reset afterwards.
        abort = false;

        // Normalize start/end so the walk proceeds in the requested direction,
        // regardless of which way the line was authored.
        switch (mSearchDirection)
        {
            case Direction.Left:
                xSearchChange = -1;
                ySearchChange = 0;
                if (mSearchPath.StartX.ValueAsLong() == mSearchPath.EndX.ValueAsLong() && mSearchPath.StartY.ValueAsLong() != mSearchPath.EndY.ValueAsLong())
                {
                    TestExecution().LogMessage("ERROR: can't search left on line '" + mSearchPath.Name + "' for '" + Name + "' since line is vertical.");
                }
                else if (mSearchPath.StartX.ValueAsLong() < mSearchPath.EndX.ValueAsLong())
                {
                    startX = mSearchPath.EndX.ValueAsLong();
                    startY = mSearchPath.EndY.ValueAsLong();
                    endX = mSearchPath.StartX.ValueAsLong();
                    endY = mSearchPath.StartY.ValueAsLong();
                }
                else
                {
                    startX = mSearchPath.StartX.ValueAsLong();
                    startY = mSearchPath.StartY.ValueAsLong();
                    endX = mSearchPath.EndX.ValueAsLong();
                    endY = mSearchPath.EndY.ValueAsLong();
                }
                break;
            case Direction.Right:
                xSearchChange = 1;
                ySearchChange = 0;
                if (mSearchPath.StartX.ValueAsLong() == mSearchPath.EndX.ValueAsLong() && mSearchPath.StartY.ValueAsLong() != mSearchPath.EndY.ValueAsLong())
                {
                    TestExecution().LogMessage("ERROR: can't search right on line '" + mSearchPath.Name + "' for '" + Name + "' since line is vertical.");
                }
                else if (mSearchPath.StartX.ValueAsLong() < mSearchPath.EndX.ValueAsLong())
                {
                    startX = mSearchPath.StartX.ValueAsLong();
                    startY = mSearchPath.StartY.ValueAsLong();
                    endX = mSearchPath.EndX.ValueAsLong();
                    endY = mSearchPath.EndY.ValueAsLong();
                }
                else
                {
                    startX = mSearchPath.EndX.ValueAsLong();
                    startY = mSearchPath.EndY.ValueAsLong();
                    endX = mSearchPath.StartX.ValueAsLong();
                    endY = mSearchPath.StartY.ValueAsLong();
                }
                break;
            case Direction.Up:
                xSearchChange = 0;
                ySearchChange = -1;
                if (mSearchPath.StartY.ValueAsLong() == mSearchPath.EndY.ValueAsLong() && mSearchPath.StartX.ValueAsLong() != mSearchPath.EndX.ValueAsLong())
                {
                    TestExecution().LogMessage("ERROR: can't search up on line '" + mSearchPath.Name + "' for '" + Name + "' since line is horizontal.");
                }
                else if (mSearchPath.StartY.ValueAsLong() < mSearchPath.EndY.ValueAsLong()) // line is down
                {
                    startX = mSearchPath.EndX.ValueAsLong();
                    startY = mSearchPath.EndY.ValueAsLong();
                    endX = mSearchPath.StartX.ValueAsLong();
                    endY = mSearchPath.StartY.ValueAsLong();
                }
                else // line is up
                {
                    startX = mSearchPath.StartX.ValueAsLong();
                    startY = mSearchPath.StartY.ValueAsLong();
                    endX = mSearchPath.EndX.ValueAsLong();
                    endY = mSearchPath.EndY.ValueAsLong();
                }
                break;
            case Direction.Down:
                xSearchChange = 0;
                ySearchChange = 1;
                if (mSearchPath.StartY.ValueAsLong() == mSearchPath.EndY.ValueAsLong() && mSearchPath.StartX.ValueAsLong() != mSearchPath.EndX.ValueAsLong())
                {
                    TestExecution().LogMessage("ERROR: can't search down on line '" + mSearchPath.Name + "' for '" + Name + "' since line is horizontal.");
                }
                else if (mSearchPath.StartY.ValueAsLong() < mSearchPath.EndY.ValueAsLong()) // line is down
                {
                    startX = mSearchPath.StartX.ValueAsLong();
                    startY = mSearchPath.StartY.ValueAsLong();
                    endX = mSearchPath.EndX.ValueAsLong();
                    endY = mSearchPath.EndY.ValueAsLong();
                }
                else // line is up
                {
                    startX = mSearchPath.EndX.ValueAsLong();
                    startY = mSearchPath.EndY.ValueAsLong();
                    endX = mSearchPath.StartX.ValueAsLong();
                    endY = mSearchPath.StartY.ValueAsLong();
                }
                break;
            case Direction.NotDefined:
                TestExecution().LogMessage("ERROR: Search direction not defined.");
                abort = true;
                break;
            default:
                TestExecution().LogMessage("ERROR: Unsupported Search direction; direction=" + mSearchDirection);
                abort = true;
                break;
        }

        // Clamp the searchable window to the image (endpoints were validated above).
        leftEdgeOfSearch = Math.Max(0, Math.Min(startX, endX));
        rightEdgeOfSearch = Math.Min(mSourceImage.Bitmap.Width, Math.Max(startX, endX));
        topEdgeOfSearch = Math.Max(0, Math.Min(startY, endY));
        bottomEdgeOfSearch = Math.Min(mSourceImage.Bitmap.Height, Math.Max(startY, endY));

        LineType lineType;
        if (startY == endY)
        {
            lineType = LineType.Horizontal;
            slope = 0;
        }
        else if (startX == endX)
        {
            lineType = LineType.Vertical;
            // Sentinel "effectively infinite" slope for vertical lines; the Vertical
            // walk below never reads it. NOTE(review): looks like a stand-in for
            // double.MaxValue — left as-is to preserve behavior.
            slope = 999999999999 / 0.000000001;
        }
        else
        {
            lineType = LineType.Slanted;
            slope = (double)(endY - startY) / (double)(endX - startX);
        }

        x = (int)startX;
        y = (int)startY;
        TestExecution().LogMessage(Name + " starting at " + x + "," + y);
        state = SearchState.FindBackground;
        int searchIndex = 0;

        // State machine: background -> object leading edge -> object trailing edge.
        while (state != SearchState.Done && !abort)
        {
            switch (lineType)
            {
                case LineType.Horizontal:
                    x = (int)(startX + (searchIndex * xSearchChange));
                    break;
                case LineType.Vertical:
                    y = (int)(startY + (searchIndex * ySearchChange));
                    break;
                case LineType.Slanted:
                    // NOTE(review): x uses /slope and y uses *slope with the same
                    // index — preserved verbatim; confirm intent for slanted paths.
                    x = (int)(startX + (searchIndex * xSearchChange) + ((searchIndex * ySearchChange) / slope));
                    y = (int)(startY + (searchIndex * ySearchChange) + ((searchIndex * xSearchChange) * slope));
                    break;
            }
            if (x < leftEdgeOfSearch || x > rightEdgeOfSearch || y < topEdgeOfSearch || y > bottomEdgeOfSearch)
            {
                TestExecution().LogMessage("ERROR: " + Name + " exhausted search without finding full object; end position = " + x + "," + y + "; state = " + state);
                state = SearchState.Done;
            }
            else
            {
                pixelColor = SourceImage.Bitmap.GetPixel(x, y);
                switch (state)
                {
                    case SearchState.FindBackground:
                        if (mSearchBackgroundColorDefinition.Matches(pixelColor))
                        {
                            TestExecution().LogMessage(Name + " found background at " + x + "," + y);
                            state = SearchState.FindObject;
                        }
                        else if (mObjectColorDefinition.Matches(pixelColor))
                        {
                            // Object color before any background: keep scanning.
                        }
                        else
                        {
                            LogWarning(Name + " found unexpected color searching for initial background at " + x + "," + y);
                        }
                        break;
                    case SearchState.FindObject:
                        if (mSearchBackgroundColorDefinition.Matches(pixelColor))
                        {
                            consecutiveMatches = 0;
                            firstMatch_x = -1;
                            firstMatch_y = -1;
                        }
                        else if (mObjectColorDefinition.Matches(pixelColor))
                        {
                            TestExecution().LogMessage(Name + " found start of object at " + x + "," + y);
                            consecutiveMatches++;
                            if (consecutiveMatches == 1)
                            {
                                firstMatch_x = x;
                                firstMatch_y = y;
                            }
                            if (consecutiveMatches >= requiredConsecMatches)
                            {
                                // remember edge
                                objectStartEdgeX = firstMatch_x;
                                objectStartEdgeY = firstMatch_y;
                                TestExecution().LogMessage(Name + " found the " + requiredConsecMatches + " needed consec matches for start of object at " + x + "," + y + "; set start to " + objectStartEdgeX + "," + objectStartEdgeY);
                                // get ready for next state
                                state = SearchState.FindFarEdgeOfObject;
                                consecutiveMatches = 0;
                                firstMatch_x = -1;
                                firstMatch_y = -1;
                            }
                        }
                        else
                        {
                            consecutiveMatches = 0;
                            firstMatch_x = -1;
                            firstMatch_y = -1;
                            LogWarning(Name + " found unexpected color before object at " + x + "," + y);
                        }
                        break;
                    case SearchState.FindFarEdgeOfObject:
                        if (mSearchBackgroundColorDefinition.Matches(pixelColor))
                        {
                            TestExecution().LogMessage(Name + " found end of object at " + x + "," + y);
                            consecutiveMatches++;
                            if (consecutiveMatches == 1)
                            {
                                firstMatch_x = x;
                                firstMatch_y = y;
                            }
                            if (consecutiveMatches >= requiredConsecMatches)
                            {
                                // remember edge
                                objectEndEdgeX = firstMatch_x;
                                objectEndEdgeY = firstMatch_y;
                                resultX = (objectStartEdgeX + objectEndEdgeX) / 2;
                                resultY = (objectStartEdgeY + objectEndEdgeY) / 2;
                                TestExecution().LogMessage(Name + " found the " + requiredConsecMatches + " needed consec matches for end of object at " + x + "," + y + "; set end to " + objectEndEdgeX + "," + objectEndEdgeY);
                                // get ready for next state
                                state = SearchState.Done;
                                consecutiveMatches = 0;
                                firstMatch_x = -1;
                                firstMatch_y = -1;
                            }
                        }
                        else if (mObjectColorDefinition.Matches(pixelColor))
                        {
                            consecutiveMatches = 0;
                            firstMatch_x = -1;
                            firstMatch_y = -1;
                        }
                        else
                        {
                            consecutiveMatches = 0;
                            firstMatch_x = -1;
                            firstMatch_y = -1;
                            LogWarning(Name + " found unexpected color within object at " + x + "," + y);
                        }
                        break;
                } // end switch
                searchIndex++;
            } // end if for x,y verification
        } // end search loop
    } // end main block ("else" after all initial setup error checks)

    mResultX.SetValue(resultX);
    mResultY.SetValue(resultY);
    mResultX.SetIsComplete();
    mResultY.SetIsComplete();
    DateTime doneTime = DateTime.Now;
    TimeSpan computeTime = doneTime - startTime;
    TestExecution().LogMessage(Name + " computed object center at " + resultX + "," + resultY);
    TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + " | took " + computeTime.TotalMilliseconds + "ms");
}
public override void DoWork()
{
    // Scans every ROI pixel of the source image and publishes per-channel
    // statistics (avg/min/max) for Hue, Saturation, Intensity, R, G, B, and a
    // weighted greyscale. Each output is only written if its holder is non-null.
    // NOTE(review): the *_stddev values are never computed — the StdDev outputs
    // are always written as -1; preserved as-is.
    // CLEANUP: the old pointer-based (LockBits/unsafe) pixel-access path that was
    // dead code behind "if (true)" has been removed; only the array-access path
    // below ever executed.
    TestExecution().LogMessageWithTimeFromTrigger(Name + " started");
    Stopwatch watch = new Stopwatch();
    watch.Start();

    long numPixels = 0;
    int H_current; double H_avg = -1; int H_min = 999999; int H_max = -999999; double H_stddev = -1; long H_sum = 0;
    int S_current; double S_avg = -1; int S_min = 999999; int S_max = -999999; double S_stddev = -1; long S_sum = 0;
    int I_current; double I_avg = -1; int I_min = 999999; int I_max = -999999; double I_stddev = -1; long I_sum = 0;
    int R_current; double R_avg = -1; int R_min = 999999; int R_max = -999999; double R_stddev = -1; long R_sum = 0;
    int G_current; double G_avg = -1; int G_min = 999999; int G_max = -999999; double G_stddev = -1; long G_sum = 0;
    int B_current; double B_avg = -1; int B_min = 999999; int B_max = -999999; double B_stddev = -1; long B_sum = 0;
    int Grey_current; double Grey_avg = -1; int Grey_min = 999999; int Grey_max = -999999; double Grey_stddev = -1; long Grey_sum = 0;

    if (mPrerequisite != null && !mPrerequisite.ValueAsBoolean())
    {
        TestExecution().LogMessageWithTimeFromTrigger(Name + ": prerequisites not met. Skipping.");
    }
    else
    {
        if (mSourceImage != null && mSourceImage.Bitmap != null)
        {
            Point currentPoint = new Point(-1, -1);
            mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);
            Color color;
            while (currentPoint.X > -1 && currentPoint.Y > -1)
            {
                numPixels++;
                color = mSourceImage.GetColor(currentPoint.X, currentPoint.Y);
                H_current = (int)(color.GetHue());
                S_current = (int)(color.GetSaturation() * 100);
                I_current = (int)(color.GetBrightness() * 100);
                // Luma weighting (30% R, 59% G, 11% B) approximates the human
                // eye's relative sensitivity to the primaries.
                Grey_current = (int)(0.3 * color.R + 0.59 * color.G + 0.11 * color.B);
                R_current = color.R;
                G_current = color.G;
                B_current = color.B;

                H_sum += H_current;
                if (H_current < H_min) { H_min = H_current; }
                if (H_current > H_max) { H_max = H_current; }
                S_sum += S_current;
                if (S_current < S_min) { S_min = S_current; }
                if (S_current > S_max) { S_max = S_current; }
                I_sum += I_current;
                if (I_current < I_min) { I_min = I_current; }
                if (I_current > I_max) { I_max = I_current; }
                R_sum += R_current;
                if (R_current < R_min) { R_min = R_current; }
                if (R_current > R_max) { R_max = R_current; }
                G_sum += G_current;
                if (G_current < G_min) { G_min = G_current; }
                if (G_current > G_max) { G_max = G_current; }
                B_sum += B_current;
                if (B_current < B_min) { B_min = B_current; }
                if (B_current > B_max) { B_max = B_current; }
                Grey_sum += Grey_current;
                if (Grey_current < Grey_min) { Grey_min = Grey_current; }
                if (Grey_current > Grey_max) { Grey_max = Grey_current; }

                mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
            }
            if (numPixels > 0)
            {
                H_avg = ((double)H_sum) / numPixels;
                S_avg = ((double)S_sum) / numPixels;
                I_avg = ((double)I_sum) / numPixels;
                R_avg = ((double)R_sum) / numPixels;
                G_avg = ((double)G_sum) / numPixels;
                B_avg = ((double)B_sum) / numPixels;
                Grey_avg = ((double)Grey_sum) / numPixels;
            }
            else
            {
                TestExecution().LogErrorWithTimeFromTrigger("ColorAnalysis " + Name + " didn't analyze any pixels -- check ROI size.");
            }
        } // if image not null
    } // if prereqs met

    // Publish every statistic whose output holder exists. When no pixels were
    // analyzed, mins/maxes retain their 999999/-999999 sentinels (original behavior).
    if (mH_Average != null) { mH_Average.SetValue(H_avg); mH_Average.SetIsComplete(); }
    if (mH_Min != null) { mH_Min.SetValue(H_min); mH_Min.SetIsComplete(); }
    if (mH_Max != null) { mH_Max.SetValue(H_max); mH_Max.SetIsComplete(); }
    if (mH_StdDev != null) { mH_StdDev.SetValue(H_stddev); mH_StdDev.SetIsComplete(); }
    if (mS_Average != null) { mS_Average.SetValue(S_avg); mS_Average.SetIsComplete(); }
    if (mS_Min != null) { mS_Min.SetValue(S_min); mS_Min.SetIsComplete(); }
    if (mS_Max != null) { mS_Max.SetValue(S_max); mS_Max.SetIsComplete(); }
    if (mS_StdDev != null) { mS_StdDev.SetValue(S_stddev); mS_StdDev.SetIsComplete(); }
    if (mI_Average != null) { mI_Average.SetValue(I_avg); mI_Average.SetIsComplete(); }
    if (mI_Min != null) { mI_Min.SetValue(I_min); mI_Min.SetIsComplete(); }
    if (mI_Max != null) { mI_Max.SetValue(I_max); mI_Max.SetIsComplete(); }
    if (mI_StdDev != null) { mI_StdDev.SetValue(I_stddev); mI_StdDev.SetIsComplete(); }
    if (mR_Average != null) { mR_Average.SetValue(R_avg); mR_Average.SetIsComplete(); }
    if (mR_Min != null) { mR_Min.SetValue(R_min); mR_Min.SetIsComplete(); }
    if (mR_Max != null) { mR_Max.SetValue(R_max); mR_Max.SetIsComplete(); }
    if (mR_StdDev != null) { mR_StdDev.SetValue(R_stddev); mR_StdDev.SetIsComplete(); }
    if (mG_Average != null) { mG_Average.SetValue(G_avg); mG_Average.SetIsComplete(); }
    if (mG_Min != null) { mG_Min.SetValue(G_min); mG_Min.SetIsComplete(); }
    if (mG_Max != null) { mG_Max.SetValue(G_max); mG_Max.SetIsComplete(); }
    if (mG_StdDev != null) { mG_StdDev.SetValue(G_stddev); mG_StdDev.SetIsComplete(); }
    if (mB_Average != null) { mB_Average.SetValue(B_avg); mB_Average.SetIsComplete(); }
    if (mB_Min != null) { mB_Min.SetValue(B_min); mB_Min.SetIsComplete(); }
    if (mB_Max != null) { mB_Max.SetValue(B_max); mB_Max.SetIsComplete(); }
    if (mB_StdDev != null) { mB_StdDev.SetValue(B_stddev); mB_StdDev.SetIsComplete(); }
    if (mGrey_Average != null) { mGrey_Average.SetValue(Grey_avg); mGrey_Average.SetIsComplete(); }
    if (mGrey_Min != null) { mGrey_Min.SetValue(Grey_min); mGrey_Min.SetIsComplete(); }
    if (mGrey_Max != null) { mGrey_Max.SetValue(Grey_max); mGrey_Max.SetIsComplete(); }
    if (mGrey_StdDev != null) { mGrey_StdDev.SetValue(Grey_stddev); mGrey_StdDev.SetIsComplete(); }

    mIsComplete = true;
    watch.Stop();
    TestExecution().LogMessageWithTimeFromTrigger(Name + " took " + watch.ElapsedMilliseconds + "ms (" + watch.ElapsedTicks + " ticks for " + numPixels + " pixels)");
}
// public const string AnalysisType = "Color Present Fails";
// public override string Type() { return AnalysisType; }
public override void DoWork()
{
    // Sums pixel-to-pixel brightness jumps above a threshold across the ROI,
    // along the X axis (row neighbors) and/or Y axis (column neighbors), into
    // mResult. Optionally paints each over-threshold pixel yellow in a marked
    // copy of the source image.
    Bitmap sourceBitmap = SourceImage.Bitmap;
    Bitmap markedBitmap = null;
    TestExecution().LogMessageWithTimeFromTrigger("IntensityVariation " + Name + " started");
    if (mMarkedImage != null && sourceBitmap != null)
    {
        mMarkedImage.SetImage(new Bitmap(sourceBitmap));
        markedBitmap = mMarkedImage.Bitmap;
        TestExecution().LogMessageWithTimeFromTrigger("Created copy of image for markings");
    }

    long resultValue = 0;
    if (sourceBitmap != null)
    {
        // for LockBits see http://www.bobpowell.net/lockingbits.htm
        BitmapData sourceBitmapData = null;
        BitmapData markedBitmapData = null;
        try
        {
            sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
            if (markedBitmap != null)
            {
                markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
            }
            const int pixelByteWidth = 4; // determined by PixelFormat.Format32bppArgb
            int stride = sourceBitmapData.Stride;
            Color color;
            int pixel1Intensity;
            int pixel2Intensity;
            long variation = 0;
            long threshhold = mVariationThreshhold.ValueAsLong();
            // Clamp the ROI to the image bounds.
            int bottom = Math.Min(sourceBitmap.Height - 1, ROI.Bottom);
            int top = Math.Max(0, ROI.Top);
            int left = Math.Max(0, ROI.Left);
            int right = Math.Min(sourceBitmap.Width - 1, ROI.Right);

            if (mTestXAxis)
            {
                TestExecution().LogMessageWithTimeFromTrigger("IntensityVariation " + Name + " testing X Axis");
                unsafe
                {
                    byte *sourcePointer;
                    byte *markedPointer;
                    for (int j = top; j <= bottom; j++)
                    {
                        sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                        sourcePointer += (j * stride) + (left * pixelByteWidth); // adjust to first byte of ROI row
                        // Byte order within a pixel: 0=blue, 1=green, 2=red, 3=alpha.
                        color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                        pixel1Intensity = (int)(color.GetBrightness() * 100);
                        sourcePointer += pixelByteWidth; // adjust to next pixel to the right
                        for (int i = left + 1; i <= right; i++) // starting at left+1 since we already have the value for "left"
                        {
                            color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                            pixel2Intensity = (int)(color.GetBrightness() * 100);
                            variation = Math.Abs(pixel2Intensity - pixel1Intensity);
                            if (variation > threshhold)
                            {
                                resultValue += variation;
                                if (mMarkedImage != null)
                                {
                                    markedPointer = (byte *)markedBitmapData.Scan0;
                                    markedPointer += (j * stride) + (i * pixelByteWidth);
                                    markedPointer[3] = Color.Yellow.A;
                                    markedPointer[2] = Color.Yellow.R;
                                    markedPointer[1] = Color.Yellow.G;
                                    markedPointer[0] = Color.Yellow.B;
                                }
                            }
                            pixel1Intensity = pixel2Intensity;
                            sourcePointer += pixelByteWidth; // adjust to next pixel to the right
                        }
                    }
                } // end unsafe block
            }

            if (mTestYAxis)
            {
                // BUG FIX: this message used to say "testing X Axis" (copy-paste error).
                TestExecution().LogMessageWithTimeFromTrigger("IntensityVariation " + Name + " testing Y Axis");
                unsafe
                {
                    byte *sourcePointer;
                    byte *markedPointer;
                    for (int i = left; i <= right; i++)
                    {
                        sourcePointer = (byte *)sourceBitmapData.Scan0; // init to first byte of image
                        sourcePointer += (top * stride) + (i * pixelByteWidth); // adjust to top pixel of the column
                        // get value for pixel on top of column...to init our for loop below (loop references two values)
                        color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                        pixel1Intensity = (int)(color.GetBrightness() * 100);
                        sourcePointer += stride; // adjust to next pixel down the column
                        for (int j = top + 1; j <= bottom; j++) // starting at top+1 since we already have the value for "top"
                        {
                            color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                            pixel2Intensity = (int)(color.GetBrightness() * 100);
                            variation = Math.Abs(pixel2Intensity - pixel1Intensity);
                            if (variation > threshhold)
                            {
                                resultValue += variation;
                                if (mMarkedImage != null)
                                {
                                    markedPointer = (byte *)markedBitmapData.Scan0;
                                    markedPointer += (j * stride) + (i * pixelByteWidth);
                                    markedPointer[3] = Color.Yellow.A;
                                    markedPointer[2] = Color.Yellow.R;
                                    markedPointer[1] = Color.Yellow.G;
                                    markedPointer[0] = Color.Yellow.B;
                                }
                            }
                            pixel1Intensity = pixel2Intensity;
                            sourcePointer += stride; // move down one pixel on the y-axis
                        }
                    }
                } // end unsafe block
            }
        }
        finally
        {
            sourceBitmap.UnlockBits(sourceBitmapData);
            if (markedBitmap != null)
            {
                markedBitmap.UnlockBits(markedBitmapData);
            }
        }
    }

    mResult.SetValue(resultValue);
    mResult.SetIsComplete();
    if (mMarkedImage != null)
    {
        mMarkedImage.SetIsComplete();
    }
    TestExecution().LogMessageWithTimeFromTrigger("IntensityVariation " + Name + " completed");
}