public static readonly int PIXEL_BYTE_WIDTH     = 4; // determined by PixelFormat.Format32bppArgb; http://www.bobpowell.net/lockingbits.htm
        public override void DoWork()
        {
            DateTime startTime = DateTime.Now;

            TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);

            int startX = (int)mStartPoint_X.ValueAsLong();
            int startY = (int)mStartPoint_Y.ValueAsLong();

            int leftEdge   = -1;
            int rightEdge  = -1;
            int topEdge    = -1;
            int bottomEdge = -1;

            if (mPrerequisite != null && !mPrerequisite.ValueAsBoolean())
            {
                TestExecution().LogMessageWithTimeFromTrigger(Name + ": prerequisites not met.  Skipping.");
            }
            else if (mSourceImage.Bitmap == null)
            {
                TestExecution().LogErrorWithTimeFromTrigger("source image for '" + Name + "' does not exist.");
            }
            else if (startX < 0 || startX >= mSourceImage.Bitmap.Width ||
                     startY < 0 || startY >= mSourceImage.Bitmap.Height)
            {
                TestExecution().LogErrorWithTimeFromTrigger("Start position for '" + Name + "' isn't within the image bounds; start=" + startX + "," + startY + "; image size=" + mSourceImage.Bitmap.Width + "x" + mSourceImage.Bitmap.Height);
            }
            else
            {
                int  stepSize            = (int)mStepSize.ValueAsLong();
                bool detailedSearchAtEnd = mDetailedSearch.ValueAsBoolean();

                sourceBitmap = SourceImage.Bitmap;
                if (mImageToMark != null && mImageToMark.Bitmap != null)
                {
                    markedBitmap = mImageToMark.Bitmap;
                }

                try
                {
                    sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PIXEL_FORMAT);
                    if (markedBitmap != null)
                    {
                        markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PIXEL_FORMAT);
                    }
                    sourceStride       = sourceBitmapData.Stride;
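                    // Stride is the full byte width of a locked row including padding, so the offset below is the number of unused bytes to skip at the end of each row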
                    sourceStrideOffset = sourceStride - (sourceBitmapData.Width * PIXEL_BYTE_WIDTH);

                    unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
                    {
                        byte *sourcePointer;
                        byte *markedPointer;

                        sourcePointer  = (byte *)sourceBitmapData.Scan0;                        // init to first byte of image
                        sourcePointer += (startY * sourceStride) + (startX * PIXEL_BYTE_WIDTH); // adjust to current point
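                        // 32bppArgb pixels are stored B,G,R,A in memory, so index 2 = red, 1 = green, 0 = blue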

                        Color theColor = Color.FromArgb(sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                        if (!mColorMatchDefinition.Matches(theColor))
                        {
                            TestExecution().LogErrorWithTimeFromTrigger(Name + " start position isn't within the match color; start=" + startX + "," + startY + "   color=" + theColor);
                        }
                        else
                        {
                            if (mFindBoundingRectangleDefinition.SearchRecord.GetLength(0) < sourceBitmap.Width || mFindBoundingRectangleDefinition.SearchRecord.GetLength(1) < sourceBitmap.Height)
                            {
                                mFindBoundingRectangleDefinition.SearchRecord   = new short[sourceBitmap.Width, sourceBitmap.Height];
                                mFindBoundingRectangleDefinition.LastMarkerUsed = 0;
                            }
                            if (mFindBoundingRectangleDefinition.LastMarkerUsed == short.MaxValue) // markers are stored in the short[,] SearchRecord, so reset before exceeding the short range
                            {
                                for (int x = 0; x < mFindBoundingRectangleDefinition.SearchRecord.GetLength(0); x++)
                                {
                                    for (int y = 0; y < mFindBoundingRectangleDefinition.SearchRecord.GetLength(1); y++)
                                    {
                                        mFindBoundingRectangleDefinition.SearchRecord[x, y] = 0;
                                    }
                                }
                                mFindBoundingRectangleDefinition.LastMarkerUsed = 0;
                            }
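                            // each run uses a fresh marker value, so cells visited by earlier runs can be told apart without clearing the whole SearchRecord array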
                            mFindBoundingRectangleDefinition.LastMarkerUsed++;

                            EdgeSearch topEdgeSearch    = new EdgeSearch(this, mColorMatchDefinition, Axis.X, startY, -1 * stepSize, 0, startX, 0, sourceBitmap.Width - 1, mFindBoundingRectangleDefinition.SearchRecord, mFindBoundingRectangleDefinition.LastMarkerUsed);
                            EdgeSearch bottomEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.X, startY, +1 * stepSize, sourceBitmap.Height - 1, startX, 0, sourceBitmap.Width - 1, mFindBoundingRectangleDefinition.SearchRecord, mFindBoundingRectangleDefinition.LastMarkerUsed);
                            EdgeSearch leftEdgeSearch   = new EdgeSearch(this, mColorMatchDefinition, Axis.Y, startX, -1 * stepSize, 0, startY, 0, sourceBitmap.Height - 1, mFindBoundingRectangleDefinition.SearchRecord, mFindBoundingRectangleDefinition.LastMarkerUsed);
                            EdgeSearch rightEdgeSearch  = new EdgeSearch(this, mColorMatchDefinition, Axis.Y, startX, +1 * stepSize, sourceBitmap.Width - 1, startY, 0, sourceBitmap.Height - 1, mFindBoundingRectangleDefinition.SearchRecord, mFindBoundingRectangleDefinition.LastMarkerUsed);
                            topEdgeSearch.minSideEdge    = leftEdgeSearch;
                            topEdgeSearch.maxSideEdge    = rightEdgeSearch;
                            bottomEdgeSearch.minSideEdge = leftEdgeSearch;
                            bottomEdgeSearch.maxSideEdge = rightEdgeSearch;
                            leftEdgeSearch.minSideEdge   = topEdgeSearch;
                            leftEdgeSearch.maxSideEdge   = bottomEdgeSearch;
                            rightEdgeSearch.minSideEdge  = topEdgeSearch;
                            rightEdgeSearch.maxSideEdge  = bottomEdgeSearch;
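                            // the four searches are cross-linked so each edge can consult the perpendicular searches while it expands (the exact use is inside EdgeSearch)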

                            while (!(topEdgeSearch.Done() && bottomEdgeSearch.Done() && leftEdgeSearch.Done() && rightEdgeSearch.Done()))
                            {
                                if (!topEdgeSearch.Done())
                                {
                                    topEdgeSearch.TestLine();
                                }
                                if (!bottomEdgeSearch.Done())
                                {
                                    bottomEdgeSearch.TestLine();
                                }
                                if (!leftEdgeSearch.Done())
                                {
                                    leftEdgeSearch.TestLine();
                                }
                                if (!rightEdgeSearch.Done())
                                {
                                    rightEdgeSearch.TestLine();
                                }
                            }

                            if (detailedSearchAtEnd)
                            {
                                // TODO: detailed search at the end (e.g. re-running the edge searches with a single-pixel step) is not yet implemented
                                //topEdgeSearch.mStep
                            }

                            leftEdge   = leftEdgeSearch.lastPosWhereObjectSeen;
                            rightEdge  = rightEdgeSearch.lastPosWhereObjectSeen;
                            topEdge    = topEdgeSearch.lastPosWhereObjectSeen;
                            bottomEdge = bottomEdgeSearch.lastPosWhereObjectSeen;

                            /* TODO: rectangle decoration? force user to use ROI?
                             * mResultantRay.SetStartX(centerX);
                             * mResultantRay.SetStartY(centerY);
                             * mResultantRay.SetEndX((int)(centerX + outerRadius * Math.Cos(overallRad)));
                             * mResultantRay.SetEndY((int)(centerY + outerRadius * Math.Sin(overallRad)));
                             * mResultantRay.SetIsComplete();
                             */
                        }
                    } // end unsafe block
                }
                catch (Exception e)
                {
                    TestExecution().LogMessageWithTimeFromTrigger("ERROR: Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
                }
                finally
                {
                    sourceBitmap.UnlockBits(sourceBitmapData);
                    if (markedBitmap != null)
                    {
                        markedBitmap.UnlockBits(markedBitmapData);
                    }
                }
            } // end main block ("else" after all initial setup error checks)
            mLeftBound.SetValue(leftEdge);
            mLeftBound.SetIsComplete();
            mRightBound.SetValue(rightEdge);
            mRightBound.SetIsComplete();
            mTopBound.SetValue(topEdge);
            mTopBound.SetIsComplete();
            mBottomBound.SetValue(bottomEdge);
            mBottomBound.SetIsComplete();
            DateTime doneTime    = DateTime.Now;
            TimeSpan computeTime = doneTime - startTime;

            TestExecution().LogMessageWithTimeFromTrigger(Name + " found bounding rectangle; left=" + leftEdge + " right=" + rightEdge + " top=" + topEdge + " bottom=" + bottomEdge);

            if (mAutoSave)
            {
                try
                {
                    string filePath = ((FindBoundingRectangleDefinition)Definition()).AutoSavePath;
                    mSourceImage.Save(filePath, Name, true);
                    if (mImageToMark != null)
                    {
                        mImageToMark.Save(filePath, Name, "_marked_" + leftEdge + "_" + rightEdge + "_" + topEdge + "_" + bottomEdge);
                    }
                    TestExecution().LogMessageWithTimeFromTrigger("Snapshot saved");
                }
                catch (ArgumentException e)
                {
                    Project().Window().logMessage("ERROR: " + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger(e.Message);
                }
                catch (Exception e)
                {
                    Project().Window().logMessage("ERROR: Unable to AutoSave snapshot from " + Name + ".  Ensure path valid and disk not full.  Low-level message=" + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger("Unable to AutoSave snapshot from " + Name + ".  Ensure path valid and disk not full.");
                }
            }
            TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + "  | took " + computeTime.TotalMilliseconds + "ms");
        }
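        /*
         * Illustrative sketch (not part of the original class): the edge searches above address pixels in the
         * locked buffer as Scan0 + y*Stride + x*bytesPerPixel.  A minimal standalone reader using that same
         * addressing, assuming PixelFormat.Format32bppArgb (4 bytes per pixel, stored B,G,R,A in memory):
         */
        private static unsafe Color ReadPixel32Argb(BitmapData data, int x, int y)
        {
            byte *p = (byte *)data.Scan0;                    // first byte of the locked buffer
            p += (y * data.Stride) + (x * 4);                // Stride includes any row padding
            return Color.FromArgb(p[3], p[2], p[1], p[0]);   // A, R, G, B
        }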
        public override void DoWork()
        {
            if (mCollectImages)
            {
                try
                {
                    mSourceImage.Save(((PatternMatchOfGrayVariationDefinition)Definition()).LearningPath, Name, true);
                    TestExecution().LogMessageWithTimeFromTrigger(Name + " collected image in learning folder.  Skipping test.");
                }
                catch (ArgumentException e)
                {
                    Project().Window().logMessage("ERROR: " + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger(e.Message);
                }
                catch (Exception e)
                {
                    string errMsg = "Unable to save collected image.  Ensure path valid and disk not full.";
                    Project().Window().logMessage("ERROR: " + errMsg + "  Low-level message=" + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger(errMsg);
                }
                mResult.SetValue(score);
                mResult.SetIsComplete();
                return;
            }

            TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " started");

            /*
             * if (Definition().ScoreFilter != null)
             * {
             *  mScoreFilter = testExecution.GetScoreFilter(theDefinition.ScoreFilter.Name);
             * }
             */

            Bitmap sourceBitmap = SourceImage.Bitmap;
            Bitmap markedBitmap = null;
            PatternMatchOfGrayVariationDefinition theDef = (PatternMatchOfGrayVariationDefinition)Definition();

            if (theDef.mPatternMinDownValues == null || theDef.mPatternMaxDownValues == null)
            {
                theDef.LoadPatterns(false);
            }
            Bitmap patternMinDownValues  = theDef.mPatternMinDownValues;
            Bitmap patternMaxDownValues  = theDef.mPatternMaxDownValues;
            Bitmap patternMinUpValues    = theDef.mPatternMinUpValues;
            Bitmap patternMaxUpValues    = theDef.mPatternMaxUpValues;
            Bitmap patternMinRightValues = theDef.mPatternMinRightValues;
            Bitmap patternMaxRightValues = theDef.mPatternMaxRightValues;
            Bitmap patternMinLeftValues  = theDef.mPatternMinLeftValues;
            Bitmap patternMaxLeftValues  = theDef.mPatternMaxLeftValues;
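            // the eight pattern bitmaps hold, per pixel, the minimum and maximum gray variation seen during training for each scan direction (down/up along Y, right/left along X)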

            if (patternMinDownValues == null || patternMaxDownValues == null)
            {
                throw new ArgumentException("Pattern to match isn't defined.");
            }

            if (mMarkedImage != null && sourceBitmap != null)
            {
                mMarkedImage.SetImage(new Bitmap(sourceBitmap));
                markedBitmap = mMarkedImage.Bitmap;
                TestExecution().LogMessageWithTimeFromTrigger("Created copy of image for markings");
            }

            score = 0;
            if (sourceBitmap != null)
            {
                // for LockBits see http://www.bobpowell.net/lockingbits.htm & http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
                BitmapData sourceBitmapData = null;
                BitmapData patternMinDownValuesBitmapData  = null;
                BitmapData patternMaxDownValuesBitmapData  = null;
                BitmapData patternMinUpValuesBitmapData    = null;
                BitmapData patternMaxUpValuesBitmapData    = null;
                BitmapData patternMinRightValuesBitmapData = null;
                BitmapData patternMaxRightValuesBitmapData = null;
                BitmapData patternMinLeftValuesBitmapData  = null;
                BitmapData patternMaxLeftValuesBitmapData  = null;

                if (mScoreFilter != null)
                {
                    mScoreFilter.SetImageSize(mSourceImage.Bitmap.Width, mSourceImage.Bitmap.Height);
                }

                try
                {
                    sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_FORMAT);
                    patternMinDownValuesBitmapData  = patternMinDownValues.LockBits(new Rectangle(0, 0, patternMinDownValues.Width, patternMinDownValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
                    patternMaxDownValuesBitmapData  = patternMaxDownValues.LockBits(new Rectangle(0, 0, patternMaxDownValues.Width, patternMaxDownValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
                    patternMinUpValuesBitmapData    = patternMinUpValues.LockBits(new Rectangle(0, 0, patternMinUpValues.Width, patternMinUpValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
                    patternMaxUpValuesBitmapData    = patternMaxUpValues.LockBits(new Rectangle(0, 0, patternMaxUpValues.Width, patternMaxUpValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
                    patternMinRightValuesBitmapData = patternMinRightValues.LockBits(new Rectangle(0, 0, patternMinRightValues.Width, patternMinRightValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
                    patternMaxRightValuesBitmapData = patternMaxRightValues.LockBits(new Rectangle(0, 0, patternMaxRightValues.Width, patternMaxRightValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
                    patternMinLeftValuesBitmapData  = patternMinLeftValues.LockBits(new Rectangle(0, 0, patternMinLeftValues.Width, patternMinLeftValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
                    patternMaxLeftValuesBitmapData  = patternMaxLeftValues.LockBits(new Rectangle(0, 0, patternMaxLeftValues.Width, patternMaxLeftValues.Height), ImageLockMode.ReadOnly, PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_FORMAT);
                    if (markedBitmap != null)
                    {
                        markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
                    }
                    sourceStride = sourceBitmapData.Stride;
                    int sourceStrideOffset = sourceStride - (sourceBitmapData.Width * PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH);

                    int patternStride       = patternMinDownValuesBitmapData.Stride;
                    int patternStrideOffset = patternStride - (patternMinDownValuesBitmapData.Width * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH);

                    //Color color;
                    int grayValue;
                    int grayValue2;
                    threshhold        = mVariationThreshhold.ValueAsLong();
                    sloppiness        = mSloppiness.ValueAsDecimal() / 100.0;
                    minWindow         = Math.Max(1, mMinWindow.ValueAsLong());
                    scoreThreshold    = mScoreThreshold.ValueAsLong();
                    brightPixelFactor = mBrightPixelFactor.ValueAsDecimal();
                    darkPixelFactor   = mDarkPixelFactor.ValueAsDecimal();
                    int varSum;
                    int minVarForThisPixel;
                    int maxVarForThisPixel;

                    Point currentPoint = new Point(-1, -1);
                    int   lastX        = -1;
                    int   lastY        = -1;

                    int[] variationArray    = new int[PatternMatchOfGrayVariationDefinition.PixelsPerTest];
                    int   positionsUntested = 0;
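                    // variationArray is a sliding window of the last PixelsPerTest per-pixel gray differences; its sum is the variation value compared against the pattern's min/max,
                    // and positionsUntested counts pixels skipped while the window fills at the start of a run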

                    mFirstAxisScores = new long[sourceBitmap.Width, sourceBitmap.Height];

                    unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
                    {
                        byte *sourcePointer;
                        byte *sourcePointer2;

                        byte *patternMinValuesPointer;
                        byte *patternMaxValuesPointer;

                        TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " testing Y Axis");
                        mROI.GetFirstPointOnYAxis(mSourceImage, ref currentPoint);

                        while (currentPoint.X != -1 && currentPoint.Y != -1)
                        {
                            sourcePointer  = (byte *)sourceBitmapData.Scan0;                                                                                       // init to first byte of image
                            sourcePointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH); // adjust to current point
                            //color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]); // Array index 0 is blue, 1 is green, 2 is red, 3 is alpha
                            grayValue = (int)(0.3 * sourcePointer[2] + 0.59 * sourcePointer[1] + 0.11 * sourcePointer[0]);     // weighted sum of 30% red, 59% green, and 11% blue; the weights reflect the eye's relative sensitivity to each primary color (most sensitive to green, least to blue)
                            // http://www.bobpowell.net/grayscale.htm
                            // https://forums.microsoft.com/MSDN/ShowPost.aspx?PostID=440425&SiteID=1

                            // check pixel above
                            sourcePointer2 = sourcePointer - sourceStride;                                                         // TODO: ensure y>0
                            grayValue2     = (int)(0.3 * sourcePointer2[2] + 0.59 * sourcePointer2[1] + 0.11 * sourcePointer2[0]); // same 30%/59%/11% red/green/blue weighting as above
                            varSum         = grayValue - grayValue2;                                                               // NOTE: using '=' to init varSum for this pixel

                            if (currentPoint.X != lastX || currentPoint.Y != lastY + 1)
                            {
                                // init variationArray
                                for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest; ++i)
                                {
                                    variationArray[i] = PatternMatchOfGrayVariationDefinition.VALUE_NOT_DEFINED;
                                }
                                if (positionsUntested > 0)
                                {
                                    // TODO: if this isn't 0, then mark untested pixels a certain color?
                                    // this should only happen when the ROI is less than PixelsPerTest high at a particular X value
                                    TestExecution().LogMessageWithTimeFromTrigger("WARNING: " + positionsUntested + " pixels were not tested above " + lastX + "," + lastY);
                                }
                                positionsUntested = 0;
                            }

                            // shift variationArray
                            for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest - 1; ++i)
                            {
                                variationArray[i] = variationArray[i + 1];
                            }

                            // store most recent value
                            variationArray[PatternMatchOfGrayVariationDefinition.PixelsPerTest - 1] = varSum;

                            if (variationArray[0] == PatternMatchOfGrayVariationDefinition.VALUE_NOT_DEFINED)
                            {
                                positionsUntested++;
                            }
                            else
                            {
                                int variationSum = 0;
                                // compute sum variation over X pixel transitions
                                for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest; ++i)
                                {
                                    variationSum += variationArray[i];
                                }

                                variationSum = Math.Max(-127, Math.Min(128, variationSum)); // clamp to [-127, 128] so that, offset by 127, the value fits in one byte (0..255)

                                // test pixel
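                                // pattern bytes store (variation + 127), so subtracting 127 below recovers the signed min/max variation for this pixel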
                                patternMinValuesPointer  = (byte *)patternMinDownValuesBitmapData.Scan0;                                                                         // init to first byte of image
                                patternMinValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                                minVarForThisPixel       = patternMinValuesPointer[0] - 127;

                                patternMaxValuesPointer  = (byte *)patternMaxDownValuesBitmapData.Scan0;                                                                         // init to first byte of image
                                patternMaxValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                                maxVarForThisPixel       = patternMaxValuesPointer[0] - 127;

                                TestPixel(currentPoint.X, currentPoint.Y, variationSum, minVarForThisPixel, maxVarForThisPixel, true);

                                if (positionsUntested > 0)
                                {
                                    // if we missed testing a pixel above us (because it was near an ROI or image top edge where there weren't pixels above it to compute from), we test them here computing in the opposite direction (up values vs down values)

                                    // current pixel - PixelsPerTest = -variationSum
                                    int testPositionY = currentPoint.Y - PatternMatchOfGrayVariationDefinition.PixelsPerTest;
                                    if (testPositionY < 0)
                                    {
                                        throw new ArgumentException("Fatal logic error in test 93420rf");
                                    }

                                    patternMinValuesPointer  = (byte *)patternMinUpValuesBitmapData.Scan0;                                                                          // init to first byte of image
                                    patternMinValuesPointer += (testPositionY * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                                    minVarForThisPixel       = patternMinValuesPointer[0] - 127;

                                    patternMaxValuesPointer  = (byte *)patternMaxUpValuesBitmapData.Scan0;                                                                          // init to first byte of image
                                    patternMaxValuesPointer += (testPositionY * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                                    maxVarForThisPixel       = patternMaxValuesPointer[0] - 127;

                                    TestPixel(currentPoint.X, testPositionY, -variationSum, minVarForThisPixel, maxVarForThisPixel, true);
                                    positionsUntested--;
                                }
                            }

                            lastX = currentPoint.X;
                            lastY = currentPoint.Y;
                            mROI.GetNextPointOnYAxis(mSourceImage, ref currentPoint);
                        }

                        TestExecution().LogMessageWithTimeFromTrigger("PatternMatch " + Name + " testing X Axis");
                        mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);

                        while (currentPoint.X != -1 && currentPoint.Y != -1)
                        {
                            sourcePointer  = (byte *)sourceBitmapData.Scan0;                                                                                       // init to first byte of image
                            sourcePointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH); // adjust to current point
                            //color = Color.FromArgb(sourcePointer[3], sourcePointer[2], sourcePointer[1], sourcePointer[0]); // Array index 0 is blue, 1 is green, 2 is red, 3 is alpha
                            grayValue = (int)(0.3 * sourcePointer[2] + 0.59 * sourcePointer[1] + 0.11 * sourcePointer[0]);     // weighted sum of 30% red, 59% green, and 11% blue; the weights reflect the eye's relative sensitivity to each primary color (most sensitive to green, least to blue)
                            // http://www.bobpowell.net/grayscale.htm
                            // https://forums.microsoft.com/MSDN/ShowPost.aspx?PostID=440425&SiteID=1

                            // check pixel behind
                            sourcePointer2 = sourcePointer - PatternMatchOfGrayVariationDefinition.TRAINING_PIXEL_BYTE_WIDTH;      // TODO: ensure x>0
                            grayValue2     = (int)(0.3 * sourcePointer2[2] + 0.59 * sourcePointer2[1] + 0.11 * sourcePointer2[0]); // same 30%/59%/11% red/green/blue weighting as above
                            varSum         = grayValue - grayValue2;                                                               // NOTE: using '=' to init varSum for this pixel

                            if (currentPoint.Y != lastY || currentPoint.X != lastX + 1)
                            {
                                // init variationArray
                                for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest; ++i)
                                {
                                    variationArray[i] = PatternMatchOfGrayVariationDefinition.VALUE_NOT_DEFINED;
                                }
                                if (positionsUntested > 0)
                                {
                                    // TODO: if this isn't 0, then mark untested pixels a certain color?
                                    // this should only happen when the ROI is less than PixelsPerTest high at a particular X value
                                    TestExecution().LogMessageWithTimeFromTrigger("WARNING: " + positionsUntested + " pixels were not tested behind " + lastX + "," + lastY);
                                }
                                positionsUntested = 0;
                            }

                            // shift variationArray
                            for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest - 1; ++i)
                            {
                                variationArray[i] = variationArray[i + 1];
                            }

                            // store most recent value
                            variationArray[PatternMatchOfGrayVariationDefinition.PixelsPerTest - 1] = varSum;

                            if (variationArray[0] == PatternMatchOfGrayVariationDefinition.VALUE_NOT_DEFINED)
                            {
                                positionsUntested++;
                            }
                            else
                            {
                                int variationSum = 0;
                                // compute sum variation over X pixel transitions
                                for (int i = 0; i < PatternMatchOfGrayVariationDefinition.PixelsPerTest; ++i)
                                {
                                    variationSum += variationArray[i];
                                }

                                variationSum = Math.Max(-127, Math.Min(128, variationSum)); // clamp to [-127, 128] so that, offset by 127, the value fits in one byte (0..255)

                                // test pixel
                                patternMinValuesPointer  = (byte *)patternMinRightValuesBitmapData.Scan0;                                                                        // init to first byte of image (X axis uses the right-direction pattern)
                                patternMinValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                                minVarForThisPixel       = patternMinValuesPointer[0] - 127;

                                patternMaxValuesPointer  = (byte *)patternMaxRightValuesBitmapData.Scan0;                                                                        // init to first byte of image (X axis uses the right-direction pattern)
                                patternMaxValuesPointer += (currentPoint.Y * patternStride) + (currentPoint.X * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                                maxVarForThisPixel       = patternMaxValuesPointer[0] - 127;

                                TestPixel(currentPoint.X, currentPoint.Y, variationSum, minVarForThisPixel, maxVarForThisPixel, false);

                                if (positionsUntested > 0)
                                {
                                    // if we missed testing a pixel behind us (because it was near an ROI or image left edge where there weren't pixels behind it to compute from), we test them here computing in the opposite direction (left values vs right values)

                                    // current pixel - PixelsPerTest = -variationSum
                                    int testPositionX = currentPoint.X - PatternMatchOfGrayVariationDefinition.PixelsPerTest;
                                    if (testPositionX < 0)
                                    {
                                        throw new ArgumentException("Fatal logic error in test 93430rf");
                                    }

                                    patternMinValuesPointer  = (byte *)patternMinLeftValuesBitmapData.Scan0;                                                                        // init to first byte of image (opposite direction, so left-values pattern)
                                    patternMinValuesPointer += (currentPoint.Y * patternStride) + (testPositionX * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                                    minVarForThisPixel       = patternMinValuesPointer[0] - 127;

                                    patternMaxValuesPointer  = (byte *)patternMaxLeftValuesBitmapData.Scan0;                                                                        // init to first byte of image (opposite direction, so left-values pattern)
                                    patternMaxValuesPointer += (currentPoint.Y * patternStride) + (testPositionX * PatternMatchOfGrayVariationDefinition.PATTERN_PIXEL_BYTE_WIDTH); // adjust to current point
                                    maxVarForThisPixel       = patternMaxValuesPointer[0] - 127;

                                    TestPixel(testPositionX, currentPoint.Y, -variationSum, minVarForThisPixel, maxVarForThisPixel, false);
                                    positionsUntested--;
                                }
                            }

                            lastX = currentPoint.X;
                            lastY = currentPoint.Y;
                            mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
                        }
                    } // end unsafe block
                }
                catch (Exception e)
                {
                    TestExecution().LogMessageWithTimeFromTrigger("ERROR: Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
                }
                finally
                {
                    mFirstAxisScores = null;

                    sourceBitmap.UnlockBits(sourceBitmapData);
                    patternMinDownValues.UnlockBits(patternMinDownValuesBitmapData);
                    patternMaxDownValues.UnlockBits(patternMaxDownValuesBitmapData);
                    patternMinUpValues.UnlockBits(patternMinUpValuesBitmapData);
                    patternMaxUpValues.UnlockBits(patternMaxUpValuesBitmapData);
                    patternMinRightValues.UnlockBits(patternMinRightValuesBitmapData);
                    patternMaxRightValues.UnlockBits(patternMaxRightValuesBitmapData);
                    patternMinLeftValues.UnlockBits(patternMinLeftValuesBitmapData);
                    patternMaxLeftValues.UnlockBits(patternMaxLeftValuesBitmapData);
                    if (markedBitmap != null)
                    {
                        markedBitmap.UnlockBits(markedBitmapData);
                    }
                }
            }

            if (mMarkedImage != null && mScoreFilter != null && mScoreFilter.Score > 0)
            {
                mScoreFilter.MarkImage(mMarkedImage.Bitmap, Color.Red);
            }

            mResult.SetValue(score);
            mResult.SetIsComplete();
            if (mMarkedImage != null)
            {
                mMarkedImage.SetIsComplete();
            }
            string msg = "PatternMatch " + Name + " completed; score=" + score;

            TestExecution().LogMessageWithTimeFromTrigger(msg);
            TestExecution().LogSummaryMessage(msg);

            if (score >= mAutoSaveOnScore || (mScoreFilter != null && mScoreFilter.Score >= mAutoSaveOnCellScore))
            {
                try
                {
                    string filePath = ((PatternMatchOfGrayVariationDefinition)Definition()).AutoSavePath;
                    mSourceImage.Save(filePath, Name, true);
                    if (mMarkedImage != null)
                    {
                        mMarkedImage.Save(filePath, Name, "_marked_" + score + "_" + mScoreFilter.Score);
                    }
                    TestExecution().LogMessageWithTimeFromTrigger("Snapshot saved");
                }
                catch (ArgumentException e)
                {
                    Project().Window().logMessage("ERROR: " + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger(e.Message);
                }
                catch (Exception e)
                {
                    Project().Window().logMessage("ERROR: Unable to AutoSave snapshot from " + Name + ".  Ensure path valid and disk not full.  Low-level message=" + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger("Unable to AutoSave snapshot from " + Name + ".  Ensure path valid and disk not full.");
                }
            }
        }
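        /*
         * Illustrative sketch (not part of the original class): the 0.3/0.59/0.11 gray conversion repeated in the
         * loops above, factored into a helper for clarity.  Assumes the pointer addresses a 32bppArgb pixel
         * (bytes stored B,G,R,A).
         */
        private static unsafe int GrayValue(byte *pixel)
        {
            // luminance-style weighting: the eye is most sensitive to green, least to blue
            return (int)(0.3 * pixel[2] + 0.59 * pixel[1] + 0.11 * pixel[0]);
        }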
        public static readonly int PIXEL_BYTE_WIDTH     = 4; // determined by PixelFormat.Format32bppArgb; http://www.bobpowell.net/lockingbits.htm
        public override void DoWork()
        {
            /* TODO: OPTIMIZATIONS:
             * - compute surfaceNoiseLevel based on image analysis
             * - make debug output to log optional
             * - surface/transition decorations (biggest problem is that there can be a variable number...only for first edge to start?)
             * - for marked image, save decorations...don't copy/paint_on image
             */

            DateTime startTime = DateTime.Now;

            TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);

            int resultX = -1;
            int resultY = -1;

            if (mSourceImage.Bitmap == null)
            {
                TestExecution().LogMessage("ERROR: source image for '" + Name + "' does not exist.");
            }
            else
            {
                Bitmap     sourceBitmap     = SourceImage.Bitmap;
                BitmapData sourceBitmapData = null;


                try
                {
                    sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PIXEL_FORMAT);
                    int sourceStride       = sourceBitmapData.Stride;
                    int sourceStrideOffset = sourceStride - (sourceBitmapData.Width * PIXEL_BYTE_WIDTH);

                    int brightnessThreshold = (int)mBrightnessThreshold.ValueAsLong();

                    Point currentPoint = new Point(-1, -1);
                    mROI.GetFirstPointOnXAxis(mSourceImage, ref currentPoint);

                    ValueGrouper xGrouper = new ValueGrouper(0, 255, 50);
                    ValueGrouper yGrouper = new ValueGrouper(0, 255, 50);
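                    // pixels at or above the brightness threshold vote their X and Y coordinates into histogram groups; the densest group (with its neighbors) is taken as the bright-spot center
                    // TODO: confirm the 0..255 grouping range covers images wider or taller than 256 pixels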
                    unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
                    {
                        byte *sourcePointer;

                        while (currentPoint.X != -1 && currentPoint.Y != -1)
                        {
                            sourcePointer  = (byte *)sourceBitmapData.Scan0;                                                    // init to first byte of image
                            sourcePointer += (currentPoint.Y * sourceStride) + (currentPoint.X * PIXEL_BYTE_WIDTH);             // adjust to current point
                            pixelGrayValue = (int)(0.3 * sourcePointer[2] + 0.59 * sourcePointer[1] + 0.11 * sourcePointer[0]); // weighted sum of 30% red, 59% green, and 11% blue; the weights reflect the eye's relative sensitivity to each primary color (most sensitive to green, least to blue)
                            // http://www.bobpowell.net/grayscale.htm
                            // https://forums.microsoft.com/MSDN/ShowPost.aspx?PostID=440425&SiteID=1

                            //TestExecution().LogMessage(currentPoint.X + "," + currentPoint.Y + "  " + pixelGrayValue + " " + brightnessThreshold);
                            if (pixelGrayValue >= brightnessThreshold)
                            {
                                xGrouper.AddValue(currentPoint.X);
                                yGrouper.AddValue(currentPoint.Y);
                            }

                            mROI.GetNextPointOnXAxis(mSourceImage, ref currentPoint);
                        }
                        TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] finished analyzing pixels");
                    } // end unsafe block

                    for (int z = 0; z < xGrouper.NumGroups; z++)
                    {
                        ValueGrouper.GroupStats groupStats = xGrouper.GetGroup(z);
                        TestExecution().LogMessage(groupStats.start + " " + groupStats.end + " " + groupStats.count + " " + groupStats.Average());
                    }
                    for (int z = 0; z < yGrouper.NumGroups; z++)
                    {
                        ValueGrouper.GroupStats groupStats = yGrouper.GetGroup(z);
                        TestExecution().LogMessage(groupStats.start + " " + groupStats.end + " " + groupStats.count + " " + groupStats.Average());
                    }
                    ValueGrouper.GroupStats biggestXGroup = xGrouper.BiggestGroupWithNeighbors();
                    if (biggestXGroup != null)
                    {
                        resultX = biggestXGroup.Average();
                    }
                    ValueGrouper.GroupStats biggestYGroup = yGrouper.BiggestGroupWithNeighbors();
                    if (biggestYGroup != null)
                    {
                        resultY = biggestYGroup.Average();
                    }
                }
                catch (Exception e)
                {
                    TestExecution().LogMessageWithTimeFromTrigger("ERROR: Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
                }
                finally
                {
                    sourceBitmap.UnlockBits(sourceBitmapData);
                }
            } // end main block ("else" after all initial setup error checks)
            mBrightSpot_X.SetValue(resultX);
            mBrightSpot_Y.SetValue(resultY);
            mBrightSpot_X.SetIsComplete();
            mBrightSpot_Y.SetIsComplete();
            DateTime doneTime    = DateTime.Now;
            TimeSpan computeTime = doneTime - startTime;

            TestExecution().LogMessageWithTimeFromTrigger(Name + " computed bright spot at " + resultX + "," + resultY);

            if (mAutoSave)
            {
                try
                {
                    string filePath = ((FindBrightestSpotDefinition)Definition()).AutoSavePath;
                    mSourceImage.Save(filePath, Name, true);
                    TestExecution().LogMessageWithTimeFromTrigger("Snapshot saved");
                }
                catch (ArgumentException e)
                {
                    Project().Window().logMessage("ERROR: " + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger(e.Message);
                }
                catch (Exception e)
                {
                    Project().Window().logMessage("ERROR: Unable to AutoSave snapshot from " + Name + ".  Ensure path valid and disk not full.  Low-level message=" + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger("Unable to AutoSave snapshot from " + Name + ".  Ensure path valid and disk not full.");
                }
            }

            TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + "  | took " + computeTime.TotalMilliseconds + "ms");
        }
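        /*
         * Illustrative sketch (not part of the original class): a plain centroid-of-bright-pixels calculation for
         * comparison with the ValueGrouper histogram approach above.  Hypothetical helper; gray[x, y] is assumed
         * to hold the same 0.3/0.59/0.11 gray value, and the threshold matches pixelGrayValue >= brightnessThreshold.
         */
        private static Point BrightCentroid(int[,] gray, int threshold)
        {
            long sumX = 0, sumY = 0, count = 0;
            for (int x = 0; x < gray.GetLength(0); x++)
            {
                for (int y = 0; y < gray.GetLength(1); y++)
                {
                    if (gray[x, y] >= threshold) // bright pixel: include it in the centroid
                    {
                        sumX += x;
                        sumY += y;
                        count++;
                    }
                }
            }
            return count == 0 ? new Point(-1, -1) : new Point((int)(sumX / count), (int)(sumY / count));
        }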
        public static readonly int PIXEL_BYTE_WIDTH     = 4; // determined by PixelFormat.Format32bppArgb; http://www.bobpowell.net/lockingbits.htm
        public override void DoWork()
        {
            DateTime startTime = DateTime.Now;

            TestExecution().LogMessageWithTimeFromTrigger("[" + Name + "] started at " + startTime + Environment.NewLine);

            int leftEdge   = -1;
            int rightEdge  = -1;
            int topEdge    = -1;
            int bottomEdge = -1;

            int    minObjectHeight            = -1;
            int    maxObjectHeight            = -1;
            int    minObjectWidth             = -1;
            int    maxObjectWidth             = -1;
            double allowedObjectSizeVariation = 0;

            if (mAllowedSizeVariation != null)
            {
                allowedObjectSizeVariation = mAllowedSizeVariation.ValueAsDecimal() / 100.0;
            }

            if (mExpectedObjectHeight != null)
            {
                minObjectHeight = (int)(mExpectedObjectHeight.ValueAsDecimal() * (1 - allowedObjectSizeVariation));
                maxObjectHeight = (int)(mExpectedObjectHeight.ValueAsDecimal() * (1 + allowedObjectSizeVariation));
            }
            if (mExpectedObjectWidth != null)
            {
                minObjectWidth = (int)(mExpectedObjectWidth.ValueAsDecimal() * (1 - allowedObjectSizeVariation));
                maxObjectWidth = (int)(mExpectedObjectWidth.ValueAsDecimal() * (1 + allowedObjectSizeVariation));
            }

            if (mMinObjectHeight != null)
            {
                minObjectHeight = (int)mMinObjectHeight.ValueAsLong();
            }
            if (mMinObjectWidth != null)
            {
                minObjectWidth = (int)mMinObjectWidth.ValueAsLong();
            }
            if (mMaxObjectHeight != null)
            {
                maxObjectHeight = (int)mMaxObjectHeight.ValueAsLong();
            }
            if (mMaxObjectWidth != null)
            {
                maxObjectWidth = (int)mMaxObjectWidth.ValueAsLong();
            }

            if (minObjectHeight < 0)
            {
                TestExecution().LogErrorWithTimeFromTrigger("A minimum height for the object hasn't been defined within '" + Name + "'.");
            }
            else if (maxObjectHeight < 0)
            {
                TestExecution().LogErrorWithTimeFromTrigger("A maximum height for the object hasn't been defined within '" + Name + "'.");
            }
            else if (minObjectWidth < 0)
            {
                TestExecution().LogErrorWithTimeFromTrigger("A minimum width for the object hasn't been defined within '" + Name + "'.");
            }
            else if (maxObjectWidth < 0)
            {
                TestExecution().LogErrorWithTimeFromTrigger("A maximum width for the object hasn't been defined within '" + Name + "'.");
            }
            else if (mPrerequisite != null && !mPrerequisite.ValueAsBoolean())
            {
                TestExecution().LogMessageWithTimeFromTrigger(Name + ": prerequisites not met.  Skipping.");
            }
            else if (mSourceImage.Bitmap == null)
            {
                TestExecution().LogErrorWithTimeFromTrigger("Source image for '" + Name + "' does not exist.");
            }
            else
            {
                int searchXStep = Math.Max(1, (int)(minObjectWidth * 0.3));
                int searchYStep = Math.Max(1, (int)(minObjectHeight * 0.3));
                int startX      = (int)mROI.Left + searchXStep;
                int startY      = (int)mROI.Top + searchYStep;

                if (startX < 0 || startX >= mSourceImage.Bitmap.Width || startY < 0 || startY >= mSourceImage.Bitmap.Height)
                {
                    TestExecution().LogErrorWithTimeFromTrigger("Start position for '" + Name + "' isn't within the image bounds; start=" + startX + "," + startY + "; image size=" + mSourceImage.Bitmap.Width + "x" + mSourceImage.Bitmap.Height);
                }
                else
                {
                    int  stepSize            = (int)mStepSize.ValueAsLong();
                    bool detailedSearchAtEnd = mDetailedSearch.ValueAsBoolean();

                    sourceBitmap = SourceImage.Bitmap;
                    if (mImageMarkingEnabled.ValueAsBoolean() && mImageToMark != null && mImageToMark.Bitmap != null)
                    {
                        markedBitmap = mImageToMark.Bitmap;
                    }

                    // TODO: replace LockBits implementation with array pointer

                    try
                    {
                        sourceBitmapData = sourceBitmap.LockBits(new Rectangle(0, 0, sourceBitmap.Width, sourceBitmap.Height), ImageLockMode.ReadOnly, PIXEL_FORMAT);
                        if (markedBitmap != null)
                        {
                            markedBitmapData = markedBitmap.LockBits(new Rectangle(0, 0, markedBitmap.Width, markedBitmap.Height), ImageLockMode.ReadWrite, PIXEL_FORMAT);
                        }
                        sourceStride       = sourceBitmapData.Stride;
                        sourceStrideOffset = sourceStride - (sourceBitmapData.Width * PIXEL_BYTE_WIDTH);

                        unsafe // see http://www.codeproject.com/csharp/quickgrayscale.asp?df=100&forumid=293759&select=2214623&msg=2214623
                        {
                            byte *sourcePointer;
                            byte *markedPointer;

                            if (mFindBlobOfSizeAndColorDefinition.SearchRecord.GetLength(0) < sourceBitmap.Width || mFindBlobOfSizeAndColorDefinition.SearchRecord.GetLength(1) < sourceBitmap.Height)
                            {
                                mFindBlobOfSizeAndColorDefinition.SearchRecord   = new short[sourceBitmap.Width, sourceBitmap.Height];
                                mFindBlobOfSizeAndColorDefinition.LastMarkerUsed = 0;
                            }
                            if (mFindBlobOfSizeAndColorDefinition.LastMarkerUsed == short.MaxValue)
                            {
                                short initialValue = short.MinValue + 1; // we don't use short.MinValue since that is a special case (see ClearSearchRecordArea(); before switching from int to short, we were initializing to 0 here and -1 in ClearSearchRecordArea())
                                for (int x = 0; x < mFindBlobOfSizeAndColorDefinition.SearchRecord.GetLength(0); x++)
                                {
                                    for (int y = 0; y < mFindBlobOfSizeAndColorDefinition.SearchRecord.GetLength(1); y++)
                                    {
                                        mFindBlobOfSizeAndColorDefinition.SearchRecord[x, y] = initialValue;
                                    }
                                }
                                mFindBlobOfSizeAndColorDefinition.LastMarkerUsed = 0;
                            }
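                            // stamp this run with a fresh marker so cells visited by earlier runs don't need to be cleared from SearchRecord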
                            mFindBlobOfSizeAndColorDefinition.LastMarkerUsed++;

                            for (int x = startX; x < ROI.Right && leftEdge < 0; x += searchXStep)
                            {
                                for (int y = startY; y < ROI.Bottom && leftEdge < 0; y += searchYStep)
                                {
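                                    // paint each probe point lime on the marked image so the coarse search grid is visible when debugging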
                                    if (markedBitmap != null)
                                    {
                                        markedPointer    = (byte *)markedBitmapData.Scan0;                         // init to first byte of image
                                        markedPointer   += (y * markedBitmapData.Stride) + (x * PIXEL_BYTE_WIDTH); // adjust to current point (use the marked bitmap's own stride)
                                        markedPointer[3] = Color.Lime.A;
                                        markedPointer[2] = Color.Lime.R;
                                        markedPointer[1] = Color.Lime.G;
                                        markedPointer[0] = Color.Lime.B;
                                    }

                                    bool failed = false;
                                    sourcePointer  = (byte *)sourceBitmapData.Scan0;              // init to first byte of image
                                    sourcePointer += (y * sourceStride) + (x * PIXEL_BYTE_WIDTH); // adjust to current point

                                    Color theColor = Color.FromArgb(sourcePointer[2], sourcePointer[1], sourcePointer[0]);
                                    if (mColorMatchDefinition.Matches(theColor))
                                    {
                                        TestExecution().LogMessageWithTimeFromTrigger(Name + ": found match at " + x + "," + y + "; beginning search of area");

                                        EdgeSearch topEdgeSearch    = new EdgeSearch(this, mColorMatchDefinition, Axis.X, y, -1 * stepSize, mROI.Top, x, mROI.Left, mROI.Right, mFindBlobOfSizeAndColorDefinition.SearchRecord, mFindBlobOfSizeAndColorDefinition.LastMarkerUsed);
                                        EdgeSearch bottomEdgeSearch = new EdgeSearch(this, mColorMatchDefinition, Axis.X, y, +1 * stepSize, mROI.Bottom, x, mROI.Left, mROI.Right, mFindBlobOfSizeAndColorDefinition.SearchRecord, mFindBlobOfSizeAndColorDefinition.LastMarkerUsed);
                                        EdgeSearch leftEdgeSearch   = new EdgeSearch(this, mColorMatchDefinition, Axis.Y, x, -1 * stepSize, mROI.Left, y, mROI.Top, mROI.Bottom, mFindBlobOfSizeAndColorDefinition.SearchRecord, mFindBlobOfSizeAndColorDefinition.LastMarkerUsed);
                                        EdgeSearch rightEdgeSearch  = new EdgeSearch(this, mColorMatchDefinition, Axis.Y, x, +1 * stepSize, mROI.Right, y, mROI.Top, mROI.Bottom, mFindBlobOfSizeAndColorDefinition.SearchRecord, mFindBlobOfSizeAndColorDefinition.LastMarkerUsed);
                                        topEdgeSearch.minSideEdge     = leftEdgeSearch;
                                        topEdgeSearch.maxSideEdge     = rightEdgeSearch;
                                        topEdgeSearch.opposingEdge    = bottomEdgeSearch;
                                        bottomEdgeSearch.minSideEdge  = leftEdgeSearch;
                                        bottomEdgeSearch.maxSideEdge  = rightEdgeSearch;
                                        bottomEdgeSearch.opposingEdge = topEdgeSearch;
                                        leftEdgeSearch.minSideEdge    = topEdgeSearch;
                                        leftEdgeSearch.maxSideEdge    = bottomEdgeSearch;
                                        leftEdgeSearch.opposingEdge   = rightEdgeSearch;
                                        rightEdgeSearch.minSideEdge   = topEdgeSearch;
                                        rightEdgeSearch.maxSideEdge   = bottomEdgeSearch;
                                        rightEdgeSearch.opposingEdge  = leftEdgeSearch;
                                        topEdgeSearch.maxSize         = maxObjectHeight;
                                        bottomEdgeSearch.maxSize      = maxObjectHeight;
                                        leftEdgeSearch.maxSize        = maxObjectWidth;
                                        rightEdgeSearch.maxSize       = maxObjectWidth;

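                                        // Advance each edge search in turn until all four report Done(), aborting early if the bounding box exceeds the maximum object size or runs into an ROI border.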
                                        do
                                        {
                                            if (!topEdgeSearch.Done())
                                            {
                                                topEdgeSearch.TestLine();
                                            }
                                            if (!bottomEdgeSearch.Done())
                                            {
                                                bottomEdgeSearch.TestLine();
                                            }
                                            if (!leftEdgeSearch.Done())
                                            {
                                                leftEdgeSearch.TestLine();
                                            }
                                            if (!rightEdgeSearch.Done())
                                            {
                                                rightEdgeSearch.TestLine();
                                            }
                                            if (bottomEdgeSearch.lastPosWhereObjectSeen - topEdgeSearch.lastPosWhereObjectSeen > maxObjectHeight)
                                            {
                                                TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because y-axis size exceeded; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                                failed = true;
                                                break;
                                            }
                                            if (rightEdgeSearch.lastPosWhereObjectSeen - leftEdgeSearch.lastPosWhereObjectSeen > maxObjectWidth)
                                            {
                                                TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because x-axis size exceeded; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                                failed = true;
                                                break;
                                            }
                                            if (rightEdgeSearch.lastPosWhereObjectSeen == mROI.Right)
                                            {
                                                TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because ran into right edge of ROI; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                                failed = true;
                                                break;
                                            }
                                            if (leftEdgeSearch.lastPosWhereObjectSeen == mROI.Left)
                                            {
                                                TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because ran into left edge of ROI; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                                failed = true;
                                                break;
                                            }
                                            if (topEdgeSearch.lastPosWhereObjectSeen == mROI.Top)
                                            {
                                                TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because ran into top edge of ROI; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                                failed = true;
                                                break;
                                            }
                                            if (bottomEdgeSearch.lastPosWhereObjectSeen == mROI.Bottom)
                                            {
                                                TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because ran into bottom edge of ROI; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                                failed = true;
                                                break;
                                            }
                                        } while (!(topEdgeSearch.Done() && bottomEdgeSearch.Done() && leftEdgeSearch.Done() && rightEdgeSearch.Done()));

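                                        // Placeholder for an optional detailed (fine-grained) search pass; not yet implemented (see TODOs below).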
                                        if (detailedSearchAtEnd)
                                        {
                                            //topEdgeSearch.mStep
                                            //TODO: finish
                                            //TODO: recheck if object too big
                                        }

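                                        // Validate the candidate blob: reject it if any edge search aborted or if its bounding box is smaller than the configured minimum object size.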
                                        if (leftEdgeSearch.abort || rightEdgeSearch.abort || topEdgeSearch.abort || bottomEdgeSearch.abort)
                                        {
                                            TestExecution().LogMessageWithTimeFromTrigger(Name + ": aborting area search because an edge search aborted (probably ran into an already searched pixel); top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                            failed = true;
                                        }
                                        if (bottomEdgeSearch.lastPosWhereObjectSeen - topEdgeSearch.lastPosWhereObjectSeen < minObjectHeight)
                                        {
                                            TestExecution().LogMessageWithTimeFromTrigger(Name + ": excluding object since size too small on y-axis; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                            failed = true;

                                            // If the object/blob was too small, and we later re-check one of its pixels during a future search, we don't want to abort on the assumption that the object must be too big.
                                            // ...this issue came up in Head Rest's Weld Orrient on 9/18/08: in certain cases we would first find a small blob to the left of the weld and reject it because it was too small (its edges were close to, but below, the search color boundary).  Then we would find the main chunk of the light, which wrapped partially around the small blob (to the right and below); during its not-so-smart-but-fast search it would retest a pixel of the small blob and immediately abort, mistakenly assuming it had bumped into a previous "too large" blob.
                                            // ...it can bump into a previous "too small" blob because we search within the entire bounding rectangle, as a simplified way of catching "U"- or "Z"-shaped blobs (i.e. ones that double back).
                                            ClearSearchRecordArea(leftEdgeSearch.lastPosWhereObjectSeen, rightEdgeSearch.lastPosWhereObjectSeen, topEdgeSearch.lastPosWhereObjectSeen, bottomEdgeSearch.lastPosWhereObjectSeen);
                                        }
                                        else if (rightEdgeSearch.lastPosWhereObjectSeen - leftEdgeSearch.lastPosWhereObjectSeen < minObjectWidth)
                                        {
                                            TestExecution().LogMessageWithTimeFromTrigger(Name + ": excluding object since size too small on x-axis; top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                            failed = true;

                                            // Same situation as the too-small-on-y-axis case above: clear the search record for this area so a future search that re-tests one of these pixels doesn't mistakenly treat the small blob as a previously found "too large" blob.
                                            ClearSearchRecordArea(leftEdgeSearch.lastPosWhereObjectSeen, rightEdgeSearch.lastPosWhereObjectSeen, topEdgeSearch.lastPosWhereObjectSeen, bottomEdgeSearch.lastPosWhereObjectSeen);
                                        }
                                        if (!failed)
                                        {
                                            TestExecution().LogMessageWithTimeFromTrigger(Name + ": selected object bounded by: top=" + topEdgeSearch.lastPosWhereObjectSeen + " bottom=" + bottomEdgeSearch.lastPosWhereObjectSeen + " left=" + leftEdgeSearch.lastPosWhereObjectSeen + " right=" + rightEdgeSearch.lastPosWhereObjectSeen);
                                            leftEdge   = leftEdgeSearch.lastPosWhereObjectSeen;
                                            rightEdge  = rightEdgeSearch.lastPosWhereObjectSeen;
                                            topEdge    = topEdgeSearch.lastPosWhereObjectSeen;
                                            bottomEdge = bottomEdgeSearch.lastPosWhereObjectSeen;
                                        }
                                    }
                                }
                            }
                        } // end unsafe block
                    }
                    catch (Exception e)
                    {
                        TestExecution().LogErrorWithTimeFromTrigger("Failure in " + Name + "; msg=" + e.Message + " " + Environment.NewLine + e.StackTrace);
                    }
                    finally
                    {
                        if (sourceBitmapData != null)
                        {
                            sourceBitmap.UnlockBits(sourceBitmapData);
                        }
                        if (markedBitmap != null)
                        {
                            markedBitmap.UnlockBits(markedBitmapData);
                        }
                    }
                }
            } // end main block ("else" after all initial setup error checks)
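            // Publish the bounds (still -1 if no qualifying blob was found) and flag the outputs as complete.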
            mLeftBound.SetValue(leftEdge);
            mLeftBound.SetIsComplete();
            mRightBound.SetValue(rightEdge);
            mRightBound.SetIsComplete();
            mTopBound.SetValue(topEdge);
            mTopBound.SetIsComplete();
            mBottomBound.SetValue(bottomEdge);
            mBottomBound.SetIsComplete();
            DateTime doneTime    = DateTime.Now;
            TimeSpan computeTime = doneTime - startTime;

            if (leftEdge < 0)
            {
                TestExecution().LogMessageWithTimeFromTrigger(Name + " FAILED TO FIND BLOB");
            }

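            // Optionally save the source image (and the marked copy, if present) for offline review.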
            if (mAutoSave)
            {
                try
                {
                    string filePath = ((FindBlobOfSizeAndColorDefinition)Definition()).AutoSavePath; // cast to this step's own definition type to read its auto-save path
                    mSourceImage.Save(filePath, Name, true);
                    if (mImageToMark != null)
                    {
                        mImageToMark.Save(filePath, Name, "_marked_" + leftEdge + "_" + rightEdge + "_" + topEdge + "_" + bottomEdge);
                    }
                    TestExecution().LogMessageWithTimeFromTrigger("Snapshot saved");
                }
                catch (ArgumentException e)
                {
                    Project().Window().logMessage("ERROR: " + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger(e.Message);
                }
                catch (Exception e)
                {
                    Project().Window().logMessage("ERROR: Unable to AutoSave snapshot from " + Name + ".  Ensure path valid and disk not full.  Low-level message=" + e.Message);
                    TestExecution().LogErrorWithTimeFromTrigger("Unable to AutoSave snapshot from " + Name + ".  Ensure path valid and disk not full.");
                }
            }
            TestExecution().LogMessageWithTimeFromTrigger(Name + " finished at " + doneTime + "  | took " + computeTime.TotalMilliseconds + "ms");
        }