Example No. 1
        /// <summary>
        /// Compares a captured image against the master image, optionally applying a
        /// tolerance profile loaded from <paramref name="toleranceFile"/>. On mismatch,
        /// writes debugging output and fails the test.
        /// </summary>
        /// <param name="masterImage">The expected (master) bitmap.</param>
        /// <param name="capturedImage">The actual captured bitmap.</param>
        /// <param name="toleranceFile">Optional path to a tolerance file; null/empty means strict comparison.</param>
        private void CompareImages(Bitmap masterImage, Bitmap capturedImage, string toleranceFile)
        {
            ImageComparator comparator = new ImageComparator();
            comparator.ChannelsInUse = ChannelCompareMode.ARGB;

            // Load a tolerance profile only when the caller supplied one.
            if (!string.IsNullOrEmpty(toleranceFile))
            {
                _suite.printStatus("VisualScan: Loading tolerance.");
                comparator.Curve.CurveTolerance.LoadTolerance(toleranceFile);
            }

            _suite.printStatus("VisualScan: Comparing images.");
            bool imagesMatch = comparator.Compare(new ImageAdapter(masterImage), new ImageAdapter(capturedImage), false);
            if (imagesMatch)
            {
                _suite.printStatus("VisualScan: Succeeded.");
                return;
            }

            // Mismatch: emit diagnostics before failing the test.
            _suite.printStatus("VisualScan: Failed.");
            GenerateAndWriteDebuggingOutput(comparator, masterImage, capturedImage);
            _suite.failTest("Captured visual is different from Master.");
        }
Example No. 2
        /// <summary>
        /// The scanner attempts to locate content assuming that:
        ///   contentOff is the image adapter of the scene before content is shown
        ///   contentOn  is the image adapter of the scene with the content shown
        /// Note: content may have a variable alpha map
        /// </summary>
        /// <param name="content">The content to locate (may carry a per-pixel alpha map).</param>
        /// <param name="contentOff">Scene without the content shown.</param>
        /// <param name="contentOn">Scene with the content shown.</param>
        /// <param name="transformMatrix">Transform applied to the content before matching.</param>
        /// <returns>A Result carrying pass/fail, a log string, and the located content corners.</returns>
        /// <exception cref="ArgumentNullException">Thrown when any argument is null.</exception>
        public Result Process(IImageAdapter content, IImageAdapter contentOff, IImageAdapter contentOn, double[] transformMatrix)
        {
            // Validate arguments up front. The original code collected the offending
            // parameter names into paramEx but never threw, which let null arguments
            // surface later as a NullReferenceException (e.g. at contentOff.Width).
            // It also checked 'content' twice; the duplicate check has been removed.
            string paramEx = string.Empty;

            if (content == null)
            {
                paramEx = "<IImageAdapter content> ";
            }
            if (contentOff == null)
            {
                paramEx += "<IImageAdapter contentOff>";
            }
            if (contentOn == null)
            {
                paramEx += "<IImageAdapter contentOn>";
            }
            if (transformMatrix == null)
            {
                paramEx += "<double []transformMatrix>";
            }
            if (paramEx != string.Empty)
            {
                throw new ArgumentNullException(paramEx);
            }

            //sentinel
            bool         validResult = true;
            string       log         = string.Empty;
            ImageAdapter res         = GetTransformedContent(content, transformMatrix, contentOff.Width, contentOff.Height);

            // locate rect:
            //  1) compute the diff image
            //  2) synthesize the transformed content
            //  3) compare with the sourceOn
            ImageComparator imageComparator = new ImageComparator();

            imageComparator.Compare(contentOn, contentOff);
            IImageAdapter imdiff = imageComparator.GetErrorDifference(ErrorDifferenceType.FilterEdge);

            // Convert the diff image in place to a binary map (black background,
            // white foreground) so the analysis below sees only the changed region.
            // (The original allocated a separate 'imbinmap' adapter that was never
            // written to; that dead allocation has been removed.)
            IColor black = new ColorByte(Color.Black);
            IColor white = new ColorByte(Color.White);

            for (int j = 0; j < imdiff.Width; j++)
            {
                for (int i = 0; i < imdiff.Height; i++)
                {
                    // Any non-zero RGB energy counts as foreground.
                    if (imdiff[j, i].Red + imdiff[j, i].Green + imdiff[j, i].Blue > 1e-6)
                    {
                        imdiff[j, i] = white;
                    }
                    else
                    {
                        imdiff[j, i] = black;
                    }
                }
            }
#if DEBUG
            ImageUtility.ToImageFile(imdiff, "bwmap.png", ImageFormat.Png);
#endif
            // Analyze the binary diff image.
            VScan lvsc = new VScan(imdiff);
            lvsc.OriginalData.Analyze();

            // Topological check (descriptor-count validation) was removed from the
            // original as commented-out code; 'validResult' currently stays true here.

            // Find the bounding box of the dynamic content: scan the binary map for
            // foreground pixels and track min/max in both axes.
            int minx = int.MaxValue;
            int miny = int.MaxValue;
            int maxx = int.MinValue;
            int maxy = int.MinValue;
            if (validResult == true)
            {
                for (int j = 0; j < imdiff.Width; j++)
                {
                    for (int i = 0; i < imdiff.Height; i++)
                    {
                        double sum = imdiff[j, i].Blue + imdiff[j, i].Red + imdiff[j, i].Green;
                        if (sum > 1e-6)
                        {
                            if (j < minx)
                            {
                                minx = j;
                            }
                            if (i < miny)
                            {
                                miny = i;
                            }
                            if (j > maxx)
                            {
                                maxx = j;
                            }
                            if (i > maxy)
                            {
                                maxy = i;
                            }
                        }
                    }
                }

                // Convert max corner to width/height of the bounding box.
                maxx -= minx;
                maxy -= miny;
                Console.WriteLine("<Target> found at " + minx + " " + miny + "  " + maxx + " " + maxy);
                ImageUtility.ToImageFile(imdiff, "Recpos.png");

                // Synthesize content into contentOff: start from a copy of the
                // background. (The original first allocated a blank adapter that was
                // immediately overwritten by Clone(); the dead allocation is removed.)
                IImageAdapter iafcomp = (IImageAdapter)contentOff.Clone();
                double        dx      = minx - _imageAdapterXmin;
                double        dy      = miny - _imageAdapterYmin;

                // Translate the four recorded content corners by the located offset.
                for (int j = 0; j < 4; j++)
                {
                    _contentLocation[j, 0] += dx;
                    _contentLocation[j, 1] += dy;
                }

                // Alpha-blend the transformed content onto the background copy.
                for (int j = 0; j < res.Width; j++)
                {
                    for (int i = 0; i < res.Height; i++)
                    {
                        if (j + minx < iafcomp.Width && i + miny < iafcomp.Height)
                        {
                            if (res[j, i].Alpha > 0)
                            {
                                double lalp = res[j, i].Alpha;
                                int    jid  = j + minx;
                                int    iid  = i + miny;
                                IColor lvc  = iafcomp[jid, iid];
                                lvc.Red           = lalp * res[j, i].Red + (1 - lalp) * iafcomp[jid, iid].Red;
                                lvc.Green         = lalp * res[j, i].Green + (1 - lalp) * iafcomp[jid, iid].Green;
                                lvc.Blue          = lalp * res[j, i].Blue + (1 - lalp) * iafcomp[jid, iid].Blue;
                                iafcomp[jid, iid] = lvc;
                            }
                        }
                    }
                }
#if DEBUG
                ImageUtility.ToImageFile(iafcomp, "SynthGlyph.png", ImageFormat.Png);
#endif

                // Compare the synthesized scene against the actual "content on" scene.
                validResult = imageComparator.Compare(iafcomp, contentOn);
                string toluse = "No Tolerance used - strict comparison";
                if (_tolerance != null)
                {
                    toluse = "with the given Tolerance";
                }
                if (validResult == false)
                {
                    log = "<Fail> Computed Content does not match actual content -- " + toluse;
                }
                else
                {
                    log = "<Pass>";
                }

                Console.WriteLine("Final comp pass " + validResult);
                ImageUtility.ToImageFile(imageComparator.GetErrorDifference(ErrorDifferenceType.FilterEdge), "SynthError.png");
#if DEBUG
                // Render a diagnostic image: highlight the located bounding box and
                // label the four translated content corners (TL/TR/BL/BR).
                using (Bitmap fbmp = ImageUtility.ToBitmap(contentOn))
                {
                    using (Graphics graphics = Graphics.FromImage(fbmp))
                    {
                        using (Brush brush = new SolidBrush(Color.FromArgb(40, 255, 0, 0)))
                        {
                            graphics.FillRectangle(brush, minx, miny, maxx, maxy);
                        }
                        using (Pen pen = new Pen(Color.Red, 2))
                        {
                            graphics.DrawRectangle(pen, minx, miny, maxx, maxy);
                        }
                        using (Font fnt = new Font("Arial", 10))
                        {
                            SizeF sz = graphics.MeasureString("TL", fnt);
                            graphics.FillRectangle(Brushes.Yellow, (float)_contentLocation[0, 0], (float)_contentLocation[0, 1], sz.Width, sz.Height);
                            graphics.FillRectangle(Brushes.Yellow, (float)_contentLocation[1, 0], (float)_contentLocation[1, 1], sz.Width, sz.Height);
                            graphics.FillRectangle(Brushes.Yellow, (float)_contentLocation[2, 0], (float)_contentLocation[2, 1], sz.Width, sz.Height);
                            graphics.FillRectangle(Brushes.Yellow, (float)_contentLocation[3, 0], (float)_contentLocation[3, 1], sz.Width, sz.Height);
                            graphics.DrawString("TL", fnt, Brushes.Red, (float)_contentLocation[0, 0], (float)_contentLocation[0, 1]);
                            graphics.DrawString("TR", fnt, Brushes.Red, (float)_contentLocation[1, 0], (float)_contentLocation[1, 1]);
                            graphics.DrawString("BL", fnt, Brushes.Red, (float)_contentLocation[2, 0], (float)_contentLocation[2, 1]);
                            graphics.DrawString("BR", fnt, Brushes.Red, (float)_contentLocation[3, 0], (float)_contentLocation[3, 1]);
                        }
                        fbmp.Save("TrackMatch.png");
                    }
                }
#endif
            }
            Result dcres = new Result(validResult, log, _contentLocation);

            return(dcres);
        }
Example No. 3
        /******************************************************************************
        * Function:         Verify
        ******************************************************************************/
        /// <summary>
        /// Side-by-side verification call to perform verification for a specific time.
        /// Captures the animated window, compares each registered animation's actual
        /// value against the calculated expected value, then (if values match) freezes
        /// the properties at the expected values, recaptures, and compares the two
        /// screenshots. Finally restores all animation clocks.
        /// </summary>
        /// <param name="curTime">The time at which to verify the animations.</param>
        /// <returns>true when both the value check and the visual comparison pass.</returns>
        public bool Verify(double curTime)
        {
            // If there is no window registered, visual validation is impossible:
            // fall back to a values-only check.
            if (!windowRegistered)
            {
                verboseLog += "\n COULD NOT DO VISUAL VALIDATION!!!!!!!!!!! No Window was specified. Defaulting to ValuesOnlyVerify. For VisualValidation, use the following constructor: SideBySideVerifier(Window win) ";
                return(ValuesOnlyVerify(curTime));
            }

            bool passResult    = true;
            bool valuesCheckOK = true;

            // Snapshot of the window with the animations live.
            // NOTE(review): the 150 ms sleep presumably lets rendering settle before
            // the capture — confirm this is sufficient on slow machines.
            System.Threading.Thread.Sleep(150);
            this.aniWin.Title = windowTitle;

            System.Drawing.Bitmap animatedCapture = ImageUtility.CaptureScreen(hWnd, true);

            // Reset the log for this verification pass (earlier content is discarded).
            verboseLog = "\n Side by Side Verifier for time " + curTime;

            if ((registeredAnimatableCount == 0) && (registeredUIElementCount == 0))
            {
                verboseLog += " No animations could be registered - nothing to verify";
                return(false);
            }


            // Walk all registered Animatables: obtain expected and actual values,
            // compare, then pin the property at the expected value and detach the
            // clock so the upcoming static capture shows the expected rendering.
            for (int i = 0; i < registeredAnimatableCount; i++)
            {
                object           expectedValue = myValidator.Verify(registeredAnimatableObjs[i].animClock, registeredAnimatableObjs[i].baseValue, curTime);
                DependencyObject current       = registeredAnimatableObjs[i].animObj;
                object           currentValue  = current.GetValue(registeredAnimatableObjs[i].animProp);

                verboseLog += "\n" + current.GetType().ToString().Substring(current.GetType().ToString().LastIndexOf(".") + 1);
                verboseLog += " " + registeredAnimatableObjs[i].animProp.ToString().Substring(registeredAnimatableObjs[i].animProp.ToString().LastIndexOf(".") + 1) + " Anim:: prog: ";
                verboseLog += registeredAnimatableObjs[i].animClock.CurrentProgress.ToString() + "   exp : " + expectedValue.ToString() + " act: " + currentValue.ToString();

                // Flag a value mismatch but keep walking so the log covers everything.
                if (!myValidator.WithinTolerance(currentValue, expectedValue, this.toleranceInPercent))
                {
                    passResult = false; verboseLog += " <-----"; valuesCheckOK = false;
                }

                current.SetValue(registeredAnimatableObjs[i].animProp, expectedValue);
                ((System.Windows.Media.Animation.Animatable)current).ApplyAnimationClock(registeredAnimatableObjs[i].animProp, null);
            }


            // Walk all registered UI Elements the same way: compare expected vs.
            // actual, then pin the expected value and detach the clock.
            for (int i = 0; i < registeredUIElementCount; i++)
            {
                object    expectedValue = myValidator.Verify(registeredUIElementObjs[i].animClock, registeredUIElementObjs[i].baseValue, curTime);
                object    currentValue  = registeredUIElementObjs[i].animObj.GetValue(registeredUIElementObjs[i].animProp);
                UIElement current       = registeredUIElementObjs[i].animObj;


                verboseLog += "\n" + current.GetType().ToString().Substring(current.GetType().ToString().LastIndexOf(".") + 1);
                verboseLog += registeredUIElementObjs[i].animProp.ToString().Substring(registeredUIElementObjs[i].animProp.ToString().LastIndexOf(".") + 1) + " Anim:: prog: ";
                verboseLog += registeredUIElementObjs[i].animClock.CurrentProgress.ToString() + "   exp : " + expectedValue.ToString() + " act: " + currentValue.ToString();


                // Check that the values match; on mismatch mark the overall failure.
                if (!myValidator.WithinTolerance(currentValue, expectedValue, this.toleranceInPercent))
                {
                    passResult = false; verboseLog += " <-----"; valuesCheckOK = false;
                }

                current.SetValue(registeredUIElementObjs[i].animProp, expectedValue);
                current.ApplyAnimationClock(registeredUIElementObjs[i].animProp, null);
            }

            // If the calculated and actual values do not match, there is no need for
            // visual validation — the test has already failed.
            if (valuesCheckOK)
            {
                // Snapshot of the window with properties pinned at expected values.
                System.Drawing.Bitmap staticCapture = ImageUtility.CaptureScreen(hWnd, true);

                // Visual verification: animated capture vs. static (expected) capture.
                passResult = imageCompare.Compare(new ImageAdapter(staticCapture), new ImageAdapter(animatedCapture));
                if (!passResult)
                {
                    // Persist both captures for offline analysis of the failure.
                    verboseLog += "\n The comparison has failed. Anim_" + curTime.ToString() + ".bmp  and  Static_" + curTime.ToString() + ".bmp have been written out";
                    animatedCapture.Save("Anim_" + curTime.ToString() + ".bmp");
                    staticCapture.Save("Static_" + curTime.ToString() + ".bmp");
                }
            }
            else
            {
                verboseLog += "\n VISUAL VALIDATION SKIPPED!!!!!!!!!!! The values did not match, no sense comparing";
                passResult  = false;
            }

            // Restore the animations: re-apply base values and re-attach the clocks
            // so subsequent Verify calls see live animations again.
            for (int i = 0; i < registeredAnimatableCount; i++)
            {
                DependencyObject current = registeredAnimatableObjs[i].animObj;
                current.SetValue(registeredAnimatableObjs[i].animProp, registeredAnimatableObjs[i].baseValue);
                ((System.Windows.Media.Animation.Animatable)current).ApplyAnimationClock(registeredAnimatableObjs[i].animProp, registeredAnimatableObjs[i].animClock);
            }
            for (int i = 0; i < registeredUIElementCount; i++)
            {
                UIElement current = registeredUIElementObjs[i].animObj;
                current.SetValue(registeredUIElementObjs[i].animProp, registeredUIElementObjs[i].baseValue);
                current.ApplyAnimationClock(registeredUIElementObjs[i].animProp, registeredUIElementObjs[i].animClock);
            }

            return(passResult);
        }
Example No. 4
        /// <summary>
        /// Async worker that compares a master image against a captured image using
        /// an ImageComparator with a tolerance profile. On failure it optionally
        /// retries with a filtered comparison, then saves master/actual/diff images
        /// (with metadata) for analysis, and finally signals completion.
        /// </summary>
        /// <param name="asyncData">Must be an AsyncData instance carrying the images,
        /// tolerance settings, result holder, and optional synchronization event.</param>
        /// <exception cref="ArgumentException">Thrown when asyncData is null or not AsyncData.</exception>
        private void DoVscanCompare(object asyncData)
        {
            AsyncData data = asyncData as AsyncData;

            if (data == null)
            {
                throw new ArgumentException("Parameter passed in to the Method not of type AsyncData (or null)", "asyncData");
            }

            ImageComparator ic = new ImageComparator();

            // Load the tolerance profile from the XML node supplied by the caller.
            ic.Curve.CurveTolerance.LoadTolerance(data.ToleranceSettings.XmlNodeTolerance);

            IImageAdapter masterAdapter   = new ImageAdapter(data.MasterImage);
            IImageAdapter capturedAdapter = new ImageAdapter(data.CapturedImage);

            // compare Master to the Capture image using the Compare overload that will scale the images size accounting for the DPI
            data.Result.Succeeded = ic.Compare(masterAdapter, MetadataInfoHelper.GetDpi(masterAdapter), capturedAdapter, MetadataInfoHelper.GetDpi(capturedAdapter), false);
            if (data.Result.Succeeded == false)
            {
                Microsoft.Test.Logging.GlobalLog.LogStatus("Regular comparison failed");
            }
            // On failure, check if the user wants to filter the image (IgnoreAntiAliasing will do)
            IImageAdapter masterFiltered  = null;
            IImageAdapter captureFiltered = null;

            if (data.Result.Succeeded == false && data.ToleranceSettings.Filter != null)
            {
                // First save the error diff image of the unfiltered comparison.
                string errorDiffName = ".\\ErrorDiff_" + data.Index + IMAGE_EXTENSION;
                ImageUtility.ToImageFile(ic.GetErrorDifference(ErrorDifferenceType.IgnoreAlpha), errorDiffName);
                Microsoft.Test.Logging.GlobalLog.LogFile(errorDiffName);

                // Compare failed: filter both images and retry the comparison.
                Microsoft.Test.Logging.GlobalLog.LogStatus("Filtering and recompare");
                masterFiltered        = data.ToleranceSettings.Filter.Process(masterAdapter);
                captureFiltered       = data.ToleranceSettings.Filter.Process(capturedAdapter);
                data.Result.Succeeded = ic.Compare(masterFiltered, captureFiltered, false);
                if (data.Result.Succeeded == false)
                {
                    Microsoft.Test.Logging.GlobalLog.LogStatus("==> Filtered comparison failed as well");
                }
            }

            if (data.Result.Succeeded)
            {
                Microsoft.Test.Logging.GlobalLog.LogStatus("Comparison SUCCEEDED.");
            }
            else
            {
                // Save master & filtered master for easy analysis.
                string masterName = ".\\Master_" + data.Index + IMAGE_EXTENSION;
                data.MasterImage.Save(masterName, System.Drawing.Imaging.ImageFormat.Tiff);
                Microsoft.Test.Logging.GlobalLog.LogFile(masterName);
                if (masterFiltered != null)
                {
                    string filteredMasterName = ".\\MasterFiltered_" + data.Index + IMAGE_EXTENSION;
                    using (Bitmap filteredMaster = ImageUtility.ToBitmap(masterFiltered))
                    {
                        SetMetadataToImage(filteredMaster);
                        filteredMaster.Save(filteredMasterName, System.Drawing.Imaging.ImageFormat.Tiff);
                    }
                    Microsoft.Test.Logging.GlobalLog.LogFile(filteredMasterName);
                }

                // Save rendered image (as "Actual_n") for easy analysis
                string capturedName = ".\\Actual_" + data.Index + IMAGE_EXTENSION;
                data.CapturedImage.Save(capturedName, System.Drawing.Imaging.ImageFormat.Tiff);
                Microsoft.Test.Logging.GlobalLog.LogFile(capturedName);
                // Save actual filtered for easy analysis
                if (captureFiltered != null)
                {
                    string filteredRenderedName = ".\\ActualFiltered_" + data.Index + IMAGE_EXTENSION;
                    using (Bitmap filteredRendered = ImageUtility.ToBitmap(captureFiltered))
                    {
                        SetMetadataToImage(filteredRendered);
                        filteredRendered.Save(filteredRenderedName, System.Drawing.Imaging.ImageFormat.Tiff);
                    }
                    Microsoft.Test.Logging.GlobalLog.LogFile(filteredRenderedName);
                }

                // Master might need to be updated, save with correct name and metadata
                //
                // In this image, encode full criteria
                // NOTE(review): "_FullCtriteria" looks like a typo for "_FullCriteria",
                // but it is a runtime output filename — confirm no tooling depends on
                // the misspelled name before fixing.
                string name         = System.IO.Path.GetFileName(data.MasterName);
                string originalName = name.Replace(IMAGE_EXTENSION, "_FullCtriteria" + IMAGE_EXTENSION);
                Microsoft.Test.Logging.GlobalLog.LogStatus("Saving master with all criteria (new master) as '" + originalName + "'");
                SetMetadataToImage(data.CapturedImage);
                data.CapturedImage.Save(originalName, System.Drawing.Imaging.ImageFormat.Tiff);
                Microsoft.Test.Logging.GlobalLog.LogFile(originalName);
                //
                // In this image, encode only criteria that match the master
                string originalNameFull = name.Replace(IMAGE_EXTENSION, "_MatchingCriteria" + IMAGE_EXTENSION);
                Microsoft.Test.Logging.GlobalLog.LogStatus("Saving master with matching criteria encoded (to replace previous master) as '" + originalNameFull + "'");
                MasterMetadata metadata = ImageMetadata.MetadataFromImage(data.MasterImage);
                // Keep master Criteria but update its Description.
                IMasterDimension[] keys = new IMasterDimension[metadata.Description.Count];
                metadata.Description.Keys.CopyTo(keys, 0);
                for (int t = 0; t < keys.Length; t++)
                {
                    metadata.Description[keys[t]] = keys[t].GetCurrentValue();
                }
                ImageMetadata.SetMetadataToImage(metadata, data.CapturedImage);
                data.CapturedImage.Save(originalNameFull, System.Drawing.Imaging.ImageFormat.Tiff);
                Microsoft.Test.Logging.GlobalLog.LogFile(originalNameFull);

                // Save the error diff image of the last comparison performed.
                string errorDiffFilterName = ".\\ErrorDiffFiltered_" + data.Index + IMAGE_EXTENSION;
                if (data.ToleranceSettings.Filter == null)
                {
                    // No filter was applied; change the name so it's not confusing.
                    errorDiffFilterName = ".\\ErrorDiff_" + data.Index + IMAGE_EXTENSION;
                }
                ImageUtility.ToImageFile(ic.GetErrorDifference(ErrorDifferenceType.IgnoreAlpha), errorDiffFilterName);
                Microsoft.Test.Logging.GlobalLog.LogFile(errorDiffFilterName);
            }

            data.Result.IsCompleted = true;

            // Wake up any caller waiting on the comparison to finish.
            if (data.SynchronizationObject != null)
            {
                data.SynchronizationObject.Set();
            }
        }