Example #1
        private IColor ComputeClosestColor(IColor currentColor)
        {
            IColor retVal = (IColor)currentColor.Clone();

            if (Automatic)
            {
                retVal.R = (byte)(_delta[0] * ((int)(currentColor.R / _delta[0]) + .5f) - 1);
                retVal.G = (byte)(_delta[1] * ((int)(currentColor.G / _delta[1]) + .5f) - 1);
                retVal.B = (byte)(_delta[2] * ((int)(currentColor.B / _delta[2]) + .5f) - 1);
            }
            else
            {
                double minDelta  = double.MaxValue;
                IColor diffColor = ColorByte.Empty;
                for (int index = 0; index < Attractors.Length; index++)
                {
                    double diff = ImageComparator.ColorDifferenceARGB(new ColorByte(Attractors[index]), currentColor, out diffColor);
                    if (diff < ColorAttractorThreshold && diff < minDelta)
                    {
                        minDelta    = diff;
                        retVal.ARGB = Attractors[index].ToArgb();
                    }
                    else
                    {
                        retVal.IsEmpty = true;
                    }
                }
            }
            return(retVal);
        }
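
The Automatic branch above snaps each channel to the midpoint of its quantization step and then subtracts one. A minimal standalone sketch of that arithmetic, using a hypothetical per-channel delta of 32 (not a value taken from the example):

        // Illustrative only: with delta = 32, an input of 200 maps to
        // 32 * ((int)(200 / 32) + 0.5) - 1 = 32 * 6.5 - 1 = 207.
        private static byte SnapChannel(byte channel, float delta)
        {
            return (byte)(delta * ((int)(channel / delta) + .5f) - 1);
        }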
Example #2
        public void ProcessImages()
        {
            var imageComparator = new ImageComparator();

            for (int index = 0; index < _imagesToProcess.Count; index++)
            {
                ImageComparisonInfo currentImageToProcess;

                var attemptTakeImage = 0;
                do
                {
                    if (!_imagesToProcess.TryTake(out currentImageToProcess))
                    {
                        attemptTakeImage++;
                    }
                    else
                    {
                        break;
                    }
                } while (attemptTakeImage < 3);

                imageComparator.ImageComparisonInfo = currentImageToProcess;

                var imageDiff = GetImageDiff(imageComparator);
                lock (_processedImages)
                {
                    _processedImages.Add(imageDiff);
                }
            }
        }
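
The bounded TryTake retry suggests _imagesToProcess is a concurrent collection shared between workers. A hedged sketch of how ProcessImages might be fanned out across threads; the method name, worker count, and the assumption that the instance is safe to share are illustrative, not taken from the example:

        public void ProcessImagesInParallel(int workerCount)
        {
            var workers = new System.Threading.Tasks.Task[workerCount];
            for (int i = 0; i < workerCount; i++)
            {
                // Each worker drains _imagesToProcess via ProcessImages; results are
                // added to _processedImages under its lock, as in the method above.
                workers[i] = System.Threading.Tasks.Task.Run(() => ProcessImages());
            }
            System.Threading.Tasks.Task.WaitAll(workers);
        }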
Example #3
        private void CompareImages(Bitmap masterImage, Bitmap capturedImage, string toleranceFile)
        {
            // Create Comparator
            ImageComparator comparator = new ImageComparator();

            comparator.ChannelsInUse = ChannelCompareMode.ARGB;

            // Set tolerance if one is provided.
            if (!string.IsNullOrEmpty(toleranceFile))
            {
                _suite.printStatus("VisualScan: Loading tolerance.");
                comparator.Curve.CurveTolerance.LoadTolerance(toleranceFile);
            }

            // Perform Comparison.
            _suite.printStatus("VisualScan: Comparing images.");
            if (comparator.Compare(new ImageAdapter(masterImage), new ImageAdapter(capturedImage), false))
            {
                _suite.printStatus("VisualScan: Succeeded.");
            }
            else
            {
                _suite.printStatus("VisualScan: Failed.");
                GenerateAndWriteDebuggingOutput(comparator, masterImage, capturedImage);
                _suite.failTest("Captured visual is different from Master.");
            }
        }
Example #4
        /// <summary>
        /// Given a difference buffer, this method uses the VScan utility to go through
        /// the differing points and determine how many of them can be ignored.
        /// </summary>
        /// <param name="diffBuffer">The difference buffer.</param>
        /// <param name="VScanToleranceFile">Path to a custom VScan tolerance file; the default tolerance is used if null or missing.</param>
        /// <returns>Number of pixels that CANNOT be ignored. Pass == 0.</returns>
        private static int VerifyDifferenceUsingVScan(RenderBuffer diffBuffer, string VScanToleranceFile)
        {
            int             failures = 0;
            ImageComparator comparator;

            if (VScanToleranceFile != null && File.Exists(VScanToleranceFile)) // using custom tolerance
            {
                CurveTolerance tolerance = new CurveTolerance();
                tolerance.LoadTolerance(VScanToleranceFile);
                comparator = new ImageComparator(tolerance);
            }
            else // using default tolerance;
            {
                comparator = new ImageComparator();
            }

            ImageAdapter blackImageAdapter = new ImageAdapter(diffBuffer.Width, diffBuffer.Height, ColorToIColor(Colors.Black));
            ImageAdapter diffImageAdapter  = new ImageAdapter(diffBuffer.Width, diffBuffer.Height);

            for (int x = 0; x < diffBuffer.Width; x++)
            {
                for (int y = 0; y < diffBuffer.Height; y++)
                {
                    diffImageAdapter[x, y] = ColorToIColor(diffBuffer.FrameBuffer[x, y]);
                }
            }

            bool passed = comparator.Compare(blackImageAdapter, diffImageAdapter, true);

            failures = (passed == false && comparator.MismatchingPoints != null) ? comparator.MismatchingPoints.NumMismatchesAboveLevel(1) : 0;

            return(failures);
        }
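
A hypothetical caller for the helper above, illustrating the "Pass == 0" contract from the summary; the method and parameter names here are placeholders:

        private static bool DifferenceIsAcceptable(RenderBuffer diffBuffer, string vscanTolerancePath)
        {
            // Zero non-ignorable pixels means the rendered difference is within tolerance.
            return VerifyDifferenceUsingVScan(diffBuffer, vscanTolerancePath) == 0;
        }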
Example #5
        private void GenerateAndWriteDebuggingOutput(ImageComparator comparator, Bitmap masterImage, Bitmap capturedImage)
        {
            // Unique identifier.
            long timestamp = DateTime.Now.Ticks;

            // Create a Package for analysis.
            string  packageName = Path.GetTempPath() + _suite.CaseNumber + "_error_" + timestamp + ".vscan";
            Package package     = Package.Create(packageName, masterImage, capturedImage);

            if (comparator.Curve.CurveTolerance != null)
            {
                package.Tolerance = comparator.Curve.CurveTolerance.WriteToleranceToNode();
            }
            package.Save();
            _suite.LogToFile(packageName); // Copy package to Log location.

            // Output the captured image for updating the masters.
            string captureDump = _suite.CaseNumber + "_capture_" + timestamp + ".bmp";

            _suite.printStatus("Captured image: " + captureDump);
            Stream captureStream = new MemoryStream();

            ImageUtility.ToImageStream(new ImageAdapter(capturedImage), captureStream, System.Drawing.Imaging.ImageFormat.Bmp);
            _suite.LogToFile(captureDump, captureStream);
        }
Example #6
        /// <summary>
        /// Perform a comparison between the generated image and the base one.
        /// </summary>
        /// <param name="receivedImage">The received image.</param>
        /// <returns>True if the tests were correctly performed.</returns>
        private bool CompareImage(TestResultServerImage receivedImage, string resultTempFileName)
        {
            var comparer = new ImageComparator {
                SaveJson = true
            };

            return(comparer.Compare_RMSE(receivedImage, resultTempFileName));
        }
Example #7
    private int SimilarityCheck()
    {
        Sprite    _sprite         = originalSprite.sprite;
        Texture2D drawingTex      = paintedImage.sharedMaterial.mainTexture as Texture2D;
        float     similarityValue = ImageComparator.CheckSimilarity(drawingTex, _sprite.texture);

        // Compute and display the similarity percentage
        int similarityPercent = ProcessSimilarityValue(similarityValue);

        if (resemblanceValue)
        {
            resemblanceValue.text = similarityPercent + "%";
        }
        return(similarityPercent);
    }
Example #8
        /// <summary>
        /// Perform the comparison operation
        /// This method abstracts out all the details of image comparison to a boolean result
        /// The basic assumption is that the default set of tolerances is adequate
        /// </summary>
        /// <param name="testImageAdapter"></param>
        /// <param name="masterImageAdapter"></param>
        /// <returns></returns>
        private static bool Compare(IImageAdapter testImageAdapter, IImageAdapter masterImageAdapter)
        {
            bool            TestPassed = false;
            ImageComparator comparator = null;

            if (File.Exists(toleranceFilePath))
            {
                CurveTolerance tolerance = new CurveTolerance();
                tolerance.LoadTolerance(toleranceFilePath);
                comparator = new ImageComparator(tolerance);
                TestLog.Current.LogStatus("Using custom tolerance (" + toleranceFilePath + ")");
            }
            else
            {
                comparator = new ImageComparator();
                TestLog.Current.LogStatus("Using default tolerance");
            }

            if (!xtcContainsDpiInfo)
            {
                // No master image dpi info found in test definition
                TestPassed = comparator.Compare(masterImageAdapter, testImageAdapter, true);
            }
            else
            {
                TestPassed = comparator.Compare(masterImageAdapter,                                                                            // master image adapter
                                                new Point((int)Math.Round(masterImageAdapter.DpiX), (int)Math.Round(masterImageAdapter.DpiY)), // master image dpi info
                                                testImageAdapter,                                                                              // test image adapter
                                                new Point((int)Math.Round(testImageAdapter.DpiX), (int)Math.Round(testImageAdapter.DpiY)),     // test image dpi info
                                                true);                                                                                         // populateMismatchingPoints
            }

            if (!TestPassed)
            {
                Package package = Package.Create(".\\FailurePackage.vscan",
                                                 ImageUtility.ToBitmap(masterImageAdapter),
                                                 ImageUtility.ToBitmap(testImageAdapter),
                                                 comparator.Curve.CurveTolerance.WriteToleranceToNode());
                package.Save();
                TestLog.Current.LogFile(package.PackageName);
            }

            return(TestPassed);
        }
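
A hedged usage sketch for the Compare helper above: wrap two bitmaps in ImageAdapters (the Bitmap constructor overload also appears in Examples #3 and #13) and pass the test image first, then the master. The file paths and method name are illustrative:

        private static bool CompareFiles(string masterPath, string testPath)
        {
            using (Bitmap master = new Bitmap(masterPath))
            using (Bitmap test = new Bitmap(testPath))
            {
                // Compare(test, master) applies toleranceFilePath / xtcContainsDpiInfo as configured above.
                return Compare(new ImageAdapter(test), new ImageAdapter(master));
            }
        }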
Example #9
        private void Button2_Click(object sender, EventArgs e)
        {
            this.Invalidate();
            OpenFileDialog image2 = new OpenFileDialog
            {
                Title  = "Open Image 2",
                Filter = supportedImagesTypes
            };

            if (image2.ShowDialog() == DialogResult.OK)
            {
                image2Comp        = new ImageComparator(image2.FileName);
                pictureBox2.Image = image2Comp.Source;
                pictureBox4.Image = image2Comp.imageGrayscaled;
                if (image1Comp != null)
                {
                    image2Comp.DrawDifference(image2Comp.CompareImagesList(image1Comp.GetBrightness()), image2.FileName, 664, 12, this.CreateGraphics());
                    richTextBox1.AppendText("\nResult: " + Math.Round(image1Comp.CompareImages(image2Comp.GetBrightness()) / 256, 2).ToString() + "%");
                }
            }
        }
Example #10
        public void DiffImageTest()
        {
            var imageProcessor = new ImageComparator();

            var fileNames = Directory.GetFiles(@"e:\Temp\Screen\");

            for (int index = 0; index < fileNames.Length - 3;)
            {
                var imageInfo = new ImageComparisonInfo()
                {
                    FirstImagePath  = fileNames[index],
                    SecondImagePath = fileNames[index + 1],
                    DiffImagePath   = outputFile + "_" + index + ".png"
                };

                imageProcessor.ImageComparisonInfo = imageInfo;

                var isImageDifferent = imageProcessor.GetCompositeImageDiff();
                index += 2;
            }
        }
Example #11
        private ImageComparisonInfo GetImageDiff(ImageComparator imageComparator)
        {
            ImageComparisonInfo diffImage;

            try
            {
                diffImage = imageComparator.GetCompositeImageDiff();
            }
            catch (Exception ex)
            {
                var errorMessage = $"{ex.Message} {ex.StackTrace}";

                diffImage = new ImageComparisonInfo()
                {
                    ErrorMessage = errorMessage,
                    ScreenShotId = imageComparator.ImageComparisonInfo.ScreenShotId
                };

                new RestKeplerServiceClient().LogError(errorMessage);
            }

            return(diffImage);
        }
Example #12
        /// <summary>
        /// The scanner attempts to locate content assuming that:
        ///   contentOff is the image adapter of the scene before the content is shown
        ///   contentOn  is the image adapter of the scene with the content shown
        /// Note: content may have a variable alpha map
        /// </summary>
        public Result Process(IImageAdapter content, IImageAdapter contentOff, IImageAdapter contentOn, double[] transformMatrix)
        {
            string paramEx = string.Empty;

            if (content == null)
            {
                paramEx = "<IImageAdapter content> ";
            }
            if (contentOff == null)
            {
                paramEx += "<IImageAdapter contentOff>";
            }
            if (contentOn == null)
            {
                paramEx += "<IImageAdapter contentOn>";
            }
            if (content == null)
            {
                paramEx += "<IImageAdapter content>";
            }
            if (transformMatrix == null)
            {
                paramEx += "<double []transformMatrix>";
            }

/*
 *                  if (transformMatrix.Length != Interpolator.TransformLength)
 *                  {
 *                      throw new Exception("Transform array length != " + Interpolator.TransformLength);
 *                  }
 */
            //sentinel
            bool         validResult = true;
            string       log         = string.Empty;
            ImageAdapter res         = GetTransformedContent(content, transformMatrix, contentOff.Width, contentOff.Height);

            // locate rec
            //  1) compute the diff image
            //  2) synthesize the transformed content
            //  3) compare with contentOn
            ImageComparator imageComparator = new ImageComparator();

            imageComparator.Compare(contentOn, contentOff);
            IImageAdapter imdiff = imageComparator.GetErrorDifference(ErrorDifferenceType.FilterEdge);

            // make sure there is only one descriptor and the background.
            //convert the diff image to a binary image (black-bg and white-fg)
            ImageAdapter imbinmap = new ImageAdapter(imdiff.Width, imdiff.Height);
            IColor       black    = new ColorByte(Color.Black);
            IColor       white    = new ColorByte(Color.White);

            for (int j = 0; j < imbinmap.Width; j++)
            {
                for (int i = 0; i < imbinmap.Height; i++)
                {
                    if (imdiff[j, i].Red + imdiff[j, i].Green + imdiff[j, i].Blue > 1e-6)
                    {
                        imdiff[j, i] = white;
                    }
                    else
                    {
                        imdiff[j, i] = black;
                    }
                }
            }
#if DEBUG
            ImageUtility.ToImageFile(imdiff, "bwmap.png", ImageFormat.Png);
#endif
            //Analyze the bin-diff-image
            VScan lvsc = new VScan(imdiff);
            lvsc.OriginalData.Analyze();

            //topological check
            //root nodes: either a pair of white and black descriptor
            //or a tree.
            // all further descendant must be children of the white cell

/*
 *                  int[] descriptorCounter = new int[2];
 *
 *                  //loop on the descriptors
 *                  foreach (IDescriptor desc in lvsc.OriginalData.Descriptors.ActiveDescriptors)
 *                  {
 *                      if (desc.Depth <= 1)
 *                      {
 *                          descriptorCounter[desc.Depth]++;
 *                      }
 *
 *                      Console.WriteLine("Descr " + desc.BoundingBox + "   " + desc.Depth);
 *                  }
 *
 *                  //check
 *                  int summ = descriptorCounter[0] + descriptorCounter[1];
 *                  if (summ != 2)
 *                  {
 *                      validResult = false;
 *                      if (summ == 0)
 *                      {
 *                          log = "<Fail> No top level descriptors found";
 *                      }
 *                      else
 *                      {
 *                          log = "<Fail> Too many top level descriptors found (should be two) :" + summ;
 *                      }
 *                  }
 */
            // topology is good to go, time to find the bounding box of the dynamic content
            int minx = int.MaxValue;
            int miny = int.MaxValue;
            int maxx = int.MinValue;
            int maxy = int.MinValue;
            if (validResult == true)
            {
                for (int j = 0; j < imdiff.Width; j++)
                {
                    for (int i = 0; i < imdiff.Height; i++)
                    {
                        double sum = imdiff[j, i].Blue + imdiff[j, i].Red + imdiff[j, i].Green;
                        if (sum > 1e-6)
                        {
                            if (j < minx)
                            {
                                minx = j;
                            }
                            if (i < miny)
                            {
                                miny = i;
                            }
                            if (j > maxx)
                            {
                                maxx = j;
                            }
                            if (i > maxy)
                            {
                                maxy = i;
                            }
                        }
                    }
                }

                // bounding box
                maxx -= minx;
                maxy -= miny;
                Console.WriteLine("<Target> found at " + minx + " " + miny + "  " + maxx + " " + maxy);
                ImageUtility.ToImageFile(imdiff, "Recpos.png");

                // synthesize the content into contentOff
                IImageAdapter iafcomp = new ImageAdapter(contentOff.Width, contentOff.Height);
                double        dx      = minx - _imageAdapterXmin;
                double        dy      = miny - _imageAdapterYmin;

                // translate results
                for (int j = 0; j < 4; j++)
                {
                    _contentLocation[j, 0] += dx;
                    _contentLocation[j, 1] += dy;
                }

                // copy the background
                iafcomp = (IImageAdapter)contentOff.Clone();

                // add the transformed content
                for (int j = 0; j < res.Width; j++)
                {
                    for (int i = 0; i < res.Height; i++)
                    {
                        if (j + minx < iafcomp.Width && i + miny < iafcomp.Height)
                        {
                            if (res[j, i].Alpha > 0)
                            {
                                double lalp = res[j, i].Alpha;
                                int    jid  = j + minx;
                                int    iid  = i + miny;
                                IColor lvc  = iafcomp[jid, iid];
                                lvc.Red           = lalp * res[j, i].Red + (1 - lalp) * iafcomp[jid, iid].Red;
                                lvc.Green         = lalp * res[j, i].Green + (1 - lalp) * iafcomp[jid, iid].Green;
                                lvc.Blue          = lalp * res[j, i].Blue + (1 - lalp) * iafcomp[jid, iid].Blue;
                                iafcomp[jid, iid] = lvc;
                            }
                        }
                    }
                }
#if DEBUG
                ImageUtility.ToImageFile(iafcomp, "SynthGlyph.png", ImageFormat.Png);
#endif

/*
 *                      if (Tolerance != null)
 *                      {
 *                          imageComparator.Tolerance.Clear();
 *                          double x = double.NaN;
 *                          double y = double.NaN;
 *                          for(int t=0;t<Tolerance.Count;t++)
 *                          {
 *                              x = (double)Tolerance.GetKey(t);
 *                              y = (double)Tolerance[x];
 *                              imageComparator.Tolerance.Add(x,y);
 *                          }
 *                      }
 */
                validResult = imageComparator.Compare(iafcomp, contentOn);
                string toluse = "No Tolerance used - strict comparison";
                if (_tolerance != null)
                {
                    toluse = "with the given Tolerance";
                }
                if (validResult == false)
                {
                    log = "<Fail> Computed Content does not match actual content -- " + toluse;
                }
                else
                {
                    log = "<Pass>";
                }

                Console.WriteLine("Final comp pass " + validResult);
                ImageUtility.ToImageFile(imageComparator.GetErrorDifference(ErrorDifferenceType.FilterEdge), "SynthError.png");
#if DEBUG
                using (Bitmap fbmp = ImageUtility.ToBitmap(contentOn))
                {
                    using (Graphics graphics = Graphics.FromImage(fbmp))
                    {
                        using (Brush brush = new SolidBrush(Color.FromArgb(40, 255, 0, 0)))
                        {
                            graphics.FillRectangle(brush, minx, miny, maxx, maxy);
                        }
                        using (Pen pen = new Pen(Color.Red, 2))
                        {
                            graphics.DrawRectangle(pen, minx, miny, maxx, maxy);
                        }
                        using (Font fnt = new Font("Arial", 10))
                        {
                            SizeF sz = graphics.MeasureString("TL", fnt);
                            graphics.FillRectangle(Brushes.Yellow, (float)_contentLocation[0, 0], (float)_contentLocation[0, 1], sz.Width, sz.Height);
                            graphics.FillRectangle(Brushes.Yellow, (float)_contentLocation[1, 0], (float)_contentLocation[1, 1], sz.Width, sz.Height);
                            graphics.FillRectangle(Brushes.Yellow, (float)_contentLocation[2, 0], (float)_contentLocation[2, 1], sz.Width, sz.Height);
                            graphics.FillRectangle(Brushes.Yellow, (float)_contentLocation[3, 0], (float)_contentLocation[3, 1], sz.Width, sz.Height);
                            graphics.DrawString("TL", fnt, Brushes.Red, (float)_contentLocation[0, 0], (float)_contentLocation[0, 1]);
                            graphics.DrawString("TR", fnt, Brushes.Red, (float)_contentLocation[1, 0], (float)_contentLocation[1, 1]);
                            graphics.DrawString("BL", fnt, Brushes.Red, (float)_contentLocation[2, 0], (float)_contentLocation[2, 1]);
                            graphics.DrawString("BR", fnt, Brushes.Red, (float)_contentLocation[3, 0], (float)_contentLocation[3, 1]);
                        }
                        fbmp.Save("TrackMatch.png");
                    }
                }
#endif
            }
            Result dcres = new Result(validResult, log, _contentLocation);

            return(dcres);
        }
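
The compositing loop above applies a straight source-over alpha blend per channel before the final comparison. A minimal sketch of that formula, assuming channel and alpha values are doubles as exposed by IColor in the example:

        private static double BlendChannel(double srcChannel, double dstChannel, double srcAlpha)
        {
            // result = alpha * source + (1 - alpha) * destination
            return srcAlpha * srcChannel + (1 - srcAlpha) * dstChannel;
        }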
Example #13
        private void DoVscanCompare(object asyncData)
        {
            AsyncData data = asyncData as AsyncData;

            if (data == null)
            {
                throw new ArgumentException("Parameter passed in to the Method not of type AsyncData (or null)", "asyncData");
            }

            ImageComparator ic = new ImageComparator();

            ic.Curve.CurveTolerance.LoadTolerance(data.ToleranceSettings.XmlNodeTolerance);

            IImageAdapter masterAdapter   = new ImageAdapter(data.MasterImage);
            IImageAdapter capturedAdapter = new ImageAdapter(data.CapturedImage);

            // Compare the master to the captured image using the Compare overload that scales the image size to account for DPI
            data.Result.Succeeded = ic.Compare(masterAdapter, MetadataInfoHelper.GetDpi(masterAdapter), capturedAdapter, MetadataInfoHelper.GetDpi(capturedAdapter), false);
            if (data.Result.Succeeded == false)
            {
                Microsoft.Test.Logging.GlobalLog.LogStatus("Regular comparison failed");
            }
            // On failure, check whether the user wants to filter the image (IgnoreAntiAliasing will do)
            IImageAdapter masterFiltered  = null;
            IImageAdapter captureFiltered = null;

            if (data.Result.Succeeded == false && data.ToleranceSettings.Filter != null)
            {
                // first save error diff image
                string errorDiffName = ".\\ErrorDiff_" + data.Index + IMAGE_EXTENSION;
                ImageUtility.ToImageFile(ic.GetErrorDifference(ErrorDifferenceType.IgnoreAlpha), errorDiffName);
                Microsoft.Test.Logging.GlobalLog.LogFile(errorDiffName);

                // Compare failed, filter the images and retry
                Microsoft.Test.Logging.GlobalLog.LogStatus("Filtering and recompare");
                masterFiltered        = data.ToleranceSettings.Filter.Process(masterAdapter);
                captureFiltered       = data.ToleranceSettings.Filter.Process(capturedAdapter);
                data.Result.Succeeded = ic.Compare(masterFiltered, captureFiltered, false);
                if (data.Result.Succeeded == false)
                {
                    Microsoft.Test.Logging.GlobalLog.LogStatus("==> Filtered comparison failed as well");
                }
            }

            if (data.Result.Succeeded)
            {
                Microsoft.Test.Logging.GlobalLog.LogStatus("Comparison SUCCEEDED.");
            }
            else
            {
                // Save master & filtered master for easy analysis
                string masterName = ".\\Master_" + data.Index + IMAGE_EXTENSION;
                data.MasterImage.Save(masterName, System.Drawing.Imaging.ImageFormat.Tiff);
                Microsoft.Test.Logging.GlobalLog.LogFile(masterName);
                if (masterFiltered != null)
                {
                    string filteredMasterName = ".\\MasterFiltered_" + data.Index + IMAGE_EXTENSION;
                    using (Bitmap filteredMaster = ImageUtility.ToBitmap(masterFiltered))
                    {
                        SetMetadataToImage(filteredMaster);
                        filteredMaster.Save(filteredMasterName, System.Drawing.Imaging.ImageFormat.Tiff);
                    }
                    Microsoft.Test.Logging.GlobalLog.LogFile(filteredMasterName);
                }

                // Save rendered image (as "Actual_n") for easy analysis
                string capturedName = ".\\Actual_" + data.Index + IMAGE_EXTENSION;
                data.CapturedImage.Save(capturedName, System.Drawing.Imaging.ImageFormat.Tiff);
                Microsoft.Test.Logging.GlobalLog.LogFile(capturedName);
                // Save actual filtered for easy analysis
                if (captureFiltered != null)
                {
                    string filteredRenderedName = ".\\ActualFiltered_" + data.Index + IMAGE_EXTENSION;
                    using (Bitmap filteredRendered = ImageUtility.ToBitmap(captureFiltered))
                    {
                        SetMetadataToImage(filteredRendered);
                        filteredRendered.Save(filteredRenderedName, System.Drawing.Imaging.ImageFormat.Tiff);
                    }
                    Microsoft.Test.Logging.GlobalLog.LogFile(filteredRenderedName);
                }

                // Master might need to be updated, save with correct name and metadata
                //
                // In this image, encode full criteria
                string name         = System.IO.Path.GetFileName(data.MasterName);
                string originalName = name.Replace(IMAGE_EXTENSION, "_FullCtriteria" + IMAGE_EXTENSION);
                Microsoft.Test.Logging.GlobalLog.LogStatus("Saving master with all criteria (new master) as '" + originalName + "'");
                SetMetadataToImage(data.CapturedImage);
                data.CapturedImage.Save(originalName, System.Drawing.Imaging.ImageFormat.Tiff);
                Microsoft.Test.Logging.GlobalLog.LogFile(originalName);
                //
                // In this image, encode only criteria that match the master
                string originalNameFull = name.Replace(IMAGE_EXTENSION, "_MatchingCriteria" + IMAGE_EXTENSION);
                Microsoft.Test.Logging.GlobalLog.LogStatus("Saving master with matching criteria encoded (to replace previous master) as '" + originalNameFull + "'");
                MasterMetadata metadata = ImageMetadata.MetadataFromImage(data.MasterImage);
                // Keep master Criteria but update its Description.
                IMasterDimension[] keys = new IMasterDimension[metadata.Description.Count];
                metadata.Description.Keys.CopyTo(keys, 0);
                for (int t = 0; t < keys.Length; t++)
                {
                    metadata.Description[keys[t]] = keys[t].GetCurrentValue();
                }
                ImageMetadata.SetMetadataToImage(metadata, data.CapturedImage);
                data.CapturedImage.Save(originalNameFull, System.Drawing.Imaging.ImageFormat.Tiff);
                Microsoft.Test.Logging.GlobalLog.LogFile(originalNameFull);

                // first save error diff image
                string errorDiffFilterName = ".\\ErrorDiffFiltered_" + data.Index + IMAGE_EXTENSION;
                if (data.ToleranceSettings.Filter == null)
                {
                    // No filter was applied, change the name (so it's not confusing)
                    errorDiffFilterName = ".\\ErrorDiff_" + data.Index + IMAGE_EXTENSION;
                }
                ImageUtility.ToImageFile(ic.GetErrorDifference(ErrorDifferenceType.IgnoreAlpha), errorDiffFilterName);
                Microsoft.Test.Logging.GlobalLog.LogFile(errorDiffFilterName);
            }

            data.Result.IsCompleted = true;

            if (data.SynchronizationObject != null)
            {
                data.SynchronizationObject.Set();
            }
        }
Example #14
    void Update()
    {
        switch (stageState)
        {
        case StageState.Intro:
            if (Input.GetKeyDown(KeyCode.Space) || getSendTrigger())
            {
                int similarityPercent = SimilarityCheck();

                //show result feedback
                if (similarityPercent < 40)
                {
                    //miss

                    // Flash the overlap image and reset the session
                    StartCoroutine(SessionTransition(1.2f));
                }
                else
                {
                    StartCoroutine(SetStageTimer(startingStageTimeValue));
                    SetSession();
                    StartCoroutine(IntroScreenAnimation());
                }
            }
            break;

        case StageState.Playing:
            /*
             * Test input: press Y to see how the game is generating the HandicapMap (RegionMap) (ProximityMap) (THICCmap)
             */
            if (Input.GetKeyDown(KeyCode.Y))
            {
                Texture2D _aux = paintedImage.sharedMaterial.mainTexture as Texture2D;
                _aux = ImageComparator.CreateTextureMap(originalSprite.sprite.texture, Color.black);
                paintedImage.material.mainTexture = _aux;
                _aux.Apply();
            }


            if (Input.GetKeyDown(KeyCode.Space) || getSendTrigger())
            {
                int similarityPercent = SimilarityCheck();

                // Add the points to the stage score
                scoreboard.AddScore(similarityPercent / 2, bonusBar.getValue());

                // Add time
                int timeAdd = (int)(similarityPercent * 0.01f * timeForHundredPercent);
                timeValue += timeAdd;
                StartCoroutine(MoreTime(timeAdd));

                // Flash the overlap image and reset the session
                StartCoroutine(SessionTransition(1.2f));

                stageState = StageState.Transition;
            }

            break;

        default:
            break;
        }
    }