// Verifies that ImageAssert.AreEqual treats the BMP and the PNG encoding of
// the same square image as equal — and in both argument orders, so the
// comparison is symmetric regardless of which format is "expected".
public void WhenEqualBmpPng()
{
    ImageAssert.AreEqual(Properties.Resources.SquareBmp, Properties.Resources.SquarePng);
    ImageAssert.AreEqual(Properties.Resources.SquarePng, Properties.Resources.SquareBmp);
}
// HDRP graphics test coroutine: loads the scene from the test case, waits for
// it to settle, then compares the camera render against the reference image.
// When the scene provides an HDRP_ShaderGraph_TestSettings with compareSGtoBI
// enabled, it instead renders the Shader Graph object set and the built-in
// (HDRP/Lit) object set separately and checks each against the same reference.
public IEnumerator Run(GraphicsTestCase testCase)
{
    SceneManager.LoadScene(testCase.ScenePath);

    // Arbitrary wait for 5 frames for the scene to load, and other stuff to
    // happen (like Realtime GI to appear ...)
    for (int i = 0; i < 5; ++i)
    {
        yield return (null);
    }

    // Load the test settings
    var settings = GameObject.FindObjectOfType<HDRP_TestSettings>();

    var camera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
    if (camera == null)
    {
        // Fall back to any camera in the scene when none is tagged MainCamera.
        camera = GameObject.FindObjectOfType<Camera>();
    }
    if (camera == null)
    {
        Assert.Fail("Missing camera for graphic tests.");
    }

    Time.captureFramerate = settings.captureFramerate;

    // Skip incompatible XR tests (layout set to None in the scene)
    if (XRSystem.testModeEnabled && settings.xrLayout == XRLayoutOverride.None)
    {
        yield break;
    }

    if (settings.doBeforeTest != null)
    {
        settings.doBeforeTest.Invoke();

        // Wait again one frame, to be sure.
        yield return (null);
    }

    // Scene-configurable extra settling frames.
    for (int i = 0; i < settings.waitFrames; ++i)
    {
        yield return (null);
    }

    var settingsSG = (GameObject.FindObjectOfType<HDRP_TestSettings>() as HDRP_ShaderGraph_TestSettings);
    if (settingsSG == null || !settingsSG.compareSGtoBI)
    {
        // Standard Test
        ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null) ? settings.ImageComparisonSettings : null);

#if CHECK_ALLOCATIONS_WHEN_RENDERING
        // Does it allocate memory when it renders what's on camera?
        bool allocatesMemory = false;
        try
        {
            ImageAssert.AllocatesMemory(camera, 512, 512); // 512 used for height and width to render
        }
        catch (AssertionException)
        {
            allocatesMemory = true;
        }
        if (allocatesMemory)
        {
            Assert.Fail("Allocated memory when rendering what is on camera");
        }
#endif
    }
    else
    {
        if (settingsSG.sgObjs == null)
        {
            Assert.Fail("Missing Shader Graph objects in test scene.");
        }
        if (settingsSG.biObjs == null)
        {
            Assert.Fail("Missing comparison objects in test scene.");
        }

        // Show only the Shader Graph objects for the first comparison.
        settingsSG.sgObjs.SetActive(true);
        settingsSG.biObjs.SetActive(false);
        yield return (null); // Wait a frame
        yield return (null);

        bool sgFail = false;
        bool biFail = false;

        // First test: Shader Graph
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null) ? settings.ImageComparisonSettings : null);
        }
        catch (AssertionException)
        {
            sgFail = true;
        }

        // Swap to the built-in-material objects, at the same position.
        settingsSG.sgObjs.SetActive(false);
        settingsSG.biObjs.SetActive(true);
        settingsSG.biObjs.transform.position = settingsSG.sgObjs.transform.position; // Move to the same location.
        yield return (null); // Wait a frame
        yield return (null);

        // Second test: HDRP/Lit Materials
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null) ? settings.ImageComparisonSettings : null);
        }
        catch (AssertionException)
        {
            biFail = true;
        }

        // Informs which ImageAssert failed, if any.
        if (sgFail && biFail)
        {
            Assert.Fail("Both Shader Graph and Non-Shader Graph Objects failed to match the reference image");
        }
        else if (sgFail)
        {
            Assert.Fail("Shader Graph Objects failed.");
        }
        else if (biFail)
        {
            Assert.Fail("Non-Shader Graph Objects failed to match Shader Graph objects.");
        }
    }
}
// Verifies that a TextBlock constrained by its container clips its text:
// after hiding each text block (Opacity = 0), the pixels to the right of and
// below its border must be unchanged — i.e. no text bled outside the clip.
public void When_Text_is_Constrained_Then_Clipping_is_Applied()
{
    Run("Uno.UI.Samples.Content.UITests.TextBlockControl.TextBlock_ConstrainedByContainer");

    // 1) Take a screenshot of the whole sample
    // 2) Switch opacity of text to zero
    // 3) Take new screenshot
    // 4) Compare Right zone -- must be identical
    // 5) Compare Bottom zone -- must be identical
    // 6) Do the same for subsequent text block in sample (1 to 5)
    //
    // +-sampleRootPanel------------+--------------+
    // |                            |              |
    // |  +-borderX---------------+ |              |
    // |  | [textX]Lorem ipsum... | |              |
    // |  +-----------------------+ |  Right zone  |
    // |    |                       |              |
    // |    |     Bottom zone       |              |
    // |    |                       |              |
    // +----+-----------------------+--------------+

    // (1)
    using var sampleScreenshot = this.TakeScreenshot("fullSample", ignoreInSnapshotCompare: true);
    var sampleRect = _app.GetPhysicalRect("sampleRootPanel");

    // Collect all failures before reporting, so every text block gets checked.
    using var _ = new AssertionScope();

    Test("text1", "border1");
    Test("text2", "border2");
    Test("text3", "border3");
    Test("text4", "border4");
    Test("text5", "border5");

    // Runs steps (2)-(5) for one text / border pair.
    void Test(string textControl, string borderControl)
    {
        var textRect = _app.GetPhysicalRect(borderControl);

        // (2)
        _app.Marked(textControl).SetDependencyPropertyValue("Opacity", "0");

        // (3)
        using var afterScreenshot = this.TakeScreenshot("sample-" + textControl, ignoreInSnapshotCompare: true);

        // (4)
        using (var s = new AssertionScope("Right zone"))
        {
            // Deflate by 1px to avoid anti-aliasing artifacts along the zone edge.
            var rect1 = new AppRect(
                x: textRect.Right,
                y: sampleRect.Y,
                width: sampleRect.Right - textRect.Right,
                height: sampleRect.Height)
                .DeflateBy(1f);
            ImageAssert.AreEqual(sampleScreenshot, rect1, afterScreenshot, rect1);
        }

        // (5)
        using (var s = new AssertionScope("Bottom zone"))
        {
            var rect2 = new AppRect(
                x: textRect.X,
                y: textRect.Bottom,
                width: textRect.Width,
                height: sampleRect.Height - textRect.Bottom)
                .DeflateBy(1f);
            ImageAssert.AreEqual(sampleScreenshot, rect2, afterScreenshot, rect2);
        }
    }
}
/// <summary>
/// Compares the NeuQuant and Octree quantizers: encodes a 256+ colour bitmap
/// with NeuQuant at sampling factors 1-20 and with Octree, decodes each
/// result, checks frame/colour counts, and reports the smallest pixel
/// tolerance at which the decoded frame matches the source. Finally
/// re-encodes an existing GIF with both quantizers (no quantization should
/// occur) and asserts the two outputs decode to identical frames.
/// </summary>
public void CompareQuantizers()
{
    ReportStart();

    string fileName;
    TimeSpan encodingTime;

    // Test actual quantization using image with 256+ colours.
    string bitmapFileName = "images/" + TestFixtureName + "." + TestCaseName + ".bmp";
    Bitmap b = new Bitmap(bitmapFileName);

    // Writes the current encoder state to outputFileName and returns the
    // elapsed time. Stopwatch is monotonic, unlike the previous DateTime.Now
    // subtraction which could be corrupted by system clock adjustments.
    TimeSpan TimeEncoding(string outputFileName)
    {
        var stopwatch = System.Diagnostics.Stopwatch.StartNew();
        _e.WriteToFile(outputFileName);
        stopwatch.Stop();
        return stopwatch.Elapsed;
    }

    // Finds and reports the smallest pixel tolerance at which the first
    // decoded frame matches the source bitmap; rethrows the assertion
    // failure if even the maximum tolerance (255) is not enough.
    void ReportRequiredTolerance(string assertDescription, string messagePrefix)
    {
        for (int tolerance = 0; tolerance < 256; tolerance++)
        {
            try
            {
                ImageAssert.AreEqual(b, _d.Frames[0].TheImage, tolerance, assertDescription);
                WriteMessage(messagePrefix + " required tolerance " + tolerance);
                break;
            }
            catch (AssertionExtensionException)
            {
                if (tolerance == 255)
                {
                    throw;
                }
            }
        }
    }

    // NeuQuant at every sampling factor from 1 (best quality) to 20.
    for (int q = 1; q <= 20; q++)
    {
        _e = new AnimatedGifEncoder();
        _e.QuantizerType = QuantizerType.NeuQuant;
        _e.SamplingFactor = q;
        _e.AddFrame(new GifFrame(b));
        fileName = TestFixtureName + "." + TestCaseName + ".NeuQuant." + q + ".gif";
        encodingTime = TimeEncoding(fileName);
        WriteMessage("Encoding with quantization using NeuQuant, quality=" + q + " took " + encodingTime);

        _d = new GifDecoder(fileName);
        _d.Decode();
        Assert.AreEqual(ErrorState.Ok, _d.ConsolidatedState, "Quality " + q);
        Assert.AreEqual(1, _d.Frames.Count, "Quality " + q);

        // FIXME: NeuQuant quantizer reducing to 180 colours instead of 256 colours
        // TODO: Check for exactly 256 colours once Octree quantizer returns 256-colour images
        // Assert.AreEqual( 256, ImageTools.GetDistinctColours( colours ).Count );
        Assert.LessOrEqual(ImageTools.GetDistinctColours(_d.Frames[0].TheImage).Count, 256);

        ReportRequiredTolerance("Quality " + q, "Quality " + q);
    }

    // Octree (no configurable sampling factor).
    _e = new AnimatedGifEncoder();
    _e.QuantizerType = QuantizerType.Octree;
    _e.AddFrame(new GifFrame(b));
    fileName = TestFixtureName + "." + TestCaseName + ".Octree.gif";
    encodingTime = TimeEncoding(fileName);
    WriteMessage("Encoding with quantization using Octree took " + encodingTime);

    _d = new GifDecoder(fileName);
    _d.Decode();
    Assert.AreEqual(ErrorState.Ok, _d.ConsolidatedState);
    Assert.AreEqual(1, _d.Frames.Count);

    // FIXME: Octree quantizer should return a 256-colour image here
    // Assert.AreEqual( 256, ImageTools.GetDistinctColours( colours2 ).Count );
    Assert.LessOrEqual(ImageTools.GetDistinctColours(_d.Frames[0].TheImage).Count, 256);

    ReportRequiredTolerance("Octree", "Octree quantization");

    // The source bitmap is no longer needed — release its GDI+ resources.
    b.Dispose();

    // re-encoding an existing GIF should not cause quantization
    _d = new GifDecoder(@"images\globe\spinning globe better 200px transparent background.gif");
    _d.Decode();
    _e = new AnimatedGifEncoder();
    // NB OctreeQuantizer does not support global colour tables (yet!)
    _e.ColourTableStrategy = ColourTableStrategy.UseLocal;
    foreach (GifFrame f in _d.Frames)
    {
        _e.AddFrame(new GifFrame(f.TheImage));
    }

    fileName = "NeuQuant.gif";
    _e.QuantizerType = QuantizerType.NeuQuant;
    encodingTime = TimeEncoding(fileName);
    WriteMessage("Encoding without quantization using NeuQuant took " + encodingTime);

    fileName = "Octree.gif";
    _e.QuantizerType = QuantizerType.Octree;
    encodingTime = TimeEncoding(fileName);
    WriteMessage("Encoding without quantization using Octree took " + encodingTime);

    // Both re-encodings of the same source must decode to identical frames.
    GifDecoder nqDecoder = new GifDecoder("NeuQuant.gif");
    nqDecoder.Decode();
    GifDecoder otDecoder = new GifDecoder("Octree.gif");
    otDecoder.Decode();
    Assert.AreEqual(nqDecoder.Frames.Count, otDecoder.Frames.Count);
    for (int i = 0; i < nqDecoder.Frames.Count; i++)
    {
        ImageAssert.AreEqual(nqDecoder.Frames[i].TheImage, otDecoder.Frames[i].TheImage, "frame " + i);
    }
    ReportEnd();
}
// Verifies Border rendering with CornerRadius=10 across several
// BorderThickness combinations: a side's target pixel must show the
// 50%-opacity border colour composited over white where that side has
// thickness, and the 50%-opacity background colour where it does not.
// The center must always show the background colour.
public void Border_CornerRadius_BorderThickness()
{
    // White Background color underneath
    const string white = "#FFFFFF";

    //Colors with 50% Opacity
    const string red50 = "#80FF0000";
    const string blue50 = "#800000FF";

    //Same colors but with the addition of a White background color underneath
    const string lightPink = "#FF7F7F";
    const string lightBlue = "#7F7FFF";

    // Each entry: per-side thickness (left, top, right, bottom) and the
    // colour expected at each side's target point for that thickness.
    var expectedColors = new[]
    {
        new ExpectedColor { Thicknesses = new [] { 10, 10, 10, 10 }, Colors = new [] { lightPink, lightPink, lightPink, lightPink } },
        new ExpectedColor { Thicknesses = new [] { 10, 0, 10, 10 }, Colors = new [] { lightPink, lightBlue, lightPink, lightPink } },
        new ExpectedColor { Thicknesses = new [] { 10, 0, 0, 10 }, Colors = new [] { lightPink, lightBlue, lightBlue, lightPink } },
        new ExpectedColor { Thicknesses = new [] { 10, 0, 0, 0 }, Colors = new [] { lightPink, lightBlue, lightBlue, lightBlue } },
        new ExpectedColor { Thicknesses = new [] { 0, 0, 0, 0 }, Colors = new [] { lightBlue, lightBlue, lightBlue, lightBlue } },
    };

    Run("UITests.Windows_UI_Xaml_Controls.BorderTests.Border_CornerRadius_BorderThickness");

    _app.WaitForElement("MyBackgroundUnderneath");
    SetBorderProperty("MyBackgroundUnderneath", "Background", white);

    _app.WaitForElement("MyBorder");

    // Physical (pixel) rects of the five measurement points.
    var leftTarget = _app.GetPhysicalRect("LeftTarget");
    var topTarget = _app.GetPhysicalRect("TopTarget");
    var rightTarget = _app.GetPhysicalRect("RightTarget");
    var bottomTarget = _app.GetPhysicalRect("BottomTarget");
    var centerTarget = _app.GetPhysicalRect("CenterTarget");

    SetBorderProperty("MyBorder", "CornerRadius", "10");
    SetBorderProperty("MyBorder", "BorderBrush", red50);
    SetBorderProperty("MyBorder", "Background", blue50);

    foreach (var expected in expectedColors)
    {
        SetBorderProperty("MyBorder", "BorderThickness", expected.ToString());

        using var snapshot = TakeScreenshot($"Border-CornerRadius-10-BorderThickness-{expected}");

        ImageAssert.HasPixels(
            snapshot,
            ExpectedPixels
                .At($"left-{expected}", leftTarget.CenterX, leftTarget.CenterY)
                .WithPixelTolerance(1, 1)
                .Pixel(expected.Colors[0]),
            ExpectedPixels
                .At($"top-{expected}", topTarget.CenterX, topTarget.CenterY)
                .WithPixelTolerance(1, 1)
                .Pixel(expected.Colors[1]),
            ExpectedPixels
                .At($"right-{expected}", rightTarget.CenterX, rightTarget.CenterY)
                .WithPixelTolerance(1, 1)
                .Pixel(expected.Colors[2]),
            ExpectedPixels
                .At($"bottom-{expected}", bottomTarget.CenterX, bottomTarget.CenterY)
                .WithPixelTolerance(1, 1)
                .Pixel(expected.Colors[3]),
            ExpectedPixels
                .At($"center-{expected}", centerTarget.CenterX, centerTarget.CenterY)
                .WithPixelTolerance(1, 1)
                .Pixel(lightBlue)
        );
    }
}
// Graphics test coroutine for custom-pipeline scenes: loads the scene, waits
// the configured number of frames, then compares every MainCamera-tagged
// camera's output against the reference image. Scenes without the expected
// naming or settings component are ignored rather than failed.
public IEnumerator Run(GraphicsTestCase testCase)
{
    CleanUp();

    if (!testCase.ScenePath.Contains("GraphicsTest"))
    {
        Assert.Ignore("Ignoring this test because the scene is not under GraphicsTest folder, or not named with GraphicsTest.");
    }

    SceneManager.LoadScene(testCase.ScenePath);

    // Always wait one frame for scene load
    yield return (null);

    //Get Test settings
    //ignore instead of failing, because some scenes might not be used for GraphicsTest
    var settings = Object.FindObjectOfType<GraphicsTestSettingsCustom>();
    if (settings == null)
    {
        Assert.Ignore("Ignoring this test for GraphicsTest because couldn't find GraphicsTestSettingsCustom");
    }

#if !UNITY_EDITOR
    // In players, force the display to the comparison resolution.
    Screen.SetResolution(settings.ImageComparisonSettings.TargetWidth, settings.ImageComparisonSettings.TargetHeight, false);
#endif

    var cameras = GameObject.FindGameObjectsWithTag("MainCamera").Select(x => x.GetComponent<Camera>());

    //var settings = Object.FindObjectOfType<UniversalGraphicsTestSettings>();
    //Assert.IsNotNull(settings, "Invalid test scene, couldn't find UniversalGraphicsTestSettings");

    //Scene scene = SceneManager.GetActiveScene();

    yield return (null);

    int waitFrames = settings.WaitFrames;

    // Back-buffer capture needs at least one end-of-frame wait.
    if (settings.ImageComparisonSettings.UseBackBuffer && settings.WaitFrames < 1)
    {
        waitFrames = 1;
    }

    for (int i = 0; i < waitFrames; i++)
    {
        yield return (new WaitForEndOfFrame());
    }

#if UNITY_ANDROID
    // On Android first scene often needs a bit more frames to load all the assets
    // otherwise the screenshot is just a black screen
    if (!wasFirstSceneRan)
    {
        for (int i = 0; i < firstSceneAdditionalFrames; i++)
        {
            yield return (null);
        }
        wasFirstSceneRan = true;
    }
#endif

    ImageAssert.AreEqual(testCase.ReferenceImage, cameras.Where(x => x != null), settings.ImageComparisonSettings);
}
// VFX graphics test coroutine: loads the scene, locks the engine and the VFX
// fixed time step to the scene's capture frame rate, lets every VisualEffect
// warm up for the configured simulate time, renders the main camera into a
// temporary RenderTexture, and compares the readback against the reference.
[Timeout(450 * 1000)] // Increase timeout to handle complex scenes with many shaders and XR variants
public IEnumerator Run(GraphicsTestCase testCase)
{
#if UNITY_EDITOR
    // Close all Scene views before running (an open Scene view can affect rendering).
    while (SceneView.sceneViews.Count > 0)
    {
        var sceneView = SceneView.sceneViews[0] as SceneView;
        sceneView.Close();
    }
#endif

    SceneManagement.SceneManager.LoadScene(testCase.ScenePath);

    // Always wait one frame for scene load
    yield return (null);

    var testSettingsInScene = Object.FindObjectOfType<GraphicsTestSettings>();
    var vfxTestSettingsInScene = Object.FindObjectOfType<VFXGraphicsTestSettings>();

    if (XRGraphicsAutomatedTests.enabled)
    {
        if (vfxTestSettingsInScene == null || vfxTestSettingsInScene.xrCompatible)
        {
            XRGraphicsAutomatedTests.running = true;
        }
        else
        {
            Assert.Ignore("Test scene is not compatible with XR and will be skipped.");
        }
    }

    //Setup frame rate capture
    float simulateTime = VFXGraphicsTestSettings.defaultSimulateTime;
    int captureFrameRate = VFXGraphicsTestSettings.defaultCaptureFrameRate;
    if (vfxTestSettingsInScene != null)
    {
        simulateTime = vfxTestSettingsInScene.simulateTime;
        captureFrameRate = vfxTestSettingsInScene.captureFrameRate;
    }
    float period = 1.0f / captureFrameRate;

    // Lock both the engine capture rate and the VFX fixed time step to the same period.
    Time.captureFramerate = captureFrameRate;
    UnityEngine.VFX.VFXManager.fixedTimeStep = period;
    UnityEngine.VFX.VFXManager.maxDeltaTime = period;

    //Waiting for the capture frame rate to be effective
    const int maxFrameWaiting = 8;
    int maxFrame = maxFrameWaiting;
    while (Time.deltaTime != period && maxFrame-- > 0)
    {
        yield return (null);
    }
    Assert.Greater(maxFrame, 0);

    // Capture size defaults to 512x512 unless the scene overrides it.
    int captureSizeWidth = 512;
    int captureSizeHeight = 512;
    if (testSettingsInScene != null)
    {
        captureSizeWidth = testSettingsInScene.ImageComparisonSettings.TargetWidth;
        captureSizeHeight = testSettingsInScene.ImageComparisonSettings.TargetHeight;
    }

    var camera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
    if (camera)
    {
        var vfxComponents = Resources.FindObjectsOfTypeAll<VisualEffect>();

        var rt = RenderTexture.GetTemporary(captureSizeWidth, captureSizeHeight, 24);
        camera.targetTexture = rt;

        //Waiting for the rendering to be ready, if at least one component has been culled, camera is ready
        maxFrame = maxFrameWaiting;
        while (vfxComponents.All(o => o.culled) && maxFrame-- > 0)
        {
            yield return (null);
        }
        Assert.Greater(maxFrame, 0);

        // Restart every effect so all runs capture from the same simulation origin.
        foreach (var component in vfxComponents)
        {
            component.Reinit();
        }

#if UNITY_EDITOR
        //When we change the graph, if animator was already enable, we should reinitialize animator to force all BindValues
        var animators = Resources.FindObjectsOfTypeAll<Animator>();
        foreach (var animator in animators)
        {
            animator.Rebind();
        }
        var audioSources = Resources.FindObjectsOfTypeAll<AudioSource>();
#endif

        var paramBinders = Resources.FindObjectsOfTypeAll<VFXPropertyBinder>();
        foreach (var paramBinder in paramBinders)
        {
            var binders = paramBinder.GetPropertyBinders<VFXBinderBase>();
            foreach (var binder in binders)
            {
                binder.Reset();
            }
        }

        // Advance the simulation for simulateTime seconds of fixed-rate frames.
        int waitFrameCount = (int)(simulateTime / period);
        int startFrameIndex = Time.frameCount;
        int expectedFrameIndex = startFrameIndex + waitFrameCount;

        while (Time.frameCount != expectedFrameIndex)
        {
            yield return (null);

#if UNITY_EDITOR
            foreach (var audioSource in audioSources)
            {
                if (audioSource.clip != null && audioSource.playOnAwake)
                {
                    audioSource.PlayDelayed(Mathf.Repeat(simulateTime, audioSource.clip.length));
                }
            }
#endif
        }

        Texture2D actual = null;
        try
        {
            // Read the captured frame back from the render target for comparison.
            camera.targetTexture = null;
            actual = new Texture2D(captureSizeWidth, captureSizeHeight, TextureFormat.RGB24, false);
            RenderTexture.active = rt;
            actual.ReadPixels(new Rect(0, 0, captureSizeWidth, captureSizeHeight), 0, 0);
            RenderTexture.active = null;
            actual.Apply();

            var imageComparisonSettings = new ImageComparisonSettings() { AverageCorrectnessThreshold = VFXGraphicsTestSettings.defaultAverageCorrectnessThreshold };
            if (testSettingsInScene != null)
            {
                imageComparisonSettings.AverageCorrectnessThreshold = testSettingsInScene.ImageComparisonSettings.AverageCorrectnessThreshold;
            }

            ImageAssert.AreEqual(testCase.ReferenceImage, actual, imageComparisonSettings);
        }
        finally
        {
            // Always release the temporary render target and the readback texture.
            RenderTexture.ReleaseTemporary(rt);
            if (actual != null)
            {
                UnityEngine.Object.Destroy(actual);
            }
        }
    }
}
// Universal (URP) graphics test coroutine with mock-HMD XR support: loads the
// scene, configures XR via ConfigureMockHMD, waits the required frames,
// compares the cameras against the reference image, then verifies that
// rendering the main camera performs no GC allocations.
public IEnumerator Run(GraphicsTestCase testCase)
{
    SceneManager.LoadScene(testCase.ScenePath);

    // Always wait one frame for scene load
    yield return (null);

    var cameras = GameObject.FindGameObjectsWithTag("MainCamera").Select(x => x.GetComponent<Camera>());
    var settings = Object.FindObjectOfType<UniversalGraphicsTestSettings>();
    Assert.IsNotNull(settings, "Invalid test scene, couldn't find UniversalGraphicsTestSettings");

    // Configure the mock HMD when XR is requested; returns the frame count to wait.
    int waitFrames = Unity.Testing.XR.Runtime.ConfigureMockHMD.SetupTest(settings.XRCompatible, settings.WaitFrames, settings.ImageComparisonSettings);

    Scene scene = SceneManager.GetActiveScene();

    yield return (null);

    // Back-buffer capture needs at least one end-of-frame wait.
    if (settings.ImageComparisonSettings.UseBackBuffer && waitFrames < 1)
    {
        waitFrames = 1;
    }

    for (int i = 0; i < waitFrames; i++)
    {
        yield return (new WaitForEndOfFrame());
    }

#if UNITY_ANDROID
    // On Android first scene often needs a bit more frames to load all the assets
    // otherwise the screenshot is just a black screen
    if (!wasFirstSceneRan)
    {
        for (int i = 0; i < firstSceneAdditionalFrames; i++)
        {
            yield return (new WaitForEndOfFrame());
        }
        wasFirstSceneRan = true;
    }
#endif

    ImageAssert.AreEqual(testCase.ReferenceImage, cameras.Where(x => x != null), settings.ImageComparisonSettings);

    // Does it allocate memory when it renders what's on the main camera?
    bool allocatesMemory = false;
    var mainCamera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
    try
    {
        ImageAssert.AllocatesMemory(mainCamera, settings?.ImageComparisonSettings);
    }
    catch (AssertionException)
    {
        allocatesMemory = true;
    }
    if (allocatesMemory)
    {
        Assert.Fail("Allocated memory when rendering what is on main camera");
    }
}
// HDRP graphics test coroutine with XR support: loads the scene, relaxes the
// comparison thresholds for XR-compatible scenes, compares the camera render
// against the reference image (optionally checking for GC allocations), or —
// when compareSGtoBI is set — renders the Shader Graph and built-in object
// sets separately and checks each against the same reference.
public IEnumerator Run(GraphicsTestCase testCase)
{
    SceneManager.LoadScene(testCase.ScenePath);

    // Arbitrary wait for 5 frames for the scene to load, and other stuff to
    // happen (like Realtime GI to appear ...)
    for (int i = 0; i < 5; ++i)
    {
        yield return (null);
    }

    // Load the test settings
    var settings = GameObject.FindObjectOfType<HDRP_TestSettings>();

    var camera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
    if (camera == null)
    {
        // Fall back to any camera in the scene when none is tagged MainCamera.
        camera = GameObject.FindObjectOfType<Camera>();
    }
    if (camera == null)
    {
        Assert.Fail("Missing camera for graphic tests.");
    }

    Time.captureFramerate = settings.captureFramerate;

    if (XRSystem.testModeEnabled)
    {
        if (settings.xrCompatible)
        {
            XRSystem.automatedTestRunning = true;

            // Increase tolerance to account for slight changes due to float precision
            settings.ImageComparisonSettings.AverageCorrectnessThreshold *= settings.xrThresholdMultiplier;
            settings.ImageComparisonSettings.PerPixelCorrectnessThreshold *= settings.xrThresholdMultiplier;
        }
        else
        {
            // Skip incompatible XR tests
            yield break;
        }
    }

    if (settings.doBeforeTest != null)
    {
        settings.doBeforeTest.Invoke();

        // Wait again one frame, to be sure.
        yield return (null);
    }

    // Scene-configurable extra settling frames.
    for (int i = 0; i < settings.waitFrames; ++i)
    {
        yield return (null);
    }

    var settingsSG = (GameObject.FindObjectOfType<HDRP_TestSettings>() as HDRP_ShaderGraph_TestSettings);
    if (settingsSG == null || !settingsSG.compareSGtoBI)
    {
        // Standard Test
        ImageAssert.AreEqual(testCase.ReferenceImage, camera, settings?.ImageComparisonSettings);

        if (settings.checkMemoryAllocation)
        {
            // Does it allocate memory when it renders what's on camera?
            bool allocatesMemory = false;
            try
            {
                // GC alloc from Camera.CustomRender (case 1206364)
                int gcAllocThreshold = 2;

#if UNITY_2019_3
                // In case playmode tests for XR are enabled in 2019.3 we allow one GC alloc from XRSystem:120
                if (XRSystem.testModeEnabled)
                {
                    gcAllocThreshold += 1;
                }
#endif

                ImageAssert.AllocatesMemory(camera, settings?.ImageComparisonSettings, gcAllocThreshold);
            }
            catch (AssertionException)
            {
                allocatesMemory = true;
            }
            if (allocatesMemory)
            {
                Assert.Fail("Allocated memory when rendering what is on camera");
            }
        }
    }
    else
    {
        if (settingsSG.sgObjs == null)
        {
            Assert.Fail("Missing Shader Graph objects in test scene.");
        }
        if (settingsSG.biObjs == null)
        {
            Assert.Fail("Missing comparison objects in test scene.");
        }

        // Show only the Shader Graph objects for the first comparison.
        settingsSG.sgObjs.SetActive(true);
        settingsSG.biObjs.SetActive(false);
        yield return (null); // Wait a frame
        yield return (null);

        bool sgFail = false;
        bool biFail = false;

        // First test: Shader Graph
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null) ? settings.ImageComparisonSettings : null);
        }
        catch (AssertionException)
        {
            sgFail = true;
        }

        // Swap to the built-in-material objects, at the same position.
        settingsSG.sgObjs.SetActive(false);
        settingsSG.biObjs.SetActive(true);
        settingsSG.biObjs.transform.position = settingsSG.sgObjs.transform.position; // Move to the same location.
        yield return (null); // Wait a frame
        yield return (null);

        // Second test: HDRP/Lit Materials
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null) ? settings.ImageComparisonSettings : null);
        }
        catch (AssertionException)
        {
            biFail = true;
        }

        // Informs which ImageAssert failed, if any.
        if (sgFail && biFail)
        {
            Assert.Fail("Both Shader Graph and Non-Shader Graph Objects failed to match the reference image");
        }
        else if (sgFail)
        {
            Assert.Fail("Shader Graph Objects failed.");
        }
        else if (biFail)
        {
            Assert.Fail("Non-Shader Graph Objects failed to match Shader Graph objects.");
        }
    }
}
// Derives the expected end state of an opacity animation from the calling
// test's name (animation type, FillBehavior, expected outcome), runs the
// animation to completion, and asserts the final rendering matches:
// - "Rollback" / "Canceled" leave the element identical to the start,
// - completed "Hold" leaves it light gray,
// - "Paused" leaves it different from the start but not light gray.
// Throws InvalidOperationException when the caller's name does not match the
// expected pattern.
private void TestOpacityFinalState([CallerMemberName] string testName = null)
{
    var match = Regex.Match(testName, @"When_Opacity_(?<type>\w+)_With_FillBehavior(?<fill>\w+)_Then_(?<expected>\w+)");
    if (!match.Success)
    {
        throw new InvalidOperationException("Invalid test name.");
    }

    var type = match.Groups["type"].Value;
    var fill = match.Groups["fill"].Value;
    var expected = match.Groups["expected"].Value;

    // Map the parsed name onto the single expected visual outcome.
    bool isSame = false, isGray = false, isDifferent = false;
    switch (type)
    {
        case "Completed" when expected == "Hold":
            isGray = true;
            break;

        case "Completed" when expected == "Rollback":
            isSame = true;
            break;

        case "Paused":
            isDifferent = true;
            break;

        case "Canceled":
            isSame = true;
            break;

        default:
            throw new InvalidOperationException("Invalid test name.");
    }

    Run(_finalStateOpacityTestControl, skipInitialScreenshot: true);

    // Dispose screenshots deterministically, matching the `using var` pattern
    // used by the other screenshot tests in this fixture.
    using var initial = TakeScreenshot("Initial", ignoreInSnapshotCompare: true);
    var element = _app.WaitForElement($"{type}AnimationHost_{fill}").Single().Rect;

    _app.Marked("StartButton").FastTap();
    _app.WaitForDependencyPropertyValue(_app.Marked("Status"), "Text", "Completed");

    // Assert
    using var final = TakeScreenshot("Final", ignoreInSnapshotCompare: true);
    if (isSame)
    {
        ImageAssert.AreEqual(initial, final, element);
    }
    else if (isGray)
    {
        ImageAssert.HasColorAt(final, element.CenterX, element.CenterY, Color.LightGray);
    }
    else if (isDifferent)
    {
        ImageAssert.AreNotEqual(initial, final, element);
        ImageAssert.DoesNotHaveColorAt(final, element.CenterX, element.CenterY, Color.LightGray);
    }
}
public void When_TransformGroup_Translate() { const string color = color1; const int trY = 50; var(host, scale, half, final) = BeginTransformGroupTest("TranslateHost"); // "Half" is approximative, we only validate that the element has move bellow ImageAssert.HasPixels( half, ExpectedPixels .At("Top left", host.X - 1, host.Y - 1) .WithPixelTolerance(x: 1, y: 1) .Pixels(new[, ] { { defaultColor, defaultColor }, { defaultColor, bgColor }, }), ExpectedPixels .At("Top right", host.Right - 1, host.Y - 1) .WithPixelTolerance(x: 1, y: 1) .Pixels(new[, ] { { defaultColor, defaultColor }, { bgColor, defaultColor }, }), ExpectedPixels .At("Bottom right", host.Right - 1, host.Bottom - 1) .WithPixelTolerance(x: 1, y: 1) .Pixels(new[, ] { { color, defaultColor }, { color, defaultColor }, }), ExpectedPixels .At("Bottom left", host.X - 1, host.Bottom - 1) .WithPixelTolerance(x: 1, y: 1) .Pixels(new[, ] { { defaultColor, color }, { defaultColor, color }, }) ); //// Complete animation var finalTrY = trY * scale; ImageAssert.HasPixels( final, ExpectedPixels .At("Top left", host.X - 1, host.Y + finalTrY - 1) .WithPixelTolerance(x: 1, y: 1) .Pixels(new[, ] { { defaultColor, bgColor }, { defaultColor, color } }), ExpectedPixels .At("Top right", host.Right - 1, host.Y + finalTrY - 1) .WithPixelTolerance(x: 1, y: 1) .Pixels(new[, ] { { bgColor, defaultColor }, { color, defaultColor }, }), ExpectedPixels .At("Bottom right", host.Right - 1, host.Bottom + finalTrY - 1) .WithPixelTolerance(x: 1, y: 1) .Pixels(new[, ]
// Universal (URP) XR graphics test coroutine: skips XR runs on macOS, loads
// the scene, honours per-scene XR compatibility, waits the configured frames,
// compares the cameras against the reference image, and then checks that
// rendering the main camera allocates no GC memory — except with the 2D
// renderer, which is known to allocate and is skipped.
public IEnumerator Run(GraphicsTestCase testCase)
{
    // XRTODO: Fix XR tests on macOS or disable them from Yamato directly
    if (XRSystem.testModeEnabled && (Application.platform == RuntimePlatform.OSXEditor || Application.platform == RuntimePlatform.OSXPlayer))
    {
        Assert.Ignore("Universal XR tests do not run on macOS.");
    }

    SceneManager.LoadScene(testCase.ScenePath);

    // Always wait one frame for scene load
    yield return (null);

    var cameras = GameObject.FindGameObjectsWithTag("MainCamera").Select(x => x.GetComponent<Camera>());
    var settings = Object.FindObjectOfType<UniversalGraphicsTestSettings>();
    Assert.IsNotNull(settings, "Invalid test scene, couldn't find UniversalGraphicsTestSettings");

    if (XRSystem.testModeEnabled)
    {
        if (settings.XRCompatible)
        {
            XRSystem.automatedTestRunning = true;
        }
        else
        {
            Assert.Ignore("Test scene is not compatible with XR and will be skipped.");
        }
    }

    Scene scene = SceneManager.GetActiveScene();

    yield return (null);

    int waitFrames = settings.WaitFrames;

    // Back-buffer capture needs at least one end-of-frame wait.
    if (settings.ImageComparisonSettings.UseBackBuffer && settings.WaitFrames < 1)
    {
        waitFrames = 1;
    }

    for (int i = 0; i < waitFrames; i++)
    {
        yield return (new WaitForEndOfFrame());
    }

#if UNITY_ANDROID
    // On Android first scene often needs a bit more frames to load all the assets
    // otherwise the screenshot is just a black screen
    if (!wasFirstSceneRan)
    {
        for (int i = 0; i < firstSceneAdditionalFrames; i++)
        {
            yield return (null);
        }
        wasFirstSceneRan = true;
    }
#endif

    ImageAssert.AreEqual(testCase.ReferenceImage, cameras.Where(x => x != null), settings.ImageComparisonSettings);

    // Does it allocate memory when it renders what's on the main camera?
    bool allocatesMemory = false;
    var mainCamera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();

    // 2D Renderer is currently allocating memory, skip it as it will always fail GC alloc tests.
    var additionalCameraData = mainCamera.GetUniversalAdditionalCameraData();
    bool is2DRenderer = additionalCameraData.scriptableRenderer is Renderer2D;
    if (!is2DRenderer)
    {
        try
        {
            ImageAssert.AllocatesMemory(mainCamera, settings?.ImageComparisonSettings);
        }
        catch (AssertionException)
        {
            allocatesMemory = true;
        }
        if (allocatesMemory)
        {
            Assert.Fail("Allocated memory when rendering what is on main camera");
        }
    }
}
// HDRP graphics test coroutine (no XR handling): loads the scene, waits for
// it to settle, then compares the camera render against the reference image.
// When the scene provides an HDRP_ShaderGraph_TestSettings with compareSGtoBI
// enabled, it instead renders the Shader Graph and built-in (HDRP/Lit) object
// sets separately and checks each against the same reference.
public IEnumerator Run(GraphicsTestCase testCase)
{
    SceneManager.LoadScene(testCase.ScenePath);

    // Arbitrary wait for 5 frames for the scene to load, and other stuff to
    // happen (like Realtime GI to appear ...)
    for (int i = 0; i < 5; ++i)
    {
        yield return (null);
    }

    // Load the test settings
    var settings = GameObject.FindObjectOfType<HDRP_TestSettings>();

    var camera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
    if (camera == null)
    {
        // Fall back to any camera in the scene when none is tagged MainCamera.
        camera = GameObject.FindObjectOfType<Camera>();
    }
    if (camera == null)
    {
        Assert.Fail("Missing camera for graphic tests.");
    }

    Time.captureFramerate = settings.captureFramerate;

    if (settings.doBeforeTest != null)
    {
        settings.doBeforeTest.Invoke();

        // Wait again one frame, to be sure.
        yield return (null);
    }

    // Scene-configurable extra settling frames.
    for (int i = 0; i < settings.waitFrames; ++i)
    {
        yield return (null);
    }

    var settingsSG = (GameObject.FindObjectOfType<HDRP_TestSettings>() as HDRP_ShaderGraph_TestSettings);
    if (settingsSG == null || !settingsSG.compareSGtoBI)
    {
        // Standard Test
        ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null) ? settings.ImageComparisonSettings : null);
    }
    else
    {
        if (settingsSG.sgObjs == null)
        {
            Assert.Fail("Missing Shader Graph objects in test scene.");
        }
        if (settingsSG.biObjs == null)
        {
            Assert.Fail("Missing comparison objects in test scene.");
        }

        // Show only the Shader Graph objects for the first comparison.
        settingsSG.sgObjs.SetActive(true);
        settingsSG.biObjs.SetActive(false);
        yield return (null); // Wait a frame
        yield return (null);

        bool sgFail = false;
        bool biFail = false;

        // First test: Shader Graph
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null) ? settings.ImageComparisonSettings : null);
        }
        catch (AssertionException)
        {
            sgFail = true;
        }

        // Swap to the built-in-material objects, at the same position.
        settingsSG.sgObjs.SetActive(false);
        settingsSG.biObjs.SetActive(true);
        settingsSG.biObjs.transform.position = settingsSG.sgObjs.transform.position; // Move to the same location.
        yield return (null); // Wait a frame
        yield return (null);

        // Second test: HDRP/Lit Materials
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null) ? settings.ImageComparisonSettings : null);
        }
        catch (AssertionException)
        {
            biFail = true;
        }

        // Informs which ImageAssert failed, if any.
        if (sgFail && biFail)
        {
            Assert.Fail("Both Shader Graph and Non-Shader Graph Objects failed to match the reference image");
        }
        else if (sgFail)
        {
            Assert.Fail("Shader Graph Objects failed.");
        }
        else if (biFail)
        {
            Assert.Fail("Non-Shader Graph Objects failed to match Shader Graph objects.");
        }
    }
}
// Runs a glTF graphics test: loads the scene, asynchronously loads the glTF asset referenced by
// the scene's GltfBoundsAsset, sets up cameras (glTF-embedded cameras if present, otherwise the
// tagged MainCamera framed on the asset's bounds), freezes any animation at its midpoint, waits
// the configured number of frames and compares the render against the reference image.
public IEnumerator Run(GraphicsTestCase testCase)
{
    // #if ENABLE_VR
    //         // XRTODO: Fix XR tests on macOS or disable them from Yamato directly
    //         if (XRGraphicsAutomatedTests.enabled && (Application.platform == RuntimePlatform.OSXEditor || Application.platform == RuntimePlatform.OSXPlayer))
    //             Assert.Ignore("Universal XR tests do not run on macOS.");
    // #endif
    SceneManager.LoadScene(testCase.ScenePath);

    // Always wait one frame for scene load
    yield return(null);

    var settings = Object.FindObjectOfType<UniversalGraphicsTestSettings>();
    Assert.IsNotNull(settings, "Invalid test scene, couldn't find UniversalGraphicsTestSettings");

    var gltf = Object.FindObjectOfType<GltfBoundsAsset>();
    Assert.IsNotNull(gltf, "Invalid test scene, couldn't find GltfAsset");

    // Kick off the async glTF load and pump frames until it completes.
    var task = gltf.Load(gltf.FullUrl);
    while (!task.IsCompleted)
    {
        yield return(null);
    }
    var success = task.Result;

    if (success && !gltf.currentSceneId.HasValue)
    {
        // glTF has no default scene. Fallback to the first scene
        success = gltf.InstantiateScene(0);
    }

    IEnumerable<Camera> cameras;
    Camera testCamera = null;

    if (success && gltf.sceneInstance?.cameras != null && gltf.sceneInstance.cameras.Count > 0)
    {
        // Look for glTF cameras
        cameras = gltf.sceneInstance.cameras;
        foreach (var cam in cameras)
        {
            // The first glTF camera becomes the one used for the image comparison.
            if (testCamera == null)
            {
                testCamera = cam;
            }

            // Force a uniform solid background so the comparison is deterministic.
            cam.backgroundColor = new Color(1f, 1f, 1f, 0f);
            cam.clearFlags = CameraClearFlags.SolidColor;
#if USING_HDRP
            // HDRP ignores the legacy Camera clear settings; mirror them on the HD camera data.
            var hdAdd = cam.gameObject.AddComponent<HDAdditionalCameraData>();
            if (hdAdd != null)
            {
                hdAdd.clearColorMode = HDAdditionalCameraData.ClearColorMode.Color;
                hdAdd.backgroundColorHDR = new Color(1, 1, 1, 0);
            }
#endif
        }
    }
    else
    {
        // position main camera based on AABB
        cameras = GameObject.FindGameObjectsWithTag("MainCamera").Select(x => x.GetComponent<Camera>()).Where(x => x != null);
        testCamera = cameras.First();
        FrameBoundsCamera.FrameBounds(testCamera, gltf.transform, gltf.bounds);
    }

    if (success)
    {
        // Freeze any animation at the midpoint of its clip so the captured frame is deterministic.
        var animation = gltf.gameObject.transform.GetComponentInChildren<Animation>();
        if (animation != null && animation.clip != null)
        {
            animation.Stop();
            var clip = animation.clip;
            animation.clip.SampleAnimation(animation.gameObject, clip.length * .5f);
            // animation.Sample();
        }
    }

    // #if ENABLE_VR
    //         if (XRGraphicsAutomatedTests.enabled)
    //         {
    //             if (settings.XRCompatible)
    //             {
    //                 XRGraphicsAutomatedTests.running = true;
    //             }
    //             else
    //             {
    //                 Assert.Ignore("Test scene is not compatible with XR and will be skipped.");
    //             }
    //         }
    // #endif

    Scene scene = SceneManager.GetActiveScene(); // NOTE(review): 'scene' is never used below — confirm whether it can be removed.

    yield return(null);

    // UseBackBuffer comparisons need at least one end-of-frame wait to have valid content.
    int waitFrames = settings.WaitFrames;
    if (settings.ImageComparisonSettings.UseBackBuffer && settings.WaitFrames < 1)
    {
        waitFrames = 1;
    }

    for (int i = 0; i < waitFrames; i++)
    {
        yield return(new WaitForEndOfFrame());
    }

#if UNITY_ANDROID
    // On Android first scene often needs a bit more frames to load all the assets
    // otherwise the screenshot is just a black screen
    if (!wasFirstSceneRan)
    {
        for (int i = 0; i < firstSceneAdditionalFrames; i++)
        {
            yield return(null);
        }
        wasFirstSceneRan = true;
    }
#endif

    ImageAssert.AreEqual(testCase.ReferenceImage, testCamera, settings.ImageComparisonSettings);

    // Does it allocate memory when it renders what's on the main camera?
    // bool allocatesMemory = false;
    // var mainCamera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
    // try
    // {
    //     ImageAssert.AllocatesMemory(mainCamera, settings?.ImageComparisonSettings);
    // }
    // catch (AssertionException)
    // {
    //     allocatesMemory = true;
    // }
    // if (allocatesMemory)
    //     Assert.Fail("Allocated memory when rendering what is on main camera");
}
// Runs the basic-shapes UI test page for the given shape, collects the per-test screenshots the
// page reports (base64-encoded over a dependency property), saves them as attachments, compares
// each against its expected screenshot, and aggregates all failures into a single exception.
public void ValidateShape(string shapeName, PixelTolerance?tolerance = null)
{
    Run("UITests.Windows_UI_Xaml_Shapes.Basic_Shapes", skipInitialScreenshot: true);

    var ctrl = new QueryEx(q => q.Marked("_basicShapesTestRoot"));

    // NOTE(review): "EpectedResults" looks like a typo, but it must match the on-disk folder name — confirm before renaming.
    var expectedDirectory = Path.Combine(
        TestContext.CurrentContext.TestDirectory,
        "Windows_UI_Xaml_Shapes/Basics_Shapes_Tests_EpectedResults");
    var actualDirectory = Path.Combine(
        TestContext.CurrentContext.WorkDirectory,
        nameof(Windows_UI_Xaml_Shapes),
        nameof(Basics_Shapes_Tests),
        shapeName);

    tolerance = tolerance ?? (new PixelTolerance()
        .WithColor(132) // We are almost only trying to detect edges
        .WithOffset(3, 3, LocationToleranceKind.PerPixel)
        .Discrete(2));

    var failures = new List<(string test, Exception error)>();

    // To improve performance, we run all test for a given stretch at once.
    var testGroups = _tests
        .Where(t => t.StartsWith(shapeName))
        .GroupBy(t => string.Join("_", t.Split(new[] { '_' }, 3, StringSplitOptions.RemoveEmptyEntries).Take(2)));

    foreach (var testGroup in testGroups)
    {
        ctrl.SetDependencyPropertyValue("RunTest", string.Join(";", testGroup));
        _app.WaitFor(() => !string.IsNullOrWhiteSpace(ctrl.GetDependencyPropertyValue<string>("TestResult")));
        var testResultsRaw = ctrl.GetDependencyPropertyValue<string>("TestResult");

        // Each result line is "<testName>;<SUCCESS|other>;<base64 payload>", where the payload is
        // a PNG screenshot on success or a UTF-8 error message otherwise.
        var testResults = testResultsRaw
            .Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)
            .Select(line => line.Split(new[] { ';' }, 3, StringSplitOptions.RemoveEmptyEntries))
            .Where(line => line.Length == 3)
            .ToDictionary(
                line => line[0],
                line =>
                {
                    var testName = line[0];
                    var isSuccess = line[1] == "SUCCESS";
                    var data = Convert.FromBase64String(line[2]);

                    var target = Path
                        .Combine(actualDirectory, testName + (isSuccess ? ".png" : ".txt"))
                        .GetNormalizedLongPath();
                    var targetFile = new FileInfo(target);
                    targetFile.Directory.Create();
                    File.WriteAllBytes(target, data);
                    SetOptions(targetFile, new ScreenshotOptions { IgnoreInSnapshotCompare = true });
                    TestContext.AddTestAttachment(target, testName);

                    // Reuse the already-decoded payload instead of decoding the base64 string again.
                    return isSuccess
                        ? new Bitmap(new MemoryStream(data))
                        : new Exception(Encoding.UTF8.GetString(data)) as object;
                });

        foreach (var test in testGroup)
        {
            try
            {
                var expected = new FileInfo(Path.Combine(expectedDirectory, $"{test}.png"));
                if (!expected.Exists)
                {
                    Assert.Fail($"Expected screenshot does not exists ({expected.FullName})");
                }

                if (!testResults.TryGetValue(test, out var testResult))
                {
                    Assert.Fail($"No test result for {test}.");
                }

                if (testResult is Exception error)
                {
                    Assert.Fail($"Test failed: {error.Message}");
                }

                using (var actual = (Bitmap)testResult)
                {
                    var scale = 2.0;
                    ImageAssert.AreAlmostEqual(expected, ImageAssert.FirstQuadrant, actual, ImageAssert.FirstQuadrant, scale, tolerance.Value);
                }
            }
            catch (Exception e)
            {
                Console.Error.WriteLine(e); // Ease debug while reading log from CI
                failures.Add((test, e));
            }
        }
    }

    // A single aggregate keeps every per-shape failure visible in one test result.
    if (failures.Any())
    {
        throw new AggregateException(
            $"Failed tests ({failures.Count} of {testGroups.Sum(g => g.Count())}):\r\n{string.Join("\r\n", failures.Select(t => t.test))}\r\n",
            failures.Select(t => t.error));
    }
    else
    {
        Console.WriteLine($"All {testGroups.Sum(g => g.Count())} ran successfully.");
    }
}
public void WhenEqualBmp() { ImageAssert.AreEqual(Properties.Resources.SquareBmp, Properties.Resources.SquareBmp); ImageAssert.AreEqual(Properties.Resources.SquareBmp, Properties.Resources.SquareBmp, _ => { }); }
private void TestTransformsFinalState([CallerMemberName] string testName = null) { var match = Regex.Match(testName, @"When_Transforms_(?<type>\w+)_With_FillBehavior(?<fill>\w+)_Then_(?<expected>\w+)"); if (!match.Success) { throw new InvalidOperationException("Invalid test name."); } var type = match.Groups["type"].Value; var fill = match.Groups["fill"].Value; var expected = match.Groups["expected"].Value; int expectedDelta, tolerance = 0; switch (type) { case "Completed" when expected == "Hold": expectedDelta = 150; break; case "Completed" when expected == "Rollback": expectedDelta = 0; break; case "Paused": expectedDelta = 150 / 2; tolerance = 50; // We only want to validate that the element is not at 0 or 150 break; case "Canceled": expectedDelta = 0; break; default: throw new InvalidOperationException("Invalid test name."); } Run(_finalStateTransformsTestControl, skipInitialScreenshot: true); var initial = TakeScreenshot("Initial", ignoreInSnapshotCompare: true); var initialLocation = _app.WaitForElement($"{type}AnimationHost_{fill}").Single().Rect; var scale = ((int)initialLocation.Width) / 50; expectedDelta *= scale; tolerance *= scale; _app.Marked("StartButton").FastTap(); _app.WaitForDependencyPropertyValue(_app.Marked("Status"), "Text", "Completed"); // Assert var final = TakeScreenshot("Final", ignoreInSnapshotCompare: true); var finalLocation = _app.WaitForElement($"{type}AnimationHost_{fill}").Single().Rect; var actualDelta = finalLocation.Y - initialLocation.Y; // For some reason, the finalLocation might not reflect the effective location of the control, // instead we rely on pixel validation ... // Assert.IsTrue(Math.Abs(actualDelta - expectedDelta) <= tolerance); if (expectedDelta > 0) { ImageAssert.AreNotEqual(initial, final, initialLocation); } else { ImageAssert.AreEqual(initial, final, initialLocation); } }
public void WhenEqualPng() { ImageAssert.AreEqual(Properties.Resources.SquarePng, Properties.Resources.SquarePng); ImageAssert.AreEqual(Properties.Resources.SquarePng, Properties.Resources.SquarePng, _ => { }); }
// Runs a VFX graphics test: recompiles the visual effect graphs (editor only), renders the
// tagged MainCamera into a fixed-size temporary render target for a fixed simulated duration,
// reads the pixels back and compares them against the reference image.
public IEnumerator Run(GraphicsTestCase testCase)
{
#if UNITY_EDITOR
    // Close all scene views: an open SceneView also renders and could interfere with the capture.
    while (SceneView.sceneViews.Count > 0)
    {
        var sceneView = SceneView.sceneViews[0] as SceneView;
        sceneView.Close();
    }
#endif
    SceneManagement.SceneManager.LoadScene(testCase.ScenePath);

    // Always wait one frame for scene load
    yield return(null);

    var camera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
    if (camera)
    {
        var vfxComponents = Resources.FindObjectsOfTypeAll<VisualEffect>();
#if UNITY_EDITOR
        // Recompile every distinct graph used in the scene before capturing.
        var vfxAssets = vfxComponents.Select(o => o.visualEffectAsset).Where(o => o != null).Distinct();
        foreach (var vfx in vfxAssets)
        {
            //Use Reflection as workaround of the access issue in .net 4 (TODO : Clean this as soon as possible)
            //var graph = vfx.GetResource().GetOrCreateGraph(); is possible with .net 3.5 but compilation fail with 4.0
            var visualEffectAssetExt = AppDomain.CurrentDomain.GetAssemblies().Select(o => o.GetType("UnityEditor.VFX.VisualEffectAssetExtensions"))
                                       .Where(o => o != null)
                                       .FirstOrDefault();
            var fnGetResource = visualEffectAssetExt.GetMethod("GetResource");
            var resource = fnGetResource.Invoke(null, new object[] { vfx });
            var fnGetOrCreate = visualEffectAssetExt.GetMethod("GetOrCreateGraph");
            var graph = fnGetOrCreate.Invoke(null, new object[] { resource }) as VFXGraph;
            graph.RecompileIfNeeded();
        }
#endif
        // Render offscreen at a fixed size so the capture is resolution independent.
        var rt = RenderTexture.GetTemporary(captureSize, captureSize, 24);
        camera.targetTexture = rt;

        // Restart every effect from a clean state.
        foreach (var component in vfxComponents)
        {
            component.Reinit();
        }

#if UNITY_EDITOR
        //When we change the graph, if animator was already enable, we should reinitialize animator to force all BindValues
        var animators = Resources.FindObjectsOfTypeAll<Animator>();
        foreach (var animator in animators)
        {
            animator.Rebind();
        }
#endif
        // Run the simulation for simulateTime / frequency frames before capturing.
        int waitFrameCount = (int)(simulateTime / frequency);
        int startFrameIndex = Time.frameCount;
        int expectedFrameIndex = startFrameIndex + waitFrameCount;

        while (Time.frameCount != expectedFrameIndex)
        {
            yield return(null);
        }

        Texture2D actual = null;
        try
        {
            // Read the rendered frame back from the render target into a CPU-side texture.
            camera.targetTexture = null;
            actual = new Texture2D(captureSize, captureSize, TextureFormat.RGB24, false);
            RenderTexture.active = rt;
            actual.ReadPixels(new Rect(0, 0, captureSize, captureSize), 0, 0);
            RenderTexture.active = null;
            actual.Apply();

            // Some scenes are loaded for side effects only, or are known to be unstable on Metal;
            // those are rendered but their result is not compared.
            if (!ExcludedTestsButKeepLoadScene.Any(o => testCase.ScenePath.Contains(o)) &&
                !(SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal && UnstableMetalTests.Any(o => testCase.ScenePath.Contains(o))))
            {
                ImageAssert.AreEqual(testCase.ReferenceImage, actual, new ImageComparisonSettings()
                {
                    AverageCorrectnessThreshold = 10e-5f
                });
            }
            else
            {
                Debug.LogFormat("GraphicTest '{0}' result has been ignored", testCase.ReferenceImage);
            }
        }
        finally
        {
            // Always release the temporary RT and the readback texture, even when the assert throws.
            RenderTexture.ReleaseTemporary(rt);
            if (actual != null)
            {
                UnityEngine.Object.Destroy(actual);
            }
        }
    }
}
// Runs a stereo (both-eyes) graphics test: loads the scene, renders at the reference image's
// resolution, captures the actual on-screen result through a temporary screenshot file, converts
// it to the reference's size/format and compares the two.
public IEnumerator Run(GraphicsTestCase testCase)
{
    SceneManager.LoadScene(testCase.ScenePath);

    // Always wait one frame for scene load
    yield return(null);

    var cameras = GameObject.FindGameObjectsWithTag("MainCamera").Select(x => x.GetComponent<Camera>());
    var settings = Object.FindObjectOfType<LWGraphicsTestSettings>();
    Assert.IsNotNull(settings, "Invalid test scene, couldn't find LWGraphicsTestSettings");

    // Stereo screen capture on Mac generates monoscopic images and won't be fixed.
    Assume.That((Application.platform != RuntimePlatform.OSXEditor && Application.platform != RuntimePlatform.OSXPlayer), "Stereo tests do not run on MacOSX.");

    var referenceImage = testCase.ReferenceImage;

    // make sure we're rendering in the same size as the reference image, otherwise this is not really comparable.
    Screen.SetResolution(referenceImage.width, referenceImage.height, FullScreenMode.Windowed);
    XRSettings.gameViewRenderMode = GameViewRenderMode.BothEyes;
    yield return(null);

    foreach (var camera in cameras)
    {
        camera.stereoTargetEye = StereoTargetEyeMask.Both;
    }

    var tempScreenshotFile = Path.ChangeExtension(Path.GetTempFileName(), ".png");

    // clean up previous file if it happens to exist at this point
    if (FileAvailable(tempScreenshotFile))
    {
        System.IO.File.Delete(tempScreenshotFile);
    }

    for (int i = 0; i < settings.WaitFrames; i++)
    {
        yield return(null);
    }

    // wait for rendering to complete
    yield return(new WaitForEndOfFrame());

    // we'll take a screenshot here, as what we want to compare is the actual result on-screen.
    // ScreenCapture.CaptureScreenshotAsTexture --> does not work since colorspace is wrong, would need colorspace change and thus color compression
    // ScreenCapture.CaptureScreenshotIntoRenderTexture --> does not work since texture is flipped, would need another pass
    // so we need to capture and reload the resulting file.
    ScreenCapture.CaptureScreenshot(tempScreenshotFile);

    // NOTE: there's discussions around whether Unity has actually documented this correctly.
    // Unity says: next frame MUST have the file ready
    // Community says: not true, file write might take longer, so have to explicitly check the file handle before use
    // https://forum.unity.com/threads/how-to-wait-for-capturescreen-to-complete.172194/
    yield return(null);
    while (!FileAvailable(tempScreenshotFile))
    {
        yield return(null);
    }

    // load the screenshot back into memory and change to the same format as we want to compare with
    var actualImage = new Texture2D(1, 1);
    actualImage.LoadImage(System.IO.File.ReadAllBytes(tempScreenshotFile));

    if (actualImage.width != referenceImage.width || actualImage.height != referenceImage.height)
    {
        Debug.LogWarning("[" + testCase.ScenePath + "] Image size differs (ref: " + referenceImage.width + "x" + referenceImage.height + " vs. actual: " + actualImage.width + "x" + actualImage.height + "). " +
                         (Application.isEditor ? " is your GameView set to a different resolution than the reference images?" : "is your build size different than the reference images?"));
        actualImage = ChangeTextureSize(actualImage, referenceImage.width, referenceImage.height);
    }

    // ref is usually in RGB24 or RGBA32 while actual is in ARGB32, we need to convert formats
    if (referenceImage.format != actualImage.format)
    {
        actualImage = ChangeTextureFormat(actualImage, referenceImage.format);
    }

    // delete temporary file
    File.Delete(tempScreenshotFile);

    // for testing
    // File.WriteAllBytes("reference.png", referenceImage.EncodeToPNG());
    // File.WriteAllBytes("actual.png", actualImage.EncodeToPNG());

    ImageAssert.AreEqual(referenceImage, actualImage, settings.ImageComparisonSettings);
}
// Validates the default StrokeThickness rendering of each basic shape, then toggles
// StrokeThickness to 0 via the StrokeThicknessButton and verifies the stroke disappears.
public void Default_StrokeThickness()
{
    const string red = "#FF0000";
    string reddish = GetReddish();

    // Per-shape expectations: Offsets are the pixel offsets (left, top, right, bottom) from the
    // shape container's edges at which the stroke colour is sampled.
    var shapeExpectations = new[]
    {
        new ShapeExpectation
        {
            Name = "MyLine",
            Offsets = new [] { 0, 0, 0, 0 },
            Colors = red,
        },
        new ShapeExpectation
        {
            Name = "MyRect",
            Offsets = new [] { 0, 0, -1, -1 },
            Colors = red,
        },
        new ShapeExpectation
        {
            Name = "MyPolyline",
            Offsets = new [] { 2, 2, -1, -1 },
            Colors = reddish,
        },
        new ShapeExpectation
        {
            Name = "MyPolygon",
            Offsets = new [] { 2, 2, -1, -1 },
            Colors = reddish,
        },
        new ShapeExpectation
        {
            Name = "MyEllipse",
            Offsets = new [] { 0, 0, -1, -1 },
            Colors = red,
        },
        new ShapeExpectation
        {
            Name = "MyPath",
            Offsets = new [] { 0, 0, 0, 0 },
            Colors = red,
        },
    };

    Run("UITests.Windows_UI_Xaml_Shapes.Shapes_Default_StrokeThickness");

    _app.WaitForElement("TestZone");

    foreach (var expectation in shapeExpectations)
    {
        _app.Marked($"{expectation.Name}Selector").FastTap();

        using var screenshot = TakeScreenshot($"{expectation}");

        if (expectation.Name == "MyLine" || expectation.Name == "MyPath")
        {
            // Open shapes: sample a single point at the centre of the target element —
            // non-white while stroked, white (background) once StrokeThickness is 0.
            var targetRect = _app.GetPhysicalRect($"{expectation.Name}Target");
            ImageAssert.DoesNotHaveColorAt(screenshot, targetRect.CenterX, targetRect.CenterY, Color.White);

            _app.Marked("StrokeThicknessButton").FastTap();
            using var zeroStrokeThicknessScreenshot = TakeScreenshot($"{expectation.Name}_0_StrokeThickness");
            ImageAssert.HasColorAt(zeroStrokeThicknessScreenshot, targetRect.CenterX, targetRect.CenterY, Color.White);
        }
        else
        {
            // Closed shapes: sample the stroke colour on all four edges of the container.
            // NOTE(review): $"{expectation}Grid" relies on ShapeExpectation.ToString() producing
            // the shape name — confirm; otherwise this should likely be $"{expectation.Name}Grid".
            var shapeContainer = _app.GetPhysicalRect($"{expectation}Grid");
            ImageAssert.HasColorAt(screenshot, shapeContainer.X + expectation.Offsets[0], shapeContainer.CenterY, expectation.Colors, tolerance: 15);
            ImageAssert.HasColorAt(screenshot, shapeContainer.CenterX, shapeContainer.Y + expectation.Offsets[1], expectation.Colors, tolerance: 15);
            ImageAssert.HasColorAt(screenshot, shapeContainer.Right + expectation.Offsets[2], shapeContainer.CenterY, expectation.Colors, tolerance: 15);
            ImageAssert.HasColorAt(screenshot, shapeContainer.CenterX, shapeContainer.Bottom + expectation.Offsets[3], expectation.Colors, tolerance: 15);

            // With StrokeThickness 0, none of the four sampled edge points may show the stroke colour.
            _app.Marked("StrokeThicknessButton").FastTap();
            using var zeroStrokeThicknessScreenshot = TakeScreenshot($"{expectation.Name}_0_StrokeThickness");
            ImageAssert.DoesNotHaveColorAt(zeroStrokeThicknessScreenshot, shapeContainer.X + expectation.Offsets[0], shapeContainer.CenterY, expectation.Colors);
            ImageAssert.DoesNotHaveColorAt(zeroStrokeThicknessScreenshot, shapeContainer.CenterX, shapeContainer.Y + expectation.Offsets[1], expectation.Colors);
            ImageAssert.DoesNotHaveColorAt(zeroStrokeThicknessScreenshot, shapeContainer.Right + expectation.Offsets[2], shapeContainer.CenterY, expectation.Colors);
            ImageAssert.DoesNotHaveColorAt(zeroStrokeThicknessScreenshot, shapeContainer.CenterX, shapeContainer.Bottom + expectation.Offsets[3], expectation.Colors);
        }
    }
}
// Validates the interaction of CornerRadius with per-side BorderThickness: with a fixed
// CornerRadius of 10, each edge target must show the border brush (red) when that side has a
// thickness of 10 and the background (blue) when the thickness is 0.
public void Border_CornerRadius_BorderThickness()
{
    const string red = "#FF0000";
    const string blue = "#0000FF";

    // Each entry pairs a (left, top, right, bottom) BorderThickness with the colours expected
    // at the matching edge targets.
    var expectedColors = new[]
    {
        new ExpectedColor { Thicknesses = new [] { 10, 10, 10, 10 }, Colors = new [] { red, red, red, red } },
        new ExpectedColor { Thicknesses = new [] { 10, 0, 10, 10 }, Colors = new [] { red, blue, red, red } },
        new ExpectedColor { Thicknesses = new [] { 10, 0, 0, 10 }, Colors = new [] { red, blue, blue, red } },
        new ExpectedColor { Thicknesses = new [] { 10, 0, 0, 0 }, Colors = new [] { red, blue, blue, blue } },
        new ExpectedColor { Thicknesses = new [] { 0, 0, 0, 0 }, Colors = new [] { blue, blue, blue, blue } },
    };

    Run("UITests.Windows_UI_Xaml_Controls.BorderTests.Border_CornerRadius_BorderThickness");

    _app.WaitForElement("MyBorder");

    // Sample points: one per border edge, plus the centre of the bordered element.
    var leftTarget = _app.GetPhysicalRect("LeftTarget");
    var topTarget = _app.GetPhysicalRect("TopTarget");
    var rightTarget = _app.GetPhysicalRect("RightTarget");
    var bottomTarget = _app.GetPhysicalRect("BottomTarget");
    var centerTarget = _app.GetPhysicalRect("CenterTarget");

    SetBorderProperty("MyBorder", "CornerRadius", "10");

    foreach (var expected in expectedColors)
    {
        // NOTE(review): assumes ExpectedColor.ToString() serializes Thicknesses into a valid
        // BorderThickness string — confirm against the ExpectedColor type.
        SetBorderProperty("MyBorder", "BorderThickness", expected.ToString());

        using var snapshot = TakeScreenshot($"Border-CornerRadius-10-BorderThickness-{expected}");
        ImageAssert.HasPixels(
            snapshot,
            ExpectedPixels
            .At($"left-{expected}", leftTarget.CenterX, leftTarget.CenterY)
            .WithPixelTolerance(1, 1)
            .Pixel(expected.Colors[0]),
            ExpectedPixels
            .At($"top-{expected}", topTarget.CenterX, topTarget.CenterY)
            .WithPixelTolerance(1, 1)
            .Pixel(expected.Colors[1]),
            ExpectedPixels
            .At($"right-{expected}", rightTarget.CenterX, rightTarget.CenterY)
            .WithPixelTolerance(1, 1)
            .Pixel(expected.Colors[2]),
            ExpectedPixels
            .At($"bottom-{expected}", bottomTarget.CenterX, bottomTarget.CenterY)
            .WithPixelTolerance(1, 1)
            .Pixel(expected.Colors[3]),
            // The centre is always expected to be the background colour, whatever the thickness.
            ExpectedPixels
            .At($"center-{expected}", centerTarget.CenterX, centerTarget.CenterY)
            .WithPixelTolerance(1, 1)
            .Pixel(blue)
        );
    }
}
// Exercises AnimatedGifEncoder with a user-supplied palette, either as a global colour table or
// as per-frame local colour tables. For every .act palette file: encodes a single-frame GIF,
// decodes it back and checks both the colour-table placement and the decoded image against a
// pre-computed expected bitmap. Finally encodes a multi-frame animation with a supplied palette
// and checks it decodes cleanly with the same frame count.
private void TestUseSuppliedPalette(ColourTableStrategy strategy)
{
    string globalLocal = strategy == ColourTableStrategy.UseGlobal ? "Global" : "Local";

    // First, create and check a series of single-frame GIFs, one for
    // each of the available colour tables.
    string[] files = Directory.GetFiles(@"ColourTables", "*.act");
    foreach (string act in files)
    {
        string actFileWithoutExtension = Path.GetFileNameWithoutExtension(act);
        _e = new AnimatedGifEncoder();
        if (strategy == ColourTableStrategy.UseGlobal)
        {
            // Global strategy: the palette is set on the encoder itself.
            _e.Palette = Palette.FromFile(act);
            Assert.AreEqual(ColourTableStrategy.UseGlobal, _e.ColourTableStrategy);
            // QuantizerType should default to UseSuppliedPalette when
            // the encoder's Palette property is set.
            Assert.AreEqual(QuantizerType.UseSuppliedPalette, _e.QuantizerType);
        }
        else
        {
            _e.ColourTableStrategy = ColourTableStrategy.UseLocal;
            Assert.AreEqual(ColourTableStrategy.UseLocal, _e.ColourTableStrategy);
            _e.QuantizerType = QuantizerType.UseSuppliedPalette;
            Assert.AreEqual(QuantizerType.UseSuppliedPalette, _e.QuantizerType);
        }

        GifFrame frame = new GifFrame(Image.FromFile(@"images\smiley.bmp"));
        if (strategy == ColourTableStrategy.UseLocal)
        {
            // Local strategy: the palette is attached to the frame rather than the encoder.
            frame.Palette = Palette.FromFile(act);
        }
        _e.AddFrame(frame);

        string fileName = "AnimatedGifEncoderTest.UseSuppliedPalette" + globalLocal + "-" + actFileWithoutExtension + ".gif";
        _e.WriteToFile(fileName);

        // Decode the freshly-written file and verify the round trip.
        _d = new GifDecoder(fileName, true);
        _d.Decode();
        Assert.AreEqual(ErrorState.Ok, _d.ConsolidatedState, actFileWithoutExtension);
        Assert.AreEqual(1, _d.Frames.Count, actFileWithoutExtension);
        if (strategy == ColourTableStrategy.UseGlobal)
        {
            Assert.AreEqual(true, _d.LogicalScreenDescriptor.HasGlobalColourTable, actFileWithoutExtension);
            Assert.IsNotNull(_d.GlobalColourTable, actFileWithoutExtension);
        }
        else
        {
            Assert.AreEqual(false, _d.LogicalScreenDescriptor.HasGlobalColourTable, actFileWithoutExtension);
            Assert.IsNull(_d.GlobalColourTable, actFileWithoutExtension);
        }

        // Compare the decoded frame against the pre-computed expected bitmap for this palette.
        string expectedFileName = @"images\Smiley\Smiley" + "-" + actFileWithoutExtension + ".bmp";
        Image expected = Image.FromFile(expectedFileName);
        ImageAssert.AreEqual(expected, _d.Frames[0].TheImage, expectedFileName);
    }

    // now encode a multi-frame animation with a user-supplied palette
    _d = new GifDecoder(@"images\globe\spinning globe better 200px transparent background.gif");
    _d.Decode();
    _e = new AnimatedGifEncoder();
    _e.QuantizerType = QuantizerType.UseSuppliedPalette;
    _e.Palette = Palette.FromFile(@"ColourTables\C64.act");
    foreach (GifFrame f in _d.Frames)
    {
        _e.AddFrame(f);
    }
    string globeFileName = "AnimatedGifEncoderTest.UseSuppliedPalette" + globalLocal + ".gif";
    _e.WriteToFile(globeFileName);
    _d = new GifDecoder(globeFileName);
    _d.Decode();
    Assert.AreEqual(ErrorState.Ok, _d.ConsolidatedState);
    Assert.AreEqual(_e.Frames.Count, _d.Frames.Count);
}
public IEnumerator Run(GraphicsTestCase testCase) { SceneManager.LoadScene(testCase.ScenePath); // Always wait one frame for scene load yield return(null); var cameras = GameObject.FindGameObjectsWithTag("MainCamera").Select(x => x.GetComponent <Camera>()); var settings = Object.FindObjectOfType <UniversalGraphicsTestSettings>(); Assert.IsNotNull(settings, "Invalid test scene, couldn't find UniversalGraphicsTestSettings"); Scene scene = SceneManager.GetActiveScene(); if (scene.name.Substring(3, 4).Equals("_xr_")) { Assume.That((Application.platform != RuntimePlatform.OSXEditor && Application.platform != RuntimePlatform.OSXPlayer), "Stereo Universal tests do not run on MacOSX."); XRSettings.LoadDeviceByName("MockHMD"); yield return(null); XRSettings.enabled = true; yield return(null); XRSettings.gameViewRenderMode = GameViewRenderMode.BothEyes; yield return(null); foreach (var camera in cameras) { camera.stereoTargetEye = StereoTargetEyeMask.Both; } } else { XRSettings.enabled = false; yield return(null); } for (int i = 0; i < settings.WaitFrames; i++) { yield return(null); } #if UNITY_ANDROID // On Android first scene often needs a bit more frames to load all the assets // otherwise the screenshot is just a black screen if (!wasFirstSceneRan) { for (int i = 0; i < firstSceneAdditionalFrames; i++) { yield return(null); } wasFirstSceneRan = true; } #endif ImageAssert.AreEqual(testCase.ReferenceImage, cameras.Where(x => x != null), settings.ImageComparisonSettings); #if CHECK_ALLOCATIONS_WHEN_RENDERING // Does it allocate memory when it renders what's on the main camera? bool allocatesMemory = false; var mainCamera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent <Camera>(); try { ImageAssert.AllocatesMemory(mainCamera, 512, 512); // 512 used for height and width to render } catch (AssertionException) { allocatesMemory = true; } if (allocatesMemory) { Assert.Fail("Allocated memory when rendering what is on main camera"); } #endif }
// Bakes the test scene's reflection probe through the requested Lightmapping API variant using
// the named lighting-settings asset, then compares the result rendered by the tagged MainCamera
// against the reference image stored for that settings/API combination.
public void RefProbeAPI(string settings, BakeAPI bakeAPI)
{
    EditorSceneManager.OpenScene(sceneFileName, OpenSceneMode.Single);

    // Bake with a lighting settings asset.
    string[] settingsAssets = AssetDatabase.FindAssets(settings + " t:lightingsettings", foldersToLookIn);
    Debug.Log("Found " + settingsAssets.Length + " matching lighting settings assets in " + foldersToLookIn[0]);
    Assert.That(settingsAssets.Length, Is.EqualTo(1));

    string lsaPath = AssetDatabase.GUIDToAssetPath(settingsAssets[0]);
    Debug.Log("Loading " + lsaPath);
    LightingSettings lightingSettings = (LightingSettings)AssetDatabase.LoadAssetAtPath(lsaPath, typeof(LightingSettings));
    Lightmapping.lightingSettings = lightingSettings;

    // Sanity check: the asset found must actually be the one requested.
    string fileName = System.IO.Path.GetFileNameWithoutExtension(lsaPath);
    Assert.That(fileName, Is.EqualTo(settings));

    Lightmapping.Clear();

    // The disk cache needs clearing between runs because we are only changing the API and not necessarily the settings.
    // Changing the API use to bake the probe is assumed to not affect the result so the reflection probe is fetched from the disk cache.
    // To detect that everything works as intended the cached reflection probe needs to be cleared.
    Lightmapping.ClearDiskCache();

    Debug.Log("Baking " + fileName);

    bool result = true;
    switch (bakeAPI)
    {
        case BakeAPI.Bake:
            result = Lightmapping.Bake();
            break;

        case BakeAPI.BakeAll:
        {
            // Bake the scene with the probe disabled, then bake all probe snapshots through the
            // dedicated extension API.
            var probe = Object.FindObjectOfType<ReflectionProbe>();
            Assert.That(probe, !Is.EqualTo(null), "Couldn't find ReflectionProbe");
            Debug.Log("Found reflection probe: " + probe.name);
            var oldEnabledValue = probe.enabled;
            probe.enabled = false;
            result = Lightmapping.Bake();
            probe.enabled = oldEnabledValue;
            result &= LightmappingExt.BakeAllReflectionProbesSnapshots();
        }
        break;

        case BakeAPI.BakeSingle:
        {
            // Same as BakeAll, but snapshots only the single probe found in the scene.
            var probe = Object.FindObjectOfType<ReflectionProbe>();
            Assert.That(probe, !Is.EqualTo(null), "Couldn't find ReflectionProbe");
            Debug.Log("Found reflection probe: " + probe.name);
            var oldEnabledValue = probe.enabled;
            probe.enabled = false;
            result = Lightmapping.Bake();
            probe.enabled = oldEnabledValue;
            result &= LightmappingExt.BakeReflectionProbeSnapshot(probe);
        }
        break;
    }
    Assert.That(result, Is.True);

    // Get Test settings.
    var graphicsTestSettingsCustom = Object.FindObjectOfType<UniversalGraphicsTestSettings>();
    Assert.That(graphicsTestSettingsCustom, !Is.EqualTo(null), "Couldn't find GraphicsTestSettingsCustom");

    // Load reference image. The path encodes colour space, platform, graphics device and XR
    // device so each configuration compares against its own baseline.
    var referenceImagePath = System.IO.Path.Combine("Assets/ReferenceImages",
        string.Format("{0}/{1}/{2}/{3}/{4}",
            UseGraphicsTestCasesAttribute.ColorSpace,
            UseGraphicsTestCasesAttribute.Platform,
            UseGraphicsTestCasesAttribute.GraphicsDevice,
            UseGraphicsTestCasesAttribute.LoadedXRDevice,
            "RefProbeAPI_" + settings + "-" + bakeAPI.ToString() + "_.png"));
    Debug.Log("referenceImagePath " + referenceImagePath);
    var referenceImage = AssetDatabase.LoadAssetAtPath<Texture2D>(referenceImagePath);

    // Compare screenshots.
    GraphicsTestCase testCase = new GraphicsTestCase(settings, referenceImage);
    var cameras = GameObject.FindGameObjectsWithTag("MainCamera").Select(x => x.GetComponent<Camera>());
    ImageAssert.AreEqual(testCase.ReferenceImage, cameras.Where(x => x != null), graphicsTestSettingsCustom.ImageComparisonSettings);

    // Extract the result images from the test properties so they are available in the results.
    UnityEditor.TestTools.Graphics.ResultsUtility.ExtractImagesFromTestProperties(TestContext.CurrentContext.Test);
}
// Runs a glTF graphics test for Universal RP: loads the scene, loads the glTF asset, frames the
// first MainCamera on the asset's bounds, waits the configured frames, compares against the
// reference image, and fails if rendering the main camera allocates memory.
public IEnumerator Run(GraphicsTestCase testCase)
{
    // #if ENABLE_VR
    //         // XRTODO: Fix XR tests on macOS or disable them from Yamato directly
    //         if (XRGraphicsAutomatedTests.enabled && (Application.platform == RuntimePlatform.OSXEditor || Application.platform == RuntimePlatform.OSXPlayer))
    //             Assert.Ignore("Universal XR tests do not run on macOS.");
    // #endif
    SceneManager.LoadScene(testCase.ScenePath);

    // Always wait one frame for scene load
    yield return(null);

    // NOTE(review): 'cameras' is a deferred LINQ query enumerated more than once below
    // (First() here, Where() at the assert) — it is re-evaluated each time; consider materializing.
    var cameras = GameObject.FindGameObjectsWithTag("MainCamera").Select(x => x.GetComponent<Camera>());
    var settings = Object.FindObjectOfType<UniversalGraphicsTestSettings>();
    Assert.IsNotNull(settings, "Invalid test scene, couldn't find UniversalGraphicsTestSettings");

    var gltf = Object.FindObjectOfType<GltfBoundsAsset>();
    Assert.IsNotNull(gltf, "Invalid test scene, couldn't find GltfAsset");

    // Pump frames until the async glTF load completes.
    // NOTE(review): unlike the sibling overload, task.Result is never checked here — a failed
    // load proceeds silently to the image comparison; confirm whether that is intended.
    var task = gltf.Load(gltf.FullUrl);
    while (!task.IsCompleted)
    {
        yield return(null);
    }

    // position camera based on AABB
    var cam = cameras.First();
    FrameBoundsCamera.FrameBounds(cam, gltf.transform, gltf.bounds);

    // #if ENABLE_VR
    //         if (XRGraphicsAutomatedTests.enabled)
    //         {
    //             if (settings.XRCompatible)
    //             {
    //                 XRGraphicsAutomatedTests.running = true;
    //             }
    //             else
    //             {
    //                 Assert.Ignore("Test scene is not compatible with XR and will be skipped.");
    //             }
    //         }
    // #endif

    Scene scene = SceneManager.GetActiveScene(); // NOTE(review): 'scene' is never used below — confirm whether it can be removed.

    yield return(null);

    // UseBackBuffer comparisons need at least one end-of-frame wait to have valid content.
    int waitFrames = settings.WaitFrames;
    if (settings.ImageComparisonSettings.UseBackBuffer && settings.WaitFrames < 1)
    {
        waitFrames = 1;
    }

    for (int i = 0; i < waitFrames; i++)
    {
        yield return(new WaitForEndOfFrame());
    }

#if UNITY_ANDROID
    // On Android first scene often needs a bit more frames to load all the assets
    // otherwise the screenshot is just a black screen
    if (!wasFirstSceneRan)
    {
        for (int i = 0; i < firstSceneAdditionalFrames; i++)
        {
            yield return(null);
        }
        wasFirstSceneRan = true;
    }
#endif

    ImageAssert.AreEqual(testCase.ReferenceImage, cameras.Where(x => x != null), settings.ImageComparisonSettings);

    // Does it allocate memory when it renders what's on the main camera?
    bool allocatesMemory = false;
    var mainCamera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
    try
    {
        ImageAssert.AllocatesMemory(mainCamera, settings?.ImageComparisonSettings);
    }
    catch (AssertionException)
    {
        allocatesMemory = true;
    }
    if (allocatesMemory)
    {
        Assert.Fail("Allocated memory when rendering what is on main camera");
    }
}
/// <summary>
/// Verifies the CommandBar keeps its expected size across device rotations:
/// in each orientation the bar must sit flush on top of the yellow border and
/// render red just above its bottom edge.
/// </summary>
public async Task NativeCommandBar_Size()
{
    Run("Uno.UI.Samples.Content.UITests.CommandBar.CommandBar_Dynamic");

    const string rootElementName = "RootPanel";
    _app.WaitForElement(rootElementName);

    var supportsRotation = GetSupportsRotation();
    var isLandscape = GetIsCurrentRotationLandscape(rootElementName);
    var currentModeIsLandscape = isLandscape;

    // Flips the device orientation and waits until the UI reports the new mode.
    async Task ToggleOrientation()
    {
        if (currentModeIsLandscape)
        {
            _app.SetOrientationPortrait();
        }
        else
        {
            _app.SetOrientationLandscape();
        }

        currentModeIsLandscape = !currentModeIsLandscape;

        _app.WaitFor(() => GetIsCurrentRotationLandscape(rootElementName) == currentModeIsLandscape);

        await Task.Delay(125); // A delay is required after rotation for the test to succeed
    }

    // Asserts the layout invariant for the current orientation: the command bar's
    // bottom edge touches the yellow border, and the pixel just above that edge
    // (75% of the way across) is red.
    void AssertCommandBarLayout(string screenshotName)
    {
        var screenshot = TakeScreenshot(screenshotName);
        var commandBarRect = _app.GetRect("TheCommandBar");
        var yellowBorderRect = _app.GetRect("TheBorder");

        commandBarRect.Bottom.Should().Be(yellowBorderRect.Y);

        var physicalRect = ToPhysicalRect(commandBarRect);
        var x = physicalRect.X + (physicalRect.Width * 0.75f);

        ImageAssert.HasColorAt(screenshot, x, physicalRect.Bottom - 1, Color.Red);
    }

    try
    {
        AssertCommandBarLayout("FirstOrientation");

        if (!supportsRotation)
        {
            return; // We're on a platform not supporting rotations.
        }

        await ToggleOrientation();
        AssertCommandBarLayout("SecondOrientation");

        await ToggleOrientation();
        AssertCommandBarLayout("thirdOrientation");
    }
    finally
    {
        // Reset orientation to original value
        if (isLandscape)
        {
            _app.SetOrientationLandscape();
        }
        else
        {
            _app.SetOrientationPortrait();
        }

        _app.WaitFor(() => GetIsCurrentRotationLandscape(rootElementName) == isLandscape);
    }
}
/// <summary>
/// Validates CornerRadius clipping: each "ctl*" element must be green along the
/// middle of all four edges but white (background showing through the clipped
/// corner) at the four corner pixels, while the plain "ctl*_rect" rectangles
/// must be green everywhere, corners included.
/// </summary>
public void When_CornerRadiusControls()
{
    Run("UITests.Windows_UI_Xaml.Clipping.CornerRadiusControls");

    _app.WaitForElement("TestRoot");

    using var snapshot = this.TakeScreenshot("validation", ignoreInSnapshotCompare: false);

    using (new AssertionScope("Rounded corners"))
    {
        CheckRoundedCorners("ctl1");
        CheckRoundedCorners("ctl2");
        CheckRoundedCorners("ctl3");
        CheckRoundedCorners("ctl4");
        CheckRoundedCorners("ctl5");
        CheckRoundedCorners("ctl6");
        CheckRoundedCorners("ctl7");
        CheckRoundedCorners("ctl8");
    }

    using (new AssertionScope("No Rounded corners"))
    {
        CheckNoRoundedCorners("ctl1_rect");
        CheckNoRoundedCorners("ctl2_rect");
        CheckNoRoundedCorners("ctl3_rect");
    }

    // Rounded corners are clipped, so the white background shows at the corners.
    void CheckRoundedCorners(string s) => CheckCorners(s, cornerColor: "#FFFFFFFF");

    // Square corners are not clipped, so the corners stay green like the edges.
    void CheckNoRoundedCorners(string s) => CheckCorners(s, cornerColor: "#FF008000");

    // Shared assertion for both cases: the midpoints of all four edges are always
    // green (with a 1px tolerance); only the expected corner color differs.
    void CheckCorners(string s, string cornerColor)
    {
        var rectCtl = _app.GetPhysicalRect(s);
        const string green = "#FF008000";

        ImageAssert.HasPixels(
            snapshot,
            ExpectedPixels
                .At("top-middle " + s, rectCtl.CenterX, rectCtl.Y + 2)
                .WithPixelTolerance(1, 1)
                .Pixel(green),
            ExpectedPixels
                .At("bottom-middle " + s, rectCtl.CenterX, rectCtl.Bottom - 2)
                .WithPixelTolerance(1, 1)
                .Pixel(green),
            ExpectedPixels
                .At("left-middle " + s, rectCtl.X + 2, rectCtl.CenterY)
                .WithPixelTolerance(1, 1)
                .Pixel(green),
            ExpectedPixels
                .At("right-middle " + s, rectCtl.Right - 2, rectCtl.CenterY)
                .WithPixelTolerance(1, 1)
                .Pixel(green),
            ExpectedPixels
                .At("top-left " + s, rectCtl.X + 1, rectCtl.Y + 1)
                .Pixel(cornerColor),
            ExpectedPixels
                .At("top-right " + s, rectCtl.Right - 1, rectCtl.Y + 1)
                .Pixel(cornerColor),
            ExpectedPixels
                .At("bottom-left " + s, rectCtl.X + 1, rectCtl.Bottom - 1)
                .Pixel(cornerColor),
            ExpectedPixels
                .At("bottom-right " + s, rectCtl.Right - 1, rectCtl.Bottom - 1)
                .Pixel(cornerColor)
        );
    }
}
// Set timeout to 5 minutes to handle complex scenes with many shaders (default timeout is 3 minutes)
[Timeout(300 * 1000)]
/// <summary>
/// Runs one HDRP graphics test. In the default mode it renders the scene and
/// compares it against the reference image (optionally also checking that
/// rendering allocates no GC memory). When the scene's settings request a
/// Shader Graph comparison, it instead renders the Shader Graph objects and the
/// equivalent built-in (HDRP/Lit) objects one after the other and reports which
/// of the two (if any) failed to match the reference image.
/// </summary>
public IEnumerator Run(GraphicsTestCase testCase)
{
    SceneManager.LoadScene(testCase.ScenePath);

    // Arbitrary wait for 5 frames for the scene to load, and other stuff to happen (like Realtime GI to appear ...)
    for (int i = 0; i < 5; ++i)
    {
        yield return null;
    }

    // Load the test settings. Assert up front (consistent with the Universal test)
    // so a missing settings object fails with a clear message instead of a
    // NullReferenceException on the first dereference below.
    var settings = GameObject.FindObjectOfType<HDRP_TestSettings>();
    Assert.IsNotNull(settings, "Invalid test scene, couldn't find HDRP_TestSettings");

    // Prefer the tagged main camera; fall back to any camera in the scene.
    var camera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
    if (camera == null)
    {
        camera = GameObject.FindObjectOfType<Camera>();
    }

    if (camera == null)
    {
        Assert.Fail("Missing camera for graphic tests.");
    }

    Time.captureFramerate = settings.captureFramerate;

    if (XRGraphicsAutomatedTests.enabled)
    {
        if (settings.xrCompatible)
        {
            XRGraphicsAutomatedTests.running = true;

            // Increase tolerance to account for slight changes due to float precision
            settings.ImageComparisonSettings.AverageCorrectnessThreshold *= settings.xrThresholdMultiplier;
            settings.ImageComparisonSettings.PerPixelCorrectnessThreshold *= settings.xrThresholdMultiplier;

            // Increase number of volumetric slices to compensate for initial half-resolution due to XR single-pass optimization
            foreach (var volume in GameObject.FindObjectsOfType<Volume>())
            {
                if (volume.profile.TryGet<Fog>(out Fog fog))
                {
                    fog.volumeSliceCount.value *= 2;
                }
            }
        }
        else
        {
            Assert.Ignore("Test scene is not compatible with XR and will be skipped.");
        }
    }

    if (settings.doBeforeTest != null)
    {
        settings.doBeforeTest.Invoke();

        // Wait again one frame, to be sure.
        yield return null;
    }

    // Reset temporal effects on hdCamera
    HDCamera.GetOrCreate(camera).Reset();

    for (int i = 0; i < settings.waitFrames; ++i)
    {
        yield return null;
    }

    var settingsSG = GameObject.FindObjectOfType<HDRP_TestSettings>() as HDRP_ShaderGraph_TestSettings;
    if (settingsSG == null || !settingsSG.compareSGtoBI)
    {
        // Standard Test
        // (settings is guaranteed non-null by the assert above, so it is used
        // directly here and below instead of the previous mix of `settings?.`
        // and `(settings != null) ? ... : null` forms.)
        ImageAssert.AreEqual(testCase.ReferenceImage, camera, settings.ImageComparisonSettings);

        // For some reason, tests on mac os have started failing with render graph enabled by default.
        // Some tests have 400+ gcalloc in them. Unfortunately it's not reproductible outside of command line so it's impossible to debug.
        // That's why we don't test on macos anymore.
        if (settings.checkMemoryAllocation && SystemInfo.graphicsDeviceType != GraphicsDeviceType.Metal)
        {
            // Does it allocate memory when it renders what's on camera?
            bool allocatesMemory = false;
            try
            {
                // GC alloc from Camera.CustomRender (case 1206364)
                int gcAllocThreshold = 0;
                ImageAssert.AllocatesMemory(camera, settings.ImageComparisonSettings, gcAllocThreshold);
            }
            catch (AssertionException)
            {
                allocatesMemory = true;
            }

            if (allocatesMemory)
            {
                Assert.Fail("Allocated memory when rendering what is on camera");
            }
        }
    }
    else
    {
        if (settingsSG.sgObjs == null)
        {
            Assert.Fail("Missing Shader Graph objects in test scene.");
        }

        if (settingsSG.biObjs == null)
        {
            Assert.Fail("Missing comparison objects in test scene.");
        }

        settingsSG.sgObjs.SetActive(true);
        settingsSG.biObjs.SetActive(false);

        // Wait a frame
        yield return null;
        yield return null;

        bool sgFail = false;
        bool biFail = false;

        // First test: Shader Graph
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, settings.ImageComparisonSettings);
        }
        catch (AssertionException)
        {
            sgFail = true;
        }

        settingsSG.sgObjs.SetActive(false);
        settingsSG.biObjs.SetActive(true);
        settingsSG.biObjs.transform.position = settingsSG.sgObjs.transform.position; // Move to the same location.

        // Wait a frame
        yield return null;
        yield return null;

        // Second test: HDRP/Lit Materials
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, settings.ImageComparisonSettings);
        }
        catch (AssertionException)
        {
            biFail = true;
        }

        // Informs which ImageAssert failed, if any.
        if (sgFail && biFail)
        {
            Assert.Fail("Both Shader Graph and Non-Shader Graph Objects failed to match the reference image");
        }
        else if (sgFail)
        {
            Assert.Fail("Shader Graph Objects failed.");
        }
        else if (biFail)
        {
            Assert.Fail("Non-Shader Graph Objects failed to match Shader Graph objects.");
        }
    }
}
public void AreEqual_WithNullCamera_ThrowsArgumentNullException() { Assert.That(() => ImageAssert.AreEqual(new Texture2D(1, 1), (Camera)null), Throws.ArgumentNullException); }