// Dumps every test result in the run to the game log, one entry per result.
void LogResults(PerformanceTestRun run)
{
    foreach (var testResult in run.Results)
    {
        GameDebug.Log(testResult.ToString());
    }
}
/// <summary>
/// Merges all executions of each test into a single list of sample groups keyed by test name.
/// Sample groups with the same definition name have their samples appended onto the first
/// group encountered (that group object is mutated, as in the original implementation).
/// </summary>
/// <param name="performanceTestRun">Run whose per-execution results are merged.</param>
/// <returns>Map from test name to its merged sample groups, in first-seen order.</returns>
private Dictionary<string, List<SampleGroup>> MergeTestExecutions(PerformanceTestRun performanceTestRun)
{
    var mergedTestExecutions = new Dictionary<string, List<SampleGroup>>();
    // GroupBy preserves first-occurrence order, matching the old Distinct()-based loop,
    // and avoids rescanning Results once per test name.
    foreach (var executionsForTest in performanceTestRun.Results.GroupBy(te => te.TestName))
    {
        var sampleGroups = new List<SampleGroup>();
        // Index by definition name to replace the old O(n^2) Any()+First() scans.
        var groupsByName = new Dictionary<string, SampleGroup>();
        foreach (var execution in executionsForTest)
        {
            foreach (var sampleGroup in execution.SampleGroups)
            {
                if (groupsByName.TryGetValue(sampleGroup.Definition.Name, out var existing))
                {
                    existing.Samples.AddRange(sampleGroup.Samples);
                }
                else
                {
                    groupsByName.Add(sampleGroup.Definition.Name, sampleGroup);
                    sampleGroups.Add(sampleGroup);
                }
            }
        }
        mergedTestExecutions.Add(executionsForTest.Key, sampleGroups);
    }
    return mergedTestExecutions;
}
// Serializes the run to pretty-printed JSON at TestRunPath and refreshes the asset database
// so the editor picks up the new file.
private void CreatePerformanceTestRunJson(PerformanceTestRun perfTestRun)
{
    File.WriteAllText(TestRunPath, JsonUtility.ToJson(perfTestRun, true));
    AssetDatabase.Refresh();
}
/// <summary>
/// Coroutine that loads the test-run JSON into m_TestRun. On Android the file lives inside
/// the APK so it must be fetched via UnityWebRequest; elsewhere it is read from disk.
/// If the file does not exist (non-Android), m_TestRun is initialized to an empty run.
/// </summary>
private IEnumerator ReadPerformanceTestRunJsonAsync()
{
    string json;
    if (Application.platform == RuntimePlatform.Android)
    {
        // FIX: dispose the UnityWebRequest (it holds native resources); the old code leaked it.
        using (var reader = UnityWebRequest.Get(m_TestRunPath))
        {
            // Yielding on SendWebRequest() resumes only after the request completes, so the
            // old "while (!reader.isDone)" busy-wait that followed was dead code.
            yield return reader.SendWebRequest();
            json = reader.downloadHandler.text;
        }
    }
    else
    {
        if (!File.Exists(m_TestRunPath))
        {
            // No file yet: give callers a non-null run with empty player settings.
            m_TestRun = new PerformanceTestRun
            {
                PlayerSettings = new Unity.PerformanceTesting.PlayerSettings()
            };
            yield break;
        }
        json = File.ReadAllText(m_TestRunPath);
    }
    m_TestRun = JsonUtility.FromJson<PerformanceTestRun>(json);
}
// Scans every output line for an embedded "##performancetestruninfo" JSON payload and
// copies the run-level metadata it carries onto the supplied run. Later payloads
// overwrite earlier ones, exactly as before.
private void DeserializeMetadata(IEnumerable<XElement> output, PerformanceTestRun run)
{
    foreach (var element in output)
    {
        foreach (var line in element.Value.Split('\n'))
        {
            var json = GetJsonFromHashtag("performancetestruninfo", line);
            if (json == null)
            {
                continue;
            }
            var metadata = TryDeserializePerformanceTestRunJsonObject(json);
            if (metadata == null)
            {
                continue;
            }
            run.TestSuite = metadata.TestSuite;
            run.EditorVersion = metadata.EditorVersion;
            run.QualitySettings = metadata.QualitySettings;
            run.ScreenSettings = metadata.ScreenSettings;
            run.BuildSettings = metadata.BuildSettings;
            run.PlayerSettings = metadata.PlayerSettings;
            run.PlayerSystemInfo = metadata.PlayerSystemInfo;
            run.StartTime = metadata.StartTime;
            // @TODO fix end time, does it matter for now?
            run.EndTime = run.StartTime;
        }
    }
}
/// <summary>
/// Reads the metadata field's value from the run into <paramref name="obj"/> when
/// <paramref name="metadataType"/> is one of the supported metadata types; otherwise
/// leaves <paramref name="obj"/> untouched (same contract as before).
/// </summary>
/// <param name="performanceTestRun">Run instance to read the field from.</param>
/// <param name="metadataType">The metadata type being harvested.</param>
/// <param name="obj">Receives the field value (may be null if the field is unset).</param>
/// <param name="fieldInfo">Field on PerformanceTestRun whose FieldType matches metadataType.</param>
private void GetFieldInfoValue(PerformanceTestRun performanceTestRun, Type metadataType, ref object obj, FieldInfo fieldInfo)
{
    // The caller selects fieldInfo such that fieldInfo.FieldType == metadataType, so
    // GetValue already returns the declared type (or null); the old per-type casts
    // were redundant, and the six independent ifs always evaluated every comparison.
    if (metadataType == typeof(PlayerSystemInfo)
        || metadataType == typeof(PlayerSettings)
        || metadataType == typeof(ScreenSettings)
        || metadataType == typeof(QualitySettings)
        || metadataType == typeof(BuildSettings)
        || metadataType == typeof(EditorVersion))
    {
        obj = fieldInfo.GetValue(performanceTestRun);
    }
}
/// <summary>
/// Compares the editor version metadata of two runs and records a mismatch entry for each
/// differing field, unless that field's name appears in <paramref name="excludedFieldNames"/>.
/// </summary>
public void ValidateEditorVersion(PerformanceTestRun testRun1, PerformanceTestRun testRun2, string firstTestRunResultPath, string xmlFileNamePath, string[] excludedFieldNames)
{
    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(testRun1.EditorVersion.FullVersion)))
        && testRun1.EditorVersion.FullVersion != testRun2.EditorVersion.FullVersion)
    {
        AddMismatchedTestConfig(
            ref EditorVersionResultFiles, MismatchedEditorVersionValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(testRun1.EditorVersion.FullVersion),
            testRun1.EditorVersion.FullVersion, testRun2.EditorVersion.FullVersion);
    }
    // BUG FIX: the exclusion check below previously tested nameof(FullVersion) instead of
    // nameof(Branch), so adding "Branch" to excludedFieldNames had no effect.
    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(testRun1.EditorVersion.Branch)))
        && testRun1.EditorVersion.Branch != testRun2.EditorVersion.Branch)
    {
        AddMismatchedTestConfig(
            ref EditorVersionResultFiles, MismatchedEditorVersionValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(testRun1.EditorVersion.Branch),
            testRun1.EditorVersion.Branch, testRun2.EditorVersion.Branch);
    }
}
// Extracts test results embedded in the XML output lines. Each line carries at most one
// payload: the v1 format is checked first; if absent, the v2 format is tried.
private void DeserializeTestResults(IEnumerable<XElement> output, PerformanceTestRun run)
{
    foreach (var element in output)
    {
        foreach (var line in element.Value.Split('\n'))
        {
            var jsonV1 = GetJsonFromHashtag(PERFORMANCE_TEST_RESULTS, line);
            if (jsonV1 != null)
            {
                var resultV1 = TryDeserializePerformanceTestResultJsonObject<PerformanceTestResult>(jsonV1);
                if (resultV1 != null)
                {
                    run.Results.Add(resultV1);
                }
                continue;
            }
            var jsonV2 = GetJsonFromHashtag(PERFORMANCE_TEST_RESULTS_2, line);
            if (jsonV2 == null)
            {
                continue;
            }
            var resultV2 = TryDeserializePerformanceTestResultJsonObject<PerformanceTestResult2>(jsonV2);
            if (resultV2 != null)
            {
                run.Results.Add(resultV2);
            }
        }
    }
}
// Compares each quality setting of the two runs and records a mismatch entry for every
// differing field whose name is not listed in excludedFieldNames.
public void ValidateQualitySettings(PerformanceTestRun testRun1, PerformanceTestRun testRun2, string firstTestRunResultPath, string xmlFileNamePath, string[] excludedFieldNames)
{
    var first = testRun1.QualitySettings;
    var second = testRun2.QualitySettings;

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.AnisotropicFiltering)))
        && first.AnisotropicFiltering != second.AnisotropicFiltering)
    {
        AddMismatchedTestConfig(
            ref QualitySettingsResultFiles, MismatchedQualitySettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.AnisotropicFiltering), first.AnisotropicFiltering, second.AnisotropicFiltering);
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.AntiAliasing)))
        && first.AntiAliasing != second.AntiAliasing)
    {
        AddMismatchedTestConfig(
            ref QualitySettingsResultFiles, MismatchedQualitySettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.AntiAliasing), first.AntiAliasing.ToString(), second.AntiAliasing.ToString());
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.BlendWeights)))
        && first.BlendWeights != second.BlendWeights)
    {
        AddMismatchedTestConfig(
            ref QualitySettingsResultFiles, MismatchedQualitySettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.BlendWeights), first.BlendWeights, second.BlendWeights);
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.ColorSpace)))
        && first.ColorSpace != second.ColorSpace)
    {
        AddMismatchedTestConfig(
            ref QualitySettingsResultFiles, MismatchedQualitySettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.ColorSpace), first.ColorSpace, second.ColorSpace);
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.Vsync)))
        && first.Vsync != second.Vsync)
    {
        AddMismatchedTestConfig(
            ref QualitySettingsResultFiles, MismatchedQualitySettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.Vsync), first.Vsync.ToString(), second.Vsync.ToString());
    }
}
// Persists the run as pretty-printed JSON to both PlayerPrefs and the test-run file,
// then refreshes the asset database so the editor sees the new file.
private void CreatePerformanceTestRunJson(PerformanceTestRun run)
{
    var serialized = JsonUtility.ToJson(run, true);
    PlayerPrefs.SetString(Utils.PlayerPrefKeyRunJSON, serialized);
    File.WriteAllText(Utils.TestRunPath, serialized);
    AssetDatabase.Refresh();
}
// Fills the test run with current system/player info and writes it to the test output
// prefixed with the hashtag the result parser looks for.
public void GetPlayerSettingsTest()
{
    m_TestRun = ReadPerformanceTestRunJson();
    m_TestRun.PlayerSystemInfo = GetSystemInfo();
    m_TestRun.PlayerSettings = GetPlayerSettings();
    m_TestRun.TestSuite = "Editmode";
    var payload = "##performancetestruninfo:" + JsonUtility.ToJson(m_TestRun);
    TestContext.Out.Write(payload);
}
/// <summary>
/// Assumes first performanceTestRun should be used to compare all other performanceTestRuns against.
/// Harvests each known metadata type from the run (via reflection on PerformanceTestRun's
/// public fields) and accumulates per-field values into TypeMetadata for later
/// mismatch/missing-value reporting.
/// </summary>
/// <param name="performanceTestRun">Run whose metadata fields are collected.</param>
/// <param name="xmlFileNamePath">Result-file path used to label the collected values.</param>
public void ProcessMetadata(PerformanceTestRun performanceTestRun, string xmlFileNamePath)
{
    SetIsBuiltInVr(new[] { performanceTestRun });
    SetIsAndroid(new[] { performanceTestRun });
    foreach (var metadataType in metadataTypes)
    {
        var typeMetadata = TypeMetadata.Any(tm => tm.Type == metadataType) ? TypeMetadata.First(m => m.Type == metadataType) : null;
        // If metadataType doesn't exist in our TypeMetadata list, add it
        if (typeMetadata == null)
        {
            typeMetadata = new TypeMetadata(metadataType);
            TypeMetadata.Add(typeMetadata);
        }
        var fieldInfos = performanceTestRun.GetType().GetFields();
        // If this metadataType is completely missing from the perf test run, mark it as such and move on
        if (fieldInfos.Any(f => f.FieldType == metadataType))
        {
            var fieldInfo = fieldInfos.First(f => f.FieldType == metadataType);
            object obj = null;
            GetFieldInfoValue(performanceTestRun, metadataType, ref obj, fieldInfo);
            // If null, we're missing metadata for this performanceTestRun
            if (obj == null)
            {
                typeMetadata.NullResultCount++;
                // But we already have results for this metadataType,
                // add an empty "missing value" entry for it each FieldGroup
                // (keeps value arrays aligned with the number of results seen so far)
                if (typeMetadata.ValidResultCount > 0)
                {
                    foreach (var fieldGroup in typeMetadata.FieldGroups)
                    {
                        BackfillFieldGroupValuesForMissingMetadata(xmlFileNamePath, fieldGroup, typeMetadata);
                    }
                }
                continue;
            }
            var fieldsToProcess = GetFieldsToProcess(metadataType, obj.GetType().GetFields());
            // if we have valid field metadata to process
            if (fieldsToProcess.Length > 0)
            {
                ProcessMetaData(xmlFileNamePath, fieldsToProcess, typeMetadata, metadataType, obj);
            }
        }
    }
}
// NOTE(review): the "#if UNITY_EDITOR" below has no matching "#endif" visible in this
// method — confirm the directive is closed later in the file, otherwise this will not compile.
// Seeds the test run with editor/player/build metadata and a start timestamp, then
// persists it so the player build can read it back at runtime.
public void Setup()
{
#if UNITY_EDITOR
    m_TestRun = ReadPerformanceTestRunJson();
    m_TestRun.EditorVersion = GetEditorInfo();
    m_TestRun.PlayerSettings = GetPlayerSettings(m_TestRun.PlayerSettings);
    m_TestRun.BuildSettings = GetPlayerBuildInfo();
    m_TestRun.StartTime = Utils.DateToInt(DateTime.Now);
    CreateStreamingAssetsFolder();
    CreatePerformanceTestRunJson();
}
// Creates a fresh test run stamped with editor/build info and the current time
// expressed as Unix-epoch milliseconds (UTC), then persists it.
public void Setup()
{
    var unixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    m_TestRun = new PerformanceTestRun
    {
        EditorVersion = GetEditorInfo(),
        BuildSettings = GetPlayerBuildInfo(),
        StartTime = DateTime.Now.ToUniversalTime().Subtract(unixEpoch).TotalMilliseconds
    };
    CreateStreamingAssetsFolder();
    CreatePerformanceTestRunJson();
}
// Compares each build setting of the two runs and records a mismatch entry for every
// differing field whose name is not listed in excludedFieldNames.
public void ValidateBuildSettings(PerformanceTestRun testRun1, PerformanceTestRun testRun2, string firstTestRunResultPath, string xmlFileNamePath, string[] excludedFieldNames)
{
    var first = testRun1.BuildSettings;
    var second = testRun2.BuildSettings;

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.Platform)))
        && first.Platform != second.Platform)
    {
        AddMismatchedTestConfig(
            ref BuildSettingsResultFiles, MismatchedBuildSettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.Platform), first.Platform, second.Platform);
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.BuildTarget)))
        && first.BuildTarget != second.BuildTarget)
    {
        AddMismatchedTestConfig(
            ref BuildSettingsResultFiles, MismatchedBuildSettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.BuildTarget), first.BuildTarget, second.BuildTarget);
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.DevelopmentPlayer)))
        && first.DevelopmentPlayer != second.DevelopmentPlayer)
    {
        AddMismatchedTestConfig(
            ref BuildSettingsResultFiles, MismatchedBuildSettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.DevelopmentPlayer), first.DevelopmentPlayer.ToString(), second.DevelopmentPlayer.ToString());
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.AndroidBuildSystem)))
        && first.AndroidBuildSystem != second.AndroidBuildSystem)
    {
        AddMismatchedTestConfig(
            ref BuildSettingsResultFiles, MismatchedBuildSettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.AndroidBuildSystem), first.AndroidBuildSystem, second.AndroidBuildSystem);
    }
}
/// <summary>
/// Extracts run-level metadata from the first "##"-prefixed line of each output element.
/// Prefers a "performancetestruninfo" payload; falls back to a "performancetestresult"
/// payload just to recover StartTime so results still chart correctly on the x-axis.
/// </summary>
private void DeserializeMetadata(IEnumerable<XElement> output, PerformanceTestRun run)
{
    foreach (var element in output)
    {
        // BUG FIX: the old filter used e.Substring(0, 2) guarded only by e.Length > 0,
        // which throws ArgumentOutOfRangeException on a one-character line. StartsWith
        // is safe for every length.
        var hashtagLines = element.Value.Split('\n')
            .Where(e => e.StartsWith("##", StringComparison.Ordinal))
            .ToArray();
        if (hashtagLines.Length == 0)
        {
            continue;
        }
        var line = hashtagLines[0];
        var json = GetJsonFromHashtag("performancetestruninfo", line);
        // This is the happy case where we have a performancetestruninfo json object
        if (json != null)
        {
            var result = TryDeserializePerformanceTestRunJsonObject(json);
            if (result == null)
            {
                continue;
            }
            run.TestSuite = result.TestSuite;
            run.EditorVersion = result.EditorVersion;
            run.QualitySettings = result.QualitySettings;
            run.ScreenSettings = result.ScreenSettings;
            run.BuildSettings = result.BuildSettings;
            run.PlayerSettings = result.PlayerSettings;
            run.PlayerSystemInfo = result.PlayerSystemInfo;
            run.StartTime = result.StartTime;
            run.EndTime = Utils.DateToInt(DateTime.Now);
        }
        // Unhappy case: no performancetestruninfo object (metadata may be missing).
        // Try a performancetestresult object instead, which should at least carry StartTime.
        else
        {
            json = GetJsonFromHashtag("performancetestresult", line);
            if (json != null)
            {
                var result = TryDeserializePerformanceTestRunJsonObject(json);
                // BUG FIX: the old code dereferenced result.StartTime unconditionally,
                // throwing NullReferenceException when the payload failed to deserialize.
                if (result != null)
                {
                    run.StartTime = result.StartTime;
                    run.EndTime = Utils.DateToInt(DateTime.Now);
                }
            }
        }
    }
}
// Compares each screen setting of the two runs and records a mismatch entry for every
// differing field whose name is not listed in excludedFieldNames.
public void ValidateScreenSettings(PerformanceTestRun testRun1, PerformanceTestRun testRun2, string firstTestRunResultPath, string xmlFileNamePath, string[] excludedFieldNames)
{
    var first = testRun1.ScreenSettings;
    var second = testRun2.ScreenSettings;

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.Fullscreen)))
        && first.Fullscreen != second.Fullscreen)
    {
        AddMismatchedTestConfig(
            ref ScreenSettingsResultFiles, MismatchedScreenSettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.Fullscreen), first.Fullscreen.ToString(), second.Fullscreen.ToString());
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.ScreenHeight)))
        && first.ScreenHeight != second.ScreenHeight)
    {
        AddMismatchedTestConfig(
            ref ScreenSettingsResultFiles, MismatchedScreenSettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.ScreenHeight), first.ScreenHeight.ToString(), second.ScreenHeight.ToString());
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.ScreenRefreshRate)))
        && first.ScreenRefreshRate != second.ScreenRefreshRate)
    {
        AddMismatchedTestConfig(
            ref ScreenSettingsResultFiles, MismatchedScreenSettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.ScreenRefreshRate), first.ScreenRefreshRate.ToString(), second.ScreenRefreshRate.ToString());
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.ScreenWidth)))
        && first.ScreenWidth != second.ScreenWidth)
    {
        AddMismatchedTestConfig(
            ref ScreenSettingsResultFiles, MismatchedScreenSettingsValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.ScreenWidth), first.ScreenWidth.ToString(), second.ScreenWidth.ToString());
    }
}
/// <summary>
/// Parses an NUnit XML document into a PerformanceTestRun by scanning its "output"
/// elements for embedded results and metadata.
/// </summary>
/// <param name="xmlDocument">Test-run XML document to parse.</param>
/// <returns>The populated run.</returns>
/// <exception cref="Exception">Thrown when the document contains no "output" elements.</exception>
private PerformanceTestRun TryParseXmlToPerformanceTestRun(XDocument xmlDocument)
{
    var output = xmlDocument.Descendants("output").ToArray();
    // ToArray() never returns null, so only the empty case needs handling
    // (the old "output == null" branch was unreachable).
    if (!output.Any())
    {
        // FIX: corrected the typo "ouput" in the exception message.
        throw new Exception("The xmlDocument passed to the TryParseXmlToPerformanceTestRun method does not have any 'output' xml tags needed for correct parsing.");
    }
    var run = new PerformanceTestRun();
    DeserializeTestResults(output, run);
    DeserializeMetadata(output, run);
    return run;
}
// Parses the container's "output" elements into a PerformanceTestRun.
// Returns null when there are no "output" elements to parse.
private PerformanceTestRun TryParseXmlToPerformanceTestRun(XContainer xmlDocument)
{
    var outputElements = xmlDocument.Descendants("output").ToArray();
    if (outputElements.Length == 0)
    {
        return null;
    }
    var run = new PerformanceTestRun();
    DeserializeTestResults(outputElements, run);
    DeserializeMetadata(outputElements, run);
    return run;
}
// Adds one deserialized PerformanceTestResult to the run for every
// "##performancetestresult" line found in the output elements.
private static void DeserializeTestResults(IEnumerable<XElement> output, PerformanceTestRun run)
{
    foreach (var element in output)
    {
        var lines = element.Value.Split('\n');
        foreach (var line in lines)
        {
            var json = GetJsonFromHashtag("performancetestresult", line);
            if (json != null)
            {
                run.Results.Add(JsonConvert.DeserializeObject<PerformanceTestResult>(json));
            }
        }
    }
}
/// <summary>
/// Builds one TestResult per test in the run, with its sample groups merged across
/// executions and converted to SampleGroupResult records.
/// </summary>
/// <param name="performanceTestRun">Run whose results are aggregated.</param>
/// <returns>One TestResult per distinct test name, in first-seen order.</returns>
public List<TestResult> GetTestResults(PerformanceTestRun performanceTestRun)
{
    var mergedTestExecutions = MergeTestExecutions(performanceTestRun);
    var performanceTestResults = new List<TestResult>();
    foreach (var testName in mergedTestExecutions.Keys)
    {
        // Look up the source result once instead of scanning Results twice per test
        // (the old code ran First(...) separately for TestCategories and TestVersion).
        var sourceResult = performanceTestRun.Results.First(r => r.TestName == testName);
        var performanceTestResult = new TestResult
        {
            TestName = testName,
            TestCategories = sourceResult.TestCategories,
            TestVersion = sourceResult.TestVersion,
            State = (int)TestState.Success,
            SampleGroupResults = new List<SampleGroupResult>()
        };
        foreach (var sampleGroup in mergedTestExecutions[testName])
        {
            // BaselineValue of -1 marks "no baseline assigned yet".
            var sampleGroupResult = new SampleGroupResult
            {
                SampleGroupName = sampleGroup.Definition.Name,
                SampleUnit = sampleGroup.Definition.SampleUnit.ToString(),
                IncreaseIsBetter = sampleGroup.Definition.IncreaseIsBetter,
                Threshold = sampleGroup.Definition.Threshold,
                AggregationType = sampleGroup.Definition.AggregationType.ToString(),
                Percentile = sampleGroup.Definition.Percentile,
                Min = sampleGroup.Min,
                Max = sampleGroup.Max,
                Median = sampleGroup.Median,
                Average = sampleGroup.Average,
                StandardDeviation = sampleGroup.StandardDeviation,
                PercentileValue = sampleGroup.PercentileValue,
                Sum = sampleGroup.Sum,
                Zeroes = sampleGroup.Zeroes,
                SampleCount = sampleGroup.SampleCount,
                BaselineValue = -1,
                AggregatedValue = GetAggregatedSampleValue(sampleGroup)
            };
            performanceTestResult.SampleGroupResults.Add(sampleGroupResult);
        }
        performanceTestResults.Add(performanceTestResult);
    }
    return performanceTestResults;
}
// Extracts "##performancetestresult2" payloads from the output lines and maps each
// v2-format result onto the internal PerformanceTestResult entity.
private static void DeserializeTestResultsV2(IEnumerable<XElement> output, PerformanceTestRun run)
{
    foreach (var element in output)
    {
        foreach (var line in element.Value.Split('\n'))
        {
            var json = GetJsonFromHashtag("performancetestresult2", line);
            if (json == null)
            {
                continue;
            }
            var parsed = TryDeserializePerformanceTestResultJsonObject(json);
            if (parsed == null)
            {
                continue;
            }
            run.Results.Add(new PerformanceTestResult()
            {
                TestCategories = parsed.Categories,
                TestName = parsed.Name,
                TestVersion = parsed.Version,
                SampleGroups = parsed.SampleGroups.Select(sg => new Entities.SampleGroup
                {
                    Samples = sg.Samples,
                    Average = sg.Average,
                    Max = sg.Max,
                    Median = sg.Median,
                    Min = sg.Min,
                    Sum = sg.Sum,
                    StandardDeviation = sg.StandardDeviation,
                    SampleCount = sg.Samples.Count,
                    Definition = new SampleGroupDefinition()
                    {
                        Name = sg.Name,
                        SampleUnit = (Entities.SampleUnit)sg.Unit,
                        IncreaseIsBetter = sg.IncreaseIsBetter,
                        Threshold = sg.Threshold
                    }
                }).ToList()
            });
        }
    }
}
// The first run seen becomes the reference configuration; every subsequent run is
// validated against it, section by section, honoring per-section excluded fields.
private void ValidateMetadata(PerformanceTestRun performanceTestRun, string xmlFileNamePath)
{
    if (firstResult)
    {
        firstTestRunResultPath = xmlFileNamePath;
        firstTestRun = performanceTestRun;
        firstResult = false;
        return;
    }
    MetadataValidator.ValidatePlayerSystemInfo(firstTestRun, performanceTestRun, firstTestRunResultPath, xmlFileNamePath, ExcludedFieldNames<PlayerSystemInfo>());
    MetadataValidator.ValidatePlayerSettings(firstTestRun, performanceTestRun, firstTestRunResultPath, xmlFileNamePath, ExcludedFieldNames<PlayerSettings>());
    MetadataValidator.ValidateQualitySettings(firstTestRun, performanceTestRun, firstTestRunResultPath, xmlFileNamePath, ExcludedFieldNames<QualitySettings>());
    MetadataValidator.ValidateScreenSettings(firstTestRun, performanceTestRun, firstTestRunResultPath, xmlFileNamePath, ExcludedFieldNames<ScreenSettings>());
    MetadataValidator.ValidateBuildSettings(firstTestRun, performanceTestRun, firstTestRunResultPath, xmlFileNamePath, ExcludedFieldNames<BuildSettings>());
    MetadataValidator.ValidateEditorVersion(firstTestRun, performanceTestRun, firstTestRunResultPath, xmlFileNamePath, ExcludedFieldNames<EditorVersion>());
}
// Wraps a parsed run and its aggregated test results into a PerformanceTestRunResult.
// StartTime is stored on the run as Unix-epoch milliseconds and converted back to a DateTime here.
public PerformanceTestRunResult CreateTestRunResult(PerformanceTestRun runResults, List<TestResult> testResults, string resultName, bool isBaseline = false)
{
    var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    return new PerformanceTestRunResult
    {
        ResultName = resultName,
        IsBaseline = isBaseline,
        TestSuite = runResults.TestSuite,
        StartTime = epoch.AddMilliseconds(runResults.StartTime),
        TestResults = testResults,
        PlayerSystemInfo = runResults.PlayerSystemInfo,
        EditorVersion = runResults.EditorVersion,
        BuildSettings = runResults.BuildSettings,
        ScreenSettings = runResults.ScreenSettings,
        QualitySettings = runResults.QualitySettings,
        PlayerSettings = runResults.PlayerSettings
    };
}
/// <summary>
/// Copies run-level metadata onto the run from the first valid "##performancetestruninfo"
/// payload found among the "##"-prefixed lines of each output element.
/// </summary>
private void DeserializeMetadata(IEnumerable<XElement> output, PerformanceTestRun run)
{
    foreach (var element in output)
    {
        // BUG FIX: the old filter used e.Substring(0, 2) guarded only by e.Length > 0,
        // which throws ArgumentOutOfRangeException on a one-character line. StartsWith
        // handles every length. The separate Any() pre-check was redundant and is removed.
        var hashtagLines = element.Value.Split('\n')
            .Where(e => e.StartsWith("##", StringComparison.Ordinal));
        foreach (var line in hashtagLines)
        {
            var json = GetJsonFromHashtag("performancetestruninfo", line);
            if (json == null)
            {
                continue;
            }
            var result = TryDeserializePerformanceTestRunJsonObject(json);
            if (result == null)
            {
                continue;
            }
            run.TestSuite = result.TestSuite;
            run.EditorVersion = result.EditorVersion;
            run.QualitySettings = result.QualitySettings;
            run.ScreenSettings = result.ScreenSettings;
            run.BuildSettings = result.BuildSettings;
            run.PlayerSettings = result.PlayerSettings;
            run.PlayerSystemInfo = result.PlayerSystemInfo;
            run.StartTime = result.StartTime;
            run.EndTime = Utils.DateToInt(DateTime.Now);
            // Only the first successfully parsed runinfo payload per element is applied.
            break;
        }
    }
}
// Populates run.PlayerSettings with XR-related values. The entire body is compiled out
// unless ENABLE_VR is defined; several fields additionally depend on the Unity version
// and the OCULUS_SDK define.
private static void SetXRPlayerSettings(PerformanceTestRun run)
{
#if ENABLE_VR
    var settings = Resources.Load<CurrentSettings>("settings");
#if !UNITY_2020_1_OR_NEWER
    // Builtin-VR flag only exists pre-2020.1.
    run.PlayerSettings.VrSupported = UnityEditor.PlayerSettings.virtualRealitySupported;
#endif
#if OCULUS_SDK
    // Android and desktop builds can be configured with different stereo rendering modes.
    run.PlayerSettings.StereoRenderingPath = EditorUserBuildSettings.activeBuildTarget == BuildTarget.Android ? settings.StereoRenderingModeAndroid : settings.StereoRenderingModeDesktop;
#endif
#if !UNITY_2020_1_OR_NEWER
    // EnabledXrTargets is only populated for builtin VR, and builtin VR is not supported for 2020.1 or newer
    run.PlayerSettings.EnabledXrTargets = new List<string>(UnityEditor.PlayerSettings.GetVirtualRealitySDKs(EditorUserBuildSettings.selectedBuildTargetGroup));
    // Sorted so target lists compare deterministically across runs.
    run.PlayerSettings.EnabledXrTargets.Sort();
#else
    run.PlayerSettings.EnabledXrTargets = new List<string>();
#endif
    run.PlayerSettings.ScriptingBackend = UnityEditor.PlayerSettings.GetScriptingBackend(EditorUserBuildSettings.selectedBuildTargetGroup).ToString();
    run.PlayerSettings.ScriptingRuntimeVersion = GetAdditionalMetadata(settings);
#endif
}
/// <summary>
/// Checks that every required metadata section/field of the run is populated.
/// Logs a single consolidated error listing all missing fields and returns false
/// when anything is unset; returns true otherwise.
/// </summary>
/// <param name="run">Run whose metadata is verified.</param>
/// <returns>True when all required metadata is present.</returns>
public static bool VerifyTestRunMetadata(PerformanceTestRun run)
{
    var errors = new List<string>();
    if (run.TestSuite != "Playmode" && run.TestSuite != "Editmode")
    {
        errors.Add("TestSuite");
    }
    // Timestamps are Unix-epoch milliseconds; anything below 1 means "never set".
    if (run.StartTime < 1.0D)
    {
        errors.Add("StartTime");
    }
    if (run.EndTime < 1.0D)
    {
        errors.Add("EndTime");
    }
    if (run.BuildSettings == null)
    {
        errors.Add("BuildSettings");
    }
    else
    {
        if (run.BuildSettings.BuildTarget.Length == 0) { errors.Add("BuildSettings.BuildTarget"); }
        if (run.BuildSettings.Platform.Length == 0) { errors.Add("BuildSettings.Platform"); }
    }
    if (run.EditorVersion == null)
    {
        errors.Add("EditorVersion");
    }
    else
    {
        if (run.EditorVersion.DateSeconds == 0) { errors.Add("EditorVersion.DateSeconds"); }
        if (run.EditorVersion.FullVersion.Length == 0) { errors.Add("EditorVersion.FullVersion"); }
        if (run.EditorVersion.Branch.Length == 0) { errors.Add("EditorVersion.Branch"); }
        if (run.EditorVersion.RevisionValue == 0) { errors.Add("EditorVersion.RevisionValue"); }
    }
    if (run.PlayerSettings == null)
    {
        // BUG FIX: previously reported "Performance test has its build settings unassigned."
        // (copy-paste from the BuildSettings branch); name the actual missing section.
        errors.Add("PlayerSettings");
    }
    else
    {
        if (run.PlayerSettings.ScriptingBackend.Length == 0) { errors.Add("PlayerSettings.ScriptingBackend"); }
        // BUG FIX: the reported field name was misspelled "PlayerSettings.GraphicsAp".
        if (run.PlayerSettings.GraphicsApi.Length == 0) { errors.Add("PlayerSettings.GraphicsApi"); }
        if (run.PlayerSettings.Batchmode.Length == 0) { errors.Add("PlayerSettings.Batchmode"); }
    }
    if (run.PlayerSystemInfo == null)
    {
        // BUG FIX: same copy-paste message as above; name the actual missing section.
        errors.Add("PlayerSystemInfo");
    }
    else
    {
        if (run.PlayerSystemInfo.ProcessorCount == 0) { errors.Add("PlayerSystemInfo.ProcessorCount"); }
        if (run.PlayerSystemInfo.OperatingSystem.Length == 0) { errors.Add("PlayerSystemInfo.OperatingSystem"); }
        if (run.PlayerSystemInfo.ProcessorType.Length == 0) { errors.Add("PlayerSystemInfo.ProcessorType"); }
        if (run.PlayerSystemInfo.GraphicsDeviceName.Length == 0) { errors.Add("PlayerSystemInfo.GraphicsDeviceName"); }
        if (run.PlayerSystemInfo.SystemMemorySize == 0) { errors.Add("PlayerSystemInfo.SystemMemorySize"); }
    }
    if (run.QualitySettings == null)
    {
        // BUG FIX: same copy-paste message as above; name the actual missing section.
        errors.Add("QualitySettings");
    }
    else
    {
        if (run.QualitySettings.ColorSpace.Length == 0) { errors.Add("QualitySettings.ColorSpace"); }
        if (run.QualitySettings.BlendWeights.Length == 0) { errors.Add("QualitySettings.BlendWeights"); }
        if (run.QualitySettings.AnisotropicFiltering.Length == 0) { errors.Add("QualitySettings.AnisotropicFiltering"); }
    }
    if (run.ScreenSettings == null)
    {
        errors.Add("ScreenSettings");
    }
    if (errors.Count > 0)
    {
        // string.Join also fixes the trailing ", " the old StringBuilder loop produced.
        Debug.LogError("Performance run has missing metadata. Please report this as a bug on #devs-performance. The following fields have not been set: " + string.Join(", ", errors));
        return false;
    }
    return true;
}
/// <summary>
/// Parses a v2 "Run" JSON payload and converts it into the legacy PerformanceTestRun shape.
/// Returns null when the payload deserializes to null.
/// </summary>
/// <param name="json">Serialized v2 Run object.</param>
/// <returns>The converted run, or null.</returns>
private static PerformanceTestRun ParseJsonV2(string json)
{
    // The old code wrapped this call in "try { ... } catch (System.Exception) { throw; }",
    // which is a no-op; deserialization exceptions now propagate naturally.
    var run = JsonConvert.DeserializeObject<Run>(json);
    if (run == null)
    {
        return null;
    }
    var testRun = new PerformanceTestRun()
    {
        BuildSettings = new BuildSettings()
        {
            Platform = run.Player.Platform,
            BuildTarget = run.Player.BuildTarget,
            // NOTE(review): hard-coded to true — the v2 payload appears to carry no
            // corresponding field; confirm against the Run schema.
            DevelopmentPlayer = true,
            AndroidBuildSystem = run.Player.AndroidBuildSystem
        },
        EditorVersion = new EditorVersion()
        {
            Branch = run.Editor.Branch,
            DateSeconds = run.Editor.Date,
            FullVersion = $"{run.Editor.Version} ({run.Editor.Changeset})",
            RevisionValue = 0
        },
        PlayerSettings = new PlayerSettings()
        {
            GpuSkinning = run.Player.GpuSkinning,
            GraphicsApi = run.Player.GraphicsApi,
            RenderThreadingMode = run.Player.RenderThreadingMode,
            ScriptingBackend = run.Player.ScriptingBackend,
            AndroidTargetSdkVersion = run.Player.AndroidTargetSdkVersion,
            EnabledXrTargets = new List<string>(),
            ScriptingRuntimeVersion = "",
            StereoRenderingPath = GetStereoPath(run.Player.StereoRenderingPath, run.Player.AndroidTargetSdkVersion)
        },
        QualitySettings = new QualitySettings()
        {
            Vsync = run.Player.Vsync,
            AntiAliasing = run.Player.AntiAliasing,
            AnisotropicFiltering = run.Player.AnisotropicFiltering,
            BlendWeights = run.Player.BlendWeights,
            ColorSpace = run.Player.ColorSpace
        },
        ScreenSettings = new ScreenSettings()
        {
            Fullscreen = run.Player.Fullscreen,
            ScreenHeight = run.Player.ScreenHeight,
            ScreenWidth = run.Player.ScreenWidth,
            ScreenRefreshRate = run.Player.ScreenRefreshRate
        },
        PlayerSystemInfo = new Entities.PlayerSystemInfo()
        {
            DeviceModel = run.Hardware.DeviceModel,
            DeviceName = run.Hardware.DeviceName,
            OperatingSystem = run.Hardware.OperatingSystem,
            ProcessorCount = run.Hardware.ProcessorCount,
            ProcessorType = run.Hardware.ProcessorType,
            GraphicsDeviceName = run.Hardware.GraphicsDeviceName,
            SystemMemorySize = run.Hardware.SystemMemorySizeMB,
            XrDevice = run.Hardware.XrDevice,
            XrModel = run.Hardware.XrModel
        },
        StartTime = run.Date,
        TestSuite = run.TestSuite,
        Results = new List<PerformanceTestResult>()
    };
    // EndTime is Unix-epoch milliseconds at parse time; DateTime.UtcNow is equivalent to
    // the old DateTime.Now.ToUniversalTime() but avoids the local-time round trip.
    testRun.EndTime = DateTime.UtcNow
        .Subtract(new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc))
        .TotalMilliseconds;
    foreach (var res in run.Results)
    {
        // Map each v2 result onto the legacy entity shape.
        var pt = new PerformanceTestResult()
        {
            TestCategories = res.Categories,
            TestName = res.Name,
            TestVersion = res.Version,
            SampleGroups = res.SampleGroups.Select(sg => new Entities.SampleGroup
            {
                Samples = sg.Samples,
                Average = sg.Average,
                Max = sg.Max,
                Median = sg.Median,
                Min = sg.Min,
                Sum = sg.Sum,
                StandardDeviation = sg.StandardDeviation,
                SampleCount = sg.Samples.Count,
                Definition = new SampleGroupDefinition()
                {
                    Name = sg.Name,
                    SampleUnit = (Entities.SampleUnit)sg.Unit,
                    IncreaseIsBetter = sg.IncreaseIsBetter,
                    Threshold = sg.Threshold
                }
            }).ToList()
        };
        testRun.Results.Add(pt);
    }
    return testRun;
}
// Compares each player-system-info field of the two runs and records a mismatch entry
// for every differing field whose name is not listed in excludedFieldNames.
public void ValidatePlayerSystemInfo(PerformanceTestRun testRun1, PerformanceTestRun testRun2, string firstTestRunResultPath, string xmlFileNamePath, string[] excludedFieldNames)
{
    var first = testRun1.PlayerSystemInfo;
    var second = testRun2.PlayerSystemInfo;

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.DeviceModel)))
        && first.DeviceModel != second.DeviceModel)
    {
        AddMismatchedTestConfig(
            ref PlayerSystemInfoResultFiles, MismatchedPlayerSystemInfoValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.DeviceModel), first.DeviceModel, second.DeviceModel);
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.GraphicsDeviceName)))
        && first.GraphicsDeviceName != second.GraphicsDeviceName)
    {
        AddMismatchedTestConfig(
            ref PlayerSystemInfoResultFiles, MismatchedPlayerSystemInfoValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.GraphicsDeviceName), first.GraphicsDeviceName, second.GraphicsDeviceName);
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.ProcessorCount)))
        && first.ProcessorCount != second.ProcessorCount)
    {
        AddMismatchedTestConfig(
            ref PlayerSystemInfoResultFiles, MismatchedPlayerSystemInfoValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.ProcessorCount), first.ProcessorCount.ToString(), second.ProcessorCount.ToString());
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.ProcessorType)))
        && first.ProcessorType != second.ProcessorType)
    {
        AddMismatchedTestConfig(
            ref PlayerSystemInfoResultFiles, MismatchedPlayerSystemInfoValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.ProcessorType), first.ProcessorType, second.ProcessorType);
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.XrDevice)))
        && first.XrDevice != second.XrDevice)
    {
        AddMismatchedTestConfig(
            ref PlayerSystemInfoResultFiles, MismatchedPlayerSystemInfoValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.XrDevice), first.XrDevice, second.XrDevice);
    }

    if ((excludedFieldNames == null || !excludedFieldNames.Contains(nameof(first.XrModel)))
        && first.XrModel != second.XrModel)
    {
        AddMismatchedTestConfig(
            ref PlayerSystemInfoResultFiles, MismatchedPlayerSystemInfoValues, firstTestRunResultPath, xmlFileNamePath,
            nameof(first.XrModel), first.XrModel, second.XrModel);
    }
}
/// <summary>
/// Assumes first performanceTestRun should be used to compare all other performanceTestRuns against.
/// Harvests each known metadata type from the run (via reflection on PerformanceTestRun's
/// public fields), accumulates per-field values into TypeMetadata, and flags any value
/// that differs from the first-collected value as mismatched.
/// </summary>
/// <param name="performanceTestRun">Run whose metadata fields are collected.</param>
/// <param name="xmlFileNamePath">Result-file path used to label the collected values.</param>
public void ProcessMetadata(PerformanceTestRun performanceTestRun, string xmlFileNamePath)
{
    SetIsVrSupported(new[] { performanceTestRun });
    SetIsAndroid(new[] { performanceTestRun });
    foreach (var metadataType in metadataTypes)
    {
        var typeMetadata = TypeMetadata.Any(tm => tm.Type == metadataType) ? TypeMetadata.First(m => m.Type == metadataType) : null;
        // If metadataType doesn't exist in our TypeMetadata list, add it
        if (typeMetadata == null)
        {
            typeMetadata = new TypeMetadata(metadataType);
            TypeMetadata.Add(typeMetadata);
        }
        var fieldInfos = performanceTestRun.GetType().GetFields();
        // If this metadataType is completely missing from the perf test run, mark it as such and move on
        if (fieldInfos.Any(f => f.FieldType == metadataType))
        {
            var fieldInfo = fieldInfos.First(f => f.FieldType == metadataType);
            object obj = null;
            GetFieldInfoValue(performanceTestRun, metadataType, ref obj, fieldInfo);
            // If null, we're missing metadata for this performanceTestRun
            if (obj == null)
            {
                typeMetadata.NullResultCount++;
                // But we already have results for this metadataType,
                // add an empty "missing value" entry for it each FieldGroup
                // (keeps value arrays aligned with the number of results seen so far)
                if (typeMetadata.ValidResultCount > 0)
                {
                    foreach (var fieldGroup in typeMetadata.FieldGroups)
                    {
                        BackfillFieldGroupValuesForMissingMetadata(xmlFileNamePath, fieldGroup, typeMetadata);
                    }
                }
                continue;
            }
            var fields = obj.GetType().GetFields();
            var fieldsToProcess = GetFieldsToProcess(metadataType, fields);
            // if we have valid field metadata to process
            if (fieldsToProcess.Length > 0)
            {
                foreach (var field in fieldsToProcess)
                {
                    // Create the FieldGroup on first sight of this field name.
                    if (!typeMetadata.FieldGroups.Any(fg => fg.FieldName.Equals(field.Name)))
                    {
                        typeMetadata.FieldGroups.Add(new FieldGroup(field.Name));
                    }
                    var thisFieldGroup = typeMetadata.FieldGroups.First(fg => fg.FieldName.Equals(field.Name));
                    // We want to keep the values array length consistent with the number of results, even for results
                    // that are missing metadata. We do that here.
                    BackfillFieldGroupValuesForMissingMetadata(xmlFileNamePath, thisFieldGroup, typeMetadata);
                    // Add this field value (Values is a field, so it can be grown in place via ref)
                    var value = GetValueBasedOnType(metadataType, field, obj);
                    Array.Resize(ref thisFieldGroup.Values, thisFieldGroup.Values.Length + 1);
                    thisFieldGroup.Values[thisFieldGroup.Values.Length - 1] = new FieldValue(xmlFileNamePath, value);
                    // fieldGroup.Values is sorted by result name; the first element in this array
                    // is considered to be the reference point, regardless if it's a "baseline" or not.
                    if (thisFieldGroup.Values[thisFieldGroup.Values.Length - 1].Value != thisFieldGroup.Values[0].Value)
                    {
                        thisFieldGroup.Values[thisFieldGroup.Values.Length - 1].IsMismatched = true;
                    }
                }
                typeMetadata.ValidResultCount++;
            }
        }
    }
}