/// <summary>
/// Draws the control (= node): a white background, then either a dash for an
/// unknown state or a red-to-green progress bar with the percentage as text.
/// </summary>
/// <param name="node">The node to take the data from.</param>
/// <param name="context">A <see cref="DrawContext"/> which holds information about the drawing.</param>
public override void Draw(TreeNodeAdv node, DrawContext context)
{
    TestCaseBase testCase = GetValue(node) as TestCaseBase;
    if (testCase != null)
    {
        // Draw a white background
        Rectangle rect = new Rectangle(context.Bounds.X + 1, context.Bounds.Y + 1,
            context.Bounds.Width - 3, context.Bounds.Height - 2);
        context.Graphics.FillRectangle(Brushes.White, rect);

        // StringFormat wraps a GDI+ handle; dispose it so repeated repaints
        // do not leak native resources.
        using (StringFormat format = new StringFormat(StringFormatFlags.NoWrap)
        {
            Alignment = StringAlignment.Center,
            LineAlignment = StringAlignment.Center
        })
        {
            if (testCase.State == TestCaseState.Unknown)
            {
                context.Graphics.DrawString("-", context.Font, Brushes.Black, rect, format);
            }
            else
            {
                // Clamp to [0,1]: Color.FromArgb throws ArgumentException for
                // channel values outside 0..255, which an out-of-range Percent
                // would otherwise produce.
                double percent = Math.Max(0.0, Math.Min(1.0, testCase.Percent));

                // Blend from red (0%) to green (100%); dispose the brush to
                // avoid leaking a GDI handle per paint call.
                using (SolidBrush bgBrush = new SolidBrush(
                    Color.FromArgb((int)((1 - percent) * 255), (int)(percent * 255), 0)))
                {
                    context.Graphics.FillRectangle(bgBrush, rect.X, rect.Y,
                        (int)(rect.Width * percent), rect.Height);
                }

                context.Graphics.DrawString(string.Format("{0}%", (int)(percent * 100)),
                    context.Font, Brushes.Black, rect, format);
            }
        }
    }
}
/// <summary>
/// Gets called when a value for the state column is needed; supplies the
/// status icon matching the node's test case state.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">More information about the event.</param>
void nodeIconState_ValueNeeded(object sender, NodeControlValueEventArgs e)
{
    TestCaseBase testCase = e.Node.Tag as TestCaseBase;
    if (testCase != null)
    {
        switch (testCase.State)
        {
            case TestCaseState.Unknown:
                e.Value = Resources.Unknown;
                break;
            case TestCaseState.Success:
                e.Value = Resources.OK;
                break;
            // All failure variants share the same error icon; stack the case
            // labels instead of duplicating the assignment three times.
            case TestCaseState.Fail:
            case TestCaseState.CompilationFailed:
            case TestCaseState.ReflectionFailed:
                e.Value = Resources.Error;
                break;
        }
    }
}
/// <summary>
/// Compares the rendered output images of two test cases pixel-wise. When the
/// difference exceeds the allowed threshold, both images are copied into the
/// diffs directory for inspection; otherwise any stale diff copies from a
/// previous failing run are removed. Fails the test via Assert.InRange.
/// </summary>
/// <param name="testCase">Base name of the test case (used to build file names).</param>
/// <param name="test1">First test case whose original image is compared.</param>
/// <param name="test2">Second test case whose original image is compared.</param>
/// <exception cref="Exception">Thrown when either original image file is missing.</exception>
protected void Compare(string testCase, TestCaseBase test1, TestCaseBase test2)
{
    Directory.CreateDirectory(TestOutputDir);
    var originalFile1 = Path.Combine(OriginalDir, testCase + test1.ToString() + ".png");
    var originalFile2 = Path.Combine(OriginalDir, testCase + test2.ToString() + ".png");
    var currentFile1 = Path.Combine(DiffsDir, TestClassName + "." + testCase + test1.ToStringWithoutComparison() + "." + test1.ComparisonName + ".current1.png");
    var currentFile2 = Path.Combine(DiffsDir, TestClassName + "." + testCase + test2.ToStringWithoutComparison() + "." + test2.ComparisonName + ".current2.png");

    Console.WriteLine($"Comparing Files: {originalFile1} {originalFile2}");
    if (!File.Exists(originalFile1))
    {
        throw new Exception("Missing file: " + originalFile1);
    }
    if (!File.Exists(originalFile2))
    {
        throw new Exception("Missing file: " + originalFile2);
    }

    // Images hold unmanaged pixel buffers; dispose them deterministically so
    // repeated test runs do not exhaust memory or keep file handles open.
    using var bmpOriginal1 = Image.Load(originalFile1);
    using var bmpOriginal2 = Image.Load(originalFile2);

    var maxDiffAllowed = 1000;
    var diff = CompareImage(bmpOriginal1, bmpOriginal2, maxDiffAllowed);
    if (diff > maxDiffAllowed)
    {
        Console.WriteLine($"MaxDifference: {diff} MaxDiffAllowed: {maxDiffAllowed}");
        Directory.CreateDirectory(DiffsDir);
        // overwrite: true — a stale copy from an earlier failing run would
        // otherwise make File.Copy throw IOException and mask the real failure.
        File.Copy(originalFile1, currentFile1, true);
        File.Copy(originalFile2, currentFile2, true);
    }
    else
    {
        // Test passes: clean up diff artifacts left over from previous failures.
        if (File.Exists(currentFile1))
        {
            File.Delete(currentFile1);
        }
        if (File.Exists(currentFile2))
        {
            File.Delete(currentFile2);
        }
    }

    Assert.InRange(diff, 0, maxDiffAllowed);
}
/// <summary>
/// Handles a cell click in the track-id grid. On the first tab it loads the
/// bound summary's log file into the text box (green background for PASS
/// paths, red otherwise); on other tabs it deserializes the bound result's
/// JSON file and rebinds the result grid.
/// </summary>
/// <param name="sender">The grid raising the event.</param>
/// <param name="e">Cell coordinates of the click.</param>
private void dgvTrackId_CellClick(object sender, DataGridViewCellEventArgs e)
{
    if (tabControl.SelectedIndex == 0)
    {
        try
        {
            MyTestSummary tid = (MyTestSummary)dgvTrackId.Rows[e.RowIndex].DataBoundItem;
            if (tid != null)
            {
                // Single assignment instead of the redundant nested
                // "x = cond ? x = a : x = b" form.
                rTxtBox.BackColor = tid.FilePath.Contains("PASS") ? Color.DarkGreen : Color.DarkRed;
                rTxtBox.Text = File.ReadAllText(tid.FilePath);
            }
        }
        catch (Exception)
        {
            // Log file missing/unreadable: show a localized notice instead of crashing.
            rTxtBox.Text = rm.GetString("uiCantReadTrackidFile");
        }
    }
    else
    {
        // NOTE(review): the original wrapped this in catch (Exception) { throw; },
        // which only resets nothing and adds noise — removed as a no-op.
        MyTestResult tr = (MyTestResult)dgvTrackId.Rows[e.RowIndex].DataBoundItem;
        if (tr != null)
        {
            string json = File.ReadAllText(tr.FilePath);
            tr.MyTestResultList = TestCaseBase.ToObject(json);
            bindingListTestResult.Clear();
            foreach (var item in tr.MyTestResultList)
            {
                bindingListTestResult.Add(item);
            }
            // Reset the data source so the grid refreshes its rows.
            dgvTestResult.DataSource = null;
            dgvTestResult.DataSource = bindingListTestResult;
        }
    }
}
/// <summary>
/// Runs every test case through its Prepare / Execute / EvaluateResults
/// phases, updating the UI progress percentage and the andon state as it
/// goes, then renames and saves the result log as PASS/FAIL/CANCELED.
/// </summary>
/// <param name="testCases">The ordered list of test cases to run.</param>
/// <param name="runner">Runner holding the test core context (UI, andon, log paths).</param>
private static void run(List<TestCaseBase> testCases, TestCoreRunner runner)
{
    bool hasFailTests = false;
    runner.tcc.Andon.SetState(Domain.Andon.State.OFF);
    try
    {
        //AdbManager.installSambaAppIfNeeded(runner.PrintModInfo);
        //runner.tcc.Mod.GetModInfo(runner.PrintModInfo);
        //Check if have match ModTrackId with scanned trackId.
        if (!runner.modTrackId.Equals(runner.trackId))
        {
            //runner.tcc.runTest = false;
            //throw new Exception("No match ModTrackId with scanned trackId.");
        }
    }
    catch (Exception ex)
    {
        runner.tcc.Andon.SetState(Domain.Andon.State.FAIL);
        runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, ex.Message);
        hasFailTests = true;
        runner.tcc.SetLogNameTo(hasFailTests ? "FAIL" : "PASS");
        TestCaseBase.SaveTestResultListToFile(runner.tcc.newLogFileLocation);
        return;
    }

    TestCaseBase.TestResultList.Clear();
    int totalTestCases = (testCases.Count) * 3; // *3 because we have Prepare, Execute and Evaluate Results
    // BUG FIX: the original computed 100 / totalTestCases with INTEGER
    // division, truncating the per-step increment (e.g. 3 test cases ->
    // 100/9 == 11, so progress topped out at 99%) and throwing
    // DivideByZeroException for an empty list. 100.0 forces double division.
    double percentByTest = totalTestCases > 0 ? 100.0 / totalTestCases : 0.0;
    double currentPercent = 0;

    foreach (TestCaseBase item in testCases)
    {
        isClosedJig(runner);
        if (!runner.tcc.runTest)
        {
            // Operator cancelled: mark FAIL state and persist a CANCELED log.
            runner.tcc.Andon.SetState(Domain.Andon.State.FAIL);
            hasFailTests = true;
            runner.tcc.SetLogNameTo("CANCELED");
            TestCaseBase.SaveTestResultListToFile(runner.tcc.newLogFileLocation);
            return;
        }
        runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.WARNING, "==================================================\n");

        int result = -1;

        //Preparing...
        currentPercent = currentPercent + percentByTest;
        runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.UPDATE_TEST_PERCENTUAL, (string.Format("{0:0}", (currentPercent))));
        if (!TryExecute(item.Prepare, item.Timeout, out result))
        {
            runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, TestCoreMessages.ParseMessages(TestCoreMessages.REACHED_TIMEOUT) + " => " + item.Timeout + " ms.");
        }
        if (result != TestCoreMessages.SUCCESS)
        {
            hasFailTests = true;
            continue;
        }

        //Executing...
        currentPercent = currentPercent + percentByTest;
        runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.UPDATE_TEST_PERCENTUAL, (string.Format("{0:0}", (currentPercent))));
        if (!TryExecute(item.Execute, item.Timeout, out result))
        {
            runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, TestCoreMessages.ParseMessages(TestCoreMessages.REACHED_TIMEOUT) + " => " + item.Timeout + " ms.");
        }
        if (result != TestCoreMessages.SUCCESS)
        {
            hasFailTests = true;
            continue;
        }

        //Evaluating...
        currentPercent = currentPercent + percentByTest;
        runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.UPDATE_TEST_PERCENTUAL, (string.Format("{0:0}", (currentPercent))));
        if (!TryExecute(item.EvaluateResults, item.Timeout, out result))
        {
            runner.tcc.NotifyUI(TestCoreMessages.TypeMessage.ERROR, TestCoreMessages.ParseMessages(TestCoreMessages.REACHED_TIMEOUT) + " => " + item.Timeout + " ms.");
        }
        if (result != TestCoreMessages.SUCCESS)
        {
            hasFailTests = true;
            continue;
        }

        //Set andon state
        if (item.ResulTest == TestCaseBase.TestEvaluateResult.PASS)
        {
            runner.tcc.Andon.SetState(Domain.Andon.State.PASS);
        }
        else
        {
            runner.tcc.Andon.SetState(Domain.Andon.State.FAIL);
            hasFailTests = true;
        }
    }

    //Change log name and save it.
    runner.tcc.SetLogNameTo(hasFailTests ? "FAIL" : "PASS");
    TestCaseBase.SaveTestResultListToFile(runner.tcc.newLogFileLocation);

    //Execute LogResult
    if (Boolean.Parse(runner.tcc.GetValueConfiguration("SETTINGS", "LOG_RESULT_ENABLE").ToLower()))
    {
        runner.tcc.MQS.LogResult(hasFailTests ? "FAIL" : "PASS");
    }
}
/// <summary>
/// Wraps a cloned copy of the given test case in a single-element parameter
/// array, suitable for data-driven test feeding.
/// </summary>
/// <param name="test">The test case to clone.</param>
/// <returns>A one-element array containing the clone.</returns>
protected static object[] TestDataResult(TestCaseBase test) => new object[] { test.Clone() };