/// <summary>
/// Populates the control with the details of the supplied test case.
/// </summary>
/// <param name="testcase">The test case to display; null simply clears the control.</param>
public void Initialize(TestCase testcase)
{
    ItemsLV.Items.Clear();
    m_testcase = testcase;

    if (testcase == null)
    {
        AdjustColumns();
        return;
    }

    AddParamater("TestID", testcase.TestId);
    AddParamater("TestCase", testcase.Name);

    // optional fields are displayed only when the schema marked them as present.
    if (testcase.SeedSpecified)
    {
        AddParamater("Seed", testcase.Seed);
    }

    if (testcase.StartSpecified)
    {
        AddParamater("Start", testcase.Start);
    }

    if (testcase.CountSpecified)
    {
        AddParamater("Count", testcase.Count);
    }

    AddParamater("SkipTest", testcase.SkipTest);

    if (testcase.Parameter != null)
    {
        for (int ii = 0; ii < testcase.Parameter.Length; ii++)
        {
            AddItem(testcase.Parameter[ii]);
        }
    }

    AdjustColumns();
}
/// <summary>
/// This method executes the test case: the setup iteration, the numbered
/// iterations from Start to Count, and finally the cleanup iteration.
/// </summary>
/// <param name="channelContext">This parameter stores the channel related data.</param>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
private void ExecuteTestCase(ChannelContext channelContext, TestCaseContext testCaseContext, TestCase testCase)
{
    try
    {
        // TestCaseComplete is reset here and set in the finally block so other
        // threads can tell when this test case is finished.
        channelContext.TestCaseComplete.Reset();
        // AsyncTestsComplete is set up front — presumably so the cleanup wait
        // below does not block when no asynchronous tests were started; confirm.
        channelContext.AsyncTestsComplete.Set();

        // bail out early if a cancel was requested before we started.
        lock (m_lock)
        {
            if (m_cancel)
            {
                return;
            }
        }

        // run the setup iteration; an error here aborts the test case
        // (or the whole sequence when HaltOnError is set).
        try
        {
            channelContext.EventLogger.LogStartEvent(testCase, TestCases.TestSetupIteration);
            ExecuteTest(channelContext, testCaseContext, testCase, TestCases.TestSetupIteration);
        }
        catch (Exception e)
        {
            channelContext.EventLogger.LogErrorEvent(testCase, TestCases.TestSetupIteration, e);

            if (m_sequenceToExecute.HaltOnError)
            {
                throw;
            }
            else
            {
                return;
            }
        }

        // start the test at the iteration specified in the test case.
        // NOTE(review): the bound is ii < Count, so Count appears to be an
        // absolute end index rather than a number of iterations — confirm.
        for (int ii = (int)testCase.Start; ii < testCase.Count; ii++)
        {
            lock (m_lock)
            {
                // stop if the run was cancelled.
                if (m_cancel)
                {
                    break;
                }

                // quick-test mode stops once the index exceeds 10.
                if (m_quickTest && ii > 10)
                {
                    break;
                }

                // notify listeners of iteration progress.
                RaiseEvent(new TestSequenceEventArgs(testCase.TestId, testCase.Name, ii));
            }

            try
            {
                ExecuteTest(channelContext, testCaseContext, testCase, ii);
            }
            catch (Exception e)
            {
                // a failed iteration is logged; the sequence either halts or
                // moves on to the next iteration.
                channelContext.EventLogger.LogErrorEvent(testCase, ii, e);

                if (m_sequenceToExecute.HaltOnError)
                {
                    throw;
                }
                else
                {
                    continue;
                }
            }
        }

        try
        {
            // Wait till any of the pending test cases to finish before closing the logger
            channelContext.AsyncTestsComplete.WaitOne();
            ExecuteTest(channelContext, testCaseContext, testCase, TestCases.TestCleanupIteration);
            channelContext.EventLogger.LogCompleteEvent(testCase, TestCases.TestCleanupIteration);
        }
        catch (Exception e)
        {
            channelContext.EventLogger.LogErrorEvent(testCase, TestCases.TestCleanupIteration, e);

            if (m_sequenceToExecute.HaltOnError)
            {
                throw;
            }
            else
            {
                return;
            }
        }
    }
    finally
    {
        // always signal completion, even on error or cancel.
        channelContext.TestCaseComplete.Set();
    }
}
/// <summary>
/// Logs an event when a test case completes sucessfully.
/// </summary>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
public void LogCompleteEvent(TestCase testCase, int iteration)
{
    // the complete event can be logged each iteration or once per test case.
    bool logEveryIteration = (m_detailLevel & TestLogDetailMasks.AllsEnds) != 0;

    if (!logEveryIteration)
    {
        bool logLastOnly = (m_detailLevel & TestLogDetailMasks.LastEnd) != 0;

        // when only the final completion is wanted, skip the regular iterations.
        if (!logLastOnly || !TestUtils.IsSetupIteration(iteration))
        {
            return;
        }
    }

    TestEvent completeEvent = new TestEvent();
    completeEvent.TestId = testCase.TestId;
    completeEvent.Timestamp = DateTime.UtcNow;
    completeEvent.Iteration = iteration;
    completeEvent.EventType = TestEventType.Completed;

    LogEvent(completeEvent);
}
/// <summary>
/// Dispatches a single test iteration to the handler for the named test case.
/// </summary>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <param name="input">Input value.</param>
/// <returns>Output value.</returns>
private Variant ExecuteTest(TestCaseContext testCaseContext, TestCase testCase, int iteration, Variant input)
{
    switch (testCase.Name)
    {
        // serialization tests.
        case TestCases.ScalarValues:
            return ExecuteTest_ScalarValues(testCaseContext, testCase, iteration, input);

        case TestCases.ArrayValues:
            return ExecuteTest_ArrayValues(testCaseContext, testCase, iteration, input);

        case TestCases.ExtensionObjectValues:
            return ExecuteTest_ExtensionObjectValues(testCaseContext, testCase, iteration, input);

        case TestCases.LargeMessages:
            return ExecuteTest_LargeMessages(testCaseContext, testCase, iteration, input);

        // protocol tests.
        case TestCases.MultipleChannels:
            return ExecuteTest_MultipleChannels(testCaseContext, testCase, iteration, input);

        case TestCases.AutoReconnect:
            return ExecuteTest_AutoReconnect(testCaseContext, testCase, iteration, input);

        // fault tests.
        case TestCases.ServerFault:
            return ExecuteTest_ServerFault(testCaseContext, testCase, iteration, input);

        case TestCases.ServerTimeout:
            return ExecuteTest_ServerTimout(testCaseContext, testCase, iteration, input);

        default:
            throw ServiceResultException.Create(StatusCodes.BadConfigurationError, "Unsupported test case : " + testCase.Name);
    }
}
/// <summary>
/// This method executes the auto-reconnect test case. Requests are issued
/// asynchronously; periodic stack actions cause communication blackouts so
/// the reconnect behaviour can be exercised.
/// </summary>
/// <param name="channelContext">This parameter stores the channel related data.</param>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
private void ExecuteTest_AutoReconnect(
    ChannelContext channelContext,
    TestCaseContext testCaseContext,
    TestCase testCase,
    int iteration)
{
    Variant input;
    Variant output;

    // initialize test case.
    if (iteration == TestCases.TestSetupIteration)
    {
        m_fault = null;
        m_blackouts = new List<BlackoutPeriod>();
        channelContext.ClientSession.OperationTimeout = 30000;

        RequestHeader requestHeader = new RequestHeader();
        requestHeader.Timestamp = DateTime.UtcNow;
        requestHeader.ReturnDiagnostics = (uint)DiagnosticsMasks.All;

        // FIX: 'input' was previously passed without being assigned (a definite
        // assignment error) and the request header built above was discarded
        // (null was sent instead). Setup steps carry no payload, matching the
        // other test cases in this file.
        input = Variant.Null;

        ResponseHeader responseHeader = channelContext.ClientSession.TestStack(
            requestHeader,
            testCase.TestId,
            TestCases.TestSetupIteration,
            input,
            out output);

        return;
    }

    // cleanup: wait for all outstanding asynchronous requests to drain.
    if (iteration == TestCases.TestCleanupIteration)
    {
        do
        {
            lock (m_lock)
            {
                if (m_requestCount == 0)
                {
                    return;
                }
            }

            Thread.Sleep(100);
        }
        while (true);
    }

    // throttle the request rate.
    Thread.Sleep(testCaseContext.RequestInterval);

    // report fault after waiting for all active threads to exit.
    if (m_sequenceToExecute.HaltOnError)
    {
        ServiceResult fault = null;

        lock (m_lock)
        {
            fault = m_fault;
        }

        if (fault != null)
        {
            do
            {
                lock (m_lock)
                {
                    if (m_requestCount == 0)
                    {
                        throw new ServiceResultException(fault);
                    }
                }

                Thread.Sleep(100);
            }
            while (true);
        }
    }

    // begin iteration.
    channelContext.EventLogger.LogStartEvent(testCase, iteration);

    lock (m_lock)
    {
        // set up header.
        RequestHeader requestHeader = new RequestHeader();
        requestHeader.Timestamp = DateTime.UtcNow;
        requestHeader.ReturnDiagnostics = (uint)DiagnosticsMasks.All;

        // generate input data.
        channelContext.Random.Start(
            (int)(testCase.Seed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        input = channelContext.Random.GetVariant();

        // determine processing time in server.
        int processingTime = channelContext.Random.GetInt32Range(0, testCaseContext.MaxResponseDelay);

        Utils.Trace("Iteration {0}; Processing Time {1}.", iteration, processingTime);

        AsyncTestState state = new AsyncTestState(channelContext, testCaseContext, testCase, iteration);
        state.CallData = (DateTime.UtcNow.AddMilliseconds(processingTime).Ticks/TimeSpan.TicksPerMillisecond);

        // set timeout to twice the processing time (never less than the transport delay).
        if (processingTime < testCaseContext.MaxTransportDelay)
        {
            processingTime = testCaseContext.MaxTransportDelay;
        }

        channelContext.ClientSession.OperationTimeout = processingTime*2;

        // periodically trigger a stack action that is expected to cause a fault.
        if ((iteration+1)%testCaseContext.StackEventFrequency == 0)
        {
            StackAction action = TestUtils.GetStackAction(testCaseContext, channelContext.EndpointDescription);

            if (action != null)
            {
                BlackoutPeriod period = new BlackoutPeriod();
                period.Start = (DateTime.UtcNow.Ticks/TimeSpan.TicksPerMillisecond);
                m_blackouts.Add(period);

                Utils.Trace("Iteration {0}; Expecting Fault {1}", iteration, action.ActionType);
            }
        }

        try
        {
            channelContext.ClientSession.BeginTestStack(
                requestHeader,
                testCase.TestId,
                iteration,
                input,
                EndAutoReconnect,
                state);

            m_requestCount++;
        }
        catch (Exception e)
        {
            // check if a fault is expected.
            bool faultExpected = FaultExpected((long)state.CallData, testCaseContext);

            if (faultExpected)
            {
                Utils.Trace("Iteration {0}; Fault Expected {1}", state.Iteration, e.Message);
                state.ChannelContext.EventLogger.LogCompleteEvent(testCase, iteration);
                return;
            }

            channelContext.EventLogger.LogErrorEvent(testCase, iteration, e);

            if (m_sequenceToExecute.HaltOnError)
            {
                // remember only the first unexpected fault.
                if (m_fault == null)
                {
                    m_fault = ServiceResult.Create(e, StatusCodes.BadUnexpectedError, "Could not send request.");
                }
            }
        }
    }
}
/// <summary>
/// This method executes a multi channel test.
/// <see cref="ExecuteTest"/>
/// </summary>
/// <param name="channelContext">This parameter stores the channel related data.</param>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <remarks>
/// The test parameters required for this test case are of the
/// following types:
/// <list type="bullet">
/// <item>MaxStringLength <see cref="TestCaseContext.MaxStringLength"/></item>
/// <item>ChannelsPerServer <see cref="TestCaseContext.ChannelsPerServer"/></item>
/// <item>ServerDetails <see cref="TestCaseContext.ServerDetails"/></item>
/// </list>
/// </remarks>
private void ExecuteTest_MultipleChannels(ChannelContext channelContext, TestCaseContext testCaseContext, TestCase testCase, int iteration)
{
    bool setupStep = TestUtils.IsSetupIteration(iteration);

    if (setupStep)
    {
        channelContext.ClientSession.OperationTimeout = 30000;
    }
    else
    {
        channelContext.EventLogger.LogStartEvent(testCase, iteration);
    }

    RequestHeader requestHeader = new RequestHeader();
    requestHeader.Timestamp = DateTime.UtcNow;
    requestHeader.ReturnDiagnostics = (uint)DiagnosticsMasks.All;

    Variant output;

    // setup/cleanup steps carry no payload and are not verified.
    if (setupStep)
    {
        channelContext.ClientSession.TestStack(
            requestHeader,
            testCase.TestId,
            iteration,
            Variant.Null,
            out output);

        return;
    }

    // generate the request payload from the seed for this iteration.
    channelContext.Random.Start(
        (int)(testCase.Seed + iteration),
        (int)m_sequenceToExecute.RandomDataStepSize,
        testCaseContext);

    Variant input = channelContext.Random.GetScalarVariant(false);

    channelContext.ClientSession.TestStack(
        requestHeader,
        testCase.TestId,
        iteration,
        input,
        out output);

    // regenerate the value the server should have returned and compare.
    channelContext.Random.Start(
        (int)(testCase.ResponseSeed + iteration),
        (int)m_sequenceToExecute.RandomDataStepSize,
        testCaseContext);

    Variant expectedOutput = channelContext.Random.GetScalarVariant(false);

    if (!Compare.CompareVariant(output, expectedOutput))
    {
        throw new ServiceResultException(
            StatusCodes.BadInvalidState,
            Utils.Format("'{0}' is not equal to '{1}'.", output, expectedOutput));
    }

    channelContext.EventLogger.LogCompleteEvent(testCase, iteration);
}
/// <summary>
/// This method executes a test using messages that exceed the maximum message size.
/// </summary>
/// <param name="channelContext">This parameter stores the channel related data.</param>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
private void ExecuteTest_LargeMessages(
    ChannelContext channelContext,
    TestCaseContext testCaseContext,
    TestCase testCase,
    int iteration)
{
    bool isSetupStep = TestUtils.IsSetupIteration(iteration);

    if (!isSetupStep)
    {
        channelContext.EventLogger.LogStartEvent(testCase, iteration);
    }
    else
    {
        channelContext.ClientSession.OperationTimeout = 30000;
    }

    RequestHeader requestHeader = new RequestHeader();
    requestHeader.Timestamp = DateTime.UtcNow;
    requestHeader.ReturnDiagnostics = (uint)DiagnosticsMasks.All;

    Variant input;
    // FIX: initialize so the variable is definitely assigned even when
    // TestStack throws before setting the out parameter (it is read by
    // CompareVariant below on the success path only, but the compiler
    // cannot prove that).
    Variant output = Variant.Null;
    Variant expectedOutput;

    if (isSetupStep)
    {
        // exchange the maximum message sizes with the server.
        testCaseContext.ClientMaxMessageSize = channelContext.MessageContext.MaxMessageSize;

        channelContext.ClientSession.TestStack(
            requestHeader,
            testCase.TestId,
            iteration,
            new Variant(testCaseContext.ClientMaxMessageSize),
            out output);

        if (output.Value is int)
        {
            testCaseContext.ServerMaxMessageSize = (int)output.Value;
        }

        // update the parameters.
        for (int ii = 0; ii < testCase.Parameter.Length; ii++)
        {
            if (testCase.Parameter[ii].Name == TestCases.ServerMaxMessageSize)
            {
                testCase.Parameter[ii].Value = Utils.Format("{0}", testCaseContext.ServerMaxMessageSize);
                continue;
            }

            if (testCase.Parameter[ii].Name == TestCases.ClientMaxMessageSize)
            {
                testCase.Parameter[ii].Value = Utils.Format("{0}", testCaseContext.ClientMaxMessageSize);
                continue;
            }
        }
    }
    else
    {
        channelContext.Random.Start(
            (int)(testCase.Seed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        // pick a request that is either comfortably small or deliberately too large.
        int messageLength = 0;

        if (channelContext.Random.GetRandomBoolean())
        {
            messageLength = channelContext.Random.GetInt32Range(1, testCaseContext.ServerMaxMessageSize/2);
        }
        else
        {
            messageLength = channelContext.Random.GetInt32Range(testCaseContext.ServerMaxMessageSize, testCaseContext.ServerMaxMessageSize*2);
        }

        input = new Variant(channelContext.Random.GetRandomByteString(messageLength));

        ServiceResultException sre = null;

        try
        {
            channelContext.ClientSession.TestStack(
                requestHeader,
                testCase.TestId,
                iteration,
                input,
                out output);
        }
        catch (ServiceResultException e)
        {
            sre = e;
        }

        if (messageLength > testCaseContext.ServerMaxMessageSize)
        {
            // oversized requests must be rejected with BadRequestTooLarge.
            if (sre == null)
            {
                throw new ServiceResultException(
                    StatusCodes.BadInvalidState,
                    Utils.Format("Server did not reject a message that is too large ({0} bytes).", messageLength));
            }
            else
            {
                if (sre.StatusCode != StatusCodes.BadRequestTooLarge)
                {
                    // FIX: corrected the grammar of the error message.
                    throw new ServiceResultException(
                        StatusCodes.BadInvalidState,
                        Utils.Format("Client did not receive a BadRequestTooLarge exception: {0}", sre.StatusCode));
                }
            }
        }
        else
        {
            // determine the response size the server should have produced.
            channelContext.Random.Start(
                (int)(testCase.ResponseSeed + iteration),
                (int)m_sequenceToExecute.RandomDataStepSize,
                testCaseContext);

            if (channelContext.Random.GetRandomBoolean())
            {
                messageLength = channelContext.Random.GetInt32Range(1, testCaseContext.ClientMaxMessageSize/2);
            }
            else
            {
                messageLength = channelContext.Random.GetInt32Range(testCaseContext.ClientMaxMessageSize, testCaseContext.ClientMaxMessageSize*2);
            }

            if (sre == null)
            {
                if (messageLength > testCaseContext.ClientMaxMessageSize)
                {
                    throw new ServiceResultException(
                        StatusCodes.BadInvalidState,
                        Utils.Format("Client received a message that is too large ({0} bytes).", messageLength));
                }

                expectedOutput = new Variant(channelContext.Random.GetRandomByteString(messageLength));

                if (!Compare.CompareVariant(output, expectedOutput))
                {
                    throw new ServiceResultException(
                        StatusCodes.BadInvalidState,
                        Utils.Format("'{0}' is not equal to '{1}'.", output, expectedOutput));
                }
            }
            else
            {
                if (sre.StatusCode != StatusCodes.BadResponseTooLarge)
                {
                    // FIX: corrected the grammar of the error message.
                    throw new ServiceResultException(
                        StatusCodes.BadInvalidState,
                        Utils.Format("Client did not receive a BadResponseTooLarge exception: {0}", sre.StatusCode));
                }
            }
        }
    }

    if (!isSetupStep)
    {
        channelContext.EventLogger.LogCompleteEvent(testCase, iteration);
    }
}
/// <summary>
/// This method executes a server timeout test: the client sets its operation
/// timeout below the server's sleep time and verifies that the stack raises a
/// timeout within a reasonable margin.
/// </summary>
/// <param name="channelContext">This parameter stores the channel related data.</param>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <remarks>
/// The test parameters required for this test case are of the
/// following types:
/// <list type="bullet">
/// <item>MinTimeout <see cref="TestCaseContext.MinTimeout"/></item>
/// <item>MaxTimeout <see cref="TestCaseContext.MaxTimeout"/></item>
/// <item>MaxStringLength <see cref="TestCaseContext.MaxStringLength"/></item>
/// <item>MaxTransportDelay <see cref="TestCaseContext.MaxTransportDelay"/></item>
/// </list>
/// </remarks>
private void ExecuteTest_ServerTimout(ChannelContext channelContext, TestCaseContext testCaseContext, TestCase testCase, int iteration)
{
    // FIX: the original recomputed this value a second time immediately after
    // the first check; the duplicate assignment has been removed.
    bool isSetupStep = TestUtils.IsSetupIteration(iteration);

    if (!isSetupStep)
    {
        channelContext.EventLogger.LogStartEvent(testCase, iteration);
    }

    RequestHeader requestHeader = new RequestHeader();
    requestHeader.Timestamp = DateTime.UtcNow;
    requestHeader.ReturnDiagnostics = (uint)DiagnosticsMasks.All;

    Variant input;
    Variant output;

    if (isSetupStep)
    {
        input = Variant.Null;

        ResponseHeader responseHeader = channelContext.ClientSession.TestStack(
            requestHeader,
            testCase.TestId,
            iteration,
            input,
            out output);
    }
    else
    {
        int serverSleepTime = 0;
        DateTime startTime = DateTime.UtcNow;

        try
        {
            channelContext.Random.Start(
                (int)(testCase.Seed + iteration),
                (int)m_sequenceToExecute.RandomDataStepSize,
                testCaseContext);

            input = channelContext.Random.GetScalarVariant(false);

            // Server's sleep time; the client times out 100ms earlier so a
            // timeout exception is expected.
            serverSleepTime = channelContext.Random.GetTimeout();
            channelContext.ClientSession.OperationTimeout = serverSleepTime-100;

            ResponseHeader responseHeader = channelContext.ClientSession.TestStack(
                requestHeader,
                testCase.TestId,
                iteration,
                input,
                out output);

            channelContext.EventLogger.LogErrorEvent(testCase, iteration, new Exception("Test failed. Expected a TimeoutException, but did not occur."));
            return;
        }
        catch (Exception e)
        {
            ServiceResultException sre = e as ServiceResultException;

            if (e is TimeoutException || (sre != null && sre.StatusCode == StatusCodes.BadRequestTimeout))
            {
                // This indicates that Stack did timeout the request.
            }
            else
            {
                // FIX: 'throw e;' resets the stack trace; rethrow preserves it.
                throw;
            }
        }

        // the timeout must not take significantly longer than the sleep time.
        TimeSpan timeSpent = DateTime.UtcNow.Subtract(startTime);

        if (timeSpent.TotalMilliseconds > serverSleepTime*1.10)
        {
            channelContext.EventLogger.LogErrorEvent(testCase, iteration, new Exception("Test failed. Timeout took too long."));
            return;
        }
    }

    if (!isSetupStep)
    {
        channelContext.EventLogger.LogCompleteEvent(testCase, iteration);
    }
}
/// <summary>
/// Initializes the state object for an asynchronous test event with no exception.
/// </summary>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
public AsyncTestEventState(TestCase testCase, int iteration)
{
    E = null;
    Iteration = iteration;
    Testcase = testCase;
}
/// <summary>
/// Initializes the state that is passed to an asynchronous test callback.
/// </summary>
/// <param name="channelContext">This parameter stores the channel related data.</param>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
public AsyncTestState(
    ChannelContext channelContext,
    TestCaseContext testCaseContext,
    TestCase testCase,
    int iteration)
{
    Iteration = iteration;
    Testcase = testCase;
    TestCaseContext = testCaseContext;
    ChannelContext = channelContext;
}
/// <summary>
/// Checks if the specified test case is the last one to execute
/// </summary>
/// <param name="sequenceToExecute">The sequence being executed.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <returns>True if the test case is the last test case.</returns>
public static bool IsLastTestCase(TestSequence sequenceToExecute, TestCase testCase)
{
    // scan backwards for the last test case that is not skipped.
    for (int ii = sequenceToExecute.TestCase.Length - 1; ii >= 0; ii--)
    {
        if (!sequenceToExecute.TestCase[ii].SkipTest)
        {
            return Object.ReferenceEquals(sequenceToExecute.TestCase[ii], testCase);
        }
    }

    // every test case is skipped; this case should never occur.
    return true;
}
/// <summary>
/// Validates the test case
/// </summary>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
public static void ValidateTestCase(TestCase testCase, int iteration)
{
    if (testCase.Name.Length == 0)
    {
        throw new Exception("Invalid test case name");
    }

    bool hasStart = testCase.StartSpecified;
    bool hasCount = testCase.CountSpecified;

    if (hasStart && testCase.Start < 0)
    {
        throw new Exception("Start value is less than 0 for test case.");
    }

    if (hasCount && testCase.Count < 0)
    {
        throw new Exception("Iteration value is less than 0 for test case.");
    }

    // NOTE(review): only the setup sentinel is excluded from the lower-bound
    // check and only the cleanup sentinel from the upper-bound check —
    // presumably the other sentinel can never trip the respective check;
    // confirm against the TestCases sentinel values.
    if (hasStart && testCase.Start > iteration && iteration != TestCases.TestSetupIteration)
    {
        throw new Exception("Iteration is less than start for test case.");
    }

    if (hasCount && testCase.Count < iteration && iteration != TestCases.TestCleanupIteration)
    {
        throw new Exception("Iteration is greater than count for test case.");
    }
}
/// <summary>
/// Logs stack events of an iteration
/// </summary>
/// <param name="stackEvents">List of stack events.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
public void LogStackEvents(List<StackEvent> stackEvents, TestCase testCase, int iteration)
{
    // nothing to log for null/empty lists.
    if (stackEvents == null || stackEvents.Count == 0)
    {
        return;
    }

    // the setup/cleanup sentinels are never logged.
    if (iteration == TestCases.TestSetupIteration || iteration == TestCases.TestCleanupIteration)
    {
        return;
    }

    TestEvent testEvent = new TestEvent();
    testEvent.TestId = testCase.TestId;
    testEvent.Iteration = iteration;
    testEvent.EventType = TestEventType.StackEvents;
    testEvent.Timestamp = DateTime.UtcNow;
    testEvent.StackEvents = stackEvents.ToArray();

    LogEvent(testEvent);
}
/// <summary>
/// Logs an event when an error occurs during a test case.
/// </summary>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <param name="e">Exception to be logged.</param>
public void LogErrorEvent(TestCase testCase, int iteration, Exception e)
{
    // skip when error logging is disabled by the detail level.
    if ((m_detailLevel & TestLogDetailMasks.Errors) == 0)
    {
        return;
    }

    TestEvent errorEvent = new TestEvent();
    errorEvent.TestId = testCase.TestId;
    errorEvent.Timestamp = DateTime.UtcNow;
    errorEvent.Iteration = iteration;
    errorEvent.EventType = TestEventType.Failed;
    errorEvent.Details = new ServiceResult(e).ToLongString();

    LogEvent(errorEvent);
}
/// <summary>
/// Validates the test execution context and checks if the test parameters are within the allowed range
/// </summary>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
private void ValidateTestContext(TestCaseContext testCaseContext, TestCase testCase)
{
    // only the server timeout test currently has parameters that require a
    // range check; every other test case (including AutoReconnect) has
    // nothing to validate.
    if (testCase.Name != TestCases.ServerTimeout)
    {
        return;
    }

    if (testCaseContext.MinTimeout >= testCaseContext.MaxTimeout)
    {
        throw new Exception("The test parameter MinTimeout should be less than test parameter MaxTimeout");
    }
}
/// <summary>
/// Dispatches a single test iteration to the handler for the named test case.
/// </summary>
/// <param name="channelContext">This parameter stores the channel related data.</param>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
private void ExecuteTest(ChannelContext channelContext, TestCaseContext testCaseContext, TestCase testCase, int iteration)
{
    string name = testCase.Name;

    // serialization tests.
    if (name == TestCases.ScalarValues)
    {
        ExecuteTest_ScalarValues(channelContext, testCaseContext, testCase, iteration);
    }
    else if (name == TestCases.ArrayValues)
    {
        ExecuteTest_ArrayValues(channelContext, testCaseContext, testCase, iteration);
    }
    else if (name == TestCases.ExtensionObjectValues)
    {
        ExecuteTest_ExtensionObjectValues(channelContext, testCaseContext, testCase, iteration);
    }
    else if (name == TestCases.BuiltInTypes)
    {
        ExecuteTest_BuiltInTypes(channelContext, testCaseContext, testCase, iteration);
    }
    else if (name == TestCases.LargeMessages)
    {
        ExecuteTest_LargeMessages(channelContext, testCaseContext, testCase, iteration);
    }
    // protocol tests.
    else if (name == TestCases.MultipleChannels)
    {
        ExecuteTest_MultipleChannels(channelContext, testCaseContext, testCase, iteration);
    }
    else if (name == TestCases.AutoReconnect)
    {
        ExecuteTest_AutoReconnect(channelContext, testCaseContext, testCase, iteration);
    }
    // fault tests.
    else if (name == TestCases.ServerFault)
    {
        ExecuteTest_ServerFault(channelContext, testCaseContext, testCase, iteration);
    }
    else if (name == TestCases.ServerTimeout)
    {
        ExecuteTest_ServerTimout(channelContext, testCaseContext, testCase, iteration);
    }
    else
    {
        throw ServiceResultException.Create(StatusCodes.BadConfigurationError, "Unsupported test case : " + testCase.Name);
    }
}
/// <summary>
/// Initializes the state object for an asynchronous test event that carries an exception.
/// </summary>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <param name="e">Exception</param>
public AsyncTestEventState(TestCase testCase, int iteration, Exception e)
{
    E = e;
    Iteration = iteration;
    Testcase = testCase;
}
/// <summary>
/// This method executes a test using messages that exceed the maximum message size.
/// This is the server-side handler: it validates the received payload against
/// the deterministic random stream and generates the response payload.
/// </summary>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <param name="input">Input value received from the client.</param>
/// <returns>The response payload (on setup: the server's maximum message size).</returns>
private Variant ExecuteTest_LargeMessages(
    TestCaseContext testCaseContext,
    TestCase testCase,
    int iteration,
    Variant input)
{
    bool isSetupStep = TestUtils.IsSetupIteration(iteration);

    // No verification for the input is required.
    if (isSetupStep)
    {
        // setup exchanges the maximum message sizes with the client.
        testCaseContext.ServerMaxMessageSize = MessageContext.MaxMessageSize;

        if (input.Value is int)
        {
            testCaseContext.ClientMaxMessageSize = (int)input.Value;
        }

        // update the parameters.
        for (int ii = 0; ii < testCase.Parameter.Length; ii++)
        {
            if (testCase.Parameter[ii].Name == TestCases.ServerMaxMessageSize)
            {
                testCase.Parameter[ii].Value = Utils.Format("{0}", testCaseContext.ServerMaxMessageSize);
                continue;
            }

            if (testCase.Parameter[ii].Name == TestCases.ClientMaxMessageSize)
            {
                testCase.Parameter[ii].Value = Utils.Format("{0}", testCaseContext.ClientMaxMessageSize);
                continue;
            }
        }

        // report the server's limit back to the client.
        return new Variant(testCaseContext.ServerMaxMessageSize);
    }

    m_logger.LogStartEvent(testCase, iteration);

    try
    {
        // replay the client's random stream to reconstruct the expected input.
        m_random.Start(
            (int)(testCase.Seed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        // the length is drawn from the same stream as on the client: either
        // comfortably small or deliberately over the server limit.
        int messageLength = 0;

        if (m_random.GetRandomBoolean())
        {
            messageLength = m_random.GetInt32Range(1, testCaseContext.ServerMaxMessageSize/2);
        }
        else
        {
            messageLength = m_random.GetInt32Range(testCaseContext.ServerMaxMessageSize, testCaseContext.ServerMaxMessageSize*2);
        }

        Variant expectedInput = new Variant(m_random.GetRandomByteString(messageLength));

        // an oversized request should have been rejected by the transport
        // before reaching this handler.
        if (messageLength > testCaseContext.ServerMaxMessageSize)
        {
            throw new ServiceResultException(
                StatusCodes.BadInvalidState,
                Utils.Format("Server received a message that is too large ({0} bytes).", messageLength));
        }

        try
        {
            if (!Compare.CompareVariant(input, expectedInput))
            {
                throw new ServiceResultException(
                    StatusCodes.BadInvalidState,
                    Utils.Format("'{0}' is not equal to '{1}'.", input, expectedInput));
            }
        }
        catch (Exception e)
        {
            // re-wrap so the caller always sees the comparison failure details.
            throw ServiceResultException.Create(
                StatusCodes.BadInvalidState,
                e,
                "'{0}' is not equal to '{1}'.",
                input,
                expectedInput);
        }

        // generate the response payload from the response seed; it may
        // deliberately exceed the client's limit.
        m_random.Start((int)(
            testCase.ResponseSeed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        if (m_random.GetRandomBoolean())
        {
            messageLength = m_random.GetInt32Range(1, testCaseContext.ClientMaxMessageSize/2);
        }
        else
        {
            messageLength = m_random.GetInt32Range(testCaseContext.ClientMaxMessageSize, testCaseContext.ClientMaxMessageSize*2);
        }

        return new Variant(m_random.GetRandomByteString(messageLength));
    }
    finally
    {
        m_logger.LogCompleteEvent(testCase, iteration);
    }
}
/// <summary>
/// Populates the test parameters information into a TestCaseContext object
/// </summary>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <returns>TestCaseContext object</returns>
public static TestCaseContext GetExecutionContext(TestCase testCase)
{
    TestCaseContext context = new TestCaseContext();

    // each parameter overrides the context default only when present (non-zero).
    int intValue = TestUtils.GetTestParameterIntValue(TestCases.MaxStringLength, testCase.Parameter);
    if (intValue != 0)
    {
        context.MaxStringLength = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.MaxArrayLength, testCase.Parameter);
    if (intValue != 0)
    {
        context.MaxArrayLength = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.ServerMaxMessageSize, testCase.Parameter);
    if (intValue != 0)
    {
        context.ServerMaxMessageSize = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.ClientMaxMessageSize, testCase.Parameter);
    if (intValue != 0)
    {
        context.ClientMaxMessageSize = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.MaxDepth, testCase.Parameter);
    if (intValue != 0)
    {
        context.MaxDepth = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.MinTimeout, testCase.Parameter);
    if (intValue != 0)
    {
        context.MinTimeout = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.MaxTimeout, testCase.Parameter);
    if (intValue != 0)
    {
        context.MaxTimeout = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.MaxResponseDelay, testCase.Parameter);
    if (intValue != 0)
    {
        context.MaxResponseDelay = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.MaxTransportDelay, testCase.Parameter);
    if (intValue != 0)
    {
        context.MaxTransportDelay = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.RequestInterval, testCase.Parameter);
    if (intValue != 0)
    {
        context.RequestInterval = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.StackEventType, testCase.Parameter);
    if (intValue != 0)
    {
        context.StackEventType = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.StackEventFrequency, testCase.Parameter);
    if (intValue != 0)
    {
        context.StackEventFrequency = intValue;
    }

    intValue = TestUtils.GetTestParameterIntValue(TestCases.MaxRecoveryTime, testCase.Parameter);
    if (intValue != 0)
    {
        context.MaxRecoveryTime = intValue;
    }

    // the multi-channel test has additional parameters of its own.
    if (testCase.Name == TestCases.MultipleChannels)
    {
        intValue = TestUtils.GetTestParameterIntValue(TestCases.ChannelsPerServer, testCase.Parameter);
        if (intValue != 0)
        {
            context.ChannelsPerServer = intValue;
        }

        List<ServerDetail> serverDetails = TestUtils.GetTestParameterServerDetails(testCase.Parameter);
        if (serverDetails != null)
        {
            context.ServerDetails = serverDetails;
        }
    }

    return context;
}
/// <summary>
/// This method executes a multi channel test (server-side handler: validates
/// the received payload and generates the response payload).
/// </summary>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <param name="input">Input value.</param>
/// <returns>Input variant.</returns>
/// <remarks>
/// The test parameters required for this test case are of the
/// following types:
/// <list type="bullet">
/// <item>MaxStringLength <see cref="TestCaseContext.MaxStringLength"/></item>
/// <item>ChannelsPerServer <see cref="TestCaseContext.ChannelsPerServer"/></item>
/// <item>ServerDetails <see cref="TestCaseContext.ServerDetails"/></item>
/// </list>
/// </remarks>
private Variant ExecuteTest_MultipleChannels(TestCaseContext testCaseContext, TestCase testCase, int iteration, Variant input)
{
    // No verification for the input is required on setup/cleanup steps.
    if (TestUtils.IsSetupIteration(iteration))
    {
        return Variant.Null;
    }

    m_logger.LogStartEvent(testCase, iteration);

    try
    {
        Variant expectedInput;

        // the random source is shared across channels; serialize access to it.
        lock (m_random)
        {
            m_random.Start(
                (int)(testCase.Seed + iteration),
                (int)m_sequenceToExecute.RandomDataStepSize,
                testCaseContext);

            expectedInput = m_random.GetScalarVariant(false);
        }

        try
        {
            if (!Compare.CompareVariant(input, expectedInput))
            {
                throw new ServiceResultException(
                    StatusCodes.BadInvalidState,
                    Utils.Format("'{0}' is not equal to '{1}'.", input, expectedInput));
            }
        }
        catch (Exception e)
        {
            // re-wrap so the caller always sees the comparison failure details.
            throw ServiceResultException.Create(
                StatusCodes.BadInvalidState,
                e,
                "'{0}' is not equal to '{1}'.",
                input,
                expectedInput);
        }

        lock (m_random)
        {
            m_random.Start((int)(
                testCase.ResponseSeed + iteration),
                (int)m_sequenceToExecute.RandomDataStepSize,
                testCaseContext);

            return m_random.GetScalarVariant(false);
        }
    }
    finally
    {
        m_logger.LogCompleteEvent(testCase, iteration);
    }
}
/// <summary>
/// This method executes a builtin types test.
/// </summary>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <param name="input">Input value.</param>
/// <returns>A variant of the type scalar value.</returns>
/// <remarks>
/// The test parameters required for this test case are of the
/// following types:
/// <list type="bullet">
/// <item>MaxStringLength <see cref="TestCaseContext.MaxStringLength"/></item>
/// </list>
/// </remarks>
private CompositeTestType ExecuteTest_BuiltInTypes(TestCaseContext testCaseContext, TestCase testCase, int iteration, CompositeTestType input)
{
    bool setupIteration = TestUtils.IsSetupIteration(iteration);

    if (!setupIteration)
    {
        m_logger.LogStartEvent(testCase, iteration);
    }

    try
    {
        if (setupIteration)
        {
            // No verification for the input is required.
            return null;
        }

        // regenerate the value the client should have sent for this iteration.
        m_random.Start(
            (int)(testCase.Seed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        CompositeTestType expectedInput = m_random.GetCompositeTestType();

        try
        {
            if (!Compare.CompareCompositeTestType(input, expectedInput))
            {
                throw new ServiceResultException(
                    StatusCodes.BadInvalidState,
                    Utils.Format("'{0}' is not equal to '{1}'.", input, expectedInput));
            }
        }
        catch (Exception e)
        {
            // any comparison failure (including the exception thrown above) is
            // re-wrapped so callers always see a BadInvalidState service result.
            throw ServiceResultException.Create(
                StatusCodes.BadInvalidState,
                e,
                "'{0}' is not equal to '{1}'.",
                input,
                expectedInput);
        }

        // produce the deterministic response value for this iteration.
        m_random.Start(
            (int)(testCase.ResponseSeed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        return m_random.GetCompositeTestType();
    }
    finally
    {
        if (!setupIteration)
        {
            m_logger.LogCompleteEvent(testCase, iteration);
        }
    }
}
/// <summary>
/// This method executes a Server Fault test.
/// </summary>
/// <param name="channelContext">This parameter stores the channel related data.</param>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <remarks>
/// The test parameters required for this test case are of the
/// following types:
/// <list type="bullet">
/// <item>MaxStringLength <see cref="TestCaseContext.MaxStringLength"/></item>
/// </list>
/// </remarks>
private void ExecuteTest_ServerFault(ChannelContext channelContext, TestCaseContext testCaseContext, TestCase testCase, int iteration)
{
    // FIX: original read "bool isSetupStep = isSetupStep = ..." - a redundant
    // double assignment; behavior was unchanged but the code was misleading.
    bool isSetupStep = TestUtils.IsSetupIteration(iteration);

    if (!isSetupStep)
    {
        channelContext.EventLogger.LogStartEvent(testCase, iteration);
    }
    else
    {
        // give the setup step a generous timeout.
        channelContext.ClientSession.OperationTimeout = 30000;
    }

    RequestHeader requestHeader = new RequestHeader();
    requestHeader.Timestamp = DateTime.UtcNow;
    requestHeader.ReturnDiagnostics = (uint)DiagnosticsMasks.All;

    Variant input;
    Variant output;
    ResponseHeader responseHeader;

    if (isSetupStep)
    {
        // setup step only pings the server; no fault is expected.
        input = Variant.Null;

        responseHeader = channelContext.ClientSession.TestStack(
            requestHeader,
            testCase.TestId,
            iteration,
            input,
            out output);
    }
    else
    {
        // generate the request payload for this iteration.
        channelContext.Random.Start(
            (int)(testCase.Seed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        input = channelContext.Random.GetScalarVariant(false);

        // the server generates the fault message from the response seed,
        // so the same generator sequence predicts the exception text.
        channelContext.Random.Start(
            (int)(testCase.ResponseSeed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        string expectedExceptionMessage = channelContext.Random.GetRandomString();

        try
        {
            responseHeader = channelContext.ClientSession.TestStack(
                requestHeader,
                testCase.TestId,
                iteration,
                input,
                out output);

            // a normal return means the server failed to raise the fault.
            channelContext.EventLogger.LogErrorEvent(testCase, iteration, new Exception("Test failed. Expected a ServiceResultException."));
        }
        catch (ServiceResultException sre)
        {
            // the fault must carry the expected status code and message.
            if (sre.StatusCode != StatusCodes.BadMethodInvalid || sre.LocalizedText != expectedExceptionMessage)
            {
                channelContext.EventLogger.LogErrorEvent(testCase, iteration, sre);
            }
        }
    }

    if (!isSetupStep)
    {
        channelContext.EventLogger.LogCompleteEvent(testCase, iteration);
    }
}
/// <summary>
/// This method executes a Server Fault test.
/// </summary>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <param name="input">Input value.</param>
/// <returns>A variant of the type scalar value.</returns>
/// <remarks>
/// The test parameters required for this test case are of the
/// following types:
/// <list type="bullet">
/// <item>MaxStringLength <see cref="TestCaseContext.MaxStringLength"/></item>
/// </list>
/// </remarks>
private Variant ExecuteTest_ServerFault(TestCaseContext testCaseContext, TestCase testCase, int iteration, Variant input)
{
    bool setupIteration = TestUtils.IsSetupIteration(iteration);

    if (!setupIteration)
    {
        m_logger.LogStartEvent(testCase, iteration);
    }

    try
    {
        if (setupIteration)
        {
            // No verification for the input is required.
            return Variant.Null;
        }

        // regenerate the value the client should have sent for this iteration.
        m_random.Start(
            (int)(testCase.Seed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        Variant expectedInput = m_random.GetScalarVariant(false);

        try
        {
            if (!Compare.CompareVariant(input, expectedInput))
            {
                throw new ServiceResultException(
                    StatusCodes.BadInvalidState,
                    Utils.Format("'{0}' is not equal to '{1}'.", input, expectedInput));
            }
        }
        catch (Exception e)
        {
            // any comparison failure (including the exception thrown above) is
            // re-wrapped so callers always see a BadInvalidState service result.
            throw ServiceResultException.Create(
                StatusCodes.BadInvalidState,
                e,
                "'{0}' is not equal to '{1}'.",
                input,
                expectedInput);
        }

        // deliberately fault: the response seed determines the message text
        // so the client can predict and verify it.
        m_random.Start(
            (int)(testCase.ResponseSeed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        string exceptionMessage = m_random.GetRandomString();

        throw new ServiceResultException(StatusCodes.BadMethodInvalid, exceptionMessage);
    }
    finally
    {
        if (!setupIteration)
        {
            m_logger.LogCompleteEvent(testCase, iteration);
        }
    }
}
/// <summary>
/// This method executes an auto reconnect test iteration on the server side.
/// </summary>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <param name="input">Input value.</param>
/// <returns>The deterministic output variant for this iteration.</returns>
private Variant ExecuteTest_AutoReconnect(TestCaseContext testCaseContext, TestCase testCase, int iteration, Variant input)
{
    if (TestUtils.IsSetupIteration(iteration))
    {
        SetEventSink();
        return Variant.Null;
    }

    // get the expected input.
    Variant expectedInput;
    int processingTime = 0;

    lock (m_random)
    {
        Utils.Trace("Iteration {0}; Server Received", iteration);

        // compare actual to expected input.
        m_random.Start(
            (int)(testCase.Seed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        expectedInput = m_random.GetVariant();

        if (!Compare.CompareVariant(input, expectedInput))
        {
            // FIX: original format string used "{0}" for both values, so the
            // expected value was never shown; second placeholder is now "{1}".
            throw ServiceResultException.Create(
                StatusCodes.BadUnexpectedError,
                "Server did not receive expected input\r\nActual = {0}\r\nExpected = {1}",
                input,
                expectedInput);
        }

        // determine processing time in server.
        processingTime = m_random.GetInt32Range(0, testCaseContext.MaxResponseDelay);

        // periodically disturb the stack to exercise reconnect behavior.
        if ((iteration + 1) % testCaseContext.StackEventFrequency == 0)
        {
            if (testCaseContext.StackEventType == 4)
            {
                InterruptListener(testCaseContext.StackEventFrequency * testCaseContext.RequestInterval / 2);
            }

            StackAction action = TestUtils.GetStackAction(testCaseContext, SecureChannelContext.Current.EndpointDescription);

            if (action != null)
            {
                QueueStackAction(action);
            }
        }
    }

    // wait.
    Thread.Sleep(processingTime);

    // generate and return the output.
    lock (m_random)
    {
        m_random.Start(
            (int)(testCase.ResponseSeed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        return m_random.GetVariant();
    }
}
/// <summary>
/// This method executes a test.
/// </summary>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <param name="input">Input value.</param>
/// <returns>Output value.</returns>
private CompositeTestType ExecuteTest(TestCaseContext testCaseContext, TestCase testCase, int iteration, CompositeTestType input)
{
    // dispatch to the handler for the named test case.
    if (testCase.Name == TestCases.BuiltInTypes)
    {
        return ExecuteTest_BuiltInTypes(testCaseContext, testCase, iteration, input);
    }

    throw ServiceResultException.Create(StatusCodes.BadConfigurationError, "Unsupported test case : " + testCase.Name);
}
/// <summary>
/// This method executes a scalar values test.
/// </summary>
/// <param name="channelContext">This parameter stores the channel related data.</param>
/// <param name="testCaseContext">This parameter stores the test case parameter values.</param>
/// <param name="testCase">This parameter stores the test case related data.</param>
/// <param name="iteration">This parameter stores the current iteration number.</param>
/// <remarks>
/// The test parameters required for this test case are of the
/// following types:
/// <list type="bullet">
/// <item>MaxStringLength <see cref="TestCaseContext.MaxStringLength"/></item>
/// </list>
/// </remarks>
private void ExecuteTest_ScalarValues(ChannelContext channelContext, TestCaseContext testCaseContext, TestCase testCase, int iteration)
{
    bool setupIteration = TestUtils.IsSetupIteration(iteration);

    if (!setupIteration)
    {
        channelContext.EventLogger.LogStartEvent(testCase, iteration);
    }
    else
    {
        // give the setup step a generous timeout.
        channelContext.ClientSession.OperationTimeout = 30000;
    }

    RequestHeader requestHeader = new RequestHeader();
    requestHeader.Timestamp = DateTime.UtcNow;
    requestHeader.ReturnDiagnostics = (uint)DiagnosticsMasks.All;

    Variant input;
    Variant output;
    Variant expectedOutput;
    ResponseHeader responseHeader;

    if (setupIteration)
    {
        // setup step only pings the server with a null payload.
        input = Variant.Null;

        responseHeader = channelContext.ClientSession.TestStack(
            requestHeader,
            testCase.TestId,
            iteration,
            input,
            out output);
    }
    else
    {
        // generate the request payload for this iteration.
        channelContext.Random.Start(
            (int)(testCase.Seed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        input = channelContext.Random.GetScalarVariant(false);

        responseHeader = channelContext.ClientSession.TestStack(
            requestHeader,
            testCase.TestId,
            iteration,
            input,
            out output);

        // the response seed predicts the value the server must return.
        channelContext.Random.Start(
            (int)(testCase.ResponseSeed + iteration),
            (int)m_sequenceToExecute.RandomDataStepSize,
            testCaseContext);

        expectedOutput = channelContext.Random.GetScalarVariant(false);

        if (!Compare.CompareVariant(output, expectedOutput))
        {
            throw new ServiceResultException(
                StatusCodes.BadInvalidState,
                Utils.Format("'{0}' is not equal to '{1}'.", output, expectedOutput));
        }
    }

    if (!setupIteration)
    {
        channelContext.EventLogger.LogCompleteEvent(testCase, iteration);
    }
}