public override ITestContext Arrange(TestArgs testArgs)
{
    _projectConfigsStorageHelper.ClearAllProjects();

    ProjectConfigItem projectConfigItem1 = new ProjectConfigItem() { Id = "aa1", Description = "Desc1" };
    _projectConfigsStorageHelper.AppendProject(projectConfigItem1);

    ProjectConfigItem projectConfigItem2 = new ProjectConfigItem() { Id = "Id2", Description = "aa2" };
    _projectConfigsStorageHelper.AppendProject(projectConfigItem2);

    ProjectConfigItem projectConfigItem3 = new ProjectConfigItem() { Id = "Id3", Description = "Desc3" };
    _projectConfigsStorageHelper.AppendProject(projectConfigItem3);

    ITestContext testContext = new ProcessTestContext(null);
    return testContext;
}
public void PopulatedListMonadLaw(Law<TestArgs<ListF, string, int, long>> law)
{
    var args = TestArgs.Default<ListF>();
    args.LiftedA = new List<string> { args.A }.K();
    args.LiftedB = new List<int> { args.B }.K();
    args.LiftedFuncAtoB = new List<Func<string, int>> { args.FuncAtoB }.K();
    args.LiftedFuncBtoC = new List<Func<int, long>> { args.FuncBtoC }.K();
    args.FuncAtoLiftedB = a => new List<int> { args.FuncAtoB(a) }.K();
    args.FuncBtoLiftedC = b => new List<long> { args.FuncBtoC(b) }.K();

    law.TestLaw(args).ShouldBe(true);
}
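// For orientation (hedged: the law implementations live behind the Law<...>
// test-case source, not in this snippet): with the single-element lists above,
// a monad law such as left identity reduces to checking that lifting args.A and
// then binding args.FuncAtoLiftedB yields the same list as args.FuncAtoLiftedB(args.A).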
public override ITestContext Arrange(TestArgs testArgs)
{
    ITestContext testContext = _projectConfigWithDBArrangeAndAssert.Arrange(testArgs, false, DBBackupFileType.FinalState_DeliveryEnv, ScriptFilesStateType.RepeatableChanged);
    return testContext;
}
public override IEnumerable<string> GetExecutableArguments()
{
    foreach (var arg in _mcopts.GetCommandLineArgs())
    {
        yield return arg;
    }

    // Add the args for specifying the unit test method to run
    yield return Path.GetFileName(TestCase.AssemblyLocation);
    yield return "/test:" + TestCase.FullTestMethodName;

    // Add the test case args
    TestArgs testArgs = TestCase.GetTestArgs();
    if (testArgs != null)
    {
        foreach (var tcArg in testArgs.Values)
        {
            yield return "/arg:" + tcArg;
        }
    }

    // Set the directory for output files
    yield return "/outputprefix:" + OutputPathPrefix;
}
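// Illustrative result of the iterator above (all values hypothetical): after the
// _mcopts arguments it would yield, e.g.,
//   "MyTests.dll", "/test:MyNamespace.Fixture.Method",
//   "/arg:42", "/arg:foo",        // one entry per test-case argument
//   "/outputprefix:C:\out\run1"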
public override ITestContext Arrange(TestArgs testArgs)
{
    ProjectConfigItem projectConfig = new ProjectConfigItem()
    {
        Id = IntegrationTestsConsts.DummyProjectConfigValid.Id,
        Description = IntegrationTestsConsts.DummyProjectConfigValid.Description,
        DBType = IntegrationTestsConsts.DummyProjectConfigValid.DBType,
        Server = IntegrationTestsConsts.DummyProjectConfigValid.Server,
        DBName = IntegrationTestsConsts.DummyProjectConfigValid.DBName,
        Username = IntegrationTestsConsts.DummyProjectConfigValid.Username,
        Password = IntegrationTestsConsts.DummyProjectConfigValid.Password,
        BackupFolderPath = IntegrationTestsConsts.DummyProjectConfigValid.BackupFolderPath,
        DevEnvironment = false,
        DevScriptsBaseFolderPath = IntegrationTestsConsts.DummyProjectConfigValid.DevScriptsBaseFolderPath,
        DeployArtifactFolderPath = IntegrationTestsConsts.DummyProjectConfigValid.DeployArtifactFolderPath,
        DeliveryArtifactFolderPath = IntegrationTestsConsts.DummyProjectConfigValid.DeliveryArtifactFolderPath,
    };
    _projectConfigsStorageHelper.PrepareTestProject(projectConfig);

    ProjectConfigTestArgs overrideTestArgs = new ProjectConfigTestArgs(projectConfig);
    return new ProcessTestContext(overrideTestArgs);
}
public async Task TestAsync(int _class, int _struct, CancellationToken cancellationToken = default(CancellationToken))
{
    await OutputProtocol.WriteMessageBeginAsync(new TMessage("Test", TMessageType.Call, SeqId), cancellationToken);

    var args = new TestArgs();
    args._class = _class;
    args._struct = _struct;

    await args.WriteAsync(OutputProtocol, cancellationToken);
    await OutputProtocol.WriteMessageEndAsync(cancellationToken);
    await OutputProtocol.Transport.FlushAsync(cancellationToken);

    var msg = await InputProtocol.ReadMessageBeginAsync(cancellationToken);
    if (msg.Type == TMessageType.Exception)
    {
        var x = await TApplicationException.ReadAsync(InputProtocol, cancellationToken);
        await InputProtocol.ReadMessageEndAsync(cancellationToken);
        throw x;
    }

    var result = new TestResult();
    await result.ReadAsync(InputProtocol, cancellationToken);
    await InputProtocol.ReadMessageEndAsync(cancellationToken);
    return;
}
private IEnumerable<RunTestCaseInfo> GetRunTestTaskInfos(IEnumerable<MCutTestEntity> tests)
{
    return tests.SelectMany((test, tidx) =>
    {
        // Get the contexts
        var contexts = test.GetContexts().ToArray();

        // This isn't needed yet because the GetContexts method should always return at least the default context instance
        //if (contexts.Length == 0)
        //    contexts = new TestContextEntityBase[] { null };

        // Get the argument sets
        TestArgs[] argsSets = test.GetTestArgsSets().ToArray();

        // Need to make sure there is at least one arg instance for tests that don't have parameters
        if (argsSets.Length == 0 && !test.TestSource.HasParameters)
        {
            argsSets = new TestArgs[] { null };
        }

        // Get the cross product of contexts with arg sets
        return from ctx in contexts
               from args in argsSets
               select new RunTestCaseInfo { Test = test, TestIdx = tidx, Context = ctx, Args = args };
    });
}
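// Worked example for the cross product above (hypothetical counts): a test with
// 2 contexts and 3 argument sets expands to 2 x 3 = 6 RunTestCaseInfo instances,
// each pairing one context with one argument set.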
private static void RunScriptOnAllCallback(FileSelectionModel model)
{
    if (model == null)
    {
        return;
    }

    TestArgs args = model.Args as TestArgs;
    if (args == null)
    {
        throw new Exception("Args must be of type TestArgs!");
    }

    args.Files = model.Files;
    args.Pattern.SelectedFiles = model.Files;

    ThreadPool.QueueUserWorkItem(d =>
    {
        var sw = new Stopwatch("Saving Pattern Selected Files");

        // Remember which files were selected for the next Run - Sara
        var m = new PatternModel() { Item = args.Pattern, Mode = InputMode.Edit };
        PatternCRUDService.SavePattern(m);

        sw.Stop(0);
    });

    Run(args);
}
public override ITestContext Arrange(TestArgs testArgs)
{
    ITestContext testContext = _projectConfigWithDBArrangeAndAssert.Arrange(testArgs, true, DBBackupFileType.MiddleState, ScriptFilesStateType.IncrementalChanged);
    return testContext;
}
public async Task Test_ProcessAsync(int seqid, TProtocol iprot, TProtocol oprot, CancellationToken cancellationToken)
{
    var args = new TestArgs();
    await args.ReadAsync(iprot, cancellationToken);
    await iprot.ReadMessageEndAsync(cancellationToken);

    var result = new TestResult();
    try
    {
        await _iAsync.TestAsync(args._class, args._struct, cancellationToken);
        await oprot.WriteMessageBeginAsync(new TMessage("Test", TMessageType.Reply, seqid), cancellationToken);
        await result.WriteAsync(oprot, cancellationToken);
    }
    catch (TTransportException)
    {
        throw;
    }
    catch (Exception ex)
    {
        Console.Error.WriteLine("Error occurred in processor:");
        Console.Error.WriteLine(ex.ToString());

        var x = new TApplicationException(TApplicationException.ExceptionType.InternalError, "Internal error.");
        await oprot.WriteMessageBeginAsync(new TMessage("Test", TMessageType.Exception, seqid), cancellationToken);
        await x.WriteAsync(oprot, cancellationToken);
    }

    await oprot.WriteMessageEndAsync(cancellationToken);
    await oprot.Transport.FlushAsync(cancellationToken);
}
/// <summary>
/// Iterates over the test data services from <see cref="BaseOrmIntegratedTest"/> and invokes the supplied delegate
/// for each data service, passing it an <see cref="HttpClient"/> for making requests to the OData service.
/// </summary>
/// <param name="action">The action to perform for each data service from <see cref="BaseOrmIntegratedTest"/>.</param>
public virtual void ActODataService(Action<TestArgs> action)
{
    if (action == null)
    {
        return;
    }

    foreach (IDataService dataService in DataServices)
    {
        var container = new UnityContainer();
        container.RegisterInstance(dataService);

        using (var config = new HttpConfiguration())
        using (var server = new HttpServer(config))
        using (var client = new HttpClient(server, false) { BaseAddress = new Uri("http://localhost/odata/") })
        {
            config.EnableCors(new EnableCorsAttribute("*", "*", "*"));
            config.DependencyResolver = new UnityDependencyResolver(container);

            var token = config.MapODataServiceDataObjectRoute(_builder);
            var args = new TestArgs { UnityContainer = container, DataService = dataService, HttpClient = client, Token = token };
            action(args);
        }
    }
}
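// A minimal usage sketch for ActODataService (hypothetical: the [Fact] attribute,
// Assert.True, and the test name assume an xUnit-style test class; only the
// ActODataService signature and TestArgs.HttpClient come from the code above).
[Fact]
public void Metadata_IsServed()
{
    ActODataService(args =>
    {
        // The client's BaseAddress is http://localhost/odata/, so relative URIs work.
        using (HttpResponseMessage response = args.HttpClient.GetAsync("$metadata").Result)
        {
            Assert.True(response.IsSuccessStatusCode);
        }
    });
}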
public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(_devEnv_Restore_Recreate_API.Arrange(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}
public override ITestContext Arrange(TestArgs testArgs)
{
    ITestContext testContext = _projectConfigWithDBArrangeAndAssert.Arrange(testArgs, false, DBBackupFileType.MiddleState, ScriptFilesStateType.ValidScripts);
    ClearDeployFiles(testContext);
    return testContext;
}
public override ITestContext Arrange(TestArgs testArgs)
{
    _projectConfigsDirectoriesCleaner.ClearAutoCreatedFolders();
    _dbHandler.RestoreDB(IntegrationTestsConsts.DummyProjectConfigValid.DBConnectionInfo, DBBackupFileType.EmptyDB);
    return new ProcessTestContext(testArgs);
}
internal static void Run(TestArgs args)
{
    ThreadPool.QueueUserWorkItem(m =>
    {
        IDEService.Test(args);
        args.ResultCallback?.Invoke(args.Result);
    });
}
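// Hypothetical caller sketch for Run: ResultCallback and Result are the TestArgs
// members the method itself touches; the console message is an assumption.
var testArgs = new TestArgs();
testArgs.ResultCallback = result => Console.WriteLine("Test finished: " + result);
Run(testArgs); // Returns immediately; IDEService.Test executes on a thread-pool thread.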
public void SuccessFunctorLaw(Law<TestArgs<TryF, string, int, long>> law)
{
    var args = TestArgs.Default<TryF>();
    args.LiftedA = new Success<string>(args.A);

    law.TestLaw(args).ShouldBe(true);
}
public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(new ProcessTestContext(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}

public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(_init_AllProperties_API.Arrange(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}

public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(_deliveryEnv_Virtual_API.Arrange(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}

public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(_devEnv_SyncDB_RepeatableChanged_API.Arrange(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}

public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(_devEnv_Validate_MissingSystemTables_API.Arrange(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}

public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(_deliveryEnv_Validate_HistoryExecutedFilesMissing_API.Arrange(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}

public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(_getProjectConfigById_API.Arrange(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}

public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(_devEnv_NewScrtiptFile_DevDummyData_API.Arrange(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}
public void SomeFunctorLaw(Law<TestArgs<OptionF, string, int, long>> law)
{
    var args = TestArgs.Default<OptionF>();
    args.LiftedA = Some(args.A);

    law.TestLaw(args).ShouldBe(true);
}
public override ITestContext Arrange(TestArgs testArgs)
{
    ITestContext testContext = _projectConfigWithDBArrangeAndAssert.Arrange(testArgs, true, DBBackupFileType.FinalState_DevEnv, ScriptFilesStateType.ValidScripts);
    ClearScriptsFiles(testContext);
    return testContext;
}
public void NoneInvariantLaw(Law<TestArgs<OptionF, string, int, long>> law)
{
    var args = TestArgs.Default<OptionF>();
    args.LiftedA = None.K<string>();

    law.TestLaw(args).ShouldBe(true);
}
public override ITestContext Arrange(TestArgs testArgs)
{
    CLITestContext testContext = new CLITestContext(_files_IncrementalChanged_API.Arrange(testArgs));
    MockObjectsProvider.SetTestContextDataByMockCallbacksForCLI(testContext);
    return testContext;
}
public void OkFunctorLaw(Law<TestArgs<ResultOkF<bool>, string, int, long>> law)
{
    var args = TestArgs.Default<ResultOkF<bool>>();
    args.LiftedA = Ok(args.A).K<string, bool>();

    law.TestLaw(args).ShouldBe(true);
}
public void ErrorInvariantLaw(Law<TestArgs<ResultOkF<bool>, string, int, long>> law)
{
    var args = TestArgs.Default<ResultOkF<bool>>();
    args.LiftedA = Error(false).K<string, bool>();

    law.TestLaw(args).ShouldBe(true);
}
public void Basic_DomainEventArgs_Test()
{
    // Is this really needed? Probably not, but it does lock in my API design,
    // and I think that's a good thing for right now.
    var args = new TestArgs { TestProp = 1, Cancel = true };

    Assert.IsTrue(args.Cancel);
    Assert.AreEqual(1, args.TestProp);
}
public void Test()
{
    var pipelineName = "test";
    var pipelineTitle = "Some variable value is {SomeProperty}";
    var type = typeof(TestProcessor).AssemblyQualifiedName;
    var processorTitle = "Some title of the processor";
    var args = new TestArgs();
    var controller = new TestController();

    processorsCount = 0;
    actions.Clear();

    var str = @"<pipelines>
  <{0} title=""{1}"">
    <step>
      <processor type=""{2}"" title=""{3}"" param=""1-1"">
        <processor type=""{2}"" title=""{3}"" param=""1-1-1"" />
      </processor>
      <processor type=""{2}"" title=""{3}"" param=""1-2""/>
    </step>
    <step>
      <processor type=""{2}"" title=""{3}"" param=""2-1"" />
    </step>
  </{0}>
</pipelines>".FormatWith(pipelineName, pipelineTitle, type, processorTitle);

    var xml = XmlDocumentEx.LoadXml(str);
    PipelineManager.Initialize(xml.DocumentElement);
    PipelineManager.StartPipeline(pipelineName, args, controller, false);

    var expected = @"Start|Some variable value is SomeValue
ProcessorStarted|Some title of the processor
Process|1-1|1
ProcessorDone|Some title of the processor
IncrementProgress|1
ProcessorStarted|Some title of the processor
Process|1-1-1|2
ProcessorDone|Some title of the processor
IncrementProgress|1
ProcessorStarted|Some title of the processor
Process|1-2|3
ProcessorDone|Some title of the processor
IncrementProgress|1
ProcessorStarted|Some title of the processor
Process|2-1|4
ProcessorDone|Some title of the processor
IncrementProgress|1
Finish|Done.|True";

    Microsoft.VisualStudio.TestTools.UnitTesting.Assert.AreEqual(expected, actions.Join(Environment.NewLine));
}
/// <summary>
/// Main program.
/// </summary>
/// <param name="args">
/// There should be at least two input parameters from the command line:
/// the file name of the stored boost tree, and
/// the file name of the source test/validation data.
/// </param>
public static void Main(string[] args)
{
    TestArgs cmd = new TestArgs(args);
    Random r = new Random(cmd.seed);

    // Load the model first
    BoostTree boostTree = BoostTree.Read(cmd.binaryTreeFile);
    if (boostTree == null)
    {
        Debug.Assert(false, "Failed to load model");
        Console.WriteLine("Failed to load model " + cmd.binaryTreeFile);
        return;
    }

    // If the iteration count is not specified, use the optimal validation iteration found during training
    int numIter = cmd.numIter;
    if (cmd.numIter == 0)
    {
        numIter = boostTree.OptIter;
    }

    // Compute and output the feature importance for the specified number of iterations
    // boostTree.SummarizeFeatureImporance(numIter, "featureImportance.txt");

    // Read and process only the subset of activated features specified in the activeFeatureFile
    string[] activeFeatureNames = null;
    if (cmd.activeFeatureFile != null)
    {
        activeFeatureNames = TsvFileLoader.ReadFeatureNames(cmd.activeFeatureFile);
    }

    // Feature parser: a special module that understands MSN-style value encoding
    MsnFeatureParser featureParser = new MsnFeatureParser(activeFeatureNames);

    // The column name for the label: the values to regress to
    string[] labelName = { cmd.labelName };

    // Label/rating parser: a special module that understands regression values
    IParser<float> RateParser = new MsnLabelParser(labelName, cmd.labelNameValueFile);

    // Data boundary: every row of data is by itself / all data is in one group / no data groups
    OnelineGroup noBoundary = new OnelineGroup();

    // Load the coded data if it exists
    LabelFeatureData labelFeatureData = (CLabelFeatureData)CLabelFeatureData.Load(cmd.inputFile, featureParser, RateParser, noBoundary, typeof(CLabelFeatureData), activeFeatureNames, cmd.cThreads);

    if (!boostTree.SetFeatureNames(labelFeatureData.FeatureNames))
    {
        Debug.Assert(false, "Sub-model failed to initialize");
        Console.WriteLine("Sub-model failed to initialize, program exits");
        return;
    }

    // All data are for test
    float[] percentage = DataGroups.DataSplit("0:0:10"); // "Train:Valid:Test"
    labelFeatureData.DataGroups.PartitionData(percentage, r);

    // Specify the data partitions to be tested; use all data by default
    DataPartitionType[] dataTypes = DataGroups.PartitionTypes("Train:Valid:Test");

    LabelConverter labelConvert = new LabelConverterNull();

    // Set up the error metrics we would like to keep track of during testing;
    // labelFeatureData is the data being evaluated
    Metrics metrics;
    if (string.Compare(cmd.metric, "ErrRate", true) == 0)
    {
        metrics = new ClassError(labelFeatureData, labelConvert, dataTypes);
    }
    else if (string.Compare(cmd.metric, "PrecRecall", true) == 0)
    {
        metrics = new PrecRecall(labelFeatureData, labelConvert, dataTypes);
    }
    else
    {
        metrics = new ClassError(labelFeatureData, labelConvert, dataTypes);
    }

    boostTree.Predict(labelFeatureData, numIter, metrics, cmd.silent);

    // Output the testing error history. This should at least help validate the optimal
    // number of iterations, although it is probably better to use NDCG history
    // for the optimum.
    metrics.SaveAllResults("testErrHistory.txt");
}
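// Hypothetical invocation sketch: the exact argument syntax is defined by the
// TestArgs(string[]) constructor (not shown), but the fields referenced above
// imply inputs along these lines:
//   Test.exe <binaryTreeFile> <inputFile> [labelName] [metric=ErrRate|PrecRecall] [numIter]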