/// <summary>
/// Runs the "DecisionScopeTest.teml" experiment and verifies that after the scope
/// tear-down the workspace holds exactly two units: "a" with value 1 and "b" with value 7.
/// </summary>
public void ScopeNestedWorkspaceWrapperTearDownTest()
{
    MockProgress progress = new MockProgress();
    Experiment experiment = ExperimentManager.Load("DecisionScopeTest.teml", AppContext.Components);

    using (var dispatcher = ExperimentRunnerHelper.CreateExperimentRunner(experiment, AppContext.WorkspaceInstance, AppContext.Components))
    {
        dispatcher.ExecuteExperiment(progress);
    }

    Assert.AreEqual(2, AppContext.WorkspaceInstance.Units.Count);

    foreach (WorkspaceUnit unit in AppContext.WorkspaceInstance.Units)
    {
        // Every surviving unit must be one of the two expected name/value pairs.
        bool matchesExpectedPair =
            (unit.FriendlyUnitName.Equals("a") && (int)unit.Data == 1) ||
            (unit.FriendlyUnitName.Equals("b") && (int)unit.Data == 7);
        Assert.IsTrue(matchesExpectedPair);
    }
}
/// <summary>
/// Runs the "Decision Scope with composite component" experiment and verifies that
/// after tear-down the workspace holds exactly the three expected units.
/// </summary>
public void ScopeWithCompositeComponent_ScopeNestedWorkspaceWrapperTearDownTest()
{
    MockProgress progress = new MockProgress();
    string testExperimentFilepath = System.IO.Path.Combine(AppContext.BaseTestDirectory, "Decision Scope with composite component.teml");
    Experiment experiment = ExperimentManager.Load(testExperimentFilepath, AppContext.Components);

    using (var dispatcher = ExperimentRunnerHelper.CreateExperimentRunner(experiment, AppContext.WorkspaceInstance, AppContext.Components))
    {
        dispatcher.ExecuteExperiment(progress);
    }

    Assert.AreEqual(3, AppContext.WorkspaceInstance.Units.Count);

    // Only these three units are expected to survive the scope tear-down.
    HashSet<string> expectedUnitNames = new HashSet<string>
    {
        "targetArtifacts",
        "sourceArtifacts",
        "similarityMatrix"
    };

    foreach (WorkspaceUnit unit in AppContext.WorkspaceInstance.Units)
    {
        Assert.IsTrue(expectedUnitNames.Contains(unit.FriendlyUnitName));
    }
}
/// <summary>
/// Wires up the mock authentication provider, serializer, HTTP provider and strict
/// client mock, then constructs the <see cref="AsyncMonitor{T}"/> under test.
/// </summary>
public AsyncMonitorTests()
{
    this.authenticationProvider = new MockAuthenticationProvider();
    this.serializer = new MockSerializer();
    this.httpResponseMessage = new HttpResponseMessage();
    this.httpProvider = new MockHttpProvider(this.httpResponseMessage, this.serializer.Object);

    // Strict mock: any un-setup member access fails the test.
    this.client = new Mock<IBaseClient>(MockBehavior.Strict);
    this.client.SetupAllProperties();
    this.client.SetupGet(client => client.HttpProvider).Returns(this.httpProvider.Object);

    this.progress = new MockProgress();
    this.asyncMonitor = new AsyncMonitor<DerivedTypeClass>(this.client.Object, AsyncMonitorTests.monitorUrl);
}
/// <summary>
/// Verifies that ReadStreamWithProgress reports monotonically non-decreasing progress
/// updates while copying, and that the final reported value equals the stream length.
/// </summary>
public async Task ReadStreamWithProgress()
{
    var updates = new List<int>();

    // Fix: dispose both the backing stream and the wrapping progress stream
    // (the originals were never disposed).
    using (var largeStream = new MemoryStream(new byte[1024 * 1024]))
    {
        // Capture the expected total before any disposal can invalidate the stream.
        long expectedTotal = largeStream.Length;
        var progress = new MockProgress<int>(u => updates.Add(u));

        using (var progressStream = largeStream.ReadStreamWithProgress(progress))
        {
            await progressStream.CopyToAsync(Stream.Null);
        }

        // At least one progress update must have been reported.
        Assert.AreNotEqual(0, updates.Count);

        // Progress values must never go backwards.
        for (int i = 1; i < updates.Count; i++)
        {
            Assert.IsTrue(updates[i] >= updates[i - 1]);
        }

        // The final update must account for the whole stream.
        Assert.AreEqual(expectedTotal, updates[updates.Count - 1]);
    }
}
/// <summary>
/// Builds the mock OneDrive client (strict mock with authentication and HTTP provider
/// stubbed) and creates the <see cref="ItemCopyAsyncMonitor"/> under test.
/// </summary>
public void Setup()
{
    this.authenticationProvider = new MockAuthenticationProvider();
    this.serializer = new MockSerializer();
    this.httpResponseMessage = new HttpResponseMessage();
    this.httpProvider = new MockHttpProvider(this.httpResponseMessage, this.serializer.Object);

    // Strict mock: any un-setup member access fails the test.
    this.oneDriveClient = new Mock<IOneDriveClient>(MockBehavior.Strict);
    this.oneDriveClient.SetupAllProperties();
    this.oneDriveClient.SetupGet(client => client.AuthenticationProvider).Returns(this.authenticationProvider.Object);
    this.oneDriveClient.Setup(client => client.AuthenticateAsync()).Returns(Task.FromResult(new AccountSession()));
    this.oneDriveClient.SetupGet(client => client.HttpProvider).Returns(this.httpProvider.Object);

    this.progress = new MockProgress();
    this.asyncMonitor = new ItemCopyAsyncMonitor(this.oneDriveClient.Object, AsyncMonitorTests.monitorUrl);
}
/// <summary>
/// Test fixture setup: configures mock dependencies (auth provider, serializer,
/// HTTP provider) on a strict IOneDriveClient mock, then instantiates the
/// ItemCopyAsyncMonitor against the shared monitor URL.
/// </summary>
public void Setup()
{
    // Mocked collaborators shared by the monitor tests.
    this.authenticationProvider = new MockAuthenticationProvider();
    this.serializer = new MockSerializer();
    this.httpResponseMessage = new HttpResponseMessage();
    this.httpProvider = new MockHttpProvider(this.httpResponseMessage, this.serializer.Object);

    // MockBehavior.Strict ensures no unexpected client calls slip through.
    this.oneDriveClient = new Mock<IOneDriveClient>(MockBehavior.Strict);
    this.oneDriveClient.SetupAllProperties();
    this.oneDriveClient.SetupGet(client => client.AuthenticationProvider).Returns(this.authenticationProvider.Object);
    this.oneDriveClient.Setup(client => client.AuthenticateAsync()).Returns(Task.FromResult(new AccountSession()));
    this.oneDriveClient.SetupGet(client => client.HttpProvider).Returns(this.httpProvider.Object);

    // System under test.
    this.progress = new MockProgress();
    this.asyncMonitor = new ItemCopyAsyncMonitor(this.oneDriveClient.Object, AsyncMonitorTests.monitorUrl);
}
/// <summary>
/// Loads a benchmark, matches its IO against a candidate experiment, prepares the
/// benchmark experiment and runs it, verifying step count and error state.
/// NOTE: currently disabled via Assert.Fail until the contest feature is revisited;
/// everything after the first line is unreachable.
/// </summary>
public void PrepareAndRunBenchmarkExperiment()
{
    Assert.Fail("Test temporarily broken. Ignored till contest feature is going to be revisited.");

    List<Benchmark> benchmarks = BenchmarkLoader.LoadBenchmarksInfo(BenchmarkDirectory);
    Benchmark testBenchmark = benchmarks[0];

    // Load the experiment to be run against the benchmark.
    string experimentFilename = System.IO.Path.Combine(AppContext.BaseTestDirectory, "experiment_to_be_benchmarked.gml");
    Experiment experimentToBeBenchmarked = ExperimentManager.Load(experimentFilename, AppContext.Components);

    // Prepare matching IO.
    testBenchmark.PrepareMatchingIOByType(experimentToBeBenchmarked);
    Assert.AreEqual(2, testBenchmark.BenchmarkInputSetting.Count);
    Assert.AreEqual(1, testBenchmark.BenchmarkOutputsSetting.Count);

    // Remap benchmarkSourceArtifacts to the original source artifacts.
    foreach (BenchmarkItemSetting<IOItem> pair in testBenchmark.BenchmarkInputSetting)
    {
        IOItem item = pair.Item;
        ItemSettingCollection candidates = pair.CandidateSettings;
        if (item.MappedTo.Equals("benchmarkSourceArtifacts"))
        {
            // Found the item we want to remap.
            pair.SelectedSetting = candidates["originalSourceArtifacts"];
        }
    }

    // Finally prepare the benchmark experiment.
    testBenchmark.PrepareBenchmarkExperiment(experimentToBeBenchmarked, AppContext.Components);

    // Assert that exactly two inputs are included in the export settings...
    int includedInputs = 0;
    foreach (KeyValuePair<string, ItemSetting> pair in testBenchmark.Setup.InputSettings)
    {
        if (pair.Value.Include == true)
        {
            includedInputs++;
        }
    }
    Assert.AreEqual(2, includedInputs);

    // ...and exactly one output.
    int includedOutputs = 0;
    foreach (KeyValuePair<string, ItemSetting> pair in testBenchmark.Setup.OutputSettings)
    {
        if (pair.Value.Include == true)
        {
            includedOutputs++;
        }
    }
    Assert.AreEqual(1, includedOutputs);

    Assert.IsNotNull(testBenchmark.BenchmarkExperiment);

    // For debug output file:
    // string path = System.IO.Path.Combine(AppContext.BaseTestDirectory, "benchmarkTest1.gml");
    // AppContext.ExperimentManager.Save(testBenchmark.BenchmarkExperiment, path);

    MockProgress progress = new MockProgress();
    using (var dispatcher = ExperimentRunnerHelper.CreateExperimentRunner(testBenchmark.BenchmarkExperiment, AppContext.WorkspaceInstance, AppContext.Components))
    {
        dispatcher.ExecuteExperiment(progress);
        Assert.AreEqual(7, progress.NumSteps);
        Assert.IsFalse(progress.HasError);
    }
}
/// <summary>
/// End-to-end check of defining a new benchmark: defines a benchmark from a base
/// experiment, verifies the persisted benchmark info and baseline, then prepares and
/// runs a testing solution against it.
/// NOTE: currently disabled via Assert.Fail until the contest feature is revisited;
/// everything after the first line is unreachable.
/// </summary>
public void DefiningBenchmarkTest()
{
    Assert.Fail("Test temporarily broken. Ignored till contest feature is going to be revisited.");

    string baseExperimentFilename = "DefiningBenchmarkTestExperiment.teml";
    string testingSolutionFilename = "DefiningBenchmarkTestingSolution.teml";

    // Create a temporary directory for defining the benchmark.
    string benchmarkTemporaryDirectory = System.IO.Path.Combine(AppContext.BaseTestDirectory, "DefiningBenchmarkTest");
    System.IO.Directory.CreateDirectory(benchmarkTemporaryDirectory);
    string newBenchmarkFilePath = System.IO.Path.Combine(benchmarkTemporaryDirectory, "newDefinedBenchmark.tbml");

    // Copy the test data into the temporary benchmark directory.
    string testData = System.IO.Path.Combine(AppContext.BaseTestDirectory, "DefiningBenchmarkTestData.xml");
    System.IO.File.Copy(testData, System.IO.Path.Combine(benchmarkTemporaryDirectory, "DefiningBenchmarkTestData.xml"));

    // Load the experiment the benchmark is going to be defined from.
    string baseExperimentFilePath = System.IO.Path.Combine(AppContext.BaseTestDirectory, baseExperimentFilename);
    Experiment baseExperimentForDefiningBenchmark = ExperimentManager.Load(baseExperimentFilePath, AppContext.Components);

    var benchmarkDefiner = new DefiningBenchmark(baseExperimentForDefiningBenchmark, AppContext.Components, AppContext.WorkspaceInstance, AppContext.PackageManager, AppContext.WorkspaceInstance.TypeDirectories, null);
    Assert.AreEqual(1, benchmarkDefiner.TemplatizableComponents.Count);
    Assert.AreEqual("Preprocessor", benchmarkDefiner.TemplatizableComponents[0].Data.Metadata.Label);

    // Select the preprocessor template as the component template for benchmarking.
    benchmarkDefiner.SelectedTemplateNode = benchmarkDefiner.TemplatizableComponents[0];

    // Select the new benchmark path.
    benchmarkDefiner.BenchmarkInfo.FilePath = newBenchmarkFilePath;

    // Set some values for the benchmark info.
    string benchmarkName = "Testing defining new benchmark";
    string author = "Re test author";
    string contributors = "Re test contributors";
    string description = "Re test description";
    string shortDescription = "Re test short description";
    DateTime deadline = DateTime.Now;
    string fakeExperimentResultsUnitname = "fakeunitname";
    string webpageLink = "test://test.webpage.link";

    benchmarkDefiner.BenchmarkInfo.Name = benchmarkName;
    benchmarkDefiner.BenchmarkInfo.Author = author;
    benchmarkDefiner.BenchmarkInfo.Contributors = contributors;
    benchmarkDefiner.BenchmarkInfo.Description = description;
    benchmarkDefiner.BenchmarkInfo.ShortDescription = shortDescription;
    benchmarkDefiner.BenchmarkInfo.Deadline = deadline;
    benchmarkDefiner.BenchmarkInfo.ExperimentResultsUnitname = fakeExperimentResultsUnitname;
    benchmarkDefiner.BenchmarkInfo.WebPageLink = new Uri(webpageLink);

    // Assure the file does not exist prior to defining.
    Assert.IsFalse(System.IO.File.Exists(benchmarkDefiner.BenchmarkInfo.FilePath));

    // Set some mock experiment results as the baseline.
    TraceLabSDK.Types.Contests.TLExperimentResults fakeBaseline = CreateDummyExperimentResults("FAKE-BASELINE");
    benchmarkDefiner.SelectedExperimentResults = fakeBaseline;

    // Call define benchmark.
    benchmarkDefiner.Define();

    // Check that the new benchmark has been created on disk.
    Assert.IsTrue(System.IO.File.Exists(benchmarkDefiner.BenchmarkInfo.FilePath));

    // Load the newly defined benchmark; there should be only one, since the
    // directory has just been created.
    List<Benchmark> benchmarks = BenchmarkLoader.LoadBenchmarksInfo(benchmarkTemporaryDirectory);
    Benchmark testBenchmark = benchmarks[0];

    // Check that the new benchmark carries the previously defined properties.
    Assert.AreEqual(benchmarkName, testBenchmark.BenchmarkInfo.Name);
    Assert.AreEqual(author, testBenchmark.BenchmarkInfo.Author);
    Assert.AreEqual(contributors, testBenchmark.BenchmarkInfo.Contributors);
    Assert.AreEqual(description, testBenchmark.BenchmarkInfo.Description);
    Assert.AreEqual(shortDescription, testBenchmark.BenchmarkInfo.ShortDescription);
    Assert.AreEqual(deadline.ToString(), testBenchmark.BenchmarkInfo.Deadline.ToString());
    Assert.AreEqual(fakeExperimentResultsUnitname, testBenchmark.BenchmarkInfo.ExperimentResultsUnitname);

    // Check that the baseline results were saved properly by loading them from xml.
    TraceLabSDK.Types.Contests.TLExperimentResults baseline = BenchmarkLoader.ReadBaseline(benchmarkDefiner.BenchmarkInfo.FilePath);
    Assert.AreEqual(fakeBaseline.TechniqueName, baseline.TechniqueName);
    Assert.AreEqual(fakeBaseline.Score, baseline.Score);
    Assert.AreEqual(fakeBaseline.AcrossAllDatasetsResults, baseline.AcrossAllDatasetsResults);
    Assert.IsTrue(fakeBaseline.DatasetsResults.SequenceEqual(baseline.DatasetsResults));

    // Load the experiment to be run against the newly defined benchmark.
    string experimentFilename = System.IO.Path.Combine(AppContext.BaseTestDirectory, testingSolutionFilename);
    Experiment testingSolutionExperiment = ExperimentManager.Load(experimentFilename, AppContext.Components);

    // Finally prepare the benchmark experiment.
    testBenchmark.PrepareBenchmarkExperiment(testingSolutionExperiment, AppContext.Components);

    // Run the benchmark.
    MockProgress progress = new MockProgress();
    using (var dispatcher = CreateExperiment(testBenchmark.BenchmarkExperiment, AppContext.WorkspaceInstance, AppContext.Components))
    {
        dispatcher.ExecuteExperiment(progress);
        Assert.AreEqual(5, progress.NumSteps);
        Assert.IsFalse(progress.HasError);
    }
}