/// <summary>
/// Builds the baseline <see cref="ContestResults"/> for a contest from a set of experiment results.
/// Each dataset's results are wrapped in a <see cref="DatasetResultsDTO"/> transfer object.
/// NOTE(review): the method name contains a typo ("Restults"); renaming would break callers, so it is left as-is.
/// </summary>
/// <param name="contestId">Identifier of the contest the baseline belongs to.</param>
/// <param name="experimentResults">Experiment results to convert into the baseline.</param>
/// <param name="techniqueName">Name of the technique that produced the results.</param>
/// <param name="techniqueDescription">Description of the technique that produced the results.</param>
/// <returns>The assembled baseline contest results.</returns>
public static ContestResults PrepareBaselineContestRestults(string contestId, TraceLabSDK.Types.Contests.TLExperimentResults experimentResults, string techniqueName, string techniqueDescription)
{
    // Wrap every per-dataset result in its DTO before handing them to the contest results.
    var datasetDtos = new List<DatasetResultsDTO>();
    foreach (TraceLabSDK.Types.Contests.DatasetResults datasetResults in experimentResults.DatasetsResults)
    {
        datasetDtos.Add(new DatasetResultsDTO(datasetResults));
    }

    return new ContestResults(contestId, techniqueName, techniqueDescription, datasetDtos,
                              experimentResults.Score, experimentResults.BaseData);
}
/// <summary>
/// Extracts the dataset names and the metric definitions from the given experiment results.
/// Metric definitions are taken from the first dataset that yields any metrics
/// (assumption is that all datasets are using the same metrics).
/// </summary>
/// <param name="experimentResults">Experiment results to read datasets and metrics from.</param>
/// <param name="metrics">Receives the metric definitions.</param>
/// <param name="datasets">Receives the dataset names.</param>
public static void ExtractDatasetsAndMetricsDefinitions(TraceLabSDK.Types.Contests.TLExperimentResults experimentResults, out List<MetricDefinition> metrics, out List<string> datasets)
{
    datasets = new List<string>();
    metrics = new List<MetricDefinition>();

    foreach (TraceLabSDK.Types.Contests.DatasetResults datasetResults in experimentResults.DatasetsResults)
    {
        datasets.Add(datasetResults.DatasetName);

        // Only collect metrics once; if an earlier dataset produced no metrics,
        // keep trying with the next one.
        if (metrics.Count != 0)
        {
            continue;
        }

        foreach (TraceLabSDK.Types.Contests.Metric metric in datasetResults.Metrics)
        {
            metrics.Add(new MetricDefinition(metric));
        }
    }
}
/// <summary>
/// Creates some dummy experiment results, to test its serialization.
/// </summary>
/// <param name="techniqueName">Name of the technique.</param>
/// <returns>Dummy experiment results with two dataset results and an across-all-datasets result.</returns>
internal static TraceLabSDK.Types.Contests.TLExperimentResults CreateDummyExperimentResults(string techniqueName)
{
    var dummyResults = new TraceLabSDK.Types.Contests.TLExperimentResults(techniqueName);
    dummyResults.BaseData = MockContestResultsHelper.CreateDummyBaseData();

    // Every dummy dataset uses the same pair of mock metric results, so build them
    // through one factory delegate instead of repeating the three-call pattern verbatim.
    System.Func<string, TraceLabSDK.Types.Contests.DatasetResults> createDataset = datasetName =>
        MockContestResultsHelper.CreateDummyDatasetResults(
            datasetName,
            MockContestResultsHelper.CreateDummySeriesMetricResults("Mock series metric", "Mock series metric description"),
            MockContestResultsHelper.CreateDummyBoxSummaryMetricResults("Mock box summary metric", "Mock box summary metric description"));

    dummyResults.AddDatasetResult(createDataset("Dataset 1"));
    dummyResults.AddDatasetResult(createDataset("Dataset 2"));

    // The aggregate result is stored separately from the per-dataset list.
    dummyResults.AcrossAllDatasetsResults = createDataset("Across all datasets");

    return dummyResults;
}
/// <summary>
/// Runs the experiment on a dedicated background STA thread.
/// </summary>
/// <param name="progress">The progress reporter; status and error state are reset before the run.</param>
/// <param name="workspace">The workspace the experiment executes against.</param>
/// <param name="library">The components library used to resolve components.</param>
/// <param name="baseline">(optional) The baseline data that is going to be preloaded into workspace before executing the experiment.</param>
public void RunExperiment(IProgress progress, Workspace workspace, ComponentsLibrary library, TraceLabSDK.Types.Contests.TLExperimentResults baseline)
{
    // Reset progress/error state before kicking off the run.
    progress.CurrentStatus = "Preparing experiment...";
    progress.IsIndeterminate = true;
    progress.SetError(false);
    ClearErrors();

    Action runBody = delegate
    {
        // Prevent the component library from rescanning while the experiment executes.
        using (var libraryGuard = new RescanLibraryGuard(library))
        using (var runner = CreateExperimentRunner(workspace, library, baseline))
        {
            runner.ExecuteExperiment(progress);
        }
    };

    // Execute on a background STA thread so the caller is not blocked.
    Thread workerThread = ThreadFactory.CreateThread(new System.Threading.ThreadStart(runBody));
    workerThread.IsBackground = true;
    workerThread.Name = "ExperimentRunner";
    workerThread.SetApartmentState(System.Threading.ApartmentState.STA);
    workerThread.Start();
}
/// <summary>
/// Creates the experiment runner for this experiment.
/// </summary>
/// <param name="workspace">The workspace the experiment executes against.</param>
/// <param name="library">The components library used to adapt the experiment graph.</param>
/// <param name="baseline">The baseline - if baseline is different than null it is going to be written into workspace before executing the experiment
/// with the Unitname BASELINE.</param>
/// <returns>The configured experiment runner; also stored in <c>m_dispatcher</c>.</returns>
private IExperimentRunner CreateExperimentRunner(Workspace workspace, ComponentsLibrary library, TraceLabSDK.Types.Contests.TLExperimentResults baseline)
{
    var experimentWorkspaceWrapper = WorkspaceWrapperFactory.CreateExperimentWorkspaceWrapper(workspace, ExperimentInfo.Id);
    var nodesFactory = new RunnableNodeFactory(experimentWorkspaceWrapper);
    RunnableExperimentBase runnableGraph = GraphAdapter.Adapt(this, nodesFactory, library, experimentWorkspaceWrapper.TypeDirectories);

    // Start from a clean workspace for this experiment.
    experimentWorkspaceWrapper.DeleteExperimentUnits();

    // If a baseline has been provided, write it into the workspace before returning the runner.
    if (baseline != null)
    {
        experimentWorkspaceWrapper.Store("BASELINE", baseline);
    }

    IExperimentRunner runner = ExperimentRunnerFactory.CreateExperimentRunner(runnableGraph);
    runner.NodeExecuting += dispatcher_NodeExecuting;
    runner.NodeFinished += dispatcher_NodeFinished;
    runner.NodeHasError += dispatcher_NodeHasError;
    runner.ExperimentFinished += dispatcher_ExperimentFinished;
    runner.ExperimentStarted += dispatcher_ExperimentStarted;

    m_dispatcher = runner;
    return runner;
}
/// <summary>
/// Creates some dummy experiment results, to test its serialization.
/// </summary>
/// <param name="techniqueName">Name of the technique.</param>
/// <returns>Dummy experiment results with two dataset results and an across-all-datasets result.</returns>
internal static TraceLabSDK.Types.Contests.TLExperimentResults CreateDummyExperimentResults(string techniqueName)
{
    var dummyResults = new TraceLabSDK.Types.Contests.TLExperimentResults(techniqueName);
    dummyResults.BaseData = MockContestResultsHelper.CreateDummyBaseData();

    // All three dummy datasets share the same pair of mock metric results; build
    // them via a single factory delegate rather than repeating the calls verbatim.
    System.Func<string, TraceLabSDK.Types.Contests.DatasetResults> makeDatasetResults = datasetName =>
        MockContestResultsHelper.CreateDummyDatasetResults(
            datasetName,
            MockContestResultsHelper.CreateDummySeriesMetricResults("Mock series metric", "Mock series metric description"),
            MockContestResultsHelper.CreateDummyBoxSummaryMetricResults("Mock box summary metric", "Mock box summary metric description"));

    dummyResults.AddDatasetResult(makeDatasetResults("Dataset 1"));
    dummyResults.AddDatasetResult(makeDatasetResults("Dataset 2"));

    // The aggregate result is stored separately from the per-dataset list.
    dummyResults.AcrossAllDatasetsResults = makeDatasetResults("Across all datasets");

    return dummyResults;
}
/// <summary>
/// End-to-end test of defining a new benchmark from a base experiment, then loading the
/// defined benchmark back and running a testing solution against it.
/// NOTE(review): the first statement is Assert.Fail, so everything below it is currently
/// unreachable — the test is deliberately disabled until the contest feature is revisited.
/// Consider an [Ignore] attribute instead of Assert.Fail once revisited.
/// </summary>
public void DefiningBenchmarkTest()
{
    Assert.Fail("Test temporarily broken. Ignored till contest feature is going to be revisited.");

    string baseExperimentFilename = "DefiningBenchmarkTestExperiment.teml";
    string testingSolutionFilename = "DefiningBenchmarkTestingSolution.teml";

    //create temporary directory for defining benchmark
    string benchmarkTemporaryDirectory = System.IO.Path.Combine(AppContext.BaseTestDirectory, "DefiningBenchmarkTest");
    System.IO.Directory.CreateDirectory(benchmarkTemporaryDirectory);
    string newBenchmarkFilePath = System.IO.Path.Combine(benchmarkTemporaryDirectory, "newDefinedBenchmark.tbml");

    //copy the test data into temporary benchmark directory
    string testData = System.IO.Path.Combine(AppContext.BaseTestDirectory, "DefiningBenchmarkTestData.xml");
    System.IO.File.Copy(testData, System.IO.Path.Combine(benchmarkTemporaryDirectory, "DefiningBenchmarkTestData.xml"));

    // load the experiment from which the benchmark is going to be defined from
    string baseExperimentFilePath = System.IO.Path.Combine(AppContext.BaseTestDirectory, baseExperimentFilename);
    Experiment baseExperimentForDefiningBenchmark = ExperimentManager.Load(baseExperimentFilePath, AppContext.Components);

    var benchmarkDefiner = new DefiningBenchmark(baseExperimentForDefiningBenchmark, AppContext.Components, AppContext.WorkspaceInstance, AppContext.PackageManager, AppContext.WorkspaceInstance.TypeDirectories, null);

    // the base experiment is expected to expose exactly one templatizable component
    Assert.AreEqual(1, benchmarkDefiner.TemplatizableComponents.Count);
    Assert.AreEqual("Preprocessor", benchmarkDefiner.TemplatizableComponents[0].Data.Metadata.Label);

    //select preprocessor template as Component Template for benchmarking
    benchmarkDefiner.SelectedTemplateNode = benchmarkDefiner.TemplatizableComponents[0];

    //select new benchmark path
    benchmarkDefiner.BenchmarkInfo.FilePath = newBenchmarkFilePath;

    //set some values for benchmark info
    string benchmarkName = "Testing defining new benchmark";
    string author = "Re test author";
    // NOTE(review): this literal was split across lines in the extracted source; re-joined
    // here as the most plausible original — confirm against version control.
    string contributors = "Re test contributors";
    string description = "Re test description";
    string shortDescription = "Re test short description";
    DateTime deadline = DateTime.Now;
    string fakeExperimentResultsUnitname = "fakeunitname";
    string webpageLink = "test://test.webpage.link";

    benchmarkDefiner.BenchmarkInfo.Name = benchmarkName;
    benchmarkDefiner.BenchmarkInfo.Author = author;
    benchmarkDefiner.BenchmarkInfo.Contributors = contributors;
    benchmarkDefiner.BenchmarkInfo.Description = description;
    benchmarkDefiner.BenchmarkInfo.ShortDescription = shortDescription;
    benchmarkDefiner.BenchmarkInfo.Deadline = deadline;
    benchmarkDefiner.BenchmarkInfo.ExperimentResultsUnitname = fakeExperimentResultsUnitname;
    benchmarkDefiner.BenchmarkInfo.WebPageLink = new Uri(webpageLink);

    //assure file does not exists prior defining
    Assert.IsFalse(System.IO.File.Exists(benchmarkDefiner.BenchmarkInfo.FilePath));

    //set some mock experiment results as baseline
    TraceLabSDK.Types.Contests.TLExperimentResults fakeBaseline = CreateDummyExperimentResults("FAKE-BASELINE");
    benchmarkDefiner.SelectedExperimentResults = fakeBaseline;

    //call define benchmark
    benchmarkDefiner.Define();

    //check if new benchmark has been created
    Assert.IsTrue(System.IO.File.Exists(benchmarkDefiner.BenchmarkInfo.FilePath));

    //load newly defined benchmark
    List<Benchmark> benchmarks = BenchmarkLoader.LoadBenchmarksInfo(benchmarkTemporaryDirectory);
    Benchmark testBenchmark = benchmarks[0]; //there should be only 1, since the directory has been just created

    //check if new test benchmark has previously defined properties
    Assert.AreEqual(benchmarkName, testBenchmark.BenchmarkInfo.Name);
    Assert.AreEqual(author, testBenchmark.BenchmarkInfo.Author);
    Assert.AreEqual(contributors, testBenchmark.BenchmarkInfo.Contributors);
    Assert.AreEqual(description, testBenchmark.BenchmarkInfo.Description);
    Assert.AreEqual(shortDescription, testBenchmark.BenchmarkInfo.ShortDescription);
    // compared via ToString, presumably to sidestep sub-second precision loss in
    // serialization — TODO confirm
    Assert.AreEqual(deadline.ToString(), testBenchmark.BenchmarkInfo.Deadline.ToString());
    Assert.AreEqual(fakeExperimentResultsUnitname, testBenchmark.BenchmarkInfo.ExperimentResultsUnitname);

    //check if baseline results has been saved properly, by loading it from xml
    TraceLabSDK.Types.Contests.TLExperimentResults baseline = BenchmarkLoader.ReadBaseline(benchmarkDefiner.BenchmarkInfo.FilePath);
    Assert.AreEqual(fakeBaseline.TechniqueName, baseline.TechniqueName);
    Assert.AreEqual(fakeBaseline.Score, baseline.Score);
    Assert.AreEqual(fakeBaseline.AcrossAllDatasetsResults, baseline.AcrossAllDatasetsResults);
    Assert.IsTrue(fakeBaseline.DatasetsResults.SequenceEqual(baseline.DatasetsResults));

    // load the experiment to be run against new defined benchmark
    string experimentFilename = System.IO.Path.Combine(AppContext.BaseTestDirectory, testingSolutionFilename);
    Experiment testingSolutionExperiment = ExperimentManager.Load(experimentFilename, AppContext.Components);

    //finally prepare benchmark experiment
    testBenchmark.PrepareBenchmarkExperiment(testingSolutionExperiment, AppContext.Components);

    //run benchmark
    MockProgress progress = new MockProgress();

    using (var dispatcher = CreateExperiment(testBenchmark.BenchmarkExperiment, AppContext.WorkspaceInstance, AppContext.Components))
    {
        dispatcher.ExecuteExperiment(progress);

        // expected number of executed steps for the prepared benchmark experiment
        Assert.AreEqual(5, progress.NumSteps);
        Assert.IsFalse(progress.HasError);
    }
}