public async Task<IActionResult> CreateIteration(IterationModel model)
{
    var entity = mapper.Map<Iteration>(model);
    await dbContext.AddAsync(entity);
    await dbContext.SaveChangesAsync();

    // TODO: Return 201 with location header
    return Ok();
}
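// A minimal sketch of how the TODO above might be addressed, assuming a companion
// GetIteration(Guid id) action exists on the same controller and that the mapped
// entity exposes an Id after SaveChangesAsync; both names are hypothetical.
public async Task<IActionResult> CreateIterationWithLocation(IterationModel model)
{
    var entity = mapper.Map<Iteration>(model);
    await dbContext.AddAsync(entity);
    await dbContext.SaveChangesAsync();

    // 201 Created, with a Location header pointing at the newly created resource.
    return CreatedAtAction(nameof(GetIteration), new { id = entity.Id }, model);
}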
public IterationPage(Guid projectId, string projectName, IterationModel iterationModel)
{
    InitializeComponent();
    this.projectId = projectId;
    this.projectName = projectName;

    List<IterationModel> iterationModels = MainWindow.currentWindow
        .GetIterations(projectId)
        .Where(iteration => iteration.Status != "New")
        .ToList();
    IterationListView.ItemsSource = iterationModels;

    Thread thread = new Thread(new ParameterizedThreadStart(UpdateIterationState));
    thread.Start(iterationModel);
}
private static ScenarioBenchmark PostProcessing()
{
    PrintHeader("Starting POST");

    var scenarioBenchmark = new ScenarioBenchmark("MusicStore")
    {
        Namespace = "JitBench"
    };

    // Create (measured) test entries for this scenario.
    var startup = new ScenarioTestModel("Startup");
    scenarioBenchmark.Tests.Add(startup);

    var request = new ScenarioTestModel("First Request");
    scenarioBenchmark.Tests.Add(request);

    // TODO: add response time once jit bench is updated to
    // report more reasonable numbers.

    // Add measured metrics to each test.
    startup.Performance.Metrics.Add(new MetricModel
    {
        Name = "Duration",
        DisplayName = "Duration",
        Unit = "ms"
    });
    request.Performance.Metrics.Add(new MetricModel
    {
        Name = "Duration",
        DisplayName = "Duration",
        Unit = "ms"
    });

    for (int i = 0; i < s_iterations; ++i)
    {
        var startupIteration = new IterationModel { Iteration = new Dictionary<string, double>() };
        startupIteration.Iteration.Add("Duration", s_startupTimes[i]);
        startup.Performance.IterationModels.Add(startupIteration);

        var requestIteration = new IterationModel { Iteration = new Dictionary<string, double>() };
        requestIteration.Iteration.Add("Duration", s_requestTimes[i]);
        request.Performance.IterationModels.Add(requestIteration);
    }

    return scenarioBenchmark;
}
private static void addMeasurement(ref ScenarioBenchmark scenario, string name, MetricModel metric, double value)
{
    var iteration = new IterationModel { Iteration = new Dictionary<string, double>() };
    iteration.Iteration.Add(metric.Name, value);

    var size = new ScenarioTestModel(name);
    size.Performance.Metrics.Add(metric);
    size.Performance.IterationModels.Add(iteration);

    scenario.Tests.Add(size);
}
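// A usage sketch for addMeasurement, built from the same MetricModel and ScenarioBenchmark
// shapes used elsewhere in this file; the scenario name, test name, metric, and value
// below are illustrative assumptions only.
var sizeScenario = new ScenarioBenchmark("SizeOnDisk");
var sizeMetric = new MetricModel { Name = "Size", DisplayName = "Size", Unit = "bytes" };
addMeasurement(ref sizeScenario, "Publish Output", sizeMetric, 1234567d);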
/// <summary>
/// Converts an IterationResult into BenchView's IterationModel, remapping and filtering the reported metrics.
/// </summary>
static IterationModel ConvertIterationResult(IterationResult iterationResult, Func<Metric, Metric> metricMapping)
{
    IterationModel iterationModel = new IterationModel();
    iterationModel.Iteration = new Dictionary<string, double>();

    foreach (KeyValuePair<Metric, double> measurement in iterationResult.Measurements)
    {
        Metric finalMetric = metricMapping(measurement.Key);

        // Metrics mapped to the default value are filtered out of the result.
        if (!finalMetric.Equals(default(Metric)))
        {
            iterationModel.Iteration.Add(finalMetric.Name, measurement.Value);
        }
    }

    return iterationModel;
}
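// A usage sketch, assuming `iterationResult` was produced by a benchmark run elsewhere.
// Passing an identity delegate keeps every metric under its original name; returning
// default(Metric) from the delegate instead would filter that measurement out.
IterationModel converted = ConvertIterationResult(iterationResult, metric => metric);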
public static BurndownChartModel ConvertIterationDataToBurndownChartModel(IterationModel data)
{
    const int daysInIteration = 10;

    var points = data.DataPoints.Select(x => x.RemainingPoints).ToList();
    var everhour = data.DataPoints.Select(x => x.RemainingEverhourPoints).ToList();

    var chartModel = new BurndownChartModel
    {
        IdealBurndown = MakeIdealBurndown(data.InitialPoints, daysInIteration),
        IdealEverhourBurndown = MakeIdealBurndown(data.InitialEverhourPoints ?? data.InitialPoints, daysInIteration),
        PointBurndown = MakePointBurndown(points),
        EverhourBurndown = MakePointBurndown(everhour)
    };

    return chartModel;
}
private void TrainButton_Click(object sender, RoutedEventArgs e)
{
    IterationModel iterationModel = null;
    try
    {
        iterationModel = MainWindow.currentWindow.TrainProject(projectId);
    }
    catch (HttpOperationException ex)
    {
        string result = ex.Response.Content;
        TrainResponseModel response = Utils.Utils.JsonDeserialize<TrainResponseModel>(result);
        if (response != null && !String.IsNullOrEmpty(response.Code))
        {
            switch (response.Code)
            {
                case "BadRequestTrainingNotNeeded":
                    MessageBox.Show("Nothing has changed since the previous training.");
                    return;
                case "BadRequestTrainingValidationFailed":
                    MessageBox.Show("Your project can't be trained just yet. Make sure you have at least 2 tags with at least 5 images in each.");
                    return;
                default:
                    MessageBox.Show(response.Code);
                    return;
            }
        }
        else
        {
            RateLimitResponseModel limitResponse = Utils.Utils.JsonDeserialize<RateLimitResponseModel>(result);
            if (limitResponse != null)
            {
                MessageBox.Show(limitResponse.message);
                return;
            }
        }

        MessageBox.Show("An error occurred.");
        return;
    }

    IterationPage iterationPage = new IterationPage(projectId, projectName, iterationModel);
    this.NavigationService.Navigate(iterationPage);
}
/// <summary>
/// Loads the tasks of an iteration
/// </summary>
private void LoadTasksByIteration(TreeNode trnParent, int intID)
{
    NodeModel objNode = Project.Iterations.SearchRecursive(intID);

    // If an iteration was found
    if (objNode != null)
    {
        IterationModel objIteration = objNode.Tag as IterationModel;

        if (objIteration != null)
        {
            foreach (TaskModel objTask in objIteration.Tasks)
            {
                trvProject.AddNode(trnParent, GetNodeKey(NodeType.Task, objTask.ID), objTask.Name,
                                   true, 0, System.Drawing.Color.Black, false);
            }
        }
    }
}
private void UpdateIterationState(object obj)
{
    IterationModel iterationModel = (IterationModel)obj;
    if (iterationModel != null)
    {
        // Poll until the iteration leaves the "Training" state.
        while (iterationModel.Status == "Training")
        {
            Thread.Sleep(1000);
            iterationModel = MainWindow.currentWindow.GetIteration(projectId, iterationModel.Id);
        }

        // Make the trained iteration the default project endpoint.
        iterationModel.IsDefault = true;
        MainWindow.currentWindow.UpdateIteration(projectId, iterationModel.Id, iterationModel);

        // Refresh the iteration list on the UI thread.
        this.Dispatcher.Invoke(new Action(() =>
        {
            List<IterationModel> iterationModels = MainWindow.currentWindow
                .GetIterations(projectId)
                .Where(iteration => iteration.Status != "New")
                .ToList();
            IterationListView.ItemsSource = iterationModels;
        }));
    }
}
/// <summary>
/// Gets the projects
/// </summary>
private ProjectModelCollection GetProjects()
{
    ProjectModelCollection objColProjects = new ProjectModelCollection();
    WorkItemStore objWorkItemStore = tfsTeamProject.GetService<WorkItemStore>();

    // Load the projects
    foreach (Project objTfsProject in objWorkItemStore.Projects)
    {
        ProjectModel objNewProject = new ProjectModel();

        // Assign the data
        objNewProject.ID = objTfsProject.Id;
        objNewProject.Name = objTfsProject.Name;
        // Assign the lazy objects
        objNewProject.Categories.AddRange(LoadCategories(objTfsProject));
        // Add the task types
        for (int intIndex = 0; intIndex < objTfsProject.WorkItemTypes.Count; intIndex++)
        {
            objNewProject.TaskTypes.Add(objTfsProject.WorkItemTypes[intIndex].Name);
        }
        // Walk the iterations
        for (int intIndex = 0; intIndex < objTfsProject.IterationRootNodes.Count; intIndex++)
        {
            NodeModel objNode;
            IterationModel objIteration = new IterationModel();

            // Add the node
            objNode = objNewProject.Iterations.Add(null, objTfsProject.IterationRootNodes[intIndex].Id,
                                                   objTfsProject.IterationRootNodes[intIndex].Name, objIteration);
            // Assign the lazy objects
            objIteration.LazyTasks.LazyData = new Lazy<TaskModelCollection>(() => LoadTasksIteration(objNewProject, objNode.ID ?? 0));
        }
        // Add the lazy objects
        objNewProject.LazyTasks.LazyData = new Lazy<TaskModelCollection>(() => LoadTasks(objNewProject));
        // And add the project to the collection
        objColProjects.Add(objNewProject);
    }

    // Return the projects
    return objColProjects;
}
private async Task TrainProjectsAsync()
{
    this.progressControl.IsActive = true;

    bool trainingSucceeded = true;
    try
    {
        IterationModel iterationModel = await trainingApi.TrainProjectAsync(this.CurrentProject.Id);

        // Poll the iteration until training completes or fails.
        while (true)
        {
            iterationModel = await trainingApi.GetIterationAsync(this.CurrentProject.Id, iterationModel.Id);
            if (iterationModel.Status != "Training")
            {
                if (iterationModel.Status == "Failed")
                {
                    trainingSucceeded = false;
                }
                break;
            }
            await Task.Delay(500);
        }

        this.needsTraining = false;
    }
    catch (Exception ex)
    {
        await Util.GenericApiCallExceptionHandler(ex, "Failure requesting training");
    }

    this.progressControl.IsActive = false;

    if (!trainingSucceeded)
    {
        await new MessageDialog("Training failed.").ShowAsync();
    }
}
public IterationModel GetIteration(Guid collectionId, int projectId, string iterationPath)
{
    using (var server = GetServer())
    {
        TfsTeamProjectCollection collection = server.GetTeamProjectCollection(collectionId);
        WorkItemStore wiStore = collection.GetService<WorkItemStore>();

        string query = @"SELECT * " +
                       "FROM WorkItems " +
                       "WHERE ([System.WorkItemType] = 'Bug' OR [System.WorkItemType] = 'Product Backlog Item' OR [System.WorkItemType] = 'Task') " +
                       "AND [System.IterationPath] = '@IterationPath' " +
                       "ORDER BY [System.WorkItemType]";
        query = query.Replace("@IterationPath", iterationPath);
        var items = wiStore.Query(query);

        ICommonStructureService4 css = collection.GetService<ICommonStructureService4>();
        var path = GetFullIterationPath(iterationPath);
        NodeInfo pathRoot = css.GetNodeFromPath(path);

        IterationModel model = Map(pathRoot);
        model.WorkItems = Map(items);

        return model;
    }
}
private static ScenarioBenchmark AddEtwData(
    ScenarioBenchmark scenarioBenchmark,
    ScenarioExecutionResult scenarioExecutionResult,
    IReadOnlyCollection<string> processesOfInterest,
    IReadOnlyCollection<string> modulesOfInterest)
{
    var metricModels = scenarioExecutionResult.PerformanceMonitorCounters
        .Select(pmc => new MetricModel
        {
            DisplayName = pmc.DisplayName,
            Name = pmc.Name,
            Unit = pmc.Unit,
        });

    // Get the list of processes of interest.
    Console.WriteLine($"Parsing: {scenarioExecutionResult.EventLogFileName}");
    var processes = new SimpleTraceEventParser().GetProfileData(scenarioExecutionResult);

    // Extract the PMC data for each one of the processes.
    foreach (var process in processes)
    {
        if (!processesOfInterest.Any(p => p.Equals(process.Name, StringComparison.OrdinalIgnoreCase)))
        {
            continue;
        }

        var processTest = scenarioBenchmark.Tests
            .SingleOrDefault(t => t.Name == process.Name && t.Namespace == "");
        if (processTest == null)
        {
            processTest = new ScenarioTestModel(process.Name)
            {
                Namespace = "",
            };
            scenarioBenchmark.Tests.Add(processTest);

            // Add metrics definitions.
            processTest.Performance.Metrics.Add(ElapsedTimeMilliseconds);
            processTest.Performance.Metrics.AddRange(metricModels);
        }

        var processIterationModel = new IterationModel
        {
            Iteration = new Dictionary<string, double>()
        };
        processTest.Performance.IterationModels.Add(processIterationModel);

        processIterationModel.Iteration.Add(
            ElapsedTimeMilliseconds.Name, process.LifeSpan.Duration.TotalMilliseconds);

        // Add process metrics values.
        foreach (var pmcData in process.PerformanceMonitorCounterData)
        {
            processIterationModel.Iteration.Add(pmcData.Key.Name, pmcData.Value);
        }

        foreach (var module in process.Modules)
        {
            var moduleName = Path.GetFileName(module.FullName);
            if (modulesOfInterest.Any(m => m.Equals(moduleName, StringComparison.OrdinalIgnoreCase)))
            {
                var moduleTestName = $"{moduleName}";
                var moduleTest = scenarioBenchmark.Tests
                    .SingleOrDefault(t => t.Name == moduleTestName && t.Namespace == process.Name);
                if (moduleTest == null)
                {
                    moduleTest = new ScenarioTestModel(moduleTestName)
                    {
                        Namespace = process.Name,
                        Separator = "!",
                    };
                    scenarioBenchmark.Tests.Add(moduleTest);

                    // Add metrics definitions.
                    moduleTest.Performance.Metrics.AddRange(metricModels);
                }

                var moduleIterationModel = new IterationModel
                {
                    Iteration = new Dictionary<string, double>()
                };
                moduleTest.Performance.IterationModels.Add(moduleIterationModel);

                // Add module metrics values.
                foreach (var pmcData in module.PerformanceMonitorCounterData)
                {
                    moduleIterationModel.Iteration.Add(pmcData.Key.Name, pmcData.Value);
                }
            }
        }
    }

    return scenarioBenchmark;
}
private static void TestDir(XunitPerformanceHarness harness)
{
    string commandName = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "dir" : "ls";

    var testModel = new ScenarioTestModel(commandName);
    testModel.Performance.Metrics.Add(new MetricModel
    {
        Name = "ExecutionTime",
        DisplayName = "Execution Time",
        Unit = "ms"
    });

    void PreIteration(ScenarioTest scenarioTest)
    {
    }

    void PostIteration(ScenarioExecutionResult scenarioExecutionResult)
    {
        var elapsed = scenarioExecutionResult.ProcessExitInfo.ExitTime - scenarioExecutionResult.ProcessExitInfo.StartTime;

        var iteration = new IterationModel
        {
            Iteration = new Dictionary<string, double>()
        };
        iteration.Iteration.Add(testModel.Performance.Metrics[0].Name, elapsed.TotalMilliseconds);
        testModel.Performance.IterationModels.Add(iteration);
    }

    void PostRun(ScenarioBenchmark scenario)
    {
    }

    ProcessStartInfo processToMeasure;
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        processToMeasure = new ProcessStartInfo("cmd.exe", $"/c {commandName}");
    }
    else
    {
        processToMeasure = new ProcessStartInfo(commandName);
    }
    processToMeasure.RedirectStandardError = true;
    processToMeasure.RedirectStandardOutput = true;

    var scenarioTestConfiguration = new ScenarioTestConfiguration(Timeout, processToMeasure)
    {
        Iterations = Iterations,
        PreIterationDelegate = PreIteration,
        PostIterationDelegate = PostIteration,
        Scenario = new ScenarioBenchmark("ExecuteCommand")
    };
    scenarioTestConfiguration.Scenario.Tests.Add(testModel);
    scenarioTestConfiguration.TestName = commandName;

    harness.RunScenario(scenarioTestConfiguration, PostRun);
}
static void Main(string[] args)
{
    // You can either add your training key here, pass it on the command line, or type it in when the program runs
    string trainingKey = GetTrainingKey(trainingKeyString, args);

    // Create the API, passing in a credentials object that contains the training key
    TrainingApiCredentials trainingCredentials = new TrainingApiCredentials(trainingKey);
    TrainingApi trainingApi = new TrainingApi(trainingCredentials);

    // Create a new project
    Console.WriteLine("Creating new project:");
    var project = trainingApi.CreateProject(projectName);

    // Create some tags; you need at least two
    var tag1 = trainingApi.CreateTag(project.Id, "tag1");
    var tag2 = trainingApi.CreateTag(project.Id, "tag2");

    // Add some images to the tags
    Console.Write("\n\tProcessing images");

    // Upload using the path to the images, a reference to the training API, a reference to your project and a tag
    UploadImages(@"..\..\..\Images\1", trainingApi, project, new List<string>() { tag1.Id.ToString() });
    UploadImages(@"..\..\..\Images\1", trainingApi, project, new List<string>() { tag2.Id.ToString() });

    // Or upload them in a single batch
    //trainingApi.CreateImagesFromData(project.Id, japaneseCherryImages, new List<Guid>() { japaneseCherryTag.Id });

    // Now that there are images with tags, start training the project
    Console.WriteLine("\tStarting training");
    IterationModel iteration = null;
    try
    {
        iteration = trainingApi.TrainProject(project.Id);
    }
    catch (Exception e)
    {
        Console.WriteLine($"Training could not be completed. Error: {e.Message}");
    }

    if (iteration != null)
    {
        // The returned iteration will be in progress, and can be queried periodically to see when it has completed
        while (iteration.Status == "Training")
        {
            Thread.Sleep(1000);

            // Re-query the iteration to get its updated status
            iteration = trainingApi.GetIteration(project.Id, iteration.Id);
        }
        Console.WriteLine($"\tFinished training iteration {iteration.Id}");

        // The iteration is now trained. Make it the default project endpoint
        iteration.IsDefault = true;
        trainingApi.UpdateIteration(project.Id, iteration.Id, iteration);
        Console.WriteLine("Done!\n");

        // Now there is a trained endpoint, it can be used to make a prediction

        // Get the prediction key, which is used in place of the training key when making predictions
        //var account = trainingApi.GetAccountInfo();
        //var predictionKey = account.Keys.PredictionKeys.PrimaryKey;

        //// Create a prediction endpoint, passing in a prediction credentials object that contains the obtained prediction key
        //PredictionEndpointCredentials predictionEndpointCredentials = new PredictionEndpointCredentials(predictionKey);
        //PredictionEndpoint endpoint = new PredictionEndpoint(predictionEndpointCredentials);

        //// Make a prediction against the new project
        //Console.WriteLine("Making a prediction:");
        //var result = endpoint.PredictImage(project.Id, testImage);

        //// Loop over each prediction and write out the results
        //foreach (var c in result.Predictions)
        //{
        //    Console.WriteLine($"\t{c.Tag}: {c.Probability:P1}");
        //}
    }

    Console.ReadKey();
}
private async void TriggerActiveLearningButtonClicked(object sender, RoutedEventArgs e)
{
    this.activeLearningFlyout.Hide();

    var currentProject = ((ProjectViewModel)this.projectsComboBox.SelectedValue).Model;

    try
    {
        var tags = this.PredictionDataForRetraining.Where(d => d.HasTag).Select(d => d.TagId).ToList();
        if (tags.Any())
        {
            var test = await this.userProvidedTrainingApi.CreateImagesFromPredictionsAsync(
                currentProject.Id,
                new ImageIdCreateBatch
                {
                    TagIds = tags,
                    Ids = new List<Guid>(new Guid[] { this.PredictionDataForRetraining.First().PredictionResultId })
                });
        }
        else
        {
            await new MessageDialog("You need to select at least one Tag in order to save and re-train.").ShowAsync();
            return;
        }
    }
    catch (Exception ex)
    {
        await Util.GenericApiCallExceptionHandler(ex, "Failure adding image to the training set");
        return;
    }

    this.progressRing.IsActive = true;

    bool trainingSucceeded = true;
    try
    {
        IterationModel iterationModel = await userProvidedTrainingApi.TrainProjectAsync(currentProject.Id);

        while (true)
        {
            iterationModel = await userProvidedTrainingApi.GetIterationAsync(currentProject.Id, iterationModel.Id);
            if (iterationModel.Status != "Training")
            {
                if (iterationModel.Status == "Failed")
                {
                    trainingSucceeded = false;
                }
                break;
            }
            await Task.Delay(500);
        }
    }
    catch (Exception ex)
    {
        await Util.GenericApiCallExceptionHandler(ex, "The image was added to the training set, but re-training failed. You can try re-training later via the Custom Vision Setup page.");
    }

    if (!trainingSucceeded)
    {
        await new MessageDialog("The image was added to the training set, but re-training failed. You can try re-training later via the Custom Vision Setup page.").ShowAsync();
    }

    this.progressRing.IsActive = false;
}
public void Run([CallerMemberName] string callerName = null)
{
    // Handle the case where we're running inside VS (or via dotnet test), so the Main method hasn't run to initialize the perf harness.
    // In the future we may want to do this via an xUnit fixture, which would also let us call the dispose method to write the results
    // afterwards.
    if (_performanceHarness == null)
    {
        Program.HandlePerfArgs(new List<string>() { "--iterations", "1" });
    }

    TestName = TestName ?? callerName;

    int currentIteration = 0;

    var durationTestModel = new ScenarioTestModel(TestName);
    durationTestModel.Performance.Metrics.Add(new MetricModel
    {
        Name = "ExecutionTime",
        DisplayName = "Execution Time",
        Unit = "ms"
    });

    string testIdentifier = _performanceHarness.Configuration.RunId + "-" + ScenarioName + " - " + TestName;
    string testResultsFolder = Path.Combine(_performanceHarness.OutputDirectory, testIdentifier + "-traces");
    if (!Directory.Exists(testResultsFolder))
    {
        Directory.CreateDirectory(testResultsFolder);
    }

    using (FolderSnapshot snapshot = FolderSnapshot.Create(TestFolder))
    {
        void PreIteration(ScenarioTest scenarioTest)
        {
            if (currentIteration > 0)
            {
                snapshot.Restore();
            }

            // TODO: Optionally kill processes such as MSBuild.exe and VBCSCompiler.exe
            // We should always do this before the first iteration, but it should be configurable whether we
            // do it between iterations. This is because when testing "warm" / incremental builds, we would
            // expect the persistent processes to already be running and have already built the project
        }

        void PostIteration(ScenarioExecutionResult scenarioExecutionResult)
        {
            var elapsed = scenarioExecutionResult.ProcessExitInfo.ExitTime - scenarioExecutionResult.ProcessExitInfo.StartTime;

            var durationIteration = new IterationModel
            {
                Iteration = new Dictionary<string, double>()
            };
            durationIteration.Iteration.Add(durationTestModel.Performance.Metrics[0].Name, elapsed.TotalMilliseconds);
            durationTestModel.Performance.IterationModels.Add(durationIteration);

            if (GetPerformanceSummary)
            {
                string performanceSummaryFileDestination = Path.Combine(testResultsFolder, $"{testIdentifier}({currentIteration}).txt");
                File.Move(Path.Combine(TestFolder, "PerformanceSummary.txt"), performanceSummaryFileDestination);
            }

            if (GetBinLog)
            {
                string binlogDestination = Path.Combine(testResultsFolder, $"{testIdentifier}({currentIteration}).binlog");
                File.Move(Path.Combine(TestFolder, "msbuild.binlog"), binlogDestination);
            }

            currentIteration++;
        }

        void PostRun(ScenarioBenchmark scenario)
        {
        }

        if (GetPerformanceSummary)
        {
            ProcessToMeasure.Arguments += " /flp9:PerformanceSummary;v=q;logfile=\"" + Path.Combine(TestFolder, "PerformanceSummary.txt") + "\"";
        }

        if (GetBinLog)
        {
            ProcessToMeasure.Arguments += " /bl:\"" + Path.Combine(TestFolder, "msbuild.binlog") + "\"";
        }

        var scenarioTestConfiguration = new ScenarioTestConfiguration(TimeSpan.FromMilliseconds(Timeout.TotalMilliseconds), ProcessToMeasure);
        scenarioTestConfiguration.Iterations = NumberOfIterations;
        scenarioTestConfiguration.PreIterationDelegate = PreIteration;
        scenarioTestConfiguration.PostIterationDelegate = PostIteration;
        scenarioTestConfiguration.SaveResults = false;
        scenarioTestConfiguration.Scenario = GetScenarioBenchmark(ScenarioName ?? TestName);
        scenarioTestConfiguration.Scenario.Tests.Add(durationTestModel);
        scenarioTestConfiguration.TestName = TestName;

        _performanceHarness.RunScenario(scenarioTestConfiguration, PostRun);
    }
}
/// <summary>
/// Updates an iteration's information
/// </summary>
/// <param name="projectId"></param>
/// <param name="iterationId"></param>
/// <param name="iteration"></param>
public void UpdateIteration(Guid projectId, Guid iterationId, IterationModel iteration)
{
    trainingApi.UpdateIteration(projectId, iterationId, iteration);
}