/// <summary>
/// Initializes a new instance of the <see cref="ExperimentPage"/> class,
/// loads the named experiment and immediately starts running it.
/// </summary>
/// <param name="name">The name of the experiment to load.</param>
public ExperimentPage(string name)
{
    InitializeComponent();
    // Register this page as the facilitator's UI before the run starts so
    // that run callbacks have a target.
    Facilitator.UI = this;
    _experiment = Experiment.Load(name);
    Facilitator.RunExperiment(_experiment);
}
/// <summary>
/// Demo routine: builds 100 experiments spread across three app versions
/// and groups them into a dictionary keyed by version.
/// </summary>
public static void Test()
{
    var list = new List<Experiment>();
    for (int i = 0; i < 100; i++)
    {
        var exp = new Experiment
        {
            ExpNo = Guid.NewGuid().ToString(),
            LayerNo = i.ToString(),
        };
        // Partition by index: 0-29 -> 7.2.3, 30-59 -> 7.2.4, 60-99 -> 7.3.
        if (i < 30)
        {
            exp.AppVersion = "7.2.3";
        }
        else if (i < 60)
        {
            exp.AppVersion = "7.2.4";
        }
        else
        {
            exp.AppVersion = "7.3";
        }
        list.Add(exp);
    }

    // ToDictionary replaces the manual GroupBy + foreach copy loop.
    Dictionary<string, List<Experiment>> result =
        list.GroupBy(g => g.AppVersion)
            .ToDictionary(g => g.Key, g => g.ToList());

    Console.WriteLine("a");
}
// Table-driven production for the "Experiment" nonterminal. Loops on the
// interpreter (Syn) until it signals end-of-production (case 0).
private static void NT_Experiment()
{
    Experiment e = new Experiment();
    for (;;)
    {
        switch (Syn.Interpret())
        {
            case 0: // end of this production
                return;
            case 1: // identifier token: capture its text into the static 'name'
                Lex.GETidentifierAttr(out name);
                break;
            case 2: // SEM
                // Semantic action: name the experiment and register it.
                // NOTE(review): the same 'e' instance continues to be mutated by
                // the ingredient productions below after it has been added to
                // 'experiments' — confirm the grammar fires this action exactly
                // once per experiment.
                e.Name = name;
                experiments.Add(e);
                break;
            case 3:
                NT_Ingredient(ref e);
                break;
            case 4:
                NT_Ingredient(ref e);
                break;
        } // switch
    } // for
}
/// <summary>
/// Handles the Create button: inserts a new experiment for the current
/// construct, rebinds the experiment list, clears the input controls and
/// refreshes the displayed construct status.
/// </summary>
protected void btnCreate_Click(object sender, EventArgs e)
{
    Experiment exp = new Experiment()
    {
        Operator = OperatorBLL.GetByID(Convert.ToInt32(ddlOperators.SelectedValue)),
        SeedLotNumber = tbSeedLotNumber.Text,
        Explant = tbExplant.Text,
        // NOTE(review): decimal.Parse throws on bad input — presumably the page
        // validates tbOpticalDensity before this handler runs; confirm.
        OpticalDensity = decimal.Parse(tbOpticalDensity.Text),
        Construct = ConstructBLL.GetByID(ConstructID)
    };
    ExperimentBLL.Insert(exp);

    // Re-fetch the construct once after the insert and reuse it for both the
    // experiment list and the status (the original fetched it twice here).
    var construct = ConstructBLL.GetByID(ConstructID);
    lvExperiments.DataSource = construct.Experiments;
    lvExperiments.DataBind();

    // clear out the controls
    ddlOperators.SelectedIndex = -1;
    tbSeedLotNumber.Text = string.Empty;
    tbExplant.Text = string.Empty;
    tbOpticalDensity.Text = string.Empty;

    // update the status in case something changed during the insert
    litStatus.Text = construct.Status.Name;
}
/// <summary>
/// Singleton guard: the first component to wake claims the instance;
/// later duplicates just log and do nothing.
/// </summary>
public void Awake()
{
    if (_instance == null)
    {
        _instance = this;
    }
    else
    {
        Debug.Log ("Instance already exists!");
    }
}
/// <summary>
/// Shared "act" step: builds an experiment around a mocked trial runner and
/// invokes RunTrials with the configured trial count.
/// </summary>
public void WhenRunTrialsIsCalled()
{
    _trialRunner = new Mock<ITrialRunner>();

    var sut = new Experiment(_trialRunner.Object);
    sut.RunTrials(_numberOfTrials);
}
/// <summary>
/// Runs the code example.
/// </summary>
/// <param name="user">The AdWords user.</param>
/// <param name="experimentId">Id of the experiment to be promoted.</param>
public void Run(AdWordsUser user, long experimentId)
{
    // Get the ExperimentService.
    ExperimentService experimentService =
        (ExperimentService) user.GetService(AdWordsService.v201601.ExperimentService);

    // Set experiment's status to PROMOTED.
    Experiment experiment = new Experiment()
    {
        id = experimentId,
        status = ExperimentStatus.PROMOTED
    };

    // Create the operation.
    ExperimentOperation operation = new ExperimentOperation()
    {
        @operator = Operator.SET,
        operand = experiment
    };

    try
    {
        // Update the experiment.
        ExperimentReturnValue retVal = experimentService.mutate(
            new ExperimentOperation[] { operation });

        // Display the results.
        if (retVal != null && retVal.value != null && retVal.value.Length > 0)
        {
            Experiment promotedExperiment = retVal.value[0];
            Console.WriteLine("Experiment with name = \"{0}\" and id = \"{1}\" was promoted.\n",
                promotedExperiment.name, promotedExperiment.id);
        }
        else
        {
            Console.WriteLine("No experiments were promoted.");
        }
    }
    catch (Exception e)
    {
        // Wrap so the original failure survives as the inner exception.
        throw new System.ApplicationException("Failed to promote experiment.", e);
    }
}
/// <summary>
/// Persisting a freshly constructed experiment must not throw.
/// </summary>
public void Saves_experiment()
{
    Assert.DoesNotThrow(() =>
    {
        var repository = new SqlExperimentRepository(() => UnitOfWork.Current);
        var newExperiment = new Experiment("This is a test", "This is a test description");

        repository.Save(newExperiment);
    });
}
// Use this for initialization
void Start()
{
    objsInSceneDict = new Dictionary<String, GameObject>();
    exp = GameObject.FindGameObjectWithTag("Experiment").GetComponent<Experiment>();

    // In replay mode, rebuild the scene from the recorded data right away.
    if (exp.isReplay)
    {
        ReplayScene();
    }
}
/// <summary>
/// Scaled 2D Poisson solver: stores the experiment, decides the top boundary
/// condition, and computes the z-direction scaling factor.
/// </summary>
/// <param name="exp">Experiment providing layer structure and grid dimensions.</param>
/// <param name="using_external_code">Passed through to the base solver.</param>
/// <param name="input">Solver configuration, passed through to the base solver.</param>
public TwoD_PoissonSolver_Scaled(Experiment exp, bool using_external_code, Dictionary<string, object> input)
    : base(using_external_code, input)
{
    this.exp = exp;

    // check if the top layer is air... if so, we need to use natural boundary conditions on the upper surface (which must be zero)
    natural_topbc = (exp.Layers[exp.Layers.Length - 1].Material == Material.Air);

    // calculate scaling factor w = a * z such that the z dimension has the same length as the y dimension
    z_scaling = (exp.Ny_Pot * exp.Dy_Pot) / (exp.Nz_Pot * exp.Dz_Pot);
}
// Starts a named experiment bound to this session.
// NOTE(review): 'allExperients' is re-created on every call, discarding any
// previously started experiments — the sibling overload only appends.
// Confirm whether the reset here is intentional.
public void StartExperiment(string name)
{
    allExperients = new List<Experiment>();
    curExperiment ++;
    print("CreateExperiment: " + curExperiment.ToString());

    Experiment temp = new Experiment();
    temp.session = this;
    temp.name = name;
    allExperients.Add(temp);
}
/// <summary>
/// Creates a new experiment ready for Start Task;
/// </summary>
/// <param name="experimentName">Display name for the new experiment.</param>
public void StartExperiment(string experimentName)
{
    //Creates
    curExperiment ++;
    print("CreateExperiment: " + curExperiment.ToString());

    var created = new Experiment();
    created.session = this;
    created.name = experimentName;
    created.Awake();

    allExperients.Add(created);
}
/// <summary>
/// Asserts that a generated experiment matches its template: TOF mass range,
/// accumulation time (within a delta), source parameters, and per-mass-range
/// compound parameters, with different parameter sets for QSTAR instruments.
/// </summary>
private static void CompareExperiments(Experiment templateExpt, Experiment myExpt, bool isQstar)
{
    var tofProperties_template = (ITOFProperties)templateExpt;
    var tofProperties_mine = (ITOFProperties)myExpt;
    Assert.AreEqual(tofProperties_template.TOFMassMin, tofProperties_mine.TOFMassMin);
    Assert.AreEqual(tofProperties_template.TOFMassMax, tofProperties_mine.TOFMassMax);
    // It looks like the accumulation time for an experiment is not updated if the value provided
    // to the setter method is within a certain delta. So the accumulation time in the template
    // experiment may not be identical to the one that we generate.
    Assert.AreEqual(tofProperties_template.AccumTime, tofProperties_mine.AccumTime, 0.1);

    // Ion-source parameters must match the template exactly.
    var srcParamsTbl_template = (ParamDataColl)templateExpt.SourceParamsTbl;
    var srcParamsTbl_mine = (ParamDataColl)myExpt.SourceParamsTbl;
    CompareParam("GS1", srcParamsTbl_template, srcParamsTbl_mine);
    CompareParam("GS2", srcParamsTbl_template, srcParamsTbl_mine);
    CompareParam("CUR", srcParamsTbl_template, srcParamsTbl_mine);
    CompareParam("TEM", srcParamsTbl_template, srcParamsTbl_mine);

    Assert.AreEqual(templateExpt.MassRangesCount, myExpt.MassRangesCount);
    for (int i = 0; i < templateExpt.MassRangesCount; i++)
    {
        var compoundDepParams_template = (ParamDataColl)
            ((MassRange)(templateExpt.GetMassRange(i))).MassDepParamTbl;
        var compoundDepParams_mine = (ParamDataColl)
            ((MassRange)(myExpt.GetMassRange(i))).MassDepParamTbl;
        // DP and CE are only checked to be non-zero in the generated experiment,
        // not compared against the template.
        AssertParamNotZero("DP", compoundDepParams_mine);
        AssertParamNotZero("CE", compoundDepParams_mine);
        CompareParam("IRD", compoundDepParams_template, compoundDepParams_mine);
        CompareParam("IRW", compoundDepParams_template, compoundDepParams_mine);
        if (!isQstar)
        {
            CompareParam("CES", compoundDepParams_template, compoundDepParams_mine);
        }
        else
        {
            // QSTAR instruments carry FP/DP2/CAD instead of CES.
            CompareParam("FP", compoundDepParams_template, compoundDepParams_mine);
            CompareParam("DP2", compoundDepParams_template, compoundDepParams_mine);
            CompareParam("CAD", compoundDepParams_template, compoundDepParams_mine);
        }
    }
    if (!isQstar)
    {
        Assert.AreEqual(((ITOFProperties2)templateExpt).HighSensitivity,
            ((ITOFProperties2)myExpt).HighSensitivity);
    }
}
/// <summary>
/// Saving twice — first a new experiment, then one carrying the persisted
/// id — must update in place rather than insert a duplicate row.
/// </summary>
public void Upserts_experiment()
{
    Assert.DoesNotThrow(() =>
    {
        var repository = new SqlExperimentRepository(() => UnitOfWork.Current);

        // First save: insert.
        var experiment = new Experiment("This is a test", "This is a test description");
        repository.Save(experiment);

        // Second save: same id (1) should overwrite, not add.
        experiment = new Experiment(1, "This is a test", 1, experiment.CreatedAt, DateTime.Now);
        repository.Save(experiment);

        Assert.AreEqual(1, repository.GetAll().Count());
    });
}
/// <summary>
/// 2D spin-orbit DFT solver: precomputes the finite-difference hopping
/// energies, the spin-orbit coupling coefficients and a 1D prefactor.
/// Only electron carriers are supported.
/// </summary>
public TwoD_SO_DFTSolver(Experiment exp, Carrier carrier_type)
    : base(exp, carrier_type)
{
    // Hopping energies t = -hbar^2 / (2 m d^2) on the finite-difference grid.
    tx = -0.5 * Physics_Base.hbar * Physics_Base.hbar / (mass * dx * dx);
    ty = -0.5 * Physics_Base.hbar * Physics_Base.hbar / (mass * dy * dy);

    double r_so = 117.1 * (0.01 / Physics_Base.q_e); // r^{6c6c}_{41} for InAs as reported by Winkler (Table 6.6, p.87)
    // NOTE: the result there is in e A^2... to convert to nm^2, we divide by shown factors
    alpha = r_so / Physics_Base.q_e;
    theta_x = r_so * mass * dx / (Physics_Base.q_e / Physics_Base.hbar);
    theta_y = -1.0 * r_so * mass * dy / (Physics_Base.q_e / Physics_Base.hbar);

    g_1D = 0.5 / Math.PI;

    // Hole carriers are not implemented by this solver.
    if (carrier_type == Carrier.hole)
        throw new NotImplementedException();
}
/// <summary>
/// Verifies that an exception thrown by the user-supplied AreEqual comparer
/// is routed to OnError, recorded in every candidate's result, published,
/// and does not escape Experiment.Run.
/// </summary>
public void AreEqual(Mock<ISciencePublisher> publisher, Mock<IExperimentSteps<string, string>> steps,
    Mock<IExperimentState> state, string name, string ctrlResult, string candResult, string candName,
    InvalidProgramException excp, string errMsg)
{
    //Setup
    // Comparer that always throws — the fault under test.
    AreEqualDelegate<string> areEqual = (ctrl, cand) => { throw excp; };
    bool excpPassed = false;
    // OnError callback: record whether the error carries the thrown exception
    // and the failing step, then publish the error message.
    Action<ErrorEventArgs> onError = (e) =>
    {
        excpPassed = e.ExperimentError.LastException == excp
            && e.ExperimentError.LastStep == Operations.AreEqual;
        e.Publisher.Publish(errMsg, e.State);
    };
    steps.DefaultValue = DefaultValue.Empty;
    steps.SetupAllProperties();
    steps.SetupGet(x => x.AreEqual).Returns(areEqual);
    steps.Setup(x => x.OnError(It.IsAny<ErrorEventArgs>()))
        .Callback(onError);
    SetupControlAndCandidate(steps, ctrlResult, candResult, candName);
    state.SetupAllProperties();
    SetupStateSnapshot(state);

    //Exercise
    var sut = new Experiment<string, string>(name, publisher.Object, state.Object, steps.Object, true);
    var result = sut.Run(); //no exception

    //Verify
    steps.Verify(x => x.AreEqual, Times.AtLeastOnce);
    steps.Verify(x => x.OnError(It.Is<ErrorEventArgs>(
        a => a.State.CurrentStep == Operations.OnError
        && a.ExperimentError.LastException == excp
        && a.ExperimentError.LastStep == Operations.AreEqual)), Times.AtLeastOnce);
    //Results correct
    // The control result is intact; every candidate carries the comparer's exception.
    publisher.Verify(x => x.Publish(It.Is<IExperimentResult<string>>(
        r => !r.Control.ExceptionThrown && r.Control.Value == ctrlResult
        && r.Candidates.All(kvp => kvp.Value.ExceptionThrown
            && kvp.Value.ExperimentError.LastException == excp
            && kvp.Value.ExperimentError.LastStep == Operations.AreEqual))), Times.Once);
    //Message published
    publisher.Verify(x => x.Publish(errMsg, It.Is<IExperimentState>(
        y => y.CurrentStep == Operations.OnError)), Times.AtLeastOnce);
    Assert.True(excpPassed);
}
/// <summary>
/// Script entry point: runs a GA on a TSP instance in five batch runs with
/// doubling population sizes, then shows the results in a bubble chart.
/// </summary>
public override void Main()
{
    // GA: 50 generations, starting population of 10.
    var ga = new GeneticAlgorithm
    {
        MaximumGenerations = { Value = 50 },
        PopulationSize = { Value = 10 },
        Problem = new TravelingSalesmanProblem()
    };

    // Five batch runs of 10 repetitions each, doubling the population size each time.
    var experiment = new Experiment();
    for (int i = 0; i < 5; i++)
    {
        experiment.Optimizers.Add(new BatchRun() { Optimizer = (IOptimizer)ga.Clone(), Repetitions = 10 });
        ga.PopulationSize.Value *= 2;
    }

    experiment.ExecutionStateChanged += OnExecutionStateChanged;
    experiment.Start();
    // Block until the execution-state handler signals completion.
    mutex.WaitOne();

    vars.experiment = experiment;
    MainFormManager.MainForm.ShowContent(experiment);

    // Open the bubble chart over the run collection and preselect the axes.
    var viewHost = (ViewHost)MainFormManager.MainForm.ShowContent(experiment.Runs, typeof(RunCollectionBubbleChartView));
    var bubbleChart = (UserControl)(viewHost.ActiveView);
    bubbleChart.Controls.OfType<ComboBox>().Single(x => x.Name == "yAxisComboBox").SelectedItem = "BestQuality";
    bubbleChart.Controls.OfType<ComboBox>().Single(x => x.Name == "xAxisComboBox").SelectedItem = "PopulationSize";
}
// (Removed a large tract of commented-out legacy code: an unused
// ConvertToString helper and an older block-based replace_checksat.)

/// <summary>
/// Runs a single job of an experiment: stages the input file, launches the
/// solver binary with redirected output, enforces time/memory/output limits,
/// and records the result. Infrastructure failures are logged and the whole
/// job is retried from scratch.
/// </summary>
void runJob(Experiment e, Job j)
{
retry_from_scratch:
    try
    {
        getBinary(e);

        Result r = new Result();
        r.j = j;

        try
        {
            File.Copy(j.filename, j.localFilename, true);
            if (e.custom_check_sat != null)
                replace_checksat(e, j);
        }
        catch (System.OutOfMemoryException)
        {
            r.exitCode = "MEMORY";
            r.runtime = 0;
            results.Add(r);
            return;
        }

        int output_limit = 134217728; // 128 MB
        int error_limit = 262144; // 256 KB

        StreamWriter out_writer = new StreamWriter(r.stdout);
        StreamWriter err_writer = new StreamWriter(r.stderr);

        Process p = new Process();
        p.StartInfo.FileName = e.localExecutable;
        p.StartInfo.WorkingDirectory = e.localDir;
        p.StartInfo.Arguments = j.localFilename + " " + e.Parameters;
        p.StartInfo.CreateNoWindow = true;
        p.StartInfo.RedirectStandardOutput = true;
        p.StartInfo.RedirectStandardError = true;
        p.StartInfo.UseShellExecute = false;
        // The limit counters are decremented inside WriteToStream; reaching
        // zero marks the job as having exceeded its output budget.
        p.OutputDataReceived += (sender, args) => WriteToStream(sender, args, out_writer, ref output_limit);
        p.ErrorDataReceived += (sender, args) => WriteToStream(sender, args, err_writer, ref error_limit);

        bool exhausted_time = false, exhausted_memory = false;

        // Batch scripts can't be launched directly with redirected streams;
        // route them through cmd.exe.
        if (e.localExecutable.EndsWith(".cmd") || e.localExecutable.EndsWith(".bat"))
        {
            p.StartInfo.FileName = @"C:\Windows\System32\cmd.exe";
            p.StartInfo.Arguments = "/c " + e.localExecutable + " " + p.StartInfo.Arguments;
        }

    retry:
        try
        {
            p.Start();
            p.BeginOutputReadLine();
            p.BeginErrorReadLine();
        }
        catch (System.ComponentModel.Win32Exception ex)
        {
            if (ex.Message == "The process cannot access the file because it is being used by another process")
            {
                Console.WriteLine("Retrying to execute binary...");
                Thread.Sleep(500);
                goto retry;
            }
            else
                throw; // was 'throw ex;' — rethrow without resetting the stack trace
        }

        try
        {
            // Poll every 500 ms, enforcing the time, memory and output limits.
            do
            {
                p.Refresh();
                if (!p.HasExited)
                {
                    if (processTime(p).TotalSeconds >= e.timeout.TotalSeconds)
                    {
                        Console.WriteLine("Job timed out; killing.");
                        exhausted_time = true;
                        processKill(p);
                    }
                    else if (e.memout != 0 && processMemory(p) > e.memout)
                    {
                        Console.WriteLine("Job uses too much memory; killing.");
                        exhausted_memory = true;
                        processKill(p);
                    }
                    else if (output_limit <= 0 || error_limit <= 0)
                    {
                        Console.WriteLine("Job produced too much output; killing.");
                        processKill(p);
                        throw new Exception("Job produced too much output.");
                    }
                }
            }
            while (!p.WaitForExit(500));
        }
        catch (InvalidOperationException ex)
        {
            // The process can vanish between Refresh() and the queries above.
            Console.WriteLine("Invalid Operation: " + ex.Message);
            Console.WriteLine("Assuming process has ended.");
        }

        p.WaitForExit();
        int excode = p.ExitCode;
        if (excode == -1073741515) // 0xC0000135
            logInfrastructureError(j, "Binary could not be executed.");
        double runtime = (exhausted_time ? e.timeout.TotalSeconds : processTime(p).TotalSeconds);
        p.Close();

        Thread.Sleep(500); // Give the asynch stdout/stderr events a chance to finish.
        out_writer.Flush();
        err_writer.Flush();

        r.exitCode = ((exhausted_time) ? "TIME" : (exhausted_memory) ? "MEMORY" : excode.ToString());
        r.runtime = runtime;
        results.Add(r);
    }
    catch (Exception ex)
    {
        // Anything unexpected counts as an infrastructure error; if it was
        // logged successfully, restart the whole job from scratch.
        if (logInfrastructureError(j, ex.Message + "\n" + ex.StackTrace))
            goto retry_from_scratch;
    }
}
/// <summary>
/// Rewrites the job's local input file, replacing every "(check-sat)" line
/// fragment with the experiment's custom check-sat command. Writes to a temp
/// file first so the original is only overwritten after a complete rewrite.
/// </summary>
void replace_checksat(Experiment e, Job j)
{
    string tmpf = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());

    // 'using' guarantees all four handles are closed even if reading or
    // writing throws (the original leaked them on any exception).
    using (FileStream f = new FileStream(j.localFilename, FileMode.Open, FileAccess.Read))
    using (FileStream ft = new FileStream(tmpf, FileMode.Create, FileAccess.Write))
    using (StreamReader fr = new StreamReader(f))
    using (StreamWriter ftw = new StreamWriter(ft))
    {
        while (!fr.EndOfStream)
        {
            string s = fr.ReadLine();
            ftw.WriteLine(s.Replace("(check-sat)", e.custom_check_sat));
        }
    }

    File.Copy(tmpf, j.localFilename, true);
    try { File.Delete(tmpf); }
    catch { } // best-effort temp-file cleanup
}
/// <summary>
/// Builds the Friedman-test table writer for one experiment.
/// </summary>
/// <param name="experiment">Experiment whose results are tabulated.</param>
/// <param name="basePath">Root directory for the generated tables.</param>
/// <param name="indicators">Nested indicator-value lookup (three string keys deep).</param>
public FriedmanTables(Experiment experiment, string basePath,
    IDictionary <string, IDictionary <string, IDictionary <string, List <double> > > > indicators)
{
    this.experiment = experiment;
    this.basePath = basePath;
    this.indicators = indicators;
}
/// <summary>
/// Resolves a plot-selector cell into the experiment's plots: "all" or "avg"
/// selects every plot; otherwise the text is parsed as a comma-separated
/// list of column numbers and only matching plots are returned.
/// </summary>
internal static IEnumerable <Plot> FindPlots(this IRemsDbContext context, object content, Experiment experiment)
{
    // Find all the plots in the experiment
    var plots = context.Plots.Where(p => p.Treatment.Experiment == experiment);

    // Invariant lowering: the selector is a machine token, not locale text
    // (culture-sensitive ToLower mis-handles e.g. the Turkish dotted I).
    var text = content.ToString().ToLowerInvariant();
    if (text == "all" || text == "avg")
    {
        return(plots);
    }

    var ids = text.Split(',').Select(i => Convert.ToInt32(i));
    return(plots.Where(p => ids.Contains(p.Column.GetValueOrDefault())));
}
/// <summary>
/// Runs the code example.
/// </summary>
/// <param name="user">The AdWords user.</param>
/// <param name="campaignId">Id of the campaign to which experiments are
/// added.</param>
/// <param name="adGroupId">Id of the ad group to which experiments are
/// added.</param>
/// <param name="criterionId">Id of the criterion for which experiments
/// are added.</param>
public void Run(AdWordsUser user, long campaignId, long adGroupId, long criterionId)
{
    // Get the ExperimentService.
    ExperimentService experimentService =
        (ExperimentService)user.GetService(AdWordsService.v201601.ExperimentService);

    // Get the AdGroupService.
    AdGroupService adGroupService =
        (AdGroupService)user.GetService(AdWordsService.v201601.AdGroupService);

    // Get the AdGroupCriterionService.
    AdGroupCriterionService adGroupCriterionService =
        (AdGroupCriterionService)user.GetService(AdWordsService.v201601.AdGroupCriterionService);

    // Create the experiment, splitting 10% of queries to it, starting tomorrow.
    // NOTE(review): dates are built from local DateTime.Now — confirm this
    // matches the account's expected timezone.
    Experiment experiment = new Experiment();
    experiment.campaignId = campaignId;
    experiment.name = "Interplanetary Cruise #" + ExampleUtilities.GetRandomString();
    experiment.queryPercentage = 10;
    experiment.startDateTime = DateTime.Now.AddDays(1).ToString("yyyyMMdd HHmmss");

    // Optional: Set the end date.
    experiment.endDateTime = DateTime.Now.AddDays(30).ToString("yyyyMMdd HHmmss");

    // Optional: Set the status.
    experiment.status = ExperimentStatus.ENABLED;

    // Create the operation.
    ExperimentOperation experimentOperation = new ExperimentOperation();
    experimentOperation.@operator = Operator.ADD;
    experimentOperation.operand = experiment;

    try
    {
        // Add the experiment.
        ExperimentReturnValue experimentRetVal = experimentService.mutate(
            new ExperimentOperation[] { experimentOperation });

        // Display the results.
        if (experimentRetVal != null && experimentRetVal.value != null &&
            experimentRetVal.value.Length > 0)
        {
            long experimentId = 0;

            Experiment newExperiment = experimentRetVal.value[0];
            Console.WriteLine("Experiment with name = \"{0}\" and id = \"{1}\" was added.\n",
                newExperiment.name, newExperiment.id);
            experimentId = newExperiment.id;

            // Set ad group for the experiment.
            AdGroup adGroup = new AdGroup();
            adGroup.id = adGroupId;

            // Create experiment bid multiplier rule that will modify ad group bid
            // for the experiment.
            ManualCPCAdGroupExperimentBidMultipliers adGroupBidMultiplier =
                new ManualCPCAdGroupExperimentBidMultipliers();
            adGroupBidMultiplier.maxCpcMultiplier = new BidMultiplier();
            adGroupBidMultiplier.maxCpcMultiplier.multiplier = 1.5;

            // Set experiment data to the ad group.
            AdGroupExperimentData adGroupExperimentData = new AdGroupExperimentData();
            adGroupExperimentData.experimentId = experimentId;
            adGroupExperimentData.experimentDeltaStatus = ExperimentDeltaStatus.MODIFIED;
            adGroupExperimentData.experimentBidMultipliers = adGroupBidMultiplier;
            adGroup.experimentData = adGroupExperimentData;

            // Create the operation.
            AdGroupOperation adGroupOperation = new AdGroupOperation();
            adGroupOperation.operand = adGroup;
            adGroupOperation.@operator = Operator.SET;

            // Update the ad group.
            AdGroupReturnValue adGroupRetVal = adGroupService.mutate(
                new AdGroupOperation[] { adGroupOperation });

            // Display the results.
            if (adGroupRetVal != null && adGroupRetVal.value != null &&
                adGroupRetVal.value.Length > 0)
            {
                AdGroup updatedAdGroup = adGroupRetVal.value[0];
                Console.WriteLine("Ad group with name = \"{0}\", id = \"{1}\" and status = \"{2}\" " +
                    "was updated for the experiment.\n", updatedAdGroup.name, updatedAdGroup.id,
                    updatedAdGroup.status);
            }
            else
            {
                Console.WriteLine("No ad groups were updated.");
            }

            // Set ad group criteria for the experiment.
            Criterion criterion = new Criterion();
            criterion.id = criterionId;

            BiddableAdGroupCriterion adGroupCriterion = new BiddableAdGroupCriterion();
            adGroupCriterion.adGroupId = adGroupId;
            adGroupCriterion.criterion = criterion;

            // Create experiment bid multiplier rule that will modify criterion bid
            // for the experiment.
            ManualCPCAdGroupCriterionExperimentBidMultiplier bidMultiplier =
                new ManualCPCAdGroupCriterionExperimentBidMultiplier();
            bidMultiplier.maxCpcMultiplier = new BidMultiplier();
            bidMultiplier.maxCpcMultiplier.multiplier = 1.5;

            // Set experiment data to the criterion.
            BiddableAdGroupCriterionExperimentData adGroupCriterionExperimentData =
                new BiddableAdGroupCriterionExperimentData();
            adGroupCriterionExperimentData.experimentId = experimentId;
            adGroupCriterionExperimentData.experimentDeltaStatus = ExperimentDeltaStatus.MODIFIED;
            adGroupCriterionExperimentData.experimentBidMultiplier = bidMultiplier;
            adGroupCriterion.experimentData = adGroupCriterionExperimentData;

            // Create the operation.
            AdGroupCriterionOperation adGroupCriterionOperation = new AdGroupCriterionOperation();
            adGroupCriterionOperation.operand = adGroupCriterion;
            adGroupCriterionOperation.@operator = Operator.SET;

            // Update the ad group criteria.
            AdGroupCriterionReturnValue adGroupCriterionRetVal = adGroupCriterionService.mutate(
                new AdGroupCriterionOperation[] { adGroupCriterionOperation });

            // Display the results.
            if (adGroupCriterionRetVal != null && adGroupCriterionRetVal.value != null &&
                adGroupCriterionRetVal.value.Length > 0)
            {
                AdGroupCriterion updatedAdGroupCriterion = adGroupCriterionRetVal.value[0];
                Console.WriteLine("Ad group criterion with ad group id = \"{0}\", criterion id = " +
                    "\"{1}\" and type = \"{2}\" was updated for the experiment.\n",
                    updatedAdGroupCriterion.adGroupId, updatedAdGroupCriterion.criterion.id,
                    updatedAdGroupCriterion.criterion.CriterionType);
            }
            else
            {
                Console.WriteLine("No ad group criteria were updated.");
            }
        }
        else
        {
            Console.WriteLine("No experiments were added.");
        }
    }
    catch (Exception e)
    {
        throw new System.ApplicationException("Failed to add experiment.", e);
    }
}
/// <summary>
/// Creates an experiment from the posted DTO: validates the parameter set,
/// runs the local MACI script check, builds the parameter/value graph,
/// expands all instance permutations, persists everything and triggers
/// scaling. Returns the new experiment and first-instance ids.
/// </summary>
/// <param name="requestData">Experiment definition posted by the client.</param>
public IActionResult Create([FromBody] ExperimentCreateDto requestData)
{
    /* Performance statistics */
    Stopwatch sw = new Stopwatch();
    sw.Start();

    if (!ModelState.IsValid)
    {
        return(BadRequest(ModelState));
    }
    // Duplicate parameter names would make permutation expansion ambiguous.
    if (requestData.Parameters.Select(p => p.Name).Distinct().Count() != requestData.Parameters.Count)
    {
        return(BadRequest("Parameter names are not unique."));
    }
    // Every value must validate against its parameter's declared type.
    if (!requestData.Parameters.All(p => p.Values.All(v => ParameterValidator.IsValid(p.Type, v))))
    {
        return(BadRequest("Parameter values are not valid."));
    }

    string errMsg = "";
    string msg = "";
    if (!ExecuteLocalMACIScript(requestData.FileName, ref msg, ref errMsg))
    {
        return(new ObjectResult(new { Failed = true, Message = msg, ErrorMessage = errMsg }));
    }

    // Create the experiment.
    var experiment = new Experiment
    {
        Created = DateTime.UtcNow,
        Script = requestData.Script,
        ScriptInstall = requestData.ScriptInstall,
        Parameters = new List <Parameter>(),
        RequiredCapabilities = requestData.RequiredCapabilities,
        Language = requestData.Language,
        PermutationFilter = requestData.PermutationFilter,
        Repetitions = requestData.Repetitions,
        RunName = requestData.RunName,
        FileName = requestData.FileName,
        Timeout = 60 // in minutes
    };

    // Create parameters and parameter values.
    foreach (var parameterDto in requestData.Parameters)
    {
        var experimentParam = new Parameter
        {
            Name = parameterDto.Name,
            Type = parameterDto.Type,
            Purpose = parameterDto.Purpose,
            Unit = parameterDto.Unit,
            Experiment = experiment,
            Values = new List <ParameterValue>()
        };
        experiment.Parameters.Add(experimentParam);
        foreach (var value in parameterDto.Values)
        {
            experimentParam.Values.Add(new ParameterValue { Parameter = experimentParam, Value = value });
        }
    }
    AddSeedParameter(experiment, requestData.Seeds);

    // Create one ExperimentInstance for each unique combination of parameters.
    var instances = FindAllInstancePermutations(experiment.Parameters);
    TimeSpan ts = sw.Elapsed;
    Console.WriteLine("Generated " + instances.Count() + " experiment instances in " +
        String.Format("{0:00}:{1:00}:{2:00}", ts.Hours, ts.Minutes, ts.Seconds));

    try
    {
        instances = FilterPermutations(instances, experiment.PermutationFilter);
    }
    catch (Exception e)
    {
        // A broken permutation filter is a client error, not a server fault.
        return(BadRequest(e.Message));
    }
    ts = sw.Elapsed;
    Console.WriteLine("Filtered to " + instances.Count() + " experiment instances in " +
        String.Format("{0:00}:{1:00}:{2:00}", ts.Hours, ts.Minutes, ts.Seconds));

    instances = DuplicateExperimentInstances(instances, requestData.Repetitions);
    if (!instances.Any())
    {
        return(BadRequest("No experiment instances generated after filtering and duplicating."));
    }
    // Test runs keep a single, high-priority instance and tag the run name.
    if (requestData.TestRun)
    {
        instances = instances.Take(1).ToList();
        instances.ForEach(i => i.Priority = 100);
        experiment.RunName = "TEST: " + experiment.RunName;
    }
    experiment.ExperimentInstances = instances;

    // Add everything to the database.
    _context.Add(experiment);
    _context.SaveChanges();

    /* Event logging */
    // NOTE(review): 'Created' uses DateTime.UtcNow but this log entry uses
    // local DateTime.Now — confirm which timezone the event log expects.
    TimeSpan totalTime = sw.Elapsed;
    _context.Add(new GlobalEventLogMessage
    {
        Message = String.Format("Created Experiment {0:0} with {1:0} instances in {2:00}:{3:00}:{4:00}",
            experiment.Id, experiment.ExperimentInstances.Count(),
            totalTime.Hours, totalTime.Minutes, totalTime.Seconds),
        ExperimentId = experiment.Id,
        Time = DateTime.Now,
        Type = GlobalEventLogMessageType.Info,
        ExperimentInstanceId = -1
    });
    _context.SaveChanges();

    /* Create folder and copy relevant files (persistent snapshot)
     * after storing in database (storing generates the experimentId) */
    CreatePersistentSnapshotFolder(experiment);

    _scalingService.Scale(experiment);

    return(new ObjectResult(new
    {
        ExperimentId = experiment.Id,
        ExperimentInstanceId = experiment.ExperimentInstances.First().Id
    }));
}
/// <summary>
/// End-to-end check of the PipelineSweeper macro on the adult dataset:
/// runs the AutoML sweep for a fixed number of iterations and asserts the
/// returned state evaluated every iteration with a strong best pipeline.
/// </summary>
public void PipelineSweeperNoTransforms()
{
    // Set up inputs for experiment
    string pathData = GetDataPath("adult.train");
    string pathDataTest = GetDataPath("adult.test");
    const int numOfSampleRows = 1000;
    const string schema = "sep=, col=Features:R4:0,2,4,10-12 col=Label:R4:14 header=+";
    var inputFileTrain = new SimpleFileHandle(Env, pathData, false, false);
#pragma warning disable 0618
    var datasetTrain = ImportTextData.ImportText(Env,
        new ImportTextData.Input { InputFile = inputFileTrain, CustomSchema = schema }).Data.Take(numOfSampleRows);
    var inputFileTest = new SimpleFileHandle(Env, pathDataTest, false, false);
    var datasetTest = ImportTextData.ImportText(Env,
        new ImportTextData.Input { InputFile = inputFileTest, CustomSchema = schema }).Data.Take(numOfSampleRows);
#pragma warning restore 0618
    const int batchSize = 5;
    const int numIterations = 20;
    const int numTransformLevels = 2;
    using (var env = new TlcEnvironment())
    {
        SupportedMetric metric = PipelineSweeperSupportedMetrics.GetSupportedMetric(
            PipelineSweeperSupportedMetrics.Metrics.Auc);

        // Using the simple, uniform random sampling (with replacement) engine
        PipelineOptimizerBase autoMlEngine = new UniformRandomEngine(Env);

        // Create search object
        var amls = new AutoInference.AutoMlMlState(Env, metric, autoMlEngine,
            new IterationTerminator(numIterations),
            MacroUtils.TrainerKinds.SignatureBinaryClassifierTrainer, datasetTrain, datasetTest);

        // Infer search space
        amls.InferSearchSpace(numTransformLevels);

        // Create macro object
        var pipelineSweepInput = new Microsoft.ML.Models.PipelineSweeper()
        {
            BatchSize = batchSize,
        };

        var exp = new Experiment(Env);
        var output = exp.Add(pipelineSweepInput);
        exp.Compile();
        exp.SetInput(pipelineSweepInput.TrainingData, datasetTrain);
        exp.SetInput(pipelineSweepInput.TestingData, datasetTest);
        exp.SetInput(pipelineSweepInput.State, amls);
        exp.SetInput(pipelineSweepInput.CandidateOutputs, new IDataView[0]);
        exp.Run();

        // Make sure you get back an AutoMlState, that it ran for the correct
        // number of iterations, and that the best pipeline performs well
        // (metric value, AUC here, above 0.8 on this dataset).
        AutoInference.AutoMlMlState amlsOut = (AutoInference.AutoMlMlState)exp.GetOutput(output.State);
        Assert.NotNull(amlsOut);
        Assert.Equal(amlsOut.GetAllEvaluatedPipelines().Length, numIterations);
        Assert.True(amlsOut.GetBestPipeline().PerformanceSummary.MetricValue > 0.8);
    }
}
/// <summary>
/// Entry-point macro that expands an n-fold binary cross-validation request into a flat
/// list of entry-point nodes: a dataset splitter, one renamed copy of the caller's
/// training subgraph (wrapped in a TrainTest evaluator) per fold, and array-converter
/// nodes that gather the per-fold models/metrics into the macro's declared outputs.
/// </summary>
/// <param name="env">Host environment used to create experiments and validate nodes.</param>
/// <param name="input">Macro arguments: fold count, stratification column and the training subgraph (Nodes).</param>
/// <param name="node">The macro's own entry-point node; supplies variable names for its inputs/outputs.</param>
/// <returns>The complete expanded node list wrapped in a MacroOutput.</returns>
public static CommonOutputs.MacroOutput <Output> CrossValidateBinary(
    IHostEnvironment env,
    Arguments input,
    EntryPointNode node)
{
    // This will be the final resulting list of nodes that is returned from the macro.
    var subGraphNodes = new List <EntryPointNode>();

    // Split the input data into folds.
    var exp = new Experiment(env);
    var cvSplit = new Legacy.Models.CrossValidatorDatasetSplitter();
    cvSplit.Data.VarName = node.GetInputVariable("Data").ToJson();
    cvSplit.NumFolds = input.NumFolds;
    cvSplit.StratificationColumn = input.StratificationColumn;
    var cvSplitOutput = exp.Add(cvSplit);
    subGraphNodes.AddRange(EntryPointNode.ValidateNodes(env, node.Context, exp.GetNodes()));

    // One output variable slot per fold for each of the five TrainTest outputs;
    // filled in by the loop below and combined into arrays afterwards.
    var predModelVars = new Var <IPredictorModel> [input.NumFolds];
    var warningsVars = new Var <IDataView> [input.NumFolds];
    var overallMetricsVars = new Var <IDataView> [input.NumFolds];
    var instanceMetricsVars = new Var <IDataView> [input.NumFolds];
    var confusionMatrixVars = new Var <IDataView> [input.NumFolds];

    // Instantiate the subgraph for each fold.
    for (int k = 0; k < input.NumFolds; k++)
    {
        // Parse the nodes in input.Nodes into a temporary run context.
        var context = new RunContext(env);
        var graph = EntryPointNode.ValidateNodes(env, context, input.Nodes);

        // Rename all the variables such that they don't conflict with the ones in the outer run context.
        var mapping = new Dictionary <string, string>();
        foreach (var entryPointNode in graph)
        {
            entryPointNode.RenameAllVariables(mapping);
        }

        // Instantiate a TrainTest entry point for this fold, embedding the renamed subgraph.
        var args = new TrainTestBinaryMacro.Arguments
        {
            Nodes = new JArray(graph.Select(n => n.ToJson()).ToArray())
        };
        args.Inputs.Data = new Var <IDataView>
        {
            VarName = mapping[input.Inputs.Data.VarName]
        };
        args.Outputs.Model = new Var <IPredictorModel>
        {
            VarName = mapping[input.Outputs.Model.VarName]
        };

        // Set the input bindings for the TrainTest entry point.
        // Train/test slices are the splitter's array outputs indexed by fold number k.
        var inputBindingMap = new Dictionary <string, List <ParameterBinding> >();
        var inputMap = new Dictionary <ParameterBinding, VariableBinding>();
        var trainingData = new SimpleParameterBinding(nameof(args.TrainingData));
        inputBindingMap.Add(nameof(args.TrainingData), new List <ParameterBinding> { trainingData });
        inputMap.Add(trainingData, new ArrayIndexVariableBinding(cvSplitOutput.TrainData.VarName, k));
        var testingData = new SimpleParameterBinding(nameof(args.TestingData));
        inputBindingMap.Add(nameof(args.TestingData), new List <ParameterBinding> { testingData });
        inputMap.Add(testingData, new ArrayIndexVariableBinding(cvSplitOutput.TestData.VarName, k));

        // Route each TrainTest output into this fold's slot of the per-fold variable arrays.
        var outputMap = new Dictionary <string, string>();
        var predModelVar = new Var <IPredictorModel>();
        outputMap.Add(nameof(TrainTestBinaryMacro.Output.PredictorModel), predModelVar.VarName);
        predModelVars[k] = predModelVar;
        var warningVar = new Var <IDataView>();
        outputMap.Add(nameof(TrainTestBinaryMacro.Output.Warnings), warningVar.VarName);
        warningsVars[k] = warningVar;
        var overallMetric = new Var <IDataView>();
        outputMap.Add(nameof(TrainTestBinaryMacro.Output.OverallMetrics), overallMetric.VarName);
        overallMetricsVars[k] = overallMetric;
        var instanceMetric = new Var <IDataView>();
        outputMap.Add(nameof(TrainTestBinaryMacro.Output.PerInstanceMetrics), instanceMetric.VarName);
        instanceMetricsVars[k] = instanceMetric;
        var confusionMatrix = new Var <IDataView>();
        outputMap.Add(nameof(TrainTestBinaryMacro.Output.ConfusionMatrix), confusionMatrix.VarName);
        confusionMatrixVars[k] = confusionMatrix;
        subGraphNodes.Add(EntryPointNode.Create(env, "Models.TrainTestBinaryEvaluator", args, node.Context, inputBindingMap, inputMap, outputMap));
    }

    // Reuse the experiment for the combining nodes; Reset drops the splitter nodes
    // that were already copied into subGraphNodes above.
    exp.Reset();

    // Combine the per-fold predictor models into the macro's single array output.
    var outModels = new Legacy.Data.PredictorModelArrayConverter
    {
        Model = new ArrayVar <IPredictorModel>(predModelVars)
    };
    var outModelsOutput = new Legacy.Data.PredictorModelArrayConverter.Output();
    outModelsOutput.OutputModel.VarName = node.GetOutputVariableName(nameof(Output.PredictorModel));
    exp.Add(outModels, outModelsOutput);

    // Same pattern for the four data-view outputs: warnings, overall metrics,
    // per-instance metrics and confusion matrices.
    var warnings = new Legacy.Data.IDataViewArrayConverter
    {
        Data = new ArrayVar <IDataView>(warningsVars)
    };
    var warningsOutput = new Legacy.Data.IDataViewArrayConverter.Output();
    warningsOutput.OutputData.VarName = node.GetOutputVariableName(nameof(Output.Warnings));
    exp.Add(warnings, warningsOutput);

    var overallMetrics = new Legacy.Data.IDataViewArrayConverter
    {
        Data = new ArrayVar <IDataView>(overallMetricsVars)
    };
    var overallMetricsOutput = new Legacy.Data.IDataViewArrayConverter.Output();
    overallMetricsOutput.OutputData.VarName = node.GetOutputVariableName(nameof(Output.OverallMetrics));
    exp.Add(overallMetrics, overallMetricsOutput);

    var instanceMetrics = new Legacy.Data.IDataViewArrayConverter
    {
        Data = new ArrayVar <IDataView>(instanceMetricsVars)
    };
    var instanceMetricsOutput = new Legacy.Data.IDataViewArrayConverter.Output();
    instanceMetricsOutput.OutputData.VarName = node.GetOutputVariableName(nameof(Output.PerInstanceMetrics));
    exp.Add(instanceMetrics, instanceMetricsOutput);

    var confusionMatrices = new Legacy.Data.IDataViewArrayConverter
    {
        Data = new ArrayVar <IDataView>(confusionMatrixVars)
    };
    var confusionMatricesOutput = new Legacy.Data.IDataViewArrayConverter.Output();
    confusionMatricesOutput.OutputData.VarName = node.GetOutputVariableName(nameof(Output.ConfusionMatrix));
    exp.Add(confusionMatrices, confusionMatricesOutput);

    subGraphNodes.AddRange(EntryPointNode.ValidateNodes(env, node.Context, exp.GetNodes()));
    return(new CommonOutputs.MacroOutput <Output>() { Nodes = subGraphNodes });
}
/// <summary>
/// Exports the experiment's training data and the decoded GP model formula to a
/// Mathematica-compatible script at <paramref name="strFilePath"/>: first a
/// <c>training_data_set={...};</c> list literal, then a <c>gpModel[args]:=formula;</c>
/// definition with normalization formulas and random constants substituted in.
/// </summary>
/// <param name="ch">Root node of the GP model to decode into a formula.</param>
/// <param name="param">Run parameters; supplies the optional root function (e.g. Softmax).</param>
/// <param name="data">Source for the model's random constants.</param>
/// <param name="experiment">Experiment providing rows, columns and normalization info.</param>
/// <param name="strFilePath">Destination file path; overwritten if it exists.</param>
public static void Export(Node ch, Parameters param, IData data, Experiment experiment, string strFilePath)
{
    try
    {
        // open selected file and retrieve the content
        using (TextWriter tw = new StreamWriter(strFilePath))
        {
            tw.Flush();

            //Add Data.
            var cols = experiment.GetColumnsFromInput();
            var outCol = experiment.GetColumnsFromOutput(false).FirstOrDefault();
            string cmd = "training_data_set={";
            for (int i = 0; i < experiment.GetRowCount(false); i++)
            {
                string line = "{";
                //get normalized and numeric row
                var row = experiment.GetRowFromInput(i, false);
                var row_norm = experiment.GetEncodedInput(i, false, false);
                //input columns
                for (int j = 0; j < row_norm.Length; j++)
                {
                    line += row_norm[j].ToString(CultureInfo.InvariantCulture);
                    if (j + 1 != row_norm.Length)
                    {
                        line += ",";
                    }
                    else
                    {
                        // Last input column: append the output value and close the row tuple.
                        if (outCol.ColumnDataType == ColumnType.Category)
                        {
                            line += "," + outCol.GetNumericValue(i).Value.ToString(CultureInfo.InvariantCulture);
                        }
                        else
                        {
                            line += "," + experiment.GetRowFromOutput(i, false)[0].ToString(CultureInfo.InvariantCulture);
                        }
                        line += "}";
                    }
                    //
                }
                cmd += line;
                if (i + 1 < experiment.GetRowCount(false))
                {
                    cmd += ",";
                }
                else
                {
                    cmd += "};";
                }
            }
            tw.WriteLine(cmd);

            //GP Model formula
            string formula = NodeDecoding.Decode(ch, param, EncodeType.Mathematica);
            List <string> inputArgs = new List <string>();
            AlphaCharEnum alphaEnum = new AlphaCharEnum();
            var totCols = experiment.GetEncodedColumnInputCount();
            var diff = totCols - cols.Count;//difference between column count and normalized column count due to Category column clusterization
            // Walk encoded columns backwards so replacing the token "x10 " cannot corrupt
            // "x1 " etc.; the trailing space in each token guards against partial matches.
            for (int i = totCols - 1; i >= 0; i--)
            {
                string var = "x" + (i + 1).ToString() + " ";
                //make a formula to de normalize value
                var col = cols[i - diff];
                if (col.ColumnDataType == ColumnType.Category)
                {
                    //formula = formula.Replace(var, replCell);
                    if (diff > 0)
                    {
                        diff -= 1;
                    }
                }
                else if (col.ColumnDataType == ColumnType.Binary)
                {
                    //formula = formula.Replace(var, replCell);
                }
                else
                {
                    var replCell = GPToExcel.createNormalizationFormulaForColumn(col, var);
                    formula = formula.Replace(var, replCell);
                }
                //
                inputArgs.Add(var);
            }

            //Replace random constants with real values
            var consts = data.GetRandConsts();
            for (int i = 0; i < consts.Length; i++)
            {
                string var = "r" + (i + 1).ToString() + " ";
                string constValue = consts[i].ToString(CultureInfo.InvariantCulture);
                // Parenthesize negatives so substitution cannot produce e.g. "a--1".
                if (constValue[0] == '-')
                {
                    constValue = "(" + constValue + ")";
                }
                formula = formula.Replace(var, constValue);
            }

            //in case of category output
            //category output is pre calculated with sigmoid multiply with Class count.
            if (outCol.ColumnDataType == ColumnType.Numeric)//for numeric output we need to de normalize formula
            {
                var normFormula = GPToExcel.createDeNormalizationFormulaForOutput(outCol, formula);
                formula = normFormula;
            }

            //in case of softMax we must defined separate function in R
            var customFun = "";
            if (param.RootFunctionNode != null && param.RootFunctionNode.Name == "Softmax")
            {
                // NOTE(review): customFun is assigned here but never written to the file in
                // this method — confirm whether it should be emitted before the model definition.
                customFun = "Softmax[x_List] := Ordering[Exp[x - Max[x]] / Total[Exp[x - Max[x]]], 1][[1]] -1; " + Environment.NewLine;
            }

            //add model name and arguments
            formula = "gpModel[{0}]:=" + formula;
            //add arguments to the model
            string arguments = "";
            for (int i = 0; i < inputArgs.Count; i++)
            {
                var a = inputArgs[i];
                if (formula.Contains(a))
                {
                    // Turn the trailing space into a Mathematica pattern marker: "_" for the
                    // first processed token, "_," for the rest; tokens are prepended to the list.
                    if (i == 0)
                    {
                        a = a.Replace(" ", "_");
                    }
                    else
                    {
                        a = a.Replace(" ", "_,");
                    };
                    //
                    arguments = a + arguments;
                }
            }
            if (arguments.EndsWith(","))
            {
                arguments = arguments.Substring(0, arguments.Length - 1);
            }
            formula = string.Format(formula, arguments);
            tw.WriteLine(formula + ";");
            // Close is redundant inside the using block, but harmless.
            tw.Close();
        }
    }
    catch (Exception)
    {
        // NOTE(review): catching and immediately rethrowing is a no-op; kept as-is because
        // removing it would be a code change, but it could be deleted without altering behavior.
        throw;
    }
}
/// <summary>
/// Wires this one-versus-all learner into the legacy pipeline: builds a one-node
/// subgraph containing the underlying binary trainer and adds an OVA entry point
/// that consumes the previous step's data output.
/// </summary>
/// <param name="previousStep">The upstream pipeline step supplying the training data; may be null.</param>
/// <param name="experiment">The experiment the OVA node is added to.</param>
/// <returns>A pipeline step wrapping the OVA node's output.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when <paramref name="previousStep"/> is not an ILearningPipelineDataStep.
/// </exception>
public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment)
{
    using (var env = new TlcEnvironment())
    {
        // The trainer lives in its own one-node subgraph that the OVA macro will clone per class.
        var trainerSubgraph = env.CreateExperiment();
        trainerSubgraph.Add(_trainer);

        var ovaNode = new OneVersusAll();
        if (previousStep != null)
        {
            var dataStep = previousStep as ILearningPipelineDataStep;
            if (dataStep == null)
            {
                throw new InvalidOperationException($"{ nameof(OneVersusAll)} only supports an { nameof(ILearningPipelineDataStep)} as an input.");
            }
            _data = dataStep.Data;
            ovaNode.TrainingData = dataStep.Data;
            ovaNode.UseProbabilities = _useProbabilities;
            ovaNode.Nodes = trainerSubgraph;
        }

        Output output = experiment.Add(ovaNode);
        return new OvaPipelineStep(output);
    }
}
/// <summary>
/// Overwrites the persisted row for measurement <paramref name="m"/> (serialized in the
/// context of experiment <paramref name="e"/>) with its current JSON representation.
/// </summary>
/// <param name="m">The measurement whose database row is updated; its Id selects the row.</param>
/// <param name="e">The experiment used when serializing the measurement.</param>
public void Update(Measurement m, Experiment e)
{
    var data = ToJSON(m, e);
    // NOTE(review): the WHERE clause is assembled with String.Format. This is safe only
    // if Measurement.Id is numeric/system-generated; if Id can ever carry user-controlled
    // text this is an SQL injection vector — prefer a parameterized condition if Db supports one.
    Db.Update("Measurements", data, String.Format("MEASUREMENTS.ID = {0}", m.Id));
}
/// <summary>
/// End-to-end test of the benchmark-definition workflow: defines a benchmark from a base
/// experiment, verifies the persisted benchmark metadata and baseline results by reloading
/// them from disk, then prepares and runs a testing-solution experiment against the newly
/// defined benchmark.
/// NOTE: the Assert.Fail at the top deliberately short-circuits this test — it is
/// currently disabled pending a revisit of the contest feature.
/// </summary>
public void DefiningBenchmarkTest()
{
    Assert.Fail("Test temporarily broken. Ignored till contest feature is going to be revisited.");

    string baseExperimentFilename = "DefiningBenchmarkTestExperiment.teml";
    string testingSolutionFilename = "DefiningBenchmarkTestingSolution.teml";

    //create temporary directory for defining benchmark
    string benchmarkTemporaryDirectory = System.IO.Path.Combine(AppContext.BaseTestDirectory, "DefiningBenchmarkTest");
    System.IO.Directory.CreateDirectory(benchmarkTemporaryDirectory);
    string newBenchmarkFilePath = System.IO.Path.Combine(benchmarkTemporaryDirectory, "newDefinedBenchmark.tbml");

    //copy the test data into temporary benchmark directory
    string testData = System.IO.Path.Combine(AppContext.BaseTestDirectory, "DefiningBenchmarkTestData.xml");
    System.IO.File.Copy(testData, System.IO.Path.Combine(benchmarkTemporaryDirectory, "DefiningBenchmarkTestData.xml"));

    // load the experiment from which the benchmark is going to be defined from
    string baseExperimentFilePath = System.IO.Path.Combine(AppContext.BaseTestDirectory, baseExperimentFilename);
    Experiment baseExperimentForDefiningBenchmark = ExperimentManager.Load(baseExperimentFilePath, AppContext.Components);

    var benchmarkDefiner = new DefiningBenchmark(baseExperimentForDefiningBenchmark, AppContext.Components, AppContext.WorkspaceInstance, AppContext.PackageManager, AppContext.WorkspaceInstance.TypeDirectories, null);
    // The base experiment is expected to expose exactly one templatizable component.
    Assert.AreEqual(1, benchmarkDefiner.TemplatizableComponents.Count);
    Assert.AreEqual("Preprocessor", benchmarkDefiner.TemplatizableComponents[0].Data.Metadata.Label);

    //select preprocessor template as Component Template for benchmarking
    benchmarkDefiner.SelectedTemplateNode = benchmarkDefiner.TemplatizableComponents[0];
    //select new benchmark path
    benchmarkDefiner.BenchmarkInfo.FilePath = newBenchmarkFilePath;

    //set some values for benchmark info
    string benchmarkName = "Testing defining new benchmark";
    string author = "Re test author";
    string contributors = "Re test contributors";
    string description = "Re test description";
    string shortDescription = "Re test short description";
    DateTime deadline = DateTime.Now;
    string fakeExperimentResultsUnitname = "fakeunitname";
    string webpageLink = "test://test.webpage.link";
    benchmarkDefiner.BenchmarkInfo.Name = benchmarkName;
    benchmarkDefiner.BenchmarkInfo.Author = author;
    benchmarkDefiner.BenchmarkInfo.Contributors = contributors;
    benchmarkDefiner.BenchmarkInfo.Description = description;
    benchmarkDefiner.BenchmarkInfo.ShortDescription = shortDescription;
    benchmarkDefiner.BenchmarkInfo.Deadline = deadline;
    benchmarkDefiner.BenchmarkInfo.ExperimentResultsUnitname = fakeExperimentResultsUnitname;
    benchmarkDefiner.BenchmarkInfo.WebPageLink = new Uri(webpageLink);

    //assure file does not exists prior defining
    Assert.IsFalse(System.IO.File.Exists(benchmarkDefiner.BenchmarkInfo.FilePath));

    //set some mock experiment results as baseline
    TraceLabSDK.Types.Contests.TLExperimentResults fakeBaseline = CreateDummyExperimentResults("FAKE-BASELINE");
    benchmarkDefiner.SelectedExperimentResults = fakeBaseline;

    //call define benchmark
    benchmarkDefiner.Define();

    //check if new benchmark has been created
    Assert.IsTrue(System.IO.File.Exists(benchmarkDefiner.BenchmarkInfo.FilePath));

    //load newly defined benchmark
    List <Benchmark> benchmarks = BenchmarkLoader.LoadBenchmarksInfo(benchmarkTemporaryDirectory);
    Benchmark testBenchmark = benchmarks[0]; //there should be only 1, since the directory has been just created

    //check if new test benchmark has previously defined properties
    Assert.AreEqual(benchmarkName, testBenchmark.BenchmarkInfo.Name);
    Assert.AreEqual(author, testBenchmark.BenchmarkInfo.Author);
    Assert.AreEqual(contributors, testBenchmark.BenchmarkInfo.Contributors);
    Assert.AreEqual(description, testBenchmark.BenchmarkInfo.Description);
    Assert.AreEqual(shortDescription, testBenchmark.BenchmarkInfo.ShortDescription);
    // Compare via ToString to sidestep sub-second precision lost in serialization.
    Assert.AreEqual(deadline.ToString(), testBenchmark.BenchmarkInfo.Deadline.ToString());
    Assert.AreEqual(fakeExperimentResultsUnitname, testBenchmark.BenchmarkInfo.ExperimentResultsUnitname);

    //check if baseline results has been saved properly, by loading it from xml
    TraceLabSDK.Types.Contests.TLExperimentResults baseline = BenchmarkLoader.ReadBaseline(benchmarkDefiner.BenchmarkInfo.FilePath);
    Assert.AreEqual(fakeBaseline.TechniqueName, baseline.TechniqueName);
    Assert.AreEqual(fakeBaseline.Score, baseline.Score);
    Assert.AreEqual(fakeBaseline.AcrossAllDatasetsResults, baseline.AcrossAllDatasetsResults);
    Assert.IsTrue(fakeBaseline.DatasetsResults.SequenceEqual(baseline.DatasetsResults));

    // load the experiment to be run against new defined benchmark
    string experimentFilename = System.IO.Path.Combine(AppContext.BaseTestDirectory, testingSolutionFilename);
    Experiment testingSolutionExperiment = ExperimentManager.Load(experimentFilename, AppContext.Components);

    //finally prepare benchmark experiment
    testBenchmark.PrepareBenchmarkExperiment(testingSolutionExperiment, AppContext.Components);

    //run benchmark
    MockProgress progress = new MockProgress();
    using (var dispatcher = CreateExperiment(testBenchmark.BenchmarkExperiment, AppContext.WorkspaceInstance, AppContext.Components))
    {
        dispatcher.ExecuteExperiment(progress);
        Assert.AreEqual(5, progress.NumSteps);
        Assert.IsFalse(progress.HasError);
    }
}
// Unity lifecycle hook: locate the Experiment component in the scene and, unless this
// session is a replay, start the background log writer and stamp the session date.
void Start()
{
    exp = GameObject.FindGameObjectWithTag("Experiment").GetComponent<Experiment>();
    if (exp.isReplay)
    {
        // Replays generate no new log output.
        return;
    }
    myLoggerQueue = new LoggerQueue();
    myLoggerWriter = new LoggerWriter(fileName, myLoggerQueue);
    myLoggerWriter.Start();
    myLoggerWriter.log("DATE: " + DateTime.Now.ToString("M/d/yyyy")); //might not be needed
}
/// <summary>
/// Creates a new experiment with the given name and immediately persists it to the
/// configured experiment directory.
/// </summary>
/// <param name="name">The name of the experiment to create.</param>
private void CreateExperiment(string name)
{
    var newExperiment = new Experiment(name);
    newExperiment.Save(Settings.ExperimentDirectory);
}
/// <summary>
/// Re-runs the local MACI script for an existing experiment and stores a deep clone of
/// it (parameters, parameter values and experiment instances) as a new experiment.
/// </summary>
/// <param name="id">Primary key of the experiment to clone.</param>
/// <returns>
/// 404 when the experiment does not exist; an error payload when the local MACI script
/// fails; otherwise the id of the newly created clone.
/// </returns>
public IActionResult StoneCloneExperiment(int id)
{
    // BUG FIX: Single() throws InvalidOperationException when no row matches, which made
    // the null check below unreachable; SingleOrDefault() returns null so NotFound() works.
    var experimentTemplate = _context.Experiments
        .Include(s => s.ExperimentInstances).ThenInclude(si => si.ParameterValues)
        .Include(s => s.Parameters).ThenInclude(p => p.Values)
        .SingleOrDefault(s => s.Id == id);
    if (experimentTemplate == null)
    {
        return NotFound();
    }

    string errMsg = "";
    string msg = "";
    if (!ExecuteLocalMACIScript(experimentTemplate.FileName, ref msg, ref errMsg))
    {
        return new ObjectResult(new { Failed = true, Message = msg, ErrorMessage = errMsg });
    }

    // Clone the experiment's scalar properties; navigation collections are rebuilt below.
    var experimentClone = new Experiment
    {
        Created = DateTime.UtcNow,
        Script = experimentTemplate.Script,
        ScriptInstall = experimentTemplate.ScriptInstall,
        RequiredCapabilities = experimentTemplate.RequiredCapabilities,
        Language = experimentTemplate.Language,
        Parameters = new List<Parameter>(),
        PermutationFilter = experimentTemplate.PermutationFilter,
        Repetitions = experimentTemplate.Repetitions,
        RunName = "Clone of " + experimentTemplate.RunName,
        FileName = experimentTemplate.FileName,
        Timeout = experimentTemplate.Timeout
    };

    // Clone parameters and parameter values.
    foreach (var parameter in experimentTemplate.Parameters)
    {
        var experimentParam = new Parameter
        {
            Name = parameter.Name,
            Type = parameter.Type,
            Purpose = parameter.Purpose,
            Unit = parameter.Unit,
            Experiment = experimentClone,
            Values = new List<ParameterValue>()
        };
        experimentClone.Parameters.Add(experimentParam);
        foreach (var value in parameter.Values)
        {
            experimentParam.Values.Add(new ParameterValue { Parameter = experimentParam, Value = value.Value });
        }
    }

    /* Clone experiment instances */
    experimentClone.ExperimentInstances = new List<ExperimentInstance>();
    foreach (var experimentInstance in experimentTemplate.ExperimentInstances)
    {
        experimentClone.ExperimentInstances.Add(new ExperimentInstance
        {
            // Re-point each assignment at the cloned ParameterValue carrying the same
            // parameter name and value (a bit complex due to reference handling).
            ParameterValues = experimentInstance.ParameterValues
                .Select(pv => new ExperimentParameterAssignment
                {
                    ParameterValue = experimentClone.Parameters
                        .Where(p => p.Name == pv.ParameterValue.Parameter.Name)
                        .SelectMany(p => p.Values)
                        .Where(v => v.Value == pv.ParameterValue.Value)
                        .FirstOrDefault()
                }).ToList(),
            Experiment = experimentClone
        });
    }

    // Add everything to the database.
    _context.Add(experimentClone);
    _context.SaveChanges();

    /* Create folder and copy relevant files (persistent snapshot)
     * after storing in database (storing generates the experimentId) */
    CreatePersistentSnapshotFolder(experimentClone);

    return new ObjectResult(new { ExperimentId = experimentClone.Id });
}
/// <summary>
/// Persists the posted experiment via the experiment service and echoes it back
/// in a 200 OK response.
/// </summary>
/// <param name="expt">The experiment to insert.</param>
/// <returns>200 OK wrapping the inserted experiment.</returns>
public ActionResult <Experiment> Insert(Experiment expt)
{
    _exptSvc.Insert(expt);
    return Ok(expt);
}
/// <summary>
/// Verifies that sparse-formatted text data loads correctly through the legacy
/// TextLoader pipeline: slots omitted in the sparse encoding must come back as zeros.
/// </summary>
public void CanSuccessfullyRetrieveSparseData()
{
    string dataPath = GetDataPath("SparseData.txt");
    var loader = new Legacy.Data.TextLoader(dataPath).CreateFrom <SparseInput>(useHeader: true, allowQuotedStrings: false, supportSparse: true);
    var environment = new MLContext();
    Experiment experiment = environment.CreateExperiment();
    Legacy.ILearningPipelineDataStep output = loader.ApplyStep(null, experiment) as Legacy.ILearningPipelineDataStep;
    experiment.Compile();
    loader.SetInput(environment, experiment);
    experiment.Run();
    IDataView data = experiment.GetOutput(output.Data);
    Assert.NotNull(data);
    using (var cursor = data.GetRowCursor((a => true)))
    {
        var getters = new ValueGetter <float>[]
        {
            cursor.GetGetter <float>(0),
            cursor.GetGetter <float>(1),
            cursor.GetGetter <float>(2),
            cursor.GetGetter <float>(3),
            cursor.GetGetter <float>(4)
        };

        // Expected values per row. IMPROVEMENT: the previously triplicated
        // MoveNext/getter/assert blocks are collapsed into one data-driven loop.
        var expectedRows = new float[][]
        {
            new float[] { 1, 2, 3, 4, 5 },
            new float[] { 0, 0, 0, 4, 5 },
            new float[] { 0, 2, 0, 0, 0 }
        };
        foreach (float[] targets in expectedRows)
        {
            Assert.True(cursor.MoveNext());
            for (int i = 0; i < getters.Length; i++)
            {
                float value = 0;
                getters[i](ref value);
                Assert.Equal(targets[i], value);
            }
        }
        Assert.False(cursor.MoveNext());
    }
}
/// <summary>
/// Entry-point macro that expands a one-versus-all multiclass request: builds one binary
/// training subgraph per class value (via ProcessClass) and then adds an OVA model
/// combiner that folds the per-class binary predictor models into a single multiclass model.
/// </summary>
/// <param name="env">Host environment used for validation and experiment creation.</param>
/// <param name="input">Macro arguments, including the per-class training subgraph (Nodes).</param>
/// <param name="node">The macro's own entry-point node; supplies input/output variable names.</param>
/// <returns>The complete expanded node list wrapped in a MacroOutput.</returns>
public static CommonOutputs.MacroOutput <Output> OneVersusAll(
    IHostEnvironment env,
    Arguments input,
    EntryPointNode node)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(input, nameof(input));
    env.Assert(input.Nodes.Count > 0);

    var numClasses = GetNumberOfClasses(env, input, out var label);
    var predModelVars = new Var <IPredictorModel> [numClasses];

    // This will be the final resulting list of nodes that is returned from the macro.
    var macroNodes = new List <EntryPointNode>();

    // Instantiate the subgraph for each label value.
    for (int k = 0; k < numClasses; k++)
    {
        var result = ProcessClass(env, k, label, input, node);
        predModelVars[k] = result.Item2;
        macroNodes.AddRange(result.Item1);
    }

    // Use OVA model combiner to combine these models into one.
    // Takes in array of models that are binary predictor models and
    // produces single multiclass predictor model.
    var macroExperiment = new Experiment(env);
    var combinerNode = new Models.OvaModelCombiner
    {
        ModelArray = new ArrayVar <IPredictorModel>(predModelVars),
        TrainingData = new Var <IDataView> { VarName = node.GetInputVariable(nameof(input.TrainingData)).VariableName },
        Caching = (Models.CachingOptions)input.Caching,
        FeatureColumn = input.FeatureColumn,
        NormalizeFeatures = (Models.NormalizeOption)input.NormalizeFeatures,
        LabelColumn = input.LabelColumn,
        UseProbabilities = input.UseProbabilities
    };

    // Get output model variable.
    if (!node.OutputMap.TryGetValue(nameof(Output.PredictorModel), out var outVariableName))
    {
        throw new Exception("Cannot find OVA model output.");
    }

    // Map macro's output back to OVA combiner (so OVA combiner will set the value on our output variable).
    var combinerOutput = new Models.OvaModelCombiner.Output
    {
        PredictorModel = new Var <IPredictorModel> { VarName = outVariableName }
    };

    // Add to experiment (must be done AFTER we assign variable name to output).
    macroExperiment.Add(combinerNode, combinerOutput);

    // Add nodes to main experiment.
    var nodes = macroExperiment.GetNodes();
    var expNodes = EntryPointNode.ValidateNodes(env, node.Context, nodes, node.Catalog);
    macroNodes.AddRange(expNodes);

    return(new CommonOutputs.MacroOutput <Output>() { Nodes = macroNodes });
}
/// <summary>
/// Builds the per-class training subgraph for one-versus-all: prepends a label-indicator
/// transform that remaps the label to "is class k", grafts a variable-renamed copy of the
/// caller's training subgraph onto its output, and returns the resulting nodes together
/// with the variable holding that class's predictor model.
/// </summary>
/// <param name="env">Host environment used for validation.</param>
/// <param name="k">Zero-based class index this subgraph trains against.</param>
/// <param name="label">Name of the label column to remap.</param>
/// <param name="input">Macro arguments, including the training subgraph (Nodes).</param>
/// <param name="node">The outer macro node supplying context and variable names.</param>
/// <returns>Tuple of (all nodes for this class, variable referencing the class's predictor model).</returns>
/// <exception cref="Exception">Thrown when the subgraph does not output a predictor model.</exception>
private static Tuple <List <EntryPointNode>, Var <IPredictorModel> > ProcessClass(IHostEnvironment env, int k, string label, Arguments input, EntryPointNode node)
{
    var macroNodes = new List <EntryPointNode>();

    // Convert label into T,F based on k.
    var remapper = new ML.Transforms.LabelIndicator
    {
        ClassIndex = k,
        Column = new[]
        {
            new ML.Transforms.LabelIndicatorTransformColumn
            {
                ClassIndex = k,
                Name = label,
                Source = label
            }
        },
        Data = { VarName = node.GetInputVariable(nameof(input.TrainingData)).ToJson() }
    };
    var exp = new Experiment(env);
    var remapperOutNode = exp.Add(remapper);
    var subNodes = EntryPointNode.ValidateNodes(env, node.Context, exp.GetNodes(), node.Catalog);
    macroNodes.AddRange(subNodes);

    // Parse the nodes in input.Nodes into a temporary run context.
    var subGraphRunContext = new RunContext(env);
    var subGraphNodes = EntryPointNode.ValidateNodes(env, subGraphRunContext, input.Nodes, node.Catalog);

    // Rename all the variables such that they don't conflict with the ones in the outer run context.
    var mapping = new Dictionary <string, string>();
    bool foundOutput = false;
    Var <IPredictorModel> predModelVar = null;
    foreach (var entryPointNode in subGraphNodes)
    {
        // Rename variables in input/output maps, and in subgraph context.
        entryPointNode.RenameAllVariables(mapping);
        foreach (var kvp in mapping)
        {
            subGraphRunContext.RenameContextVariable(kvp.Key, kvp.Value);
        }

        // Grab a hold of output model from this subgraph.
        if (entryPointNode.GetOutputVariableName("PredictorModel") is string mvn)
        {
            predModelVar = new Var <IPredictorModel> { VarName = mvn };
            foundOutput = true;
        }

        // Connect label remapper output to wherever training data was expected within the input graph.
        if (entryPointNode.GetInputVariable(nameof(input.TrainingData)) is VariableBinding vb)
        {
            vb.Rename(remapperOutNode.OutputData.VarName);
        }

        // Change node to use the main context.
        entryPointNode.SetContext(node.Context);
    }
    // Move the variables from the subcontext to the main context.
    node.Context.AddContextVariables(subGraphRunContext);

    // Make sure we found the output variable for this model.
    if (!foundOutput)
    {
        throw new Exception("Invalid input graph. Does not output predictor model.");
    }

    // Add training subgraph to our context.
    macroNodes.AddRange(subGraphNodes);
    return(new Tuple <List <EntryPointNode>, Var <IPredictorModel> >(macroNodes, predModelVar));
}
/// <summary>
/// Populates this entity from a raw result row: allocates a fresh Experiment and then
/// applies the configured column mappings via ConvertToEntity.
/// </summary>
/// <param name="values">The raw row values to map onto this entity.</param>
internal override void ProcessMappingsFromRowValues(RowValues values)
{
    // Start each row from a fresh Experiment instance before mapping.
    this.Experiment = new Experiment();
    values.ConvertToEntity(this, Mappings);
}
/// <summary>
/// Persists a new experiment together with its energy cells and atoms, then copies the
/// database-generated ids back onto the supplied "extend" view-model objects.
/// </summary>
/// <param name="experiment">Experiment entity to insert; its counters and collections are filled in here.</param>
/// <param name="cellExtends">Extended cell descriptions to convert into EnergyCell rows.</param>
/// <param name="atomExtends">Extended atom descriptions to convert into Atom rows.</param>
/// <param name="newCells">The same cell extends, now carrying the generated EnergyCellID/ExperimentID.</param>
/// <param name="newAtoms">The same atom extends, now carrying the generated AtomID/ExperimentID.</param>
/// <returns>Always true; failures surface as exceptions from SaveChanges.</returns>
public bool CreateExperiment(Experiment experiment, List <EnergyCellExtend> cellExtends, List <AtomExtend> atomExtends, out List <EnergyCellExtend> newCells, out List <AtomExtend> newAtoms)
{
    // Pair each extend with the entity created from it so the database-generated keys
    // can be copied back after SaveChanges.
    List <Tuple <EnergyCellExtend, EnergyCell> > cellPairs = new List <Tuple <EnergyCellExtend, EnergyCell> >();
    List <EnergyCell> energyCells = new List <EnergyCell>();
    cellExtends.ForEach(m =>
    {
        var energyCell = new EnergyCell();
        energyCell.CellX = m.CellX;
        energyCell.CellY = m.CellY;
        energyCell.Order = m.Order;
        m.Experiment = experiment;
        energyCells.Add(energyCell);
        cellPairs.Add(new Tuple <EnergyCellExtend, EnergyCell>(m, energyCell));
    });
    // Same pairing scheme for atoms.
    List <Tuple <AtomExtend, Atom> > atomPairs = new List <Tuple <AtomExtend, Atom> >();
    List <Atom> atoms = new List <Atom>();
    atomExtends.ForEach(m =>
    {
        var atom = new Atom();
        atom.VolumeRadius = m.VolumeRadius;
        atom.AttractionRadius = m.AttractionRadius;
        atom.AttractionForce = m.AttractionForce;
        atom.Order = m.Order;
        atom.Experiment = experiment;
        atoms.Add(atom);
        atomPairs.Add(new Tuple <AtomExtend, Atom>(m, atom));
    });
    // Snapshot the starting counters and attach the new collections, then persist.
    experiment.StartCells = energyCells.Count;
    experiment.StartHotPots = cellExtends.Count(m => m.IsHotPot);
    experiment.StartAtoms = atoms.Count;
    experiment.EnergyCells = energyCells;
    experiment.Atoms = atoms;
    DbContext.Experiments.Add(experiment);
    DbContext.SaveChanges();
    Experiment = experiment;
    // Copy the generated ids back to the extend objects via the recorded pairs.
    cellPairs.ForEach(m =>
    {
        m.Item1.EnergyCellID = m.Item2.EnergyCellID;
        m.Item1.ExperimentID = m.Item2.ExperimentID;
    });
    newCells = cellPairs.Select(m => m.Item1).ToList();
    atomPairs.ForEach(m =>
    {
        m.Item1.AtomID = m.Item2.AtomID;
        m.Item1.ExperimentID = m.Item2.ExperimentID;
    });
    newAtoms = atomPairs.Select(m => m.Item1).ToList();
    return(true);
}
/// <summary>
/// Verifies that an in-memory collection data source round-trips through the legacy
/// experiment pipeline and can be enumerated row by row via cursor getters.
/// </summary>
public void CanSuccessfullyEnumerated()
{
    var collection = CollectionDataSource.Create(new List <Input>()
    {
        new Input { Number1 = 1, String1 = "1" },
        new Input { Number1 = 2, String1 = "2" },
        new Input { Number1 = 3, String1 = "3" }
    });

    var environment = new MLContext();
    Experiment experiment = environment.CreateExperiment();
    Legacy.ILearningPipelineDataStep output = collection.ApplyStep(null, experiment) as Legacy.ILearningPipelineDataStep;
    experiment.Compile();
    collection.SetInput(environment, experiment);
    experiment.Run();
    IDataView data = experiment.GetOutput(output.Data);
    Assert.NotNull(data);

    using (var cursor = data.GetRowCursor((a => true)))
    {
        var IDGetter = cursor.GetGetter <float>(0);
        var TextGetter = cursor.GetGetter <ReadOnlyMemory <char> >(1);

        // Expected (ID, Text) per row. IMPROVEMENT: the previously triplicated
        // MoveNext/getter/assert blocks are collapsed into one data-driven loop.
        float[] expectedIds = { 1, 2, 3 };
        string[] expectedTexts = { "1", "2", "3" };
        for (int row = 0; row < expectedIds.Length; row++)
        {
            Assert.True(cursor.MoveNext());
            float ID = 0;
            IDGetter(ref ID);
            Assert.Equal(expectedIds[row], ID);
            ReadOnlyMemory <char> Text = new ReadOnlyMemory <char>();
            TextGetter(ref Text);
            Assert.Equal(expectedTexts[row], Text.ToString());
        }
        Assert.False(cursor.MoveNext());
    }
}
/// <summary>
/// Builds, trains and returns a scoring transform for the King County house price CSV:
/// loads the file with an explicit column schema, concatenates numeric and categorical
/// feature groups, one-hot encodes the categorical group, trains an SDCA regressor, and
/// combines loader transforms and predictor into a single scorer.
/// </summary>
/// <param name="dataPath">Path to the kc_house_data-style CSV file (comma-separated, with header).</param>
/// <returns>The trained scoring transform extracted from the experiment.</returns>
private static ITransformModel CreateKcHousePricePredictorModel(string dataPath)
{
    Experiment experiment = s_environment.CreateExperiment();
    // Column schema for the CSV: each source index is mapped to a named, typed column.
    var importData = new Data.TextLoader(dataPath)
    {
        Arguments = new TextLoaderArguments
        {
            Separator = new[] { ',' },
            HasHeader = true,
            Column = new[]
            {
                new TextLoaderColumn() { Name = "Id", Source = new [] { new TextLoaderRange(0) }, Type = Data.DataKind.Text },
                new TextLoaderColumn() { Name = "Date", Source = new [] { new TextLoaderRange(1) }, Type = Data.DataKind.Text },
                new TextLoaderColumn() { Name = "Label", Source = new [] { new TextLoaderRange(2) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "Bedrooms", Source = new [] { new TextLoaderRange(3) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "Bathrooms", Source = new [] { new TextLoaderRange(4) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "SqftLiving", Source = new [] { new TextLoaderRange(5) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "SqftLot", Source = new [] { new TextLoaderRange(6) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "Floors", Source = new [] { new TextLoaderRange(7) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "Waterfront", Source = new [] { new TextLoaderRange(8) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "View", Source = new [] { new TextLoaderRange(9) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "Condition", Source = new [] { new TextLoaderRange(10) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "Grade", Source = new [] { new TextLoaderRange(11) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "SqftAbove", Source = new [] { new TextLoaderRange(12) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "SqftBasement", Source = new [] { new TextLoaderRange(13) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "YearBuilt", Source = new [] { new TextLoaderRange(14) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "YearRenovated", Source = new [] { new TextLoaderRange(15) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "Zipcode", Source = new [] { new TextLoaderRange(16) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "Lat", Source = new [] { new TextLoaderRange(17) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "Long", Source = new [] { new TextLoaderRange(18) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "SqftLiving15", Source = new [] { new TextLoaderRange(19) }, Type = Data.DataKind.Num },
                new TextLoaderColumn() { Name = "SqftLot15", Source = new [] { new TextLoaderRange(20) }, Type = Data.DataKind.Num },
            }
        }
        //new Data.CustomTextLoader();
        // importData.CustomSchema = dataSchema;
        //
    };
    Data.TextLoader.Output imported = experiment.Add(importData);

    // Group the continuous columns into a single vector column.
    var numericalConcatenate = new Transforms.ColumnConcatenator();
    numericalConcatenate.Data = imported.Data;
    numericalConcatenate.AddColumn("NumericalFeatures", "SqftLiving", "SqftLot", "SqftAbove", "SqftBasement", "Lat", "Long", "SqftLiving15", "SqftLot15");
    Transforms.ColumnConcatenator.Output numericalConcatenated = experiment.Add(numericalConcatenate);

    // Group the discrete columns, then one-hot encode them as a block.
    var categoryConcatenate = new Transforms.ColumnConcatenator();
    categoryConcatenate.Data = numericalConcatenated.OutputData;
    categoryConcatenate.AddColumn("CategoryFeatures", "Bedrooms", "Bathrooms", "Floors", "Waterfront", "View", "Condition", "Grade", "YearBuilt", "YearRenovated", "Zipcode");
    Transforms.ColumnConcatenator.Output categoryConcatenated = experiment.Add(categoryConcatenate);

    var categorize = new Transforms.CategoricalOneHotVectorizer();
    categorize.AddColumn("CategoryFeatures");
    categorize.Data = categoryConcatenated.OutputData;
    Transforms.CategoricalOneHotVectorizer.Output categorized = experiment.Add(categorize);

    // Final "Features" vector = numeric block + encoded categorical block.
    var featuresConcatenate = new Transforms.ColumnConcatenator();
    featuresConcatenate.Data = categorized.OutputData;
    featuresConcatenate.AddColumn("Features", "NumericalFeatures", "CategoryFeatures");
    Transforms.ColumnConcatenator.Output featuresConcatenated = experiment.Add(featuresConcatenate);

    // Train a linear regressor with SDCA on a single thread.
    var learner = new Trainers.StochasticDualCoordinateAscentRegressor();
    learner.TrainingData = featuresConcatenated.OutputData;
    learner.NumThreads = 1;
    Trainers.StochasticDualCoordinateAscentRegressor.Output learnerOutput = experiment.Add(learner);

    // Fuse the transform chain and the predictor into one model, then attach a scorer.
    var combineModels = new Transforms.ManyHeterogeneousModelCombiner();
    combineModels.TransformModels = new ArrayVar <ITransformModel>(numericalConcatenated.Model, categoryConcatenated.Model, categorized.Model, featuresConcatenated.Model);
    combineModels.PredictorModel = learnerOutput.PredictorModel;
    Transforms.ManyHeterogeneousModelCombiner.Output combinedModels = experiment.Add(combineModels);

    var scorer = new Transforms.Scorer
    {
        PredictorModel = combinedModels.PredictorModel
    };
    var scorerOutput = experiment.Add(scorer);

    // Compile the graph, bind the input file and execute.
    experiment.Compile();
    experiment.SetInput(importData.InputFile, new SimpleFileHandle(s_environment, dataPath, false, false));
    experiment.Run();
    return(experiment.GetOutput(scorerOutput.ScoringTransform));
}
// Recovery pass for an experiment: re-queue jobs that failed for infrastructure
// reasons, then drain and run the remaining jobs, flushing results periodically.
void recovery(Experiment e)
{
    // Nothing to recover when the experiment has no jobs queued.
    if (!haveJobs(e))
        return;

    requeueInfrastructureErrors(e);

    Job current = getJob(e, -1);
    while (current != null)
    {
        runJob(e, current);
        // Flush periodically so a crash loses at most ~50 results.
        if (results.Count > 50)
            saveResults();
        current = getJob(e, -1);
    }

    saveResults();
}
/// <summary>
/// Links the experiment to the given course-experiment and student, then persists it
/// through the experiment DAL.
/// </summary>
/// <param name="courseExperiment">Course-experiment the submission belongs to.</param>
/// <param name="whichStudent">Student submitting the experiment.</param>
/// <param name="exp">The experiment being submitted; its foreign keys are stamped here.</param>
/// <returns>The value returned by the DAL insert.</returns>
public int Submit(CourseExperiment courseExperiment, Student whichStudent, Experiment exp)
{
    // Stamp both foreign keys before persisting.
    exp.StudentId = whichStudent.StudentId;
    exp.CourseExperimentId = courseExperiment.CourseExperimentId;
    return _experimentDal.InsertExperiment(exp);
}
/// <summary>
/// Finds results of experiment <paramref name="e"/> that failed due to infrastructure
/// problems (stderr marker or the DLL-not-found NTSTATUS -1073741515), re-queues the
/// corresponding input files via the AQ procedure, and deletes the failed result rows.
/// </summary>
/// <param name="e">The experiment whose failed results are re-queued.</param>
void requeueInfrastructureErrors(Experiment e)
{
    // SECURITY FIX: the original built all three SQL statements by string concatenation,
    // including the Filename value read back from the database — an SQL injection vector
    // (and broken for filenames containing quotes). All values now go through parameters.
    SqlCommand cmd = new SqlCommand(
        "SELECT Data.ID, Strings.s as Filename FROM Data, Strings " +
        "WHERE FilenameP=Strings.ID AND ExperimentID=@ExperimentID AND ResultCode=4 " +
        "AND (stderr like 'INFRASTRUCTURE ERROR%' OR ReturnValue=-1073741515)", sql);
    cmd.Parameters.AddWithValue("@ExperimentID", e.ID);
    cmd.CommandTimeout = 0;

    // Materialize the (ID, Filename) pairs first so the reader is closed before
    // issuing the re-queue/delete commands on the same connection.
    Dictionary<int, string> d = new Dictionary<int, string>();
    using (SqlDataReader r = cmd.ExecuteReader())
    {
        while (r.Read())
        {
            d[(int)r["ID"]] = (string)r["Filename"];
        }
    }

    int cnt = 0;
    foreach (KeyValuePair<int, string> kvp in d)
    {
        // Re-queue the input file for this experiment.
        SqlCommand cmd2 = new SqlCommand("AQ @ExperimentID, @Filename;", sql);
        cmd2.Parameters.AddWithValue("@ExperimentID", e.ID);
        cmd2.Parameters.AddWithValue("@Filename", kvp.Value);
        cmd2.CommandTimeout = 0;
        cmd2.ExecuteNonQuery();

        // Remove the failed result row.
        cmd2 = new SqlCommand("DELETE FROM Data WHERE ExperimentID=@ExperimentID AND ID=@ID", sql);
        cmd2.Parameters.AddWithValue("@ExperimentID", e.ID);
        cmd2.Parameters.AddWithValue("@ID", kvp.Key);
        cmd2.CommandTimeout = 0;
        cmd2.ExecuteNonQuery();
        cnt++;
    }
    System.Console.WriteLine("Re-queued " + cnt + " infrastructure errors.");
}
/// <summary>
/// Stores the experiment posted in the request body.
/// </summary>
/// <param name="experiment">Experiment deserialized from the request body.</param>
public void Post([FromBody] Experiment experiment) => _experimentRepository.Insert(experiment);
/// <summary>
/// Ensures the experiment's binary exists in the local scratch directory,
/// downloading it from the Binaries table on first use. If the blob is a zip
/// package it is unpacked and the main executable located via the package
/// relationship. The binary is then renamed with this worker's name so
/// concurrent workers do not collide, and the method waits until the renamed
/// file is visible and readable.
/// </summary>
/// <param name="e">Experiment to fetch the binary for; e.localExecutable is updated in place.</param>
void getBinary(Experiment e)
{
    if (!Directory.Exists(e.localDir))
        throw new Exception("Local scratch directory does not exist: " + e.localDir);

    if (File.Exists(e.localExecutable))
        return;

    Console.WriteLine("Downloading binary...");
    FileStream fs = null;
    try
    {
        // CreateNew acts as a mutex between workers: only one creation succeeds.
        fs = new FileStream(e.localExecutable, FileMode.CreateNew);
    }
    catch (IOException)
    {
        if (File.Exists(e.localExecutable))
        {
            // All is good, someone else downloaded it!
            return;
        }
        // BUGFIX: previously execution fell through here with fs == null and
        // crashed on fs.Write; the open failed for another reason, so surface it.
        throw;
    }

    Dictionary<string, Object> r = SQLRead("SELECT Binary FROM Binaries WHERE ID=" + e.binaryID, sql);
    byte[] data = (byte[])r["Binary"];
    // BUGFIX: the old code wrote data.GetUpperBound(0) bytes (== Length - 1),
    // silently truncating the binary by one byte.
    fs.Write(data, 0, data.Length);
    fs.Flush();
    fs.Close();

    // PK\x03\x04 signature => zip package; guard against blobs shorter than 4 bytes.
    if (data.Length >= 4 &&
        data[0] == 0x50 && data[1] == 0x4b && data[2] == 0x03 && data[3] == 0x04)
    {
        // This is a zip file: move it aside and unpack every part into localDir.
        string tfn = Path.GetTempFileName() + ".zip";
        File.Move(e.localExecutable, tfn);
        e.localExecutable = null;
        Package pkg = Package.Open(tfn, FileMode.Open);
        PackageRelationshipCollection rels = pkg.GetRelationships();
        if (rels.Count() != 1)
            throw new Exception("Missing package relationships");
        // The single relationship points at the main executable part.
        PackageRelationship main = rels.First();
        foreach (PackagePart part in pkg.GetParts())
        {
            Stream s = part.GetStream(FileMode.Open, FileAccess.Read);
            string fn = CreateFilenameFromUri(part.Uri).Substring(1);
            fs = new FileStream(e.localDir + @"\" + fn, FileMode.OpenOrCreate);
            CopyStream(s, fs);
            fs.Close();
            if (part.Uri == main.TargetUri)
                e.localExecutable = e.localDir + @"\" + fn;
        }
        pkg.Close();
        if (e.localExecutable == null)
            throw new Exception("Main executable not found in zip.");
        // Best effort cleanup of the temporary zip.
        try { File.Delete(tfn); } catch (Exception) { }
    }

    // Give the binary a per-worker name so parallel workers each own a copy.
    string ext = Path.GetExtension(e.localExecutable);
    string localname = e.localExecutable + "-" + myName + ext;
    File.Move(e.localExecutable, localname);
    e.localExecutable += "-" + myName + ext;

    // Wait (up to ~100 s) for the rename to become visible on disk.
    int retry_count = 1000;
    while (!File.Exists(localname))
    {
        Thread.Sleep(100);
        retry_count--;
        if (retry_count == 0)
            throw new Exception("Local binary missing.");
    }

    // Then wait until the file is actually readable (replaces the old goto loop).
    retry_count = 1000;
    while (true)
    {
        try
        {
            FileStream tmp = File.OpenRead(localname);
            tmp.Close();
            break;
        }
        catch
        {
            Thread.Sleep(100);
            retry_count--;
            if (retry_count == 0)
                throw new Exception("Local binary is not readable.");
        }
    }
}
/// <summary>
/// Binds the given data view to this entry point's training-data input on the
/// experiment.
/// </summary>
/// <param name="data">The data view to feed in.</param>
/// <param name="experiment">The experiment receiving the input.</param>
public void SetInputData(IDataView data, Experiment experiment)
    => experiment.SetInput(_entryPointObj.TrainingData, data);
/// <summary>
/// Loads the named experiment from the experiment directory and points the
/// file-system tree at its folder.
/// </summary>
/// <param name="name">Directory name of the experiment to open.</param>
private void OpenExperiment(string name)
{
    string experimentPath = Path.Combine(Settings.ExperimentDirectory, name);

    Experiment = Experiment.Load(experimentPath);
    FileSystemTree.RootPath = experimentPath;

    //MetaResearcher.Text = Experiment.Researcher;
    //MetaName.Text = Experiment.Name;
}
/// <summary>
/// Adds the wrapped entry point to the experiment and returns its training
/// data reference paired with the produced predictor model.
/// </summary>
/// <param name="experiment">Experiment the entry point is added to.</param>
public DataAndModel <IPredictorModel> Add(Experiment experiment)
{
    var entryPointOutput = experiment.Add(_entryPointObj);
    var pair = new DataAndModel <IPredictorModel>(
        _entryPointObj.TrainingData,
        entryPointOutput.PredictorModel);
    return pair;
}
/// <summary>
/// Prompts the user for the level values of each factor (one text field per
/// factor) and writes the generated experiment array into the current range.
/// </summary>
/// <param name="experiment">Experiment describing the factor count and level counts.</param>
private void GenExperiment(Experiment experiment)
{
    var fields = new FieldText[experiment.NbFactor];
    for (int i = 0; i < experiment.NbFactor; i++)
    {
        // One input field per factor, labeled e.g. "F1 values (3)".
        // BUGFIX: corrected the user-visible typo "seperated" -> "separated".
        fields[i] = new FieldText(
            "F" + (i + 1).ToString() + " values (" + experiment.NbLevels[i] + ")",
            "List of values separated by a semicolon",
            "");
        fields[i].MaxValues = experiment.NbLevels[i];
    }
    new InputBox(_excelapp, "Generate experiment", fields).Show(delegate
    {
        var valuesList = new List<string[]>(experiment.NbFactor);
        for (int i = 0; i < fields.Length; i++)
        {
            // A null entry marks a factor with no user-supplied values;
            // presumably GetArray substitutes defaults — confirm there.
            if (string.IsNullOrEmpty(fields[i].Value))
                valuesList.Add(null);
            else
                valuesList.Add(fields[i].Value.Split(';'));
        }
        CellsOperator.AddDataToRange(_range, experiment.GetArray(valuesList));
    });
}
/// <summary>
/// Marks the currently running trial as paused.
/// </summary>
public void PauseCurrentTrial()
{
    var trial = Experiment.CurrentTrial <CogTrial>();
    trial.IsPaused = true;
}
/// <summary>
/// Parses one Ingredient nonterminal, driven by the syntax interpreter, and
/// adds the resulting ingredient to the caller's experiment.
/// </summary>
/// <param name="exp">Experiment receiving the parsed ingredient.</param>
private static void NT_Ingredient(ref Experiment exp)
{
    while (true)
    {
        int action = Syn.Interpret();
        if (action == 0)
        {
            return;                                  // end of the nonterminal
        }
        else if (action == 1)
        {
            Lex.GETidentifierAttr(out name);         // capture ingredient name
        }
        else if (action == 2)
        {
            Lex.GETnumberAttr(out amount);           // capture ingredient amount
        }
        else if (action == 3)
        {
            // SEM action: materialize the ingredient from captured attributes.
            exp.AddIngredient(new Ingredient(name, amount));
        }
        // Any other action code is ignored, as in the original switch.
    }
}
/// <summary>
/// First step of the pipeline: registers a data-view reference with the
/// experiment and wraps the imported data in a pipeline step.
/// </summary>
/// <param name="previousStep">Must be null — this data source heads the pipeline.</param>
/// <param name="experiment">Experiment the data-view entry point is added to.</param>
public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment)
{
    // A data source can only appear at the head of the pipeline.
    Contracts.Assert(previousStep == null);

    _dataViewEntryPoint = new Data.DataViewReference();
    var dataViewOutput = experiment.Add(_dataViewEntryPoint);

    return new CollectionDataSourcePipelineStep(dataViewOutput.Data);
}
/// <summary>
/// Runs the code example.
/// </summary>
/// <param name="user">The AdWords user.</param>
/// <param name="campaignId">Id of the campaign to which experiments are
/// added.</param>
/// <param name="adGroupId">Id of the ad group to which experiments are
/// added.</param>
/// <param name="criterionId">Id of the criterion for which experiments
/// are added.</param>
public void Run(AdWordsUser user, long campaignId, long adGroupId, long criterionId) {
  // Get the ExperimentService.
  ExperimentService experimentService =
      (ExperimentService) user.GetService(AdWordsService.v201601.ExperimentService);

  // Get the AdGroupService.
  AdGroupService adGroupService =
      (AdGroupService) user.GetService(AdWordsService.v201601.AdGroupService);

  // Get the AdGroupCriterionService.
  AdGroupCriterionService adGroupCriterionService =
      (AdGroupCriterionService) user.GetService(AdWordsService.v201601.AdGroupCriterionService);

  // Create the experiment: 10% of queries, starting tomorrow.
  Experiment experiment = new Experiment();
  experiment.campaignId = campaignId;
  experiment.name = "Interplanetary Cruise #" + ExampleUtilities.GetRandomString();
  experiment.queryPercentage = 10;
  experiment.startDateTime = DateTime.Now.AddDays(1).ToString("yyyyMMdd HHmmss");

  // Optional: Set the end date.
  experiment.endDateTime = DateTime.Now.AddDays(30).ToString("yyyyMMdd HHmmss");

  // Optional: Set the status.
  experiment.status = ExperimentStatus.ENABLED;

  // Create the operation.
  ExperimentOperation experimentOperation = new ExperimentOperation();
  experimentOperation.@operator = Operator.ADD;
  experimentOperation.operand = experiment;

  try {
    // Add the experiment.
    ExperimentReturnValue experimentRetVal = experimentService.mutate(
        new ExperimentOperation[] {experimentOperation});

    // Display the results.
    if (experimentRetVal != null && experimentRetVal.value != null &&
        experimentRetVal.value.Length > 0) {
      long experimentId = 0;
      Experiment newExperiment = experimentRetVal.value[0];
      Console.WriteLine("Experiment with name = \"{0}\" and id = \"{1}\" was added.\n",
          newExperiment.name, newExperiment.id);
      experimentId = newExperiment.id;

      // Set ad group for the experiment.
      AdGroup adGroup = new AdGroup();
      adGroup.id = adGroupId;

      // Create experiment bid multiplier rule that will modify ad group bid
      // for the experiment.
      ManualCPCAdGroupExperimentBidMultipliers adGroupBidMultiplier =
          new ManualCPCAdGroupExperimentBidMultipliers();
      adGroupBidMultiplier.maxCpcMultiplier = new BidMultiplier();
      adGroupBidMultiplier.maxCpcMultiplier.multiplier = 1.5;

      // Set experiment data to the ad group.
      AdGroupExperimentData adGroupExperimentData = new AdGroupExperimentData();
      adGroupExperimentData.experimentId = experimentId;
      adGroupExperimentData.experimentDeltaStatus = ExperimentDeltaStatus.MODIFIED;
      adGroupExperimentData.experimentBidMultipliers = adGroupBidMultiplier;

      adGroup.experimentData = adGroupExperimentData;

      // Create the operation.
      AdGroupOperation adGroupOperation = new AdGroupOperation();
      adGroupOperation.operand = adGroup;
      adGroupOperation.@operator = Operator.SET;

      // Update the ad group.
      AdGroupReturnValue adGroupRetVal = adGroupService.mutate(new AdGroupOperation[] {
          adGroupOperation});

      // Display the results.
      if (adGroupRetVal != null && adGroupRetVal.value != null &&
          adGroupRetVal.value.Length > 0) {
        AdGroup updatedAdGroup = adGroupRetVal.value[0];
        Console.WriteLine("Ad group with name = \"{0}\", id = \"{1}\" and status = \"{2}\" " +
            "was updated for the experiment.\n", updatedAdGroup.name, updatedAdGroup.id,
            updatedAdGroup.status);
      } else {
        Console.WriteLine("No ad groups were updated.");
      }

      // Set ad group criteria for the experiment.
      Criterion criterion = new Criterion();
      criterion.id = criterionId;

      BiddableAdGroupCriterion adGroupCriterion = new BiddableAdGroupCriterion();
      adGroupCriterion.adGroupId = adGroupId;
      adGroupCriterion.criterion = criterion;

      // Create experiment bid multiplier rule that will modify criterion bid
      // for the experiment.
      ManualCPCAdGroupCriterionExperimentBidMultiplier bidMultiplier =
          new ManualCPCAdGroupCriterionExperimentBidMultiplier();
      bidMultiplier.maxCpcMultiplier = new BidMultiplier();
      bidMultiplier.maxCpcMultiplier.multiplier = 1.5;

      // Set experiment data to the criterion.
      BiddableAdGroupCriterionExperimentData adGroupCriterionExperimentData =
          new BiddableAdGroupCriterionExperimentData();
      adGroupCriterionExperimentData.experimentId = experimentId;
      adGroupCriterionExperimentData.experimentDeltaStatus = ExperimentDeltaStatus.MODIFIED;
      adGroupCriterionExperimentData.experimentBidMultiplier = bidMultiplier;

      adGroupCriterion.experimentData = adGroupCriterionExperimentData;

      // Create the operation.
      AdGroupCriterionOperation adGroupCriterionOperation = new AdGroupCriterionOperation();
      adGroupCriterionOperation.operand = adGroupCriterion;
      adGroupCriterionOperation.@operator = Operator.SET;

      // Update the ad group criteria.
      AdGroupCriterionReturnValue adGroupCriterionRetVal = adGroupCriterionService.mutate(
          new AdGroupCriterionOperation[] {adGroupCriterionOperation});

      // Display the results.
      if (adGroupCriterionRetVal != null && adGroupCriterionRetVal.value != null &&
          adGroupCriterionRetVal.value.Length > 0) {
        AdGroupCriterion updatedAdGroupCriterion = adGroupCriterionRetVal.value[0];
        Console.WriteLine("Ad group criterion with ad group id = \"{0}\", criterion id = " +
            "\"{1}\" and type = \"{2}\" was updated for the experiment.\n",
            updatedAdGroupCriterion.adGroupId, updatedAdGroupCriterion.criterion.id,
            updatedAdGroupCriterion.criterion.CriterionType);
      } else {
        Console.WriteLine("No ad group criteria were updated.");
      }
    } else {
      Console.WriteLine("No experiments were added.");
    }
  } catch (Exception e) {
    throw new System.ApplicationException("Failed to add experiment.", e);
  }
}
/// <summary>
/// Builds the data view from the environment, validates it, and binds it to
/// this step's data-view entry point on the experiment.
/// </summary>
/// <param name="environment">Host environment used to create and validate the view.</param>
/// <param name="experiment">Experiment receiving the data input.</param>
public void SetInput(IHostEnvironment environment, Experiment experiment)
{
    var view = GetDataView(environment);
    _dataView = view;

    // Fail fast if the view could not be produced.
    environment.CheckValue(_dataView, nameof(_dataView));

    experiment.SetInput(_dataViewEntryPoint.Data, view);
}
/// <summary>
/// Initializes a 2D DFT solver for the given experiment using the default
/// carrier type (electrons); delegates to the two-argument constructor.
/// </summary>
/// <param name="exp">The experiment to solve.</param>
public TwoD_DFTSolver(Experiment exp) : this(exp, Carrier.electron) { }
/// <summary>
/// Entry-point macro that wraps a training subgraph in a train/test run:
/// rewires the subgraph to consume the training data and emit a model
/// variable, scores the test set with that model (optionally the training
/// set too), and appends the matching evaluator nodes.
/// </summary>
/// <param name="env">Host environment used for node validation and errors.</param>
/// <param name="input">Macro arguments: subgraph nodes, data bindings, outputs, options.</param>
/// <param name="node">The entry-point node representing this macro invocation.</param>
/// <returns>The expanded, flattened node list to execute, tagged with the pipeline id.</returns>
public static CommonOutputs.MacroOutput <Output> TrainTest(
    IHostEnvironment env,
    Arguments input,
    EntryPointNode node) {
  // Create default pipeline ID if one not given.
  input.PipelineId = input.PipelineId ?? Guid.NewGuid().ToString("N");

  // Parse the subgraph.
  var subGraphRunContext = new RunContext(env);
  var subGraphNodes = EntryPointNode.ValidateNodes(env, subGraphRunContext, input.Nodes, node.Catalog);

  // Change the subgraph to use the training data as input.
  var varName = input.Inputs.Data.VarName;
  VariableBinding transformModelVarName = null;
  if (input.TransformModel != null) {
    transformModelVarName = node.GetInputVariable(nameof(input.TransformModel));
  }
  if (!subGraphRunContext.TryGetVariable(varName, out var dataVariable)) {
    throw env.Except($"Invalid variable name '{varName}'.");
  }
  var trainingVar = node.GetInputVariable(nameof(input.TrainingData));
  foreach (var subGraphNode in subGraphNodes) {
    subGraphNode.RenameInputVariable(dataVariable.Name, trainingVar);
  }
  subGraphRunContext.RemoveVariable(dataVariable);

  // Change the subgraph to use the model variable as output. Which model kind
  // depends on whether the caller asked for a predictor or a transform model.
  varName = input.Outputs.PredictorModel == null ? input.Outputs.TransformModel.VarName
      : input.Outputs.PredictorModel.VarName;
  if (!subGraphRunContext.TryGetVariable(varName, out dataVariable)) {
    throw env.Except($"Invalid variable name '{varName}'.");
  }
  string outputVarName = input.Outputs.PredictorModel == null ?
      node.GetOutputVariableName(nameof(Output.TransformModel)) :
      node.GetOutputVariableName(nameof(Output.PredictorModel));
  foreach (var subGraphNode in subGraphNodes) {
    subGraphNode.RenameOutputVariable(dataVariable.Name, outputVarName);
  }
  subGraphRunContext.RemoveVariable(dataVariable);

  // Move the variables from the subcontext to the main context.
  node.Context.AddContextVariables(subGraphRunContext);

  // Change all the subgraph nodes to use the main context.
  foreach (var subGraphNode in subGraphNodes) {
    subGraphNode.SetContext(node.Context);
  }

  // Testing using test data set.
  var testingVar = node.GetInputVariable(nameof(input.TestingData));
  var exp = new Experiment(env);
  DatasetScorer.Output scoreNodeOutput = null;
  ML.Models.DatasetTransformer.Output datasetTransformNodeOutput = null;
  if (input.Outputs.PredictorModel == null) {
    // Combine the transform model with any potential transform model passed
    // from the outer graph.
    if (transformModelVarName != null && transformModelVarName.VariableName != null) {
      var modelCombine = new ML.Transforms.ModelCombiner {
        Models = new ArrayVar <ITransformModel>(
            new Var <ITransformModel>[] {
              new Var <ITransformModel> { VarName = transformModelVarName.VariableName },
              new Var <ITransformModel> { VarName = outputVarName }
            }
        )
      };
      var modelCombineOutput = exp.Add(modelCombine);
      outputVarName = modelCombineOutput.OutputModel.VarName;
    }
    // No predictor: just apply the (combined) transform model to the test data.
    var datasetTransformerNode = new Models.DatasetTransformer {
      Data = { VarName = testingVar.ToJson() },
      TransformModel = { VarName = outputVarName }
    };
    datasetTransformNodeOutput = exp.Add(datasetTransformerNode);
  } else {
    // Combine the predictor model with any potential transform model passed
    // from the outer graph.
    if (transformModelVarName != null && transformModelVarName.VariableName != null) {
      var modelCombine = new TwoHeterogeneousModelCombiner {
        TransformModel = { VarName = transformModelVarName.VariableName },
        PredictorModel = { VarName = outputVarName }
      };
      var modelCombineOutput = exp.Add(modelCombine);
      outputVarName = modelCombineOutput.PredictorModel.VarName;
    }
    // Add the scoring node for testing.
    var scoreNode = new DatasetScorer {
      Data = { VarName = testingVar.ToJson() },
      PredictorModel = { VarName = outputVarName }
    };
    scoreNodeOutput = exp.Add(scoreNode);
  }
  subGraphNodes.AddRange(EntryPointNode.ValidateNodes(env, node.Context, exp.GetNodes(), node.Catalog));

  // Do not double-add previous nodes.
  exp.Reset();

  // REVIEW: we need to extract the proper label column name here to pass to the evaluators.
  // This is where you would add code to do it.
  var settings = new MacroUtils.EvaluatorSettings {
    LabelColumn = DefaultColumnNames.Label
  };
  string outVariableName;

  if (input.IncludeTrainingMetrics) {
    DatasetScorer.Output scoreNodeTrainingOutput = null;
    ML.Models.DatasetTransformer.Output datasetTransformNodeTrainingOutput = null;
    if (input.Outputs.PredictorModel == null) {
      // NOTE(review): this is the training-metrics branch, yet it feeds
      // testingVar into the transformer — possibly should be trainingVar
      // (compare the else branch); confirm before trusting these metrics.
      var datasetTransformerNode = new Models.DatasetTransformer {
        Data = { VarName = testingVar.ToJson() },
        TransformModel = { VarName = outputVarName }
      };
      datasetTransformNodeTrainingOutput = exp.Add(datasetTransformerNode);
    } else {
      // Add the scoring node for training.
      var scoreNodeTraining = new DatasetScorer {
        Data = { VarName = trainingVar.ToJson() },
        PredictorModel = { VarName = outputVarName }
      };
      scoreNodeTrainingOutput = exp.Add(scoreNodeTraining);
    }
    subGraphNodes.AddRange(EntryPointNode.ValidateNodes(env, node.Context, exp.GetNodes(), node.Catalog));

    // Do not double-add previous nodes.
    exp.Reset();

    // Add the evaluator node for training.
    var evalInputOutputTraining = MacroUtils.GetEvaluatorInputOutput(input.Kind, settings);
    var evalNodeTraining = evalInputOutputTraining.Item1;
    var evalOutputTraining = evalInputOutputTraining.Item2;
    evalNodeTraining.Data.VarName = input.Outputs.PredictorModel == null ?
        datasetTransformNodeTrainingOutput.OutputData.VarName :
        scoreNodeTrainingOutput.ScoredData.VarName;

    // Route the training evaluator's outputs to whatever variables the caller mapped.
    if (node.OutputMap.TryGetValue(nameof(Output.TrainingWarnings), out outVariableName)) {
      evalOutputTraining.Warnings.VarName = outVariableName;
    }
    if (node.OutputMap.TryGetValue(nameof(Output.TrainingOverallMetrics), out outVariableName)) {
      evalOutputTraining.OverallMetrics.VarName = outVariableName;
    }
    if (node.OutputMap.TryGetValue(nameof(Output.TrainingPerInstanceMetrics), out outVariableName)) {
      evalOutputTraining.PerInstanceMetrics.VarName = outVariableName;
    }
    // Confusion matrix only exists for classification evaluators.
    if (node.OutputMap.TryGetValue(nameof(Output.TrainingConfusionMatrix), out outVariableName)
        && evalOutputTraining is CommonOutputs.IClassificationEvaluatorOutput eoTraining) {
      eoTraining.ConfusionMatrix.VarName = outVariableName;
    }
    exp.Add(evalNodeTraining, evalOutputTraining);
    subGraphNodes.AddRange(EntryPointNode.ValidateNodes(env, node.Context, exp.GetNodes(), node.Catalog));
  }

  // Do not double-add previous nodes.
  exp.Reset();

  // Add the evaluator node for testing.
  var evalInputOutput = MacroUtils.GetEvaluatorInputOutput(input.Kind, settings);
  var evalNode = evalInputOutput.Item1;
  var evalOutput = evalInputOutput.Item2;
  evalNode.Data.VarName = input.Outputs.PredictorModel == null ?
      datasetTransformNodeOutput.OutputData.VarName :
      scoreNodeOutput.ScoredData.VarName;

  // Route the test evaluator's outputs to whatever variables the caller mapped.
  if (node.OutputMap.TryGetValue(nameof(Output.Warnings), out outVariableName)) {
    evalOutput.Warnings.VarName = outVariableName;
  }
  if (node.OutputMap.TryGetValue(nameof(Output.OverallMetrics), out outVariableName)) {
    evalOutput.OverallMetrics.VarName = outVariableName;
  }
  if (node.OutputMap.TryGetValue(nameof(Output.PerInstanceMetrics), out outVariableName)) {
    evalOutput.PerInstanceMetrics.VarName = outVariableName;
  }
  // Confusion matrix only exists for classification evaluators.
  if (node.OutputMap.TryGetValue(nameof(Output.ConfusionMatrix), out outVariableName)
      && evalOutput is CommonOutputs.IClassificationEvaluatorOutput eo) {
    eo.ConfusionMatrix.VarName = outVariableName;
  }
  exp.Add(evalNode, evalOutput);
  subGraphNodes.AddRange(EntryPointNode.ValidateNodes(env, node.Context, exp.GetNodes(), node.Catalog));

  // Marks as an atomic unit that can be run in
  // a distributed fashion.
  foreach (var subGraphNode in subGraphNodes) {
    subGraphNode.StageId = input.PipelineId;
  }

  return(new CommonOutputs.MacroOutput <Output>() { Nodes = subGraphNodes });
}
//Methods

/// <summary>
/// Boots the BioClouds simulation: creates the ECS city, loads the experiment
/// file, propagates its domain into the global parameters, positions and
/// scales the display quads and camera, creates archetypes/renderers, and
/// wires the density heat-map material. Exits early if BioClouds is disabled.
/// </summary>
public void Init() {
  Parameters inst = Parameters.Instance;
  entityManager = World.Active.GetOrCreateManager <EntityManager>();
  city = new BioCity();
  city.CellMeshes = new List <MeshInstanceRenderer>();
  city.AgentMeshes = new List <MeshInstanceRenderer>();
  city.CloudMeshes = new List <MeshInstanceRenderer>();

  // Bail out early when the BioClouds subsystem is switched off.
  if (!inst.BioCloudsActive) {
    DeactivateBioclouds();
    return;
  }

  city.BioEntityManager = entityManager;
  city.BioParameters = Object.FindObjectOfType <Parameters>();

  // Make sure the output folders exist under MyDocuments.
  var folder = System.Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments);
  var bioCloudsFolder = System.IO.Directory.CreateDirectory(folder + "\\VHLAB\\BioClouds");
  var expFolder = System.IO.Directory.CreateDirectory(folder + "\\VHLAB\\BioClouds\\Experiments");

  exp = LoadExperiment(city.BioParameters.ExperimentPath);
  r.InitState((uint)exp.SeedState); // seed the RNG from the experiment's stored seed
  Debug.Log("domain: " + exp.Domain);

  // Copy the experiment's simulation domain into the global parameters.
  city.BioParameters.DefaultDomainMinX = exp.Domain.minX; //0
  city.BioParameters.DefaultDomainMinY = exp.Domain.minY; //1
  city.BioParameters.DefaultDomainMaxX = exp.Domain.maxX; //2
  city.BioParameters.DefaultDomainMaxY = exp.Domain.maxY; //3

  // Center the density quad, background and collision mask on the domain and
  // stretch them to cover it (different z values layer them).
  densityQuad.transform.position = new Vector3((exp.Domain.minX + exp.Domain.maxX) / 2, (exp.Domain.minY + exp.Domain.maxY) / 2, 5);
  densityQuad.transform.localScale = new Vector3(exp.Domain.maxX - exp.Domain.minX, exp.Domain.maxY - exp.Domain.minY, 1);
  background.transform.position = new Vector3((exp.Domain.minX + exp.Domain.maxX) / 2, (exp.Domain.minY + exp.Domain.maxY) / 2, 10);
  background.transform.localScale = new Vector3(exp.Domain.maxX - exp.Domain.minX, exp.Domain.maxY - exp.Domain.minY, 1);
  collisionMask.transform.position = new Vector3((exp.Domain.minX + exp.Domain.maxX) / 2, (exp.Domain.minY + exp.Domain.maxY) / 2, 10);
  collisionMask.transform.localScale = new Vector3(exp.Domain.maxX - exp.Domain.minX, exp.Domain.maxY - exp.Domain.minY, 1.01f);

  inst.IDToRecord = exp.IDToRecord;

  // Place the camera at the domain center: a fixed offset for an orthographic
  // camera, otherwise a distance derived from the horizontal extent and FOV.
  float z = 0;
  Camera cam = mainCamera.GetComponent <Camera>();
  if (cam.orthographic) {
    z = -15;
  } else {
    z = -((exp.Domain.maxX - exp.Domain.minX) / 2) / math.tan(math.radians(cam.fieldOfView / 2));
  }
  mainCamera.transform.localPosition = new Vector3((exp.Domain.minX + exp.Domain.maxX) / 2, (exp.Domain.minY + exp.Domain.maxY) / 2, z);

  Parameters.Instance.MaxSimulationFrames = exp.FramesToRecord;

  // Configure the grid converter with the cell width and the domain bounds.
  GridConverter.Width = city.BioParameters.CellWidth;
  GridConverter.SetDomain(city.BioParameters.DefaultDomainMinX, city.BioParameters.DefaultDomainMinY, city.BioParameters.DefaultDomainMaxX, city.BioParameters.DefaultDomainMaxY);

  // ECS archetypes for cloud and cell entities.
  city.CloudArchetype = city.BioEntityManager.CreateArchetype(typeof(Position), typeof(Rotation), typeof(CloudData), typeof(CloudGoal), typeof(CloudMoveStep), typeof(SpawnedAgentsCounter), typeof(CloudSplitData));
  city.CellArchetype = city.BioEntityManager.CreateArchetype(typeof(Position), typeof(Rotation), typeof(CellData));

  // One renderer per configured cloud mesh/material pair.
  foreach (MeshMaterial m in city.BioParameters.FixedParameters.CloudRendererData) {
    city.CloudMeshes.Add(new MeshInstanceRenderer() { mesh = m.mesh, material = m.mat });
  }

  // Build the textures driving the density heat-map shader.
  Texture2D noise = CreateNoiseTexture.GetNoiseTexture(512, 512, 1, new float2(0.0f, 0.0f));
  noise.wrapMode = TextureWrapMode.Mirror;
  Texture2D density = new Texture2D(inst.Rows, inst.Cols);
  density.wrapMode = TextureWrapMode.Clamp;
  density.filterMode = FilterMode.Point;
  CloudHeatMap.DensityRenderer = densityQuad.GetComponent <MeshRenderer>();
  CloudHeatMap.DensityRenderer.material.SetTexture("_DensityTex", density);
  CloudHeatMap.DensityRenderer.material.SetTexture("_NoiseTex", noise);
  CloudHeatMap.DensityRenderer.material.SetInt("_Rows", inst.Rows);
  CloudHeatMap.DensityRenderer.material.SetInt("_Cols", inst.Cols);
  CloudHeatMap.DensityRenderer.material.SetFloat("_CellWidth", inst.CellWidth);
  CloudHeatMap.DensityRenderer.material.SetTexture("_HeatMapScaleTex", inst.GetHeatScaleTexture());

  // Disable optional systems according to the parameter flags.
  if (!city.BioParameters.DrawCloudToMarkerLines) {
    World.Active.GetExistingManager <CloudCellDrawLineSystem>().Enabled = false;
  }
  if (!city.BioParameters.EnableRightPreference) {
    World.Active.GetExistingManager <CloudRightPreferenceSystem>().Enabled = false;
  }

  // Setup the Collision Mask
  GetComponent <CollisionMaskSettings>().Init();
}
/// <summary>
/// Integration test: builds a benchmark experiment from a loaded experiment,
/// remaps its benchmark source-artifact input to the original artifacts, and
/// runs the prepared experiment end to end, checking reported progress.
/// </summary>
public void PrepareAndRunBenchmarkExperiment() {
  // Deliberately disabled until the contest feature is revisited.
  Assert.Fail("Test temporarily broken. Ignored till contest feature is going to be revisited.");

  List <Benchmark> benchmarks = BenchmarkLoader.LoadBenchmarksInfo(BenchmarkDirectory);
  Benchmark testBenchmark = benchmarks[0];

  // load the experiment to be run against benchmark
  string experimentFilename = System.IO.Path.Combine(AppContext.BaseTestDirectory, "experiment_to_be_benchmarked.gml");
  Experiment experimentToBeBenchmarked = ExperimentManager.Load(experimentFilename, AppContext.Components);

  //prepare matching io
  testBenchmark.PrepareMatchingIOByType(experimentToBeBenchmarked);
  Assert.AreEqual(2, testBenchmark.BenchmarkInputSetting.Count);
  Assert.AreEqual(1, testBenchmark.BenchmarkOutputsSetting.Count);

  //match benchmarkSourceArtifact with original source artifacts
  foreach (BenchmarkItemSetting <IOItem> pair in testBenchmark.BenchmarkInputSetting) {
    IOItem item = pair.Item;
    ItemSettingCollection candidates = pair.CandidateSettings;
    if (item.MappedTo.Equals("benchmarkSourceArtifacts")) {
      //we found the item we want to remap
      pair.SelectedSetting = candidates["originalSourceArtifacts"];
    }
  }

  //finally prepare benchmark experiment
  testBenchmark.PrepareBenchmarkExperiment(experimentToBeBenchmarked, AppContext.Components);

  //assert that only two inputs are included in the export settings and one output
  int includedInputs = 0;
  foreach (KeyValuePair <string, ItemSetting> pair in testBenchmark.Setup.InputSettings) {
    if (pair.Value.Include == true) {
      includedInputs++;
    }
  }
  Assert.AreEqual(2, includedInputs);

  int includedOutputs = 0;
  foreach (KeyValuePair <string, ItemSetting> pair in testBenchmark.Setup.OutputSettings) {
    if (pair.Value.Include == true) {
      includedOutputs++;
    }
  }
  Assert.AreEqual(1, includedOutputs);

  Assert.IsNotNull(testBenchmark.BenchmarkExperiment);

  //for debug output file
  // string path = System.IO.Path.Combine(AppContext.BaseTestDirectory, "benchmarkTest1.gml");
  // AppContext.ExperimentManager.Save(testBenchmark.BenchmarkExperiment, path);

  // Run the prepared benchmark experiment and verify it completes cleanly
  // with the expected number of progress steps.
  MockProgress progress = new MockProgress();
  using (var dispatcher = ExperimentRunnerHelper.CreateExperimentRunner(testBenchmark.BenchmarkExperiment, AppContext.WorkspaceInstance, AppContext.Components)) {
    dispatcher.ExecuteExperiment(progress);
    Assert.AreEqual(7, progress.NumSteps);
    Assert.IsFalse(progress.HasError);
  }
}
// Food items and the player currently present in the world.
private List<GameObject> exists;

/// <summary>
/// Unity lifecycle hook: caches the Experiment component found by tag and
/// seeds the world-object list with the player.
/// </summary>
void Start()
{
    exp = GameObject.FindGameObjectWithTag("Experiment").GetComponent<Experiment>();
    exists = new List<GameObject> { player };
}
/// <summary>
/// Creates the start-up view model, wiring the Start command and using the
/// supplied experiment, or a fresh one when none is given.
/// </summary>
/// <param name="navigation">Navigation service passed to the base view model.</param>
/// <param name="exp">Optional experiment to edit; null creates a new one.</param>
public StartupViewModel(NavigationService navigation, Experiment exp = null) : base(navigation)
{
    StartCommand = new DelegateCommand(Start);

    if (exp == null)
        Experiment = new Experiment();
    else
        Experiment = exp;
}