/// <summary>
/// Builds an exportable copy of the given problem: the source is cloned and its
/// problem data is replaced by the newly created (preprocessed) problem data.
/// </summary>
private IDataAnalysisProblem ExportProblem(IDataAnalysisProblem source) {
  var clone = (IDataAnalysisProblem)source.Clone();
  clone.ProblemDataParameter.ActualValue = CreateNewProblemData();
  return clone;
}
/// <summary>
/// Handles the "Open Problem" button: shows a file dialog and loads the chosen
/// file asynchronously, assigning the loaded problem to the content. The
/// buttons and the problem view host are disabled while loading and re-enabled
/// in the finally block (always marshalled back to the UI thread via Invoke,
/// because the LoadAsync callback runs on a background thread).
/// </summary>
private void openProblemButton_Click(object sender, EventArgs e) {
  openFileDialog.Title = "Open Problem";
  if (openFileDialog.ShowDialog(this) != DialogResult.OK) return;

  newProblemButton.Enabled = openProblemButton.Enabled = false;
  algorithmProblemViewHost.Enabled = false;

  ContentManager.LoadAsync(openFileDialog.FileName, delegate(IStorableContent content, Exception error) {
    try {
      if (error != null) throw error;
      IDataAnalysisProblem problem = content as IDataAnalysisProblem;
      if (problem == null) {
        // BUG FIX: previously the error dialog was only shown when the content
        // also matched the algorithm's ProblemType; otherwise a null problem
        // fell through to the else branch and was assigned to Content.Problem.
        // Any content that is not an IDataAnalysisProblem is an invalid file.
        Invoke(new Action(() => MessageBox.Show(this, "The selected file does not contain a DataAnalysisProblem problem.", "Invalid File", MessageBoxButtons.OK, MessageBoxIcon.Error)));
      } else {
        Content.Problem = problem;
      }
    } catch (Exception ex) {
      Invoke(new Action(() => ErrorHandling.ShowErrorDialog(this, ex)));
    } finally {
      // Re-enable the UI regardless of success or failure.
      Invoke(new Action(delegate() {
        algorithmProblemViewHost.Enabled = true;
        newProblemButton.Enabled = openProblemButton.Enabled = true;
      }));
    }
  });
}
/// <summary>
/// Replaces the problem's data with freshly created (preprocessed) problem
/// data and prefixes the problem name to mark it as preprocessed.
/// </summary>
private void SetNewProblemData(IDataAnalysisProblem problem) {
  problem.ProblemDataParameter.ActualValue = creator.CreateProblemData();
  problem.Name = "Preprocessed " + problem.Name;
}
/// <summary>
/// Initializes a preprocessing context for the given problem data: the raw
/// data is wrapped in a transactional store, which in turn is exposed through
/// a filtering facade; algorithm and problem are kept for later export.
/// </summary>
public PreprocessingContext(IDataAnalysisProblemData dataAnalysisProblemData, IAlgorithm algorithm, IDataAnalysisProblem problem) {
  Data = new FilteredPreprocessingData(new TransactionalPreprocessingData(dataAnalysisProblemData));
  ProblemData = dataAnalysisProblemData;
  Algorithm = algorithm;
  Problem = problem;
  creator = new ProblemDataCreator(this);
}
/// <summary>
/// Drag-and-drop target handler: assigns the dropped data analysis problem to
/// the content, cloning it first when a copy (rather than a move/link) effect
/// was negotiated.
/// </summary>
private void algorithmProblemTabPage_DragDrop(object sender, DragEventArgs e) {
  if (e.Effect == DragDropEffects.None) return;
  IDataAnalysisProblem problem = e.Data.GetData(HeuristicLab.Common.Constants.DragDropDataFormat) as IDataAnalysisProblem;
  // ROBUSTNESS FIX: ignore drops whose payload is not an IDataAnalysisProblem.
  // Previously a null payload caused a NullReferenceException on Clone() (copy
  // effect) or silently assigned null to Content.Problem.
  if (problem == null) return;
  if ((e.Effect & DragDropEffects.Copy) == DragDropEffects.Copy) {
    problem = (IDataAnalysisProblem)problem.Clone();
  }
  Content.Problem = problem;
}
/// <summary>
/// Creates a preprocessing context. The problem data is wrapped in a
/// transactional store (named explicitly here) and exposed via a filtering
/// facade; the problem data creator is built last since it captures this
/// context.
/// </summary>
public PreprocessingContext(IDataAnalysisProblemData dataAnalysisProblemData, IAlgorithm algorithm, IDataAnalysisProblem problem) {
  var transactionalData = new TransactionalPreprocessingData(dataAnalysisProblemData);
  Data = new FilteredPreprocessingData(transactionalData);
  ProblemData = dataAnalysisProblemData;
  Algorithm = algorithm;
  Problem = problem;
  creator = new ProblemDataCreator(this);
}
/// <summary>
/// Walks up the control hierarchy and reports the outermost IAlgorithm and
/// IDataAnalysisProblem found in any ancestor content view (later — i.e.
/// outer — matches overwrite earlier ones).
/// </summary>
private void GetMostOuterContent(Control control, out IAlgorithm algorithm, out IDataAnalysisProblem problem) {
  algorithm = null;
  problem = null;
  for (var current = control; current != null; current = current.Parent) {
    var view = current as IContentView;
    if (view == null) continue;
    var candidateAlgorithm = view.Content as IAlgorithm;
    if (candidateAlgorithm != null) algorithm = candidateAlgorithm;
    var candidateProblem = view.Content as IDataAnalysisProblem;
    if (candidateProblem != null) problem = candidateProblem;
  }
}
// Reacts to the wrapped algorithm's Problem being exchanged. Cross validation
// only supports data analysis problems: any other problem type is rejected by
// restoring the previously tracked problem and throwing. Otherwise the Reset
// event subscription is moved from the old problem to the new one and
// listeners are notified via OnProblemChanged().
private void Algorithm_ProblemChanged(object sender, EventArgs e) {
  if (algorithm.Problem != null && !(algorithm.Problem is IDataAnalysisProblem)) {
    // Revert to the last valid problem before rejecting the assignment.
    // NOTE(review): this re-assignment re-fires Algorithm_ProblemChanged
    // re-entrantly for the restored problem — presumably intended so the
    // event subscriptions stay consistent; verify against the Problem setter.
    algorithm.Problem = problem;
    throw new ArgumentException("A cross validation algorithm can only contain DataAnalysisProblems.");
  }
  // Detach from the old problem's Reset event before replacing it.
  if (problem != null) { problem.Reset -= new EventHandler(Problem_Reset); }
  problem = (IDataAnalysisProblem)algorithm.Problem;
  if (problem != null) { problem.Reset += new EventHandler(Problem_Reset); }
  OnProblemChanged();
}
/// <summary>
/// Searches the ancestor chain of <paramref name="control"/> for content views
/// and returns the outermost IAlgorithm and IDataAnalysisProblem encountered
/// (each outer match replaces the previous one).
/// </summary>
private void GetMostOuterContent(Control control, out IAlgorithm algorithm, out IDataAnalysisProblem problem) {
  algorithm = null;
  problem = null;
  while (control != null) {
    var contentView = control as IContentView;
    if (contentView != null) {
      // Keep the previous match when the view's content is not of the sought type.
      algorithm = (contentView.Content as IAlgorithm) ?? algorithm;
      problem = (contentView.Content as IDataAnalysisProblem) ?? problem;
    }
    control = control.Parent;
  }
}
// Base constructor for Gaussian process algorithms. Registers the algorithm
// parameters (mean/covariance functions, LM-BFGS iteration count, seeding,
// gradient handling) and then wires the LM-BFGS hyperparameter-optimization
// operator graph:
//   randomCreator -> gpInitializer -> bfgsInitializer -> makeStep -> branch
//     false branch: modelCreator -> updateResults -> analyzer -> makeStep (loop)
//     true branch:  finalModelCreator -> finalAnalyzer -> solutionCreator
protected GaussianProcessBase(IDataAnalysisProblem problem) : base() {
  Problem = problem;
  // --- algorithm parameters ---
  Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst()));
  Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSquaredExponentialIso()));
  Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
  Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
  Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
  Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
  Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed
  Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputValuesParameterName, "Determines if the input variable values are scaled to the range [0..1] for training.", new BoolValue(true)));
  Parameters[ScaleInputValuesParameterName].Hidden = true; // necessary for BFGS
  // NOTE(review): "Maximization" is registered here but never explicitly bound
  // to updateResults.MaximizationParameter (a sibling variant of this ctor in
  // this file does bind it). Presumably the parameter is resolved by name
  // through the scope lookup — verify against LbfgsUpdateResults' defaults.
  Parameters.Add(new ValueParameter<BoolValue>("Maximization", new BoolValue(false)));
  Parameters["Maximization"].Hidden = true;
  // --- operator-graph nodes ---
  var randomCreator = new HeuristicLab.Random.RandomCreator();
  var gpInitializer = new GaussianProcessHyperparameterInitializer();
  var bfgsInitializer = new LbfgsInitializer();
  var makeStep = new LbfgsMakeStep();
  var branch = new ConditionalBranch();
  var modelCreator = new Placeholder();
  var updateResults = new LbfgsUpdateResults();
  var analyzer = new LbfgsAnalyzer();
  var finalModelCreator = new Placeholder();
  var finalAnalyzer = new LbfgsAnalyzer();
  var solutionCreator = new Placeholder();
  // --- wiring; parameters are connected by ActualName (scope lookup) ---
  OperatorGraph.InitialOperator = randomCreator;
  randomCreator.SeedParameter.ActualName = SeedParameterName;
  randomCreator.SeedParameter.Value = null;
  randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
  randomCreator.SetSeedRandomlyParameter.Value = null;
  randomCreator.Successor = gpInitializer;
  gpInitializer.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
  gpInitializer.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
  gpInitializer.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
  gpInitializer.HyperparameterParameter.ActualName = HyperparameterParameterName;
  gpInitializer.RandomParameter.ActualName = randomCreator.RandomParameter.Name;
  gpInitializer.Successor = bfgsInitializer;
  bfgsInitializer.IterationsParameter.ActualName = MinimizationIterationsParameterName;
  bfgsInitializer.PointParameter.ActualName = HyperparameterParameterName;
  bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  bfgsInitializer.Successor = makeStep;
  makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  makeStep.PointParameter.ActualName = HyperparameterParameterName;
  makeStep.Successor = branch;
  // Loop until LM-BFGS signals termination, then build the final model.
  branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
  branch.FalseBranch = modelCreator;
  branch.TrueBranch = finalModelCreator;
  modelCreator.OperatorParameter.ActualName = ModelCreatorParameterName;
  modelCreator.Successor = updateResults;
  updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  updateResults.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
  updateResults.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
  updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  updateResults.Successor = analyzer;
  analyzer.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
  analyzer.PointParameter.ActualName = HyperparameterParameterName;
  analyzer.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
  analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  analyzer.PointsTableParameter.ActualName = "Hyperparameter table";
  analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
  analyzer.QualitiesTableParameter.ActualName = "Negative log likelihood table";
  analyzer.Successor = makeStep;
  // Termination path: create the final model, analyze once more, build solution.
  finalModelCreator.OperatorParameter.ActualName = ModelCreatorParameterName;
  finalModelCreator.Successor = finalAnalyzer;
  finalAnalyzer.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
  finalAnalyzer.PointParameter.ActualName = HyperparameterParameterName;
  finalAnalyzer.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
  finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
  finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
  finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
  finalAnalyzer.Successor = solutionCreator;
  solutionCreator.OperatorParameter.ActualName = SolutionCreatorParameterName;
}
// Starts the cross-validation run: lazily creates one cloned algorithm per
// fold (with the training/test partitions set for that fold), then starts the
// clones, at most NumberOfWorkers at a time, and blocks until all have
// finished. The startPending flag guards against concurrent Start calls.
public void Start(CancellationToken cancellationToken) {
  lock (locker) {
    if (startPending) { return; }
    startPending = true;
  }
  try {
    if ((ExecutionState != ExecutionState.Prepared) && (ExecutionState != ExecutionState.Paused)) { throw new InvalidOperationException(string.Format("Start not allowed in execution state \"{0}\".", ExecutionState)); }
    seed = RandomSeedGenerator.GetSeed();
    if (Algorithm == null) { return; }
    //create cloned algorithms
    if (clonedAlgorithms.Count == 0) {
      int testSamplesCount = (SamplesEnd.Value - SamplesStart.Value) / Folds.Value;
      IDataset shuffledDataset = null;
      for (int i = 0; i < Folds.Value; i++) {
        var cloner = new Cloner();
        if (ShuffleSamples.Value) {
          // Shuffle the dataset once (same seed every fold, so shuffledDataset
          // is computed on the first iteration and reused) and register it so
          // the cloner substitutes it for the original dataset in every clone.
          var random = new FastRandom(seed);
          var dataAnalysisProblem = (IDataAnalysisProblem)algorithm.Problem;
          var dataset = (Dataset)dataAnalysisProblem.ProblemData.Dataset;
          shuffledDataset = shuffledDataset ?? dataset.Shuffle(random);
          cloner.RegisterClonedObject(dataset, shuffledDataset);
        }
        IAlgorithm clonedAlgorithm = cloner.Clone(Algorithm);
        clonedAlgorithm.Name = algorithm.Name + " Fold " + i;
        IDataAnalysisProblem problem = clonedAlgorithm.Problem as IDataAnalysisProblem;
        ISymbolicDataAnalysisProblem symbolicProblem = problem as ISymbolicDataAnalysisProblem;
        // Fold i tests on [testStart, testEnd); the last fold absorbs any
        // remainder up to SamplesEnd.
        int testStart = (i * testSamplesCount) + SamplesStart.Value;
        int testEnd = (i + 1) == Folds.Value ? SamplesEnd.Value : (i + 1) * testSamplesCount + SamplesStart.Value;
        problem.ProblemData.TrainingPartition.Start = SamplesStart.Value;
        problem.ProblemData.TrainingPartition.End = SamplesEnd.Value;
        problem.ProblemData.TestPartition.Start = testStart;
        problem.ProblemData.TestPartition.End = testEnd;
        DataAnalysisProblemData problemData = problem.ProblemData as DataAnalysisProblemData;
        if (problemData != null) {
          // Make the partitions visible so the user can inspect the fold setup.
          problemData.TrainingPartitionParameter.Hidden = false;
          problemData.TestPartitionParameter.Hidden = false;
        }
        if (symbolicProblem != null) {
          symbolicProblem.FitnessCalculationPartition.Start = SamplesStart.Value;
          symbolicProblem.FitnessCalculationPartition.End = SamplesEnd.Value;
        }
        clonedAlgorithm.Prepare();
        clonedAlgorithms.Add(clonedAlgorithm);
      }
    }
    OnStarted();
  } finally {
    // Always clear the guard, including on the early return / throw paths.
    if (startPending) { startPending = false; }
  }
  // Limit concurrently running clones to NumberOfWorkers; workers are released
  // elsewhere (when a clone finishes), which also sets allAlgorithmsFinished.
  availableWorkers = new SemaphoreSlim(NumberOfWorkers.Value, NumberOfWorkers.Value);
  allAlgorithmsFinished = new ManualResetEventSlim(false);
  var startedTasks = new List<Task>(clonedAlgorithms.Count);
  //start prepared or paused cloned algorithms
  foreach (IAlgorithm clonedAlgorithm in clonedAlgorithms) {
    if (pausePending || stopPending || ExecutionState != ExecutionState.Started) { break; }
    if (clonedAlgorithm.ExecutionState == ExecutionState.Prepared || clonedAlgorithm.ExecutionState == ExecutionState.Paused) {
      availableWorkers.Wait();
      lock (locker) {
        // Re-check under the lock: a pause/stop request may have arrived
        // while waiting for a worker slot.
        if (pausePending || stopPending || ExecutionState != ExecutionState.Started) { break; }
        var task = clonedAlgorithm.StartAsync(cancellationToken);
        startedTasks.Add(task);
      }
    }
  }
  allAlgorithmsFinished.Wait();
  Task.WaitAll(startedTasks.ToArray()); // to get exceptions not handled within the tasks
}
// Synchronous variant of the cross-validation start: lazily creates one
// cloned algorithm per fold (partitioned for that fold), then starts up to
// NumberOfWorkers clones, waiting for each clone's Started event before
// starting the next.
public void Start() {
  if ((ExecutionState != ExecutionState.Prepared) && (ExecutionState != ExecutionState.Paused)) { throw new InvalidOperationException(string.Format("Start not allowed in execution state \"{0}\".", ExecutionState)); }
  if (Algorithm != null) {
    //create cloned algorithms
    if (clonedAlgorithms.Count == 0) {
      int testSamplesCount = (SamplesEnd.Value - SamplesStart.Value) / Folds.Value;
      for (int i = 0; i < Folds.Value; i++) {
        IAlgorithm clonedAlgorithm = (IAlgorithm)algorithm.Clone();
        clonedAlgorithm.Name = algorithm.Name + " Fold " + i;
        IDataAnalysisProblem problem = clonedAlgorithm.Problem as IDataAnalysisProblem;
        ISymbolicDataAnalysisProblem symbolicProblem = problem as ISymbolicDataAnalysisProblem;
        // Fold i tests on [testStart, testEnd); the last fold absorbs any
        // remainder up to SamplesEnd.
        int testStart = (i * testSamplesCount) + SamplesStart.Value;
        int testEnd = (i + 1) == Folds.Value ? SamplesEnd.Value : (i + 1) * testSamplesCount + SamplesStart.Value;
        problem.ProblemData.TrainingPartition.Start = SamplesStart.Value;
        problem.ProblemData.TrainingPartition.End = SamplesEnd.Value;
        problem.ProblemData.TestPartition.Start = testStart;
        problem.ProblemData.TestPartition.End = testEnd;
        DataAnalysisProblemData problemData = problem.ProblemData as DataAnalysisProblemData;
        if (problemData != null) {
          // Make the partitions visible so the user can inspect the fold setup.
          problemData.TrainingPartitionParameter.Hidden = false;
          problemData.TestPartitionParameter.Hidden = false;
        }
        if (symbolicProblem != null) {
          symbolicProblem.FitnessCalculationPartition.Start = SamplesStart.Value;
          symbolicProblem.FitnessCalculationPartition.End = SamplesEnd.Value;
        }
        clonedAlgorithm.Prepare();
        clonedAlgorithms.Add(clonedAlgorithm);
      }
    }
    //start prepared or paused cloned algorithms
    int startedAlgorithms = 0;
    foreach (IAlgorithm clonedAlgorithm in clonedAlgorithms) {
      if (startedAlgorithms < NumberOfWorkers.Value) {
        if (clonedAlgorithm.ExecutionState == ExecutionState.Prepared || clonedAlgorithm.ExecutionState == ExecutionState.Paused) {
          // start and wait until the alg is started: subscribe a one-shot
          // handler that signals the event, then block until the clone has
          // actually transitioned to Started before launching the next one.
          using (var signal = new ManualResetEvent(false)) {
            EventHandler signalSetter = (sender, args) => { signal.Set(); };
            clonedAlgorithm.Started += signalSetter;
            clonedAlgorithm.Start();
            signal.WaitOne();
            clonedAlgorithm.Started -= signalSetter;
            startedAlgorithms++;
          }
        }
      }
    }
    OnStarted();
  }
}
/// <summary>
/// Exports the preprocessing result as a new problem: the source problem is
/// cloned and the clone receives the newly built problem data.
/// </summary>
private IDataAnalysisProblem ExportProblem(IDataAnalysisProblem source) {
  var exported = (IDataAnalysisProblem)source.Clone();
  var newData = CreateNewProblemData();
  exported.ProblemDataParameter.ActualValue = newData;
  return exported;
}
// Base constructor for Gaussian process algorithms. Registers the algorithm
// parameters (mean/covariance functions, LM-BFGS iteration count, seeding,
// gradient handling, the fixed "Maximization (BFGS)" flag) and then wires the
// LM-BFGS hyperparameter-optimization operator graph:
//   randomCreator -> gpInitializer -> bfgsInitializer -> makeStep -> branch
//     false branch: modelCreator -> updateResults -> analyzer -> makeStep (loop)
//     true branch:  finalModelCreator -> finalAnalyzer -> solutionCreator
protected GaussianProcessBase(IDataAnalysisProblem problem) : base() {
  Problem = problem;
  // --- algorithm parameters ---
  Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst()));
  Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSquaredExponentialIso()));
  Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
  Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
  Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
  Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
  Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed
  Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputValuesParameterName, "Determines if the input variable values are scaled to the range [0..1] for training.", new BoolValue(true)));
  Parameters[ScaleInputValuesParameterName].Hidden = true; // necessary for BFGS
  // Fixed to false: LM-BFGS minimizes the negative log likelihood here.
  Parameters.Add(new FixedValueParameter<BoolValue>("Maximization (BFGS)", new BoolValue(false)));
  Parameters["Maximization (BFGS)"].Hidden = true;
  // --- operator-graph nodes ---
  var randomCreator = new HeuristicLab.Random.RandomCreator();
  var gpInitializer = new GaussianProcessHyperparameterInitializer();
  var bfgsInitializer = new LbfgsInitializer();
  var makeStep = new LbfgsMakeStep();
  var branch = new ConditionalBranch();
  var modelCreator = new Placeholder();
  var updateResults = new LbfgsUpdateResults();
  var analyzer = new LbfgsAnalyzer();
  var finalModelCreator = new Placeholder();
  var finalAnalyzer = new LbfgsAnalyzer();
  var solutionCreator = new Placeholder();
  // --- wiring; parameters are connected by ActualName (scope lookup) ---
  OperatorGraph.InitialOperator = randomCreator;
  randomCreator.SeedParameter.ActualName = SeedParameterName;
  randomCreator.SeedParameter.Value = null;
  randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
  randomCreator.SetSeedRandomlyParameter.Value = null;
  randomCreator.Successor = gpInitializer;
  gpInitializer.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
  gpInitializer.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
  gpInitializer.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
  gpInitializer.HyperparameterParameter.ActualName = HyperparameterParameterName;
  gpInitializer.RandomParameter.ActualName = randomCreator.RandomParameter.Name;
  gpInitializer.Successor = bfgsInitializer;
  bfgsInitializer.IterationsParameter.ActualName = MinimizationIterationsParameterName;
  bfgsInitializer.PointParameter.ActualName = HyperparameterParameterName;
  bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  bfgsInitializer.Successor = makeStep;
  makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  makeStep.PointParameter.ActualName = HyperparameterParameterName;
  makeStep.Successor = branch;
  // Loop until LM-BFGS signals termination, then build the final model.
  branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
  branch.FalseBranch = modelCreator;
  branch.TrueBranch = finalModelCreator;
  modelCreator.OperatorParameter.ActualName = ModelCreatorParameterName;
  modelCreator.Successor = updateResults;
  updateResults.MaximizationParameter.ActualName = "Maximization (BFGS)";
  updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  updateResults.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
  updateResults.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
  updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  updateResults.Successor = analyzer;
  analyzer.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
  analyzer.PointParameter.ActualName = HyperparameterParameterName;
  analyzer.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
  analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  analyzer.PointsTableParameter.ActualName = "Hyperparameter table";
  analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
  analyzer.QualitiesTableParameter.ActualName = "Negative log likelihood table";
  analyzer.Successor = makeStep;
  // Termination path: create the final model, analyze once more, build solution.
  finalModelCreator.OperatorParameter.ActualName = ModelCreatorParameterName;
  finalModelCreator.Successor = finalAnalyzer;
  finalAnalyzer.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
  finalAnalyzer.PointParameter.ActualName = HyperparameterParameterName;
  finalAnalyzer.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
  finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
  finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
  finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
  finalAnalyzer.Successor = solutionCreator;
  solutionCreator.OperatorParameter.ActualName = SolutionCreatorParameterName;
}
/// <summary>
/// Installs freshly created problem data on the given problem and prefixes
/// the problem's name to mark it as preprocessed.
/// </summary>
private void SetNewProblemData(IDataAnalysisProblem problem) {
  var newData = creator.CreateProblemData();
  problem.ProblemDataParameter.ActualValue = newData;
  problem.Name = string.Concat("Preprocessed ", problem.Name);
}