private LbfgsAlgorithm(LbfgsAlgorithm original, Cloner cloner)
  : base(original, cloner) {
  initializer = cloner.Clone(original.initializer);
  makeStep = cloner.Clone(original.makeStep);
  updateResults = cloner.Clone(original.updateResults);
  analyzer = cloner.Clone(original.analyzer);
  solutionCreator = cloner.Clone(original.solutionCreator);
  evaluator = cloner.Clone(original.evaluator);
  RegisterEvents();
}
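// The cloning constructor above is conventionally paired with a Clone override that delegates to it
// (the standard HeuristicLab deep-clone pattern). A minimal sketch, assuming the override is simply
// not shown in this excerpt:
public override IDeepCloneable Clone(Cloner cloner) {
  return new LbfgsAlgorithm(this, cloner);
}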
protected GaussianProcessBase(IDataAnalysisProblem problem)
  : base() {
  Problem = problem;
  Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst()));
  Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSquaredExponentialIso()));
  Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
  Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
  Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
  Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
  Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed
  Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputValuesParameterName, "Determines if the input variable values are scaled to the range [0..1] for training.", new BoolValue(true)));
  Parameters[ScaleInputValuesParameterName].Hidden = true; // necessary for BFGS
  Parameters.Add(new ValueParameter<BoolValue>("Maximization", new BoolValue(false)));
  Parameters["Maximization"].Hidden = true;

  var randomCreator = new HeuristicLab.Random.RandomCreator();
  var gpInitializer = new GaussianProcessHyperparameterInitializer();
  var bfgsInitializer = new LbfgsInitializer();
  var makeStep = new LbfgsMakeStep();
  var branch = new ConditionalBranch();
  var modelCreator = new Placeholder();
  var updateResults = new LbfgsUpdateResults();
  var analyzer = new LbfgsAnalyzer();
  var finalModelCreator = new Placeholder();
  var finalAnalyzer = new LbfgsAnalyzer();
  var solutionCreator = new Placeholder();

  OperatorGraph.InitialOperator = randomCreator;

  randomCreator.SeedParameter.ActualName = SeedParameterName;
  randomCreator.SeedParameter.Value = null;
  randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
  randomCreator.SetSeedRandomlyParameter.Value = null;
  randomCreator.Successor = gpInitializer;

  gpInitializer.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
  gpInitializer.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
  gpInitializer.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
  gpInitializer.HyperparameterParameter.ActualName = HyperparameterParameterName;
  gpInitializer.RandomParameter.ActualName = randomCreator.RandomParameter.Name;
  gpInitializer.Successor = bfgsInitializer;

  bfgsInitializer.IterationsParameter.ActualName = MinimizationIterationsParameterName;
  bfgsInitializer.PointParameter.ActualName = HyperparameterParameterName;
  bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  bfgsInitializer.Successor = makeStep;

  makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  makeStep.PointParameter.ActualName = HyperparameterParameterName;
  makeStep.Successor = branch;

  branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
  branch.FalseBranch = modelCreator;
  branch.TrueBranch = finalModelCreator;

  modelCreator.OperatorParameter.ActualName = ModelCreatorParameterName;
  modelCreator.Successor = updateResults;

  updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  updateResults.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
  updateResults.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
  updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  updateResults.Successor = analyzer;

  analyzer.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
  analyzer.PointParameter.ActualName = HyperparameterParameterName;
  analyzer.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
  analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  analyzer.PointsTableParameter.ActualName = "Hyperparameter table";
  analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
  analyzer.QualitiesTableParameter.ActualName = "Negative log likelihood table";
  analyzer.Successor = makeStep;

  finalModelCreator.OperatorParameter.ActualName = ModelCreatorParameterName;
  finalModelCreator.Successor = finalAnalyzer;

  finalAnalyzer.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
  finalAnalyzer.PointParameter.ActualName = HyperparameterParameterName;
  finalAnalyzer.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
  finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
  finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
  finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
  finalAnalyzer.Successor = solutionCreator;

  solutionCreator.OperatorParameter.ActualName = SolutionCreatorParameterName;
}
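// Control flow of the operator graph wired above (read off the Successor links):
//   RandomCreator -> GaussianProcessHyperparameterInitializer -> LbfgsInitializer -> LbfgsMakeStep -> ConditionalBranch
//     false (not terminated): modelCreator -> LbfgsUpdateResults -> LbfgsAnalyzer -> back to LbfgsMakeStep
//     true  (terminated):     finalModelCreator -> finalAnalyzer -> solutionCreator
// The loop minimizes the negative log likelihood of the hyperparameters with L-BFGS; once
// LbfgsMakeStep signals termination, the final model and solution are created from the optimized point.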
public LbfgsAlgorithm()
  : base() {
  Parameters.Add(new ValueParameter<IMultiAnalyzer>(AnalyzerParameterName, "The analyzers that will be executed on the solution.", new MultiAnalyzer()));
  Parameters.Add(new ValueParameter<IntValue>(MaxIterationsParameterName, "The maximal number of iterations.", new IntValue(20)));
  Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
  Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
  Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should be approximated.", new BoolValue(true)));
  Parameters.Add(new OptionalValueParameter<DoubleValue>(GradientCheckStepSizeParameterName, "Step size for the gradient check (should be used for debugging the gradient calculation only)."));
  // these parameters should usually not be changed
  Parameters[ApproximateGradientsParameterName].Hidden = true;
  Parameters[GradientCheckStepSizeParameterName].Hidden = true;

  var randomCreator = new RandomCreator();
  solutionCreator = new Placeholder();
  initializer = new LbfgsInitializer();
  makeStep = new LbfgsMakeStep();
  var branch = new ConditionalBranch();
  evaluator = new Placeholder();
  updateResults = new LbfgsUpdateResults();
  var analyzerPlaceholder = new Placeholder();
  var finalAnalyzerPlaceholder = new Placeholder();

  OperatorGraph.InitialOperator = randomCreator;

  randomCreator.SeedParameter.ActualName = SeedParameterName;
  randomCreator.SeedParameter.Value = null;
  randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
  randomCreator.SetSeedRandomlyParameter.Value = null;
  randomCreator.Successor = solutionCreator;

  solutionCreator.Name = "(Solution Creator)";
  solutionCreator.Successor = initializer;

  initializer.IterationsParameter.ActualName = MaxIterationsParameterName;
  initializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  initializer.Successor = makeStep;

  makeStep.StateParameter.ActualName = initializer.StateParameter.Name;
  makeStep.Successor = branch;

  branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
  branch.FalseBranch = evaluator;
  branch.TrueBranch = finalAnalyzerPlaceholder;

  evaluator.Name = "(Evaluator)";
  evaluator.Successor = updateResults;

  updateResults.StateParameter.ActualName = initializer.StateParameter.Name;
  updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  updateResults.Successor = analyzerPlaceholder;

  analyzerPlaceholder.Name = "(Analyzer)";
  analyzerPlaceholder.OperatorParameter.ActualName = AnalyzerParameterName;
  analyzerPlaceholder.Successor = makeStep;

  finalAnalyzerPlaceholder.Name = "(Analyzer)";
  finalAnalyzerPlaceholder.OperatorParameter.ActualName = AnalyzerParameterName;
  finalAnalyzerPlaceholder.Successor = null;

  analyzer = new LbfgsAnalyzer();
  analyzer.StateParameter.ActualName = initializer.StateParameter.Name;
}
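// Control flow of the operator graph wired above (read off the Successor links):
//   RandomCreator -> (Solution Creator) -> LbfgsInitializer -> LbfgsMakeStep -> ConditionalBranch
//     false (not terminated): (Evaluator) -> LbfgsUpdateResults -> (Analyzer) -> back to LbfgsMakeStep
//     true  (terminated):     final (Analyzer) placeholder
// The analyzer placeholders are resolved at runtime through the Analyzer parameter; the solution
// creator and evaluator placeholders carry only a display name here and are presumably bound to the
// concrete problem operators elsewhere (not shown in this excerpt).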
private LbfgsMakeStep(LbfgsMakeStep original, Cloner cloner) : base(original, cloner) { }
public NcaAlgorithm()
  : base() {
  Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The seed of the random number generator.", new IntValue(0)));
  Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "A boolean flag that indicates whether the seed should be randomly reset each time the algorithm is run.", new BoolValue(true)));
  Parameters.Add(new FixedValueParameter<IntValue>(KParameterName, "The K for the nearest neighbor.", new IntValue(3)));
  Parameters.Add(new FixedValueParameter<IntValue>(DimensionsParameterName, "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
  Parameters.Add(new ConstrainedValueParameter<INcaInitializer>(InitializationParameterName, "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
  Parameters.Add(new FixedValueParameter<IntValue>(NeighborSamplesParameterName, "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k; at most the number of training instances minus one will be used.", new IntValue(60)));
  Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName, "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optimum has already been reached.", new IntValue(50)));
  Parameters.Add(new FixedValueParameter<DoubleValue>(RegularizationParameterName, "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
  Parameters.Add(new ValueParameter<INcaModelCreator>(NcaModelCreatorParameterName, "Creates an NCA model out of the matrix.", new NcaModelCreator()));
  Parameters.Add(new ValueParameter<INcaSolutionCreator>(NcaSolutionCreatorParameterName, "Creates an NCA solution given a model and some data.", new NcaSolutionCreator()));
  Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "True if the gradients should be approximated, otherwise they are computed exactly.", new BoolValue()));
  NcaSolutionCreatorParameter.Hidden = true;
  ApproximateGradientsParameter.Hidden = true;

  INcaInitializer defaultInitializer = null;
  foreach (var initializer in ApplicationManager.Manager.GetInstances<INcaInitializer>().OrderBy(x => x.ItemName)) {
    if (initializer is LdaInitializer) defaultInitializer = initializer;
    InitializationParameter.ValidValues.Add(initializer);
  }
  if (defaultInitializer != null) InitializationParameter.Value = defaultInitializer;

  var randomCreator = new RandomCreator();
  var ncaInitializer = new Placeholder();
  var bfgsInitializer = new LbfgsInitializer();
  var makeStep = new LbfgsMakeStep();
  var branch = new ConditionalBranch();
  var gradientCalculator = new NcaGradientCalculator();
  var modelCreator = new Placeholder();
  var updateResults = new LbfgsUpdateResults();
  var analyzer = new LbfgsAnalyzer();
  var finalModelCreator = new Placeholder();
  var finalAnalyzer = new LbfgsAnalyzer();
  var solutionCreator = new Placeholder();

  OperatorGraph.InitialOperator = randomCreator;

  randomCreator.SeedParameter.ActualName = SeedParameterName;
  randomCreator.SeedParameter.Value = null;
  randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
  randomCreator.SetSeedRandomlyParameter.Value = null;
  randomCreator.Successor = ncaInitializer;

  ncaInitializer.Name = "(NcaInitializer)";
  ncaInitializer.OperatorParameter.ActualName = InitializationParameterName;
  ncaInitializer.Successor = bfgsInitializer;

  bfgsInitializer.IterationsParameter.ActualName = IterationsParameterName;
  bfgsInitializer.PointParameter.ActualName = NcaMatrixParameterName;
  bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  bfgsInitializer.Successor = makeStep;

  makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  makeStep.PointParameter.ActualName = NcaMatrixParameterName;
  makeStep.Successor = branch;

  branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
  branch.FalseBranch = gradientCalculator;
  branch.TrueBranch = finalModelCreator;

  gradientCalculator.Successor = modelCreator;

  modelCreator.OperatorParameter.ActualName = NcaModelCreatorParameterName;
  modelCreator.Successor = updateResults;

  updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  updateResults.QualityParameter.ActualName = QualityParameterName;
  updateResults.QualityGradientsParameter.ActualName = NcaMatrixGradientsParameterName;
  updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
  updateResults.Successor = analyzer;

  analyzer.QualityParameter.ActualName = QualityParameterName;
  analyzer.PointParameter.ActualName = NcaMatrixParameterName;
  analyzer.QualityGradientsParameter.ActualName = NcaMatrixGradientsParameterName;
  analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
  analyzer.PointsTableParameter.ActualName = "Matrix table";
  analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
  analyzer.QualitiesTableParameter.ActualName = "Qualities";
  analyzer.Successor = makeStep;

  finalModelCreator.OperatorParameter.ActualName = NcaModelCreatorParameterName;
  finalModelCreator.Successor = finalAnalyzer;

  finalAnalyzer.QualityParameter.ActualName = QualityParameterName;
  finalAnalyzer.PointParameter.ActualName = NcaMatrixParameterName;
  finalAnalyzer.QualityGradientsParameter.ActualName = NcaMatrixGradientsParameterName;
  finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
  finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
  finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
  finalAnalyzer.Successor = solutionCreator;

  solutionCreator.OperatorParameter.ActualName = NcaSolutionCreatorParameterName;

  Problem = new ClassificationProblem();
}
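// A minimal usage sketch (hypothetical driver code, not part of this class; the property and method
// names below follow the usual HeuristicLab algorithm API and are assumptions here):
//   var nca = new NcaAlgorithm();                     // uses the default ClassificationProblem set above
//   nca.Problem.ProblemDataParameter.Value = data;    // 'data' is an assumed IClassificationProblemData instance
//   nca.Start();                                      // executes the operator graph wired in the constructor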