Code Example #1
 protected NcaGradientCalculator(NcaGradientCalculator original, Cloner cloner) : base(original, cloner) { }
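
The protected copy constructor above is part of HeuristicLab's deep-cloning infrastructure. For reference, a minimal sketch of the companion Clone override that typically invokes such a constructor is shown below; this override is an assumption based on the usual HeuristicLab IDeepCloneable pattern and is not part of the example itself.

 // Hypothetical companion override (standard HeuristicLab cloning pattern):
 // the copy constructor above is normally called from Clone(Cloner) like this.
 public override IDeepCloneable Clone(Cloner cloner) {
   return new NcaGradientCalculator(this, cloner);
 }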
Code Example #2
File: NcaAlgorithm.cs Project: t-h-e/HeuristicLab
    public NcaAlgorithm()
      : base() {
      Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The seed of the random number generator.", new IntValue(0)));
      Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "A boolean flag that indicates whether the seed should be randomly reset each time the algorithm is run.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(KParameterName, "The K for the nearest neighbor.", new IntValue(3)));
      Parameters.Add(new FixedValueParameter<IntValue>(DimensionsParameterName, "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
      Parameters.Add(new ConstrainedValueParameter<INcaInitializer>(InitializationParameterName, "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
      Parameters.Add(new FixedValueParameter<IntValue>(NeighborSamplesParameterName, "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k; at most the number of training instances minus one will be used.", new IntValue(60)));
      Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName, "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optimum has already been reached.", new IntValue(50)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(RegularizationParameterName, "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
      Parameters.Add(new ValueParameter<INcaModelCreator>(NcaModelCreatorParameterName, "Creates an NCA model out of the matrix.", new NcaModelCreator()));
      Parameters.Add(new ValueParameter<INcaSolutionCreator>(NcaSolutionCreatorParameterName, "Creates an NCA solution given a model and some data.", new NcaSolutionCreator()));
      Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "True if the gradients should be approximated, otherwise they are computed exactly.", new BoolValue()));

      NcaSolutionCreatorParameter.Hidden = true;
      ApproximateGradientsParameter.Hidden = true;

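      // Collect all registered INcaInitializer plugins and prefer the LDA initializer as the default.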
      INcaInitializer defaultInitializer = null;
      foreach (var initializer in ApplicationManager.Manager.GetInstances<INcaInitializer>().OrderBy(x => x.ItemName)) {
        if (initializer is LdaInitializer) defaultInitializer = initializer;
        InitializationParameter.ValidValues.Add(initializer);
      }
      if (defaultInitializer != null) InitializationParameter.Value = defaultInitializer;

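      // Operators that make up the LBFGS-based optimization loop of the operator graph.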
      var randomCreator = new RandomCreator();
      var ncaInitializer = new Placeholder();
      var bfgsInitializer = new LbfgsInitializer();
      var makeStep = new LbfgsMakeStep();
      var branch = new ConditionalBranch();
      var gradientCalculator = new NcaGradientCalculator();
      var modelCreator = new Placeholder();
      var updateResults = new LbfgsUpdateResults();
      var analyzer = new LbfgsAnalyzer();
      var finalModelCreator = new Placeholder();
      var finalAnalyzer = new LbfgsAnalyzer();
      var solutionCreator = new Placeholder();

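      // Wire the operator graph: seed the random number generator, initialize the NCA matrix, then start the LBFGS iteration.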
      OperatorGraph.InitialOperator = randomCreator;
      randomCreator.SeedParameter.ActualName = SeedParameterName;
      randomCreator.SeedParameter.Value = null;
      randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
      randomCreator.SetSeedRandomlyParameter.Value = null;
      randomCreator.Successor = ncaInitializer;

      ncaInitializer.Name = "(NcaInitializer)";
      ncaInitializer.OperatorParameter.ActualName = InitializationParameterName;
      ncaInitializer.Successor = bfgsInitializer;

      bfgsInitializer.IterationsParameter.ActualName = IterationsParameterName;
      bfgsInitializer.PointParameter.ActualName = NcaMatrixParameterName;
      bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
      bfgsInitializer.Successor = makeStep;

      makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
      makeStep.PointParameter.ActualName = NcaMatrixParameterName;
      makeStep.Successor = branch;

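      // While the LBFGS termination criterion is not met, compute gradients and keep iterating; once it is met, create the final model.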
      branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
      branch.FalseBranch = gradientCalculator;
      branch.TrueBranch = finalModelCreator;

      gradientCalculator.Successor = modelCreator;

      modelCreator.OperatorParameter.ActualName = NcaModelCreatorParameterName;
      modelCreator.Successor = updateResults;

      updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
      updateResults.QualityParameter.ActualName = QualityParameterName;
      updateResults.QualityGradientsParameter.ActualName = NcaMatrixGradientsParameterName;
      updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
      updateResults.Successor = analyzer;

      analyzer.QualityParameter.ActualName = QualityParameterName;
      analyzer.PointParameter.ActualName = NcaMatrixParameterName;
      analyzer.QualityGradientsParameter.ActualName = NcaMatrixGradientsParameterName;
      analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
      analyzer.PointsTableParameter.ActualName = "Matrix table";
      analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
      analyzer.QualitiesTableParameter.ActualName = "Qualities";
      analyzer.Successor = makeStep;

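      // After LBFGS terminates: create the final model, run a last analysis, and build the NCA solution.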
      finalModelCreator.OperatorParameter.ActualName = NcaModelCreatorParameterName;
      finalModelCreator.Successor = finalAnalyzer;

      finalAnalyzer.QualityParameter.ActualName = QualityParameterName;
      finalAnalyzer.PointParameter.ActualName = NcaMatrixParameterName;
      finalAnalyzer.QualityGradientsParameter.ActualName = NcaMatrixGradientsParameterName;
      finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
      finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
      finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
      finalAnalyzer.Successor = solutionCreator;

      solutionCreator.OperatorParameter.ActualName = NcaSolutionCreatorParameterName;

      Problem = new ClassificationProblem();
    }
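
For orientation, the operator graph assembled by this constructor forms a simple LBFGS loop. The comment block below reconstructs the control flow from the Successor and branch assignments above; it is a summary, not part of the original source.

 // Control flow of the operator graph built in NcaAlgorithm():
 //
 //   randomCreator -> ncaInitializer -> bfgsInitializer -> makeStep -> branch
 //   branch, termination criterion false:
 //       gradientCalculator -> modelCreator -> updateResults -> analyzer -> makeStep (loop)
 //   branch, termination criterion true:
 //       finalModelCreator -> finalAnalyzer -> solutionCreator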