Code Example #1
File: GradientDescent.cs Project: ChewyMoon/Cupcake
        /// <summary>
        ///     Performs gradient descent to optimize the theta parameters.
        /// </summary>
        /// <param name="theta">Initial Theta (Zeros)</param>
        /// <param name="x">Training set</param>
        /// <param name="y">Training labels</param>
        /// <param name="maxIterations">Maximum number of iterations to run gradient descent</param>
        /// <param name="learningRateAlpha">The learning rate (Alpha)</param>
        /// <param name="costFunction">Cost function to use for gradient descent</param>
        /// <param name="lambda">The regularization constant to apply</param>
        /// <param name="regularizer">The regularization function to apply</param>
        /// <returns>A tuple containing the best cost found and the corresponding theta vector.</returns>
        public static Tuple<double, Vector> Run(
            Vector theta, 
            Matrix x, 
            Vector y, 
            int maxIterations, 
            double learningRateAlpha, 
            ICostFunction costFunction, 
            double lambda, 
            IRegularizer regularizer)
        {
            var bestTheta = theta.Copy();
            var bestCost = double.PositiveInfinity;

            double currentCost = 0;
            var currentGradient = theta.Copy();

            for (var i = 0; i < maxIterations; i++) // run at most maxIterations iterations
            {
                currentCost = costFunction.ComputeCost(bestTheta, x, y, lambda, regularizer);
                currentGradient = costFunction.ComputeGradient(bestTheta, x, y, lambda, regularizer);

                if (currentCost < bestCost)
                {
                    bestTheta = bestTheta - learningRateAlpha * currentGradient;
                    bestCost = currentCost;
                }
                else
                {
                    learningRateAlpha = learningRateAlpha * 0.99;
                }
            }

            return new Tuple<double, Vector>(bestCost, bestTheta);
        }
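For orientation, a call to Run might look like the sketch below. The LinearRegressionCost and L2Regularizer names are hypothetical placeholders for whatever ICostFunction and IRegularizer implementations the surrounding project provides, and the Vector/Matrix construction is an assumption.

        // Hypothetical usage sketch; concrete ICostFunction / IRegularizer
        // implementations and the Vector / Matrix types are assumed to come
        // from the surrounding library.
        var theta = new Vector(x.Cols);               // start from zeros
        var result = GradientDescent.Run(
            theta,
            x,                                        // training set
            y,                                        // training labels
            maxIterations: 1000,
            learningRateAlpha: 0.01,
            costFunction: new LinearRegressionCost(), // hypothetical ICostFunction
            lambda: 0.1,
            regularizer: new L2Regularizer());        // hypothetical IRegularizer

        double bestCost  = result.Item1;
        Vector bestTheta = result.Item2;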
Code Example #2
        /// <summary>
        /// Initializes a new instance of the <see cref="BaseOptimizer"/> class.
        /// </summary>
        /// <param name="neuralNetwork">The neural network.</param>
        /// <param name="costFunction">The cost function.</param>
        protected BaseOptimizer(INeuralNetwork neuralNetwork, ICostFunction costFunction)
        {
            Contracts.ValueNotNull(neuralNetwork, nameof(neuralNetwork));
            Contracts.ValueNotNull(costFunction, nameof(costFunction));

            _neuralNetwork = neuralNetwork;
            _costFunction  = costFunction;
        }
Code Example #3
File: BruteForceLineSearch.cs Project: olesar/Altaxo
        public BruteForceLineSearch(ICostFunction cost)
        {
            costFunction_ = cost;
            endCriteria_  = new EndCriteria();

            _numberOfInitialDivisions    = 128;
            _numberOfSubsequentDivisions = 2;
            _divisionDepth = 32;
        }
Code Example #4
        private string GetCostFunctionName(ICostFunction costFunction)
        {
            if (costFunction is QuadraticCost)
            {
                return("QuadraticCost");
            }

            throw new ArgumentException($"{costFunction} is not a known cost function");
        }
Code Example #5
File: BruteForceLineSearch.cs Project: Altaxo/Altaxo
		public BruteForceLineSearch(ICostFunction cost)
		{
			this.costFunction_ = cost;
			this.endCriteria_ = new EndCriteria();

			_numberOfInitialDivisions = 128;
			_numberOfSubsequentDivisions = 2;
			_divisionDepth = 32;
		}
Code Example #6
 public SgdTrainer(int epochs, int minibatch, double η, ICostFunction costFunction, ILogger logger, int?randomSeed)
 {
     _epochs       = epochs;
     _minibatch    = minibatch;
     _η            = η;
     _costFunction = costFunction;
     _logger       = logger;
     _randomSeed   = randomSeed;
 }
Code Example #7
        /// <summary>
        /// Initializes a new instance of the <see cref="MomentumOptimizer"/> class.
        /// </summary>
        /// <param name="neuralNetwork">The neural network.</param>
        /// <param name="costFunction">The cost function.</param>
        /// <param name="momentum">The momentum value.</param>
        public MomentumOptimizer(INeuralNetwork neuralNetwork, ICostFunction costFunction, double momentum) : base(neuralNetwork, costFunction)
        {
            Contracts.ValueGreaterThanZero(momentum, nameof(momentum));
            Contracts.ValueNotNull(neuralNetwork, nameof(neuralNetwork));

            _momentum = momentum;

            InitializeVelocities(neuralNetwork);
        }
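For context, an optimizer of this shape typically keeps one velocity buffer per weight array (which is what InitializeVelocities would prepare) and blends it with each new gradient. A rough standalone sketch of that update, not the library's actual implementation, is:

        // Hedged sketch of a classic momentum step over flat arrays; the
        // parameter names are illustrative and do not come from the library above.
        private static void MomentumStep(
            double[] weights, double[] gradients, double[] velocities,
            double momentum, double learningRate)
        {
            for (var i = 0; i < weights.Length; i++)
            {
                velocities[i] = momentum * velocities[i] - learningRate * gradients[i];
                weights[i]   += velocities[i];
            }
        }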
Code Example #8
 public StochasticGradientDescent(IActivationFunction hiddenActivationFunction, IActivationFunction outputActivationFunction, ICostFunction costFunction,
                                  RegularizationType regularizationType, IRegularizationStrategyFactory regularizationStrategyFactory)
 {
     _hiddenActivationFunction      = hiddenActivationFunction;
     _outputActivationFunction      = outputActivationFunction;
     _costFunction                  = costFunction;
     _regularizationType            = regularizationType;
     _regularizationStrategyFactory = regularizationStrategyFactory; //would normally solve this with IOC
 }
Code Example #9
 public SearchPtypeBuilder(string model, ICostFunction costFunc, IPartGetter partGetter,
                           IConstraintGetter constraintGetter, IPartOrderSelecter orderSelecter, IPtypeFromAssignment constructor)
 {
     _model            = model;
     _costFunction     = costFunc;
     _constraintGetter = constraintGetter;
     _partGetter       = partGetter;
     _orderSelecter    = orderSelecter;
     _constructor      = constructor;
 }
Code Example #10
 public SearchPtypeBuilder(string model, ICostFunction costFunc, IPartGetter partGetter, 
     IConstraintGetter constraintGetter, IPartOrderSelecter orderSelecter, IPtypeFromAssignment constructor)
 {
     _model = model;
     _costFunction = costFunc;
     _constraintGetter = constraintGetter;
     _partGetter = partGetter;
     _orderSelecter = orderSelecter;
     _constructor = constructor;
 }
Code Example #11
        private Network(ICostFunction costFunction,
                        IRegularizationFunction regularizationFunction,
                        DropoutLayerOptions dropoutLayerOptions,
                        Random rand)
        {
            HiddenLayers = new List <HiddenLayer>();

            _costFunction           = costFunction;
            _regularizationFunction = regularizationFunction;
            _dropoutLayerOptions    = dropoutLayerOptions;

            NetworkRandom = rand;
        }
Code Example #12
        public NeuralNetwork Deserialize(string json)
        {
            var networkJson = JsonConvert.DeserializeObject <NetworkJson>(json);
            var layers      = networkJson.Layers
                              .Select(layerJson => new Layer(layerJson.Weights, layerJson.Biases, GetActivation(layerJson.Activation)))
                              .ToList();

            ConnectLayers(layers);

            ICostFunction costFunction = GetCostFunction(networkJson.CostFunction);

            return(new NeuralNetwork(layers, costFunction));
        }
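The GetCostFunction helper called here is not part of this listing; a plausible counterpart to the GetCostFunctionName method in example #4 (an assumption, not the project's actual code) would map the serialized name back to an instance:

        // Hypothetical inverse of GetCostFunctionName from example #4.
        // Only QuadraticCost appears in this listing; anything else is rejected.
        private ICostFunction GetCostFunction(string name)
        {
            if (name == "QuadraticCost")
            {
                return new QuadraticCost();
            }

            throw new ArgumentException($"{name} is not a known cost function");
        }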
Code Example #13
File: ANet.cs Project: kpwelsh/NeuralNet
        private void SetCostFunction(CostFunction costFunc)
        {
            switch (costFunc)
            {
            case CostFunction.MeanSquare:
                CostFunc = new MeanSquare();
                break;

            case CostFunction.CrossEntropy:
                CostFunc = new CrossEntropy();
                break;

            default:
                // Fail loudly instead of silently leaving CostFunc unset.
                throw new ArgumentOutOfRangeException(nameof(costFunc), costFunc, "Unknown cost function");
            }
        }
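For reference, the two selections above correspond to the standard quadratic (mean squared error) and cross-entropy costs. The sketch below gives conventional textbook definitions over plain arrays; it is independent of the MeanSquare and CrossEntropy classes used by this project, whose normalization conventions may differ.

        // Textbook definitions, not the project's MeanSquare / CrossEntropy classes.
        private static double MeanSquareCost(double[] predicted, double[] expected)
        {
            double sum = 0;
            for (var i = 0; i < predicted.Length; i++)
            {
                var diff = predicted[i] - expected[i];
                sum += diff * diff;
            }
            return sum / (2.0 * predicted.Length);
        }

        private static double CrossEntropyCost(double[] predicted, double[] expected)
        {
            double sum = 0;
            for (var i = 0; i < predicted.Length; i++)
            {
                sum += -expected[i] * Math.Log(predicted[i])
                       - (1 - expected[i]) * Math.Log(1 - predicted[i]);
            }
            return sum / predicted.Length;
        }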
Code Example #14
        public StochasticGradientDescent(ICostFunction costFunction, ILayer[] layers, int epochs, int trainingBatchSize, double learningRate, double regularizationParam)
        {
            if (learningRate <= 0)
            {
                throw new ArgumentException("learningRate must be greater than zero");
            }

            if (trainingBatchSize <= 0)
            {
                throw new ArgumentException("trainingBatchSize must be greater than zero");
            }

            if (epochs <= 0)
            {
                throw new ArgumentException("epochs must be greater than zero");
            }

            if (regularizationParam < 0)
            {
                throw new ArgumentException("regularizationRate must be greater or equal to zero");
            }

            if (layers.Length < 2)
            {
                throw new NeuralNetworksException("At least two layers are mandatory");
            }

            _costFunction        = costFunction;
            _layers              = layers;
            _epochs              = epochs;
            _trainingBatchSize   = trainingBatchSize;
            _learningRate        = learningRate;
            _regularizationParam = regularizationParam;

            _neuronsBiases               = layers.Select(s => s.NeuronsBiases).ToArray();
            _synapsesWeights             = layers.Select(s => s.SynapsesWeights).ToArray();
            _neuronsGradients            = _neuronsBiases.CopyWithZeros();
            _synapsesGradients           = _synapsesWeights.CopyWithZeros();
            _neuronsAdittionsToGradients = _neuronsBiases.CopyWithZeros();
        }
Code Example #15
File: NelderMead.cs Project: Altaxo/Altaxo
		public NelderMead(ICostFunction costfunction, EndCriteria endcriteria)
		{
			this.costFunction_ = costfunction;
			this.endCriteria_ = endcriteria;
		}
Code Example #16
 /// <summary>
 /// Initializes a new instance of the <see cref="GradientDescentOptimizer"/> class.
 /// </summary>
 /// <param name="neuralNetwork">The neural network.</param>
 /// <param name="costFunction">The cost function.</param>
 public GradientDescentOptimizer(INeuralNetwork neuralNetwork, ICostFunction costFunction)
     : base(neuralNetwork, costFunction)
 {
 }
Code Example #17
File: NelderMead.cs Project: Altaxo/Altaxo
		///<summary>Default constructor for simplex method</summary>
		public NelderMead(ICostFunction costfunction)
			: this(costfunction, new EndCriteria()) { }
Code Example #18
 public NeuralNetworkBuilder Using(ICostFunction costFunction)
 {
     this.costFunction = costFunction;
     return(this);
 }
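Because Using returns the builder itself, the cost function can be chained with the rest of the configuration. A hypothetical chain is shown below; only Using(ICostFunction) appears in this listing, so the Build call is an assumption.

     // Hypothetical fluent usage; Build() is assumed, not shown in this listing.
     var network = new NeuralNetworkBuilder()
                   .Using(new QuadraticCost())
                   .Build();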
Code Example #19
 /// <summary>
 /// Gets the φ(α) function.
 /// </summary>
 /// <param name="function">The function.</param>
 /// <param name="location">The location.</param>
 /// <param name="direction">The direction.</param>
 /// <returns>Func&lt;System.Double, System.Double&gt;.</returns>
 private static Func <double, double> Getφ([NotNull] ICostFunction <double> function, [NotNull] Vector <double> location, [NotNull] Vector <double> direction)
 {
     return(alpha => function.CalculateCost(location + alpha * direction));
 }
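The φ(α) closure reduces the multivariate cost to a one-dimensional function of the step size along the search direction, which is what a line search minimizes. A crude grid-search caller, written as a sketch in the same class rather than the library's actual routine, could look like:

     // Hedged sketch: evaluate φ on a coarse grid of step sizes and keep the best.
     // A serious line search (e.g. one enforcing the Wolfe conditions) is more careful.
     private static double FindStepByGridSearch(
         [NotNull] ICostFunction<double> function,
         [NotNull] Vector<double> location,
         [NotNull] Vector<double> direction)
     {
         var phi = Getφ(function, location, direction);

         var bestAlpha = 0.0;
         var bestValue = phi(0.0);
         for (var alpha = 0.125; alpha <= 2.0; alpha += 0.125)
         {
             var value = phi(alpha);
             if (value < bestValue)
             {
                 bestValue = value;
                 bestAlpha = alpha;
             }
         }

         return bestAlpha; // location + bestAlpha * direction would be the next iterate
     }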
Code Example #20
        public INetwork LoadNetwork(string xmlFilePath, IDataSet dataset)
        {
            _xmlDoc = new XmlDocument();
            _filepath = xmlFilePath;
            _xmlDoc.Load(_filepath);

            // Create training parameters
            TrainingParameters trainingParams = new TrainingParameters()
            {
                epochs = Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/TrainingParameters/Epochs")),
                learningRate = Convert.ToDouble(GetXmlValue("NeuralNetwork/Settings/TrainingParameters/LearningRate")),
                momentum = Convert.ToDouble(GetXmlValue("NeuralNetwork/Settings/TrainingParameters/Momentum"))
            };

            // Create layer structure
            List<int> hiddenLayersList = new List<int>();
            int numberOfHiddenLayers = Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/LayerStructure/NumberOfHiddenLayers"));

            for (int i = 1; i <= numberOfHiddenLayers; i++)
            {
                int numberOfNeuronsInHiddenLayer = Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/LayerStructure/NumberOfNeuronsInHiddenLayer" + i.ToString()));
                hiddenLayersList.Add(numberOfNeuronsInHiddenLayer);
            }

            LayerStructure layerStructure = new LayerStructure()
            {
                numberOfInputNodes = Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/LayerStructure/NumberOfInputNeurons")),
                numberOfOutputNodes = Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/LayerStructure/NumberOfOutputNeurons")),
                HiddenLayerList = hiddenLayersList
            };

            // Create distribution type
            string distribution = GetXmlValue("NeuralNetwork/Settings/DistributionType/InitialRandomDistributionType");
            var distributionType = Type.GetType(distribution + ",NeuralNetwork.Core");
            IInitialRandomDistributionType initialDistribution = Activator.CreateInstance(distributionType) as IInitialRandomDistributionType;

            // Create strategy
            string hiddenFunction = GetXmlValue("NeuralNetwork/Settings/Strategy/HiddenLayerActivationFunction");
            string outputFunction = GetXmlValue("NeuralNetwork/Settings/Strategy/OutputLayerActivationFunction");
            string costFunc = GetXmlValue("NeuralNetwork/Settings/Strategy/CostFunction");
            string regularizationEnum = GetXmlValue("NeuralNetwork/Settings/Strategy/RegularizationType");
            string regularizationStrategyFactory = GetXmlValue("NeuralNetwork/Settings/Strategy/RegularizationStrategyFactory");

            var hiddenFunctionType = Type.GetType(hiddenFunction + ",NeuralNetwork.Core");
            var outputFunctionType = Type.GetType(outputFunction + ",NeuralNetwork.Core");
            var costFunctionType = Type.GetType(costFunc + ",NeuralNetwork.Core");
            var regularizationStrategyFactoryType = Type.GetType(regularizationStrategyFactory + ",NeuralNetwork.Core");

            IActivationFunction hiddenActivationFunction = Activator.CreateInstance(hiddenFunctionType) as IActivationFunction;
            IActivationFunction outputActivationFunction = Activator.CreateInstance(outputFunctionType) as IActivationFunction;
            ICostFunction costFunction = Activator.CreateInstance(costFunctionType) as ICostFunction;
            RegularizationType regularizationType = (RegularizationType)Enum.Parse(typeof(RegularizationType), regularizationEnum);
            IRegularizationStrategyFactory regularizationStrategyFact = Activator.CreateInstance(regularizationStrategyFactoryType) as IRegularizationStrategyFactory;

            string optimizationStrategy = GetXmlValue("NeuralNetwork/Settings/Strategy/OptimizationStrategy");
            var optStrategy = Type.GetType(optimizationStrategy + ",NeuralNetwork.Core");
            IOptimizationStrategy strategy = Activator.CreateInstance(optStrategy, hiddenActivationFunction, outputActivationFunction, costFunction, regularizationType, regularizationStrategyFact) as IOptimizationStrategy;

            // Create network
            network = new ArtificialNeuralNetwork(layerStructure, trainingParams, dataset, strategy, initialDistribution);

            // Set weights
            network.Weights.Clear();
            LoadInputWeights();
            LoadHiddenWeights();

            // Set biases
            network.Biases.Clear();
            LoadHiddenBiases();
            LoadOutputBiases();

            // Release the XML document
            _xmlDoc = null;

            return(network);
        }
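Judging only from the GetXmlValue paths above, the loader expects an XML document shaped roughly as follows. Element values and the fully qualified type names are illustrative assumptions (Type.GetType is handed the assembly name NeuralNetwork.Core), and the weight/bias sections consumed by LoadInputWeights, LoadHiddenWeights, LoadHiddenBiases and LoadOutputBiases are omitted because they are not shown in this listing.

    <NeuralNetwork>
      <Settings>
        <TrainingParameters>
          <Epochs>100</Epochs>
          <LearningRate>0.01</LearningRate>
          <Momentum>0.9</Momentum>
        </TrainingParameters>
        <LayerStructure>
          <NumberOfInputNeurons>784</NumberOfInputNeurons>
          <NumberOfOutputNeurons>10</NumberOfOutputNeurons>
          <NumberOfHiddenLayers>2</NumberOfHiddenLayers>
          <NumberOfNeuronsInHiddenLayer1>30</NumberOfNeuronsInHiddenLayer1>
          <NumberOfNeuronsInHiddenLayer2>30</NumberOfNeuronsInHiddenLayer2>
        </LayerStructure>
        <DistributionType>
          <InitialRandomDistributionType>NeuralNetwork.Core.GaussianDistribution</InitialRandomDistributionType>
        </DistributionType>
        <Strategy>
          <HiddenLayerActivationFunction>NeuralNetwork.Core.Sigmoid</HiddenLayerActivationFunction>
          <OutputLayerActivationFunction>NeuralNetwork.Core.Sigmoid</OutputLayerActivationFunction>
          <CostFunction>NeuralNetwork.Core.QuadraticCost</CostFunction>
          <RegularizationType>None</RegularizationType>
          <RegularizationStrategyFactory>NeuralNetwork.Core.RegularizationStrategyFactory</RegularizationStrategyFactory>
          <OptimizationStrategy>NeuralNetwork.Core.StochasticGradientDescent</OptimizationStrategy>
        </Strategy>
      </Settings>
      <!-- Weight and bias sections omitted; consumed by LoadInputWeights / LoadHiddenWeights
           and LoadHiddenBiases / LoadOutputBiases. -->
    </NeuralNetwork>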
Code Example #21
File: OptimizationMethod.cs Project: lulzzz/sito
 /// <summary>
 /// Update and return the Gradient.
 /// </summary>
 /// <param name="costFunction">The cost function to optimize.</param>
 /// <param name="properties">Properties for the optimization routine.</param>
 /// <returns>Vector</returns>
 public virtual Vector UpdateGradient(ICostFunction costFunction, OptimizerProperties properties)
 {
     return(costFunction.ComputeGradient(properties.Theta));
 }
Code Example #22
File: OptimizationMethod.cs Project: sethjuarez/numl
 /// <summary>
 /// Update and return the Gradient.
 /// </summary>
 /// <param name="costFunction">The cost function to optimize.</param>
 /// <param name="properties">Properties for the optimization routine.</param>
 /// <returns>Vector</returns>
 public virtual Vector UpdateGradient(ICostFunction costFunction, OptimizerProperties properties)
 {
     return costFunction.ComputeGradient(properties.Theta);
 }
Code Example #23
 protected override void OnPreBuild()
 {
     layoutProvider = layoutProvider ?? A.Fake <INetworkLayoutProvider>();
     costFunction   = costFunction ?? new QuadraticCost();
 }
Code Example #24
 public PlanetColonizerRoutingAlgorithm(SlaveConfig config, ICostFunction costfunc)
 {
     this.configuration = config;
     this.CostFunction  = costfunc;
 }
Code Example #25
File: OptimizationMethod.cs Project: sethjuarez/numl
 /// <summary>
 /// Update and return the Cost.
 /// </summary>
 /// <param name="costFunction">The cost function to optimize.</param>
 /// <param name="properties">Properties for the optimization routine.</param>
 /// <returns>Double</returns>
 public virtual double UpdateCost(ICostFunction costFunction, OptimizerProperties properties)
 {
     return costFunction.ComputeCost(properties.Theta);
 }
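Together with UpdateGradient from examples #21/#22, this is the pair a gradient-based optimizer calls on every pass. The driver loop below is a simplified sketch over assumed OptimizerProperties members (Theta, Cost, LearningRate, MaxIterations); it is not the library's actual optimizer.

     // Hedged sketch of the loop that consumes UpdateCost / UpdateGradient.
     // The OptimizerProperties members used here are assumptions.
     public static void Minimize(OptimizationMethod method, ICostFunction costFunction, OptimizerProperties properties)
     {
         for (var i = 0; i < properties.MaxIterations; i++)
         {
             var gradient = method.UpdateGradient(costFunction, properties);
             properties.Theta = properties.Theta - properties.LearningRate * gradient;
             properties.Cost  = method.UpdateCost(costFunction, properties);
         }
     }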
Code Example #26
File: StupidLineSearch.cs Project: Altaxo/Altaxo
		public StupidLineSearch(ICostFunction cost)
		{
			this.costFunction_ = cost;
			this.endCriteria_ = new EndCriteria();
		}
Code Example #27
 /// <summary>
 /// Instantiates a new neural network with the layout provided by the specified <paramref name="layoutProvider"/>.
 /// </summary>
 /// <param name="layoutProvider">Provides the layout of the network</param>
 public NeuralNetwork(INetworkLayoutProvider layoutProvider, ICostFunction costFunction)
     : this(layoutProvider.GetLayers(), costFunction)
 {
 }
Code Example #28
 public StupidLineSearch(ICostFunction cost)
 {
     costFunction_ = cost;
     endCriteria_  = new EndCriteria();
 }
Code Example #29
 ///<summary>Default constructor for simplex method</summary>
 public NelderMead(ICostFunction costfunction)
     : this(costfunction, new EndCriteria())
 {
 }
Code Example #30
File: MinMax.cs Project: peter-popov/xomango
 public MinMax(ICostFunction<GameState> costFunction, ITurnsGenerator<GameState> turnsGenerator, int level)
 {
     maxDepth = level * 2 + 1;
     this.costFunction = costFunction;
     this.turnsGenerator = turnsGenerator;
 }
Code Example #31
 public NelderMead(ICostFunction costfunction, EndCriteria endcriteria)
 {
     this.costFunction_ = costfunction;
     this.endCriteria_  = endcriteria;
 }
Code Example #32
 public Trainer(Net net, float learningRate = 0.1f)
 {
     _net          = net;
     _learningRate = learningRate;
     _costFunction = new QuadraticCost();
 }
Code Example #33
File: Problem.cs Project: FoundOPS/TaskOptimizer
        private int[,] _mCachedCosts; // cache of task-to-task costs

        #endregion Fields

        #region Constructors

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="costFunction">Instance of problem-specific implementation of ICostFunction</param>
        /// <param name="numberIterationsWithoutImprovement">Number of iterations to run</param>
        public Problem(ICostFunction costFunction, int numberIterationsWithoutImprovement)
        {
            _numberIterationsWithoutImprovement = numberIterationsWithoutImprovement;
            CostFunction = costFunction;
        }
Code Example #34
File: OptimizationMethod.cs Project: lulzzz/sito
 /// <summary>
 /// Update and return the Cost.
 /// </summary>
 /// <param name="costFunction">The cost function to optimize.</param>
 /// <param name="properties">Properties for the optimization routine.</param>
 /// <returns>Double</returns>
 public virtual double UpdateCost(ICostFunction costFunction, OptimizerProperties properties)
 {
     return(costFunction.ComputeCost(properties.Theta));
 }