/// <summary>
/// Performs gradient descent to optimise theta parameters.
/// </summary>
/// <param name="theta">Initial Theta (Zeros)</param>
/// <param name="x">Training set</param>
/// <param name="y">Training labels</param>
/// <param name="maxIterations">Maximum number of iterations to run gradient descent</param>
/// <param name="learningRateAlpha">The learning rate (Alpha)</param>
/// <param name="costFunction">Cost function to use for gradient descent</param>
/// <param name="lambda">The regularization constant to apply</param>
/// <param name="regularizer">The regularization function to apply</param>
/// <returns>Tuple of (best cost found, theta after the last accepted step).</returns>
public static Tuple<double, Vector> Run(
    Vector theta,
    Matrix x,
    Vector y,
    int maxIterations,
    double learningRateAlpha,
    ICostFunction costFunction,
    double lambda,
    IRegularizer regularizer)
{
    var bestTheta = theta.Copy();
    var bestCost = double.PositiveInfinity;
    var lastAccepted = bestTheta;
    Vector lastGradient = null;

    // Run exactly maxIterations iterations (the previous "<=" bound ran one extra).
    for (var i = 0; i < maxIterations; i++)
    {
        var currentCost = costFunction.ComputeCost(bestTheta, x, y, lambda, regularizer);

        if (currentCost < bestCost)
        {
            // The last step improved the cost: accept it and step again from here.
            // (The first iteration always lands here because bestCost starts at +infinity.)
            bestCost = currentCost;
            lastAccepted = bestTheta;
            lastGradient = costFunction.ComputeGradient(bestTheta, x, y, lambda, regularizer);
            bestTheta = lastAccepted - learningRateAlpha * lastGradient;
        }
        else
        {
            // The last step overshot: revert to the last accepted theta and retry the
            // step with a smaller learning rate. (Previously the bad theta was kept,
            // which froze the search for all remaining iterations after one bad step.)
            learningRateAlpha *= 0.99;
            bestTheta = lastAccepted - learningRateAlpha * lastGradient;
        }
    }

    return new Tuple<double, Vector>(bestCost, bestTheta);
}
/// <summary>
/// Initializes a new instance of the <see cref="BaseOptimizer"/> class.
/// </summary>
/// <param name="neuralNetwork">The neural network.</param>
/// <param name="costFunction">The cost function.</param>
protected BaseOptimizer(INeuralNetwork neuralNetwork, ICostFunction costFunction)
{
    // Fail fast on null dependencies before capturing them.
    Contracts.ValueNotNull(neuralNetwork, nameof(neuralNetwork));
    Contracts.ValueNotNull(costFunction, nameof(costFunction));

    _neuralNetwork = neuralNetwork;
    _costFunction = costFunction;
}
/// <summary>
/// Creates a brute-force line search over the given cost function with
/// default end criteria and default division settings.
/// </summary>
/// <param name="cost">The cost function to minimize along the search line.</param>
public BruteForceLineSearch(ICostFunction cost)
{
    costFunction_ = cost;
    endCriteria_ = new EndCriteria();

    // Default subdivision schedule: a coarse initial pass, then repeated halving.
    _divisionDepth = 32;
    _numberOfInitialDivisions = 128;
    _numberOfSubsequentDivisions = 2;
}
/// <summary>
/// Maps a cost function instance to its serialized name.
/// </summary>
/// <param name="costFunction">The cost function instance to name.</param>
/// <returns>The registered name for the cost function's type.</returns>
/// <exception cref="ArgumentException">Thrown when the cost function type is not recognized.</exception>
private string GetCostFunctionName(ICostFunction costFunction)
{
    if (costFunction is QuadraticCost)
    {
        return "QuadraticCost";
    }

    throw new ArgumentException($"{costFunction} is not a known name for a cost function");
}
/// <summary>
/// Initializes a brute-force line search with default end criteria and
/// default division settings.
/// </summary>
/// <param name="cost">The cost function to search along.</param>
public BruteForceLineSearch(ICostFunction cost)
{
    this.costFunction_ = cost;
    this.endCriteria_ = new EndCriteria();

    _divisionDepth = 32;
    _numberOfInitialDivisions = 128;
    _numberOfSubsequentDivisions = 2;
}
/// <summary>
/// Initializes a stochastic-gradient-descent trainer.
/// </summary>
/// <param name="epochs">Number of training epochs.</param>
/// <param name="minibatch">Size of each mini-batch.</param>
/// <param name="η">The learning rate.</param>
/// <param name="costFunction">The cost function to minimize.</param>
/// <param name="logger">Logger for training output.</param>
/// <param name="randomSeed">Optional random seed.</param>
public SgdTrainer(int epochs, int minibatch, double η, ICostFunction costFunction, ILogger logger, int? randomSeed)
{
    // Capture collaborators first, then the scalar hyper-parameters.
    _costFunction = costFunction;
    _logger = logger;
    _randomSeed = randomSeed;

    _epochs = epochs;
    _minibatch = minibatch;
    _η = η;
}
/// <summary>
/// Initializes a new instance of the <see cref="MomentumOptimizer"/> class.
/// </summary>
/// <param name="neuralNetwork">The neural network.</param>
/// <param name="costFunction">The cost function.</param>
/// <param name="momentum">The momentum value; must be greater than zero.</param>
public MomentumOptimizer(INeuralNetwork neuralNetwork, ICostFunction costFunction, double momentum)
    : base(neuralNetwork, costFunction)
{
    // The base constructor runs first and already null-checks neuralNetwork and
    // costFunction, so only the momentum value needs validating here.
    Contracts.ValueGreaterThanZero(momentum, nameof(momentum));

    _momentum = momentum;
    InitializeVelocities(neuralNetwork);
}
/// <summary>
/// Initializes the stochastic-gradient-descent optimization strategy with its
/// activation functions, cost function and regularization collaborators.
/// </summary>
public StochasticGradientDescent(IActivationFunction hiddenActivationFunction, IActivationFunction outputActivationFunction, ICostFunction costFunction, RegularizationType regularizationType, IRegularizationStrategyFactory regularizationStrategyFactory)
{
    // Dependencies are handed in directly; would normally solve this with IOC.
    _costFunction = costFunction;
    _hiddenActivationFunction = hiddenActivationFunction;
    _outputActivationFunction = outputActivationFunction;
    _regularizationStrategyFactory = regularizationStrategyFactory;
    _regularizationType = regularizationType;
}
/// <summary>
/// Creates a builder wired with the collaborators needed to construct ptypes.
/// </summary>
/// <param name="model">The model identifier.</param>
/// <param name="costFunc">The cost function.</param>
/// <param name="partGetter">Supplies parts.</param>
/// <param name="constraintGetter">Supplies constraints.</param>
/// <param name="orderSelecter">Selects the part ordering.</param>
/// <param name="constructor">Builds a ptype from an assignment.</param>
public SearchPtypeBuilder(string model, ICostFunction costFunc, IPartGetter partGetter, IConstraintGetter constraintGetter, IPartOrderSelecter orderSelecter, IPtypeFromAssignment constructor)
{
    _model = model;
    _costFunction = costFunc;
    _partGetter = partGetter;
    _constraintGetter = constraintGetter;
    _orderSelecter = orderSelecter;
    _constructor = constructor;
}
/// <summary>
/// Private constructor: captures the network's collaborators and starts with
/// an empty hidden-layer list.
/// </summary>
private Network(ICostFunction costFunction, IRegularizationFunction regularizationFunction, DropoutLayerOptions dropoutLayerOptions, Random rand)
{
    _costFunction = costFunction;
    _dropoutLayerOptions = dropoutLayerOptions;
    _regularizationFunction = regularizationFunction;

    HiddenLayers = new List<HiddenLayer>();
    NetworkRandom = rand;
}
/// <summary>
/// Rebuilds a <see cref="NeuralNetwork"/> from its JSON representation.
/// </summary>
/// <param name="json">The serialized network.</param>
/// <returns>The deserialized, fully connected network.</returns>
public NeuralNetwork Deserialize(string json)
{
    var networkJson = JsonConvert.DeserializeObject<NetworkJson>(json);

    // Materialize one Layer per serialized layer, restoring weights, biases
    // and the activation function by name.
    var layers = new List<Layer>();
    foreach (var layerJson in networkJson.Layers)
    {
        layers.Add(new Layer(layerJson.Weights, layerJson.Biases, GetActivation(layerJson.Activation)));
    }

    ConnectLayers(layers);

    return new NeuralNetwork(layers, GetCostFunction(networkJson.CostFunction));
}
/// <summary>
/// Sets <see cref="CostFunc"/> to the implementation matching the given enum value.
/// </summary>
/// <param name="costFunc">Identifies which cost function implementation to use.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unrecognized enum value.</exception>
private void SetCostFunction(CostFunction costFunc)
{
    switch (costFunc)
    {
        case CostFunction.MeanSquare:
            CostFunc = new MeanSquare();
            break;
        case CostFunction.CrossEntropy:
            CostFunc = new CrossEntropy();
            break;
        default:
            // Previously an unknown value fell through silently, leaving CostFunc unset.
            throw new ArgumentOutOfRangeException(nameof(costFunc), costFunc, "Unknown cost function.");
    }
}
/// <summary>
/// Initializes a stochastic-gradient-descent trainer over the given layers,
/// validating all hyper-parameters and pre-allocating the gradient buffers.
/// </summary>
/// <param name="costFunction">The cost function to minimize.</param>
/// <param name="layers">The network layers; at least two are required.</param>
/// <param name="epochs">Number of training epochs; must be positive.</param>
/// <param name="trainingBatchSize">Mini-batch size; must be positive.</param>
/// <param name="learningRate">The learning rate; must be positive.</param>
/// <param name="regularizationParam">Regularization strength; must be non-negative.</param>
public StochasticGradientDescent(ICostFunction costFunction, ILayer[] layers, int epochs, int trainingBatchSize, double learningRate, double regularizationParam)
{
    // Null checks first, so bad inputs fail here instead of as a NullReferenceException below.
    if (costFunction == null)
    {
        throw new ArgumentNullException(nameof(costFunction));
    }
    if (layers == null)
    {
        throw new ArgumentNullException(nameof(layers));
    }
    if (learningRate <= 0)
    {
        throw new ArgumentException("learningRate must be greater than zero");
    }
    if (trainingBatchSize <= 0)
    {
        throw new ArgumentException("trainingBatchSize must be greater than zero");
    }
    if (epochs <= 0)
    {
        throw new ArgumentException("epochs must be greater than zero");
    }
    if (regularizationParam < 0)
    {
        // Message previously referred to a nonexistent "regularizationRate" parameter.
        throw new ArgumentException("regularizationParam must be greater or equal to zero");
    }
    if (layers.Length < 2)
    {
        throw new NeuralNetworksException("At least two layers are mandatory");
    }

    _costFunction = costFunction;
    _layers = layers;
    _epochs = epochs;
    _trainingBatchSize = trainingBatchSize;
    _learningRate = learningRate;
    _regularizationParam = regularizationParam;

    // Snapshot per-layer parameter matrices and allocate zeroed gradient buffers.
    _neuronsBiases = layers.Select(s => s.NeuronsBiases).ToArray();
    _synapsesWeights = layers.Select(s => s.SynapsesWeights).ToArray();
    _neuronsGradients = _neuronsBiases.CopyWithZeros();
    _synapsesGradients = _synapsesWeights.CopyWithZeros();
    _neuronsAdittionsToGradients = _neuronsBiases.CopyWithZeros();
}
/// <summary>
/// Creates a Nelder-Mead (downhill simplex) optimizer.
/// </summary>
/// <param name="costfunction">The cost function to minimize.</param>
/// <param name="endcriteria">Criteria that terminate the optimization.</param>
public NelderMead(ICostFunction costfunction, EndCriteria endcriteria)
{
    costFunction_ = costfunction;
    endCriteria_ = endcriteria;
}
/// <summary>
/// Initializes a new instance of the <see cref="GradientDescentOptimizer"/> class.
/// All setup is delegated to the base-class constructor.
/// </summary>
/// <param name="neuralNetwork">The neural network.</param>
/// <param name="costFunction">The cost function.</param>
public GradientDescentOptimizer(INeuralNetwork neuralNetwork, ICostFunction costFunction)
    : base(neuralNetwork, costFunction)
{
}
///<summary>Default constructor for simplex method; delegates to the two-argument
/// constructor with a freshly constructed default <see cref="EndCriteria"/>.</summary>
///<param name="costfunction">The cost function to minimize.</param>
public NelderMead(ICostFunction costfunction) : this(costfunction, new EndCriteria())
{
}
/// <summary>
/// Fluent setter: selects the cost function for the network being built.
/// </summary>
/// <param name="costFunction">The cost function to use.</param>
/// <returns>This builder, to allow call chaining.</returns>
public NeuralNetworkBuilder Using(ICostFunction costFunction)
{
    this.costFunction = costFunction;
    return this;
}
/// <summary>
/// Gets the φ(α) function: the cost evaluated along a ray from
/// <paramref name="location"/> in the given <paramref name="direction"/>.
/// </summary>
/// <param name="function">The cost function to evaluate.</param>
/// <param name="location">The starting location of the ray.</param>
/// <param name="direction">The direction of the ray.</param>
/// <returns>A delegate mapping the step size α to the cost at location + α·direction.</returns>
private static Func <double, double> Getφ([NotNull] ICostFunction <double> function, [NotNull] Vector <double> location, [NotNull] Vector <double> direction)
{
    return(alpha => function.CalculateCost(location + alpha * direction));
}
/// <summary>
/// Reconstructs a network (training parameters, layer structure, strategy,
/// weights and biases) from the given XML file.
/// </summary>
/// <param name="xmlFilePath">Path to the XML file describing the network.</param>
/// <param name="dataset">The data set the network is constructed with.</param>
/// <returns>The fully initialized network.</returns>
/// <exception cref="InvalidOperationException">Thrown when a type name stored in the XML cannot be resolved or instantiated.</exception>
public INetwork LoadNetwork(string xmlFilePath, IDataSet dataset)
{
    _xmlDoc = new XmlDocument();
    _filepath = xmlFilePath;
    _xmlDoc.Load(_filepath);

    //Create Training Params---------------------8::::::::::::::>----------------------------------------
    TrainingParameters trainingParams = new TrainingParameters()
    {
        epochs = Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/TrainingParameters/Epochs")),
        learningRate = Convert.ToDouble(GetXmlValue("NeuralNetwork/Settings/TrainingParameters/LearningRate")),
        momentum = Convert.ToDouble(GetXmlValue("NeuralNetwork/Settings/TrainingParameters/Momentum"))
    };

    //Create LayerStructure---------------------8::::::::::::::>----------------------------------------
    List<int> hiddenLayersList = new List<int>();
    int numberOfHiddenLayers = Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/LayerStructure/NumberOfHiddenLayers"));
    for (int i = 1; i <= numberOfHiddenLayers; i++)
    {
        // Hidden-layer sizes are stored under 1-based element names.
        hiddenLayersList.Add(Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/LayerStructure/NumberOfNeuronsInHiddenLayer" + i.ToString())));
    }

    LayerStructure layerStructure = new LayerStructure()
    {
        numberOfInputNodes = Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/LayerStructure/NumberOfInputNeurons")),
        numberOfOutputNodes = Convert.ToInt32(GetXmlValue("NeuralNetwork/Settings/LayerStructure/NumberOfOutputNeurons")),
        HiddenLayerList = hiddenLayersList
    };

    //Create Distribution Type---------------------8::::::::::::::>----------------------------------------
    string distribution = GetXmlValue("NeuralNetwork/Settings/DistributionType/InitialRandomDistributionType");
    IInitialRandomDistributionType initialDistribution = CreateFromTypeName<IInitialRandomDistributionType>(distribution);

    //Create Strategy---------------------8::::::::::::::>----------------------------------------
    string hiddenFunction = GetXmlValue("NeuralNetwork/Settings/Strategy/HiddenLayerActivationFunction");
    string outputFunction = GetXmlValue("NeuralNetwork/Settings/Strategy/OutputLayerActivationFunction");
    string costFunc = GetXmlValue("NeuralNetwork/Settings/Strategy/CostFunction");
    string regularizationEnum = GetXmlValue("NeuralNetwork/Settings/Strategy/RegularizationType");
    string regularizationStrategyFactory = GetXmlValue("NeuralNetwork/Settings/Strategy/RegularizationStrategyFactory");

    IActivationFunction hiddenActivationFunction = CreateFromTypeName<IActivationFunction>(hiddenFunction);
    IActivationFunction outputActivationFunction = CreateFromTypeName<IActivationFunction>(outputFunction);
    ICostFunction costFunction = CreateFromTypeName<ICostFunction>(costFunc);
    RegularizationType regularizationType = (RegularizationType)Enum.Parse(typeof(RegularizationType), regularizationEnum);
    IRegularizationStrategyFactory regularizationStrategyFact = CreateFromTypeName<IRegularizationStrategyFactory>(regularizationStrategyFactory);

    string optimizationStrategy = GetXmlValue("NeuralNetwork/Settings/Strategy/OptimizationStrategy");
    IOptimizationStrategy strategy = CreateFromTypeName<IOptimizationStrategy>(
        optimizationStrategy,
        hiddenActivationFunction, outputActivationFunction, costFunction, regularizationType, regularizationStrategyFact);

    //Create Network---------------------8::::::::::::::>----------------------------------------
    network = new ArtificialNeuralNetwork(layerStructure, trainingParams, dataset, strategy, initialDistribution);

    //Set Weights---------------------8::::::::::::::>----------------------------------------
    network.Weights.Clear();
    LoadInputWeights();
    LoadHiddenWeights();

    //Set Biases---------------------8::::::::::::::>----------------------------------------
    network.Biases.Clear();
    LoadHiddenBiases();
    LoadOutputBiases();

    //Clear XMLDoc---------------------8::::::::::::::>----------------------------------------
    _xmlDoc = null;
    return network;
}

/// <summary>
/// Resolves a type name from the NeuralNetwork.Core assembly and instantiates it as T.
/// Replaces the previous unchecked Type.GetType / as-cast pattern, which surfaced bad
/// XML values as NullReferenceException far from the cause.
/// </summary>
/// <param name="typeName">The assembly-unqualified type name read from the XML.</param>
/// <param name="args">Optional constructor arguments.</param>
private static T CreateFromTypeName<T>(string typeName, params object[] args) where T : class
{
    var type = Type.GetType(typeName + ",NeuralNetwork.Core");
    if (type == null)
    {
        throw new InvalidOperationException($"Type '{typeName}' could not be resolved from NeuralNetwork.Core.");
    }

    var instance = Activator.CreateInstance(type, args) as T;
    if (instance == null)
    {
        throw new InvalidOperationException($"Type '{typeName}' does not implement {typeof(T).Name}.");
    }

    return instance;
}
/// <summary>
/// Update and return the Gradient.
/// </summary>
/// <param name="costFunction">The cost function to optimize.</param>
/// <param name="properties">Properties for the optimization routine.</param>
/// <returns>The gradient vector evaluated at the current theta.</returns>
public virtual Vector UpdateGradient(ICostFunction costFunction, OptimizerProperties properties)
{
    var gradient = costFunction.ComputeGradient(properties.Theta);
    return gradient;
}
/// <summary>
/// Update and return the Gradient.
/// </summary>
/// <param name="costFunction">The cost function to optimize.</param>
/// <param name="properties">Properties for the optimization routine.</param>
/// <returns>The gradient vector at the current theta.</returns>
public virtual Vector UpdateGradient(ICostFunction costFunction, OptimizerProperties properties) =>
    costFunction.ComputeGradient(properties.Theta);
/// <summary>
/// Fills in default collaborators before the build runs: a fake layout provider
/// and a quadratic cost, used only when the test did not supply its own.
/// </summary>
protected override void OnPreBuild()
{
    if (layoutProvider == null)
    {
        layoutProvider = A.Fake<INetworkLayoutProvider>();
    }

    if (costFunction == null)
    {
        costFunction = new QuadraticCost();
    }
}
/// <summary>
/// Creates the routing algorithm with its configuration and cost function.
/// </summary>
/// <param name="config">The slave configuration.</param>
/// <param name="costfunc">The cost function.</param>
public PlanetColonizerRoutingAlgorithm(SlaveConfig config, ICostFunction costfunc)
{
    configuration = config;
    CostFunction = costfunc;
}
/// <summary>
/// Update and return the Cost.
/// </summary>
/// <param name="costFunction">The cost function to optimize.</param>
/// <param name="properties">Properties for the optimization routine.</param>
/// <returns>The scalar cost at the current theta.</returns>
public virtual double UpdateCost(ICostFunction costFunction, OptimizerProperties properties) =>
    costFunction.ComputeCost(properties.Theta);
/// <summary>
/// Initializes the line search with the given cost function and default end criteria.
/// </summary>
/// <param name="cost">The cost function to search along.</param>
public StupidLineSearch(ICostFunction cost)
{
    endCriteria_ = new EndCriteria();
    costFunction_ = cost;
}
/// <summary>
/// Instantiates a new neural network with the layout provided by the specified <paramref name="layoutProvider"/>.
/// </summary>
/// <param name="layoutProvider">Provides the layout of the network</param>
/// <param name="costFunction">The cost function, forwarded to the main constructor.</param>
public NeuralNetwork(INetworkLayoutProvider layoutProvider, ICostFunction costFunction) : this(layoutProvider.GetLayers(), costFunction)
{
}
/// <summary>
/// Initializes the line search over the given cost function with default end criteria.
/// </summary>
/// <param name="cost">The cost function to search along.</param>
public StupidLineSearch(ICostFunction cost)
{
    this.endCriteria_ = new EndCriteria();
    this.costFunction_ = cost;
}
/// <summary>
/// Creates a MinMax searcher. The maximum search depth is derived from the
/// level as (level * 2) + 1, i.e. always an odd number of plies.
/// </summary>
/// <param name="costFunction">Scores a <see cref="GameState"/>.</param>
/// <param name="turnsGenerator">Generates the possible turns from a state.</param>
/// <param name="level">Scales the maximum search depth.</param>
public MinMax(ICostFunction<GameState> costFunction, ITurnsGenerator<GameState> turnsGenerator, int level)
{
    this.costFunction = costFunction;
    this.turnsGenerator = turnsGenerator;
    maxDepth = (level * 2) + 1;
}
/// <summary>
/// Creates a trainer for the given net, using a quadratic cost function.
/// </summary>
/// <param name="net">The network to train.</param>
/// <param name="learningRate">The learning rate (default 0.1).</param>
public Trainer(Net net, float learningRate = 0.1f)
{
    _costFunction = new QuadraticCost();
    _learningRate = learningRate;
    _net = net;
}
private int[,] _mCachedCosts; // cache of task-to-task costs

#endregion Fields

#region Constructors

/// <summary>
/// Constructor.
/// </summary>
/// <param name="costFunction">Instance of problem-specific implementation of ICostFunction</param>
/// <param name="numberIterationsWithoutImprovement">Number of iterations to run</param>
public Problem(ICostFunction costFunction, int numberIterationsWithoutImprovement)
{
    CostFunction = costFunction;
    _numberIterationsWithoutImprovement = numberIterationsWithoutImprovement;
}
/// <summary>
/// Update and return the Cost.
/// </summary>
/// <param name="costFunction">The cost function to optimize.</param>
/// <param name="properties">Properties for the optimization routine.</param>
/// <returns>The scalar cost at the current theta.</returns>
public virtual double UpdateCost(ICostFunction costFunction, OptimizerProperties properties)
{
    var cost = costFunction.ComputeCost(properties.Theta);
    return cost;
}