/// <summary>
/// Initializes a new instance of the <see cref="AdvancedNeuralNetwork"/> class: builds a
/// bipolar-sigmoid multi-layer <see cref="ActivationNetwork"/>, optionally applies
/// Nguyen-Widrow weight initialization, and creates a Levenberg-Marquardt teacher.
/// </summary>
/// <param name="inputFieldNames">Names of the input fields; their count defines the network's input size.</param>
/// <param name="outputFieldName">Name of the output field.</param>
/// <param name="neuronsCount">Number of neurons in each layer of the network.</param>
/// <param name="learningRate">Learning rate assigned to the Levenberg-Marquardt teacher.</param>
/// <param name="sigmoidAlphaValue">Alpha (steepness) value of the bipolar sigmoid activation function.</param>
/// <param name="useRegularization">True to enable Bayesian regularization in the teacher.</param>
/// <param name="useNguyenWidrow">True to randomize the weights with the Nguyen-Widrow method.</param>
/// <param name="useSameWeights">True to seed the random generator with 0 so initialization is
/// reproducible; only effective together with <paramref name="useNguyenWidrow"/>.</param>
/// <param name="method">Method by which the Jacobian matrix will be calculated.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="inputFieldNames"/>,
/// <paramref name="outputFieldName"/> or <paramref name="neuronsCount"/> is null.</exception>
public AdvancedNeuralNetwork(string[] inputFieldNames, string outputFieldName, int[] neuronsCount,
    double learningRate = 0.1, double sigmoidAlphaValue = 2, bool useRegularization = false,
    bool useNguyenWidrow = false, bool useSameWeights = false,
    JacobianMethod method = JacobianMethod.ByBackpropagation)
{
    // Validate up front instead of failing with an opaque NullReferenceException
    // inside ActivationNetwork construction.
    if (inputFieldNames == null)
        throw new ArgumentNullException(nameof(inputFieldNames));
    if (outputFieldName == null)
        throw new ArgumentNullException(nameof(outputFieldName));
    if (neuronsCount == null)
        throw new ArgumentNullException(nameof(neuronsCount));

    this.neuronsCount = neuronsCount;
    this.learningRate = learningRate;
    this.useRegularization = useRegularization;
    this.useNguyenWidrow = useNguyenWidrow;
    this.useSameWeights = useSameWeights;
    this.method = method;
    this.sigmoidAlphaValue = sigmoidAlphaValue;
    this.inputFieldNames = inputFieldNames;
    this.outputFieldName = outputFieldName;

    // Create the multi-layer neural network.
    // NOTE(review): the bipolar sigmoid is hard-wired here; the original author left a
    // question ("Andere Function möglich???" = "other function possible???") about
    // whether a different activation function should be supported.
    theNetwork = new ActivationNetwork(
        new BipolarSigmoidFunction(sigmoidAlphaValue),
        inputFieldNames.Length,
        neuronsCount);

    if (useNguyenWidrow)
    {
        // A constant seed makes the Nguyen-Widrow randomization reproducible
        // across runs, so repeated trainings start from identical weights.
        if (useSameWeights)
        {
            Accord.Math.Random.Generator.Seed = 0;
        }

        NguyenWidrow initializer = new NguyenWidrow(theNetwork);
        initializer.Randomize();
    }

    // Create the Levenberg-Marquardt teacher.
    teacher = new LevenbergMarquardtLearning(theNetwork, useRegularization, method);

    // Set the learning rate. (The original comment also mentioned momentum, but
    // Levenberg-Marquardt learning has no momentum parameter — only the rate is set.)
    teacher.LearningRate = learningRate;
}
/// <summary>
/// Initializes a new instance of the <see cref="LevenbergMarquardtLearning"/> class.
/// </summary>
///
/// <param name="network">Network to teach.</param>
/// <param name="useRegularization">True to use Bayesian regularization, false otherwise.</param>
/// <param name="method">The method by which the Jacobian matrix will be calculated.</param>
///
public LevenbergMarquardtLearning(ActivationNetwork network, bool useRegularization, JacobianMethod method)
{
    this.ParallelOptions = new ParallelOptions();
    this.network = network;
    this.numberOfParameters = getNumberOfParameters(network);
    this.outputCount = network.Layers[network.Layers.Length - 1].Neurons.Length;
    this.useBayesianRegularization = useRegularization;
    this.method = method;

    // Working storage for the solver, sized by the total number of free
    // parameters (weights and thresholds) of the network.
    this.weights = new float[numberOfParameters];
    this.diagonal = new float[numberOfParameters];
    this.gradient = new float[numberOfParameters];
    this.jacobian = new float[numberOfParameters][];

    // Square (parameters x parameters) Hessian approximation, allocated row by row.
    this.hessian = new float[numberOfParameters][];
    for (int row = 0; row < numberOfParameters; row++)
    {
        this.hessian[row] = new float[numberOfParameters];
    }

    if (method == JacobianMethod.ByBackpropagation)
    {
        // Backpropagation Jacobian: one derivative buffer per layer/neuron,
        // each sized by the neuron's input count, plus one threshold
        // derivative per neuron.
        int layerCount = network.Layers.Length;
        this.weightDerivatives = new float[layerCount][][];
        this.thresholdsDerivatives = new float[layerCount][];

        for (int li = 0; li < layerCount; li++)
        {
            var layer = (ActivationLayer)network.Layers[li];
            int neuronCount = layer.Neurons.Length;

            this.weightDerivatives[li] = new float[neuronCount][];
            this.thresholdsDerivatives[li] = new float[neuronCount];

            for (int ni = 0; ni < neuronCount; ni++)
            {
                this.weightDerivatives[li][ni] = new float[layer.InputsCount];
            }
        }
    }
    else
    {
        // Finite-difference Jacobian: differential coefficients plus one step
        // size per parameter, all starting at the default derivative step.
        this.differentialCoefficients = createCoefficients(3);
        this.derivativeStepSize = new double[numberOfParameters];

        for (int p = 0; p < numberOfParameters; p++)
        {
            this.derivativeStepSize[p] = derivativeStep;
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="LevenbergMarquardtLearning"/> class.
/// </summary>
///
/// <param name="network">Network to teach.</param>
/// <param name="useRegularization">True to use Bayesian regularization, false otherwise.</param>
/// <param name="method">The method by which the Jacobian matrix will be calculated.</param>
///
/// <remarks>
/// NOTE(review): this constructor has the same signature as the one above — two
/// identical signatures cannot coexist in one class, so these appear to be two
/// versions of the same constructor; one of them should be removed.
/// </remarks>
public LevenbergMarquardtLearning(ActivationNetwork network, bool useRegularization, JacobianMethod method)
{
    // FIX: the sibling version of this constructor initializes ParallelOptions;
    // without it, any parallel loop reading this.ParallelOptions would throw
    // a NullReferenceException. Added for consistency.
    this.ParallelOptions = new ParallelOptions();

    this.network = network;
    this.numberOfParameters = getNumberOfParameters(network);
    this.outputCount = network.Layers[network.Layers.Length - 1].Neurons.Length;
    this.useBayesianRegularization = useRegularization;
    this.method = method;

    // Solver working storage, sized by the total number of free parameters.
    this.weights = new float[numberOfParameters];
    this.hessian = new float[numberOfParameters][];
    for (int i = 0; i < hessian.Length; i++)
        hessian[i] = new float[numberOfParameters];
    this.diagonal = new float[numberOfParameters];
    this.gradient = new float[numberOfParameters];
    this.jacobian = new float[numberOfParameters][];

    // Will use backpropagation method for Jacobian computation
    if (method == JacobianMethod.ByBackpropagation)
    {
        // Create weight derivative arrays: one buffer per layer/neuron,
        // each sized by that neuron's input count.
        this.weightDerivatives = new float[network.Layers.Length][][];
        this.thresholdsDerivatives = new float[network.Layers.Length][];

        // initialize arrays
        for (int i = 0; i < network.Layers.Length; i++)
        {
            ActivationLayer layer = (ActivationLayer)network.Layers[i];

            this.weightDerivatives[i] = new float[layer.Neurons.Length][];
            this.thresholdsDerivatives[i] = new float[layer.Neurons.Length];

            for (int j = 0; j < layer.Neurons.Length; j++)
                this.weightDerivatives[i][j] = new float[layer.InputsCount];
        }
    }
    else // Will use finite difference method for Jacobian computation
    {
        // Create differential coefficient arrays and a per-parameter step
        // size, all initialized to the default derivative step.
        this.differentialCoefficients = createCoefficients(3);
        this.derivativeStepSize = new double[numberOfParameters];

        // initialize arrays
        for (int i = 0; i < numberOfParameters; i++)
            this.derivativeStepSize[i] = derivativeStep;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="LevenbergMarquardtLearning"/> class.
/// </summary>
///
/// <param name="network">Network to teach.</param>
/// <param name="method">The method by which the Jacobian matrix will be calculated.</param>
///
/// <remarks>
/// Convenience overload: delegates to the three-argument constructor with
/// Bayesian regularization disabled (<c>useRegularization = false</c>).
/// </remarks>
///
public LevenbergMarquardtLearning(ActivationNetwork network, JacobianMethod method)
    : this(network, false, method)
{
}