/// <summary>
/// Construct a deep belief neural network.
/// </summary>
/// <param name="inputCount">The input count.</param>
/// <param name="hidden">The counts for the hidden layers.</param>
/// <param name="outputCount">The output neuron count.</param>
/// <exception cref="ArgumentNullException">If hidden is null.</exception>
/// <exception cref="ArgumentException">If hidden contains no layers.</exception>
public DeepBeliefNetwork(int inputCount, int[] hidden, int outputCount)
{
    if (hidden == null)
    {
        throw new ArgumentNullException(nameof(hidden));
    }
    if (hidden.Length == 0)
    {
        throw new ArgumentException("At least one hidden layer is required.", nameof(hidden));
    }

    _layers = new HiddenLayer[hidden.Length];
    _rbm = new RestrictedBoltzmannMachine[hidden.Length];

    for (int i = 0; i < _rbm.Length; i++)
    {
        // The first hidden layer is fed by the input layer; every later
        // layer is fed by the hidden layer immediately below it.
        int inputSize = (i == 0) ? inputCount : hidden[i - 1];

        _layers[i] = new HiddenLayer(this, inputSize, hidden[i]);
        // Each hidden layer is paired with a restricted Boltzmann machine.
        _rbm[i] = new RestrictedBoltzmannMachine(_layers[i]);
    }

    // The output layer is fed by the topmost hidden layer.
    _outputLayer = new DeepLayer(this, hidden[hidden.Length - 1], outputCount);

    Random = new MersenneTwisterGenerateRandom();
}
/// <summary>
/// Construct the backpropagation trainer.
/// </summary>
/// <param name="theNetwork">The network to train.</param>
/// <param name="theTraining">The training data to use.</param>
/// <param name="theLearningRate">The learning rate. Can be changed as training runs.</param>
/// <param name="theMomentum">The momentum. Can be changed as training runs.</param>
/// <exception cref="ArgumentNullException">If theNetwork or theTraining is null.</exception>
public BackPropagation(BasicNetwork theNetwork, IList<BasicData> theTraining,
    double theLearningRate, double theMomentum)
{
    if (theNetwork == null)
    {
        throw new ArgumentNullException(nameof(theNetwork));
    }
    if (theTraining == null)
    {
        throw new ArgumentNullException(nameof(theTraining));
    }

    // Defaults; the corresponding properties can be adjusted by callers.
    BatchSize = 500;
    Stochastic = new MersenneTwisterGenerateRandom();
    NesterovUpdate = true;

    _network = theNetwork;
    _training = theTraining;
    LearningRate = theLearningRate;
    Momentum = theMomentum;

    // Gradients are calculated against a cross-entropy error function.
    _gradients = new GradientCalc(_network, new CrossEntropyErrorFunction(), this);

    // One previous-delta slot per network weight; presumably consumed by the
    // momentum/Nesterov update — confirm against the training-iteration code.
    _lastDelta = new double[theNetwork.Weights.Length];
}