public void BackPropagateWeightedCombinerBankIsCorrect()
{
    // A bank of three copies of a 1x1 weighted combiner whose single weight is 1.
    const int numberOfBanks = 3;
    WeightedCombiner sharedLayer = new WeightedCombiner(new WeightsMatrix(new double[,] { { 1 } }));
    WeightedCombinerBank bank = new WeightedCombinerBank(sharedLayer, numberOfBanks);

    NetworkVector input = new NetworkVector(new double[] { 1, 2, 3 });
    NetworkVector outputGradient = new NetworkVector(new double[] { 5, 7, 11 });

    bank.Run(input);

    // With a unit weight the input gradient should equal the back-propagated vector.
    NetworkVector expectedInputGradient = new NetworkVector(new double[] { 5, 7, 11 });
    Assert.AreEqual(expectedInputGradient, bank.InputGradient(outputGradient));

    bank.BackPropagate(outputGradient, input);
    bank.Update(new GradientDescent());

    // Hand-computed parameters after a single gradient-descent update.
    NetworkVector expectedBiases = new NetworkVector(new double[] { -23 });
    WeightsMatrix expectedWeights = new WeightsMatrix(new double[,] { { -51 } });
    Assert.AreEqual(expectedBiases, bank.Biases);
    Assert.AreEqual(expectedWeights, bank.Weights);
}
public void WhenConstructedThenSizeCanBeReRetrieved()
{
    // A matrix built with an explicit size must report that same size back.
    const int expectedSize = 5;

    WeightsMatrix matrix = new WeightsMatrix(expectedSize);

    Assert.AreEqual(expectedSize, matrix.Size);
}
public void WhenMaxtrixClonedThenValuesIdentical()
{
    // Arrange: fill a matrix with deterministic, position-dependent weights.
    int size = 5;
    WeightsMatrix matrix = new WeightsMatrix(size);
    for (int i = 0; i < size; i++)
    {
        for (int j = 0; j < size; j++)
        {
            int weight = CalculateWeight(i, j);
            matrix.SetWeight(i, j, weight);
        }
    }

    // Act: clone the matrix.
    WeightsMatrix clonedMatrix = matrix.Clone() as WeightsMatrix;

    // Assert: the CLONE must hold the same values as the source.
    // BUG FIX: the original asserted against `matrix` (the source object),
    // so the clone's contents were never actually verified.
    Assert.IsNotNull(clonedMatrix);
    for (int i = 0; i < size; i++)
    {
        for (int j = 0; j < size; j++)
        {
            int weight = CalculateWeight(i, j);
            Assert.AreEqual(weight, clonedMatrix.GetWeight(i, j));
        }
    }
}
/// <summary>
/// Builds the network topology described by <paramref name="Structure"/> on the
/// `net` field, then wires links using the flat <paramref name="Weights"/> array
/// and assigns <paramref name="Biases"/> to every neuron beyond the input layer.
/// </summary>
/// <param name="Structure">Per-layer descriptors; element i is a short[,] whose [0,0] entry is read as that layer's neuron count.</param>
/// <param name="Weights">Flat link weights consumed in order: layer pair by layer pair, then by to-neuron, then by from-neuron.</param>
/// <param name="Biases">Biases for the neurons of layers 1..N-1, consumed in layer-then-neuron order (the input layer gets none).</param>
private void populateNeuralNet(ArrayList Structure, float [] Weights, float [] Biases)
{
    //short[] neurons = structure;
    net.initNeuralNet(Structure);
    float[] w = Weights;

    // create links
    int totalLinks = 0;
    // calculate total number of links: product of each pair of adjacent layer sizes, summed
    for (int i = 0; i < Structure.Count - 1; i++)
    {
        totalLinks += ((short[, ])(Structure[i]))[0, 0] * ((short[, ])(Structure[i + 1]))[0, 0];
    }

    // initialize WeightsCollection from the flat input array
    WeightsMatrix weights = new WeightsMatrix(totalLinks);
    for (int i = 0; i < totalLinks; i++)
    {
        weights.addWeight(w[i], i);
    }

    // initialize Links
    int linkcount = 0;
    for (short layerCount = 0; layerCount + 1 < Structure.Count; layerCount++)
    {
        // get adjacent layers
        NeuralNet.Layer layerOne = net.getLayer(layerCount);
        NeuralNet.Layer layerTwo = net.getLayer(layerCount + 1);
        // traverse neurons of second layer
        for (int toNeuronCount = 0; toNeuronCount < layerTwo.getNeuronCount(); toNeuronCount++)
        {
            // traverse all neurons of first layer
            for (int fromNeuronCount = 0; fromNeuronCount < layerOne.getNeuronCount(); fromNeuronCount++)
            {
                // create a link
                // NOTE(review): the link is never added to any collection here;
                // presumably the Link constructor registers itself with the two
                // neurons it connects — confirm against NeuralNet.Link.
                NeuralNet.Link link = new NeuralNet.Link(layerOne.getNeuron(fromNeuronCount), layerTwo.getNeuron(toNeuronCount));
                link.setWeight(weights.getWeight(linkcount++));
            }
        }
    }

    // biases
    float[] biases = Biases;
    // initialize neurons with biases; layer 0 (the input layer) is skipped
    int count = 0;
    for (short layerCount = 1; layerCount < net.layers.Count; layerCount++)
    {
        for (int neuronCount = 0; neuronCount < net.getLayer(layerCount).getNeuronCount(); neuronCount++)
        {
            net.getLayer(layerCount).getNeuron(neuronCount).setBias(biases[count++]);
        }
    }
}
public void CanMake()
{
    // Arrange/Act: build a trainer over a one-neuron weighted combiner.
    WeightsMatrix matrix = new WeightsMatrix(new double[, ] { { 1, 1 } });
    WeightedCombiner wc = new WeightedCombiner(matrix);
    Trainer trainer = new Trainer(wc, new SquaredError(), new GradientDescent());

    // IDIOM FIX: Assert.IsNotNull states the intent directly; the original
    // used the roundabout Assert.AreNotEqual(null, trainer).
    Assert.IsNotNull(trainer);
}
public void CanLeftMultiply()
{
    // The swap matrix [[0,1],[1,0]] maps the all-ones vector to itself.
    WeightsMatrix swapMatrix = new WeightsMatrix(new double[,] { { 0, 1 }, { 1, 0 } });
    NetworkVector ones = new NetworkVector(new double[] { 1, 1 });

    double[] product = swapMatrix.LeftMultiply(ones).ToArray();

    Assert.AreEqual(1, product[0]);
    Assert.AreEqual(1, product[1]);
}
public void CanBack1()
{
    wc_1b.Run(vector_1);

    // For this fixture every gradient should mirror the corresponding input
    // (see the test-class setup for wc_1b, vector_1 and matrix_1).
    NetworkVector expectedInputGradient = vector_1.Copy();
    NetworkVector expectedBiasesGradient = vector_1.Copy();
    WeightsMatrix expectedWeightsGradient = matrix_1.Copy();

    Assert.AreEqual(expectedInputGradient, wc_1b.InputGradient(vector_1));
    Assert.AreEqual(expectedBiasesGradient, wc_1b.BiasesGradient(vector_1));
    Assert.AreEqual(expectedWeightsGradient, wc_1b.WeightsGradient(vector_1, vector_1));
}
public void CanGetWeightsUpdate()
{
    GradientDescent strategy = new GradientDescent(0.5, 1);
    WeightsMatrix gradient = new WeightsMatrix(new double[,] { { 1, 2, 3 }, { 2, 3, 4 } });

    WeightsMatrix update = strategy.WeightsUpdate(gradient);

    // Each expected entry is -0.5 times the corresponding gradient entry.
    WeightsMatrix expected = new WeightsMatrix(new double[,] { { -0.5, -1.0, -1.5 }, { -1.0, -1.5, -2.0 } });
    Assert.AreEqual(expected, update);
}
public void WhenConstructedThenAllWeightsAreZero()
{
    const int size = 5;
    WeightsMatrix matrix = new WeightsMatrix(size);

    // Every entry of a freshly constructed matrix must be zero.
    for (int row = 0; row < size; row++)
    {
        for (int col = 0; col < size; col++)
        {
            Assert.AreEqual(0, matrix.GetWeight(row, col));
        }
    }
}
public void CanRunOnline_WC()
{
    // Arrange: a single-output weighted combiner and one training pair.
    WeightsMatrix matrix = new WeightsMatrix(new double[, ] { { 1, 1 } });
    WeightedCombiner wc = new WeightedCombiner(matrix);
    TrainingCollection trainingVectors = new TrainingCollection
    {
        new VectorPair(
            new NetworkVector(new double[] { 0, 0 }),
            new NetworkVector(new double[] { 1 }))
    };

    // Act: run one training pass.
    // BUG FIX: the original test built the fixture but never constructed a
    // Trainer or called Train, so it exercised nothing. This mirrors the
    // sibling CanRunOnline_LogisticLayer smoke test: passing means no throw.
    Trainer trainer = new Trainer(wc, new SquaredError(), new GradientDescent());
    trainer.Train(trainingVectors);
}
public void TrainOnline_WC_CorrectThreePasses()
{
    // Arrange: combiner with weights (1, 1) and the XNOR-style truth table.
    WeightsMatrix matrix = new WeightsMatrix(new double[,] { { 1, 1 } });
    WeightedCombiner wc = new WeightedCombiner(matrix);
    TrainingCollection trainingVectors = new TrainingCollection
    {
        new VectorPair(new NetworkVector(new double[] { 0, 0 }), new NetworkVector(new double[] { 1 })),
        new VectorPair(new NetworkVector(new double[] { 1, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 0, 1 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 1, 1 }), new NetworkVector(new double[] { 1 }))
    };
    Trainer trainer = new Trainer(wc, new SquaredError(), new GradientDescent());

    // Act: three online passes, each pair trained as its own singleton batch.
    for (int pass = 0; pass < 3; pass++)
    {
        foreach (TrainingCollection singleton in trainingVectors.AsSingletons())
        {
            trainer.Train(singleton);
        }
    }

    // Assert: hand-computed parameters after three online passes.
    WeightsMatrix expectedWeights = new WeightsMatrix(new double[,] { { 3, 7 } });
    NetworkVector expectedBiases = new NetworkVector(new double[] { -1 });
    Assert.AreEqual(expectedBiases, wc.Biases);
    Assert.AreEqual(expectedWeights, wc.Weights);
}
public void WhenWeightIsSetThenRetrievedWeightMatchesSetValue()
{
    const int size = 5;
    WeightsMatrix matrix = new WeightsMatrix(size);

    // Round-trip every cell: a value written must be read back unchanged.
    for (int row = 0; row < size; row++)
    {
        for (int col = 0; col < size; col++)
        {
            int expected = CalculateWeight(row, col);
            matrix.SetWeight(row, col, expected);
            Assert.AreEqual(expected, matrix.GetWeight(row, col));
        }
    }
}
public void CanUpdate()
{
    AdaptationStrategy strategy = new GradientDescent(1.0, 1);

    wc_2.Run(vector_3);
    wc_2.BackPropagate(vector_2);
    wc_2.Update(strategy);

    // Expected parameters after a single unit-rate update; each weight delta
    // is (gradient entry) * (input entry), per the fixture values.
    NetworkVector expectedBiases = new NetworkVector(new double[] { 0, 0 });
    WeightsMatrix expectedWeights = new WeightsMatrix(new double[,]
    {
        { 1 - (11 * 111), 2 - (11 * 112), 3 - (11 * 113) },
        { 2 - (12 * 111), 3 - (12 * 112), 4 - (12 * 113) }
    });
    Assert.AreEqual(expectedBiases, wc_2.Biases);
    Assert.AreEqual(expectedWeights, wc_2.Weights);
}
public void CanMakeAndReadZeroNetworkMatrix()
{
    const int outputCount = 2;
    const int inputCount = 3;

    WeightsMatrix matrix = new WeightsMatrix(outputCount, inputCount);
    double[,] values = matrix.ToArray();

    // Dimensions must round-trip and every entry must start at zero.
    Assert.AreEqual(outputCount, matrix.NumberOfOutputs);
    Assert.AreEqual(inputCount, matrix.NumberOfInputs);
    for (int row = 0; row < outputCount; row++)
    {
        for (int col = 0; col < inputCount; col++)
        {
            Assert.AreEqual(0.0, values[row, col]);
        }
    }
}
public void CanMultiply()
{
    // The outer product of (1,0) and (0,1) is [[0,1],[0,0]].
    NetworkVector left = new NetworkVector(new double[] { 1, 0 });
    NetworkVector right = new NetworkVector(new double[] { 0, 1 });

    WeightsMatrix product = left.OuterProduct(right);
    double[,] values = product.ToArray();

    Assert.AreEqual(0, values[0, 0]);
    Assert.AreEqual(1, values[0, 1]);
    Assert.AreEqual(0, values[1, 0]);
    Assert.AreEqual(0, values[1, 1]);
    // CLEANUP: removed a trailing block of commented-out scratch code
    // (a 1000-element LeftMultiply probe) that asserted nothing.
}
public void TrainOnline_SmallNet_Correct()
{
    // Arrange: a 3-input, 2-hidden, 1-output linear test network.
    int inputCount = 3;
    int hiddenNeurons = 2;
    int outputNeurons = 1;
    LinearTwoLayerTestNetwork network = new LinearTwoLayerTestNetwork(inputCount, hiddenNeurons, outputNeurons);

    List<VectorPair> trainingData = new List<VectorPair>
    {
        new VectorPair(new NetworkVector(new double[] { 0, 0, 0 }), new NetworkVector(new double[] { 1 })),
        new VectorPair(new NetworkVector(new double[] { 1, 0, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 0, 1, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 1, 1, 0 }), new NetworkVector(new double[] { 1 }))
    };

    // Act: one full online training run.
    NetworkTrainer trainer = new OnlineNetworkTrainer(network, trainingData);
    trainer.Train();

    // Assert: hand-computed (diverging) parameter values after online training.
    WeightsMatrix expectedInputWeights = new WeightsMatrix(new double[,] { { -35499715, -35499260, 1 }, { -35499715, -35499260, 1 } });
    NetworkVector expectedInputBiases = new NetworkVector(new double[] { -35499265, -35499265 });
    WeightsMatrix expectedOutputWeights = new WeightsMatrix(new double[,] { { -224831362, -224831362 } });
    NetworkVector expectedOutputBiases = new NetworkVector(new double[] { -251825 });
    Assert.AreEqual(expectedInputWeights, network.InputLayer.Weights);
    Assert.AreEqual(expectedInputBiases, network.InputLayer.Biases);
    Assert.AreEqual(expectedOutputWeights, network.OutputLayer.Weights);
    Assert.AreEqual(expectedOutputBiases, network.OutputLayer.Biases);
}
public void TrainBatch_SmallNet_Correct()
{
    // Arrange: a 3-input, 2-hidden, 1-output linear test network.
    int inputCount = 3;
    int hiddenNeurons = 2;
    int outputNeurons = 1;
    LinearTwoLayerTestNetwork network = new LinearTwoLayerTestNetwork(inputCount, hiddenNeurons, outputNeurons);

    List<VectorPair> trainingData = new List<VectorPair>
    {
        new VectorPair(new NetworkVector(new double[] { 0, 0, 0 }), new NetworkVector(new double[] { 1 })),
        new VectorPair(new NetworkVector(new double[] { 1, 0, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 0, 1, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 1, 1, 0 }), new NetworkVector(new double[] { 1 }))
    };

    // Act: one full batch training run.
    NetworkTrainer trainer = new BatchNetworkTrainer(network, trainingData);
    trainer.Train();

    // Assert: hand-computed parameter values after one batch update.
    WeightsMatrix expectedInputWeights = new WeightsMatrix(new double[,] { { -4, -4, 1 }, { -4, -4, 1 } });
    NetworkVector expectedInputBiases = new NetworkVector(new double[] { -6, -6 });
    WeightsMatrix expectedOutputWeights = new WeightsMatrix(new double[,] { { -9, -9 } });
    NetworkVector expectedOutputBiases = new NetworkVector(new double[] { -6 });
    Assert.AreEqual(expectedInputWeights, network.InputLayer.Weights);
    Assert.AreEqual(expectedInputBiases, network.InputLayer.Biases);
    Assert.AreEqual(expectedOutputWeights, network.OutputLayer.Weights);
    Assert.AreEqual(expectedOutputBiases, network.OutputLayer.Biases);
}
public void CanRunOnline_LogisticLayer()
{
    // Smoke test: training a logistic layer on a single pair must not throw.
    WeightsMatrix weights = new WeightsMatrix(new double[,] { { 1, 1 } });
    Layer logisticLayer = Layer.CreateLogisticLayer(weights);
    TrainingCollection trainingVectors = new TrainingCollection
    {
        new VectorPair(new NetworkVector(new double[] { 0, 0 }), new NetworkVector(new double[] { 1 }))
    };

    Trainer trainer = new Trainer(logisticLayer, new SquaredError(), new GradientDescent());
    trainer.Train(trainingVectors);
}
public void CanSubtract()
{
    const int rowCount = 2;
    const int columnCount = 3;
    WeightsMatrix matrix = new WeightsMatrix(new double[,] { { 0, 1, 2 }, { 1, 2, 3 } });

    // Subtracting a matrix from itself must zero every entry in place.
    matrix.Subtract(matrix);

    double[,] values = matrix.ToArray();
    for (int row = 0; row < rowCount; row++)
    {
        for (int col = 0; col < columnCount; col++)
        {
            Assert.AreEqual(0, values[row, col]);
        }
    }
}
public void CanBack2()
{
    wc_2.Run(vector_3);

    // Hand-computed gradients; the factors below come from the fixture
    // (see the test-class setup for wc_2, vector_2 and vector_3).
    NetworkVector expectedInputGradient = new NetworkVector(new double[]
    {
        11 * 1 + 12 * 2,
        11 * 2 + 12 * 3,
        11 * 3 + 12 * 4
    });
    NetworkVector expectedBiasesGradient = new NetworkVector(new double[] { 11, 12 });
    WeightsMatrix expectedWeightsGradient = new WeightsMatrix(new double[,]
    {
        { 11 * 111, 11 * 112, 11 * 113 },
        { 12 * 111, 12 * 112, 12 * 113 }
    });

    Assert.AreEqual(expectedInputGradient, wc_2.InputGradient(vector_2));
    Assert.AreEqual(expectedBiasesGradient, wc_2.BiasesGradient(vector_2));
    Assert.AreEqual(expectedWeightsGradient, wc_2.WeightsGradient(vector_2, vector_3));
}
public void TrainOnline_LinearLayer_CorrectOnePass()
{
    // Arrange: linear layer with weights (1, 1) and the XNOR-style truth table.
    WeightsMatrix matrix = new WeightsMatrix(new double[,] { { 1, 1 } });
    Layer linearLayer = Layer.CreateLinearLayer(matrix);
    TrainingCollection trainingVectors = new TrainingCollection
    {
        new VectorPair(new NetworkVector(new double[] { 0, 0 }), new NetworkVector(new double[] { 1 })),
        new VectorPair(new NetworkVector(new double[] { 1, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 0, 1 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 1, 1 }), new NetworkVector(new double[] { 1 }))
    };
    Trainer trainer = new Trainer(linearLayer, new SquaredError(), new GradientDescent());

    // Act: one online pass, each pair as its own singleton batch.
    foreach (TrainingCollection singleton in trainingVectors.AsSingletons())
    {
        trainer.Train(singleton);
    }

    // Assert: hand-computed parameters after one online pass.
    WeightsMatrix expectedWeights = new WeightsMatrix(new double[,] { { 1, 3 } });
    NetworkVector expectedBiases = new NetworkVector(new double[] { 1 });
    Assert.AreEqual(expectedBiases, linearLayer.Biases);
    Assert.AreEqual(expectedWeights, linearLayer.Weights);
}
public void BatchTrainCorrectThreePasses_WC()
{
    // Arrange: combiner with weights (1, 1) and the XNOR-style truth table.
    WeightsMatrix matrix = new WeightsMatrix(new double[,] { { 1, 1 } });
    WeightedCombiner wc = new WeightedCombiner(matrix);
    TrainingCollection trainingVectors = new TrainingCollection
    {
        new VectorPair(new NetworkVector(new double[] { 0, 0 }), new NetworkVector(new double[] { 1 })),
        new VectorPair(new NetworkVector(new double[] { 1, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 0, 1 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 1, 1 }), new NetworkVector(new double[] { 1 }))
    };
    Trainer trainer = new Trainer(wc, new SquaredError(), new GradientDescent());

    // Act: three full-batch passes over the same collection.
    for (int pass = 0; pass < 3; pass++)
    {
        trainer.Train(trainingVectors);
    }

    // Assert: hand-computed parameters after three batch updates.
    WeightsMatrix expectedWeights = new WeightsMatrix(new double[,] { { -37, -37 } });
    NetworkVector expectedBiases = new NetworkVector(new double[] { -62 });
    Assert.AreEqual(expectedBiases, wc.Biases);
    Assert.AreEqual(expectedWeights, wc.Weights);
}
//private NetComponent _softMaxLayer;
#endregion

#region constructors
/// <summary>
/// Wires the fourth-word prediction network: a bank of shared linear embedding
/// layers (one per input word), a logistic hidden layer, and a linear output
/// layer, all registered as trainable components. All weights start random.
/// </summary>
public FourthWordNetwork()
{
    // One embedding layer shared across the __inputWords positions via a bank.
    WeightsMatrix embeddingWeights = MatrixProvider.GetRandom(__embeddingOutputs, __inputs);
    Layer embedding = Layer.CreateLinearLayer(embeddingWeights);
    _embeddingLayer = new TrainableComponentBank(embedding, __inputWords);

    // Hidden layer consumes the concatenated embeddings of all input words,
    // hence the __embeddingOutputs * __inputWords input width.
    WeightsMatrix hiddenWeights = MatrixProvider.GetRandom(__hiddenOutputs, __embeddingOutputs * __inputWords);
    _hiddenLayer = Layer.CreateLogisticLayer(hiddenWeights);

    WeightsMatrix outputweights = MatrixProvider.GetRandom(__outputs, __hiddenOutputs);
    _outputLayer = Layer.CreateLinearLayer(outputweights);
    // Softmax stage is currently disabled (see the commented field above).
    //_softMaxLayer = new SoftMaxUnit(__outputs);

    // Register the layers in forward order; all three are trainable.
    this.AddTrainable(_embeddingLayer);
    this.AddTrainable(_hiddenLayer);
    this.AddTrainable(_outputLayer);
    //this.AddFixed(_softMaxLayer);
}
public void BatchTrainCorrectOnePass_LinearLayer()
{
    // Arrange: linear layer with weights (1, 1) and the XNOR-style truth table.
    WeightsMatrix matrix = new WeightsMatrix(new double[,] { { 1, 1 } });
    Layer linearLayer = Layer.CreateLinearLayer(matrix);
    TrainingCollection trainingVectors = new TrainingCollection
    {
        new VectorPair(new NetworkVector(new double[] { 0, 0 }), new NetworkVector(new double[] { 1 })),
        new VectorPair(new NetworkVector(new double[] { 1, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 0, 1 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 1, 1 }), new NetworkVector(new double[] { 1 }))
    };
    Trainer trainer = new Trainer(linearLayer, new SquaredError(), new GradientDescent());

    // Act: a single full-batch pass.
    trainer.Train(trainingVectors);

    // Assert: hand-computed parameters after one batch update.
    WeightsMatrix expectedWeights = new WeightsMatrix(new double[,] { { -1, -1 } });
    NetworkVector expectedBiases = new NetworkVector(new double[] { -2 });
    Assert.AreEqual(expectedBiases, linearLayer.Biases);
    Assert.AreEqual(expectedWeights, linearLayer.Weights);
}
public void CanUpdateBatch()
{
    AdaptationStrategy strategy = new GradientDescent(1.0, 1);

    // Run forward on the fixture batch, back-propagate, capture the input
    // gradient, then apply the update.
    VectorBatch output = wc_2.Run(input_batch);
    wc_2.BackPropagate(gradient_batch);
    VectorBatch inputGradient = wc_2.InputGradient(gradient_batch);
    wc_2.Update(strategy);

    // Hand-computed parameters for the fixture after one batch update.
    NetworkVector expectedBiases = new NetworkVector(new double[] { 8, 7 });
    WeightsMatrix expectedWeights = new WeightsMatrix(new double[,] { { -4, -6, -8 }, { -6, -10, -14 } });
    Assert.AreEqual(expectedBiases, wc_2.Biases);
    Assert.AreEqual(expectedWeights, wc_2.Weights);

    // The input gradient must match the precomputed fixture element by element.
    for (int index = 0; index < inputGradient.Count; index++)
    {
        Assert.AreEqual(inputgradient_check[index], inputGradient[index]);
    }
}
public void TrainBatch_SmallChain_CorrectOnePass()
{
    // Arrange: a two-layer chain (3 inputs -> 2 neurons -> 1 neuron) with all
    // weights initialised to 1 and zero biases.
    int inputCount = 3;
    int hiddenNeurons = 2;
    int outputNeurons = 1;

    double[,] hiddenWeights = new double[hiddenNeurons, inputCount];
    for (int row = 0; row < hiddenNeurons; row++)
    {
        for (int col = 0; col < inputCount; col++)
        {
            hiddenWeights[row, col] = 1;
        }
    }

    double[,] finalWeights = new double[outputNeurons, hiddenNeurons];
    for (int row = 0; row < outputNeurons; row++)
    {
        for (int col = 0; col < hiddenNeurons; col++)
        {
            finalWeights[row, col] = 1;
        }
    }

    Layer InputLayer = Layer.CreateLinearLayer(new WeightsMatrix(hiddenWeights), new NetworkVector(hiddenNeurons));
    Layer OutputLayer = Layer.CreateLinearLayer(new WeightsMatrix(finalWeights), new NetworkVector(outputNeurons));
    NetComponentChain network = new NetComponentChain();
    network.AddTrainable(InputLayer);
    network.AddTrainable(OutputLayer);

    TrainingCollection trainingVectors = new TrainingCollection
    {
        new VectorPair(new NetworkVector(new double[] { 0, 0, 0 }), new NetworkVector(new double[] { 1 })),
        new VectorPair(new NetworkVector(new double[] { 1, 0, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 0, 1, 0 }), new NetworkVector(new double[] { 0 })),
        new VectorPair(new NetworkVector(new double[] { 1, 1, 0 }), new NetworkVector(new double[] { 1 }))
    };

    // Act: a single full-batch pass over the chain.
    Trainer trainer = new Trainer(network, new SquaredError(), new GradientDescent());
    trainer.Train(trainingVectors);

    // Assert: hand-computed parameters for both layers after one batch update.
    WeightsMatrix expectedInputWeights = new WeightsMatrix(new double[,] { { -4, -4, 1 }, { -4, -4, 1 } });
    NetworkVector expectedInputBiases = new NetworkVector(new double[] { -6, -6 });
    WeightsMatrix expectedOutputWeights = new WeightsMatrix(new double[,] { { -9, -9 } });
    NetworkVector expectedOutputBiases = new NetworkVector(new double[] { -6 });
    Assert.AreEqual(expectedInputWeights, InputLayer.Weights);
    Assert.AreEqual(expectedInputBiases, InputLayer.Biases);
    Assert.AreEqual(expectedOutputWeights, OutputLayer.Weights);
    Assert.AreEqual(expectedOutputBiases, OutputLayer.Biases);
}