Example #1
 public void setNeuralNetwork(FunctionApproximator fapp)
 {
     FeedForwardNeuralNetwork ffnn = (FeedForwardNeuralNetwork)fapp;
     this.hiddenLayer = ffnn.getHiddenLayer();
     this.outputLayer = ffnn.getOutputLayer();
     this.hiddenSensitivity = new LayerSensitivity(hiddenLayer);
     this.outputSensitivity = new LayerSensitivity(outputLayer);
 }
Example #2
        public void setNeuralNetwork(FunctionApproximator fapp)
        {
            FeedForwardNeuralNetwork ffnn = (FeedForwardNeuralNetwork)fapp;

            this.hiddenLayer       = ffnn.getHiddenLayer();
            this.outputLayer       = ffnn.getOutputLayer();
            this.hiddenSensitivity = new LayerSensitivity(hiddenLayer);
            this.outputSensitivity = new LayerSensitivity(outputLayer);
        }
Example #3
	public Matrix sensitivityMatrixFromSucceedingLayer(
			LayerSensitivity nextLayerSensitivity) {
		Layer nextLayer = nextLayerSensitivity.getLayer();
		Matrix derivativeMatrix = createDerivativeMatrix(layer
				.getLastInducedField());
		Matrix weightTranspose = nextLayer.getWeightMatrix().transpose();
		Matrix calculatedSensitivityMatrix = derivativeMatrix.times(
				weightTranspose).times(
				nextLayerSensitivity.getSensitivityMatrix());
		sensitivityMatrix = calculatedSensitivityMatrix.copy();
		return sensitivityMatrix;
	}
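Example #3 (and its twin, Example #7 below) implements the standard backward recursion for hidden-layer sensitivities: the matrix it stores and returns is F'(n) * W_nextᵀ * S_next, i.e. the succeeding layer's sensitivity matrix propagated back through that layer's transposed weight matrix and scaled by the derivative of this layer's last induced field.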
Example #4
 public Matrix calculateWeightUpdates(LayerSensitivity layerSensitivity,
         Vector previousLayerActivationOrInput, double alpha, double momentum)
 {
     Layer layer = layerSensitivity.getLayer();
     Matrix activationTranspose = previousLayerActivationOrInput.transpose();
     Matrix momentumLessUpdate = layerSensitivity.getSensitivityMatrix()
             .times(activationTranspose).times(alpha).times(-1.0);
     Matrix updateWithMomentum = layer.getLastWeightUpdateMatrix().times(
             momentum).plus(momentumLessUpdate.times(1.0 - momentum));
     layer.acceptNewWeightUpdate(updateWithMomentum.copy());
     return updateWithMomentum;
 }
Example #5
        public static Matrix calculateWeightUpdates(
            LayerSensitivity layerSensitivity,
            Vector previousLayerActivationOrInput, double alpha)
        {
            Layer  layer = layerSensitivity.getLayer();
            Matrix activationTranspose = previousLayerActivationOrInput.transpose();
            Matrix weightUpdateMatrix  = layerSensitivity.getSensitivityMatrix()
                                         .times(activationTranspose).times(alpha).times(-1.0);

            layer.acceptNewWeightUpdate(weightUpdateMatrix.copy());
            return(weightUpdateMatrix);
        }
Example #6
        public Matrix calculateWeightUpdates(LayerSensitivity layerSensitivity,
                                             Vector previousLayerActivationOrInput, double alpha, double momentum)
        {
            Layer  layer = layerSensitivity.getLayer();
            Matrix activationTranspose = previousLayerActivationOrInput.transpose();
            Matrix momentumLessUpdate  = layerSensitivity.getSensitivityMatrix()
                                         .times(activationTranspose).times(alpha).times(-1.0);
            Matrix updateWithMomentum = layer.getLastWeightUpdateMatrix().times(
                momentum).plus(momentumLessUpdate.times(1.0 - momentum));

            layer.acceptNewWeightUpdate(updateWithMomentum.copy());
            return(updateWithMomentum);
        }
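Examples #4 and #6 blend the plain gradient step with the previous update: the matrix they return is momentum * lastWeightUpdate + (1 - momentum) * (-alpha * S * aᵀ), where S is the layer's sensitivity matrix and a is the preceding layer's activation (or the raw input vector for the first layer). With momentum = 0 this reduces to the static, momentum-free overload shown in Examples #5 and #14.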
Example #7
        public Matrix sensitivityMatrixFromSucceedingLayer(
            LayerSensitivity nextLayerSensitivity)
        {
            Layer  nextLayer        = nextLayerSensitivity.getLayer();
            Matrix derivativeMatrix = createDerivativeMatrix(layer
                                                             .getLastInducedField());
            Matrix weightTranspose             = nextLayer.getWeightMatrix().transpose();
            Matrix calculatedSensitivityMatrix = derivativeMatrix.times(
                weightTranspose).times(
                nextLayerSensitivity.getSensitivityMatrix());

            sensitivityMatrix = calculatedSensitivityMatrix.copy();
            return(sensitivityMatrix);
        }
Example #8
        public static Vector calculateBiasUpdates(
            LayerSensitivity layerSensitivity, double alpha)
        {
            Layer  layer            = layerSensitivity.getLayer();
            Matrix biasUpdateMatrix = layerSensitivity.getSensitivityMatrix()
                                      .times(alpha).times(-1.0);

            Vector result = new Vector(biasUpdateMatrix.getRowDimension());

            for (int i = 0; i < biasUpdateMatrix.getRowDimension(); i++)
            {
                result.setValue(i, biasUpdateMatrix.get(i, 0));
            }
            layer.acceptNewBiasUpdate(result.copyVector());
            return(result);
        }
Example #9
        public Vector calculateBiasUpdates(LayerSensitivity layerSensitivity,
                                           double alpha, double momentum)
        {
            Layer  layer = layerSensitivity.getLayer();
            Matrix biasUpdateMatrixWithoutMomentum = layerSensitivity
                                                     .getSensitivityMatrix().times(alpha).times(-1.0);

            Matrix biasUpdateMatrixWithMomentum = layer.getLastBiasUpdateVector()
                                                  .times(momentum).plus(
                biasUpdateMatrixWithoutMomentum.times(1.0 - momentum));
            Vector result = new Vector(biasUpdateMatrixWithMomentum
                                       .getRowDimension());

            for (int i = 0; i < biasUpdateMatrixWithMomentum.getRowDimension(); i++)
            {
                result.setValue(i, biasUpdateMatrixWithMomentum.get(i, 0));
            }
            layer.acceptNewBiasUpdate(result.copyVector());
            return(result);
        }
Example #10
        public void testSensitivityMatrixCalculationFromSucceedingLayer()
        {
            Matrix weightMatrix1 = new Matrix(2, 1);
            weightMatrix1.set(0, 0, -0.27);
            weightMatrix1.set(1, 0, -0.41);

            Vector biasVector1 = new Vector(2);
            biasVector1.setValue(0, -0.48);
            biasVector1.setValue(1, -0.13);

            Layer layer1 = new Layer(weightMatrix1, biasVector1,
                    new LogSigActivationFunction());
            LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

            Vector inputVector1 = new Vector(1);
            inputVector1.setValue(0, 1);

            layer1.feedForward(inputVector1);

            Matrix weightMatrix2 = new Matrix(1, 2);
            weightMatrix2.set(0, 0, 0.09);
            weightMatrix2.set(0, 1, -0.17);

            Vector biasVector2 = new Vector(1);
            biasVector2.setValue(0, 0.48);

            Layer layer2 = new Layer(weightMatrix2, biasVector2,
                    new PureLinearActivationFunction());
            Vector inputVector2 = layer1.getLastActivationValues();
            layer2.feedForward(inputVector2);

            Vector errorVector = new Vector(1);
            errorVector.setValue(0, 1.261);
            LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
            layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

            layer1Sensitivity
                    .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);
            Matrix sensitivityMatrix = layer1Sensitivity.getSensitivityMatrix();

            Assert.AreEqual(2, sensitivityMatrix.getRowDimension());
            Assert.AreEqual(1, sensitivityMatrix.getColumnDimension());
            Assert.AreEqual(-0.0495, sensitivityMatrix.get(0, 0), 0.001);
            Assert.AreEqual(0.0997, sensitivityMatrix.get(1, 0), 0.001);
        }
Example #11
        public void testWeightsAndBiasesUpdatedCorrectly()
        {
            Matrix weightMatrix1 = new Matrix(2, 1);
            weightMatrix1.set(0, 0, -0.27);
            weightMatrix1.set(1, 0, -0.41);

            Vector biasVector1 = new Vector(2);
            biasVector1.setValue(0, -0.48);
            biasVector1.setValue(1, -0.13);

            Layer layer1 = new Layer(weightMatrix1, biasVector1,
                    new LogSigActivationFunction());
            LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

            Vector inputVector1 = new Vector(1);
            inputVector1.setValue(0, 1);

            layer1.feedForward(inputVector1);

            Matrix weightMatrix2 = new Matrix(1, 2);
            weightMatrix2.set(0, 0, 0.09);
            weightMatrix2.set(0, 1, -0.17);

            Vector biasVector2 = new Vector(1);
            biasVector2.setValue(0, 0.48);

            Layer layer2 = new Layer(weightMatrix2, biasVector2,
                    new PureLinearActivationFunction());
            Vector inputVector2 = layer1.getLastActivationValues();
            layer2.feedForward(inputVector2);

            Vector errorVector = new Vector(1);
            errorVector.setValue(0, 1.261);
            LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
            layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

            layer1Sensitivity
                    .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);

            BackPropLearning.calculateWeightUpdates(layer2Sensitivity, layer1
                    .getLastActivationValues(), 0.1);

            BackPropLearning.calculateBiasUpdates(layer2Sensitivity, 0.1);

            BackPropLearning.calculateWeightUpdates(layer1Sensitivity,
                    inputVector1, 0.1);

            BackPropLearning.calculateBiasUpdates(layer1Sensitivity, 0.1);

            layer2.updateWeights();
            Matrix newWeightMatrix2 = layer2.getWeightMatrix();
            Assert.AreEqual(0.171, newWeightMatrix2.get(0, 0), 0.001);
            Assert.AreEqual(-0.0772, newWeightMatrix2.get(0, 1), 0.001);

            layer2.updateBiases();
            Vector newBiasVector2 = layer2.getBiasVector();
            Assert.AreEqual(0.7322, newBiasVector2.getValue(0), 0.00001);

            layer1.updateWeights();
            Matrix newWeightMatrix1 = layer1.getWeightMatrix();

            Assert.AreEqual(-0.265, newWeightMatrix1.get(0, 0), 0.001);
            Assert.AreEqual(-0.419, newWeightMatrix1.get(1, 0), 0.001);

            layer1.updateBiases();
            Vector newBiasVector1 = layer1.getBiasVector();

            Assert.AreEqual(-0.475, newBiasVector1.getValue(0), 0.001);
            Assert.AreEqual(-0.139, newBiasVector1.getValue(1), 0.001);
        }
Example #12
        public void testBiasUpdateMatrixesFormedCorrectly()
        {
            Matrix weightMatrix1 = new Matrix(2, 1);
            weightMatrix1.set(0, 0, -0.27);
            weightMatrix1.set(1, 0, -0.41);

            Vector biasVector1 = new Vector(2);
            biasVector1.setValue(0, -0.48);
            biasVector1.setValue(1, -0.13);

            Layer layer1 = new Layer(weightMatrix1, biasVector1,
                    new LogSigActivationFunction());
            LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);

            Vector inputVector1 = new Vector(1);
            inputVector1.setValue(0, 1);

            layer1.feedForward(inputVector1);

            Matrix weightMatrix2 = new Matrix(1, 2);
            weightMatrix2.set(0, 0, 0.09);
            weightMatrix2.set(0, 1, -0.17);

            Vector biasVector2 = new Vector(1);
            biasVector2.setValue(0, 0.48);

            Layer layer2 = new Layer(weightMatrix2, biasVector2,
                    new PureLinearActivationFunction());
            LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);
            Vector inputVector2 = layer1.getLastActivationValues();
            layer2.feedForward(inputVector2);

            Vector errorVector = new Vector(1);
            errorVector.setValue(0, 1.261);
            layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);

            layer1Sensitivity
                    .sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);

            Vector biasUpdateVector2 = BackPropLearning.calculateBiasUpdates(
                    layer2Sensitivity, 0.1);
            Assert.AreEqual(0.2522, biasUpdateVector2.getValue(0), 0.001);

            Vector lastBiasUpdateVector2 = layer2.getLastBiasUpdateVector();
            Assert.AreEqual(0.2522, lastBiasUpdateVector2.getValue(0), 0.001);

            Vector penultimateBiasUpdateVector2 = layer2
                    .getPenultimateBiasUpdateVector();
            Assert.AreEqual(0.0, penultimateBiasUpdateVector2.getValue(0),
                    0.001);

            Vector biasUpdateVector1 = BackPropLearning.calculateBiasUpdates(
                    layer1Sensitivity, 0.1);
            Assert.AreEqual(0.00495, biasUpdateVector1.getValue(0), 0.001);
            Assert.AreEqual(-0.00997, biasUpdateVector1.getValue(1), 0.001);

            Vector lastBiasUpdateVector1 = layer1.getLastBiasUpdateVector();

            Assert.AreEqual(0.00495, lastBiasUpdateVector1.getValue(0), 0.001);
            Assert.AreEqual(-0.00997, lastBiasUpdateVector1.getValue(1), 0.001);

            Vector penultimateBiasUpdateVector1 = layer1
                    .getPenultimateBiasUpdateVector();
            Assert.AreEqual(0.0, penultimateBiasUpdateVector1.getValue(0),
                    0.001);
            Assert.AreEqual(0.0, penultimateBiasUpdateVector1.getValue(1),
                    0.001);
        }
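The expected value 0.2522 in Example #12 follows from the momentum-free bias rule Δb = -alpha * s. With a purely linear output neuron (derivative 1), the test's error of 1.261 yields an output sensitivity of -2 * 1.261 = -2.522, assuming the usual s = -2 * F'(n) * e convention for the output layer, which is consistent with the test's numbers, so Δb = -0.1 * (-2.522) = 0.2522.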
Example #13
        public Vector calculateBiasUpdates(LayerSensitivity layerSensitivity,
                double alpha, double momentum)
        {
            Layer layer = layerSensitivity.getLayer();
            Matrix biasUpdateMatrixWithoutMomentum = layerSensitivity
                    .getSensitivityMatrix().times(alpha).times(-1.0);

            Matrix biasUpdateMatrixWithMomentum = layer.getLastBiasUpdateVector()
                    .times(momentum).plus(
                            biasUpdateMatrixWithoutMomentum.times(1.0 - momentum));
            Vector result = new Vector(biasUpdateMatrixWithMomentum
                    .getRowDimension());
            for (int i = 0; i < biasUpdateMatrixWithMomentum.getRowDimension(); i++)
            {
                result.setValue(i, biasUpdateMatrixWithMomentum.get(i, 0));
            }
            layer.acceptNewBiasUpdate(result.copyVector());
            return result;
        }
Example #14
 public static Matrix calculateWeightUpdates(
         LayerSensitivity layerSensitivity,
         Vector previousLayerActivationOrInput, double alpha)
 {
     Layer layer = layerSensitivity.getLayer();
     Matrix activationTranspose = previousLayerActivationOrInput.transpose();
     Matrix weightUpdateMatrix = layerSensitivity.getSensitivityMatrix()
             .times(activationTranspose).times(alpha).times(-1.0);
     layer.acceptNewWeightUpdate(weightUpdateMatrix.copy());
     return weightUpdateMatrix;
 }
Example #15
        public static Vector calculateBiasUpdates(
                LayerSensitivity layerSensitivity, double alpha)
        {
            Layer layer = layerSensitivity.getLayer();
            Matrix biasUpdateMatrix = layerSensitivity.getSensitivityMatrix()
                    .times(alpha).times(-1.0);

            Vector result = new Vector(biasUpdateMatrix.getRowDimension());
            for (int i = 0; i < biasUpdateMatrix.getRowDimension(); i++)
            {
                result.setValue(i, biasUpdateMatrix.get(i, 0));
            }
            layer.acceptNewBiasUpdate(result.copyVector());
            return result;
        }
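The test cases above (Examples #10 through #12) exercise the helpers one call at a time. The following is a minimal sketch, condensed from Example #11, that strings them together into a single momentum-free backpropagation pass over the same 1-2-1 network. It uses only the calls shown in the examples; the error value of 1.261 is taken from the tests rather than derived from a target output, and the method name is purely illustrative.

        public void singleBackPropPassSketch()
        {
            // Hidden layer: 2 neurons fed by 1 input, log-sigmoid activation.
            Matrix weightMatrix1 = new Matrix(2, 1);
            weightMatrix1.set(0, 0, -0.27);
            weightMatrix1.set(1, 0, -0.41);
            Vector biasVector1 = new Vector(2);
            biasVector1.setValue(0, -0.48);
            biasVector1.setValue(1, -0.13);
            Layer layer1 = new Layer(weightMatrix1, biasVector1,
                    new LogSigActivationFunction());

            // Output layer: 1 linear neuron fed by the 2 hidden activations.
            Matrix weightMatrix2 = new Matrix(1, 2);
            weightMatrix2.set(0, 0, 0.09);
            weightMatrix2.set(0, 1, -0.17);
            Vector biasVector2 = new Vector(1);
            biasVector2.setValue(0, 0.48);
            Layer layer2 = new Layer(weightMatrix2, biasVector2,
                    new PureLinearActivationFunction());

            LayerSensitivity layer1Sensitivity = new LayerSensitivity(layer1);
            LayerSensitivity layer2Sensitivity = new LayerSensitivity(layer2);

            // Forward pass: input through the hidden layer, then its activations
            // through the output layer.
            Vector inputVector = new Vector(1);
            inputVector.setValue(0, 1);
            layer1.feedForward(inputVector);
            layer2.feedForward(layer1.getLastActivationValues());

            // Output error; the tests above use this precomputed value.
            Vector errorVector = new Vector(1);
            errorVector.setValue(0, 1.261);

            // Backward pass: output sensitivity from the error, then the hidden
            // sensitivity backpropagated through the output layer's weights.
            layer2Sensitivity.sensitivityMatrixFromErrorMatrix(errorVector);
            layer1Sensitivity.sensitivityMatrixFromSucceedingLayer(layer2Sensitivity);

            // Momentum-free weight and bias updates (alpha = 0.1), then apply them.
            BackPropLearning.calculateWeightUpdates(layer2Sensitivity,
                    layer1.getLastActivationValues(), 0.1);
            BackPropLearning.calculateBiasUpdates(layer2Sensitivity, 0.1);
            BackPropLearning.calculateWeightUpdates(layer1Sensitivity, inputVector, 0.1);
            BackPropLearning.calculateBiasUpdates(layer1Sensitivity, 0.1);

            layer1.updateWeights();
            layer1.updateBiases();
            layer2.updateWeights();
            layer2.updateBiases();
        }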