public method getValue(int i) : double — returns the component of the vector at index i.
Parameter: i (int) — index of the component to read.
Result: double — the value stored at that index.
	//
	// PRIVATE METHODS
	//

	// Builds a diagonal matrix whose diagonal entries are the derivatives of the
	// layer's activation function, evaluated at each component of the induced field.
	// Used during backpropagation to scale the error signal.
	private Matrix createDerivativeMatrix(Vector lastInducedField) {
		List<Double> derivatives = new List<Double>();
		int n = lastInducedField.size();
		for (int idx = 0; idx < n; idx++) {
			double field = lastInducedField.getValue(idx);
			derivatives.Add(layer.getActivationFunction().deriv(field));
		}
		return Matrix.createDiagonalMatrix(derivatives);
	}
Example #2
 // Component-wise vector addition: returns a new Vector equal to this + v.
 // Neither operand is modified. Assumes v has at least size() components —
 // NOTE(review): no length check is performed; verify callers guarantee this.
 public Vector plus(Vector v)
 {
     int n = size();
     Vector sum = new Vector(n);
     for (int idx = 0; idx < n; idx++)
     {
         sum.setValue(idx, getValue(idx) + v.getValue(idx));
     }
     return sum;
 }
Example #3
        // Verifies feedForward against example 11.14 of "Neural Network Design"
        // (Hagan, Demuth & Beale): a 1-input / 2-hidden / 1-output network with
        // known weights, biases, and expected activations.
        public void testFeedForward()
        {
            // Hidden layer: 2 neurons, 1 input, log-sigmoid activation.
            Matrix hiddenWeights = new Matrix(2, 1);
            hiddenWeights.set(0, 0, -0.27);
            hiddenWeights.set(1, 0, -0.41);

            Vector hiddenBias = new Vector(2);
            hiddenBias.setValue(0, -0.48);
            hiddenBias.setValue(1, -0.13);

            Layer hiddenLayer = new Layer(hiddenWeights, hiddenBias,
                    new LogSigActivationFunction());

            Vector input = new Vector(1);
            input.setValue(0, 1);

            // Expected hidden-layer activations from the textbook example.
            Vector expectedHidden = new Vector(2);
            expectedHidden.setValue(0, 0.321);
            expectedHidden.setValue(1, 0.368);

            Vector hiddenOut = hiddenLayer.feedForward(input);
            Assert.AreEqual(expectedHidden.getValue(0), hiddenOut.getValue(0), 0.001);
            Assert.AreEqual(expectedHidden.getValue(1), hiddenOut.getValue(1), 0.001);

            // Output layer: 1 neuron fed by the 2 hidden activations, linear activation.
            Matrix outputWeights = new Matrix(1, 2);
            outputWeights.set(0, 0, 0.09);
            outputWeights.set(0, 1, -0.17);

            Vector outputBias = new Vector(1);
            outputBias.setValue(0, 0.48);

            Layer outputLayer = new Layer(outputWeights, outputBias,
                    new PureLinearActivationFunction());
            // The hidden layer caches its activations; reuse them as the next input.
            Vector outputIn = hiddenLayer.getLastActivationValues();
            Vector networkOut = outputLayer.feedForward(outputIn);
            Assert.AreEqual(0.446, networkOut.getValue(0), 0.001);
        }
Example #4
	/**
	 * Propagates an input vector through this layer: computes the induced
	 * field v = W * x + b, applies the activation function component-wise,
	 * and returns the resulting activation vector. Side effects: caches the
	 * input, the induced field, and the activations for later use (e.g. by
	 * backpropagation).
	 *
	 * @param inputVector the input to this layer
	 * @return the layer's activation values
	 */
	public Vector feedForward(Vector inputVector) {
		lastInput = inputVector;
		// W * x + b comes back as a column matrix; flatten it to a Vector.
		Matrix fieldMatrix = weightMatrix.times(inputVector).plus(biasVector);

		int neurons = numberOfNeurons();
		Vector fieldVector = new Vector(neurons);
		for (int n = 0; n < neurons; n++) {
			fieldVector.setValue(n, fieldMatrix.get(n, 0));
		}
		// Defensive copies so later mutation of the locals cannot corrupt the cache.
		lastInducedField = fieldVector.copyVector();

		Vector output = new Vector(neurons);
		for (int n = 0; n < neurons; n++) {
			output.setValue(n,
					activationFunction.activation(fieldVector.getValue(n)));
		}
		lastActivationValues = output.copyVector();
		return output;
	}