/// <summary>
/// Iteratively calls the forward propagation method of each layer on the
/// output of the previous layer.
/// </summary>
/// <param name="input">The inputs to the neural network for the current problem.</param>
/// <returns>The activation vector produced by the final layer.</returns>
public override Vector<double> Predict(Vector<double> input)
{
    // The input layer performs no computation; its activations are the raw input.
    Layers[0].LayerActivations = CreateVector.DenseOfVector(input);

    for (int i = 1; i < Layers.Count; i++)
    {
        if (Debug)
        {
            Console.WriteLine("Input to Layer:{0}", i);
            Console.WriteLine(input);
            Console.WriteLine("WeightsMatrix:");
            Console.WriteLine(Weights[i - 1]);
            Console.WriteLine("Layer ActivationsSigmoid:{0}", i);
        }

        // If the previous layer has a bias neuron, append the constant bias
        // input (1.0) before multiplying by the weight matrix.
        // NOTE(review): NumberOfNeurons and input.Count look like integer counts,
        // making Math.Abs(diff) < 1 an equality test; kept as-is to preserve
        // behavior in case either operand can be fractional — confirm and
        // simplify to == if both are ints.
        if (Math.Abs(Layers[i - 1].NumberOfNeurons - input.Count) < 1 && Layers[i - 1].Bias)
        {
            var extended = input.ToList<double>();
            extended.Add(1);
            input = CreateVector.Dense(extended.ToArray());
        }

        // Each layer transforms the running activation vector in place of `input`.
        input = Layers[i].ForwardPropagation(input, Weights[i - 1], Debug);

        if (Debug)
        {
            Console.WriteLine("Output Of Layer:{0}", i);
            Console.WriteLine(input);
        }
    }

    return input;
}
/// <summary>
/// Computes the network response for <paramref name="input"/> by multiplying it
/// through each weight matrix in <paramref name="weights"/>, applying the
/// activation function selected by <paramref name="func"/> element-wise after
/// every multiplication.
/// </summary>
/// <param name="weights">Weight matrices ordered from the input layer outward.</param>
/// <param name="input">Input vector; its length must equal the column count of the first weight matrix.</param>
/// <param name="func">Key into <c>Activators.dict</c> selecting the activation function.</param>
/// <returns>The output vector of the final layer (a copy of <paramref name="input"/> when <paramref name="weights"/> is empty).</returns>
/// <exception cref="ArgumentNullException"><paramref name="weights"/> or <paramref name="input"/> is null.</exception>
/// <exception cref="ArgumentException">The input length does not match the first weight matrix.</exception>
public static Vector<double> Response(Matrix<double>[] weights, Vector<double> input, int func)
{
    if (weights == null)
    {
        throw new ArgumentNullException(nameof(weights));
    }

    if (input == null)
    {
        throw new ArgumentNullException(nameof(input));
    }

    // No layers: the response is simply a defensive copy of the input.
    // (The original indexed weights[0] unconditionally and would have thrown
    // IndexOutOfRangeException here.)
    if (weights.Length == 0)
    {
        return CreateVector.DenseOfVector(input);
    }

    if (input.Count != weights[0].ColumnCount)
    {
        // Specific exception type + diagnostic detail instead of bare Exception;
        // still caught by any existing catch (Exception) handlers.
        throw new ArgumentException(
            $"Dimension mismatch: input has {input.Count} elements but the first weight matrix expects {weights[0].ColumnCount}.",
            nameof(input));
    }

    var fx = Activators.dict[func];

    // Work on a copy so the caller's vector is never mutated.
    Vector<double> temp = CreateVector.DenseOfVector(input);

    for (int i = 0; i < weights.Length; i++)
    {
        temp = weights[i] * temp;

        // Apply the activation element-wise to the layer's weighted sums.
        for (int j = 0; j < temp.Count; j++)
        {
            temp[j] = fx(temp[j]);
        }
    }

    return temp;
}