public List<double> Execute(List<double> inputValues, List<double> desiredOutputs)
{
    var outputs = new List<double>();

    if (inputValues.Count != _numberInputs)
    {
        Debug.Log("ERROR: Number of inputs must be " + _numberInputs);
        return outputs;
    }

    var inputs = new List<double>(inputValues);
    bool firstRun = true;

    // Walk through every layer: all hidden layers plus the output layer.
    for (int i = 0; i < _numberHiddenLayers + 1; i++)
    {
        // On the first pass feed the raw input values into the first layer;
        // afterwards, each layer consumes the outputs of the layer before it.
        if (firstRun)
        {
            firstRun = false;
        }
        else
        {
            inputs = new List<double>(outputs);
        }

        var layer = _layers[i];
        outputs.Clear();

        for (int j = 0; j < layer.Neurons.Count; j++)
        {
            var neuron = layer.Neurons[j];
            double dotProduct = 0;
            neuron.Inputs.Clear();

            for (int k = 0; k < neuron.NumberInputs; k++)
            {
                neuron.Inputs.Add(inputs[k]);
                // Accumulate the weighted sum of inputs: the same dot product a single perceptron computes.
                dotProduct += neuron.Weights[k] * inputs[k];
            }

            dotProduct -= neuron.Bias;

            if (layer.LayerType == NeuronLayer.eLayerType.HiddenLayer)
            {
                neuron.Output = _activationFunctionHiddenLayers.Execute(dotProduct);
            }
            else
            {
                neuron.Output = _activationFunctionInputOutput.Execute(dotProduct);
            }

            outputs.Add(neuron.Output);
        }
    }

    // The network's answer for the given inputs.
    return outputs;
}
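// The two activation objects used above (_activationFunctionHiddenLayers and
// _activationFunctionInputOutput) only need to expose an Execute(double) method.
// A minimal sketch of that shape follows; the interface name and the sigmoid
// implementation are assumptions for illustration, not part of the original code.
public interface IActivationFunction
{
    double Execute(double value);
}

public class SigmoidActivation : IActivationFunction
{
    // Squashes the weighted sum into the (0, 1) range.
    public double Execute(double value)
    {
        return 1.0 / (1.0 + System.Math.Exp(-value));
    }
}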
public List<double> Execute(List<double> inputValues, List<double> desiredOutputs)
{
    var outputs = new List<double>();

    if (inputValues.Count != _numberInputs)
    {
        throw new InvalidOperationException($"The number of inputs provided does not match the expected count of {_numberInputs}");
    }

    var inputs = new List<double>(inputValues);
    bool firstRun = true;

    foreach (var layer in _layers)
    {
        // On the first pass feed the raw input values into the first layer;
        // afterwards, each layer consumes the outputs of the layer before it.
        if (firstRun)
        {
            firstRun = false;
        }
        else
        {
            inputs = new List<double>(outputs);
        }

        outputs.Clear();

        foreach (var neuron in layer.Neurons)
        {
            double dotProduct = 0;
            neuron.Inputs.Clear();

            for (int k = 0; k < neuron.NumberInputs; k++)
            {
                neuron.Inputs.Add(inputs[k]);
                // Accumulate the weighted sum of inputs: the same dot product a single perceptron computes.
                dotProduct += neuron.Weights[k] * inputs[k];
            }

            dotProduct -= neuron.Bias;

            if (layer.LayerType == NeuronLayer.eLayerType.HiddenLayer)
            {
                neuron.Output = _activationFunctionHiddenLayers.Execute(dotProduct);
            }
            else
            {
                neuron.Output = _activationFunctionInputOutput.Execute(dotProduct);
            }

            outputs.Add(neuron.Output);
        }
    }

    // After the forward pass, adjust the weights toward the desired outputs.
    UpdateWeights(outputs, desiredOutputs);

    // The network's answer for the given inputs.
    return outputs;
}
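// A hedged sketch of how this training variant might be driven. The 'network'
// instance, its type, the XOR-style data set, and the epoch count are hypothetical;
// only the Execute(inputs, desiredOutputs) signature comes from the method above.
// Assumes using System.Collections.Generic;
var trainingSet = new List<(List<double> Inputs, List<double> Desired)>
{
    (new List<double> { 0, 0 }, new List<double> { 0 }),
    (new List<double> { 0, 1 }, new List<double> { 1 }),
    (new List<double> { 1, 0 }, new List<double> { 1 }),
    (new List<double> { 1, 1 }, new List<double> { 0 }),
};

for (int epoch = 0; epoch < 1000; epoch++)
{
    foreach (var sample in trainingSet)
    {
        // Each call runs one forward pass and then calls UpdateWeights
        // to nudge the weights toward the desired outputs.
        network.Execute(sample.Inputs, sample.Desired);
    }
}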
/// <summary>
/// Runs the network and outputs the values of all the output neurons
/// </summary>
/// <returns>The output values of the neurons in the final layer</returns>
public double[] RunNetwork(double[] inputs)
{
    var inputsLength = Neurons[0].Length;
    if (inputs.Length != inputsLength)
    {
        throw new InvalidNumberOfNeuronsException("Number of inputs and input neurons does not match");
    }

    // Copy the input values straight into the input layer's neurons.
    for (int n = 0; n < Neurons[0].Length; n++)
    {
        var neuron = Neurons[0][n];
        neuron.Output = inputs[n];
    }

    // Propagate forward through every remaining layer.
    for (int l = 1; l < Neurons.Length; l++)
    {
        var layer = Neurons[l];

        for (int n = 0; n < layer.Length; n++)
        {
            var neuron = Neurons[l][n];
            var outputSum = 0.0;

            // Weighted sum of the previous layer's outputs.
            for (int w = 0; w < neuron.Weights.Length; w++)
            {
                var layerPrevious = Neurons[l - 1];
                outputSum += neuron.Weights[w] * layerPrevious[w].Output;
            }

            outputSum += neuron.BiasWeight;
            neuron.Output = ActivatorFunction.Execute(outputSum);
        }
    }

    // Collect the activations of the final (output) layer.
    var outputNeurons = Neurons[Neurons.Length - 1];
    var outputNeuronsLength = outputNeurons.Length;
    var output = new double[outputNeuronsLength];

    for (int n = 0; n < outputNeuronsLength; n++)
    {
        Neuron neuron = outputNeurons[n];
        output[n] = neuron.Output;
    }

    return output;
}
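// Short usage sketch: feed one sample through a trained network and read the
// output-layer activations. The 'network' instance and the input values are
// placeholders; only RunNetwork's signature is taken from the method above.
double[] sample = { 0.25, 0.75 };
double[] prediction = network.RunNetwork(sample);

for (int i = 0; i < prediction.Length; i++)
{
    System.Console.WriteLine($"Output neuron {i}: {prediction[i]:F4}");
}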