// A one-node linear identity layer (1x1 weight matrix, weight 1) should
// map the zero vector to itself.
public void CanRunWithZeroInput()
{
    Layer identityLayer = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[,] { { 1 } }));
    NetComponentChain chain = new NetComponentChain(identityLayer);
    NeuralNet.NetworkVector zeroVector = new NeuralNet.NetworkVector(new double[] { 0 });

    NetworkVector result = chain.Run(zeroVector);

    Assert.AreEqual(zeroVector, result);
}
// Run must reject an input vector whose dimension does not match the
// chain's number of inputs.
public void CannotRunWithInputOfWrongSize()
{
    Layer layer = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[,] { { 1 } }));
    NetComponentChain chain = new NetComponentChain(layer);
    NeuralNet.NetworkVector badInput = new NeuralNet.NetworkVector(new double[] { 0, 0 });

    try
    {
        chain.Run(badInput);
        Assert.Fail("Run should throw an ArgumentException for input of the wrong size, but did not.");
    }
    catch (ArgumentException)
    {
        // Expected: wrong-sized input is rejected.
    }
}
// Two stacked layers (2x2 then 1x2, all-ones weights) applied to the zero
// vector should yield the zero vector of the output dimension.
public void CanRunTwoLayersWithZeroInput()
{
    Layer hiddenLayer = new Layer(new NeuralNet.WeightsMatrix(new double[,] { { 1, 1 }, { 1, 1 } }));
    Layer outputLayer = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[,] { { 1, 1 } }));
    NetComponentChain chain = new NetComponentChain(hiddenLayer);
    chain.AddFixed(outputLayer);

    NeuralNet.NetworkVector zeroInput = new NeuralNet.NetworkVector(new double[] { 0, 0 });
    NetworkVector result = chain.Run(zeroInput);

    NeuralNet.NetworkVector expected = new NeuralNet.NetworkVector(new double[] { 0 });
    Assert.AreEqual(expected, result);
}
// With all-ones weights, input (1,0,0) activates each hidden unit to 1,
// so the single output unit sums to 2.
public void CanRunTwoLayerNetWithOneInput()
{
    Layer inputLayer = new Layer(new NeuralNet.WeightsMatrix(new double[,] { { 1, 1, 1 }, { 1, 1, 1 } }));
    Layer outputLayer = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[,] { { 1, 1 } }));
    NetComponentChain network = new NetComponentChain();
    network.AddFixed(inputLayer);
    network.AddTrainable(outputLayer);

    NeuralNet.NetworkVector inputVector = new NeuralNet.NetworkVector(new double[] { 1, 0, 0 });
    NetworkVector result = network.Run(inputVector);

    NeuralNet.NetworkVector expected = new NeuralNet.NetworkVector(new double[] { 2 });
    Assert.AreEqual(expected, result);
}
/// <summary>
/// Walks the chain from tail to head: trainable components accumulate
/// their parameter gradients, and every component transforms the running
/// gradient into the gradient with respect to its own input (chain rule).
/// </summary>
/// <param name="outputgradient">Gradient of the error with respect to the chain's output.</param>
public void BackPropagate(NetworkVector outputgradient)
{
    NetworkVector currentGradient = outputgradient.Copy();

    // Original code wrapped the loop in a redundant brace block and used an
    // unchecked 'as' cast (which would NullReferenceException on a mismatch);
    // a direct cast fails loudly with InvalidCastException instead.
    for (_networkComponentNode node = _tail; node != null; node = node.Previous)
    {
        NetComponent currentComponent = node.Component;

        if (node.IsTrainable)
        {
            ((TrainableComponent)currentComponent).BackPropagate(currentGradient);
        }

        // The gradient w.r.t. this component's input becomes the output
        // gradient for the previous component in the chain.
        currentGradient = currentComponent.InputGradient(currentGradient);
    }
}
// For the all-ones two-layer net, each input feeds two hidden units which
// both feed the single output, so an output gradient of (1) back-propagates
// to an input gradient of (2,2,2).
public void CanBackPropagateTwoLayerNetGradient1()
{
    Layer inputLayer = new Layer(new NeuralNet.WeightsMatrix(new double[,] { { 1, 1, 1 }, { 1, 1, 1 } }));
    Layer outputLayer = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[,] { { 1, 1 } }));
    NetComponentChain network = new NetComponentChain();
    network.AddFixed(inputLayer);
    network.AddTrainable(outputLayer);

    NeuralNet.NetworkVector inputVector = new NeuralNet.NetworkVector(new double[] { 1, 0, 0 });
    NeuralNet.NetworkVector outputGradient = new NeuralNet.NetworkVector(new double[] { 1 });
    network.Run(inputVector);

    NeuralNet.NetworkVector expectedInputGradient = new NeuralNet.NetworkVector(new double[] { 2, 2, 2 });
    Assert.AreEqual(expectedInputGradient, network.InputGradient(outputGradient));
}
/// <summary>
/// Feeds the input forward through every component in order and returns
/// the final output vector.
/// </summary>
/// <param name="input">Input vector; must match the network's input dimension.</param>
/// <returns>The output of the last component in the chain.</returns>
public override NetworkVector Run(NetworkVector input)
{
    if (NumberOfComponents == 0)
    {
        throw new InvalidOperationException("Attempt to run a network with no layers.");
    }

    if (input.Dimension != NumberOfInputs)
    {
        throw new ArgumentException(string.Format("The network accepts {0} inputs, but input has dimension {1}", NumberOfInputs, input.Dimension));
    }

    NetworkVector activation = input;
    foreach (NetComponent component in ForwardEnumeration)
    {
        activation = component.Run(activation);
    }

    return activation;
}
/// <summary>
/// Computes the gradient of the error with respect to the network's input
/// by passing the output gradient backwards through every component.
/// </summary>
/// <param name="outputgradient">Gradient with respect to the network's output; must match the last component's output dimension.</param>
/// <returns>Gradient with respect to the network's input.</returns>
public override NetworkVector InputGradient(NetworkVector outputgradient)
{
    if (NumberOfComponents == 0)
    {
        // Fixed typo in the error message: "propogate" -> "propagate".
        throw new InvalidOperationException("Attempt to back propagate in a network with no layers.");
    }

    if (outputgradient.Dimension != _tail.Component.NumberOfOutputs)
    {
        throw new ArgumentException(string.Format("The network has {0} outputs, but outputgradient has dimension {1}", _tail.Component.NumberOfOutputs, outputgradient.Dimension));
    }

    NetworkVector gradient = outputgradient.Copy();
    foreach (NetComponent component in BackwardsEnumeration)
    {
        gradient = component.InputGradient(gradient);
    }

    return gradient;
}
/// <summary>
/// Builds a combiner from a weight matrix and a bias vector; both are
/// defensively copied.
/// </summary>
/// <param name="weights">Weight matrix; must not be null.</param>
/// <param name="biases">Bias vector; dimension must equal the number of outputs.</param>
public WeightedCombiner(WeightsMatrix weights, NetworkVector biases)
    : base(weights.NumberOfOutputs, weights.NumberOfInputs)
{
    // NOTE(review): if weights is null, the base-constructor call above
    // dereferences it before this guard runs, so callers see a
    // NullReferenceException rather than the intended argument exception.
    // Fixing that requires restructuring the base call; flagged here.
    if (weights == null)
    {
        // Fixed typo ("WeightedCombineer") and use the more specific
        // ArgumentNullException (a subclass of ArgumentException, so
        // existing catch clauses still match).
        throw new ArgumentNullException("weights", "Attempt to make a WeightedCombiner with weights == null.");
    }

    if (biases == null)
    {
        throw new ArgumentNullException("biases", "Attempt to make a WeightedCombiner with biases == null.");
    }

    if (biases.Dimension != weights.NumberOfOutputs)
    {
        // Fixed garbled message ("must the the same of the outputs").
        throw new ArgumentException("Dimension of biases must be the same as the number of outputs.");
    }

    Weights = weights.Copy();
    Biases = biases.Copy();
    VectorInput = new NetworkVector(weights.NumberOfInputs);
}
// Factory: builds a layer that uses the logistic activation function and
// its derivative (NeuralFunction.__Logistic / __LogisticDerivative).
public static Layer CreateLogisticLayer(WeightsMatrix weights, NetworkVector biases)
{
    return new Layer(weights, biases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative);
}
// Accumulates the bias and weight gradients for this (gradient, input)
// pair into the running accumulators.
public override void BackPropagate(NetworkVector outputgradient, NetworkVector input)
{
    NetworkVector biasesGradient = BiasesGradient(outputgradient);
    _biasesGradientAccumulator.Add(biasesGradient);

    WeightsMatrix weightsGradient = WeightsGradient(outputgradient, input);
    _weightsGradientAccumulator.Add(weightsGradient);
}
// Bias gradient for a repeated combiner: segment the output gradient by
// repetition and sum the segments.
public override NetworkVector BiasesGradient(NetworkVector outputgradient)
{
    List<NetworkVector> segments = _segment(outputgradient);
    return NetworkVector.Sum(segments);
}
// Segments both vectors the same way and pairs corresponding segments.
protected IEnumerable<VectorPair> _segmentAndPair(NetworkVector first, NetworkVector second)
{
    List<NetworkVector> firstParts = _segment(first);
    List<NetworkVector> secondParts = _segment(second);
    return firstParts.Zip(secondParts, (a, b) => new VectorPair(a, b));
}
// Splits the vector into _repetitions segments (delegates to
// NetworkVector.Segment).
protected List<NetworkVector> _segment(NetworkVector vectortoSegment)
{
    List<NetworkVector> segments = vectortoSegment.Segment(_repetitions);
    return segments;
}
// Convenience constructor: defaults the training mode to ONLINE.
public WeightedCombiner(NetworkMatrix weights, NetworkVector biases)
    : this(weights, biases, TrainingMode.ONLINE)
{
}
// Weight gradient: outer product of the output gradient with the input
// that produced it.
public WeightsMatrix WeightsGradient(NetworkVector outputgradient, NetworkVector input)
{
    WeightsMatrix gradient = outputgradient.OuterProduct(input);
    return gradient;
}
// Applies this matrix to the given vector and wraps the product in a
// NetworkVector.
public NetworkVector LeftMultiply(NetworkVector vector)
{
    var product = _matrix.Multiply(vector.Vector);
    return new NetworkVector(product);
}
// Weight gradient through the activation: first scale the output gradient
// by the activation gradient, then delegate to the combiner.
public WeightsMatrix WeightsGradient(NetworkVector outputgradient, NetworkVector input)
{
    NetworkVector activationGradient = ActivationGradient(outputgradient);
    return _combiner.WeightsGradient(activationGradient, input);
}
// Weight gradient through the activation: pass the activation-scaled
// gradient on to the combiner's single-argument overload.
public override WeightsMatrix WeightsGradient(NetworkVector outputgradient)
{
    NetworkVector activationGradient = ActivationGradient(outputgradient);
    return _combiner.WeightsGradient(activationGradient);
}
// Convenience constructor: a layer with no activation function (null
// function and derivative), i.e. a linear layer.
public Layer(WeightsMatrix weights, NetworkVector biases)
    : this(weights, biases, null, null)
{
}
/// <summary>
/// Gradient of the error with respect to this component's input: the
/// componentwise product of the activation derivative (evaluated at the
/// stored input/output) with the incoming output gradient (chain rule).
/// </summary>
/// <param name="outputgradient">Gradient w.r.t. the output; must be non-null with dimension NumberOfOutputs.</param>
/// <param name="input">Unused here; the stored _inputVector is used instead — TODO confirm this asymmetry is intended.</param>
/// <param name="output">Unused here; the stored _outputVector is used instead.</param>
public NetworkVector InputGradient(NetworkVector outputgradient, NetworkVector input, NetworkVector output)
{
    if (outputgradient == null || outputgradient.Dimension != NumberOfOutputs)
    {
        throw new ArgumentException("outputgradient may not be null and must have dimension equal to NumberOfNeurons.");
    }

    // No derivative means a linear activation: the gradient passes through.
    if (_neuralFunctionDerivative == null)
    {
        return outputgradient.Copy();
    }

    // BUG FIX: the previous code returned only the activation derivative and
    // ignored outputgradient entirely. The chain rule requires multiplying
    // the derivative componentwise by the incoming gradient — exactly what
    // the commented-out original implementation did.
    NetworkVector derivative = NetworkVector.ApplyFunctionComponentWise(_inputVector, _outputVector, (x, y) => _neuralFunctionDerivative(x, y));
    return NetworkVector.ApplyFunctionComponentWise(derivative, outputgradient, (x, y) => x * y);
}
// Half the sum of squared componentwise differences between result and
// expected (quadratic error; the 1/2 is the conventional scaling).
protected double _errorFunction(NetworkVector result, NetworkVector expected)
{
    NetworkVector squaredErrors = NetworkVector.ApplyFunctionComponentWise(result, expected, (x, y) => (x - y) * (x - y));
    return squaredErrors.SumValues() / 2;
}
// Bias gradient through the activation: scale the output gradient by the
// activation gradient, then delegate to the combiner.
public override NetworkVector BiasesGradient(NetworkVector outputgradient)
{
    NetworkVector activationGradient = ActivationGradient(outputgradient);
    return _combiner.BiasesGradient(activationGradient);
}
/// <summary>
/// Given the gradient of the error with respect to this component's output,
/// returns the gradient with respect to its input.
/// </summary>
/// <param name="outputgradient">Gradient with respect to the component's output.</param>
/// <returns>Gradient with respect to the component's input.</returns>
public abstract NetworkVector InputGradient(NetworkVector outputgradient);
// Factory: builds a layer with no activation function (null function and
// derivative), i.e. a purely linear layer.
public static Layer CreateLinearLayer(WeightsMatrix weights, NetworkVector biases)
{
    return new Layer(weights, biases, null, null);
}
/// <summary>
/// Runs the component forward on the given input and returns its output.
/// </summary>
/// <param name="input">Input vector to the component.</param>
/// <returns>The component's output vector.</returns>
public abstract NetworkVector Run(NetworkVector input);
// Back-propagates using VectorInput as the input — presumably the vector
// captured during the forward pass; confirm it is set by Run before
// relying on this overload.
public override void BackPropagate(NetworkVector outputgradient)
{
    this.BackPropagate(outputgradient, VectorInput);
}
// Pass-through constructor: all initialization happens in the base class.
public OnlineWeightedCombiner(NetworkMatrix weights, NetworkVector biases)
    : base(weights, biases)
{
}
// Captures a snapshot of a layer's parameters as plain arrays.
public LayerState(NetworkMatrix weights, NetworkVector biases)
{
    Weights = weights.ToArray();
    Biases = biases.ToArray();
}
// Initializes the base combiner and allocates fresh (zero) delta buffers
// sized to the layer's outputs and inputs.
public BatchWeightedCombiner(NetworkMatrix weights, NetworkVector biases)
    : base(weights, biases)
{
    _weightsDelta = new NetworkMatrix(NumberOfOutputs, NumberOfInputs);
    _biasesDelta = new NetworkVector(NumberOfOutputs);
}