/// <summary>
/// Builds a new weighted layer and registers it in both bookkeeping lists:
/// the weighted-only list and the full ordered layer list used for wiring.
/// </summary>
private void AddWeightedLayer(int neuronCount, IActivation activation, bool hasBias)
{
    var layer = new WeightedLayer(neuronCount, activation, hasBias);

    weightedLayers.Add(layer);
    layers.Add(layer);
}
/// <summary>
/// Connecting an input layer to a weighted layer should make the weighted
/// layer reachable through the input layer's Next reference.
/// </summary>
public void TestNextLayerReferenceIsCorrect()
{
    // Arrange
    var input = new InputLayer(3);
    var weighted = new WeightedLayer(5, new UnipolarSigmoidActivation(), true);

    // Act
    input.ConnectTo(weighted);

    // Assert
    Assert.AreEqual(weighted.NeuronCount, input.Next.NeuronCount);
}
/// <summary>
/// Chains every layer to its successor in list order. Each layer past the
/// first is expected to be a WeightedLayer; the hard cast fails loudly
/// (InvalidCastException) if the network was assembled incorrectly.
/// </summary>
private void ConnectLayersForward()
{
    for (int index = 1; index < layers.Count; index++)
    {
        var target = (WeightedLayer)layers[index];
        layers[index - 1].ConnectTo(target);
    }
}
/// <summary>
/// After randomization, no bias weight should remain at exactly zero —
/// the exact floating-point comparison is deliberate here.
/// </summary>
public void TestRandomizeLeavesNoZerosInBias()
{
    // Arrange: minimal two-layer network with biases enabled.
    var input = new InputLayer(3);
    var weighted = new WeightedLayer(5, new UnipolarSigmoidActivation(), true);
    input.ConnectTo(weighted);

    // Act
    weighted.RandomizeWeights();

    // Assert
    Assert.IsTrue(weighted.BiasWeights.Enumerate().All(x => x != 0.0));
}
/// <summary>
/// Replaces the bias vector of the weighted layer at <paramref name="layerIndex"/>.
/// </summary>
/// <param name="layerIndex">Index into the layer list; must refer to a WeightedLayer.</param>
/// <param name="bias">One bias value per neuron in the target layer.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bias"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the vector length does not match the layer's neuron count.</exception>
public void SetBiasForLayer(int layerIndex, double[] bias)
{
    // Guard first: without this, a null argument surfaced as a
    // NullReferenceException at bias.Length instead of a clear ArgumentNullException.
    if (bias == null)
    {
        throw new ArgumentNullException(nameof(bias));
    }

    // Hard cast so a non-weighted layer fails loudly rather than silently.
    WeightedLayer weightedLayer = (WeightedLayer)layers[layerIndex];

    if (bias.Length != weightedLayer.NeuronCount)
    {
        throw new ArgumentException("Number of bias values provided does not match neuron count", nameof(bias));
    }

    weightedLayer.SetBias(bias);
}
/// <summary>
/// Replaces all incoming weight vectors of the weighted layer at
/// <paramref name="layerIndex"/>, one vector per neuron.
/// Important: the matrices in Math.NET are column-major!
/// </summary>
/// <param name="layerIndex">Index into the layer list; must refer to a WeightedLayer.</param>
/// <param name="incomingWeights">One incoming-weight vector per neuron in the target layer.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="incomingWeights"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the vector count does not match the layer's neuron count.</exception>
public void SetIncomingWeightsForLayer(int layerIndex, List<double[]> incomingWeights)
{
    // Guard first: without this, a null argument surfaced as a
    // NullReferenceException at .Count instead of a clear ArgumentNullException.
    if (incomingWeights == null)
    {
        throw new ArgumentNullException(nameof(incomingWeights));
    }

    // Hard cast so a non-weighted layer fails loudly rather than silently.
    WeightedLayer weightedLayer = (WeightedLayer)layers[layerIndex];

    if (incomingWeights.Count != weightedLayer.NeuronCount)
    {
        throw new ArgumentException("Number of weight vectors passed " +
            "is different than the neuron count in this layer", nameof(incomingWeights));
    }

    weightedLayer.SetIncomingWeights(incomingWeights);
}
/// <summary>
/// Assembles a recurrent network: input → hidden → output, with a context
/// layer feeding back into the hidden layer alongside the input.
/// </summary>
/// <param name="type">Jordan or Elman topology; selects what the context layer mirrors.</param>
/// <param name="activation">Activation used by both weighted layers.</param>
/// <param name="useBiases">Whether the weighted layers carry bias terms.</param>
/// <param name="inputNeurons">Neuron count of the input layer.</param>
/// <param name="hiddenNeurons">Neuron count of the hidden layer.</param>
/// <param name="outputNeurons">Neuron count of the output layer.</param>
public RecursiveNetwork(Type type, IActivation activation, bool useBiases, int inputNeurons, int hiddenNeurons, int outputNeurons)
{
    IsInitialized = false;
    Activation = activation;
    NetworkType = type;

    // Jordan networks route the output back through the context layer;
    // otherwise (Elman) the context layer mirrors the hidden state.
    int contextNeurons = hiddenNeurons;
    if (NetworkType == Type.Jordan)
    {
        contextNeurons = outputNeurons;
    }

    inputLayer = new InputLayer(inputNeurons);
    hiddenLayer = new WeightedLayer(hiddenNeurons, activation, useBiases);
    contextLayer = new ContextLayer(contextNeurons);
    outputLayer = new WeightedLayer(outputNeurons, activation, useBiases);

    // Wire the feed-forward path; the hidden layer also receives the context.
    hiddenLayer.ConnectFrom(inputLayer, contextLayer);
    outputLayer.ConnectFrom(hiddenLayer);
}
/// <summary>
/// Overwrites the bias weight of a single neuron in the weighted layer at
/// <paramref name="layerIndex"/>.
/// </summary>
// TODO: write test
public void SetBiasForNeuron(int layerIndex, int neuronIndex, double bias)
{
    // Hard cast: a non-weighted layer at this index is a caller error.
    ((WeightedLayer)layers[layerIndex]).SetBiasForNeuron(neuronIndex, bias);
}
/// <summary>
/// Overwrites the incoming weight vector of a single neuron in the weighted
/// layer at <paramref name="layerIndex"/>.
/// </summary>
public void SetIncomingWeightsForNeuron(int layerIndex, int neuronIndex, double[] weights)
{
    // Hard cast: a non-weighted layer at this index is a caller error.
    ((WeightedLayer)layers[layerIndex]).SetIncomingWeightsForNeuron(neuronIndex, weights);
}