public void ActivateNeuron()
{
    // A neuron holding only a bias connection is not fully configured,
    // so asking for its activation must throw.
    var bias = new BiasNeuron();
    const double biasWeight = 0;
    neuron.Connect(bias, biasWeight);
    Assert.Throws(typeof(NotConfiguredException), () => neuron.Activation());

    // Wire up three inputs, each feeding 1.0 through a weight of 1.0,
    // accumulating the weighted sum as we go.
    double weightedSum = 0;
    for (int i = 0; i < 3; i++)
    {
        var inputNeuron = new InputNeuron { Input = 1 };
        const double weight = 1;
        neuron.Connect(inputNeuron, weight);
        weightedSum += inputNeuron.Input * weight;
    }

    // The activation is the sigmoid of the weighted sum: 1 / (1 + e^-z).
    double expected_activation = 1 / (1 + Math.Pow(Math.E, -weightedSum));
    MyAssert.CloseTo(neuron.Activation(), expected_activation);
}
public void TestAccumulatesWeightShift()
{
    // Build a tiny two-layer chain: input -> n21 -> n31, each with a bias.
    Neuron n31 = new Neuron();
    BiasNeuron bias2 = new BiasNeuron(); // (fixed: stray empty statement removed)
    Neuron n21 = new Neuron();
    n31.Connect(bias2);
    n31.Connect(n21);

    InputNeuron input = new InputNeuron();
    BiasNeuron bias1 = new BiasNeuron();
    n21.Connect(bias1);
    n21.Connect(input);
    input.Input = 1;

    n31.SetAnswer(0.9);

    // Before any back-propagation the accumulated shift for weight 1
    // (the n21 connection; index 0 is the bias) must be zero.
    // NOTE: expected/actual order fixed to NUnit's (expected, actual).
    double acc = n31.GetWeightShifts()[1];
    Assert.AreEqual(0, acc);

    // Back-propagation accumulates a non-zero weight shift...
    n31.PropagateBackwards();
    acc = n31.GetWeightShifts()[1];
    Assert.AreNotEqual(0, acc);

    // ...and applying the training step resets the accumulator to zero.
    n31.ApplyTraining(0, 1);
    acc = n31.GetWeightShifts()[1];
    Assert.AreEqual(0, acc);
}
public void TanhActivation()
{
    // With only a bias wired in, requesting the activation must throw.
    var tn = new TanhNeuron();
    var bias = new BiasNeuron();
    const double biasWeight = 0;
    tn.Connect(bias, biasWeight);
    Assert.Throws(typeof(NotConfiguredException), () => tn.Activation());

    // Connect three unit inputs through unit weights, tracking the weighted sum z.
    double z = 0;
    for (int i = 0; i < 3; i++)
    {
        var inputNeuron = new InputNeuron { Input = 1 };
        const double weight = 1;
        tn.Connect(inputNeuron, weight);
        z += inputNeuron.Input * weight;
    }

    // tanh(z) = (e^z - e^-z) / (e^z + e^-z)
    double expected_activation = (Math.Exp(z) - Math.Exp(-z)) / (Math.Exp(z) + Math.Exp(-z));
    MyAssert.CloseTo(tn.Activation(), expected_activation);
}
public BiasNeuron AddBiasNeuron(IEnumerable<NeuronValueModifier> neuronValueModifiers = null)
{
    // Bias neurons are created with the sentinel id -1; the optional
    // modifiers (if any) are materialized into an array.
    var bias = new BiasNeuron(-1)
    {
        ValueModifiers = neuronValueModifiers?.ToArray()
    };

    // Register exactly one neuron and hand back the stored instance.
    var stored = AddNeurons(bias, 1).First();
    return stored as BiasNeuron;
}
// Input neurons followed by one trailing bias neuron.
readonly ValueNeuron[] neurons;

// Builds a layer of <size> value neurons plus a bias neuron in the last slot.
public InputLayer(int size)
{
    var layer = new ValueNeuron[size + 1];
    int index = 0;
    while (index < size)
    {
        layer[index] = new ValueNeuron();
        index++;
    }
    layer[size] = new BiasNeuron();
    this.neurons = layer;
}
public HiddenLayerBuilder Bias(Action<BiasNeuronBuilder> action)
{
    // Let the caller configure the bias neuron through its dedicated builder.
    var builder = new BiasNeuronBuilder(Context);
    action.Invoke(builder);

    biasNeuron = builder.Build();

    // Track the neuron both in this layer and in the network-wide index.
    NeuronsInLayer.Add(biasNeuron);
    AllNeuronsInNetwork.Add(biasNeuron.Id, biasNeuron);

    return this;
}
public NeuralNetworkBuilder WithInputLayer(int neuronCount, ActivationType activationType, double biasOutput = 1)
{
    // Create the requested number of input neurons...
    var neurons = new List<Neuron>();
    for (var created = 0; created < neuronCount; created++)
    {
        neurons.Add(Neuron.For(context, activationType));
    }

    // ...and pair them with a bias neuron emitting the configured output.
    var bias = BiasNeuron.For(context, activationType, biasOutput);
    inputLayer = InputLayer.For(neurons, bias);

    return this;
}
public void TestXNOR_Manualy()
{
    // Output neuron a3_1 combines the two hidden neurons via its own bias.
    Neuron a3_1 = new Neuron();
    BiasNeuron bias_2 = new BiasNeuron();
    a3_1.Connect(bias_2);
    Neuron a2_1 = new Neuron();
    a3_1.Connect(a2_1);
    Neuron a2_2 = new Neuron();
    a3_1.Connect(a2_2);

    // Hidden layer: both neurons share one bias and the same two inputs.
    BiasNeuron bias_1 = new BiasNeuron();
    a2_1.Connect(bias_1);
    a2_2.Connect(bias_1);
    InputNeuron a1_1 = new InputNeuron();
    a2_1.Connect(a1_1);
    a2_2.Connect(a1_1);
    InputNeuron a1_2 = new InputNeuron();
    a2_1.Connect(a1_2);
    a2_2.Connect(a1_2);

    // Hand-picked weights (classic Ng construction): a2_1 computes AND,
    // a2_2 computes NOR, and a3_1 ORs them — XNOR overall.
    a3_1.SetWeight(0, -10);
    a3_1.SetWeight(1, 20);
    a3_1.SetWeight(2, 20);
    a2_1.SetWeight(0, -30);
    a2_1.SetWeight(1, 20);
    a2_1.SetWeight(2, 20);
    a2_2.SetWeight(0, 10);
    a2_2.SetWeight(1, -20);
    a2_2.SetWeight(2, -20);

    // Verify the full XNOR truth table.
    var truthTable = new[]
    {
        new { A = 0, B = 0, Expected = 1.0 },
        new { A = 0, B = 1, Expected = 0.0 },
        new { A = 1, B = 0, Expected = 0.0 },
        new { A = 1, B = 1, Expected = 1.0 },
    };
    foreach (var row in truthTable)
    {
        a1_1.Input = row.A;
        a1_2.Input = row.B;
        MyAssert.CloseTo(a3_1.Activation(), row.Expected);
    }
}
public void ConnectBias(
    BiasNeuron bias,
    IEnumerable<IEnumerable<Neuron>> layers,
    WeightInitializer weightInitializer = null)
{
    // Wrap the single bias neuron so the layer-to-layer connector can consume it.
    var biasAsLayer = new[] { bias };

    foreach (var layer in layers)
    {
        // ConnectNeurons is lazy; ToArray() forces the connections to be created now.
        var _ = ConnectNeurons(biasAsLayer, layer, weightInitializer).ToArray();
    }
}
public void InputLayer_InstantiatedWithInputAndBias_MakesCorrectLayer()
{
    // Expected composition: two input neurons followed by a bias neuron.
    var expectedList = new List<Neuron>
    {
        new InputNeuron(),
        new InputNeuron(),
        new BiasNeuron(),
    };

    List<Neuron> actualList = inputLayer.Neurons;

    // Equality semantics are supplied by the project's NeuronListComparer,
    // so freshly created neurons can stand in for the layer's own instances.
    CollectionAssert.AreEqual(expectedList, actualList, new NeuronListComparer());
}
/// <summary>
/// Creates connections with shared weights between two feature maps. Assumes
/// that toMap belongs to a convolutional layer.
/// <p/>
/// The kernel is used as a sliding window with overlapping positions, shifting
/// right by one position at a time. Neurons at the same kernel position share
/// the same weight instance.
/// </summary>
/// <param name="fromMap"> source feature map </param>
/// <param name="toMap"> destination feature map </param>
public override void connectMaps(FeatureMapLayer fromMap, FeatureMapLayer toMap)
{
    Kernel kernel = toMap.Kernel;
    kernel.initWeights(-0.15, 0.15); // TODO(review): why these particular bounds? (original note: "zasto ove vrednosti ???")

    // NOTE(review): translated from the original Serbian notes — all feature
    // maps may end up sharing the same kernel instance (and therefore the same
    // weights) because the kernel lives on the superclass; moving the kernel
    // (and a bias neuron) into FeatureMapLayer was suggested as a follow-up.
    BiasNeuron biasNeuron = new BiasNeuron();
    fromMap.addNeuron(biasNeuron);

    // Slide the kernel over the destination map. The step is fixed at 1 and no
    // edge padding is applied (original note: border handling — centering the
    // kernel / adding a frame — is still an open question).
    for (int y = 0; y < toMap.Height; y++) // iterate destination neurons by height (original note: x/y order possibly should be swapped here too)
    {
        for (int x = 0; x < toMap.Width; x++) // iterate destination neurons by width
        {
            Neuron toNeuron = toMap.getNeuronAt(x, y); // destination neuron at the current position
            for (int ky = 0; ky < kernel.Height; ky++) // kernel positions by y
            {
                for (int kx = 0; kx < kernel.Width; kx++) // kernel positions by x
                {
                    int fromX = x + kx; // x position of the source neuron
                    int fromY = y + ky; // y position of the source neuron
                    Weight[,] concreteKernel = kernel.Weights;
                    Neuron fromNeuron = fromMap.getNeuronAt(fromX, fromY);
                    // NOTE(review): the weight array is indexed [kx, ky] here; the
                    // original author questioned whether that order is correct.
                    ConnectionFactory.createConnection(fromNeuron, toNeuron, concreteKernel[kx, ky]);
                    // Also create a connection from the bias.
                    // NOTE(review): this executes once per kernel cell, so each
                    // toNeuron receives Height*Width bias connections — confirm intended.
                    ConnectionFactory.createConnection(biasNeuron, toNeuron);
                }
            }
        }
    }
}
public NeuralNetworkBuilder WithHiddenLayer(int neuronCount, ActivationType activationType, double biasOutput = 1)
{
    // Each hidden neuron is created wired to the previous layer's neurons.
    var neurons = new List<Neuron>();
    for (var created = 0; created < neuronCount; created++)
    {
        var neuron = Neuron.For(
            context,
            activationType,
            randomNumberGenerator,
            PreviousLayer.Neurons);
        neurons.Add(neuron);
    }

    // The layer also receives its own bias neuron with the requested output.
    var bias = BiasNeuron.For(context, activationType, biasOutput);
    hiddenLayers.Add(HiddenLayer.For(neurons, bias));

    return this;
}
public void Update_CalledWithInputValues_UpdatesWithCorrectValues()
{
    // Expected structure after an update: two input neurons then the bias.
    var expectedList = new List<Neuron>
    {
        new InputNeuron(),
        new InputNeuron(),
        new BiasNeuron(),
    };

    double[] newInput = { 8, 10 };
    inputLayer.Update(newInput);

    List<Neuron> actualList = inputLayer.Neurons;

    // Equality semantics come from the project's NeuronListComparer.
    CollectionAssert.AreEqual(expectedList, actualList, new NeuronListComparer());
}
public void HiddenLayer_InstantiedWithNumberOfNeurons_MakesCorrectNumberOfNeurons()
{
    // Reference layout: three hidden neurons plus one bias neuron.
    var reference = new List<Neuron>
    {
        new HiddenNeuron(),
        new HiddenNeuron(),
        new HiddenNeuron(),
        new BiasNeuron(),
    };

    int expectedValue = reference.Count;
    int actualValue = hiddenLayer.Neurons.Count;

    Assert.AreEqual(expectedValue, actualValue);
}
public void ThrowsIfPropagateMidLayerWasNeverPropagatedTo()
{
    // Output layer: n31 fed by a bias and one hidden neuron.
    var n31 = new Neuron();
    var bias2 = new BiasNeuron();
    var n21 = new Neuron();
    n31.Connect(bias2);
    n31.Connect(n21);

    // Hidden layer: n21 fed by its own bias and a single input.
    var input = new InputNeuron();
    var bias1 = new BiasNeuron();
    n21.Connect(bias1);
    n21.Connect(input);
    input.Input = 1;

    n31.SetAnswer(1);

    // n31 never propagated backwards, so n21's delta accumulator is empty
    // and propagating from the middle layer must throw.
    Assert.Throws(typeof(CannotPropagateWithEmptyAcc), () => n21.PropagateBackwards());
}
public void LastLayerGivesDeltasAndWeightsToTheOneBefore()
{
    // Output layer: two neurons sharing one bias and two hidden neurons.
    var n31 = new Neuron();
    var n32 = new Neuron();
    var bias2 = new BiasNeuron();
    var n21 = new Neuron();
    var n22 = new Neuron();
    n31.Connect(bias2);
    n31.Connect(n21);
    n31.Connect(n22);
    n32.Connect(bias2);
    n32.Connect(n21);
    n32.Connect(n22);

    // Hidden layer: both neurons share one bias and one input neuron.
    var input = new InputNeuron();
    var bias1 = new BiasNeuron();
    n21.Connect(bias1);
    n21.Connect(input);
    n22.Connect(bias1);
    n22.Connect(input);
    input.Input = 1;

    // Propagate the output layer first so deltas exist for the layer below.
    n31.SetAnswer(0.9);
    n32.SetAnswer(0.1);
    n31.PropagateBackwards();
    n32.PropagateBackwards();

    double delta31 = n31.GetDelta();
    double delta32 = n32.GetDelta();

    // Weight index 1 is the n21 connection (index 0 is the bias).
    double n21_n31 = n31.Weights[1];
    double n21_n32 = n32.Weights[1];

    n21.PropagateBackwards();

    // The mid-layer delta is the weighted sum of the downstream deltas.
    double desired_delta_for_n21 = n21_n31 * delta31 + n21_n32 * delta32;
    Assert.AreEqual(desired_delta_for_n21, n21.GetDelta());
}
/// <summary>
/// Builds <c>settings.LayersCount</c> hidden layers of
/// <c>settings.NeuronsCount</c> neurons each, fully connecting every layer to
/// the one before it, optionally appending a bias neuron per layer.
/// </summary>
/// <param name="previousLayer"> neurons feeding the first hidden layer </param>
/// <param name="settings"> layer count, neuron count, activation, bias flag </param>
/// <param name="recurrentInputs"> number of recurrent inputs per hidden neuron </param>
/// <returns> the hidden layers, in feed-forward order </returns>
internal virtual IList<ICollection<IHiddenNeuron>> BuildHiddenLayers(IEnumerable<INeuron> previousLayer, HiddenLayerSettings settings, ushort recurrentInputs = 0)
{
    var tempLayer = new List<ICollection<IHiddenNeuron>>(settings.LayersCount);
    IEnumerable<INeuron> prevLayer = previousLayer;
    for (int i = 0; i < settings.LayersCount; i++)
    {
        var tempHidden = new List<IHiddenNeuron>(settings.NeuronsCount);
        for (int j = 0; j < settings.NeuronsCount; j++)
        {
            var neuron = new HiddenNeuron(settings.FunctionType, recurrentInputs);
            tempHidden.Add(neuron);
            foreach (var inNeuron in prevLayer)
            {
                ConnectAxon(inNeuron, neuron, settings.NeuronsCount);
            }
        }
        if (settings.HasBiasNeuron)
        {
            var biasNeuron = new BiasNeuron();
            tempHidden.Add(biasNeuron);
            // BUGFIX: connect the bias from the *current* previous layer
            // (prevLayer), matching the loop above. The original iterated
            // previousLayer, which is only correct for the first hidden layer;
            // deeper layers wrongly connected their bias to the input layer.
            foreach (var inNeuron in prevLayer)
            {
                ConnectAxon(inNeuron, biasNeuron, settings.NeuronsCount);
            }
        }
        // The layer just built feeds the next iteration.
        prevLayer = tempHidden;
        tempLayer.Add(tempHidden);
    }
    return tempLayer;
}
/// <summary>
/// Builds a single LSTM cell out of primitive neurons and wires it into
/// <paramref name="model"/>. The cell's entry (concatenation) neuron is
/// returned via <paramref name="input"/> and its output neuron via
/// <paramref name="output"/>.
/// </summary>
/// <param name="model"> model the cell is added to </param>
/// <param name="input"> receives the cell's input neuron </param>
/// <param name="output"> receives the cell's output neuron </param>
/// <param name="biasNeuron"> optional bias wired to the three sigmoids and the candidate tanh </param>
/// <param name="weightInitializer"> optional initializer for the new connections </param>
/// <param name="groupName"> group label assigned to every neuron of the cell </param>
public static void AddLSTM(
    this NeuralModelBase model,
    out Neuron input,
    out Neuron output,
    BiasNeuron biasNeuron = null,
    WeightInitializer weightInitializer = null,
    string groupName = "LSTM")
{
    // Entry point: collects the external input plus the recurrent feedback
    // from finalMultMem (connected further below).
    var concatNeur = model.AddNeuron(
        sampleNeuron: new Neuron(-1, null)
        );

    // Multiply Gate: sigmoid1 scales the remembered cell state via
    // multiplyGate (which also receives cellStateMem below) — forget-gate role.
    var sigmoid1 = model.AddNeuron(
        sampleNeuron: new Neuron(-1, ActivationFunctions.Sigmoid)
        );
    model.AddConnection(concatNeur, sigmoid1, weightInitializer)
        .isTransferConnection = true;
    var multiplyGate = model.AddNeuron(
        sampleNeuron: new Neuron(-1, null) { ValueCollector = new MultValueCollector() }
        );
    model.AddConnection(sigmoid1, multiplyGate, weightInitializer);

    // Addition gate: sigmoid2 (input-gate role) multiplied with tanh
    // (candidate values), then summed with the retained state.
    var sigmoid2 = model.AddNeuron(
        sampleNeuron: new Neuron(-1, ActivationFunctions.Sigmoid)
        );
    model.AddConnection(concatNeur, sigmoid2, weightInitializer)
        .isTransferConnection = true;
    var tanh = model.AddNeuron(
        sampleNeuron: new Neuron(-1, ActivationFunctions.TanH)
        );
    model.AddConnection(concatNeur, tanh, weightInitializer)
        .isTransferConnection = true;
    var sigmoidAndTanhMultGate = model.AddNeuron(
        sampleNeuron: new Neuron(-1, null) { ValueCollector = new MultValueCollector() }
        );
    model.AddConnection(sigmoid2, sigmoidAndTanhMultGate, weightInitializer);
    model.AddConnection(tanh, sigmoidAndTanhMultGate, weightInitializer);
    var additionGate = model.AddNeuron(new Neuron(-1, null));
    model.AddConnection(multiplyGate, additionGate, weightInitializer)
        .isTransferConnection = true;
    model.AddConnection(sigmoidAndTanhMultGate, additionGate, weightInitializer)
        .isTransferConnection = true;

    // Tanh gate: sigmoid3 (output-gate role) multiplied with the squashed
    // cell state to produce the cell's output.
    var sigmoid3 = model.AddNeuron(
        sampleNeuron: new Neuron(-1, ActivationFunctions.Sigmoid)
        );
    model.AddConnection(concatNeur, sigmoid3, weightInitializer)
        .isTransferConnection = true;
    var finalMult = model.AddNeuron(
        sampleNeuron: new Neuron(-1, null) { ValueCollector = new MultValueCollector() }
        );
    var tanhGate = model.AddNeuron(
        sampleNeuron: new Neuron(-1, ActivationFunctions.TanH)
        );
    model.AddConnection(additionGate, tanhGate, weightInitializer)
        .isTransferConnection = true;
    model.AddConnection(sigmoid3, finalMult, weightInitializer);
    model.AddConnection(tanhGate, finalMult, weightInitializer)
        .isTransferConnection = true;

    // Adding memory neurons: feed the previous step's output back into the
    // concatenation neuron, and the previous cell state into the multiply gate.
    var finalMultMem = model.AddNeuron(
        sampleNeuron: new MemoryNeuron(-1, finalMult.InnovationNb)
        );
    model.AddConnection(finalMultMem, concatNeur, weightInitializer)
        .isTransferConnection = true;
    var cellStateMem = model.AddNeuron(
        sampleNeuron: new MemoryNeuron(-1, additionGate.InnovationNb)
        );
    model.AddConnection(cellStateMem, multiplyGate, weightInitializer)
        .isTransferConnection = true;

    // Connecting bias: only the gating sigmoids and the candidate tanh get a
    // bias; the pure product/sum neurons do not.
    if (biasNeuron != null)
    {
        model.AddConnection(biasNeuron, sigmoid1);
        model.AddConnection(biasNeuron, sigmoid2);
        model.AddConnection(biasNeuron, tanh);
        model.AddConnection(biasNeuron, sigmoid3);
    }

    // Assign neuron group so the whole cell can be identified later.
    concatNeur.group = groupName;
    sigmoid1.group = groupName;
    sigmoid2.group = groupName;
    sigmoid3.group = groupName;
    tanh.group = groupName;
    additionGate.group = groupName;
    multiplyGate.group = groupName;
    tanhGate.group = groupName;
    sigmoidAndTanhMultGate.group = groupName;
    finalMult.group = groupName;
    cellStateMem.group = groupName;
    finalMultMem.group = groupName;

    // Assigning out's
    input = concatNeur;
    output = finalMult;
}