/// <summary>
/// Caches references to the brain's named input/output neurons in fields so
/// later per-tick code can read/write them without repeated string lookups.
/// </summary>
private void SetupVariablesFromBrain()
{
    inBias = brain.GetInputNeuronFromName(NAME_IN_BIAS);
    inFoodValuePosition = brain.GetInputNeuronFromName(NAME_IN_FOODVALUEPOSITION);
    inFoodValueFeeler = brain.GetInputNeuronFromName(NAME_IN_FOODVALUEFEELER);
    inOcclusionFeeler = brain.GetInputNeuronFromName(NAME_IN_OCCLUSIONFEELER);
    inEnergy = brain.GetInputNeuronFromName(NAME_IN_ENERGY);
    inAge = brain.GetInputNeuronFromName(NAME_IN_AGE);
    inGeneticDifference = brain.GetInputNeuronFromName(NAME_IN_GENETICDIFFERENCE);
    inWasAttacked = brain.GetInputNeuronFromName(NAME_IN_WASATTACKED);
    inWaterOnFeeler = brain.GetInputNeuronFromName(NAME_IN_WATERONFEELER);
    inWaterOnCreature = brain.GetInputNeuronFromName(NAME_IN_WATERONCREATURE);
    //inOscilation = brain.GetInputNeuronFromName(NAME_IN_OSCILATION);
    // Memory neuron names use a 1-based suffix: "...1" .. "...AmountOfMemory".
    for (int i = 0; i < AmountOfMemory; i++)
    {
        inMemory[i] = brain.GetInputNeuronFromName(NAME_IN_MEMORY + (i + 1));
    }
    outBirth = brain.GetOutputNeuronFromName(NAME_OUT_BIRTH);
    outRotate = brain.GetOutputNeuronFromName(NAME_OUT_ROTATE);
    outForward = brain.GetOutputNeuronFromName(NAME_OUT_FORWARD);
    outFeelerAngle = brain.GetOutputNeuronFromName(NAME_OUT_FEELERANGLE);
    outAttack = brain.GetOutputNeuronFromName(NAME_OUT_ATTACK);
    outEat = brain.GetOutputNeuronFromName(NAME_OUT_EAT);
    //outOscilation = brain.GetOutputNeuronFromName(NAME_OUT_OSCILATION);
    // Same 1-based naming scheme for the output memory neurons.
    for (int i = 0; i < AmountOfMemory; i++)
    {
        outMemory[i] = brain.GetOutputNeuronFromName(NAME_OUT_MEMORY + (i + 1));
    }
    CalculateCollisionGridPos();
}
// Verifies an InputNeuron forwards its single input value to OutValue unchanged.
public void NeuralNetwork_Neuron_InputNeuronIOTest()
{
    // Arrange
    InputNeuron IN = new InputNeuron(1, 1, 1);
    IN.InValue = new double[] { 0.010101 };

    // Assert. BUG FIX: Assert.AreEqual takes (expected, actual); the original
    // call had the arguments reversed, which produces a misleading message on
    // failure ("expected <OutValue> but was <0.010101>").
    Assert.AreEqual(0.010101, IN.OutValue);
}
// A hidden neuron's output must be the sum of its weighted dendrite values.
public void TestIfNeuronCalculatesCorrectOutput()
{
    // Arrange: two dendrites with distinct weights, fed by two input neurons.
    const double firstWeight = 0.5;
    const double secondWeight = 2.5;
    Dendrite firstDendrite = new Dendrite(firstWeight);
    Dendrite secondDendrite = new Dendrite(secondWeight);

    InputNeuron firstInput = new InputNeuron();
    InputNeuron secondInput = new InputNeuron();
    firstInput.Output = 5;
    secondInput.Output = 2;
    firstDendrite.CalculateValue(firstInput.Output);
    secondDendrite.CalculateValue(secondInput.Output);

    // Act: attach both dendrites to a hidden neuron and evaluate it.
    HiddenNeuron hidden = new HiddenNeuron();
    hidden.Dendrites.Add(firstDendrite);
    hidden.Dendrites.Add(secondDendrite);
    hidden.CalculateOutput();

    // Assert: 5 * 0.5 + 2 * 2.5 = 7.5.
    double expected = 7.5;
    Assert.AreEqual(expected, hidden.Output);
}
// A hidden neuron's delta must equal the weight-scaled sum of the deltas of
// the output neurons it feeds.
public void LastLayerGivesDeltasAndWeightsToTheOneBefore()
{
    // Output layer: two neurons sharing a bias and two hidden neurons.
    // Connection order fixes weight indices: 0 = bias, 1 = hidden1, 2 = hidden2.
    Neuron out1 = new Neuron();
    Neuron out2 = new Neuron();
    BiasNeuron outputBias = new BiasNeuron();
    Neuron hidden1 = new Neuron();
    Neuron hidden2 = new Neuron();
    out1.Connect(outputBias);
    out1.Connect(hidden1);
    out1.Connect(hidden2);
    out2.Connect(outputBias);
    out2.Connect(hidden1);
    out2.Connect(hidden2);

    // Hidden layer: both neurons fed by one input neuron plus a bias.
    InputNeuron input = new InputNeuron();
    BiasNeuron hiddenBias = new BiasNeuron();
    hidden1.Connect(hiddenBias);
    hidden1.Connect(input);
    hidden2.Connect(hiddenBias);
    hidden2.Connect(input);
    input.Input = 1;

    // Backward pass from the output layer.
    out1.SetAnswer(0.9);
    out2.SetAnswer(0.1);
    out1.PropagateBackwards();
    out2.PropagateBackwards();

    double deltaOut1 = out1.GetDelta();
    double deltaOut2 = out2.GetDelta();
    double weightToOut1 = out1.Weights[1];
    double weightToOut2 = out2.Weights[1];
    hidden1.PropagateBackwards();

    double expectedDelta = weightToOut1 * deltaOut1 + weightToOut2 * deltaOut2;
    Assert.AreEqual(expectedDelta, hidden1.GetDelta());
}
// Fills the layer's neuron array; every neuron in the layer shares the same
// Architecture.Type, so the switch selects one concrete neuron kind per layer.
private void InitNeuron()
{
    _neurons = new INeuron[Architecture.NumberOfNeurons];
    for (int i = 0; i < Architecture.NumberOfNeurons; i++)
    {
        switch (Architecture.Type)
        {
            case NeuronType.Input:
                // Has input size of 1
                _neurons[i] = new InputNeuron();
                break;

            case NeuronType.Multiplication:
                _neurons[i] = new MultiplicationNeuron(Architecture.InputSize);
                break;

            case NeuronType.Normalization:
                _neurons[i] = new NormalizationNeuron(Architecture.InputSize);
                break;

            case NeuronType.Function:
                // Has input size of 1
                // NOTE(review): the comment above claims input size 1, yet the
                // configured Architecture.InputSize is forwarded — confirm which
                // is correct.
                _neurons[i] = new FunctionNeuron(Architecture.InputSize, _numberOfVariables);
                break;

            case NeuronType.Output:
                _neurons[i] = new OutputNeuron(Architecture.InputSize);
                break;

            default:
                // Unknown layer type is a configuration error.
                throw new ArgumentOutOfRangeException(nameof(Architecture.Type), Architecture.Type, null);
        }
    }
}
/// <summary>
/// Connects the given input neuron to every hidden-layer neuron, using a
/// random initial weight in [-0.5, 0.5).
/// </summary>
public void GenerateInputMesh(InputNeuron neuron, Random rand)
{
    foreach (var hiddenNeuron in HiddenLayer)
    {
        float weight = (float)rand.NextDouble() - 0.5f;
        hiddenNeuron.Connections.Add(new Connection(neuron, weight));
    }
}
// Builds the flat neuron array laid out as [inputs][hidden...][outputs].
// NOTE(review): the inputCount, outputCount and excess parameters are never
// used — the instance fields _inputCount/_outputCount are read instead.
// Confirm whether the parameters were meant to replace the fields; left
// unchanged here because the correct intent cannot be determined locally.
private INeuron[] CreateNeurons(int neuronsCount, int inputCount, int outputCount, double bias, double excess, IActivation activation)
{
    var neurons = new INeuron[neuronsCount];
    // Input segment: indices [0, _inputCount).
    for (var i = 0; i < _inputCount; i++)
    {
        neurons[i] = new InputNeuron
        {
            B = bias,
        };
    }
    // Hidden segment: indices [_inputCount, neuronsCount - _outputCount).
    for (var i = _inputCount; i < neuronsCount - _outputCount; i++)
    {
        neurons[i] = new HiddenNeuron
        {
            Activation = activation,
            // NOTE(review): magic constant 0.5 + 0.25 * 1 — meaning of L unclear
            // from this file; confirm against the neuron implementation.
            L = 0.5 + 0.25 * 1,
            B = bias,
        };
    }
    // Output segment: the final _outputCount slots.
    for (var i = neuronsCount - _outputCount; i < neuronsCount; i++)
    {
        neurons[i] = new OutputNeuron
        {
            Activation = activation,
            L = 0.5 + 0.25 * 3,
            B = bias,
        };
    }
    return(neurons);
}
// Builds one layer of the network. Index 0 is always a bias neuron; the kind
// of the remaining neurons depends on the layer's position in the network.
private void ConstructLayer(int[] neurons_in_layers_without_bias, int layer)
{
    // +1 accounts for the bias neuron prepended at index 0.
    for (int i = 0; i < neurons_in_layers_without_bias[layer] + 1; i++)
    {
        INeuron nn;
        if (i == 0)
        {
            nn = new BiasNeuron();
        }
        else
        {
            if (layer == 0)
            {
                // First layer holds raw inputs.
                nn = new InputNeuron();
            }
            else if (layer == LayerCount - 2)
            {
                // Presumably the final trainable layer — delegate to the
                // factory method (TODO confirm what CreateNeuron builds).
                nn = CreateNeuron();
            }
            else
            {
                // Intermediate layers: TanhNeuron when the network runs in
                // combined mode, otherwise the default Neuron.
                nn = is_combined ? new TanhNeuron() : new Neuron();
            }
        }
        neurons[layer][i] = nn;
        // Wire the freshly created neuron into its layer.
        ConnectNeuronToLayer(nn, layer);
    }
}
// Feeds a byte vector into the network and evaluates it, growing the
// input (and output) layers on demand so there is one input neuron per
// element of `input`.
public void CalculateValues(byte[] input)
{
    // NOTE(review): a fresh time-seeded Random per call; rapid successive
    // calls that grow the network could draw correlated weights. Consider a
    // single shared generator owned by the network.
    var rand = new Random();
    for (var i = 0; i < input.Length; i++)
    {
        // Lazily add one input neuron (and, apparently by design, one output
        // neuron) per missing slot, with randomly weighted connections.
        if (i >= InputLayer.Count)
        {
            var inputN = new InputNeuron();
            AddInputNeuron(inputN);
            GenerateInputMesh(inputN, rand);
            var neuron = new WorkingNeuron();
            AddOutputNeuron(neuron);
            GenerateOutputMesh(neuron, rand);
        }
        InputLayer[i].Setvalue(input[i]);
    }
    // Forward pass: hidden layer first, then outputs.
    foreach (var neuron in HiddenLayer)
    {
        neuron.GetValue();
    }
    foreach (var neuron in OutputLayer)
    {
        neuron.GetValue();
    }
}
/// <summary>
/// Returns a fresh InputNeuron carrying only this neuron's name; no
/// connections or other state are copied.
/// </summary>
public override Neuron nameCopy()
{
    var copy = new InputNeuron();
    copy.setName(getName());
    return copy;
}
/// <summary>
/// Creates a new input neuron, registers it with the network's input list
/// and returns it to the caller.
/// </summary>
public InputNeuron createNewInput()
{
    var neuron = new InputNeuron();
    inputNeurons.Add(neuron);
    return neuron;
}
// create network

/// <summary>
/// Creates an input neuron holding the given initial value, registers it
/// with the network's input list and returns it.
/// </summary>
public InputNeuron CreateInputNeuron(float value)
{
    var neuron = new InputNeuron(value);
    inputNeurons.Add(neuron);
    return neuron;
}
// Builds the controller network: checkpoint-distance and -angle inputs,
// three hidden layers of 32 neurons each, and heading/thrust outputs.
// NOTE(review): each physical quantity spans several numbered neurons
// (e.g. 6 angle neurons) — presumably a discretized encoding; confirm
// against the code that fills/reads these neurons.
private NeuralNetwork.NeuralNetwork CreateNn()
{
    NeuralNetwork.NeuralNetwork nn;
    nn = new NeuralNetwork.NeuralNetwork();

    InputLayer inputLayer = nn.CreateInputLayer();
    //inHeading = inputLayer.CreateNeuron("heading");
    //inVelocityAngle = inputLayer.CreateNeuron("v_angle");
    //inVelocityLength = inputLayer.CreateNeuron("v_length");
    // Distance to the next checkpoint (4 neurons).
    inNextCheckpointDistance0 = inputLayer.CreateNeuron("c_dist0");
    inNextCheckpointDistance1 = inputLayer.CreateNeuron("c_dist1");
    inNextCheckpointDistance2 = inputLayer.CreateNeuron("c_dist2");
    inNextCheckpointDistance3 = inputLayer.CreateNeuron("c_dist3");
    // Angle to the next checkpoint (6 neurons).
    inNextCheckpointAngle0 = inputLayer.CreateNeuron("c_angle0");
    inNextCheckpointAngle1 = inputLayer.CreateNeuron("c_angle1");
    inNextCheckpointAngle2 = inputLayer.CreateNeuron("c_angle2");
    inNextCheckpointAngle3 = inputLayer.CreateNeuron("c_angle3");
    inNextCheckpointAngle4 = inputLayer.CreateNeuron("c_angle4");
    inNextCheckpointAngle5 = inputLayer.CreateNeuron("c_angle5");
    // Angle to the checkpoint after the next one (6 neurons).
    inNextNextCheckpointAngle0 = inputLayer.CreateNeuron("nnc_angle0");
    inNextNextCheckpointAngle1 = inputLayer.CreateNeuron("nnc_angle1");
    inNextNextCheckpointAngle2 = inputLayer.CreateNeuron("nnc_angle2");
    inNextNextCheckpointAngle3 = inputLayer.CreateNeuron("nnc_angle3");
    inNextNextCheckpointAngle4 = inputLayer.CreateNeuron("nnc_angle4");
    inNextNextCheckpointAngle5 = inputLayer.CreateNeuron("nnc_angle5");
    //inNextCheckpointDistance = inputLayer.CreateNeuron("c_dist");

    OutputLayer outputLayer = nn.CreateOutputLayer();
    // Heading (6 neurons) and thrust (7 neurons) outputs.
    outHeading0 = outputLayer.CreateNeuron("o_heading0");
    outHeading1 = outputLayer.CreateNeuron("o_heading1");
    outHeading2 = outputLayer.CreateNeuron("o_heading2");
    outHeading3 = outputLayer.CreateNeuron("o_heading3");
    outHeading4 = outputLayer.CreateNeuron("o_heading4");
    outHeading5 = outputLayer.CreateNeuron("o_heading5");
    outThrust0 = outputLayer.CreateNeuron("o_thrust0");
    outThrust1 = outputLayer.CreateNeuron("o_thrust1");
    outThrust2 = outputLayer.CreateNeuron("o_thrust2");
    outThrust3 = outputLayer.CreateNeuron("o_thrust3");
    outThrust4 = outputLayer.CreateNeuron("o_thrust4");
    outThrust5 = outputLayer.CreateNeuron("o_thrust5");
    outThrust6 = outputLayer.CreateNeuron("o_thrust6");

    // Three hidden layers of 32 neurons each.
    for (int i = 0; i < 3; i++)
    {
        HiddenLayer hiddenLayer = nn.CreateHiddenLayer();
        for (int j = 0; j < 32; j++)
        {
            HiddenNeuron hiddenNeuron = hiddenLayer.CreateNeuron(string.Format("hidden[{0}][{1}]", i, j));
        }
    }
    // Fully connect adjacent layers, then randomize all weights.
    nn.CreateFullConnections();
    nn.InitWithRandomValues();
    return(nn);
}
// Activation must throw until the neuron is configured, then equal the
// logistic sigmoid of the weighted input sum.
public void ActivateNeuron()
{
    // With only a bias connection the neuron is not yet fully configured.
    BiasNeuron bias = new BiasNeuron();
    double biasWeight = 0;
    neuron.Connect(bias, biasWeight);
    Assert.Throws(typeof(NotConfiguredException), () => neuron.Activation());

    // Three unit inputs, each connected with unit weight.
    InputNeuron first = new InputNeuron();
    InputNeuron second = new InputNeuron();
    InputNeuron third = new InputNeuron();
    first.Input = 1;
    second.Input = 1;
    third.Input = 1;
    double unitWeight = 1;
    neuron.Connect(first, unitWeight);
    neuron.Connect(second, unitWeight);
    neuron.Connect(third, unitWeight);

    // Expected: sigmoid of z = 1*1 + 1*1 + 1*1 = 3.
    double z = first.Input * unitWeight + second.Input * unitWeight + third.Input * unitWeight;
    double expected = 1 / (1 + Math.Pow(Math.E, -z));
    MyAssert.CloseTo(neuron.Activation(), expected);
}
// A TanhNeuron's activation must equal tanh of the weighted input sum.
public void TanhActivation()
{
    TanhNeuron tanhNeuron = new TanhNeuron();

    // With only a bias connection the neuron is not yet fully configured.
    BiasNeuron bias = new BiasNeuron();
    double biasWeight = 0;
    tanhNeuron.Connect(bias, biasWeight);
    Assert.Throws(typeof(NotConfiguredException), () => tanhNeuron.Activation());

    // Three unit inputs, each connected with unit weight.
    InputNeuron first = new InputNeuron();
    InputNeuron second = new InputNeuron();
    InputNeuron third = new InputNeuron();
    first.Input = 1;
    second.Input = 1;
    third.Input = 1;
    double unitWeight = 1;
    tanhNeuron.Connect(first, unitWeight);
    tanhNeuron.Connect(second, unitWeight);
    tanhNeuron.Connect(third, unitWeight);

    // Expected: tanh(z) with z = 3, spelled out via exponentials exactly as
    // the neuron is expected to compute it.
    double z = first.Input * unitWeight + second.Input * unitWeight + third.Input * unitWeight;
    double expected = (Math.Exp(z) - Math.Exp(-z)) / (Math.Exp(z) + Math.Exp(-z));
    MyAssert.CloseTo(tanhNeuron.Activation(), expected);
}
// Weight-shift accumulator lifecycle: zero before backprop, non-zero after,
// and reset to zero once training is applied.
public void TestAccumulatesWeightShift()
{
    // Minimal chain: input -> hidden -> output, with a bias on each neuron.
    Neuron output = new Neuron();
    BiasNeuron outputBias = new BiasNeuron();
    Neuron hidden = new Neuron();
    output.Connect(outputBias);
    output.Connect(hidden);
    InputNeuron input = new InputNeuron();
    BiasNeuron hiddenBias = new BiasNeuron();
    hidden.Connect(hiddenBias);
    hidden.Connect(input);
    input.Input = 1;
    output.SetAnswer(0.9);

    // Index 1 is the hidden-neuron connection (0 is the bias).
    double shift = output.GetWeightShifts()[1];
    Assert.AreEqual(shift, 0);

    // A backward pass accumulates a non-zero shift...
    output.PropagateBackwards();
    shift = output.GetWeightShifts()[1];
    Assert.AreNotEqual(shift, 0);

    // ...and applying the training step clears the accumulator.
    output.ApplyTraining(0, 1);
    shift = output.GetWeightShifts()[1];
    Assert.AreEqual(shift, 0);
}
// Duplicate of the other ActivateNeuron test in this file: unconfigured
// activation throws; a configured neuron returns the sigmoid of its inputs.
public void ActivateNeuron()
{
    BiasNeuron bias = new BiasNeuron();
    neuron.Connect(bias, 0);
    Assert.Throws(typeof(NotConfiguredException), () => neuron.Activation());

    // Build three unit inputs; set their values first, then connect them all
    // with weight 1, mirroring the original ordering.
    InputNeuron[] inputs = { new InputNeuron(), new InputNeuron(), new InputNeuron() };
    foreach (InputNeuron inp in inputs)
    {
        inp.Input = 1;
    }
    foreach (InputNeuron inp in inputs)
    {
        neuron.Connect(inp, 1);
    }

    // Sigmoid of the weighted sum z = 3.
    double weightedSum = 0;
    foreach (InputNeuron inp in inputs)
    {
        weightedSum += inp.Input * 1;
    }
    double expected = 1 / (1 + Math.Pow(Math.E, -weightedSum));
    MyAssert.CloseTo(neuron.Activation(), expected);
}
// Initialization

/// <summary>
/// Create new Neural Network by given <see cref="NetworkLayout"/>.
/// Builds the input, hidden and output layers and flattens every neuron's
/// genes into the network-level DNA array, in layer order.
/// </summary>
/// <param name="networkLayout">NN layout</param>
public NeuralNetwork(NetworkLayout networkLayout)
{
    var ic = networkLayout.inputs;
    var oc = networkLayout.outputs;
    var ih = networkLayout.hidden;
    inputs = new InputNeuron[ic];
    outputs = new Neuron[oc];
    hidden = new HiddenNeuron[ih.Length][];

    // Initialize DNA
    genes = new float[networkLayout.TotalGenes];

    // Gene counter
    var g = 0;

    // Generate input layer
    for (var i = 0; i < ic; i++)
    {
        var neuron = inputs[i] = new InputNeuron();
        foreach (var gene in neuron.Genes)
        {
            genes[g++] = gene;
        }
    }

    // Number of inputs for neuron in layer, which equals to number of neurons in previous layer
    var layerInputsCount = ic;

    // Generate hidden layers
    for (var i = 0; i < ih.Length; i++)
    {
        var hc = ih[i];
        var layer = hidden[i] = new HiddenNeuron[hc];
        for (var j = 0; j < hc; j++)
        {
            var neuron = layer[j] = new HiddenNeuron(layerInputsCount, networkLayout.activationFunction);
            foreach (var gene in neuron.Genes)
            {
                genes[g++] = gene;
            }
        }
        layerInputsCount = hc;
    }

    // Generate output layer (plain Neuron, not a dedicated output type,
    // in this model).
    for (var i = 0; i < oc; i++)
    {
        var neuron = outputs[i] = new Neuron(layerInputsCount);
        foreach (var gene in neuron.Genes)
        {
            genes[g++] = gene;
        }
    }
}
// A freshly constructed InputNeuron must expose a non-null, empty
// connection list.
public void InputNeuron_DoesNothing()
{
    // Arrange
    var inputNeuron = new InputNeuron();

    // Assert
    Assert.IsNotNull(inputNeuron.Connections);
    // FIX: AreEqual instead of IsTrue(count == 0) — same pass/fail behavior
    // but reports the actual count on failure instead of just "false".
    Assert.AreEqual(0, inputNeuron.Connections.Count);
}
/// <summary>
/// Builds a fully layered network: one input layer, <paramref name="_numHLay"/>
/// hidden layers and one output layer, with the given sizes.
/// </summary>
/// <param name="_inSize">Number of input neurons.</param>
/// <param name="_numHLay">Number of hidden layers.</param>
/// <param name="_hidSize">Neurons per hidden layer.</param>
/// <param name="_outSize">Number of output neurons.</param>
public MultilayerNeuralNet(int _inSize, int _numHLay, int _hidSize, int _outSize)
{
    InputLayerSize = _inSize;
    NumHiddenLayers = _numHLay;
    HiddenLayerSize = _hidSize;
    OutputLayerSize = _outSize;

    //Instantiate Jagged Arrays of Neurons: one row per layer.
    int TotalLayers = NumHiddenLayers + 2;
    Neurons = new Neuron[TotalLayers][];
    Neurons[0] = new InputNeuron[InputLayerSize];
    for (int i = 0; i < NumHiddenLayers; i++)
    {
        Neurons[i + 1] = new HiddenNeuron[HiddenLayerSize];
    }
    Neurons[TotalLayers - 1] = new OutputNeuron[OutputLayerSize];

    // Input neurons fan out to the first hidden layer.
    for (int i = 0; i < InputLayerSize; i++)
    {
        Neurons[0][i] = new InputNeuron(HiddenLayerSize);
    }

    // Hidden neurons are sized (fan-in, fan-out) by their layer position.
    // BUG FIX: the original used "if (i == 0) ... if (i == last) ... else ..."
    // without chaining, so every first-hidden-layer neuron was immediately
    // overwritten by the trailing else branch, and a single hidden layer got
    // the wrong fan-in. Each neuron is now assigned exactly once.
    for (int i = 0; i < NumHiddenLayers; i++)
    {
        bool isFirst = i == 0;
        bool isLast = i == NumHiddenLayers - 1;
        int fanIn = isFirst ? InputLayerSize : HiddenLayerSize;
        int fanOut = isLast ? OutputLayerSize : HiddenLayerSize;
        for (int j = 0; j < HiddenLayerSize; j++)
        {
            Neurons[i + 1][j] = new HiddenNeuron(fanIn, fanOut);
        }
    }

    // Output neurons receive the last hidden layer.
    for (int i = 0; i < OutputLayerSize; i++)
    {
        Neurons[NumHiddenLayers + 1][i] = new OutputNeuron(HiddenLayerSize);
    }
    //Get input values
}
/// <summary>
/// Creates an input neuron at the given position, registers it with the
/// network's input list and returns it.
/// </summary>
public InputNeuron CreateNewInput(Vector3 position)
{
    var neuron = new InputNeuron
    {
        position = position
    };
    inputNeurons.Add(neuron);
    return neuron;
}
// Rebuilds the shared 2-2-1 test network from scratch: two input neurons
// feeding two hidden neurons, which feed a single output neuron.
public static void Reset()
{
    First = new InputNeuron();
    Second = new InputNeuron();
    A = new HiddenNeuron(First, Second);
    B = new HiddenNeuron(First, Second);
    Output = new OutputNeuron(A, B);
}
/// <summary>
/// Resolves this feeler's neurons on the owner's brain. Per-feeler neurons
/// share the creature-level names with an " #index" suffix.
/// </summary>
public void SetupVariablesFromBrain(int index)
{
    string suffix = " #" + index;
    inFoodValueFeeler = owner.Brain.GetInputNeuronFromName(Creature.NAME_IN_FOODVALUEFEELER + suffix);
    inOcclusionFeeler = owner.Brain.GetInputNeuronFromName(Creature.NAME_IN_OCCLUSIONFEELER + suffix);
    inWaterOnFeeler = owner.Brain.GetInputNeuronFromName(Creature.NAME_IN_WATERONFEELER + suffix);
    inGeneticDifference = owner.Brain.GetInputNeuronFromName(Creature.NAME_IN_GENETICDIFFERENCE + suffix);
    outFeelerAngle = owner.Brain.GetOutputNeuronFromName(Creature.NAME_OUT_FEELERANGLE + suffix);
    outAttack = owner.Brain.GetOutputNeuronFromName(Creature.NAME_OUT_ATTACK + suffix);
}
// An input neuron's Output is a plain pass-through value: what is assigned
// must be read back unchanged.
public void TestIfInputNeuronAssignsCorrectOutputValue()
{
    InputNeuron inputNeuron = new InputNeuron();
    inputNeuron.Output = 5;

    double expected = 5;
    Assert.AreEqual(expected, inputNeuron.Output);
}
// Adds `count` input neurons to the network, each based on a sample neuron
// carrying the given value modifiers; exact replication semantics are
// defined by AddNeurons. NOTE(review): the -1 constructor argument
// presumably means "no id assigned yet" — confirm against InputNeuron.
public IList <Neuron> AddInputNeurons( int count, IEnumerable <NeuronValueModifier> neuronValueModifiers = null)
{
    var sampleNeuron = new InputNeuron(-1)
    {
        // Null-safe: a null modifier sequence leaves ValueModifiers null.
        ValueModifiers = neuronValueModifiers?.ToArray()
    };
    return(AddNeurons(sampleNeuron, count));
}
// Child-initialization used in place of a constructor (Unity component
// lifecycle): sets this bibit up as offspring of `mother` — cloned brain,
// inherited name, incremented generation, mutated weights and a slightly
// drifted color.
public void pseudoConstructor2(Bibit mother)
{
    transform.position = mother.transform.position;
    brain = mother.brain.cloneFullMesh();
    // NOTE(review): starting energy is a magic number — confirm its source.
    energy = 150;
    displayName = mother.displayName;
    generation = mother.getGeneration() + 1;

    // Re-resolve all cached neuron references against the cloned brain.
    inBias = brain.getInputNeuronFromName(NAME_IN_BIAS);
    inEnergy = brain.getInputNeuronFromName(NAME_IN_ENERGY);
    inAge = brain.getInputNeuronFromName(NAME_IN_AGE);
    inMemory = brain.getInputNeuronFromName(NAME_IN_MEMORY);
    inDistToNearestPoison = brain.getInputNeuronFromName(NAME_IN_DISTTONEARESTPOISON);
    inAngleToNearestPoison = brain.getInputNeuronFromName(NAME_IN_ANGLETONEARESTPOISON);
    inFoodAmountAtCurrentBlock = brain.getInputNeuronFromName(NAME_IN_FOODAMOUNTATCURRENTBLOCK);
    inFoodAmountInSightRadius = brain.getInputNeuronFromName(NAME_IN_FOODAMOUNTINSIGHTRADIUS);
    inDistToMaxFoodBlockAround = brain.getInputNeuronFromName(NAME_IN_DISTTOMAXFOODBLOCKAROUND);
    inAngleToMaxFoodBlockAround = brain.getInputNeuronFromName(NAME_IN_ANGLETOMAXFOODBLOCKAROUND);
    inFoodAmountOfMaxFoodBlockAround = brain.getInputNeuronFromName(NAME_IN_FOODAMOUNTOFMAXFOODBLOCKAROUND);
    inNumberOfBibitsNear = brain.getInputNeuronFromName(NAME_IN_NUMBEROFBIBITSNEAR);
    inDistToNearestBibit = brain.getInputNeuronFromName(NAME_IN_DISTTONEARESTBIBIT);
    inAngleToNearestBibit = brain.getInputNeuronFromName(NAME_IN_ANGLETONEARESTBIBIT);
    inGeneticDifferenceToNearestBibit = brain.getInputNeuronFromName(NAME_IN_GENETICDIFFERENCETONEARESTBIBIT);
    inCenterPosition = brain.getInputNeuronFromName(NAME_IN_CENTERPOSITION);
    outBirth = brain.getOutputNeuronFromName(NAME_OUT_BIRTH);
    outRotate = brain.getOutputNeuronFromName(NAME_OUT_ROTATE);
    outForward = brain.getOutputNeuronFromName(NAME_OUT_FORWARD);
    outEat = brain.getOutputNeuronFromName(NAME_OUT_EAT);
    outMemory = brain.getOutputNeuronFromName(NAME_OUT_MEMORY);
    outAttack = brain.getOutputNeuronFromName(NAME_OUT_ATTACK);

    BibitProducer.updateGeneration(generation);
    // CalculateFeelerPos();

    // Ten random mutations so the child's brain differs from the mother's.
    for (int i = 0; i < 10; i++)
    {
        brain.RandomMutation(0.2f);
    }

    // Inherit the mother's color with a small random drift per channel,
    // clamped back into the valid [0, 1] range.
    float r = mother.color.r;
    float g = mother.color.g;
    float b = mother.color.b;
    r += Random.value * 0.1f - 0.05f;
    g += Random.value * 0.1f - 0.05f;
    b += Random.value * 0.1f - 0.05f;
    r = math.clamp(r, 0, 1);
    g = math.clamp(g, 0, 1);
    b = math.clamp(b, 0, 1);
    color = new Color(r, g, b);
    // writeStuff(mother, brain);
}
// Connecting input neurons must make them visible in the neuron's
// connection list.
public void ConnectToNeuron()
{
    // Arrange: connect three input neurons with zero weight.
    InputNeuron input1 = new InputNeuron();
    InputNeuron input2 = new InputNeuron();
    InputNeuron input3 = new InputNeuron();
    neuron.Connect(input1, 0);
    neuron.Connect(input2, 0);
    neuron.Connect(input3, 0);

    // Assert: all three connected neurons are reported.
    INeuron[] neurons = neuron.Neurons;
    Assert.Contains(input1, neurons);
    Assert.Contains(input2, neurons);
    // FIX: the original test connected input3 but never asserted it.
    Assert.Contains(input3, neurons);
}
// Hand-wired XNOR network (the classic two-layer construction): hidden unit
// a2_1 computes AND, a2_2 computes NOR, and the output a3_1 ORs them.
// Weight index 0 is always the bias connection.
public void TestXNOR_Manualy()
{
    Neuron a3_1 = new Neuron();
    BiasNeuron bias_2 = new BiasNeuron();
    a3_1.Connect(bias_2);
    Neuron a2_1 = new Neuron();
    a3_1.Connect(a2_1);
    Neuron a2_2 = new Neuron();
    a3_1.Connect(a2_2);
    BiasNeuron bias_1 = new BiasNeuron();
    a2_1.Connect(bias_1);
    a2_2.Connect(bias_1);
    InputNeuron a1_1 = new InputNeuron();
    a2_1.Connect(a1_1);
    a2_2.Connect(a1_1);
    InputNeuron a1_2 = new InputNeuron();
    a2_1.Connect(a1_2);
    a2_2.Connect(a1_2);
    // Output unit: OR(a2_1, a2_2).
    a3_1.SetWeight(0, -10);
    a3_1.SetWeight(1, 20);
    a3_1.SetWeight(2, 20);
    // Hidden unit 1: AND(x1, x2).
    a2_1.SetWeight(0, -30);
    a2_1.SetWeight(1, 20);
    a2_1.SetWeight(2, 20);
    // Hidden unit 2: NOR(x1, x2).
    a2_2.SetWeight(0, 10);
    a2_2.SetWeight(1, -20);
    a2_2.SetWeight(2, -20);
    // XNOR truth table: equal inputs -> ~1, different inputs -> ~0.
    a1_1.Input = 0;
    a1_2.Input = 0;
    MyAssert.CloseTo(a3_1.Activation(), 1);
    a1_1.Input = 0;
    a1_2.Input = 1;
    MyAssert.CloseTo(a3_1.Activation(), 0);
    a1_1.Input = 1;
    a1_2.Input = 0;
    MyAssert.CloseTo(a3_1.Activation(), 0);
    a1_1.Input = 1;
    a1_2.Input = 1;
    MyAssert.CloseTo(a3_1.Activation(), 1);
}
// With both inputs at 0, the single-output network must not fire.
public void Both_inputs_low()
{
    var inputA = new InputNeuron(0, 1);
    var inputB = new InputNeuron(0, 1);
    var output = new OutputNeuron(1.5, 1, 0);

    var network = new Network();
    network.Connect(inputA, output);
    network.Connect(inputB, output);

    Assert.IsFalse(network.Execute());
}
// With both inputs at 1, the single-output network must fire.
public void Both_inputs_high()
{
    var inputA = new InputNeuron(1, 1);
    var inputB = new InputNeuron(1, 1);
    var output = new OutputNeuron(.9, 1, 0);

    var network = new Network();
    network.Connect(inputA, output);
    network.Connect(inputB, output);

    Assert.IsTrue(network.Execute());
}
// Verifies the fixture's inputLayer contains two input neurons followed by
// a bias neuron, compared element-wise via NeuronListComparer (so the
// locally created neurons act as structural stand-ins, not identity checks).
public void InputLayer_InstantiatedWithInputAndBias_MakesCorrectLayer()
{
    InputNeuron n1 = new InputNeuron();
    InputNeuron n2 = new InputNeuron();
    BiasNeuron b = new BiasNeuron();
    List <Neuron> expectedList = new List <Neuron> { n1, n2, b };
    List <Neuron> actualList = inputLayer.Neurons;
    CollectionAssert.AreEqual(expectedList, actualList, new NeuronListComparer());
}
// With exactly one input high, the single-output network must fire.
public void Both_inputs_different()
{
    var inputA = new InputNeuron(0, 1);
    var inputB = new InputNeuron(1, 1);
    var output = new OutputNeuron(.9, 1, 0);

    var network = new Network();
    network.Connect(inputA, output);
    network.Connect(inputB, output);

    Assert.IsTrue(network.Execute());
}
/// <summary>
/// Initialize a new backpropagation(BP) network.
/// </summary>
/// <param name="FunctionSet">Unified neuron functions</param>
/// <param name="Inputs">Input neurons count</param>
/// <param name="WeightsRange">Two-element [min, max] range for the initial random weights</param>
/// <param name="Layers">Count of neurons in each layer</param>
public BackpropagationNetwork(BackpropagationNetworkFunctionsSet FunctionSet, int Inputs, double[] WeightsRange, params int[] Layers)
{
    InputNeurons = new InputNeuron[Inputs];
    for (int i = 0; i < Inputs; i++)
    {
        InputNeurons[i] = new InputNeuron(0, FunctionSet.InputNormalizationFunction);
    }
    Neurons = new List <Neuron[]>();
    BiasNeurons = new List <BiasNeuron>();

    // BUG FIX: the original created a new time-seeded Random inside the layer
    // loop; layers built within the same clock tick shared a seed and drew
    // identical weight sequences. One generator now serves all layers.
    Random weightRandom = new Random();

    int layersCount = Layers.Length;
    for (int layer = 0; layer < layersCount; layer++)
    {
        int prevLayerNeuronCount = (layer == 0) ? (Inputs) : (Layers[layer - 1]);
        int neuronCount = Layers[layer];
        IEnumerable <Neuron> prevLayer = (layer == 0) ? (InputNeurons) : (Neurons[layer - 1]);
        BiasNeurons.Add(new BiasNeuron());
        Neurons.Add(new Neuron[neuronCount]);

        // Nguyen-Widrow-style scale factor; loop-invariant per layer, so it is
        // hoisted out of the per-neuron loop. NOTE(review): it divides by the
        // network's input count rather than this layer's fan-in — confirm.
        double beta = 0.7 * Math.Pow(neuronCount + 1.0, 1.0 / Inputs);

        for (int j = 0; j < neuronCount; j++)
        {
            // Draw uniform weights (and a bias) in [WeightsRange[0], WeightsRange[1]).
            double[] weights = new double[prevLayerNeuronCount];
            for (int w = 0; w < prevLayerNeuronCount; w++)
            {
                weights[w] = WeightsRange[0] + weightRandom.NextDouble() * (WeightsRange[1] - WeightsRange[0]);
            }
            double bias = WeightsRange[0] + weightRandom.NextDouble() * (WeightsRange[1] - WeightsRange[0]);

            // Rescale the weight vector (bias included) to Euclidean length beta.
            double euclid = Math.Sqrt(weights.Select((double x) => (x * x)).Sum() + (bias * bias));
            for (int w = 0; w < prevLayerNeuronCount; w++)
            {
                weights[w] = beta * weights[w] / euclid;
            }
            bias = beta * bias / euclid;

            Neurons[layer][j] = new Neuron(prevLayer.Select((Neuron n) => (n.OutputValue)).ToArray(), weights, BiasNeurons[layer].OutputValue * bias, FunctionSet.AfferentFunction, FunctionSet.TransferFunction);
        }
    }
}
// Two-layer network: with both inputs low, the network must not fire.
public void Both_inputs_low()
{
    var inputA = new InputNeuron(0, 1);
    var inputB = new InputNeuron(0, 1);
    var hiddenA = new Neuron(1.5, -1);
    var hiddenB = new Neuron(.5, 1);
    var output = new OutputNeuron(.5, 1, 0);

    // Both inputs feed both hidden neurons; both hidden neurons feed the output.
    var network = new Network();
    network.Connect(inputA, hiddenA);
    network.Connect(inputA, hiddenB);
    network.Connect(inputB, hiddenA);
    network.Connect(inputB, hiddenB);
    network.Connect(hiddenA, output);
    network.Connect(hiddenB, output);

    Assert.IsFalse(network.Execute());
}
// Verifies that enabling the activation cache freezes the neuron's output
// until the cache is explicitly invalidated.
public void NeuronCanHaveAutoForwardPropagationDisabled()
{
    neuron.Connect(new BiasNeuron());
    InputNeuron input = new InputNeuron();
    neuron.Connect(input, 1);
    input.Input = 1;
    var first_time = neuron.Activation();

    // With caching on, a changed input must NOT affect the activation...
    neuron.IsCachingActivationResults = true;
    input.Input = 2;
    var with_cache = neuron.Activation();
    Assert.AreEqual(first_time, with_cache);

    // ...until the cache is invalidated, after which the neuron recomputes.
    neuron.InvalidateActivationCache();
    var without_cache = neuron.Activation();
    Assert.AreNotEqual(first_time, without_cache);
}
// Duplicate of the other TestAccumulatesWeightShift in this file: the weight
// shift accumulator is zero before backprop, non-zero after, and cleared by
// applying training.
public void TestAccumulatesWeightShift()
{
    // Build input -> n21 -> n31 with a bias on each trained neuron.
    Neuron n31 = new Neuron();
    Neuron n21 = new Neuron();
    n31.Connect(new BiasNeuron());
    n31.Connect(n21);
    InputNeuron input = new InputNeuron();
    n21.Connect(new BiasNeuron());
    n21.Connect(input);
    input.Input = 1;
    n31.SetAnswer(0.9);

    // Index 1 is the n21 connection (index 0 is the bias).
    Assert.AreEqual(n31.GetWeightShifts()[1], 0);
    n31.PropagateBackwards();
    Assert.AreNotEqual(n31.GetWeightShifts()[1], 0);
    n31.ApplyTraining(0, 1);
    Assert.AreEqual(n31.GetWeightShifts()[1], 0);
}
// Builds the final (output) layer: index 0 is a bias neuron, the remaining
// slots come from the CreateNeuron factory.
// NOTE(review): the `layer == 0` branch can only trigger when the network
// has a single layer (outputs are also inputs) — confirm this degenerate
// case is actually reachable and intended.
private void ConstructOutputLayer(int[] neurons_in_layers_without_bias)
{
    // The output layer is the last entry of the per-layer size array.
    int layer = neurons_in_layers_without_bias.Length - 1;
    neurons[layer][0] = new BiasNeuron();
    // Start at 1: slot 0 is the bias; +1 keeps the neuron count intact.
    for (int i = 1; i < neurons_in_layers_without_bias[layer] + 1; i++)
    {
        if (layer == 0)
        {
            neurons[layer][i] = new InputNeuron();
        }
        else
        {
            neurons[layer][i] = CreateNeuron();
        }
    }
}
// A second consecutive backward pass must throw CannotPropagateWithEmptyAcc.
public void ThrowsIfPropagateTwice()
{
    // Minimal chain: input -> hidden -> output, with a bias on each neuron.
    Neuron output = new Neuron();
    Neuron hidden = new Neuron();
    output.Connect(new BiasNeuron());
    output.Connect(hidden);
    InputNeuron input = new InputNeuron();
    hidden.Connect(new BiasNeuron());
    hidden.Connect(input);
    input.Input = 1;
    output.SetAnswer(1);

    output.PropagateBackwards();
    Assert.Throws(typeof(CannotPropagateWithEmptyAcc), () => output.PropagateBackwards());
}
// When the desired answer equals the neuron's actual activation, a backward
// pass must produce a delta of (approximately) zero.
public void TestCanSetAnswer()
{
    Neuron n = new Neuron();
    n.Connect(new BiasNeuron(), 1);
    InputNeuron input = new InputNeuron();
    n.Connect(input, 1);
    n.SetWeight(0, 1);
    input.Input = 1;
    // Sigmoid of the weighted sum: bias 1*1 + input 1*1 = 2.
    var desired = 1 / (1 + Math.Pow(Math.E, -2));
    n.SetAnswer(desired);
    n.PropagateBackwards();
    // The answer matches the activation exactly, so the delta should vanish.
    MyAssert.CloseTo(n.GetDelta(), 0);
    /*
     * 1. If an answer was set, then calculate like the last layer; otherwise
     *    theta and delta are required from downstream.
     * 2. In backprop every layer calculates its values and sets theta+delta
     *    for every connected neuron.
     */
}
// Test fixture setup: fresh neuron and input neuron before every test.
public void SetUp()
{
    neuron = new Neuron();
    input = new InputNeuron();
}