// Builds a fixed three-layer (input -> hidden -> output) topology and
// randomises the trainable weights of the two layers that have children.
public void Initialise(int numberInputNodes, int numberHiddenNodes, int numberOutputNodes)
{
    InputLayer = new NeuralNetworkLayer();
    HiddenLayer = new NeuralNetworkLayer();
    OutputLayer = new NeuralNetworkLayer();

    // Input layer: no parent, hidden layer as child.
    InputLayer.NumberOfNodes = numberInputNodes;
    InputLayer.NumberOfChildNodes = numberHiddenNodes;
    InputLayer.NumberOfParentNodes = 0;
    InputLayer.Initialise(numberInputNodes, new NeuralNetworkLayer[] { null, HiddenLayer });
    InputLayer.RandomiseWeights();

    // Hidden layer: input layer as parent, output layer as child.
    HiddenLayer.NumberOfNodes = numberHiddenNodes;
    HiddenLayer.NumberOfChildNodes = numberOutputNodes;
    HiddenLayer.NumberOfParentNodes = numberInputNodes;
    HiddenLayer.Initialise(numberHiddenNodes, new NeuralNetworkLayer[] { InputLayer, OutputLayer });
    HiddenLayer.RandomiseWeights();

    // Output layer: hidden layer as parent, no child — so no outgoing
    // weights and nothing to randomise.
    OutputLayer.NumberOfNodes = numberOutputNodes;
    OutputLayer.NumberOfChildNodes = 0;
    OutputLayer.NumberOfParentNodes = numberHiddenNodes;
    OutputLayer.Initialise(numberOutputNodes, new NeuralNetworkLayer[] { HiddenLayer, null });
}
// Wires up the standard input/hidden/output topology and randomizes the
// weights of the two layers that feed a child layer.
public void Initialize(int nNodesInput, int nNodesHidden, int nNodesOutput)
{
    InputLayer = new NeuralNetworkLayer();
    HiddenLayer = new NeuralNetworkLayer();
    OutputLayer = new NeuralNetworkLayer();

    // Input layer feeds the hidden layer and has no parent.
    InputLayer.NumberOfNodes = nNodesInput;
    InputLayer.NumberOfChildNodes = nNodesHidden;
    InputLayer.NumberOfParentNodes = 0;
    InputLayer.Initialize(nNodesInput, new NeuralNetworkLayer[] { null, HiddenLayer });
    InputLayer.RandomizeWeights();

    // Hidden layer sits between input and output.
    HiddenLayer.NumberOfNodes = nNodesHidden;
    HiddenLayer.NumberOfChildNodes = nNodesOutput;
    HiddenLayer.NumberOfParentNodes = nNodesInput;
    HiddenLayer.Initialize(nNodesHidden, new NeuralNetworkLayer[] { InputLayer, OutputLayer });
    HiddenLayer.RandomizeWeights();

    // Output layer is terminal: no child, hence no outgoing weights.
    OutputLayer.NumberOfNodes = nNodesOutput;
    OutputLayer.NumberOfChildNodes = 0;
    OutputLayer.NumberOfParentNodes = nNodesHidden;
    OutputLayer.Initialize(nNodesOutput, new NeuralNetworkLayer[] { HiddenLayer, null });
}
// Allocates and zeroes this layer's per-node storage, and — when a child
// layer exists — the outgoing weight matrix and bias arrays.
// NOTE(review): NumNodes is unused; the method reads the nodeCount field,
// which is presumably set by the caller beforehand — confirm at call sites.
public void Initialize(int NumNodes, NeuralNetworkLayer parent, NeuralNetworkLayer child)
{
    int i, j;

    // Per-node storage.
    neuronValues = new float[nodeCount];
    desiredValues = new float[nodeCount];
    errors = new float[nodeCount];

    if (parent != null)
    {
        parentLayer = parent;
    }

    if (child != null)
    {
        childLayer = child;

        // BUG FIX: the inner weight rows and the bias arrays must be sized by
        // childNodeCount (one entry per child node), not nodeCount. The zeroing
        // and bias loops below index them with j < childNodeCount and would
        // throw IndexOutOfRangeException whenever childNodeCount > nodeCount.
        weights = new float[nodeCount][];
        weightChanges = new float[nodeCount][];
        for (i = 0; i < nodeCount; i++)
        {
            weights[i] = new float[childNodeCount];
            weightChanges[i] = new float[childNodeCount];
        }
        biasValues = new float[childNodeCount];
        biasWeights = new float[childNodeCount];
    }
    else
    {
        // Terminal layer: no outgoing connections.
        weights = null;
        weightChanges = null;
        biasValues = null;
        biasWeights = null;
    }

    // Make sure everything contains 0s.
    for (i = 0; i < nodeCount; i++)
    {
        neuronValues[i] = 0;
        desiredValues[i] = 0;
        errors[i] = 0;
        if (childLayer != null)
        {
            for (j = 0; j < childNodeCount; j++)
            {
                weights[i][j] = 0;
                weightChanges[i][j] = 0;
            }
        }
    }

    // Bias inputs are clamped to -1; their weights start at 0.
    if (childLayer != null)
    {
        for (j = 0; j < childNodeCount; j++)
        {
            biasValues[j] = -1;
            biasWeights[j] = 0;
        }
    }
}
// Sets this layer's node count and allocates/zeroes its storage.
// ParentChild[0] is the parent layer (or null), ParentChild[1] the child
// layer (or null); only layers with a child carry weights and biases.
public void Initialise(int NumNodes, NeuralNetworkLayer[] ParentChild)
{
    NumberOfNodes = NumNodes;

    // Per-node storage.
    NeuronValues = new float[NumberOfNodes];
    DesiredValues = new float[NumberOfNodes];
    _errors = new float[NumberOfNodes];

    if (ParentChild[0] != null)
    {
        _parentLayer = ParentChild[0];
    }

    bool hasChild = ParentChild[1] != null;
    if (hasChild)
    {
        _childLayer = ParentChild[1];
        _weights = new float[NumberOfNodes, NumberOfChildNodes];
        _weightChanges = new float[NumberOfNodes, NumberOfChildNodes];
        _biasValues = new float[NumberOfChildNodes];
        _biasWeights = new float[NumberOfChildNodes];
    }
    else
    {
        // Terminal layer: drop any outgoing-weight state.
        _weights = null;
        _biasValues = null;
        _biasWeights = null;
        _weightChanges = null;
    }

    // Zero out all values and (if present) the weight matrices.
    for (int node = 0; node < NumberOfNodes; node++)
    {
        NeuronValues[node] = 0;
        DesiredValues[node] = 0;
        _errors[node] = 0;
        if (_childLayer != null)
        {
            for (int childNode = 0; childNode < NumberOfChildNodes; childNode++)
            {
                _weights[node, childNode] = 0;
                _weightChanges[node, childNode] = 0;
            }
        }
    }

    // Bias inputs are fixed at -1; their weights start at 0.
    if (_childLayer != null)
    {
        for (int childNode = 0; childNode < NumberOfChildNodes; childNode++)
        {
            _biasValues[childNode] = -1;
            _biasWeights[childNode] = 0;
        }
    }
}
// Creates an unlinked layer with defaults: sigmoid-style (non-linear)
// output, momentum disabled, and a momentum factor of 0.9 should it be
// switched on later.
public NeuralNetworkLayer()
{
    LinearOutput = false;
    UseMomentum = false;
    MomentumFactor = 0.9f;
    _parentLayer = null;
    _childLayer = null;
}
// Default-constructs a detached layer: no parent/child links, non-linear
// output, momentum off (factor pre-set to 0.9 for when it is enabled).
public NeuralNetworkLayer()
{
    LinearOutput = false;
    UseMomentum = false;
    MomentumFactor = 0.9f;
    ParentLayer = null;
    ChildLayer = null;
}
// Links this layer to its neighbours. ParentChild[0] is the parent,
// ParentChild[1] the child; a null entry leaves the existing link untouched.
public void SetLayers(NeuralNetworkLayer[] ParentChild)
{
    _parentLayer = ParentChild[0] ?? _parentLayer;
    _childLayer = ParentChild[1] ?? _childLayer;
}
// Initializes this layer's storage given its parent/child links.
// NOTE(review): NumNodes is unused; the method reads the NumberOfNodes and
// NumberOfChildNodes properties, presumably set by the caller beforehand —
// confirm at call sites.
public void Initialize(int NumNodes, NeuralNetworkLayer parent, NeuralNetworkLayer child)
{
    // Per-node storage.
    NeuronValues = new float[NumberOfNodes];
    DesiredValues = new float[NumberOfNodes];
    Errors = new float[NumberOfNodes];

    if (parent != null)
    {
        ParentLayer = parent;
    }

    if (child != null)
    {
        ChildLayer = child;
        Weights = new float[NumberOfNodes, NumberOfChildNodes];
        WeightChanges = new float[NumberOfNodes, NumberOfChildNodes];
        BiasValues = new float[NumberOfChildNodes];
        BiasWeights = new float[NumberOfChildNodes];
    }
    else
    {
        // CONSISTENCY FIX: the sibling Initialize overload explicitly clears
        // outgoing-weight state when there is no child; without this branch a
        // re-initialized terminal layer would keep stale weight/bias arrays.
        Weights = null;
        WeightChanges = null;
        BiasValues = null;
        BiasWeights = null;
    }

    // Zero-initialize everything.
    for (int i = 0; i < NumberOfNodes; i++)
    {
        NeuronValues[i] = 0.0f;
        DesiredValues[i] = 0.0f;
        Errors[i] = 0.0f;
        if (ChildLayer != null)
        {
            for (int j = 0; j < NumberOfChildNodes; j++)
            {
                Weights[i, j] = 0.0f;
                WeightChanges[i, j] = 0.0f;
            }
        }
    }

    // Bias inputs are fixed at -1; their weights start at 0.
    if (ChildLayer != null)
    {
        for (int j = 0; j < NumberOfChildNodes; j++)
        {
            BiasValues[j] = -1.0f;
            BiasWeights[j] = 0.0f;
        }
    }
}
// Adding three hidden layers after the input layer should yield four layers,
// each sized correctly and wired to the next via its weights matrix.
public void AddHiddenLayer_ThreeTimes_ShouldAddThreeHiddenLayers()
{
    // Arrange: a builder plus four distinguishable activation functions.
    var sut = new NeuralNetworkBuilderTestDouble();
    sut.CreateNew();
    var network = sut.CurrentNeuralNetwork;

    var identity = new ActivationFunction(x => x, x => 1);
    var doubler = new ActivationFunction(x => 2 * x, x => 1);
    var tripler = new ActivationFunction(x => 3 * x, x => 1);
    var quadrupler = new ActivationFunction(x => 4 * x, x => 1);

    // Act: input layer of 2 neurons, then hidden layers of 3, 4 and 5.
    sut.AddInputLayer(2, identity, false)
       .AddHiddenLayer(3, doubler, false)
       .AddHiddenLayer(4, tripler, false)
       .AddHiddenLayer(5, quadrupler, false);

    // Assert: four layers in total.
    Assert.AreEqual(4, network.Layers.Count);

    // Input layer: 2 -> 3 weight matrix.
    Assert.IsNotNull(network.InputLayer.Weights);
    Assert.AreEqual(2, network.InputLayer.Weights.GetLength(0));
    Assert.AreEqual(3, network.InputLayer.Weights.GetLength(1));

    // First hidden layer: 3 neurons, 3 -> 4 weight matrix.
    var firstHidden = network.Layers[1];
    Assert.AreEqual(3, firstHidden.Inputs.Length);
    Assert.AreEqual(doubler, firstHidden.ActivationFunction);
    Assert.IsNotNull(firstHidden.Weights);
    Assert.AreEqual(3, firstHidden.Weights.GetLength(0));
    Assert.AreEqual(4, firstHidden.Weights.GetLength(1));

    // Second hidden layer: 4 neurons, 4 -> 5 weight matrix.
    var secondHidden = network.Layers[2];
    Assert.AreEqual(4, secondHidden.Inputs.Length);
    Assert.AreEqual(tripler, secondHidden.ActivationFunction);
    Assert.IsNotNull(secondHidden.Weights);
    Assert.AreEqual(4, secondHidden.Weights.GetLength(0));
    Assert.AreEqual(5, secondHidden.Weights.GetLength(1));

    // Last hidden layer has no successor yet, so no outgoing weights.
    var thirdHidden = network.Layers[3];
    Assert.AreEqual(5, thirdHidden.Inputs.Length);
    Assert.AreEqual(quadrupler, thirdHidden.ActivationFunction);
    Assert.IsNull(thirdHidden.Weights);
}
// Builds the layer chain input -> hidden... -> output; every layer after the
// first is constructed with a reference to its predecessor.
// Throws ArgumentException when no hidden layer is supplied.
public NeuralNetwork(NeuronCount inputs, NeuronCount outputs, params NeuronCount[] hiddenLayers)
{
    if (hiddenLayers == null || !hiddenLayers.Any())
    {
        throw new ArgumentException("At least one hidden layer is required", nameof(hiddenLayers));
    }

    // Flatten the sizes into a single ordered array.
    NeuronCount[] counts = new[] { inputs }
        .Concat(hiddenLayers)
        .Concat(new[] { outputs })
        .ToArray();

    var built = new NeuralNetworkLayer[counts.Length];
    for (int i = 0; i < counts.Length; i++)
    {
        // The first layer has no predecessor to chain to.
        built[i] = i == 0
            ? new NeuralNetworkLayer(counts[i])
            : new NeuralNetworkLayer(counts[i], built[i - 1]);
    }
    layers = built;
}
// Fully-specified layer constructor: stores the node counts, allocates the
// per-node arrays, and — when a child layer is given — allocates the outgoing
// weight matrices and bias arrays (bias inputs fixed at -1).
public NeuralNetworkLayer(int nodeCount, int childNodeCount, int parentNodeCount, NeuralNetworkLayer parent, NeuralNetworkLayer child)
{
    numberOfNodes = nodeCount;
    numberOfChildNodes = childNodeCount;
    numberOfParentNodes = parentNodeCount;

    // Per-node storage (float arrays default to all zeros).
    neuronValues = new float[nodeCount];
    desiredValues = new float[nodeCount];
    errors = new float[nodeCount];

    parentLayer = parent;

    if (child == null)
    {
        // Terminal layer: the weight/bias fields keep their default null.
        return;
    }

    childLayer = child;
    weights = new float[nodeCount, numberOfChildNodes];
    weightsChanges = new float[nodeCount, numberOfChildNodes];
    biasValues = new float[numberOfChildNodes];
    biasWeights = new float[numberOfChildNodes];
    for (int childNode = 0; childNode < numberOfChildNodes; childNode++)
    {
        biasValues[childNode] = -1;
    }
}
// Constructor: creates the three empty layers of the fixed topology; they
// are sized and wired later by Initialize.
public NeuralNetwork()
{
    InputLayer = new NeuralNetworkLayer();
    HiddenLayer = new NeuralNetworkLayer();
    OutputLayer = new NeuralNetworkLayer();
}
// Wires this layer to its neighbours: ParentChild[0] is the parent,
// ParentChild[1] the child. A null entry leaves the current link as-is.
public void SetLayers(NeuralNetworkLayer[] ParentChild)
{
    ParentLayer = ParentChild[0] ?? ParentLayer;
    ChildLayer = ParentChild[1] ?? ChildLayer;
}
// Allocates and zeroes this layer's storage. ParentChild[0] is the parent
// layer (or null), ParentChild[1] the child layer (or null); only layers
// with a child carry weights and biases.
// NOTE(review): NumNodes is unused; NumberOfNodes is assumed to be set by
// the caller beforehand — confirm at call sites.
public void Initialize(int NumNodes, NeuralNetworkLayer[] ParentChild)
{
    // Per-node storage.
    NeuronValues = new float[NumberOfNodes];
    DesiredValues = new float[NumberOfNodes];
    Errors = new float[NumberOfNodes];

    if (ParentChild[0] != null)
    {
        ParentLayer = ParentChild[0];
    }

    bool hasChild = ParentChild[1] != null;
    if (hasChild)
    {
        ChildLayer = ParentChild[1];
        Weights = new float[NumberOfNodes, NumberOfChildNodes];
        WeightChanges = new float[NumberOfNodes, NumberOfChildNodes];
        BiasValues = new float[NumberOfChildNodes];
        BiasWeights = new float[NumberOfChildNodes];
    }
    else
    {
        // Terminal layer: drop any outgoing-weight state.
        Weights = null;
        BiasValues = null;
        BiasWeights = null;
        WeightChanges = null;
    }

    // Zero out every value and, when present, the weight matrices.
    for (int node = 0; node < NumberOfNodes; node++)
    {
        NeuronValues[node] = 0;
        DesiredValues[node] = 0;
        Errors[node] = 0;
        if (ChildLayer != null)
        {
            for (int childNode = 0; childNode < NumberOfChildNodes; childNode++)
            {
                Weights[node, childNode] = 0;
                WeightChanges[node, childNode] = 0;
            }
        }
    }

    // Bias inputs are fixed at -1; their weights start at 0.
    if (ChildLayer != null)
    {
        for (int childNode = 0; childNode < NumberOfChildNodes; childNode++)
        {
            BiasValues[childNode] = -1;
            BiasWeights[childNode] = 0;
        }
    }
}
// Creates a layer of the requested size, wiring every neuron to the
// previous layer.
public NeuralNetworkLayer(NeuronCount numberOfNeurons, NeuralNetworkLayer previousLayer)
{
    // Relies on the implicit NeuronCount -> int conversion, exactly as the
    // Enumerable.Range-based formulation did.
    int count = numberOfNeurons;
    var built = new Neuron[count];
    for (int i = 0; i < count; i++)
    {
        built[i] = new Neuron(previousLayer);
    }
    neurons = built;
}