/// <summary>
/// Creates an empty network that scores with <paramref name="ef"/> and
/// penalizes weights with <paramref name="rf"/>. Layers and training data
/// start out empty and are populated later.
/// </summary>
public Network(IRegularizationFunction rf, IErrorFunction ef)
{
    errorFunction = ef;
    regularizationFunction = rf;

    // Start with no layers and no training samples.
    layers = new List<Layer>();
    TrainData = new List<List<List<double>>>();
}
/// <summary>
/// Builds a fully connected network: an input layer, one hidden layer per entry of
/// <paramref name="hiddenLayerCounts"/>, and an output layer. A layer whose 1-based
/// index appears in <see cref="DropoutLayerOptions.DropoutLayerIndices"/> is built
/// with the configured dropout probability; all other layers use 0.
/// </summary>
/// <remarks>
/// NOTE(review): the final cast of the last hidden layer to <c>HiddenLayer</c> throws
/// <c>InvalidCastException</c> when <paramref name="hiddenLayerCounts"/> is empty
/// (the previous layer is then the input layer) — confirm callers always pass at
/// least one hidden layer.
/// </remarks>
public static Network BuildNetwork(Random random, ICostFunction costFunction, IRegularizationFunction regularizationFunction, WeightIntializerType weightIntializerType, DropoutLayerOptions dropoutLayerOptions, int inputNeuronCount, int outputNeuronCount, params int[] hiddenLayerCounts)
{
    Network network = new Network(costFunction, regularizationFunction, dropoutLayerOptions, random);
    network.InputLayer = InputLayer.BuildInputLayer(null, inputNeuronCount, random);

    Layer previousLayer = network.InputLayer;
    int dropoutLayerIndex = 1;

    foreach (int currentLayerCount in hiddenLayerCounts)
    {
        bool isDropoutLayer = dropoutLayerOptions.DropoutLayerIndices.Contains(dropoutLayerIndex);
        HiddenLayer hiddenLayer = HiddenLayer.BuildHiddenLayer(
            BuildWeightInitializer(weightIntializerType, previousLayer, random),
            previousLayer,
            currentLayerCount,
            isDropoutLayer ? dropoutLayerOptions.ProbabilityOfDropout : 0,
            random);
        network.HiddenLayers.Add(hiddenLayer);
        previousLayer = hiddenLayer;
        dropoutLayerIndex++;
    }

    bool outputIsDropoutLayer = dropoutLayerOptions.DropoutLayerIndices.Contains(dropoutLayerIndex);
    network.OutputLayer = OutputLayer.BuildOutputLayer(
        BuildWeightInitializer(weightIntializerType, previousLayer, random),
        (HiddenLayer)previousLayer,
        outputNeuronCount,
        outputIsDropoutLayer ? dropoutLayerOptions.ProbabilityOfDropout : 0,
        random);

    return network;
}

/// <summary>
/// Selects the weight initializer for a layer that is fed by <paramref name="previousLayer"/>.
/// Extracted from the two duplicated switches in <see cref="BuildNetwork"/>; unlike the
/// original, an unrecognized enum value now fails fast here instead of silently leaving
/// the builder null (which would surface later as a NullReferenceException deep inside
/// layer construction).
/// </summary>
private static IWeightBuilder BuildWeightInitializer(WeightIntializerType weightIntializerType, Layer previousLayer, Random random)
{
    switch (weightIntializerType)
    {
        case WeightIntializerType.RandomGaussianWithNeuronCount:
            return new RandomGaussianWithNeuronCount(previousLayer.Neurons.Count, 0, random);

        case WeightIntializerType.RandomNormal:
            return new RandomGaussian(0, 1, random);

        default:
            throw new ArgumentOutOfRangeException(nameof(weightIntializerType), weightIntializerType, "Unknown weight initializer type.");
    }
}
/// <summary>
/// A weighted connection from <paramref name="source"/> to <paramref name="dest"/>.
/// The id encodes both endpoint ids so it is unique per node pair.
/// </summary>
public Link(Node source, Node dest, IRegularizationFunction regularizationFunction, bool initZero)
{
    Id = $"{source.Id}-{dest.Id}";
    Source = source;
    Dest = dest;
    RegularizationFunction = regularizationFunction;

    if (initZero)
    {
        Weight = 0;
    }
    // When initZero is false, Weight keeps whatever value its declaration assigns
    // (presumably a random initial weight — confirm against the Weight definition).
}
/// <summary>
/// Internal constructor used by the <c>BuildNetwork</c> factory: stores the cost,
/// regularization, and dropout configuration plus the RNG, and starts with no
/// hidden layers.
/// </summary>
private Network(ICostFunction costFunction, IRegularizationFunction regularizationFunction, DropoutLayerOptions dropoutLayerOptions, Random rand)
{
    _costFunction = costFunction;
    _regularizationFunction = regularizationFunction;
    _dropoutLayerOptions = dropoutLayerOptions;
    NetworkRandom = rand;

    // Hidden layers are appended by the factory as it builds the network.
    HiddenLayers = new List<HiddenLayer>();
}
/// <summary>
/// Creates a synapse wiring <paramref name="source"/> to <paramref name="destination"/>,
/// registers it on both neurons, and returns it. The initial weight comes from the
/// parameter generator; <paramref name="regularization"/> may be null (no regularization).
/// </summary>
public Synapse Link(Neuron source, Neuron destination, IRegularizationFunction regularization = null)
{
    var synapse = new Synapse
    {
        Source = source,
        Destination = destination,
        Regularization = regularization,
        Weight = _parameterGenerator.GenerateSynapseWeight(),
    };

    // Make the connection visible from both ends.
    source.Outputs.Add(synapse);
    destination.Inputs.Add(synapse);

    return synapse;
}
/// <summary>
/// Builds a fully connected network as a list of layers, each layer a list of nodes.
/// Input nodes (layer 0) take their ids from <paramref name="inputIds"/>; all other
/// nodes get sequential numeric ids starting at 1. Nodes in the last layer use
/// <paramref name="outputActivation"/>; all others use <paramref name="activation"/>.
/// Every non-input node is linked to every node of the preceding layer.
/// </summary>
public static List <List <Node> > BuildNetwork( List <int> networkShape, IActivationFunction activation, IActivationFunction outputActivation, IRegularizationFunction regularization, List <string> inputIds, bool initZero)
{
    var network = new List<List<Node>>();
    var layerCount = networkShape.Count;
    var nextId = 1; // ids for non-input nodes; input nodes use the caller-supplied ids

    for (var layer = 0; layer < layerCount; layer++)
    {
        var isInput = layer == 0;
        var isOutput = layer == layerCount - 1;

        var nodes = new List<Node>();
        network.Add(nodes);

        for (var n = 0; n < networkShape[layer]; n++)
        {
            string nodeId;
            if (isInput)
            {
                // Assumes inputIds has at least networkShape[0] entries — TODO confirm.
                nodeId = inputIds[n];
            }
            else
            {
                nodeId = nextId.ToString();
                nextId++;
            }

            var node = new Node(nodeId, isOutput ? outputActivation : activation, initZero);
            nodes.Add(node);

            if (!isInput)
            {
                // Fully connect the new node to every node of the previous layer.
                foreach (var prevNode in network[layer - 1])
                {
                    var link = new Link(prevNode, node, regularization, initZero);
                    prevNode.Outputs.Add(link);
                    node.InputLinks.Add(link);
                }
            }
        }
    }

    return network;
}
/// <summary>
/// Create a multilayer perceptron: a fully connected feed-forward network with one
/// layer of neurons per entry of <paramref name="layerSizes"/>.
/// </summary>
/// <param name="layerSizes">Number of neurons per layer; first entry is the input layer, last is the output layer</param>
/// <param name="activationFunction">Activation function for every input and hidden node</param>
/// <param name="outputActivationFunction">Activation function for the last layer</param>
/// <param name="regularizationFunction">Regularization function applied to every synapse</param>
/// <param name="neuronFactory">Neuron factory used to create neurons</param>
/// <param name="synapseFactory">Synapse factory used to create synapses</param>
/// <returns>New network exposing the input and output neuron layers</returns>
public static Network CreateMultilayerPerceptron(int[] layerSizes, IActivationFunction activationFunction, IActivationFunction outputActivationFunction, IRegularizationFunction regularizationFunction, NeuronFactory neuronFactory, SynapseFactory synapseFactory)
{
    // Create the input layer. It doubles as the first "previous layer"; the original
    // allocated a redundant parallel array and copied every neuron into both.
    var inputNeurons = new Neuron[layerSizes[0]];
    for (var i = 0; i < inputNeurons.Length; i++)
    {
        inputNeurons[i] = neuronFactory.CreateNeuron(activationFunction);
    }

    var prevLayer = inputNeurons;

    // Create the hidden layers, fully connecting each neuron to the previous layer.
    for (var i = 1; i < layerSizes.Length - 1; i++)
    {
        var layer = new Neuron[layerSizes[i]];
        for (var j = 0; j < layer.Length; j++)
        {
            var neuron = neuronFactory.CreateNeuron(activationFunction);
            layer[j] = neuron;
            for (var k = 0; k < prevLayer.Length; k++)
            {
                synapseFactory.Link(prevLayer[k], neuron, regularizationFunction);
            }
        }
        prevLayer = layer;
    }

    // Create the output layer, fully connected to the last hidden layer
    // (or directly to the input layer when there are no hidden layers).
    var outputNeurons = new Neuron[layerSizes[layerSizes.Length - 1]];
    for (var i = 0; i < outputNeurons.Length; i++)
    {
        var outputNeuron = neuronFactory.CreateOutputNeuron(outputActivationFunction);
        outputNeurons[i] = outputNeuron;
        for (var j = 0; j < prevLayer.Length; j++)
        {
            synapseFactory.Link(prevLayer[j], outputNeuron, regularizationFunction);
        }
    }

    return new Network { InputLayer = inputNeurons, OutputLayer = outputNeurons };
}