/// <summary>
/// Runs an incremental prune search over hidden-layer sizes for a feedforward
/// network trained on the given EGB training file, then persists both the
/// training data and the best network found.
/// </summary>
/// <param name="dataDir">Directory (as a FileInfo) containing the data files.</param>
/// <param name="networkfiletosave">File name to save the best network to.</param>
/// <param name="trainingfile">File name of the EGB training data to load.</param>
public static void Incremental(FileInfo dataDir, string networkfiletosave, string trainingfile)
{
    FileInfo networkFile = FileUtil.CombinePath(dataDir, networkfiletosave);
    FileInfo trainingFile = FileUtil.CombinePath(dataDir, trainingfile);

    // Bail out early when the training data is missing.
    if (!trainingFile.Exists)
    {
        Console.WriteLine(@"Can't read file: " + trainingFile);
        return;
    }

    IMLDataSet trainingSet = EncogUtility.LoadEGB2Memory(trainingFile);

    // Template pattern whose input/output sizes mirror the loaded training set.
    var pattern = new FeedForwardPattern
    {
        InputNeurons = trainingSet.InputSize,
        OutputNeurons = trainingSet.IdealSize,
        ActivationFunction = new ActivationTANH()
    };

    // Two candidate hidden layers; (min, max) bounds per Encog's PruneIncremental.
    // A 0 minimum on the second layer allows single-hidden-layer candidates.
    var prune = new PruneIncremental(trainingSet, pattern, 100, 1, 10,
                                     new ConsoleStatusReportable());
    prune.AddHiddenLayer(5, 50);
    prune.AddHiddenLayer(0, 50);
    prune.Process();

    // NOTE(review): dataDir.Directory is the PARENT of dataDir — confirm the
    // training file is intentionally saved one level above the data directory.
    Encog.Util.NetworkUtil.NetworkUtility.SaveTraining(dataDir.Directory.FullName, trainingfile, trainingSet);
    EncogDirectoryPersistence.SaveObject(networkFile, prune.BestNetwork);
}
/// <summary>
/// Builds a feedforward network with 3 inputs, one 50-neuron hidden layer,
/// and 1 output, using the hyperbolic tangent activation function.
/// </summary>
/// <returns>The generated network with freshly reset (randomized) weights.</returns>
public static BasicNetwork CreateNetwork()
{
    var pattern = new FeedForwardPattern
    {
        InputNeurons = 3,
        OutputNeurons = 1,
        ActivationFunction = new ActivationTANH()
    };
    pattern.AddHiddenLayer(50);

    var network = (BasicNetwork)pattern.Generate();
    network.Reset(); // re-initialize weights before training
    return network;
}
/// <summary>
/// Constructs a feedforward network: 1 input, one 6-neuron hidden layer,
/// 1 output, sigmoid activation throughout.
/// </summary>
/// <returns>The generated network as an <c>IMLMethod</c>.</returns>
private IMLMethod CreateFeedforwardNetwork()
{
    var pattern = new FeedForwardPattern
    {
        ActivationFunction = new ActivationSigmoid(),
        InputNeurons = 1,
        OutputNeurons = 1
    };
    pattern.AddHiddenLayer(6);
    return pattern.Generate();
}
/// <summary>
/// Builds a TANH feedforward network whose input layer has one neuron per
/// board cell (Board.SIZE * Board.SIZE) and a single output neuron.
/// </summary>
/// <returns>The generated network with freshly reset weights.</returns>
// NOTE(review): method name is camelCase, violating C# PascalCase convention;
// kept as-is to avoid breaking callers.
public static BasicNetwork createNetwork()
{
    var pattern = new FeedForwardPattern
    {
        InputNeurons = Board.SIZE * Board.SIZE,
        OutputNeurons = 1,
        ActivationFunction = new ActivationTANH()
    };
    pattern.AddHiddenLayer(NEURONS_HIDDEN_1);

    var network = (BasicNetwork)pattern.Generate();
    network.Reset();
    return network;
}
/// <summary>
/// Creates a TANH feedforward network with up to two hidden layers.
/// </summary>
/// <param name="inputsize">Number of input neurons.</param>
/// <param name="outputsize">Number of output neurons.</param>
/// <param name="hiddenlayers">Neuron count of the first hidden layer (skipped if not positive).</param>
/// <param name="hidden2Layers">Neuron count of the second hidden layer (skipped if not positive).</param>
/// <returns>The generated <c>BasicNetwork</c>.</returns>
public static BasicNetwork CreateFeedforwardNetwork(int inputsize, int outputsize, int hiddenlayers, int hidden2Layers)
{
    // Construct a feedforward network. (A previous comment here claimed "Elman",
    // but FeedForwardPattern builds a plain feedforward topology.)
    var pattern = new FeedForwardPattern
    {
        ActivationFunction = new ActivationTANH(),
        InputNeurons = inputsize
    };

    // Guard against zero-sized hidden layers, consistent with SimpleFeedForward.
    if (hiddenlayers > 0)
    {
        pattern.AddHiddenLayer(hiddenlayers);
    }
    if (hidden2Layers > 0)
    {
        pattern.AddHiddenLayer(hidden2Layers);
    }
    pattern.OutputNeurons = outputsize;

    return (BasicNetwork)pattern.Generate();
}
/// <summary>
/// Constructs a sigmoid feedforward network with the given layer sizes.
/// </summary>
/// <param name="inputs">Number of input neurons.</param>
/// <param name="outputs">Number of output neurons.</param>
/// <param name="hidden">Number of neurons in the single hidden layer.</param>
/// <returns>The generated network as an <c>IMLMethod</c>.</returns>
private static IMLMethod CreateFeedforwardNetwork(int inputs, int outputs, int hidden)
{
    var pattern = new FeedForwardPattern
    {
        ActivationFunction = new ActivationSigmoid(),
        InputNeurons = inputs,
        OutputNeurons = outputs
    };
    pattern.AddHiddenLayer(hidden);
    return pattern.Generate();
}
/// <summary>
/// Create a simple feedforward neural network.
/// </summary>
/// <param name="input">The number of input neurons.</param>
/// <param name="hidden1">The number of hidden layer 1 neurons (skipped if not positive).</param>
/// <param name="hidden2">The number of hidden layer 2 neurons (skipped if not positive).</param>
/// <param name="output">The number of output neurons.</param>
/// <param name="tanh">True for hyperbolic tangent activation, false for sigmoid.</param>
/// <returns>The generated network with freshly reset weights.</returns>
public static BasicNetwork SimpleFeedForward(int input, int hidden1, int hidden2, int output, bool tanh)
{
    var pattern = new FeedForwardPattern
    {
        InputNeurons = input,
        OutputNeurons = output
    };

    if (tanh)
    {
        pattern.ActivationFunction = new ActivationTANH();
    }
    else
    {
        pattern.ActivationFunction = new ActivationSigmoid();
    }

    // Add only the hidden layers that were actually requested, in order.
    foreach (var layerSize in new[] { hidden1, hidden2 })
    {
        if (layerSize > 0)
        {
            pattern.AddHiddenLayer(layerSize);
        }
    }

    var result = (BasicNetwork)pattern.Generate();
    result.Reset();
    return result;
}
/// <summary>
/// Create a simple feedforward neural network.
/// </summary>
/// <param name="input">The number of input neurons.</param>
/// <param name="hidden1">The number of hidden layer 1 neurons (skipped if not positive).</param>
/// <param name="hidden2">The number of hidden layer 2 neurons (skipped if not positive).</param>
/// <param name="output">The number of output neurons.</param>
/// <param name="tanh">True for hyperbolic tangent activation, false for sigmoid.</param>
/// <returns>The generated network with freshly reset weights.</returns>
public static BasicNetwork SimpleFeedForward(int input, int hidden1, int hidden2, int output, bool tanh)
{
    // Rewritten from decompiler-generated goto spaghetti. The original's
    // unsigned guards (e.g. "(uint)output < 0" — always false, and
    // "(uint)a + (uint)b >= 0" — always true) made exactly one path
    // reachable, which reduces to this straight-line construction.
    var pattern = new FeedForwardPattern
    {
        InputNeurons = input,
        OutputNeurons = output
    };

    if (tanh)
    {
        pattern.ActivationFunction = new ActivationTANH();
    }
    else
    {
        pattern.ActivationFunction = new ActivationSigmoid();
    }

    if (hidden1 > 0)
    {
        pattern.AddHiddenLayer(hidden1);
    }
    if (hidden2 > 0)
    {
        pattern.AddHiddenLayer(hidden2);
    }

    var network = (BasicNetwork)pattern.Generate();
    network.Reset();
    return network;
}