### AddLayer

`public void AddLayer(ILayer layer)`

| Parameter | Type | Description |
| --- | --- | --- |
| layer | ILayer | The layer to be added to the network. |

Returns: `void`.
/// <summary>
/// Runs after preprocessing completes: builds a three-layer sigmoid network
/// sized from the preprocessed data, then trains it with resilient
/// propagation for up to 200 epochs, reporting progress through the worker.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Completion details of the preprocessing run (unused).</param>
private void Preprocessing_Completed(object sender, RunWorkerCompletedEventArgs e)
{
    worker.ReportProgress(0, "Creating Network...");

    // Input and output layer sizes come from the preprocessed data set.
    // FIX: local renamed from PascalCase 'Network' to camelCase per convention.
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.InputSize));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 50));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.IdealSize));
    network.Structure.FinalizeStructure();
    network.Reset();
    DataContainer.NeuralNetwork.Network = network;

    var training = new ResilientPropagation(DataContainer.NeuralNetwork.Network, DataContainer.NeuralNetwork.Data);
    worker.ReportProgress(0, "Running Training: Epoch 0");
    for (int i = 0; i < 200; i++)
    {
        training.Iteration();
        // String concatenation calls ToString() implicitly; output is unchanged.
        worker.ReportProgress(0, "Running Training: Epoch " + (i + 1) + " Current Training Error : " + training.Error);
        // FIX: was 'worker.CancellationPending == true' — redundant bool comparison.
        if (worker.CancellationPending)
        {
            completed = true;
            return;
        }
    }
    completed = true;
}
/// <summary>
/// Builds a 2-4-1 sigmoid network, trains it on the XOR data with plain
/// backpropagation until the error falls below 0.0001, then prints the
/// network's answer for every training pair.
/// </summary>
public override void Run()
{
    testNetwork = new BasicNetwork();
    testNetwork.AddLayer(new BasicLayer(null, true, 2));
    testNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
    testNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    testNetwork.Structure.FinalizeStructure();
    testNetwork.Reset();

    // Training data for the XOR truth table.
    IMLDataSet xorData = new BasicMLDataSet(XORInput, XORIdeal);

    // Plain backpropagation is used here; the Encog manual recommends
    // ResilientPropagation as the best general-purpose trainer.
    IMLTrain trainer = new Backpropagation(testNetwork, xorData);

    int epoch = 0;
    while (true)
    {
        trainer.Iteration();
        epoch++;
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + trainer.Error);
        if (trainer.Error <= 0.0001)
        {
            break;
        }
    }

    // Show the trained network's output for each pair.
    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair sample in xorData)
    {
        IMLData result = testNetwork.Compute(sample.Input);
        Console.WriteLine(sample.Input[0] + @"," + sample.Input[1] + @", actual=" + result[0] + @",ideal=" + sample.Ideal[0]);
    }
}
/// <summary>
/// Program entry point.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    // Hand-built 2-3-1 sigmoid network (no factory).
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(null, true, 2));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    net.Structure.FinalizeStructure();
    net.Reset();

    // XOR truth table as the training set.
    IMLDataSet xorData = new BasicMLDataSet(XORInput, XORIdeal);

    // Resilient propagation until the error drops below 1%.
    IMLTrain trainer = new ResilientPropagation(net, xorData);
    int epoch = 0;
    do
    {
        trainer.Iteration();
        epoch++;
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + trainer.Error);
    } while (trainer.Error > 0.01);

    // Print the trained network's output for each pair.
    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair sample in xorData)
    {
        IMLData answer = net.Compute(sample.Input);
        Console.WriteLine(sample.Input[0] + @"," + sample.Input[1] + @", actual=" + answer[0] + @",ideal=" + sample.Ideal[0]);
    }
}
/// <summary>
/// Trains a 2-3-1 sigmoid network on XOR with resilient propagation until
/// the error drops below 1%, then prints the result for each training pair.
/// </summary>
/// <param name="args">Not used.</param>
static void Main(string[] args)
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    var trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    var train = new ResilientPropagation(network, trainingSet);

    // FIX: removed the unused 'epoch' local (declared but never incremented or read).
    do
    {
        train.Iteration();
    } while (train.Error > 0.01);
    train.FinishTraining();

    foreach (var pair in trainingSet)
    {
        var output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @", " + pair.Input[1] + @" , actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
    }

    EncogFramework.Instance.Shutdown();
    Console.ReadLine();
}
/// <summary>
/// Verifies that the finite-difference Hessian matches the chain-rule
/// Hessian (compared to 4 significant digits) on the XOR training set,
/// starting from deterministic weights.
/// </summary>
public void TestSingleOutput()
{
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(null, true, 2));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    net.Structure.FinalizeStructure();
    // Deterministic weights so the comparison is repeatable.
    new ConsistentRandomizer(-1, 1).Randomize(net);

    IMLDataSet xorData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);

    var fd = new HessianFD();
    fd.Init(net, xorData);
    fd.Compute();

    var cr = new HessianCR();
    cr.Init(net, xorData);
    cr.Compute();

    Assert.IsTrue(cr.HessianMatrix.equals(fd.HessianMatrix, 4));
}
/// <summary>
/// Times ITERATIONS epochs of Encog backpropagation on the given data.
/// </summary>
/// <param name="input">Training inputs, one row per sample.</param>
/// <param name="output">Expected outputs matching <paramref name="input"/>.</param>
/// <returns>Elapsed wall-clock time in milliseconds.</returns>
public static long BenchmarkEncog(double[][] input, double[][] output)
{
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(null, true, input[0].Length));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, HIDDEN_COUNT));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), false, output[0].Length));
    net.Structure.FinalizeStructure();
    net.Reset(23); // constant seed for repeatable testing

    IMLDataSet trainingData = new BasicMLDataSet(input, output);
    IMLTrain trainer = new Backpropagation(net, trainingData, 0.7, 0.7);

    // Time only the learning loop, not network construction.
    var timer = Stopwatch.StartNew();
    for (int epoch = 0; epoch < ITERATIONS; epoch++)
    {
        trainer.Iteration();
    }
    timer.Stop();
    return timer.ElapsedMilliseconds;
}
/// <summary>
/// Trains a 2-2-1 sigmoid XOR network with resilient propagation until the
/// error drops below 1%, then prints the output for every training pair.
/// </summary>
/// <param name="args">Not used.</param>
static void Main(string[] args)
{
    // create a neural network without using a factory
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    IMLTrain train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine($"Epoch #{epoch} Error: {train.Error}");
        epoch++;
    } while (train.Error > 0.01);
    train.FinishTraining();

    Console.WriteLine("Neural Network Results:");
    foreach (IMLDataPair iPair in trainingSet)
    {
        IMLData output = network.Compute(iPair.Input);
        // FIX: the second printed value was iPair.Input[0] twice; it must be Input[1].
        Console.WriteLine($"{iPair.Input[0]}, {iPair.Input[1]}, actual={output[0]}, ideal={iPair.Ideal[0]}");
    }

    EncogFramework.Instance.Shutdown();
    Console.ReadKey();
}
/// <summary>
/// Builds the benchmark network (input → hidden → output, sizes taken from
/// MultiThreadBenchmark constants) with randomized weights.
/// </summary>
/// <returns>The freshly constructed, randomized network.</returns>
public BasicNetwork generateNetwork()
{
    var result = new BasicNetwork();
    result.AddLayer(new BasicLayer(MultiThreadBenchmark.INPUT_COUNT));
    result.AddLayer(new BasicLayer(MultiThreadBenchmark.HIDDEN_COUNT));
    result.AddLayer(new BasicLayer(MultiThreadBenchmark.OUTPUT_COUNT));
    result.Structure.FinalizeStructure();
    result.Reset();
    return result;
}
/// <summary>
/// Creates the regression network: biased input layer, TANH hidden layer of
/// the requested size, and a single linear output neuron. Also records the
/// hidden-node count on the instance.
/// </summary>
/// <param name="inputnodes">Number of input neurons.</param>
/// <param name="hiddennodes">Number of hidden neurons.</param>
public void Create(int inputnodes, int hiddennodes)
{
    this.hiddennodes = hiddennodes;

    network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, inputnodes));
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, hiddennodes));
    network.AddLayer(new BasicLayer(new ActivationLinear(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();
}
/// <summary>
/// Builds the benchmark network (INPUT_COUNT → HIDDEN_COUNT → OUTPUT_COUNT)
/// with randomized weights.
/// </summary>
/// <returns>The freshly constructed, randomized network.</returns>
public BasicNetwork generateNetwork()
{
    var result = new BasicNetwork();
    result.AddLayer(new BasicLayer(INPUT_COUNT));
    result.AddLayer(new BasicLayer(HIDDEN_COUNT));
    result.AddLayer(new BasicLayer(OUTPUT_COUNT));
    result.Structure.FinalizeStructure();
    result.Reset();
    return result;
}
/// <summary>
/// Creates an untrained 2-3-1 network with default-activation layers and
/// randomized weights.
/// </summary>
/// <returns>The freshly constructed network.</returns>
public static BasicNetwork CreateThreeLayerNet()
{
    var net = new BasicNetwork();
    foreach (var size in new[] { 2, 3, 1 })
    {
        net.AddLayer(new BasicLayer(size));
    }
    net.Structure.FinalizeStructure();
    net.Reset();
    return net;
}
/// <summary>
/// Creates the neural network (4 linear inputs, 6 TANH hidden neurons,
/// 2 TANH outputs) and persists it to the given file.
/// (Documentation translated from Portuguese.)
/// </summary>
/// <param name="source">FileInfo with the path where the network is saved.</param>
private static void CreateNetwork(FileInfo source)
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationLinear(), true, 4));
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, 6));
    network.AddLayer(new BasicLayer(new ActivationTANH(), false, 2));
    network.Structure.FinalizeStructure();
    network.Reset();
    // FIX: removed the redundant (BasicNetwork) cast — 'network' already has that static type.
    EncogDirectoryPersistence.SaveObject(source, network);
}
/// <summary>
/// Creates a symmetric-Elliott network sized INPUT_OUTPUT → HIDDEN →
/// INPUT_OUTPUT with randomized weights.
/// </summary>
/// <returns>The freshly constructed network.</returns>
public static BasicNetwork createElliott()
{
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(null, true, INPUT_OUTPUT));
    net.AddLayer(new BasicLayer(new ActivationElliottSymmetric(), true, HIDDEN));
    net.AddLayer(new BasicLayer(new ActivationElliottSymmetric(), false, INPUT_OUTPUT));
    net.Structure.FinalizeStructure();
    net.Reset();
    return net;
}
/// <summary>
/// Builds the TANH game-evaluation network, sized from the Van der Waerden
/// number for the current color count and progression length.
/// </summary>
/// <returns>The finalized (but not weight-randomized) network.</returns>
private BasicNetwork ConstructNetwork()
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, VanDerWaerdenGameRules.VanDerWaerdenNumber(this.NColors, this.ProgressionLength) - 1));
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, VanDerWaerdenGameRules.VanDerWaerdenNumber(this.NColors, this.ProgressionLength)));
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, 1));
    network.Structure.FinalizeStructure();
    // FIX: this log call sat after the return statement and was unreachable.
    Debug.Print("Created new Network with parameters nColors = {0} and progression length = {1}.", NColors, ProgressionLength);
    // NOTE(review): unlike the other factories in this codebase, this one never
    // calls network.Reset(); confirm leaving the weights unrandomized is intended.
    return network;
}
/// <summary>
/// Builds a 4-6-2 network (linear input, TANH hidden and output layers, all
/// biased) and persists it to the trained-network file.
/// </summary>
/// <param name="fileOps">Supplies the destination file for the saved network.</param>
public static void CreateNetwork(FileOps fileOps)
{
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(new ActivationLinear(), true, 4));
    net.AddLayer(new BasicLayer(new ActivationTANH(), true, 6));
    net.AddLayer(new BasicLayer(new ActivationTANH(), true, 2));
    net.Structure.FinalizeStructure();
    net.Reset();
    EncogDirectoryPersistence.SaveObject(fileOps.TrainedNeuralNetworkFile, net);
}
/// <summary>
/// Creates an untrained 2-2-1 sigmoid XOR network with randomized weights.
/// </summary>
/// <returns>The freshly constructed network.</returns>
private static BasicNetwork CreateNetwork()
{
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(null, true, 2));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    net.Structure.FinalizeStructure();
    net.Reset();
    return net;
}
/// <summary>
/// Builds an untrained 2-4-1 sigmoid XOR network. The weights are set with a
/// ConsistentRandomizer rather than random matrix data, so they provide a
/// constant, repeatable starting point for the unit tests.
/// (The original comment here was truncated; reconstructed from the code.)
/// </summary>
public static BasicNetwork CreateXORNetworkUntrained()
{
    BasicNetwork network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    // Deterministic weights in [-1, 1] (instead of network.Reset()) keep tests repeatable.
    (new ConsistentRandomizer(-1, 1)).Randomize(network);
    return network;
}
/// <summary>
/// Builds a small particle-swarm training demo: a 5-1-1 network and a
/// single-pair data set, trained via NeuralPSO scored by TrainingSetScore.
/// </summary>
public PSO()
{
    network = new BasicNetwork();
    network.AddLayer(new BasicLayer(5));
    network.AddLayer(new BasicLayer(1));
    network.AddLayer(new BasicLayer(1));
    network.Structure.FinalizeStructure();
    network.Reset();

    // NOTE(review): the ideal vector below has 5 values while the output layer
    // has only 1 neuron — confirm the intended data/network sizes match.
    IMLDataSet dataSet = new BasicMLDataSet();
    dataSet.Add(new BasicMLData(new double[] { 1.0, 4.0, 3.0, 4.0, 5.0 }),
        new BasicMLData(new double[] { 2.0, 4.0, 6.0, 8.0, 10 }));

    // Swarm of 5 particles; candidate weights drawn from [0, 10].
    train = new NeuralPSO(network, new RangeRandomizer(0, 10), new TrainingSetScore(dataSet), 5);
}
/// <summary>
/// Generates a two-layer linear network (input and output layers only) and
/// randomizes its weights into [-0.5, 0.5].
/// FIX: removed the decompiler artifact 'if (0 == 0)' guard, which was always
/// true and left a duplicate, unreachable return.
/// </summary>
/// <returns>The generated network.</returns>
public IMLMethod Generate()
{
    BasicNetwork method = new BasicNetwork();
    ILayer inputLayer = new BasicLayer(new ActivationLinear(), true, this._xcfe830a7176c14e5);
    ILayer outputLayer = new BasicLayer(new ActivationLinear(), false, this._x8f581d694fca0474);
    method.AddLayer(inputLayer);
    method.AddLayer(outputLayer);
    method.Structure.FinalizeStructure();
    new RangeRandomizer(-0.5, 0.5).Randomize(method);
    return method;
}
/// <summary>
/// Generates an Elman-style recurrent network: the input layer's context is
/// fed by the hidden layer.
/// FIX: removed the decompiler artifact '(0 != 0) || (0 == 0)' guard, which
/// was always true; the add order and context wiring are unchanged.
/// </summary>
/// <returns>The generated, weight-randomized network.</returns>
public IMLMethod Generate()
{
    BasicNetwork network = new BasicNetwork();
    var inputLayer = new BasicLayer(this._x2a5a4034520336f3, true, this._xcfe830a7176c14e5);
    network.AddLayer(inputLayer);
    var hiddenLayer = new BasicLayer(this._x2a5a4034520336f3, true, this._xdf89f9cf9fc3d06f);
    network.AddLayer(hiddenLayer);
    network.AddLayer(new BasicLayer(null, false, this._x8f581d694fca0474));
    // Recurrent context: the hidden layer feeds the input layer's context units.
    inputLayer.ContextFedBy = hiddenLayer;
    network.Structure.FinalizeStructure();
    network.Reset();
    return network;
}
/// <summary>
/// Constructs the feed-forward model: a bias-configurable input layer of
/// FirstLayerSize, one hidden layer per entry in Layers, and an output layer
/// of LastLayerSize. TANH activations are used for bipolar data, sigmoid
/// otherwise; only the output layer is bias-free.
/// </summary>
public void BuildModel()
{
    // Shared activation choice for hidden and output layers.
    IActivationFunction NewActivation() =>
        Activation == ActivationType.Bipolar
            ? new ActivationTANH()
            : (IActivationFunction)new ActivationSigmoid();

    Model = new BasicNetwork();
    Model.AddLayer(new BasicLayer(null, UseBias, FirstLayerSize));
    foreach (int size in Layers)
    {
        Model.AddLayer(new BasicLayer(NewActivation(), UseBias, size));
    }
    Model.AddLayer(new BasicLayer(NewActivation(), false, LastLayerSize));
    Model.Structure.FinalizeStructure();
    Model.Reset();
}
/// <summary>
/// Builds a small 3-3-2 network: linear input (no bias), TANH hidden layer
/// (biased), linear output (no bias), then randomizes its weights.
/// </summary>
public static void Experiment()
{
    BasicNetwork net = new BasicNetwork();
    net.AddLayer(new BasicLayer(new ActivationLinear(), false, 3));
    net.AddLayer(new BasicLayer(new ActivationTANH(), true, 3));
    net.AddLayer(new BasicLayer(new ActivationLinear(), false, 2));
    net.Structure.FinalizeStructure();
    // Assign random weights? (translated from the original Russian comment)
    net.Reset();
    // NOTE(review): the network is built and randomized but never used,
    // stored, or returned by this method — confirm that is intentional.
}
/// <summary>
/// Program entry point: trains a 2-3-1 sigmoid network on XOR to 1% error
/// and prints the evaluation results.
/// </summary>
/// <param name="args">Not used.</param>
public static void Main(String[] args)
{
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
    net.Structure.FinalizeStructure();
    net.Reset();

    IMLDataSet xorSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    EncogUtility.TrainToError(net, xorSet, 0.01);

    // test the neural network
    Console.WriteLine("Neural Network Results:");
    EncogUtility.Evaluate(net, xorSet);
}
/// <summary>
/// Generate the network: biased linear input layer, bias-free linear output
/// layer, weights randomized into [-0.5, 0.5].
/// </summary>
/// <returns>The generated network.</returns>
public IMLMethod Generate()
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationLinear(), true, _inputNeurons));
    network.AddLayer(new BasicLayer(new ActivationLinear(), false, _outputNeurons));
    network.Structure.FinalizeStructure();
    new RangeRandomizer(-0.5d, 0.5d).Randomize(network);
    return network;
}
/// <summary>
/// Trains the signature-verification network: genuine signatures are labeled
/// 1 and forgeries 0, then resilient propagation runs for a fixed number of
/// iterations.
/// </summary>
/// <param name="dataSet">Source of signature and forgery feature matrices.</param>
/// <returns>Always 1 (kept for interface compatibility).</returns>
public int Train(DataSet dataSet)
{
    // Input layer is sized for 8x21 feature matrices; hidden sizes scale
    // from (8*21 + 4) by the two layer parameters.
    Network = new BasicNetwork();
    Network.AddLayer(new BasicLayer(null, true, 8 * 21));
    var first = ((8 * 21 + 4) * FirstLayerParameter);
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)first));
    var second = ((8 * 21 + 4) * SecondLayerParameter);
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)second));
    Network.AddLayer(new BasicLayer(null, false, 1));
    Network.Structure.FinalizeStructure();
    Network.Reset();

    // Flatten every sample into a row; genuine = 1, forgery = 0.
    int total = dataSet.Signatures.Count + dataSet.Forgeries.Count;
    var set = new double[total][];
    var ideal = new double[total][];
    for (int i = 0; i < dataSet.Signatures.Count; i++)
    {
        set[i] = dataSet.Signatures[i].Data.Cast<double>().ToArray();
        ideal[i] = new double[] { 1 };
    }
    for (int i = dataSet.Signatures.Count; i < total; i++)
    {
        set[i] = dataSet.Forgeries[i - dataSet.Signatures.Count].Data.Cast<double>().ToArray();
        ideal[i] = new double[] { 0 };
    }

    IMLDataSet trainingSet = new BasicMLDataSet(set, ideal);
    IMLTrain train = new ResilientPropagation(Network, trainingSet);

    // Fixed 9999 iterations, matching the original do/while bounds; the loop
    // deliberately ignores train.Error.
    // FIX: removed the unused 'errors' list (written but never read) and the
    // commented-out dead code.
    // NOTE(review): consider an error-based stopping criterion instead.
    for (int epoch = 1; epoch < 10000; epoch++)
    {
        train.Iteration();
    }
    train.FinishTraining();
    return 1;
}
/// <summary>
/// Builds the predictor: stores the output window and data source, loads the
/// training/validation split, constructs a three-layer sigmoid network sized
/// to the CSV columns, and prepares a backpropagation trainer over the
/// training portion.
/// </summary>
public Predictor(TextBox txtOutput, CSVData data, int hiddenNodes, double percentValidation)
{
    m_txtOutputWindow = txtOutput;
    m_data = data;

    // Populate the input and output arrays.
    LoadData(percentValidation);

    // Assemble the network: biased input and hidden layers, sigmoid output.
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(null, true, m_data.InputNodes));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, hiddenNodes));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), false, m_data.OutputNodes));
    net.Structure.FinalizeStructure();
    net.Reset();
    m_network = net;

    var trainingData = new BasicMLDataSet(m_inputTraining, m_outputTraining);
    m_train = new Backpropagation(m_network, trainingData);
}
/// <summary>
/// Round-trips a large (200-200-200-200) sigmoid network through EG-file
/// persistence and verifies the reloaded weights match the originals.
/// </summary>
public void testPersistLargeEG()
{
    var original = new BasicNetwork();
    original.AddLayer(new BasicLayer(null, true, 200));
    original.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 200));
    original.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 200));
    original.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 200));
    original.Structure.FinalizeStructure();
    original.Reset();

    EncogDirectoryPersistence.SaveObject(EG_FILENAME, original);
    var reloaded = (BasicNetwork)EncogDirectoryPersistence.LoadObject(EG_FILENAME);

    // Weight vectors should be numerically identical after the round trip.
    double distance = EngineArray.EuclideanDistance(
        original.Structure.Flat.Weights, reloaded.Structure.Flat.Weights);
    Assert.IsTrue(distance < 0.01);
}
/// <summary>
/// Builds an MLP with the requested hidden-layer count and size, bias
/// setting, activation type, and problem type; the input layer is sized from
/// the training data loaded from the given file.
/// </summary>
public MLPNetwork(int layersCount, int neuronsCount, bool bias, ActivationFunctionType aft, ProblemType problemType, string inputFileName)
{
    this.layersCount = layersCount;
    this.neuronsCount = neuronsCount;
    this.bias = bias;
    this.activationFunType = aft;
    this.problemType = problemType;

    // Load first: the input layer is sized from trainingData.InputSize.
    LoadTrainingData(inputFileName);

    network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, bias, trainingData.InputSize));
    for (int hidden = 0; hidden < layersCount; hidden++)
    {
        network.AddLayer(new BasicLayer(CreateActivationFunction(), bias, neuronsCount));
    }
    network.AddLayer(new BasicLayer(CreateActivationFunction(), false, outputSize));
    network.Structure.FinalizeStructure();
    network.Reset();
}
/// <summary>
/// Trains a 2-3-1 sigmoid XOR network to 1% error, evaluates it, then
/// converts it to a freeform network and evaluates that as well before
/// shutting Encog down.
/// </summary>
/// <param name="app">Holds arguments and other info (unused).</param>
public void Execute(IExampleInterface app)
{
    // Hand-built network, no factory.
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(null, true, 2));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    net.Structure.FinalizeStructure();
    net.Reset();

    // XOR truth table as the training set.
    var xorData = new BasicMLDataSet(XORInput, XORIdeal);
    EncogUtility.TrainToError(net, xorData, 0.01);
    EncogUtility.Evaluate(net, xorData);

    // Same weights in freeform representation — evaluated for comparison.
    var freeform = new FreeformNetwork(net);
    EncogUtility.Evaluate(freeform, xorData);

    EncogFramework.Instance.Shutdown();
}
/// <summary>
/// Trains a network on randomly generated input series (each paired against
/// the first series) and returns the result of CreateEval.TrainNetworks.
/// </summary>
/// <param name="inputs">Number of input values per sample.</param>
/// <param name="predictWindow">Number of values to predict.</param>
/// <returns>The value reported by CreateEval.TrainNetworks for the trained network.</returns>
public static double RandomTrainerMethod(int inputs, int predictWindow)
{
    // Four random input series, each paired against the first one.
    // FIX: the original also built pair5/pair6 (and inp5/inp6) but never added
    // them to the data set; those dead locals have been removed.
    // NOTE(review): if MakeInputs draws from a shared RNG, dropping those two
    // calls shifts the random sequence for later draws — confirm acceptable.
    double[] firstInput = MakeInputs(inputs);
    double[] secondInput = MakeInputs(inputs);
    double[] thirdInput = MakeInputs(inputs);
    double[] fourthInput = MakeInputs(inputs);

    var pair1 = TrainerHelper.ProcessPairs(firstInput, firstInput, inputs, predictWindow);
    var pair2 = TrainerHelper.ProcessPairs(secondInput, firstInput, inputs, predictWindow);
    var pair3 = TrainerHelper.ProcessPairs(thirdInput, firstInput, inputs, predictWindow);
    var pair4 = TrainerHelper.ProcessPairs(fourthInput, firstInput, inputs, predictWindow);

    var superSet = new BasicMLDataSet();
    superSet.Add(pair1);
    superSet.Add(pair2);
    superSet.Add(pair3);
    superSet.Add(pair4);

    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, superSet.InputSize));
    network.AddLayer(new BasicLayer(new ActivationTANH(), false, 20));
    // NOTE(review): a zero-neuron layer looks unintentional — confirm before changing.
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, 0));
    network.AddLayer(new BasicLayer(new ActivationLinear(), true, predictWindow));
    network.Structure.FinalizeStructure();
    network.Reset();

    return CreateEval.TrainNetworks(network, superSet);
}