public TrainOnData — method signature

| Parameter            | Type         |
|----------------------|--------------|
| data                 | TrainingData |
| maxEpochs            | uint         |
| epochsBetweenReports | uint         |
| desiredError         | float        |
| Result               | void         |
/// <summary>
/// Robot-dataset demo: sweeps the learning momentum from 0.0 to 0.6 in 0.1
/// steps, training a fresh incremental-backprop network for each value and
/// printing the MSE on both the train and test sets.
/// </summary>
static void Main()
{
    const uint num_layers = 3;
    const uint num_neurons_hidden = 96;
    const float desired_error = 0.001F;

    using (TrainingData trainData = new TrainingData("..\\..\\..\\datasets\\robot.train"))
    using (TrainingData testData = new TrainingData("..\\..\\..\\datasets\\robot.test"))
    {
        // Try each momentum value on its own freshly initialized network so
        // the runs are comparable.
        for (float momentum = 0.0F; momentum < 0.7F; momentum += 0.1F)
        {
            Console.WriteLine("============= momentum = {0} =============\n", momentum);

            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers,
                trainData.InputCount, num_neurons_hidden, trainData.OutputCount))
            {
                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;
                net.LearningMomentum = momentum;

                // Up to 20000 epochs, reporting every 5000.
                net.TrainOnData(trainData, 20000, 5000, desired_error);

                Console.WriteLine("MSE error on train data: {0}", net.TestData(trainData));
                Console.WriteLine("MSE error on test data: {0}", net.TestData(testData));
            }
        }
    }

    Console.ReadKey();
}
/// <summary>
/// Scaling demo: builds a 4-layer network with a linear output, derives
/// input/output scaling parameters from the training data, scales the data,
/// trains with RPROP, and saves the resulting network to disk.
/// </summary>
static void Main()
{
    const uint num_input = 3;
    const uint num_output = 1;
    const uint num_layers = 4;
    const uint num_neurons_hidden = 5;
    const float desired_error = 0.0001F;
    const uint max_epochs = 5000;
    const uint epochs_between_reports = 1000;

    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers,
        num_input, num_neurons_hidden, num_neurons_hidden, num_output))
    {
        net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
        // Linear output so the network can produce unbounded (scaled) targets.
        net.ActivationFunctionOutput = ActivationFunction.LINEAR;
        net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_RPROP;

        using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\scaling.data"))
        {
            // Map both inputs and outputs into [-1, 1] before training.
            net.SetScalingParams(data, -1, 1, -1, 1);
            net.ScaleTrain(data);

            net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);
            net.Save("..\\..\\..\\datasets\\scaling.net");
            Console.ReadKey();
        }
    }
}
/// <summary>
/// XOR demo: trains a 2-3-1 network with RPROP using the bit-fail stop
/// criterion, prints the network's answer for each training pattern, and
/// saves the network in both floating-point and fixed-point formats.
/// </summary>
static void Main(string[] args)
{
    DataType[] calc_out;
    const uint num_input = 2;
    const uint num_output = 1;
    const uint num_layers = 3;
    const uint num_neurons_hidden = 3;
    // With STOPFUNC_BIT the desired error is a bit-fail count, hence 0.
    const float desired_error = 0;
    const uint max_epochs = 1000;
    const uint epochs_between_reports = 10;
    int decimal_point;

    Console.WriteLine("Creating network.");

    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers,
        num_input, num_neurons_hidden, num_output))
    using (TrainingData data = new TrainingData("..\\..\\..\\examples\\xor.data"))
    {
        net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
        net.ActivationFunctionOutput = ActivationFunction.SIGMOID_SYMMETRIC;
        net.TrainStopFunction = StopFunction.STOPFUNC_BIT;
        net.BitFailLimit = 0.01F;
        net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_RPROP;
        net.InitWeights(data);

        Console.WriteLine("Training network.");
        net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);

        Console.WriteLine("Testing network");

        // Grab the inputs and outputs once: each access to TrainingData.Input
        // or TrainingData.Output copies all of the data. (The accessor
        // properties are the fast alternative for repeated element access.)
        DataType[][] input = data.Input;
        DataType[][] output = data.Output;

        for (int i = 0; i < data.TrainDataLength; i++)
        {
            calc_out = net.Run(input[i]);
            Console.WriteLine("XOR test ({0},{1}) -> {2}, should be {3}, difference={4}",
                input[i][0], input[i][1], calc_out[0], output[i][0],
                FannAbs(calc_out[0] - output[i][0]));
        }

        Console.WriteLine("Saving network.\n");
        net.Save("..\\..\\..\\examples\\xor_float.net");

        // Save a fixed-point copy of both the network and the training data,
        // using the decimal point position chosen by the network.
        decimal_point = net.SaveToFixed("..\\..\\..\\examples\\xor_fixed.net");
        data.SaveTrainToFixed("..\\..\\..\\examples\\xor_fixed.data", (uint)decimal_point);

        Console.ReadKey();
    }
}
/// <summary>
/// Mushroom demo: trains a 3-layer network on the mushroom training set,
/// evaluates it sample-by-sample on the test set, prints the resulting MSE,
/// and saves the network.
/// </summary>
static void Main()
{
    const uint num_layers = 3;
    const uint num_neurons_hidden = 32;
    const float desired_error = 0.0001F;
    const uint max_epochs = 300;
    const uint epochs_between_reports = 10;

    Console.WriteLine("Creating network.");

    using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\mushroom.train"))
    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers,
        data.InputCount, num_neurons_hidden, data.OutputCount))
    {
        Console.WriteLine("Training network.");
        net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
        net.ActivationFunctionOutput = ActivationFunction.SIGMOID;
        net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);

        Console.WriteLine("Testing network.");

        using (TrainingData testData = new TrainingData())
        {
            testData.ReadTrainFromFile("..\\..\\..\\datasets\\mushroom.test");
            net.ResetMSE();

            for (int i = 0; i < testData.TrainDataLength; i++)
            {
                // GetTrainInput/GetTrainOutput are far faster than the Input
                // and Output properties, which copy the entire data set.
                net.Test(testData.GetTrainInput((uint)i).Array,
                         testData.GetTrainOutput((uint)i).Array);
            }

            Console.WriteLine("MSE error on test data {0}", net.MSE);

            Console.WriteLine("Saving network.");
            net.Save("..\\..\\..\\examples\\mushroom_float.net");

            Console.ReadKey();
        }
    }
}
/// <summary>
/// Robot demo: trains an incremental-backprop network (momentum 0.4) on the
/// robot training set, evaluates it on the test set, prints the MSE, and
/// saves the network. Test/save errors are caught and reported.
/// </summary>
static void Main()
{
    const uint num_layers = 3;
    const uint num_neurons_hidden = 96;
    const float desired_error = 0.001F;

    Console.WriteLine("Creating network.");

    using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\robot.train"))
    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers,
        data.InputCount, num_neurons_hidden, data.OutputCount))
    using (TrainingData testData = new TrainingData())
    {
        Console.WriteLine("Training network.");
        net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;
        net.LearningMomentum = 0.4F;
        // Up to 3000 epochs, reporting every 10.
        net.TrainOnData(data, 3000, 10, desired_error);

        Console.WriteLine("Testing network.");
        testData.ReadTrainFromFile("..\\..\\..\\datasets\\robot.test");

        try
        {
            net.ResetMSE();
            for (int i = 0; i < testData.TrainDataLength; i++)
            {
                // Per-sample accessors avoid copying the whole data set.
                net.Test(testData.GetTrainInput((uint)i).Array,
                         testData.GetTrainOutput((uint)i).Array);
            }

            Console.WriteLine("MSE error on test data: {0}", net.MSE);

            Console.WriteLine("Saving network.");
            net.Save("..\\..\\..\\datasets\\robot_float.net");
        }
        catch (Exception e)
        {
            // Best-effort example: report and continue to the final ReadKey.
            Console.WriteLine("Exception: {0}", e.Message);
        }

        Console.ReadKey();
    }
}
/// <summary>
/// Callback demo: builds train/test sets via data-generation callbacks,
/// dumps the generated patterns through the accessor classes, then sweeps
/// the learning momentum from 0.0 to 0.6, training with a progress callback
/// and printing train/test MSE for each momentum value.
/// </summary>
static void Main()
{
    const uint num_layers = 3;
    const uint num_neurons_hidden = 96;
    const float desired_error = 0.00007F;

    using (TrainingData trainData = new TrainingData())
    using (TrainingData testData = new TrainingData())
    {
        // 374/594 patterns, 48 inputs, 3 outputs, filled in by the callbacks.
        trainData.CreateTrainFromCallback(374, 48, 3, TrainingDataCallback);
        testData.CreateTrainFromCallback(594, 48, 3, TestDataCallback);

        // Exercise the accessor classes by printing every generated pattern.
        for (int i = 0; i < trainData.TrainDataLength; i++)
        {
            Console.Write("Input {0}: ", i);
            for (int j = 0; j < trainData.InputCount; j++)
            {
                Console.Write("{0}, ", trainData.InputAccessor[i][j]);
            }

            Console.Write("\nOutput {0}: ", i);
            for (int j = 0; j < trainData.OutputCount; j++)
            {
                Console.Write("{0}, ", trainData.OutputAccessor[i][j]);
            }

            Console.WriteLine("");
        }

        for (float momentum = 0.0F; momentum < 0.7F; momentum += 0.1F)
        {
            Console.WriteLine("============= momentum = {0} =============\n", momentum);

            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers,
                trainData.InputCount, num_neurons_hidden, trainData.OutputCount))
            {
                // User data ("Hello!") is handed back to the callback.
                net.SetCallback(TrainingCallback, "Hello!");
                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;
                net.LearningMomentum = momentum;

                net.TrainOnData(trainData, 20000, 500, desired_error);

                Console.WriteLine("MSE error on train data: {0}", net.TestData(trainData));
                Console.WriteLine("MSE error on test data: {0}", net.TestData(testData));
            }
        }
    }

    Console.ReadKey();
}
/// <summary>
/// Full XOR walkthrough: configures a 2-3-1 network, prints its type and
/// parameters, trains it from xor.data with a print callback, runs every
/// pattern through the trained network, and saves float and fixed-point
/// copies of both the network and the data.
/// </summary>
static void XorTest()
{
    Console.WriteLine("\nXOR test started.");

    const float learning_rate = 0.7f;
    const uint num_layers = 3;
    const uint num_input = 2;
    const uint num_hidden = 3;
    const uint num_output = 1;
    const float desired_error = 0.001f;
    const uint max_iterations = 300000;
    const uint iterations_between_reports = 1000;

    Console.WriteLine("\nCreating network.");

    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers,
        num_input, num_hidden, num_output))
    {
        net.LearningRate = learning_rate;
        net.ActivationSteepnessHidden = 1.0F;
        net.ActivationSteepnessOutput = 1.0F;
        net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC_STEPWISE;
        net.ActivationFunctionOutput = ActivationFunction.SIGMOID_SYMMETRIC_STEPWISE;

        // Report the network type and the full parameter dump.
        Console.Write("\nNetworkType : ");
        switch (net.NetworkType)
        {
            case NetworkType.LAYER:
                Console.WriteLine("LAYER");
                break;
            case NetworkType.SHORTCUT:
                Console.WriteLine("SHORTCUT");
                break;
            default:
                Console.WriteLine("UNKNOWN");
                break;
        }
        net.PrintParameters();

        Console.WriteLine("\nTraining network.");

        using (TrainingData data = new TrainingData())
        {
            if (data.ReadTrainFromFile("..\\..\\..\\examples\\xor.data"))
            {
                // Initialize and train the network with the data
                net.InitWeights(data);

                Console.WriteLine("Max Epochs " + String.Format("{0:D}", max_iterations).PadLeft(8) +
                                  ". Desired Error: " + String.Format("{0:F}", desired_error).PadRight(8));
                net.SetCallback(PrintCallback, null);
                net.TrainOnData(data, max_iterations, iterations_between_reports, desired_error);

                Console.WriteLine("\nTesting network.");

                for (uint i = 0; i < data.TrainDataLength; i++)
                {
                    // Run the network on the test data
                    DataType[] calc_out = net.Run(data.Input[i]);

                    // NOTE(review): exact float == 0 comparison is deliberate
                    // here — the "+#;-#" format renders 0 as empty, so zero is
                    // special-cased to print "0".
                    Console.WriteLine("XOR test ({0}, {1}) -> {2}, should be {3}, difference = {4}",
                        data.InputAccessor[(int)i][0].ToString("+#;-#"),
                        data.InputAccessor[(int)i][1].ToString("+#;-#"),
                        calc_out[0] == 0 ? 0.ToString() : calc_out[0].ToString("+#.#####;-#.#####"),
                        data.OutputAccessor[(int)i][0].ToString("+#;-#"),
                        FannAbs(calc_out[0] - data.Output[i][0]));
                }

                Console.WriteLine("\nSaving network.");

                // Save the network in floating point and fixed point
                net.Save("..\\..\\..\\examples\\xor_float.net");
                uint decimal_point = (uint)net.SaveToFixed("..\\..\\..\\examples\\xor_fixed.net");
                data.SaveTrainToFixed("..\\..\\..\\examples\\xor_fixed.data", decimal_point);

                Console.WriteLine("\nXOR test completed.");
            }
        }
    }
}
/// <summary>
/// Accessor benchmark demo: trains a mushroom classifier, then times reading
/// five test samples via the Accessor properties versus the copying
/// Input/Output array properties, printing the tick counts for both.
/// </summary>
static void Main()
{
    const uint num_layers = 3;
    const uint num_neurons_hidden = 32;
    const float desired_error = 0.0001F;
    const uint max_epochs = 300;
    const uint epochs_between_reports = 10;
    long before;

    Console.WriteLine("Creating network.");

    using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\mushroom.train"))
    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers,
        data.InputCount, num_neurons_hidden, data.OutputCount))
    {
        Console.WriteLine("Training network.");
        net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
        net.ActivationFunctionOutput = ActivationFunction.SIGMOID;
        net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);

        Console.WriteLine("Testing network.");

        using (TrainingData testData = new TrainingData())
        {
            testData.ReadTrainFromFile("..\\..\\..\\datasets\\mushroom.test");

            // Pass 1: copy five samples element-by-element via the accessors.
            before = Environment.TickCount;
            for (int i = 0; i < 5; i++)
            {
                DataType[] input = new DataType[testData.InputCount];
                DataType[] output = new DataType[testData.OutputCount];
                for (int j = 0; j < testData.InputCount; j++)
                {
                    input[j] = testData.InputAccessor[i][j];
                }
                for (int j = 0; j < testData.OutputCount; j++)
                {
                    output[j] = testData.OutputAccessor[i][j];
                }
                net.Test(input, output);
            }
            Console.WriteLine("Data Accessor ticks {0}", Environment.TickCount - before);

            // Pass 2: same five samples, but each Input/Output property access
            // copies the whole data set first.
            before = Environment.TickCount;
            for (int i = 0; i < 5; i++)
            {
                DataType[] input = new DataType[testData.InputCount];
                DataType[] output = new DataType[testData.OutputCount];
                for (int j = 0; j < testData.InputCount; j++)
                {
                    input[j] = testData.Input[i][j];
                }
                for (int j = 0; j < testData.OutputCount; j++)
                {
                    output[j] = testData.Output[i][j];
                }
                net.Test(input, output);
            }
            Console.WriteLine("Array ticks {0}", Environment.TickCount - before);
        }

        // NOTE(review): MSE here accumulates over both timing passes above
        // (no ResetMSE between them) — presumably intentional for this demo;
        // confirm if a per-pass MSE was wanted.
        Console.WriteLine("MSE error on test data {0}", net.MSE);

        Console.WriteLine("Saving network.");
        net.Save("..\\..\\..\\examples\\mushroom_float.net");

        Console.ReadKey();
    }
}