public string Learn(string knowFile)
{
    var network = new ActivationNetwork(new BipolarSigmoidFunction(), Constants.StoneCount, 1);
    var teacher = new BackPropagationLearning(network); // alternative: new PerceptronLearning(network)
    var data = LoadData(knowFile);

    double error = double.MaxValue;
    int index = 0;
    while (error > 1.0 && index++ < 5000)
    {
        error = teacher.RunEpoch(data.Item1, data.Item2);
    }

    var networkFile = knowFile + ".net";
    network.Save(networkFile);
    Console.WriteLine("Learn: {0}, Gen: {1}", knowFile, networkFile);
    return networkFile;
}
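// Usage sketch (not part of the original source): consume the ".net" file produced
// by Learn() above. ActivationNetwork.Load and Compute are standard AForge.Neuro
// calls; the method name Evaluate and the assumption that `position` holds
// Constants.StoneCount elements are mine.
public double Evaluate(string networkFile, double[] position)
{
    var network = (ActivationNetwork)ActivationNetwork.Load(networkFile);
    double[] output = network.Compute(position); // single output neuron
    return output[0];
}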
private void Worker()
{
    ExtendedIOHelpers.ShowAlert("Generate new samples before training? y/n ", () => GenerateSamples());
    if (!LoadSamples())
    {
        return;
    }
    Console.WriteLine("Loaded {0} samples", _inputList.Count);

    var network = new ActivationNetwork(new SigmoidFunction(1), InputCount, OutputCount);
    var teacher = new BackPropagationLearning(network) { LearningRate = 0.01 };

    Console.WriteLine("Start training the network.");
    int iteration = 0;
    const int iterations = 5000;
    double error = 1.0;

    // Materialize the sample arrays once, rather than on every epoch.
    var inputs = _inputList.ToArray();
    var outputs = _outputList.ToArray();

    var st = Stopwatch.StartNew();
    while (iteration < iterations && error > 0.00005)
    {
        error = teacher.RunEpoch(inputs, outputs) / _inputList.Count;
        iteration++;
    }
    st.Stop();
    var time = st.ElapsedMilliseconds / 1000.0;

    Console.WriteLine("Network successfully trained! Error = {0:0.######}, Iteration = {1}", error, iteration);
    Console.WriteLine("Time = {0:0.000} s\n", time);

    // Scale the weights to integers and convert to a fixed-width string format.
    var weights = network.Layers
        .Select(layer => layer.Neurons
            .Select(neuron => neuron.Weights
                .Select(x => string.Format("{0,6}", Convert.ToInt32(x * 1000.0)))));

    ExtendedIOHelpers.ShowAlert("Do you want to save network to file? y/n ", () =>
    {
        SaveWeights(weights);
        network.Save(NetworkFile);
    });
    Console.ReadLine();
}
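// SaveWeights is not shown in this snippet. A minimal sketch of what it might look
// like, assuming one text line per neuron containing the scaled integer weights
// produced by the LINQ query above (the method body and the output path are
// assumptions, not the original implementation):
private void SaveWeights(IEnumerable<IEnumerable<IEnumerable<string>>> weights)
{
    using (var writer = new StreamWriter("weights.txt")) // hypothetical path
    {
        foreach (var layer in weights)
            foreach (var neuron in layer)
                writer.WriteLine(string.Join(" ", neuron));
    }
}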
public static void Optimize(
    FsdParser inputParser, string outputFileName, int[][] layers,
    double minThreshold, double maxThreshold, double thresholdStepSize,
    int minTrades, int iterations, int randomize, FsdParser[] validationParsers)
{
    HashSet<int[]> layerCombinations;
    getLayerCombinations(layers, out layerCombinations);

    Network bestNetwork = null;
    double bestThreshold = double.NaN;
    double bestMinWinRate = double.NegativeInfinity;

    int cursorLeft = Console.CursorLeft;
    int cursorTop = Console.CursorTop;

    int numTotalIterations = (int)(layerCombinations.Count * ((maxThreshold - minThreshold) / thresholdStepSize + 1) * iterations);
    int numIterations = 0;

    foreach (int[] layerCombination in layerCombinations)
    {
        ActivationNetwork network = new ActivationNetwork(
            new SigmoidFunction(),
            inputParser.InputVectors[0].Length,
            layerCombination);
        network.Randomize();

        ParallelResilientBackpropagationLearning teacher = new ParallelResilientBackpropagationLearning(network);

        for (double threshold = minThreshold; threshold <= maxThreshold; threshold += thresholdStepSize)
        {
            for (int iteration = 1; iteration <= iterations; iteration++)
            {
                numIterations++;

                // Clear the status area and rewrite the progress report in place.
                Console.CursorLeft = cursorLeft;
                Console.CursorTop = cursorTop;
                Console.Write(new string(' ', Console.BufferWidth * 10));
                Console.CursorLeft = cursorLeft;
                Console.CursorTop = cursorTop;

                Console.WriteLine("layerCombination[]: {{ {0} }}", string.Join(", ", layerCombination));
                Console.WriteLine("threshold: {0:0.00}", threshold);
                Console.WriteLine("iteration: {0}", iteration);
                Console.WriteLine("bestMinWinRate: {0:0.00}%", bestMinWinRate);
                Console.WriteLine();
                Console.WriteLine("Progress: {0:0.00}%", (double)numIterations / numTotalIterations * 100.0);

                // Periodically restart from random weights.
                if (randomize > 0 && (iteration - 1) % randomize == 0)
                    network.Randomize();

                teacher.RunEpoch(inputParser.InputVectors, inputParser.OutputVectors);

                bool validData = true;
                double minWinRate = double.PositiveInfinity;

                foreach (FsdParser validationParser in validationParsers)
                {
                    int numTradesWon, numTradesLost;
                    double tradeWinRate;
                    getStatistics(network, validationParser, threshold, out numTradesWon, out numTradesLost, out tradeWinRate);

                    // Discard candidates that trade too rarely on any validation set.
                    if (numTradesWon + numTradesLost < minTrades)
                    {
                        validData = false;
                        break;
                    }

                    minWinRate = Math.Min(minWinRate, tradeWinRate);

                    // Stop early if this candidate can no longer beat the current best.
                    if (minWinRate < bestMinWinRate)
                    {
                        validData = false;
                        break;
                    }
                }

                if (validData)
                {
                    bestNetwork = network;
                    bestThreshold = threshold;
                    bestMinWinRate = minWinRate;

                    network.Save(outputFileName);

                    // Save the configuration information.
                    string configuration = "";
                    configuration += "layerCombination[]: { " + string.Join(", ", layerCombination) + " }\r\n";
                    configuration += string.Format("threshold: {0:0.00}\r\n", threshold);
                    configuration += string.Format("iteration: {0}\r\n", iteration);
                    configuration += string.Format("bestMinWinRate: {0:0.00}%\r\n", bestMinWinRate);

                    File.WriteAllText(outputFileName + ".txt", configuration);
                }
            }
        }
    }
}
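// getLayerCombinations is not shown above. A plausible sketch of its contract,
// assuming layers[i] lists the candidate neuron counts for layer i and the result
// is the Cartesian product of those candidates (this reading of the helper is an
// assumption, not the original code):
private static void getLayerCombinations(int[][] layers, out HashSet<int[]> layerCombinations)
{
    layerCombinations = new HashSet<int[]>();
    buildCombinations(layers, 0, new int[layers.Length], layerCombinations);
}

private static void buildCombinations(int[][] layers, int depth, int[] current, HashSet<int[]> result)
{
    if (depth == layers.Length)
    {
        result.Add((int[])current.Clone()); // store a copy of the finished combination
        return;
    }
    foreach (int size in layers[depth])
    {
        current[depth] = size;
        buildCombinations(layers, depth + 1, current, result);
    }
}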
/// <summary>
/// Background worker for neural network learning
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void backgroundWorkerNeural_DoWork(object sender, DoWorkEventArgs e)
{
    string connectionString = "Data Source=192.168.0.245;Initial Catalog=MyDB;Integrated Security=True;Connection Timeout=30000";
    using (SqlConnection connection = new SqlConnection(connectionString))
    {
        try
        {
            connection.Open();

            // Only select rows where every input column is populated.
            string[] columns =
            {
                "i1", "i2", "i3", "i4", "i5", "i6", "i7", "i8", "i9", "i10", "i11",
                "i12", "i13", "i14", "i15", "i16", "i17", "i18", "i20", "i21", "i23"
            };
            string queryString = "SELECT * FROM rates WHERE "
                + string.Join(" AND ", columns.Select(c => "[" + c + "]<>0"));

            SqlCommand command = new SqlCommand(queryString, connection);
            SqlDataReader reader = command.ExecuteReader();
            DataTable dt = new DataTable();
            dt.Load(reader);

            int value_count = dt.Rows.Count;
            double[][] input_arr = new double[value_count][];
            double[][] output_arr = new double[value_count][];

            for (int i = 0; i < value_count; i++)
            {
                DataRow row = dt.Rows[i];
                input_arr[i] = new double[columns.Length];
                for (int j = 0; j < columns.Length; j++)
                {
                    input_arr[i][j] = Double.Parse(row[columns[j]].ToString());
                }

                // Encode the sign of the price difference as the target: 1, 0 or -1.
                double difference = Double.Parse(row["Difference"].ToString());
                output_arr[i] = new double[] { Math.Sign(difference) };
            }

            AForge.Neuro.BipolarSigmoidFunction sigmoidFunction = new AForge.Neuro.BipolarSigmoidFunction();
            //AForge.Neuro.SigmoidFunction sigmoidFunction = new AForge.Neuro.SigmoidFunction(2);
            AForge.Neuro.ActivationNetwork network = new AForge.Neuro.ActivationNetwork(sigmoidFunction, 21, 1);
            // Alternative layer layout that was tried: new int[5] { 21, 21, 21, 21, 1 }

            AForge.Neuro.Learning.EvolutionaryLearning teacher = new AForge.Neuro.Learning.EvolutionaryLearning(network, 1000);
            // Other teachers that were tried:
            //AForge.Neuro.Learning.DeltaRuleLearning teacher = new AForge.Neuro.Learning.DeltaRuleLearning(network) { LearningRate = 1 };
            //AForge.Neuro.Learning.ResilientBackpropagationLearning teacher = new AForge.Neuro.Learning.ResilientBackpropagationLearning(network) { LearningRate = 1 };
            //AForge.Neuro.Learning.PerceptronLearning teacherP = new PerceptronLearning(network) { LearningRate = 1 };
            //AForge.Neuro.Learning.BackPropagationLearning teacher = new AForge.Neuro.Learning.BackPropagationLearning(network) { LearningRate = 1, Momentum = .2 };

            bool noNeedToStop = false;
            double error = 0;
            double lastError = 0;
            double learningRate = 1;
            int k = 0;
            sigmoidFunction.Alpha = 0.01;

            while (!noNeedToStop)
            {
                // Run one epoch of the learning procedure.
                error = teacher.RunEpoch(input_arr, output_arr);
                double temp = Math.Abs(lastError - error);

                if (error < 30)
                {
                    noNeedToStop = true;
                }
                else if (temp < 0.0000001)
                {
                    // The error has plateaued: count stalled epochs and restart from
                    // random weights if it does not recover.
                    lastError = error;
                    k++;
                    if (k > 1000)
                    {
                        network.Randomize();
                        k = 0;
                    }
                    // Note: learningRate only matters for the commented-out teachers;
                    // EvolutionaryLearning has no learning rate.
                    learningRate /= 2;
                    //teacherP.LearningRate = learningRate;
                }
                else
                {
                    lastError = error;
                }
            }

            network.Save(@"E:\neural");
        }
        catch (Exception ex)
        {
            message += " Exception: " + ex.Message;
        }
        finally
        {
            connection.Close();
        }
    }
}
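// In the loop above, learningRate is halved on plateaus but never applied, because
// EvolutionaryLearning exposes no learning rate. A minimal sketch of how the decay
// would be wired up with the commented-out BackPropagationLearning teacher instead
// (an illustration under those assumptions, not the original author's code):
private static double TrainWithDecay(AForge.Neuro.ActivationNetwork network,
    double[][] inputs, double[][] outputs)
{
    var teacher = new AForge.Neuro.Learning.BackPropagationLearning(network)
    {
        LearningRate = 1.0,
        Momentum = 0.2
    };
    double lastError = double.MaxValue;
    double error = double.MaxValue;
    while (error > 30) // same stopping threshold as the loop above
    {
        error = teacher.RunEpoch(inputs, outputs);
        if (Math.Abs(lastError - error) < 1e-7)
            teacher.LearningRate /= 2; // apply the decay to the teacher on a plateau
        lastError = error;
    }
    return error;
}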
public void Learn()
{
    var network = new ActivationNetwork(new BipolarSigmoidFunction(), Constants.StoneCount, 1);
    var teacher = new BackPropagationLearning(network); // alternative: new PerceptronLearning(network)
    var data = LoadData("4-6-2012-04-24.know");

    double error = 1.0;
    int index = 0;
    while (error > 0.001 && index < 100000)
    {
        error = teacher.RunEpoch(data.Item1, data.Item2);
        index++;
    }
    network.Save("4-6-2012-04-24.bp.net");

    var text = "□○○○●○○□○●●□□●□□";
    var i = ToDouble(text); // expected evaluation for this position: -2
    var o = network.Compute(i);

    // Map the network output back to the [-StoneCount, StoneCount] range.
    var eval = o[0] * 2 * Constants.StoneCount - Constants.StoneCount;
    Console.WriteLine("{0} {1}", text, eval);
}
public void LearnDemo()
{
    // Single-layer perceptron with a threshold activation learning the AND function.
    ActivationNetwork network = new ActivationNetwork(new ThresholdFunction(), 2, 1); // input size could also be Constants.StoneCount
    PerceptronLearning teacher = new PerceptronLearning(network);

    double[][] input = new double[4][];
    double[][] output = new double[4][];
    input[0] = new double[] { 0, 0 }; output[0] = new double[] { 0 };
    input[1] = new double[] { 0, 1 }; output[1] = new double[] { 0 };
    input[2] = new double[] { 1, 0 }; output[2] = new double[] { 0 };
    input[3] = new double[] { 1, 1 }; output[3] = new double[] { 1 };

    double error = 1.0;
    while (error > 0.001)
    {
        error = teacher.RunEpoch(input, output);
    }

    var k = network.Compute(new double[] { 0.9, 0.7 });
    var o = network.Output;
    network.Save("a.txt");
}
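// A quick check of the trained AND gate, added for illustration: with the threshold
// activation, the output should be 0 for every pattern except { 1, 1 }. The helper
// name is an assumption; Compute is the standard AForge.Neuro call.
private static void PrintTruthTable(ActivationNetwork network, double[][] input)
{
    foreach (double[] pattern in input)
    {
        double[] result = network.Compute(pattern);
        Console.WriteLine("{0} AND {1} => {2}", pattern[0], pattern[1], result[0]);
    }
}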
private void SearchSolution()
{
    mNetwork = new ActivationNetwork(
        new BipolarSigmoidFunction(sigmoidAlphaValue),
        mInput[0].Length, mHiddenNeurons, mOutput[0].Length);

    // Create the teacher and set the learning rate and momentum.
    BackPropagationLearning teacher = new BackPropagationLearning(mNetwork);
    teacher.LearningRate = mLearningRate;
    teacher.Momentum = mMomentum;

    bool needToStop = false;
    int iteration = 0;
    while (!needToStop)
    {
        double error = teacher.RunEpoch(mInput, mOutput) / mInput.Length;
        mErrors[iteration] = error;

        // Compute the root of the squared error over the test set.
        double test_error = 0.0;
        for (int i = 0; i < mTestInput.Length; i++)
        {
            double[] test_result = mNetwork.Compute(mTestInput[i]);
            test_error += (mTestOutput[i][0] - test_result[0]) * (mTestOutput[i][0] - test_result[0]);
        }
        mTestErrors[iteration] = Math.Sqrt(test_error);

        if (min_test_error > mTestErrors[iteration])
        {
            min_test_error = mTestErrors[iteration];
            // Snapshot the best network so far by serializing it; assigning
            // mTestBestNetwork = mNetwork would only alias the still-training network.
            mNetwork.Save(this.Id + ".txt");
        }

        iteration++;
        if (iteration >= mIterations)
            needToStop = true;
    }

    // Reload the checkpoint with the lowest test error.
    mTestBestNetwork = (ActivationNetwork)ActivationNetwork.Load(this.Id + ".txt");
}
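// SearchSolution checkpoints the best network through a file on disk. A sketch of
// the same snapshot done in memory instead, using the stream overloads of Save and
// Load that AForge.Neuro provides (the helper name is an assumption):
private static ActivationNetwork Snapshot(ActivationNetwork network)
{
    using (var stream = new MemoryStream())
    {
        network.Save(stream);   // serialize the current weights
        stream.Position = 0;    // rewind before reading the snapshot back
        return (ActivationNetwork)ActivationNetwork.Load(stream);
    }
}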
private static void trainNetwork(ActivationNetwork neuralNet, ISupervisedLearning teacher, double[][] input,
    double[][] output, double[][] crossValidationInput, char[] crossValidationDataLabels)
{
    // Make the network learn the data
    DefaultLog.Info("Training the neural network . . .");
    double error;

    //TODO: Store the previous NUM_ITERATIONS_EQUAL_IMPLIES_PLATEAU networks so in the event of over-learning, we can return to the best one

    // Use the cross-validation data to notice if the network starts to over-learn the data.
    // Store the previous network (before training) and check whether performance drops on the cross-validation data.
    MemoryStream prevNetworkStream = new MemoryStream();
    uint prevNetworkNumMisclassified = uint.MaxValue;
    Queue<uint> prevNetworksNumMisclassified = new Queue<uint>(NUM_ITERATIONS_EQUAL_IMPLIES_PLATEAU);

    // Initialise the queue to be full of uint.MaxValue
    for (int i = 0; i < NUM_ITERATIONS_EQUAL_IMPLIES_PLATEAU; i++)
    {
        prevNetworksNumMisclassified.Enqueue(prevNetworkNumMisclassified);
    }

    int iterNum = 1;
    do
    {
        // Perform an iteration of training (calls teacher.Run() for each item in the arrays of inputs/outputs provided)
        error = teacher.RunEpoch(input, output);

        // Progress update
        if (iterNum % ITERATIONS_PER_PROGRESS_UPDATE == 0)
        {
            DefaultLog.Debug(String.Format("Learned for {0} iterations. Error: {1}", iterNum, error));
        }

        // Evaluate this network on the cross-validation data
        NeuralNetworkEvaluator crossValidationEvaluator = new NeuralNetworkEvaluator(neuralNet);
        crossValidationEvaluator.Evaluate(crossValidationInput, crossValidationDataLabels);
        uint networkNumMisclassified = crossValidationEvaluator.ConfusionMatrix.NumMisclassifications;
        DefaultLog.Debug(String.Format("Network misclassified {0} / {1} on the cross-validation data set",
            networkNumMisclassified, crossValidationEvaluator.ConfusionMatrix.TotalClassifications));

        // Check if we've over-learned the data and performance on the cross-validation data has dropped off.
        // Use the mean of the number of misclassifications, as the actual number can move around a bit.
        if (networkNumMisclassified > Stats.Mean(prevNetworksNumMisclassified))
        {
            // Cross-validation performance has dropped; reinstate the previous network & break.
            // Note: this reassigns only the local parameter, so the caller still holds the
            // over-trained network unless it reloads the result itself.
            DefaultLog.Debug(String.Format("Network has started to overlearn the training data on iteration {0}. Using previous classifier.", iterNum));
            prevNetworkStream.Position = 0; // Seek to the start of the stream
            neuralNet = ActivationNetwork.Load(prevNetworkStream) as ActivationNetwork; // Read in the network
            break;
        }

        // Clear the memory stream storing the previous network
        prevNetworkStream.SetLength(0);

        // Store this network & the number of characters it misclassified on the cross-validation data
        neuralNet.Save(prevNetworkStream);

        // This is now the previous network; update the number it misclassified
        prevNetworkNumMisclassified = networkNumMisclassified;
        prevNetworksNumMisclassified.Dequeue();
        prevNetworksNumMisclassified.Enqueue(prevNetworkNumMisclassified);

        // Check if the performance has plateaued
        if (prevNetworksNumMisclassified.Distinct().Count() == 1) // Allow for slight movement in performance here??
        {
            // Cross-validation performance has plateaued; use this network as the final one & break
            DefaultLog.Debug(String.Format("Network performance on cross-validation data has plateaued on iteration {0}.", iterNum));
            break;
        }

        // Check if we've performed the max number of iterations
        if (iterNum > MAX_LEARNING_ITERATIONS)
        {
            DefaultLog.Debug(String.Format("Reached the maximum number of learning iterations ({0}), with error {1}",
                MAX_LEARNING_ITERATIONS, error));
            break;
        }
        iterNum++;
    } while (error > LEARNED_AT_ERROR);

    DefaultLog.Info("Data learned to an error of {0}", error);
}
// Train the network many times with different initial values, evaluate each on the
// cross-validation data and select the best one
private static ActivationNetwork trainNetworksCompeteOnCrossValidation(ActivationNetwork neuralNet,
    ISupervisedLearning teacher, double[][] input, double[][] output,
    double[][] crossValidationInput, char[] crossValidationDataLabels)
{
    DefaultLog.Info("Training {0} neural networks & picking the one that performs best on the cross-validation data . . .",
        NUM_NETWORKS_TO_TRAIN_FOR_CROSS_VALIDATION_COMPETITION);

    MemoryStream bestNetworkStream = new MemoryStream();
    uint bestNetworkNumMisclassified = uint.MaxValue;

    for (int i = 0; i < NUM_NETWORKS_TO_TRAIN_FOR_CROSS_VALIDATION_COMPETITION; i++)
    {
        DefaultLog.Info("Training network {0}/{1}", (i + 1), NUM_NETWORKS_TO_TRAIN_FOR_CROSS_VALIDATION_COMPETITION);

        // Train a new network
        neuralNet.Randomize(); // Reset the weights to random values
        trainNetwork(neuralNet, teacher, input, output, crossValidationInput, crossValidationDataLabels);

        // Compare this new network's performance to our current best network
        NeuralNetworkEvaluator evaluator = new NeuralNetworkEvaluator(neuralNet);
        evaluator.Evaluate(crossValidationInput, crossValidationDataLabels);
        uint numMisclassified = evaluator.ConfusionMatrix.NumMisclassifications;

        if (numMisclassified < bestNetworkNumMisclassified)
        {
            // This network performed better than our current best network; make it the new best.
            // Clear the memory stream storing the current best network
            bestNetworkStream.SetLength(0);
            // Save the network & update the best numMisclassified
            neuralNet.Save(bestNetworkStream);
            bestNetworkNumMisclassified = numMisclassified;
        }
    }

    DefaultLog.Info("Trained all networks and selected the best one");

    // Load up the network that performed best
    bestNetworkStream.Position = 0; // Read from the start of the stream
    ActivationNetwork bestNetwork = ActivationNetwork.Load(bestNetworkStream) as ActivationNetwork;
    return bestNetwork;
}
private static NeuralNetworkEvaluator evaluateSingleLayerActivationNetworkWithSigmoidFunctionBackPropagationLearning(
    double[][] input, double[][] output, double[][] crossValidationInput, char[] crossValidationDataLabels,
    double[][] evaluationInput, char[] evaluationDataLabels, double learningRate, string networkName)
{
    // Create the neural network
    BipolarSigmoidFunction sigmoidFunction = new BipolarSigmoidFunction(2.0f);
    ActivationNetwork neuralNet = new ActivationNetwork(sigmoidFunction, input[0].Length, ClassifierHelpers.NUM_CHAR_CLASSES);

    // Randomise the network's initial weights
    neuralNet.Randomize();

    // Create the teacher that the network will use to learn the data (back-propagation learning is used here)
    BackPropagationLearning teacher = new BackPropagationLearning(neuralNet);
    teacher.LearningRate = learningRate;

    // Train multiple networks and pick the one that performs best on the cross-validation data
    //trainNetwork(neuralNet, teacher, input, output, crossValidationInput, crossValidationDataLabels);
    neuralNet = trainNetworksCompeteOnCrossValidation(neuralNet, teacher, input, output,
        crossValidationInput, crossValidationDataLabels);

    // Evaluate the returned network on the cross-validation data so it can be compared to the current best
    NeuralNetworkEvaluator crossValEvaluator = new NeuralNetworkEvaluator(neuralNet);
    crossValEvaluator.Evaluate(crossValidationInput, crossValidationDataLabels);

    // See if this network is better than the current best network of its type:
    // try to load a previous network of this type
    string previousNetworkPath = Program.NEURAL_NETWORKS_PATH + networkName + Program.NEURAL_NETWORK_FILE_EXTENSION;
    string networkCMPath = Program.NEURAL_NETWORKS_PATH + networkName + ".csv";
    bool newBest = false;
    ActivationNetwork bestNetwork = neuralNet;

    if (File.Exists(previousNetworkPath))
    {
        // Load the previous network & evaluate it
        ActivationNetwork previous = ActivationNetwork.Load(previousNetworkPath) as ActivationNetwork;
        NeuralNetworkEvaluator prevCrossValEval = new NeuralNetworkEvaluator(previous);
        prevCrossValEval.Evaluate(crossValidationInput, crossValidationDataLabels);

        // If this network is better than the previous best, write it out as the new best
        if (prevCrossValEval.ConfusionMatrix.NumMisclassifications > crossValEvaluator.ConfusionMatrix.NumMisclassifications)
        {
            DefaultLog.Info("New best cross-validation score for network \"{0}\". Previous was {1}/{2}, new best is {3}/{2}",
                networkName, prevCrossValEval.ConfusionMatrix.NumMisclassifications,
                prevCrossValEval.ConfusionMatrix.TotalClassifications,
                crossValEvaluator.ConfusionMatrix.NumMisclassifications);

            // Delete the old files
            File.Delete(previousNetworkPath);
            File.Delete(networkCMPath);
            newBest = true;
        }
        else // The previous network is still the best
        {
            DefaultLog.Info("Existing \"{0}\" network performed better than new one. New network scored {1}/{2}, existing scored {3}/{2}",
                networkName, crossValEvaluator.ConfusionMatrix.NumMisclassifications,
                crossValEvaluator.ConfusionMatrix.TotalClassifications,
                prevCrossValEval.ConfusionMatrix.NumMisclassifications);
            bestNetwork = previous;
        }
    }
    else // Otherwise there isn't a previous best
    {
        DefaultLog.Info("No previous best record for network \"{0}\" . . .", networkName);
        newBest = true;
    }

    // Evaluate the best system on the evaluation data
    NeuralNetworkEvaluator evaluator = new NeuralNetworkEvaluator(bestNetwork);
    evaluator.Evaluate(evaluationInput, evaluationDataLabels);

    // If there is a new best, write it out
    if (newBest)
    {
        DefaultLog.Info("Writing out new best network of type \"{0}\"", networkName);
        neuralNet.Save(previousNetworkPath);

        // Write out the confusion matrix for the evaluation data, not the cross-validation data
        evaluator.ConfusionMatrix.WriteToCsv(networkCMPath);
        DefaultLog.Info("Finished writing out network \"{0}\"", networkName);
    }

    return evaluator;
}
public void startTrain()
{
    int class_count = 0;

    if ("NN".Equals(Constants.NN_SVM_SURF))
    {
        double sigmoidAlphaValue = 1.0;
        double learningRate = 100;
        int max_epoch = 50;
        double min_err = 0.000000001;

        List<FileInfo> trainingFiles = FileTools.getTrainingFiles(ref class_count);
        int samples = trainingFiles.Count;

        // Prepare the learning data: one binary target vector per sign class.
        double[][] input = new double[samples][];
        Dictionary<int, double[][]> outputs = new Dictionary<int, double[][]>();

        for (int i = 0; i < samples; i++)
        {
            int currentImageClass = Int32.Parse(trainingFiles[i].Directory.Name);
            Bitmap bmp = (Bitmap)Bitmap.FromFile(trainingFiles[i].FullName, false);

            int com_x = 0, com_y = 0;
            ByteTools.imageCoM(bmp, ref com_x, ref com_y);

            input[i] = new double[numOfinputs];
            List<Ipoint> featureList = fillFeatures(bmp, com_x, com_y, input[i]);

            if (!outputs.ContainsKey(currentImageClass))
            {
                outputs.Add(currentImageClass, new double[samples][]);
                for (int j = 0; j < samples; j++)
                {
                    outputs[currentImageClass][j] = new double[] { 0d };
                }
            }
            outputs[currentImageClass][i][0] = 1d;
        }

        // Train one network per sign class (one-vs-all).
        Dictionary<int, ActivationNetwork> networks = new Dictionary<int, ActivationNetwork>();
        int[] availSigns = outputs.Keys.ToArray();
        foreach (int sign in availSigns)
        {
            ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(sigmoidAlphaValue),
                numOfinputs, new int[] { Constants.NUM_OF_NN_HIDDEN_LAYER_NODES, 1 });
            Accord.Neuro.Learning.LevenbergMarquardtLearning teacher = new Accord.Neuro.Learning.LevenbergMarquardtLearning(network);
            teacher.LearningRate = learningRate;

            int epoch = 0;
            double error;
            while (true)
            {
                // Run one epoch of the learning procedure.
                error = teacher.RunEpoch(input, outputs[sign]) / samples;
                Console.WriteLine("Epoch:" + epoch + " Error:" + error);
                if (epoch++ > max_epoch || error < min_err)
                    break;
            }

            networks.Add(sign, network);
            network.Save(Constants.base_folder + "nn_12x12_" + sign + ".dat");
            Logger.log("Error: " + error + " Epoch:" + epoch);
        }
    }
    else if ("NN_SURF".Equals(Constants.NN_SVM_SURF) || "NN_12SIMPLE".Equals(Constants.NN_SVM_SURF))
    {
        // The sigmoid steepness that works best differs per feature type.
        double sigmoidAlphaValue = 1.0;
        if ("NN_SURF".Equals(Constants.NN_SVM_SURF))
        {
            if ("triangle".Equals(Constants.CIRCLE_TRIANGLE)) sigmoidAlphaValue = 6.0;
            if ("circle".Equals(Constants.CIRCLE_TRIANGLE)) sigmoidAlphaValue = 6.0;
        }
        else if ("NN_12SIMPLE".Equals(Constants.NN_SVM_SURF))
        {
            if ("triangle".Equals(Constants.CIRCLE_TRIANGLE)) sigmoidAlphaValue = 1.0;
            if ("circle".Equals(Constants.CIRCLE_TRIANGLE)) sigmoidAlphaValue = 1.0;
        }

        double learningRate = 1.00;
        int max_epoch = 3000;
        double min_err = 0.000001;

        ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(sigmoidAlphaValue),
            numOfinputs, Constants.NUM_OF_SIGN_TYPES);
        DeltaRuleLearning teacher = new DeltaRuleLearning(network);
        teacher.LearningRate = learningRate;
        /* Alternative: a two-layer network trained with back-propagation.
        ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(sigmoidAlphaValue), numOfinputs,
            new int[] { (numOfinputs + Constants.NUM_OF_SIGN_TYPES) / 2, Constants.NUM_OF_SIGN_TYPES });
        BackPropagationLearning teacher = new BackPropagationLearning(network);
        teacher.LearningRate = learningRate;
        //teacher.Momentum = momentum;
        */

        // Collect up to MAX_TRAIN_SAMPLE training bitmaps per class directory.
        List<FileInfo> trainingFiles = new List<FileInfo>(1000);
        DirectoryInfo di = new DirectoryInfo(Constants.base_folder + "train_" + Constants.CIRCLE_TRIANGLE);
        DirectoryInfo[] dirs = di.GetDirectories("*");
        foreach (DirectoryInfo dir in dirs)
        {
            int i = 0;
            FileInfo[] files = dir.GetFiles("*.bmp");
            foreach (FileInfo fi in files)
            {
                trainingFiles.Add(fi);
                if (i++ > Constants.MAX_TRAIN_SAMPLE)
                    break;
            }
        }
        //List<FileInfo> trainingFiles = FileTools.getTrainingFiles(ref class_count);

        int samples = trainingFiles.Count;

        // Prepare the learning data: one-hot output per sign type.
        double[][] input = new double[samples][];
        double[][] output = new double[samples][];
        for (int i = 0; i < samples; i++)
        {
            Bitmap bmp = (Bitmap)Bitmap.FromFile(trainingFiles[i].FullName, false);

            int com_x = 0, com_y = 0;
            ByteTools.imageCoM(bmp, ref com_x, ref com_y);

            input[i] = new double[numOfinputs];
            output[i] = new double[Constants.NUM_OF_SIGN_TYPES];
            bmp.Tag = trainingFiles[i].Directory.Name + "_" + trainingFiles[i].Name;
            fillFeatures_SURF(bmp, com_x, com_y, input[i]);
            output[i][Int32.Parse(trainingFiles[i].Directory.Name) - 1] = 1d;
        }

        int epoch = 0;
        double error = 0;
        while (true)
        {
            // Run one epoch of the learning procedure.
            error = teacher.RunEpoch(input, output) / samples;
            Console.WriteLine("Epoch:" + epoch + " Error:" + error);
            if (epoch++ > max_epoch || error < min_err)
                break;
        }

        network.Save(Constants.base_folder + Constants.NN_SVM_SURF + "_" + Constants.CIRCLE_TRIANGLE + ".dat");
        Logger.log("NNTrain [" + error + "]: " + Constants.NN_SVM_SURF + ", " + Constants.CIRCLE_TRIANGLE + ", "
            + learningRate + ", " + sigmoidAlphaValue);
    }
}
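// The "NN" branch above trains one binary network per sign (one-vs-all). A sketch of
// how classification could then work: run each per-sign network on a feature vector
// and pick the sign whose network responds most strongly. The helper name is an
// assumption; Compute is the standard AForge.Neuro call.
private static int Classify(Dictionary<int, ActivationNetwork> networks, double[] features)
{
    int bestSign = -1;
    double bestScore = double.NegativeInfinity;
    foreach (var pair in networks)
    {
        double score = pair.Value.Compute(features)[0]; // single output neuron per network
        if (score > bestScore)
        {
            bestScore = score;
            bestSign = pair.Key;
        }
    }
    return bestSign;
}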