/// <summary>
/// Builds a three-layer bipolar-sigmoid network (characterSize inputs, 400 hidden,
/// characterCount outputs) and trains it with backpropagation until the epoch
/// error drops below 30 (or a safety cap of epochs is reached).
/// </summary>
/// <param name="characterSize">Number of input neurons (pixels per character sample).</param>
public NeuralNetworkOperations(int characterSize)
{
    neuralNet = new ActivationNetwork(new BipolarSigmoidFunction(2.0f), characterSize, 400, characterCount);
    neuralNet.Randomize();

    teacher = new AForge.Neuro.Learning.BackPropagationLearning(neuralNet);
    teacher.LearningRate = 0.5f;
    teacher.Momentum = 0.1f;

    prepareDataForTeacher();

    // PERF: materialize the training data once. The original called ToArray()
    // on both lists inside the loop, re-allocating the arrays on every epoch.
    var inputs = treningLetterListInput.ToArray();
    var outputs = treningLetterListOutput.ToArray();

    double err = 400.0;
    int count = 0;
    // BUGFIX: the original loop had no exit path if training never converged
    // below 30, hanging the constructor forever. Cap the number of epochs.
    const int maxEpochs = 100000;
    while (err >= 30.0 && count < maxEpochs)
    {
        err = teacher.RunEpoch(inputs, outputs);
        count++;
    }
}
/// <summary>
/// Endlessly trains the network in ~500 ms bursts, then evaluates it on random
/// samples per class, updating the shared <c>percentage</c> confusion matrix and
/// <c>totalErrors</c> fraction, and asks the form to redraw. Runs until the
/// hosting thread is terminated (infinite loop is by design).
/// </summary>
static void Learn()
{
    var network = new ActivationNetwork(
        new SigmoidFunction(),
        baseMaker.InputSize,
        arguments.NeuronsCount,
        baseMaker.OutputSize);
    // Randomize() also initializes the neuron thresholds (biases), which the
    // manual loop below does not touch — keep both.
    network.Randomize();

    // Re-randomize only the weights into [-1, 1).
    foreach (var layer in network.Layers)
        foreach (var neuron in layer.Neurons)
            for (int i = 0; i < neuron.Weights.Length; i++)
                neuron.Weights[i] = rnd.NextDouble() * 2 - 1;

    var teacher = new BackPropagationLearning(network)
    {
        LearningRate = 1,
        Momentum = 0
    };

    while (true)
    {
        // Train for roughly half a second, then re-evaluate.
        var watch = Stopwatch.StartNew();
        while (watch.ElapsedMilliseconds < 500)
        {
            teacher.RunEpoch(baseMaker.Inputs, baseMaker.Answers);
        }
        watch.Stop();

        var count = 0;
        // BUGFIX: totalErrors was never reset, so the misclassification count
        // accumulated the previous pass's fraction on every evaluation.
        totalErrors = 0;
        percentage = new double[baseMaker.OutputSize, baseMaker.OutputSize];
        for (int i = 0; i < baseMaker.OutputSize; i++)
        {
            // 5 * OutputSize random samples per class.
            for (int j = 0; j < baseMaker.OutputSize * 5; j++)
            {
                var task = baseMaker.GenerateRandom(i);
                var output = network.Compute(task);
                // Predicted class = first index holding the maximum activation.
                var max = output.Max();
                var maxIndex = Array.IndexOf(output, max);
                percentage[i, maxIndex]++;
                if (i != maxIndex) totalErrors++;
                count++;
            }
        }

        // Normalize the confusion counts by the largest cell for display.
        var maxPercentage = percentage.Cast<double>().Max();
        for (int i = 0; i < baseMaker.OutputSize; i++)
            for (int j = 0; j < baseMaker.OutputSize; j++)
                percentage[i, j] /= maxPercentage;

        totalErrors /= count;
        form.BeginInvoke(new Action(Update));
    }
}
/// <summary>
/// Grid-search over layer layouts, decision thresholds and training iterations.
/// For every layer combination a network is trained incrementally (one RunEpoch per
/// iteration, same network reused across thresholds/iterations); after each epoch the
/// network is validated against all validation parsers, and whenever the worst-case
/// win rate beats the best seen so far the network and its configuration are saved.
/// Progress is redrawn in place at the console position captured on entry.
/// </summary>
/// <param name="inputParser">Source of training input/output vectors.</param>
/// <param name="outputFileName">Path the best network is saved to ("&lt;path&gt;.txt" gets the config).</param>
/// <param name="layers">Candidate layer sizes expanded by getLayerCombinations.</param>
/// <param name="minThreshold">Lowest decision threshold to test.</param>
/// <param name="maxThreshold">Highest decision threshold to test.</param>
/// <param name="thresholdStepSize">Step between tested thresholds.</param>
/// <param name="minTrades">Minimum trades a validation set must produce to count.</param>
/// <param name="iterations">Training epochs per (combination, threshold) pair.</param>
/// <param name="randomize">Re-randomize the weights every N iterations (0 = never).</param>
/// <param name="validationParsers">Out-of-sample data sets used for scoring.</param>
public static void Optimize(
    FsdParser inputParser,
    string outputFileName,
    int[][] layers,
    double minThreshold,
    double maxThreshold,
    double thresholdStepSize,
    int minTrades,
    int iterations,
    int randomize,
    FsdParser[] validationParsers)
{
    HashSet<int[]> layerCombinations;
    getLayerCombinations(layers, out layerCombinations);

    // Best result found so far across the whole search.
    // NOTE(review): bestNetwork/bestThreshold are written but never read here —
    // presumably kept for debugging; the persisted artifacts are the files below.
    Network bestNetwork = null;
    double bestThreshold = double.NaN;
    double bestMinWinRate = double.NegativeInfinity;

    // Remember where the progress display starts so it can be redrawn in place.
    int cursorLeft = Console.CursorLeft;
    int cursorTop = Console.CursorTop;

    int numTotalIterations = (int)(layerCombinations.Count * ((maxThreshold - minThreshold) / thresholdStepSize + 1) * iterations);
    int numIterations = 0;

    foreach (int[] layerCombination in layerCombinations)
    {
        ActivationNetwork network = new ActivationNetwork(
            new SigmoidFunction(),
            inputParser.InputVectors[0].Length,
            layerCombination);

        network.Randomize();

        ParallelResilientBackpropagationLearning teacher = new ParallelResilientBackpropagationLearning(network);

        // NOTE(review): floating-point accumulation can make the loop skip
        // maxThreshold for some step sizes — confirm that is acceptable.
        for (double threshold = minThreshold; threshold <= maxThreshold; threshold += thresholdStepSize)
        {
            for (int iteration = 1; iteration <= iterations; iteration++)
            {
                numIterations++;

                // Blank ten console lines, then rewrite the progress block.
                Console.CursorLeft = cursorLeft;
                Console.CursorTop = cursorTop;
                Console.Write(new string(' ', Console.BufferWidth * 10));
                Console.CursorLeft = cursorLeft;
                Console.CursorTop = cursorTop;

                Console.Write("layerCombination[]: { ");

                for (int layerComponentIdx = 0; layerComponentIdx < layerCombination.Length; layerComponentIdx++)
                {
                    Console.Write(layerCombination[layerComponentIdx]);

                    if (layerComponentIdx < layerCombination.Length - 1)
                        Console.Write(", ");
                }

                Console.WriteLine(" }");
                Console.WriteLine("threshold: {0:0.00}", threshold);
                Console.WriteLine("iteration: {0}", iteration);
                Console.WriteLine("bestMinWinRate: {0:0.00}%", bestMinWinRate);
                Console.WriteLine("");
                Console.WriteLine("Progress: {0:0.00}%", (double)numIterations / numTotalIterations * 100.0);

                // Periodically restart training from fresh random weights.
                if (randomize > 0 && (iteration - 1) % randomize == 0)
                    network.Randomize();

                teacher.RunEpoch(inputParser.InputVectors, inputParser.OutputVectors);

                bool validData = true;
                double minWinRate = double.PositiveInfinity;

                foreach (FsdParser validationParser in validationParsers)
                {
                    int numTradesWon, numTradesLost;
                    double tradeWinRate;

                    getStatistics(network, validationParser, threshold, out numTradesWon, out numTradesLost, out tradeWinRate);

                    // Reject: too few trades for the statistic to be meaningful.
                    if (numTradesWon + numTradesLost < minTrades)
                    {
                        validData = false;
                        break;
                    }

                    // Track the worst win rate across all validation sets.
                    minWinRate = Math.Min(minWinRate, tradeWinRate);

                    // Early pruning: already cannot beat the best seen so far.
                    if (minWinRate < bestMinWinRate)
                    {
                        validData = false;
                        break;
                    }
                }

                if (validData)
                {
                    bestNetwork = network;
                    bestThreshold = threshold;
                    bestMinWinRate = minWinRate;

                    network.Save(outputFileName);

                    // Save configuration information alongside the network.
                    string configuration = "";
                    configuration += "layerCombination[]: { ";

                    for (int layerComponentIdx = 0; layerComponentIdx < layerCombination.Length; layerComponentIdx++)
                    {
                        configuration += layerCombination[layerComponentIdx];

                        if (layerComponentIdx < layerCombination.Length - 1)
                            configuration += ", ";
                    }

                    configuration += " }\r\n";
                    configuration += string.Format("threshold: {0:0.00}\r\n", threshold);
                    configuration += string.Format("iteration: {0}\r\n", iteration);
                    configuration += string.Format("bestMinWinRate: {0:0.00}%\r\n", bestMinWinRate);

                    File.WriteAllText(outputFileName + ".txt", configuration);
                }
            }
        }
    }
}
/// <summary>
/// Trains the classifier network on the given sessions. Builds input/output vectors,
/// standardizes features (storing mean/dev for later scaling), prepares the
/// cross-validation set, then runs backpropagation until the epoch-to-epoch error
/// improvement drops below 1 or 5000 epochs elapse, recording per-epoch errors.
/// </summary>
/// <param name="train">Training sessions.</param>
/// <param name="cv">Cross-validation sessions.</param>
/// <param name="trainErrors">Out: per-epoch training-set error (ComputeCVError).</param>
/// <param name="cvErrors">Out: per-epoch cross-validation error.</param>
/// <param name="function">Activation function for the network.</param>
public void Train(List<Session> train, List<Session> cv, out List<double> trainErrors, out List<double> cvErrors, IActivationFunction function)
{
    trainErrors = new List<double>();
    cvErrors = new List<double>();

    var count = train.Count;

    // Prepare learning data.
    Console.WriteLine("prepare learning data");
    double[][] input = new double[count][];
    double[][] output = new double[count][];
    for (int i = 0; i < count; i++)
    {
        input[i] = CreateInput(train[i]);
        output[i] = CreateOutput(train[i]);
    }

    // Standardize every feature column to zero mean / unit deviation,
    // remembering the statistics so ScaleInput can reuse them.
    Console.WriteLine("feature scaling");
    mean = new double[inputSize];
    dev = new double[inputSize];
    for (int i = 0; i < inputSize; i++)
    {
        var column = input.Select(p => p[i]);
        mean[i] = column.Average();
        dev[i] = column.Deviation(mean[i]);

        // BUGFIX: a constant feature has zero deviation, which previously made
        // the division below produce NaN for the whole column. Dividing by 1
        // leaves the (already zero-centered) column unchanged instead.
        if (dev[i] == 0)
            dev[i] = 1;
    }

    for (int i = 0; i < count; i++)
        for (int j = 0; j < inputSize; j++)
            input[i][j] = (input[i][j] - mean[j]) / dev[j];

    // Prepare cross-validation data.
    Console.WriteLine("prepare cv data");
    double[][] cvIn = new double[cv.Count][];
    double[][] cvOut = new double[cv.Count][];
    for (int i = 0; i < cv.Count; i++)
    {
        cvIn[i] = CreateInput(cv[i]);
        cvOut[i] = CreateOutput(cv[i]);
    }

    // Scale the CV inputs with the training-set statistics.
    Console.WriteLine("cv feature scaling");
    for (int i = 0; i < cv.Count; i++)
        cvIn[i] = ScaleInput(cvIn[i]);

    // Create the network: inputSize inputs, one hidden layer of the same
    // width, one output per class.
    Console.WriteLine("create network");
    _network = new ActivationNetwork(function, inputSize, inputSize, classesCount);
    _network.Randomize();

    BackPropagationLearning teacher = new BackPropagationLearning(_network);
    teacher.LearningRate = 0.01;

    int iter = 0;
    double error = 999;   // sentinel so the first delta is (almost certainly) > 1
    double delta = 999;
    Console.WriteLine("Train Network");
    // Stop when the improvement per epoch falls below 1, or after 5000 epochs.
    while (delta > 1 && iter < 5000)
    {
        double trainError = teacher.RunEpoch(input, output);
        // Recompute the training error with the same metric used for CV so
        // the two curves are comparable.
        double trainError2 = ComputeCVError(_network, input, output);
        double cvError = ComputeCVError(_network, cvIn, cvOut);
        delta = Math.Abs(error - trainError);
        error = trainError;
        trainErrors.Add(trainError2);
        cvErrors.Add(cvError);
        iter++;
        if (iter % 100 == 0)
            Console.WriteLine(iter);
    }
    Console.WriteLine(iter);
}
/// <summary>
/// Background worker for neural network learning: loads training rows from SQL,
/// trains a network with evolutionary learning until the epoch error drops below 30
/// (re-randomizing when training stalls), then saves the network to disk.
/// Any exception is appended to <c>message</c> instead of crashing the worker.
/// </summary>
/// <param name="sender">BackgroundWorker raising the event (unused).</param>
/// <param name="e">Event arguments (unused).</param>
private void backgroundWorkerNeural_DoWork(object sender, DoWorkEventArgs e)
{
    // NOTE(review): credentials/host embedded in source — consider moving to config.
    string connectionString = "Data Source=192.168.0.245;Initial Catalog=MyDB;Integrated Security=True; Connection Timeout=30000";

    // Input columns, in network-input order. i19 and i22 are excluded here and
    // in the WHERE clause below (consistent with the original hand-written query).
    string[] inputColumns =
    {
        "i1", "i2", "i3", "i4", "i5", "i6", "i7", "i8", "i9", "i10",
        "i11", "i12", "i13", "i14", "i15", "i16", "i17", "i18", "i20", "i21", "i23"
    };

    using (SqlConnection connection = new SqlConnection(connectionString))
    {
        try
        {
            connection.Open();

            // Select only rows where every input column is populated (non-zero).
            string queryString = "SELECT * FROM rates WHERE "
                + string.Join(" AND ", inputColumns.Select(c => "[" + c + "]<>0"));

            SqlCommand command = new SqlCommand(queryString, connection);
            DataTable dt = new DataTable();
            // BUGFIX: dispose the reader (the original leaked it).
            using (SqlDataReader reader = command.ExecuteReader())
            {
                dt.Load(reader);
            }

            int valueCount = dt.Rows.Count;
            double[][] input_arr = new double[valueCount][];
            double[][] output_arr = new double[valueCount][];

            for (int i = 0; i < valueCount; i++)
            {
                DataRow row = dt.Rows[i];
                input_arr[i] = new double[inputColumns.Length];
                output_arr[i] = new double[1];

                // Collapses the 21 copy-pasted Double.Parse lines of the original.
                for (int c = 0; c < inputColumns.Length; c++)
                    input_arr[i][c] = Double.Parse(row[inputColumns[c]].ToString());

                // Target is the sign of the price difference: up / flat / down.
                double difference = Double.Parse(row["Difference"].ToString());
                if (difference > 0)
                    output_arr[i][0] = 1;
                else if (difference == 0)
                    output_arr[i][0] = 0;
                else
                    output_arr[i][0] = -1;
            }

            // 21 inputs, single bipolar-sigmoid output neuron.
            AForge.Neuro.BipolarSigmoidFunction sigmoidFunction = new AForge.Neuro.BipolarSigmoidFunction();
            AForge.Neuro.ActivationNetwork network = new AForge.Neuro.ActivationNetwork(sigmoidFunction, 21, 1);

            AForge.Neuro.Learning.EvolutionaryLearning teacher = new AForge.Neuro.Learning.EvolutionaryLearning(network, 1000);

            bool converged = false;
            double error = 0;
            double lastError = 0;
            int stagnantEpochs = 0;
            // Flatten the activation so small weight changes move the output gently.
            sigmoidFunction.Alpha = 0.01;

            while (!converged)
            {
                error = teacher.RunEpoch(input_arr, output_arr);
                double improvement = Math.Abs(lastError - error);

                if (error < 30)
                {
                    converged = true;
                }
                else if (improvement < 0.0000001)
                {
                    // Training has stalled; after 1000 stalled epochs restart
                    // from fresh random weights.
                    lastError = error;
                    stagnantEpochs++;
                    if (stagnantEpochs > 1000)
                    {
                        network.Randomize();
                        stagnantEpochs = 0;
                    }
                }
                else
                {
                    lastError = error;
                }
            }

            // BUGFIX: the original @"E:\\neural" (verbatim string with an escaped
            // backslash) produced a doubled backslash in the path.
            network.Save(@"E:\neural");
        }
        catch (Exception ex)
        {
            message += " Exception: " + ex.Message;
        }
        // No finally/Close needed: the using block disposes (and closes) the connection.
    }
}
/// <summary>
/// Trains the network several times from different random initial weights, evaluates
/// each candidate on the cross-validation data, and returns the candidate that
/// misclassified the fewest cross-validation samples.
/// </summary>
/// <param name="neuralNet">Network to (re-)train; its weights are randomized per attempt.</param>
/// <param name="teacher">Learning algorithm bound to <paramref name="neuralNet"/>.</param>
/// <param name="input">Training inputs.</param>
/// <param name="output">Training targets.</param>
/// <param name="crossValidationInput">Inputs used to score each candidate.</param>
/// <param name="crossValidationDataLabels">Expected labels for the CV inputs.</param>
/// <returns>The best-performing trained network (deserialized snapshot).</returns>
private static ActivationNetwork trainNetworksCompeteOnCrossValidation(ActivationNetwork neuralNet, ISupervisedLearning teacher,
    double[][] input, double[][] output, double[][] crossValidationInput, char[] crossValidationDataLabels)
{
    DefaultLog.Info("Training {0} neural networks & picking the one that performs best on the cross-validation data . . .", NUM_NETWORKS_TO_TRAIN_FOR_CROSS_VALIDATION_COMPETITION);

    // Serialized snapshot of the best candidate seen so far.
    // BUGFIX: the stream is now disposed (the original leaked it).
    using (MemoryStream bestNetworkStream = new MemoryStream())
    {
        uint bestNetworkNumMisclassified = uint.MaxValue;

        for (int i = 0; i < NUM_NETWORKS_TO_TRAIN_FOR_CROSS_VALIDATION_COMPETITION; i++)
        {
            DefaultLog.Info("Training network {0}/{1}", (i + 1), NUM_NETWORKS_TO_TRAIN_FOR_CROSS_VALIDATION_COMPETITION);

            // Train a new candidate from fresh random weights.
            neuralNet.Randomize();
            trainNetwork(neuralNet, teacher, input, output, crossValidationInput, crossValidationDataLabels);

            // Score this candidate on the cross-validation data.
            NeuralNetworkEvaluator evaluator = new NeuralNetworkEvaluator(neuralNet);
            evaluator.Evaluate(crossValidationInput, crossValidationDataLabels);
            uint numMisclassified = evaluator.ConfusionMatrix.NumMisclassifications;

            if (numMisclassified < bestNetworkNumMisclassified)
            {
                // New best: overwrite the stored snapshot and the best score.
                bestNetworkStream.SetLength(0);
                neuralNet.Save(bestNetworkStream);
                bestNetworkNumMisclassified = numMisclassified;
            }
        }

        DefaultLog.Info("Trained all networks and selected the best one");

        // Deserialize and return the winning snapshot.
        bestNetworkStream.Position = 0;
        return ActivationNetwork.Load(bestNetworkStream) as ActivationNetwork;
    }
}
/// <summary>
/// Trains a single-layer bipolar-sigmoid network with backpropagation (competing
/// several random initializations on the cross-validation data), compares the result
/// against any previously saved network of the same name, persists whichever wins,
/// and returns an evaluator holding the winner's results on the evaluation data.
/// </summary>
/// <param name="input">Training inputs.</param>
/// <param name="output">Training targets.</param>
/// <param name="crossValidationInput">Inputs used for model selection.</param>
/// <param name="crossValidationDataLabels">Labels for the CV inputs.</param>
/// <param name="evaluationInput">Held-out inputs for the final evaluation.</param>
/// <param name="evaluationDataLabels">Labels for the evaluation inputs.</param>
/// <param name="learningRate">Backpropagation learning rate.</param>
/// <param name="networkName">Name used for the persisted network & CSV files.</param>
/// <returns>Evaluator for the best network, scored on the evaluation data.</returns>
private static NeuralNetworkEvaluator evaluateSingleLayerActivationNetworkWithSigmoidFunctionBackPropagationLearning(
    double[][] input, double[][] output,
    double[][] crossValidationInput, char[] crossValidationDataLabels,
    double[][] evaluationInput, char[] evaluationDataLabels,
    double learningRate, string networkName)
{
    // Create the neural network.
    BipolarSigmoidFunction sigmoidFunction = new BipolarSigmoidFunction(2.0f);
    ActivationNetwork neuralNet = new ActivationNetwork(sigmoidFunction, input[0].Length, ClassifierHelpers.NUM_CHAR_CLASSES);
    neuralNet.Randomize();

    // Back-propagation teacher.
    BackPropagationLearning teacher = new BackPropagationLearning(neuralNet);
    // BUGFIX: the learningRate parameter was ignored — the constant
    // LEARNING_RATE was assigned instead, making the parameter dead.
    teacher.LearningRate = learningRate;

    // Train multiple networks, pick the one that performs best on the CV data.
    neuralNet = trainNetworksCompeteOnCrossValidation(neuralNet, teacher, input, output, crossValidationInput, crossValidationDataLabels);

    // Score the selected network on the CV data for comparison with the stored best.
    NeuralNetworkEvaluator crossValEvaluator = new NeuralNetworkEvaluator(neuralNet);
    crossValEvaluator.Evaluate(crossValidationInput, crossValidationDataLabels);

    string previousNetworkPath = Program.NEURAL_NETWORKS_PATH + networkName + Program.NEURAL_NETWORK_FILE_EXTENSION;
    string networkCMPath = Program.NEURAL_NETWORKS_PATH + networkName + ".csv";

    bool newBest = false;
    ActivationNetwork bestNetwork = neuralNet;
    if (File.Exists(previousNetworkPath))
    {
        // Load the previous best of this type and evaluate it on the same CV data.
        ActivationNetwork previous = ActivationNetwork.Load(previousNetworkPath) as ActivationNetwork;
        NeuralNetworkEvaluator prevCrossValEval = new NeuralNetworkEvaluator(previous);
        prevCrossValEval.Evaluate(crossValidationInput, crossValidationDataLabels);

        if (prevCrossValEval.ConfusionMatrix.NumMisclassifications > crossValEvaluator.ConfusionMatrix.NumMisclassifications)
        {
            // The new network wins: remove the old artifacts.
            DefaultLog.Info("New best cross-validation score for network \"{0}\". Previous was {1}/{2}, new best is {3}/{2}",
                networkName, prevCrossValEval.ConfusionMatrix.NumMisclassifications,
                prevCrossValEval.ConfusionMatrix.TotalClassifications,
                crossValEvaluator.ConfusionMatrix.NumMisclassifications);

            File.Delete(previousNetworkPath);
            File.Delete(networkCMPath);
            newBest = true;
        }
        else
        {
            // The previous network is still the best.
            DefaultLog.Info("Existing \"{0}\" network performed better than new one. New network scored {1}/{2}, existing scored {3}/{2}",
                networkName, crossValEvaluator.ConfusionMatrix.NumMisclassifications,
                crossValEvaluator.ConfusionMatrix.TotalClassifications,
                prevCrossValEval.ConfusionMatrix.NumMisclassifications);
            bestNetwork = previous;
        }
    }
    else
    {
        // No previous best on record for this network type.
        DefaultLog.Info("No previous best record for network \"{0}\" . . .", networkName);
        newBest = true;
    }

    // Evaluate the winning network on the held-out evaluation data.
    NeuralNetworkEvaluator evaluator = new NeuralNetworkEvaluator(bestNetwork);
    evaluator.Evaluate(evaluationInput, evaluationDataLabels);

    if (newBest)
    {
        // BUGFIX: log message was garbled ("net best network of type\"{0}\"").
        DefaultLog.Info("Writing out new best network of type \"{0}\"", networkName);
        // bestNetwork == neuralNet whenever newBest is true; saving bestNetwork
        // makes that invariant explicit.
        bestNetwork.Save(previousNetworkPath);
        // Write the confusion matrix for the evaluation data (not cross-validation).
        evaluator.ConfusionMatrix.WriteToCsv(networkCMPath);
        DefaultLog.Info("Finished writing out network \"{0}\"", networkName);
    }
    return evaluator;
}