/// <summary>
/// Trains an <c>ActivationNetwork</c> on consecutive price ratios derived from tick
/// events read out of an EventStore stream, logging learning and prediction error
/// each epoch and stopping once the EMA of the prediction error stops improving.
/// </summary>
public void Test()
{
    Log("Start");

    var windowSize = 10;     // number of consecutive ratios fed to the network as input
    var predictionSize = 2;  // number of future ratios the network predicts

    // Synthetic fallback series; overwritten below by real tick data from EventStore.
    var series = Enumerable.Range(0, 6000).Select(e => (decimal)(e % 46 + 1)).ToList();

    var settings = ConnectionSettings.Create();
    settings.SetHeartbeatTimeout(TimeSpan.FromSeconds(60));

    using (var connection = EventStoreConnection.Create(settings, new IPEndPoint(IPAddress.Loopback, 1113)))
    {
        // NOTE(review): blocking on async calls (.Wait()/.Result) risks deadlocks under a
        // synchronization context; tolerated here only because this is a test harness.
        connection.ConnectAsync().Wait();

        var slice = connection.ReadStreamEventsBackwardAsync("BotCoin-001001", StreamPosition.End, 4096, false).Result;

        // Backward read returns newest-first; Reverse() restores chronological order.
        var tickEvents = slice.Events
            .Where(e => e.Event.EventType == typeof(BotCoinEvents.Events.TickEvent).Name)
            .Reverse();

        series = tickEvents
            .Select(e => Newtonsoft.Json.JsonConvert
                .DeserializeObject<BotCoinEvents.Events.TickEvent>(Encoding.UTF8.GetString(e.Event.Data))
                .last_price)
            .ToList();
    }

    // Convert absolute prices into consecutive price ratios p[i] / p[i-1].
    // Assumes no zero prices in the stream — TODO confirm upstream guarantees this,
    // otherwise the decimal division throws DivideByZeroException.
    var orgSeries = series.ToList();
    series.Clear();
    for (var i = 1; i < orgSeries.Count; i++)
    {
        series.Add(orgSeries[i] / orgSeries[i - 1]);
    }

    var normalizer = new BotCoinShared.Normalizer(series);
    var normalizedSeries = normalizer.Normalize(series).ToList();

    var network = new ActivationNetwork(
        new BipolarSigmoidFunction(2.0),
        windowSize,       // input neurons
        windowSize * 2,   // hidden layer
        predictionSize);  // output neurons

    var teacher = new Accord.Neuro.Learning.LevenbergMarquardtLearning(network);

    // Build sliding-window training samples: windowSize inputs -> predictionSize outputs.
    var sampleCount = normalizedSeries.Count - predictionSize - windowSize;
    var input = new double[sampleCount][];
    var output = new double[sampleCount][];
    for (var pos = 0; pos < sampleCount; pos++)
    {
        if (pos > 0 && pos % 10000 == 0)
        {
            Log($"Loaded {pos} of {sampleCount} samples");
        }
        input[pos] = normalizedSeries.Skip(pos).Take(windowSize).Select(e => (double)e).ToArray();
        output[pos] = normalizedSeries.Skip(pos + windowSize).Take(predictionSize).Select(e => (double)e).ToArray();
    }

    var batchSize = 10;            // EMA smoothing window, measured in epochs
    var absoluteMax = int.MaxValue;
    var ema = 200m;                // seeded high so the warm-up epochs never trigger the stop rule
    var lastEma = 200m;

    for (var iteration = 1; iteration <= absoluteMax; iteration++)
    {
        Log($"{iteration:00000} - ", false);
        teacher.RunEpoch(input, output);

        var learningError = 0m;
        var predictionError = 0m;
        var predicted = "";
        var actual = "";

        for (var pos = 0; pos < sampleCount; pos++)
        {
            var inputs = input[pos];
            // Compare against the raw (un-normalized) ratio series.
            var expected = series.Skip(pos + windowSize).Take(predictionSize).ToArray();

            var calculatedRaw = network.Compute(inputs);
            var calculated = normalizer.Denormalize(calculatedRaw.Select(e => (decimal)e)).ToArray();

            // Mean absolute percentage error across the prediction horizon.
            var error = Enumerable.Range(0, predictionSize)
                .Select(e => Math.Abs(1 - (calculated[e] / expected[e])))
                .Average() * 100;

            if (pos >= sampleCount - 1)
            {
                // The last window acts as the held-out "prediction" sample.
                predictionError += error;
                predicted = string.Join(",", calculated.Select(e => e.ToString("#0.000")));
                actual = string.Join(",", expected.Select(e => e.ToString("#0.000")));
            }
            else
            {
                learningError += error;
            }
        }

        // sampleCount - 1 positions contributed to learningError (the last one is held out).
        learningError /= (sampleCount - 1);
        ema = (ema * (batchSize - 1) + predictionError) / batchSize;

        // predicted/actual are already formatted strings; the original's ":#0" specifiers
        // on them were silently ignored by string interpolation and have been removed.
        Log($"LE {learningError:#0.0000}%, Prediction {predicted} ({actual}) {predictionError:#0.0000}%, EMA {ema:#0.0000}%");

        // Stop once the smoothed prediction error stops improving (after warm-up).
        if (iteration > batchSize && ema > lastEma)
        {
            Log("Prediction quality not improving, breaking...");
            break;
        }
        lastEma = ema;
    }
}
/// <summary>
/// Trains the classifier(s) selected by <c>Constants.NN_SVM_SURF</c>:
/// "NN" trains one binary (one-vs-all) ActivationNetwork per sign class using
/// Levenberg–Marquardt; "NN_SURF"/"NN_12SIMPLE" train a single multi-class
/// network with delta-rule learning. Trained networks are saved to disk.
/// Fix over original: every Bitmap loaded in the training loops is now disposed
/// — the original leaked a GDI handle per training image.
/// </summary>
public void startTrain()
{
    int class_count = 0;
    if ("NN".Equals(Constants.NN_SVM_SURF))
    {
        double sigmoidAlphaValue = 1.0;
        double learningRate = 100;
        int max_epoch = 50;
        double min_err = 0.000000001;

        List<FileInfo> trainingFiles = FileTools.getTrainingFiles(ref class_count);
        int samples = trainingFiles.Count;

        // prepare learning data: one shared input set, plus a 0/1 target vector per class
        double[][] input = new double[samples][];
        Dictionary<int, double[][]> outputs = new Dictionary<int, double[][]>();
        for (int i = 0; i < samples; i++)
        {
            // Directory name encodes the class id of the image.
            int currentImageClass = Int32.Parse(trainingFiles[i].Directory.Name);

            // using ensures the bitmap's native resources are released each iteration.
            using (Bitmap bmp = (Bitmap)Bitmap.FromFile(trainingFiles[i].FullName, false))
            {
                int com_x = 0, com_y = 0;
                ByteTools.imageCoM(bmp, ref com_x, ref com_y);
                input[i] = new double[numOfinputs];
                // fillFeatures populates input[i] in place; its returned list was unused.
                fillFeatures(bmp, com_x, com_y, input[i]);
            }

            // Lazily create an all-zero target column for a class the first time it appears.
            if (!outputs.ContainsKey(currentImageClass))
            {
                outputs.Add(currentImageClass, new double[samples][]);
                for (int j = 0; j < samples; j++)
                {
                    outputs[currentImageClass][j] = new double[] { 0d };
                }
            }
            outputs[currentImageClass][i][0] = 1d; // mark this sample as positive for its class
        }

        // One binary one-vs-all network per sign class present in the training set.
        Dictionary<int, ActivationNetwork> networks = new Dictionary<int, ActivationNetwork>();
        int[] availSigns = outputs.Keys.ToArray();
        foreach (int sign in availSigns)
        {
            ActivationNetwork network = new ActivationNetwork(
                new SigmoidFunction(sigmoidAlphaValue),
                numOfinputs,
                new int[] { Constants.NUM_OF_NN_HIDDEN_LAYER_NODES, 1 });
            Accord.Neuro.Learning.LevenbergMarquardtLearning teacher =
                new Accord.Neuro.Learning.LevenbergMarquardtLearning(network);
            teacher.LearningRate = learningRate;

            int epoch = 0;
            double error;
            while (true)
            {
                // run epoch of learning procedure
                error = teacher.RunEpoch(input, outputs[sign]) / samples;
                Console.WriteLine("Epoch:" + epoch + " Error:" + error);
                if (epoch++ > max_epoch || error < min_err)
                    break;
            }

            networks.Add(sign, network);
            network.Save(Constants.base_folder + "nn_12x12_" + sign + ".dat");
            Logger.log("Error: " + error + " Epoch:" + epoch);
        }
    }
    else if ("NN_SURF".Equals(Constants.NN_SVM_SURF) ||
             "NN_12SIMPLE".Equals(Constants.NN_SVM_SURF))
    {
        // Sigmoid steepness tuned per feature mode and shape family.
        double sigmoidAlphaValue = 1.0;
        if ("NN_SURF".Equals(Constants.NN_SVM_SURF))
        {
            if ("triangle".Equals(Constants.CIRCLE_TRIANGLE)) sigmoidAlphaValue = 6.0;
            if ("circle".Equals(Constants.CIRCLE_TRIANGLE)) sigmoidAlphaValue = 6.0;
        }
        else if ("NN_12SIMPLE".Equals(Constants.NN_SVM_SURF))
        {
            if ("triangle".Equals(Constants.CIRCLE_TRIANGLE)) sigmoidAlphaValue = 1.0;
            if ("circle".Equals(Constants.CIRCLE_TRIANGLE)) sigmoidAlphaValue = 1.0;
        }

        double learningRate = 1.00;
        int max_epoch = 3000;
        double min_err = 0.000001;

        // Single multi-class network: one output node per sign type.
        ActivationNetwork network = new ActivationNetwork(
            new SigmoidFunction(sigmoidAlphaValue),
            numOfinputs,
            Constants.NUM_OF_SIGN_TYPES);
        DeltaRuleLearning teacher = new DeltaRuleLearning(network);
        teacher.LearningRate = learningRate;

        // Collect up to MAX_TRAIN_SAMPLE bitmaps from each per-class directory.
        List<FileInfo> trainingFiles = new List<FileInfo>(1000);
        DirectoryInfo di = new DirectoryInfo(Constants.base_folder + "train_" + Constants.CIRCLE_TRIANGLE);
        DirectoryInfo[] dirs = di.GetDirectories("*");
        foreach (DirectoryInfo dir in dirs)
        {
            int i = 0;
            FileInfo[] files = dir.GetFiles("*.bmp");
            foreach (FileInfo fi in files)
            {
                trainingFiles.Add(fi);
                if (i++ > Constants.MAX_TRAIN_SAMPLE)
                    break;
            }
        }

        int samples = trainingFiles.Count;

        // prepare learning data
        double[][] input = new double[samples][];
        double[][] output = new double[samples][];
        for (int i = 0; i < samples; i++)
        {
            // using ensures the bitmap's native resources are released each iteration.
            using (Bitmap bmp = (Bitmap)Bitmap.FromFile(trainingFiles[i].FullName, false))
            {
                int com_x = 0, com_y = 0;
                ByteTools.imageCoM(bmp, ref com_x, ref com_y);
                input[i] = new double[numOfinputs];
                output[i] = new double[Constants.NUM_OF_SIGN_TYPES];
                bmp.Tag = trainingFiles[i].Directory.Name + "_" + trainingFiles[i].Name;
                fillFeatures_SURF(bmp, com_x, com_y, input[i]);
                // Directory names are 1-based class ids; targets are one-hot vectors.
                output[i][Int32.Parse(trainingFiles[i].Directory.Name) - 1] = 1d;
            }
        }

        int epoch = 0;
        double error = 0;
        while (true)
        {
            // run epoch of learning procedure
            error = teacher.RunEpoch(input, output) / samples;
            Console.WriteLine("Epoch:" + epoch + " Error:" + error);
            if (epoch++ > max_epoch || error < min_err)
                break;
        }

        network.Save(Constants.base_folder + Constants.NN_SVM_SURF + "_" + Constants.CIRCLE_TRIANGLE + ".dat");
        Logger.log("NNTrain [" + error + "]: " + Constants.NN_SVM_SURF + ", " + Constants.CIRCLE_TRIANGLE + ", " + learningRate + ", " + sigmoidAlphaValue);
    }
}