/// <summary>
/// Runs the network over every sequence once and returns the average reported
/// loss per target step. When <paramref name="applyTraining"/> is true, the
/// training loss gradient is back-propagated and model parameters are updated
/// every 10 steps (truncated backpropagation-through-time).
/// </summary>
/// <param name="learningRate">Step size handed to UpdateModelParams.</param>
/// <param name="network">Network to drive; its state is reset per sequence.</param>
/// <param name="sequences">Sequences to evaluate (and optionally train on).</param>
/// <param name="applyTraining">True to back-propagate and update parameters.</param>
/// <param name="lossTraining">Loss whose gradient drives training.</param>
/// <param name="lossReporting">Loss accumulated into the returned average.</param>
/// <returns>
/// Average reported loss over all steps that have a target output. A NaN or
/// infinite step loss is returned immediately as-is, aborting the pass.
/// NOTE(review): if no step has a target output this returns 0/0 = NaN —
/// callers should be prepared for that.
/// </returns>
public static double Pass(double learningRate, INetwork network, List <DataSequence> sequences, bool applyTraining, ILoss lossTraining, ILoss lossReporting)
{
    double numerLoss = 0;
    double denomLoss = 0;
    // Transform table loaded from the serialized config file referenced by NetworkBuilder.Config.
    RnnConfig rnnConfig = (RnnConfig)Serializer.Deserialize(NetworkBuilder.Config.RnnConfigFile);
    foreach (DataSequence seq in sequences)
    {
        // Fresh recurrent state, computation graph, and dropout mask per sequence.
        network.ResetState();
        Graph g = new Graph(applyTraining);
        network.GenerateDropout(applyTraining);
        for (int i = 0; i < seq.Steps.Count; ++i)
        {
            DataStep step = seq.Steps[i];
            // Generate in dropout
            // A column is flagged as dropped when its transformed value is numerically
            // zero — presumably marking missing/padded inputs; confirm against
            // RnnConfig.GetTransformed.
            bool[] dropped = new bool[step.Input.W.Length];
            for (int col = 0; col < dropped.Length; ++col)
            {
                dropped[col] = Math.Abs(rnnConfig.GetTransformed(0, i, col, step.Input.W[col])) < 0.0000001;
            }
            Matrix output = network.Activate(step.Input, g, dropped);
            if (step.TargetOutput != null)
            {
                double loss = lossReporting.Measure(output, step.TargetOutput);
                // Abort the whole pass as soon as the loss degenerates.
                if (Double.IsNaN(loss) || Double.IsInfinity(loss))
                {
                    return(loss);
                }
                numerLoss += loss;
                denomLoss++;
                if (applyTraining)
                {
                    lossTraining.Backward(output, step.TargetOutput);
                }
            }
            // Truncated BPTT: every 10th step (including step 0) back-propagate the
            // accumulated graph, update parameters, then restart with a fresh graph
            // and a re-sampled dropout mask.
            if (i % 10 == 0 && applyTraining)
            {
                g.Backward(); //backprop dw values
                UpdateModelParams(network, learningRate); //update params
                g = new Graph(applyTraining);
                network.GenerateDropout(applyTraining);
            }
        }
    }
    return(numerLoss / denomLoss);
}
/// <summary>
/// Builds the RNN data table end to end: loads the raw data, fills gaps,
/// derives input/output pairs, splits into train/valid/test sets, exports the
/// reference outputs, normalizes, and finally clips extreme values. Results
/// are accumulated into this instance (RnnConfig, TrainX/Y, ValidX/Y, TestX/Y).
/// </summary>
public void CreateRnnTable()
{
    // Fresh config that the steps below populate (normalization stats, clip values, ...).
    RnnConfig = new RnnConfig();

    // Load data
    double[][] data = LoadFullData(DataManager.Config.FullDataFile);

    // FIX: removed dead debug scaffold — the original built a 5500-sample
    // sine wave into a local list on every call, but the line assigning it
    // into `data` was commented out, so the computation was pure waste.

    // Fill NaNs
    FillNaNs(data);

    // Create input/output
    CreateInputOutput(data, DataManager.Config.Predict);

    // Split to train, valid, test
    SplitData();

    // Export Y and Z data (correct output and first order prediction)
    ExportYZData();

    // Normalize data — group 0 appears to be inputs and group 1 outputs;
    // confirm against NormalizeData.
    NormalizeData(TrainX, ValidX, TestX, 0, DataManager.Config.NormalizationWindowSize);
    NormalizeData(TrainY, ValidY, TestY, 1, DataManager.Config.NormalizationWindowSize);

    //// PCA transformation (disabled)
    //PcaTransformation(0);
    //PcaTransformation(1);

    // Clip values with magnitude above 10 (post-normalization units — TODO confirm).
    ClipData(10);
}
/// <summary>
/// Trains an LSTM on the prepared data set and writes its predictions over the
/// test sequence (and the matching targets) to the RNN prediction files, one
/// semicolon-separated row per step. Skips all work when both output files
/// already exist and no reload is requested.
/// </summary>
public void Predict()
{
    if (!Config.Reload && File.Exists(Config.RnnPredictedXFile) && File.Exists(Config.RnnPredictedYFile))
    {
        return;
    }

    Random rng = new Random(Config.Random.Next());
    CustomDataSet data = new CustomDataSet(Config);
    RnnConfig rnnConfig = (RnnConfig)Serializer.Deserialize(Config.RnnConfigFile);

    // Network topology and training hyper-parameters.
    int inputDimension = data.Training[0].Steps[0].Input.Rows;
    int hiddenDimension = 30;
    int outputDimension = data.Training[0].Steps[0].TargetOutput.Rows;
    int hiddenLayers = 1;
    double learningRate = 0.01;
    double initParamsStdDev = 0.08;
    double dropout = 0.5;   // hidden-layer dropout — confirm rate convention in NetworkBuilder
    double inDropout = 0.8; // input-layer dropout — confirm rate convention in NetworkBuilder

    INetwork nn = NetworkBuilder.MakeLstm(inputDimension, hiddenDimension, hiddenLayers, outputDimension, new LinearUnit(), initParamsStdDev, rng, dropout, inDropout, Config);

    int reportEveryNthEpoch = 10;
    int trainingEpochs = 100;
    Trainer.train<NeuralNetwork>(trainingEpochs, learningRate, nn, data, reportEveryNthEpoch, rng);

    // FIX: writers are now disposed deterministically (`using`) so both files are
    // flushed and closed even if activation or IO throws mid-loop; the original
    // leaked them on any exception.
    using (StreamWriter predictedXFile = new StreamWriter(Config.RnnPredictedXFile))
    using (StreamWriter predictedYFile = new StreamWriter(Config.RnnPredictedYFile))
    {
        for (int i = 0; i < data.Testing.First().Steps.Count; ++i)
        {
            DataStep ds = data.Testing.First().Steps[i];
            Graph g = new Graph(false); // inference only — no backprop bookkeeping

            // Generate in dropout: a column is flagged when its transformed value is
            // numerically zero — presumably marking missing/padded inputs; confirm
            // against RnnConfig.GetTransformed.
            bool[] dropped = new bool[ds.Input.W.Length];
            for (int col = 0; col < dropped.Length; ++col)
            {
                dropped[col] = Math.Abs(rnnConfig.GetTransformed(0, i, col, ds.Input.W[col])) < 0.0000001;
            }

            Matrix input = new Matrix(ds.Input.W);
            Matrix output = nn.Activate(input, g, dropped);

            // FIX: build rows with string.Join instead of concatenation plus
            // Substring trimming — same output, no quadratic string building, and
            // no exception on an empty row.
            predictedXFile.WriteLine(string.Join(";", output.W));
            predictedYFile.WriteLine(string.Join(";", ds.TargetOutput.W));
        }
    }
}
/// <summary>
/// Converts the raw network predictions back into the original data scale:
/// reads the predicted X/Y files, restores recorded clipped values in the
/// target file, un-normalizes every cell, and writes the decoded rows to the
/// full-prediction files. Skips all work when both outputs already exist and
/// no reload is requested. Draws a console progress bar while decoding.
/// </summary>
public void DecodePrediction()
{
    bool alreadyDecoded = !Config.Reload
        && File.Exists(Config.FullPredictedXFile)
        && File.Exists(Config.FullPredictedYFile);
    if (alreadyDecoded)
    {
        return;
    }

    string prefix = "Decoding predictions...";
    Utils.DrawMessage(prefix, Utils.CreateProgressBar(Utils.ProgressBarLength, 0), ConsoleColor.Gray);

    // Transform table recorded while the data table was being built.
    RnnConfig rnnConfig = (RnnConfig)Serializer.Deserialize(Config.RnnConfigFile);

    // Predicted (index 0) and target (index 1) files, decoded pairwise into
    // their full counterparts.
    string[] sourceFiles = new string[] { Config.RnnPredictedXFile, Config.RnnPredictedYFile };
    string[] destinationFiles = new string[] { Config.FullPredictedXFile, Config.FullPredictedYFile };

    for (int fileIndex = 0; fileIndex < sourceFiles.Length; ++fileIndex)
    {
        // Parse each semicolon-separated line into a row of doubles.
        double[][] rows = File
            .ReadAllLines(sourceFiles[fileIndex])
            .Select(line => line.Split(new char[] { ';' }).Select(double.Parse).ToArray())
            .ToArray();

        // Only the target file carries recorded clip overrides to restore.
        if (fileIndex == 1)
        {
            for (int row = 0; row < rows.Length; ++row)
            {
                for (int col = 0; col < rows[row].Length; ++col)
                {
                    double? clipped = rnnConfig.GetClippedValue(row, col);
                    if (clipped != null)
                    {
                        rows[row][col] = (double)clipped;
                    }
                }
            }
        }

        // (A PCA decode stage used to sit here; it is disabled in this build.)

        // Unnormalize every cell back to the original scale.
        for (int row = 0; row < rows.Length; ++row)
        {
            for (int col = 0; col < rows[row].Length; ++col)
            {
                rows[row][col] = rnnConfig.GetTransformed(0, row, col, rows[row][col]);
            }
        }

        // Flush the decoded rows.
        File.WriteAllLines(destinationFiles[fileIndex], rows.Select(row => string.Join(";", row)));

        Utils.DrawMessage(prefix, Utils.CreateProgressBar(Utils.ProgressBarLength, (double)(fileIndex + 1) / sourceFiles.Length * 100.0), ConsoleColor.Gray);
    }

    Utils.DrawMessage(prefix, Utils.CreateProgressBar(Utils.ProgressBarLength, 100), ConsoleColor.Green);
    Console.WriteLine();
}
/// <summary>
/// Persists the given <see cref="RnnConfig"/> to <paramref name="filename"/>
/// via the project serializer.
/// </summary>
/// <param name="rnnConfig">Configuration instance to serialize.</param>
/// <param name="filename">Destination path handed to the serializer.</param>
public static void ExportRnnConfig(RnnConfig rnnConfig, string filename) =>
    Serializer.Serialize(filename, rnnConfig);