/// <summary>
/// Backpropagation-through-time over the cached forward passes of a recurrent net.
/// Walks the stored output history (<c>OutputCache</c>) from the most recent time step
/// backwards, propagating error through every layer at every step and applying weight
/// updates via each layer's <c>PropogateError</c> [sic — spelling matches the Layer API].
/// </summary>
/// <param name="ts">Training sequence whose responses supply the per-step target outputs.</param>
/// <param name="batchSize">Divisor applied to the learning rate so updates average over the batch.</param>
private void PropogateError(TrainingData ts, int batchSize) {
    // Initialize the error from the future: one zero vector per layer, holding the
    // recurrent-state error flowing back from the time step after the current one.
    List<Vector<double>> futureErrorCache = new List<Vector<double>>();
    for (var i = 0; i < Layers.Count; i++) {
        futureErrorCache.Add(new DenseVector(Layers[i].OutputDimension)); // Only store the error relevant to that layer
    }
    // Step backwards through the memory.
    // NOTE(review): t stops at 1, so OutputCache[0] is only ever read as the "previous
    // time" state — presumably the initial/wiped state with no training target; confirm.
    for (var t = OutputCache.Count - 1; t > 0; t--) {
        // Error on the output layer from the training data
        // (ts[t - 1] pairs with OutputCache[t] — cache appears offset by one; confirm against the forward pass).
        Vector<double> error = CostFunc.Derivative(ts[t - 1].Response, OutputCache[t].Last());
        // Step backwards through the net.
        for (var i = Layers.Count - 1; i >= 0; i--) {
            // Fold in the recurrent error arriving from the future time step for this layer.
            error += futureErrorCache[i];
            // [t-1][i+1] is the output of the current layer at a previous time;
            // the joint input is (previous recurrent state ++ current feed-forward input).
            Vector<double> lastInput = Concatenate(Layers[i].InputDimension, OutputCache[t - 1][i + 1], OutputCache[t][i]);
            // Updates layer i's weights (scaled learning rate) and returns the error
            // w.r.t. the joint input vector.
            Vector<double> jointInputError = Layers[i].PropogateError(error, LearningRate / batchSize, lastInput);
            Vector<double> pastStateError;
            // Split the joint-input error back into its two halves:
            // the recurrent-state part (flows to this layer at t-1) and the
            // feed-forward part (becomes `error` for layer i-1 at time t).
            // If this is the first layer, error would be the error on the training input, so we can just ignore it.
            Split(jointInputError, Layers[i].OutputDimension, out pastStateError, out error, OutputCache[t][i]?.Count ?? 1);
            futureErrorCache[i] = pastStateError; // Store the most recent error from the future.
        }
    }
}
/// <summary>
/// Builds <paramref name="nData"/> random binary sequences of length <paramref name="length"/>
/// where each response is the XOR of the current input bit with the previous one.
/// The first pair's response is a neutral 0.5 (no previous bit exists to XOR against).
/// </summary>
/// <param name="data">Receives the generated training set.</param>
/// <param name="length">Number of time steps per sequence.</param>
/// <param name="nData">Number of sequences to generate.</param>
static void LoadXOR(out HashSet<TrainingData> data, int length, int nData) {
    data = new HashSet<TrainingData>();
    ContinuousUniform coin = new ContinuousUniform(0, 1);
    for (var sample = 0; sample < nData; sample++) {
        TrainingData sequence = new TrainingData(1, 1);
        // Seed step: random bit in, neutral 0.5 target out.
        sequence[0] = new TrainingData.TrainingPair(
            DenseVector.Create(1, coin.Sample() > 0.5 ? 1 : 0),
            DenseVector.Create(1, 0.5));
        for (var step = 1; step < length; step++) {
            TrainingData.TrainingPair pair = new TrainingData.TrainingPair();
            pair.Data = DenseVector.Create(1, coin.Sample() > 0.5 ? 1 : 0);
            // XOR of this bit with the previous bit: equal bits -> 0, differing bits -> 1.
            pair.Response = DenseVector.Create(1, sequence[step - 1].Data[0] == pair.Data[0] ? 0 : 1);
            sequence[step] = pair;
        }
        data.Add(sequence);
    }
}
/// <summary>
/// Trains a single-hidden-layer sparse autoencoder on the MNIST test split, then dumps
/// the learned weight columns to "autoEncoded.txt" and one sample's reconstruction
/// (output line, then original input line) to "replication.txt".
/// </summary>
static void TrainAutoEncoder() {
    MenuModel.LoadTrainingSet("newMnist_test", "newMnist_test.csv");
    MenuModel.LoadTestSet("newMnist_test", "newMnist_test.csv");

    AutoEncoder net = new AutoEncoder(activationGoal: 0.2, sparsityWeight: 30);
    net.SetParameters(learningRate: 1, costFunc: CostFunction.MeanSquare);
    net.Add(new Layer(784, 100, ActivationFunction.Sigmoid));

    MenuModel.CurrentNet = net;
    MenuModel.SelectTest("newMnist_test");
    MenuModel.SelectTrain("newMnist_test");
    MenuModel.TestSampleFreq = 10;
    MenuModel.AddTestMonitor(PrintToScreen, 10101010);
    Console.WriteLine("Done Loading");
    MenuModel.TrainNet(10, 100);

    // Formats a sequence of doubles as a single space-separated line.
    string Row(IEnumerable<double> values) => string.Join(" ", values);

    // One line per hidden unit: that unit's incoming weight column.
    using (StreamWriter fout = new StreamWriter("autoEncoded.txt")) {
        Matrix<double> w = (net[0] as Layer).Weights;
        for (var col = 0; col < w.ColumnCount; col++) {
            fout.WriteLine(Row(Enumerable.Range(0, w.RowCount).Select(row => w[row, col])));
        }
    }

    // Reconstruction of one test sample: net output first, raw input second.
    using (StreamWriter fout = new StreamWriter("replication.txt")) {
        TrainingData td = MenuModel.SelectedTest.First();
        Vector<double> output = net.Process(td.Data);
        fout.WriteLine(Row(output));
        fout.WriteLine(Row(td.Data));
    }
}
/// <summary>
/// Copies the pairs in [<paramref name="start"/>, <paramref name="end"/>) into a new
/// <see cref="TrainingData"/>. When <paramref name="end"/> &lt; <paramref name="start"/>
/// the pairs are copied in reverse order; when they are equal the result is empty.
/// </summary>
/// <param name="start">Inclusive starting index.</param>
/// <param name="end">Exclusive ending index; may be less than <paramref name="start"/>.</param>
/// <returns>A new TrainingData holding copies of the selected pairs.</returns>
public TrainingData SubSequence(int start, int end) {
    int dir = Math.Sign(end - start);
    TrainingData td = new TrainingData();
    // BUG FIX: the condition was `i < end`, which exits immediately for reversed
    // ranges (dir == -1) even though `dir` was computed precisely to step backwards.
    // `i != end` terminates correctly in both directions, and start == end
    // (dir == 0) never enters the loop.
    for (var i = start; i != end; i += dir) {
        td.data.Add(data[i]);
        td.response.Add(response[i]);
    }
    return td;
}
/// <summary>
/// Runs one training sequence through the net (after wiping recurrent memory) and
/// returns the mean L1 distance between the net's output and the expected response.
/// </summary>
/// <param name="ts">Sequence of input/response pairs to evaluate.</param>
/// <returns>Total L1 error divided by the number of pairs in <paramref name="ts"/>.</returns>
internal double TestOne(TrainingData ts) {
    WipeMemory();
    double totalError = 0;
    foreach (TrainingData.TrainingPair pair in ts) {
        Vector<double> prediction = Process(pair.Data);
        totalError += (prediction - pair.Response).L1Norm();
        // Expose the ground truth so the next step can condition on it.
        PreviousResponse = pair.Response;
    }
    // NOTE(review): an empty sequence yields 0/0 == NaN here — confirm callers never pass one.
    return totalError / ts.Count;
}
/// <summary>
/// Generates <paramref name="n"/> single-pair samples of the sine function on [0, π):
/// the input is the angle normalized by π and the response is sin of the raw angle.
/// </summary>
/// <param name="data">Receives the generated training set.</param>
/// <param name="n">Number of samples to generate.</param>
static void LoadSin(out HashSet<TrainingData> data, int n) {
    data = new HashSet<TrainingData>();
    ContinuousUniform rand = new ContinuousUniform(0, Math.PI);
    for (var i = 0; i < n; i++) {
        double angle = rand.Sample();
        TrainingData sample = new TrainingData(1, 1);
        sample.Data[0] = angle / Math.PI; // normalize the input into [0, 1)
        sample.Response = DenseVector.Create(1, Math.Sin(angle));
        data.Add(sample);
    }
}
/// <summary>
/// Generates <paramref name="nData"/> response-only sine sequences: each starts at a
/// random phase in [0, 2π) and advances by <paramref name="dt"/> per step, with
/// sin(phase) as the target at every step (input dimension is 0 — no per-step input).
/// </summary>
/// <param name="data">Receives the generated training set.</param>
/// <param name="dt">Phase increment per time step.</param>
/// <param name="seqLength">Number of time steps per sequence.</param>
/// <param name="nData">Number of sequences to generate.</param>
static void LoadSinSeq(out HashSet<TrainingData> data, double dt, int seqLength, int nData) {
    data = new HashSet<TrainingData>();
    ContinuousUniform rand = new ContinuousUniform(0, 2 * Math.PI);
    for (var i = 0; i < nData; i++) {
        double phase = rand.Sample();
        TrainingData sequence = new TrainingData(0, 1);
        for (var step = 0; step < seqLength; step++) {
            // Advance the phase first, then record the target for this step.
            phase += dt;
            sequence[step] = new TrainingData.TrainingPair {
                Response = DenseVector.Create(1, Math.Sin(phase))
            };
        }
        data.Add(sequence);
    }
}
/// <summary>
/// Generates <paramref name="n"/> samples of a scaled multiplication task: two random
/// factors in [1, 10^digits - 1] are normalized by 10^digits as inputs, and their
/// product normalized by 10^(2*digits) is the response, so everything lies in (0, 1).
/// </summary>
/// <param name="data">Receives the generated training set.</param>
/// <param name="digits">Maximum number of decimal digits per factor.</param>
/// <param name="n">Number of samples to generate.</param>
static void LoadMultiplication(out HashSet<TrainingData> data, int digits, int n) {
    data = new HashSet<TrainingData>();
    // Hoisted out of the loop: both scales are loop-invariant (previously
    // Math.Pow was recomputed on every iteration), and the input scale also
    // defines the RNG's upper bound, so compute it once and reuse it.
    double inputScale = Math.Pow(10, digits);
    double responseScale = Math.Pow(10, 2 * digits);
    DiscreteUniform rand = new DiscreteUniform(1, (int)inputScale - 1);
    for (var i = 0; i < n; i++) {
        double x = rand.Sample();
        double y = rand.Sample();
        double z = x * y;
        TrainingData td = new TrainingData(2, 1);
        td.Data[0] = x / inputScale;
        td.Data[1] = y / inputScale;
        td.Response = DenseVector.Create(1, z / responseScale);
        data.Add(td);
    }
}
/// <summary>
/// Loads a training set from a text file where each line is
/// "in1,in2,...;out1,out2,..." — inputs and outputs separated by ';',
/// components separated by ','. All lines must share the same dimensions.
/// </summary>
/// <param name="fp">Path of the data file.</param>
/// <param name="data">Receives the parsed training set (possibly partial on failure).</param>
/// <returns>True on success; false if a file-related exception occurred.</returns>
/// <exception cref="TrainingDataException">Thrown when lines have inconsistent dimensions.
/// NOTE(review): this is thrown inside the try — whether it escapes depends on
/// IsFileException returning false for it; confirm.</exception>
private static bool LoadSet(string fp, out HashSet<TrainingData> data) {
    data = new HashSet<TrainingData>();
    bool success = true;
    int inputDim = -1;  // -1 until the first line fixes the expected dimensions
    int outputDim = -1;
    try {
        using (StreamReader trainingIn = new StreamReader(fp)) {
            while (!trainingIn.EndOfStream) {
                string[] line = trainingIn.ReadLine().Split(';');
                string[] inputPiece = line[0].Split(',');
                string[] outputPiece = line[1].Split(',');
                if ((inputDim != -1 && inputPiece.Length != inputDim) || (outputDim != -1 && outputPiece.Length != outputDim)) {
                    throw new TrainingDataException("Failed to load data set. Inconsistent lines found.");
                }
                // BUG FIX: these were never assigned, so the consistency check above
                // could never fire — every file passed regardless of ragged lines.
                inputDim = inputPiece.Length;
                outputDim = outputPiece.Length;
                TrainingData td = new TrainingData(inputPiece.Length, outputPiece.Length);
                for (var i = 0; i < inputPiece.Length; i++) {
                    // InvariantCulture: the file format uses '.' decimals and ',' field
                    // separators, so culture-sensitive parsing (e.g. de-DE treating '.'
                    // as a group separator) would silently corrupt the values.
                    td.Data[i] = double.Parse(inputPiece[i], System.Globalization.CultureInfo.InvariantCulture);
                }
                for (var i = 0; i < outputPiece.Length; i++) {
                    td.Response[i] = double.Parse(outputPiece[i], System.Globalization.CultureInfo.InvariantCulture);
                }
                data.Add(td);
            }
        }
    }
    catch (Exception ex) when (IsFileException(ex)) {
        success = false;
    }
    return success;
}