// Builds nData training sequences for the temporal XOR task: each step's input is a
// random bit, and the target is the XOR of the current and previous inputs. The first
// target is 0.5 because there is no previous bit to XOR against yet.
static void LoadXOR(out HashSet<TrainingData> data, int length, int nData)
{
    data = new HashSet<TrainingData>();
    ContinuousUniform rand = new ContinuousUniform(0, 1);

    for (var i = 0; i < nData; i++)
    {
        TrainingData td = new TrainingData(1, 1);
        td[0] = new TrainingData.TrainingPair(
            DenseVector.Create(1, rand.Sample() > 0.5 ? 1 : 0),
            DenseVector.Create(1, 0.5));

        for (var j = 1; j < length; j++)
        {
            TrainingData.TrainingPair p = new TrainingData.TrainingPair();
            p.Data = DenseVector.Create(1, rand.Sample() > 0.5 ? 1 : 0);

            // XOR of this bit with the previous one.
            p.Response = td[j - 1].Data[0] == p.Data[0]
                ? DenseVector.Create(1, 0)
                : DenseVector.Create(1, 1);

            td[j] = p;
        }

        data.Add(td);
    }
}
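// Hedged usage sketch (not part of the original listing): exercising LoadXOR and
// spot-checking that each target really is the XOR of consecutive input bits.
// TrainingData's Count property and integer indexer are assumed only because Learn
// below already uses them; the helper name CheckXORData is illustrative.
static void CheckXORData()
{
    LoadXOR(out HashSet<TrainingData> data, length: 16, nData: 4);

    foreach (TrainingData seq in data)
    {
        for (var i = 1; i < seq.Count; i++)
        {
            double expected = seq[i - 1].Data[0] == seq[i].Data[0] ? 0 : 1;
            if (seq[i].Response[0] != expected)
            {
                Console.WriteLine($"XOR mismatch at step {i}");
            }
        }
    }
}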
internal override void Learn(HashSet<TrainingData> trainSet, int batchSize = 1)
{
    batchSize = Math.Min(batchSize, trainSet.Count);
    Vector<double> output;
    double cost = 0;
    int nBatch = 0;

    foreach (TrainingData trainSeq in trainSet)
    {
        // Start over for each different training sequence provided.
        WipeMemory();
        PreviousResponse = null;

        for (var i = 0; i < trainSeq.Count; i++)
        {
            // Process the pair.
            TrainingData.TrainingPair pair = trainSeq[i];
            output = Process(pair.Data);
            cost += CostFunc.Of(pair.Response, output) / (batchSize * MaxMemory);

            // If we have completely overwritten our short-term memory, then
            // update the weights based on how we performed this time.
            if (i > 0 && i % MaxMemory == 0)
            {
                // Math.Max (not Math.Min, which always yielded 0 here) so we
                // backpropagate over the most recent MaxMemory steps, clamped
                // at the start of the sequence.
                PropogateError(trainSeq.SubSequence(Math.Max(i - MaxMemory, 0), i), batchSize);
            }

            // Count batches by number of error propagations. The i > 0 guard keeps
            // step 0 of each sequence from registering as a completed batch.
            if (i > 0 && i % (batchSize * MaxMemory) == 0)
            {
                nBatch++;
                LastCost = cost;
                Hook?.Invoke(nBatch, this);
                ApplyError();
                cost = 0;
            }

            // Feed the next step either the true target (when ForceOutput is set) or
            // our own output. This is a PIPI (Parallel Implementation Prone to
            // Inconsistency); see this.Process.
            PreviousResponse = ForceOutput ? pair.Response : output;

            if (Abort)
            {
                Abort = false;
                return;
            }
        }
    }
}
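// Hedged usage sketch (assumptions flagged inline): driving Learn with a monitoring
// hook and early stopping. The type name RecurrentNetwork is a placeholder for
// whatever class declares Learn; Hook's delegate shape is inferred from the
// Hook?.Invoke(nBatch, this) call above, and setting Abort from outside assumes the
// flag is writable, as its check-and-reset inside Learn suggests.
static void TrainWithMonitoring(RecurrentNetwork net, HashSet<TrainingData> trainSet)
{
    net.Hook = (batch, self) =>
    {
        Console.WriteLine($"Batch {batch}: cost = {self.LastCost:F6}");

        // Early stop once the averaged batch cost is small enough.
        if (self.LastCost < 1e-3)
        {
            self.Abort = true;
        }
    };

    net.Learn(trainSet, batchSize: 10);
}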
// Builds nData sine-wave sequences for next-value prediction. Each sequence starts
// at a random phase and advances dt radians per step. Pairs carry only a target
// response; no Data vector is set (the TrainingData input dimension is 0).
static void LoadSinSeq(out HashSet<TrainingData> data, double dt, int seqLength, int nData)
{
    data = new HashSet<TrainingData>();
    ContinuousUniform rand = new ContinuousUniform(0, 2 * Math.PI);

    for (var i = 0; i < nData; i++)
    {
        double theta = rand.Sample();
        TrainingData td = new TrainingData(0, 1);

        for (var j = 0; j < seqLength; j++)
        {
            TrainingData.TrainingPair pair = new TrainingData.TrainingPair();
            theta += dt;
            pair.Response = DenseVector.Create(1, Math.Sin(theta));
            td[j] = pair;
        }

        data.Add(td);
    }
}
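// Hedged usage sketch: generating a sine-prediction set and sanity-checking it.
// Because |d/dθ sin θ| <= 1, consecutive targets can differ by at most dt, so any
// larger jump would indicate a generation bug. Only members already used above
// (Count, the indexer, Response) are relied on; CheckSinData is an illustrative name.
static void CheckSinData()
{
    LoadSinSeq(out HashSet<TrainingData> data, dt: 0.1, seqLength: 100, nData: 8);

    foreach (TrainingData seq in data)
    {
        for (var i = 1; i < seq.Count; i++)
        {
            double step = Math.Abs(seq[i].Response[0] - seq[i - 1].Response[0]);
            if (step > 0.1 + 1e-9)
            {
                Console.WriteLine($"Unexpected jump of {step:F4} at step {i}");
            }
        }
    }
}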