/// <summary>
/// Applies a single training set to this network by delegating to each
/// layer, pairing network layers with training-set layers by index.
/// </summary>
/// <param name="set">Training set whose per-layer adjustments are applied.</param>
/// <param name="learningRate">Step size forwarded to every layer.</param>
public void Apply(NeuralTrainSet set, float learningRate = 0.05f)
{
    int layerCount = Layers.Length;
    for (int layerIndex = 0; layerIndex < layerCount; layerIndex++)
    {
        Layers[layerIndex].Apply(set.Layers[layerIndex], learningRate);
    }
}
/// <summary>
/// Creates a new training set by blending two existing sets layer by layer.
/// Both sets must originate from the same network so their layers line up.
/// </summary>
/// <param name="a">First training set to blend.</param>
/// <param name="b">Second training set to blend.</param>
/// <exception cref="ArgumentException">
/// Thrown when the two sets belong to different networks.
/// </exception>
public NeuralTrainSet(NeuralTrainSet a, NeuralTrainSet b)
{
    if (a.NeuralNetwork != b.NeuralNetwork)
    {
        // Fix: was a bare `new Exception(...)` (reserved base type, CA2201).
        // ArgumentException is more specific and still derives from Exception,
        // so existing `catch (Exception)` callers keep working.
        throw new ArgumentException("Cannot blend sets that belong to a different network.");
    }

    NeuralNetwork = a.NeuralNetwork;
    int layerCount = a.Layers.Length;
    Layers = new NeuralTrainLayer[layerCount];
    for (int i = 0; i < layerCount; i++)
    {
        // NOTE(review): NeuralTrainLayer(a, b) presumably averages/combines
        // the two layers' deltas — confirm against its implementation.
        Layers[i] = new NeuralTrainLayer(a.Layers[i], b.Layers[i]);
    }
}
/// <summary>
/// Folds a sequence of training sets into a single combined set and applies
/// it to the network once. An empty sequence is a no-op.
/// </summary>
/// <param name="trainingSets">Training sets to combine and apply.</param>
/// <param name="learningRate">Step size forwarded to the single-set Apply.</param>
public void Apply(IEnumerable<NeuralTrainSet> trainingSets, float learningRate = 0.05f)
{
    // NOTE(review): pairwise blending (((a⊕b)⊕c)⊕d …) does not weight all
    // sets equally if NeuralTrainSet(a, b) averages — confirm this is the
    // intended combination semantics.
    NeuralTrainSet combination = null;
    foreach (NeuralTrainSet set in trainingSets)
    {
        combination = combination == null
            ? set
            : new NeuralTrainSet(combination, set);
    }

    // Fix: the original passed null to Apply(NeuralTrainSet, float) when the
    // sequence was empty, which dereferenced set.Layers and threw a
    // NullReferenceException. Treat an empty batch as a no-op instead.
    if (combination == null)
    {
        return;
    }

    Apply(combination, learningRate);
}
/// <summary>
/// Runs one pass over the samples in mini-batches, building a training set
/// per sample (optionally in parallel) and applying each batch via Apply().
/// Checks StopTraining between batches and aborts early when it is set.
/// </summary>
/// <param name="samples">Samples to train on.</param>
/// <param name="miniBatchCount">Maximum number of samples per mini-batch; must be positive.</param>
/// <param name="parallel">When true, builds each batch's training sets with Parallel.For.</param>
/// <param name="shuffle">When true, shuffles the samples before batching.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="miniBatchCount"/> is zero or negative.
/// </exception>
public void Train(Sample[] samples, int miniBatchCount, bool parallel, bool shuffle = true)
{
    // Fix: miniBatchCount <= 0 previously made localEnd == i, so the batch
    // cursor never advanced and the loop spun forever.
    if (miniBatchCount <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(miniBatchCount), "Mini-batch size must be positive.");
    }

    if (shuffle)
    {
        // NOTE(review): assumes General.Shuffle returns a shuffled array
        // without reordering the caller's array in place — confirm.
        samples = General.Shuffle(samples);
    }

    int end = samples.Length;
    for (int i = 0; i < end;)
    {
        if (StopTraining)
        {
            return;
        }

        int localEnd = Math.Min(i + miniBatchCount, end);
        int size = localEnd - i;
        NeuralTrainSet[] miniBatch = new NeuralTrainSet[size];
        // Snapshot the cursor so the lambda captures an immutable value
        // (behavior is unchanged — Parallel.For completes before i advances —
        // but this makes the capture obviously safe).
        int batchStart = i;

        if (parallel)
        {
            Parallel.For(0, size, j => { miniBatch[j] = GetNeuralTrainSet(samples[batchStart + j]); });
        }
        else
        {
            for (int j = 0; j < size; j++)
            {
                miniBatch[j] = GetNeuralTrainSet(samples[batchStart + j]);
            }
        }

        Apply(miniBatch, LearningRate);
        i = localEnd;
    }
}