/// <summary>
/// Leave-one-out optimization
/// </summary>
public void Train_LOO(double hMin, double hMax, double step)
{
    var hOpt = double.MaxValue;
    var minErrCnt = int.MaxValue;

    for (double h = hMin; h <= hMax; h += step)
    {
        var errCnt = 0;
        m_H = h;

        for (int i = 0; i < TrainingSample.Count; i++)
        {
            var pData = TrainingSample.ElementAt(i);
            using (var mask = this.ApplySampleMask((p, c, idx) => idx != i))
            {
                var cls = this.Classify(pData.Key);
                if (cls != pData.Value)
                {
                    errCnt++;
                }
            }
        }

        if (errCnt < minErrCnt)
        {
            minErrCnt = errCnt;
            hOpt = h;
        }
    }

    m_H = hOpt;
}
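// For context, a self-contained sketch of the same leave-one-out bandwidth
// search on a toy 1-D Parzen-window classifier. Everything below (type names,
// the Gaussian kernel, integer class labels) is illustrative and is not part
// of the library code above; assumes the usual System, System.Collections.Generic
// and System.Linq usings.
static class LooBandwidthSketch
{
    static double Gaussian(double r) => Math.Exp(-0.5 * r * r) / Math.Sqrt(2.0 * Math.PI);

    // Classify x by summed kernel votes, skipping the held-out sample.
    static int Classify(IReadOnlyList<(double X, int Cls)> data, double x, double h, int excludeIndex)
    {
        var scores = new Dictionary<int, double>();
        for (int i = 0; i < data.Count; i++)
        {
            if (i == excludeIndex) continue;
            scores.TryGetValue(data[i].Cls, out var s);
            scores[data[i].Cls] = s + Gaussian((x - data[i].X) / h);
        }
        return scores.OrderByDescending(kv => kv.Value).First().Key;
    }

    // Grid-search h, counting leave-one-out misclassifications, as Train_LOO does.
    static double TrainLoo(IReadOnlyList<(double X, int Cls)> data, double hMin, double hMax, double step)
    {
        double hOpt = hMin;
        int minErr = int.MaxValue;
        for (double h = hMin; h <= hMax; h += step)
        {
            int err = 0;
            for (int i = 0; i < data.Count; i++)
                if (Classify(data, data[i].X, h, i) != data[i].Cls) err++;
            if (err < minErr) { minErr = err; hOpt = h; }
        }
        return hOpt;
    }
}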
public void ThetasPropertyAfterTrainingReturnsTheSameThetasAsTrainerReturns()
{
    // arrange
    var dataSetMock = new Mock<IDataSet<bool, double>>();
    dataSetMock.Setup(ds => ds.GetTrainingSamplesCount()).Returns(2);
    dataSetMock.Setup(ds => ds.GetData()).Returns(new[]
    {
        TrainingSample.Create(true, new[] { 0.0 }),
        TrainingSample.Create(false, new[] { 1.0 })
    });

    const int FeaturesCount = 1;
    const double RegularizationParameter = 0.0;

    var classifier = new LogisticRegressionClassifier(FeaturesCount, RegularizationParameter);
    classifier.Train(dataSetMock.Object);

    var classifierTraining = new LogisticRegressionClassifierTraining(FeaturesCount, RegularizationParameter, dataSetMock.Object);
    var expectedThetas = classifierTraining.Train();

    // act
    var thetas = classifier.Thetas;

    // assert
    Assert.IsTrue(expectedThetas.SequenceEqual(thetas));
}
/// <summary>
/// A protected helper function used to train single learning sample
/// </summary>
/// <param name="trainingSample">
/// Training sample to use
/// </param>
/// <param name="currentIteration">
/// Current training epoch (Assumed to be positive and less than <c>trainingEpochs</c>)
/// </param>
/// <param name="trainingEpochs">
/// Number of training epochs (Assumed to be positive)
/// </param>
protected override void LearnSample(TrainingSample trainingSample, int currentIteration, int trainingEpochs)
{
    // No validation here
    int layerCount = layers.Count;

    // Set input vector
    inputLayer.SetInput(trainingSample.InputVector);
    for (int i = 0; i < layerCount; i++)
    {
        layers[i].Run();
    }

    // Set errors
    meanSquaredError += (outputLayer as ActivationLayer).SetErrors(trainingSample.OutputVector);

    // Backpropagate errors
    for (int i = layerCount; i > 0;)
    {
        ActivationLayer layer = layers[--i] as ActivationLayer;
        if (layer != null)
        {
            layer.EvaluateErrors();
        }
    }

    // Optimize synapse weights and neuron bias values
    for (int i = 0; i < layerCount; i++)
    {
        layers[i].Learn(currentIteration, trainingEpochs);
    }
}
public void TestMethod1()
{
    // Create the input, hidden and output layers
    var inputLayer = new LinearLayer(1);
    var hiddenLayer = new LinearLayer(5);
    var outputLayer = new LinearLayer(1);

    // Connect the layers
    new BackpropagationConnector(inputLayer, hiddenLayer, ConnectionMode.Complete);
    new BackpropagationConnector(hiddenLayer, outputLayer, ConnectionMode.Complete);

    // Create the neural network
    var network = new BackpropagationNetwork(inputLayer, outputLayer);
    //network.SetLearningRate(new LinearFunction(0.1, 0.6));
    network.Initialize();

    // Train
    var ran = new Random();
    for (var i = 0; i < 100; i++)
    {
        var inputVector = new double[] { i };
        var outputVector = new double[] { Math.PI * i };
        var trainingSample = new TrainingSample(inputVector, outputVector);
        network.Learn(trainingSample, i, 100);
    }

    // Predict
    var testInput = new double[] { 1 };
    var testOutput = network.Run(testInput);
    Console.WriteLine(testOutput[0]);
}
public void Score(List<NetworkState> states, TrainingSample sample)
{
    var availableOnsets = sample.Frames.Where(frame => frame.IsOnset).ToList();
    int totalSelection = availableOnsets.Count(); // tp + fn

    for (int i = 1; i < states.Count; i++)
    {
        NetworkState state = states[i];
        double time = sample.Frames[i - 1].Frame.Start;

        Debug.Assert(state.Output.Length == 1);
        if (state.Output[0] < 0.25) // previously: state.Output[0] < state.Output[1]
        {
            continue;
        }

        TrainingFrame matchedOnset = availableOnsets.FirstOrDefault(
            onset => Math.Abs(onset.Frame.Start - time) < MatchingTolerance);

        if (matchedOnset != null)
        {
            availableOnsets.Remove(matchedOnset);
            truePositives++;
        }
        else
        {
            falsePositives++;
        }
    }

    falseNegatives += availableOnsets.Count;
}
/// <summary>
/// Brings all the ai list together into a training set to do some killer stuff.
/// </summary>
/// <returns>Compilation of a single training set.</returns>
private TrainingSet CompileTrainingSet(List<CoastalRaidersFuedalResourceManager> rawMgxStats)
{
    if (rawMgxStats.Count == 0)
    {
        Program.Logger.Error("There are currently no stats available in the system to build a database.");
        Program.Logger.Error("Attempting to generate new entry....");
        // Generate brand new AI entry in here to test the auto data collection capability.
    }

    TrainingSet tset = new TrainingSet(
        rawMgxStats[0].GetInputParams.Length * 2,
        rawMgxStats[0].GetOutputParams.Length * 2);

    // Samples are paired: player 1 and player 2 of the same match.
    for (int i = 0; i < rawMgxStats.Count; i += 2)
    {
        var player1 = rawMgxStats[i].GenerateAnnSample();
        var player2 = rawMgxStats[i + 1].GenerateAnnSample();

        var trainingSample = new TrainingSample(
            player1.InputVector.Concat(player2.InputVector).ToArray(),
            player1.OutputVector.Concat(player2.OutputVector).ToArray());

        tset.Add(trainingSample);
    }

    return tset;
}
/// <summary>
/// Estimated closeness of given point to given classes
/// </summary>
public override double CalculateClassScore(double[] obj, Class cls)
{
    var score = 0.0D;
    var dim = DataDim;
    var useMin = UseKernelMinValue;
    var min = KernelMinValue;

    foreach (var pData in TrainingSample.Where(d => d.Value.Equals(cls)))
    {
        var data = pData.Key;
        var p = 0.0D;

        for (int i = 0; i < dim; i++)
        {
            var h = (m_Hs != null) ? m_Hs[i] : H;
            var r = (obj[i] - data[i]) / h;
            var v = Kernel.Value(r) / h;
            if (Math.Abs(v) < min && useMin)
            {
                v = min;
            }

            p += Math.Log(v);
        }

        score += p;
    }

    score += PriorProbs[cls.Value];
    return score;
}
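// In log space, the loop above computes
//   score(x, c) = log P(c) + sum over training points x_k in class c of
//                 sum over dimensions i of log( K((x_i - x_k_i) / h_i) / h_i ),
// i.e. a product-kernel estimate with a per-dimension bandwidth (m_Hs[i] when
// set, otherwise the shared H). The KernelMinValue clamp guards Math.Log
// against kernel values that underflow to zero far from the data, where
// log(0) would otherwise poison the whole score.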
/// <summary>
/// A protected helper function used to train a single learning sample
/// </summary>
/// <param name="trainingSample">
/// Training sample to use
/// </param>
/// <param name="currentIteration">
/// Current training epoch (assumed to be positive and less than <c>trainingEpochs</c>)
/// </param>
/// <param name="trainingEpochs">
/// Number of training epochs (assumed to be positive)
/// </param>
protected override void LearnSample(TrainingSample trainingSample, int currentIteration, int trainingEpochs)
{
    // No validation here
    int layerCount = layers.Count;

    // Set the input vector
    inputLayer.SetInput(trainingSample.InputVector);
    for (int i = 0; i < layerCount; i++)
    {
        layers[i].Run();
    }

    // Set errors
    meanSquaredError += (outputLayer as ActivationLayer).SetErrors(trainingSample.OutputVector);

    // Backpropagate errors
    for (int i = layerCount; i > 0;)
    {
        ActivationLayer layer = layers[--i] as ActivationLayer;
        if (layer != null)
        {
            layer.EvaluateErrors();
        }
    }

    // Optimize synapse weights and neuron bias values
    for (int i = 0; i < layerCount; i++)
    {
        layers[i].Learn(currentIteration, trainingEpochs);
    }
}
public void Train(TrainingSample sample, NetworkScorer scorer)
{
    List<NetworkState> states = FeedForward(sample, scorer);
    trainingState.Prepare(momentum: Momentum);

    // Backpropagate through time.
    for (int i = states.Count - 1; i >= 1; i--)
    {
        // Get the corresponding frame.
        TrainingFrame frame = sample.Frames[i - 1];
        NetworkState current = states[i];
        NetworkState last = states[i - 1];

        //bool annotatedOnsetNearby = IsAnnotatedOnsetNearby(sample, states, i);
        double onCorrect = frame.IsOnset ? DetectionValue : NoDetectionValue;
        //double offCorrect = frame.IsOnset ? 0.0 : 1.0;

        trainingState.Errors[0] = (current.Output[0] - onCorrect);
        //trainingState.Errors[1] = (current.Output[1] - offCorrect);

        trainingState.BackPropogate(last: last, now: current);
    }

    // Apply the required weight changes.
    trainingState.ApplyWeightChanges(LearningCoefficient);
}
protected override void DoTrain()
{
    base.DoTrain();

    var classes = Classes.ToList();
    for (int i = 0; i < classes.Count; i++)
    {
        var any = classes.Any(c => (int)c.Value == i);
        if (!any)
        {
            throw new MLException(string.Format("Class values must be enumerated from 0 to {0}", classes.Count - 1));
        }
    }

    m_ClassHist = new int[classes.Count];
    m_PriorProbs = new double[classes.Count];
    m_DataCount = TrainingSample.Count;
    m_DataDim = TrainingSample.GetDimension();

    foreach (var pData in TrainingSample)
    {
        var cls = pData.Value;
        m_ClassHist[cls.Value] += 1;
    }

    foreach (var cls in classes)
    {
        var penalty = (ClassLosses == null) ? 1 : ClassLosses[cls.Value];
        m_PriorProbs[cls.Value] = Math.Log(penalty * m_ClassHist[cls.Value] / (double)m_DataCount);
    }

    TrainImpl();
}
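// The prior stored above is log(penalty_c * n_c / N): the empirical class
// frequency, optionally weighted by a per-class loss, kept in log space so
// that the CalculateClassScore overrides elsewhere in this section can simply
// add it to their log-likelihood terms.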
/// <summary>
/// Estimated closeness of given point to given classes
/// </summary>
public override double CalculateClassScore(double[] obj, Class cls)
{
    var dim = DataDim;
    var p = 0.0D;
    var y = 0.0D;
    var my = ClassHist[cls.Value];
    var useMin = UseKernelMinValue;
    var min = KernelMinValue;

    for (int i = 0; i < dim; i++)
    {
        foreach (var pData in TrainingSample.Where(d => d.Value.Equals(cls)))
        {
            var r = (obj[i] - pData.Key[i]) / H;
            p += Kernel.Value(r);
        }

        p = p / (H * my);
        if (Math.Abs(p) < min && useMin)
        {
            p = min;
        }

        y += Math.Log(p);
        p = 0.0D;
    }

    y += PriorProbs[cls.Value];
    return y;
}
public static IDataSet<T, double> MapTwoFeaturesDataSetToDegree<T>(IDataSet<T, double> dataSet, int degree)
    where T : IEquatable<T>
{
    return InMemoryDataSet.Create(
        (from data in dataSet.GetData()
         let x1 = data.Attributes[0]
         let x2 = data.Attributes[1]
         select TrainingSample.Create(data.Category, MapTwoFeaturesToDegrees(x1, x2, degree), data.Count))
        .ToArray());
}
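// MapTwoFeaturesToDegrees itself is not shown in this section. A plausible
// sketch, assuming it performs the usual polynomial feature expansion over
// two features (every monomial x1^(i-j) * x2^j for i = 1..degree), might be:
private static double[] MapTwoFeaturesToDegrees(double x1, double x2, int degree)
{
    var features = new List<double>();
    for (int i = 1; i <= degree; i++)
    {
        for (int j = 0; j <= i; j++)
        {
            // Monomial of total degree i: x1^(i-j) * x2^j
            features.Add(Math.Pow(x1, i - j) * Math.Pow(x2, j));
        }
    }
    return features.ToArray();
}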
public void Train(int cpi, int cs = 5, int seed = -1)
{
    if (seed == -1)
    {
        seed = Environment.TickCount;
    }

    Random r = new Random(seed);
    int ai = 0;

    TrainingSet ts = new TrainingSet(Inputs, W * H * 3);
    foreach (var i in InImgs)
    {
        double[] iv = new double[Inputs];
        double[] ov = new double[W * H * 3];

        // Note: in C# the left-hand index of "iv[ic] = GV(i.Dat[ic++])" is
        // evaluated before the right-hand side, so both sides see the same
        // (pre-increment) value of ic.
        int ic = 0;
        for (int y = 0; y < i.H; y++)
        {
            for (int x = 0; x < i.W; x++)
            {
                iv[ic] = GV(i.Dat[ic++]); // R
                iv[ic] = GV(i.Dat[ic++]); // G
                iv[ic] = GV(i.Dat[ic++]); // B
            }
        }

        Image oi = OutImgs[ai];
        int vv = 0;
        for (int y = 0; y < i.H; y++)
        {
            for (int x = 0; x < i.W; x++)
            {
                // The target vector must come from the output image (oi),
                // not the input image (i).
                ov[vv] = GV(oi.Dat[vv++]); // R
                ov[vv] = GV(oi.Dat[vv++]); // G
                ov[vv] = GV(oi.Dat[vv++]); // B
            }
        }

        ai++;

        TrainingSample s = new TrainingSample(iv, ov);
        for (int xc = 0; xc < cpi; xc++)
        {
            ts.Add(s); // cpi copies per image
        }
    }

    Ready = false;

    //net.BeginEpochEvent += TrainE;
    net.EndEpochEvent += EndE;
    net.Learn(ts, cs);
    net.StopLearning();
    Console.WriteLine("Done training mind.");
}
// Train agent's neural network for a specific input and desired output
private void trainNeural(double[] input, double output)
{
    double[] tmp = { output };

    // Create the training sample for the neural network
    TrainingSample sample = new TrainingSample(input, tmp);

    // Train the network
    network.Learn(sample, 0, currentEpoch);
}
/// <summary>
/// A protected helper function used to train single learning sample
/// </summary>
/// <param name="trainingSample">
/// Training sample to use
/// </param>
/// <param name="currentIteration">
/// Current training epoch (Assumed to be positive and less than <c>trainingEpochs</c>)
/// </param>
/// <param name="trainingEpochs">
/// Number of training epochs (Assumed to be positive)
/// </param>
protected override void LearnSample(TrainingSample trainingSample, int currentIteration, int trainingEpochs)
{
    // No validation here
    inputLayer.SetInput(trainingSample.InputVector);
    foreach (ILayer layer in layers)
    {
        layer.Run();
        layer.Learn(currentIteration, trainingEpochs);
    }
}
public static TrainingSet ConvertToUnSupervisedTrainingSet(IForecastingDataSets sets)
{
    TrainingSet trainingset = new TrainingSet(sets.InputData[0].Length);
    for (int i = 0; i < sets.InputData.Length; i++)
    {
        TrainingSample ts = new TrainingSample(sets.InputData[i]);
        trainingset.Add(ts);
    }
    return trainingset;
}
private void openFileDialog1_FileOk(object sender, System.ComponentModel.CancelEventArgs e)
{
    TrainingSample[] samples = null;
    string filename = openFileDialog.FileName;
    string extension = Path.GetExtension(filename);

    if (extension == ".xls" || extension == ".xlsx")
    {
        ExcelReader db = new ExcelReader(filename, true, false);
        TableSelectDialog t = new TableSelectDialog(db.GetWorksheetList());

        if (t.ShowDialog(this) == DialogResult.OK)
        {
            var sampleTable = db.GetWorksheet(t.Selection);
            samples = new TrainingSample[sampleTable.Rows.Count];

            for (int i = 0; i < samples.Length; i++)
            {
                samples[i] = new TrainingSample();
                samples[i].Sequence = new double[(sampleTable.Columns.Count - 1) / 2][];

                for (int j = 0; j < samples[i].Sequence.Length; j++)
                {
                    // Each sequence point is an (x, y) pair stored in two
                    // consecutive columns, hence indices 2*j and 2*j + 1.
                    samples[i].Sequence[j] = new double[]
                    {
                        (double)sampleTable.Rows[i][2 * j] * 50,
                        (double)sampleTable.Rows[i][2 * j + 1] * 50
                    };
                }

                samples[i].Output = (int)(double)sampleTable.Rows[i][sampleTable.Columns.Count - 1] - 1;
            }
        }
    }
    else if (extension == ".xml")
    {
        using (var stream = openFileDialog.OpenFile())
        {
            XmlSerializer serializer = new XmlSerializer(typeof(TrainingSample[]));
            samples = (TrainingSample[])serializer.Deserialize(stream);
        }
    }

    dataGridView1.Rows.Clear();
    for (int i = 0; i < samples.Length; i++)
    {
        var sequence = samples[i].Sequence;
        var label = samples[i].Output + 1;
        var bitmap = ToBitmap(sequence);
        var row = dataGridView1.Rows.Add(bitmap, label, null);
        dataGridView1.Rows[row].Tag = sequence;
    }
}
private static IDataSet<bool, double> GetDataSet(string resourceName)
{
    var dataLines = GetResourceLines(resourceName);
    var parsedDataLines = ParseDoublesLines(dataLines, new[] { ',' });

    var dataSet1 = InMemoryDataSet.Create(parsedDataLines
        .Select(pl => TrainingSample.Create(pl[2] != 0, new[] { pl[0], pl[1] }))
        .ToArray());

    return dataSet1;
}
/// <summary>
/// Click handler for the Calculate button
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void tsmiCalculate_Click(object sender, EventArgs e)
{
    // Create the input, hidden and output layers
    ActivationLayer inputLayer = GetLayer(cboInputLayerType.SelectedItem.ToString(), 2);
    ActivationLayer hiddenLayer = GetLayer(cboHiddenLayerType.SelectedItem.ToString(), int.Parse(txtHiddenLayerCount.Text));
    ActivationLayer outputLayer = GetLayer(cboOutputLayerType.SelectedItem.ToString(), 1);

    // Connect the layers
    new BackpropagationConnector(inputLayer, hiddenLayer, ConnectionMode.Complete).Initializer = new RandomFunction(0, 0.3);
    new BackpropagationConnector(hiddenLayer, outputLayer, ConnectionMode.Complete).Initializer = new RandomFunction(0, 0.3);

    // Create the neural network and set the learning rate from the UI
    // (a second, hard-coded SetLearningRate(0.3, 0.1) call would silently
    // override these values, so the rate is set only once here)
    var network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(double.Parse(txtInitialLearningRate.Text), double.Parse(txtFinalLearningRate.Text));

    // Train
    var trainingSet = new TrainingSet(2, 1);
    for (var i = 0; i < 17; i++)
    {
        var x1 = data[i, 0];
        var x2 = data[i, 1];
        var y = data[i, 2];

        var inputVector = new double[] { x1, x2 };
        var outputVector = new double[] { y };
        var trainingSample = new TrainingSample(inputVector, outputVector);
        trainingSet.Add(trainingSample);
    }

    network.Learn(trainingSet, int.Parse(txtTrainingEpochs.Text));
    network.StopLearning();

    // Predict
    for (var i = 0; i < 17; i++)
    {
        var x1 = data[i, 0];
        var x2 = data[i, 1];
        var y = data[i, 2];

        var testInput = new double[] { x1, x2 };
        var testOutput = network.Run(testInput)[0];

        var absolute = testOutput - y;
        var relative = Math.Abs((testOutput - y) / y); // relative error w.r.t. the actual value

        dgvData.Rows[i].Cells[3].Value = testOutput.ToString("f3");
        dgvData.Rows[i].Cells[4].Value = absolute.ToString("f3");
        dgvData.Rows[i].Cells[5].Value = (relative * 100).ToString("f1") + "%";
    }
}
public void ConstructorDoesNotThrowExceptionIfAllParametersIsGood()
{
    // arrange
    var dataSetMock = new Mock<IDataSet<bool, double>>();
    dataSetMock.Setup(ds => ds.GetTrainingSamplesCount()).Returns(1);
    dataSetMock.Setup(ds => ds.GetData()).Returns(new[] { TrainingSample.Create(true, new double[1]) });

    // act
    var trainer = new LogisticRegressionClassifierTraining(1, 0.0, dataSetMock.Object);

    // assert
}
public void RunBatch(int skip, int take)
{
    if (skip < 0)
    {
        throw new MLException("Skip value must be non-negative");
    }
    if (take <= 0)
    {
        throw new MLException("Take value must be positive");
    }

    runBatch(Net, TrainingSample.Subset(skip, take));
}
/// <summary>
/// Estimates closeness of given point to given classes
/// </summary>
public override double CalculateClassScore(double[] obj, Class cls)
{
    var score = 0.0D;

    foreach (var pData in TrainingSample.Where(d => d.Value.Equals(cls)))
    {
        var r = Metric.Dist(pData.Key, obj) / H;
        score += Kernel.Value(r);
    }

    score = Math.Log(score) + PriorProbs[cls.Value];
    return score;
}
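// This variant is the plain metric-based Parzen window score:
//   score(x, c) = log( sum over x_k in class c of K(d(x_k, x) / h) ) + log P(c),
// where d is the configured Metric and h the window width H. Unlike the
// per-dimension versions above, a single kernel is applied to the full
// distance, so no product over dimensions is involved.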
public void CostFunctionThrowsArgumentOutOfRangeExceptionIfOneOfTheTrainingSamplesAttributesLengthIsNotEqualToFeaturesCount()
{
    // arrange
    var dataSetMock = new Mock<IDataSet<bool, double>>();
    dataSetMock.Setup(ds => ds.GetTrainingSamplesCount()).Returns(1);
    dataSetMock.Setup(ds => ds.GetData()).Returns(new[] { TrainingSample.Create(true, new double[0]) });
    var trainer = new LogisticRegressionClassifierTraining(1, 0.0, dataSetMock.Object);

    // act
    var cost = trainer.CostFunction(new double[2], new double[2]);

    // assert
    Assert.Fail();
}
public void CostFunctionThrowsNullReferenceExceptionIfOneOfTheTrainingSamplesAttributesIsNull()
{
    // arrange
    var dataSetMock = new Mock<IDataSet<bool, double>>();
    dataSetMock.Setup(ds => ds.GetTrainingSamplesCount()).Returns(1);
    dataSetMock.Setup(ds => ds.GetData()).Returns(new[] { TrainingSample.Create(true, (double[])null) });
    var trainer = new LogisticRegressionClassifierTraining(1, 0.0, dataSetMock.Object);

    // act
    var cost = trainer.CostFunction(new double[2], new double[2]);

    // assert
    Assert.Fail();
}
public void TrainDoesNotThrowExceptionOnValidDataSet()
{
    // arrange
    var dataSetMock = new Mock<IDataSet<bool, double>>();
    dataSetMock.Setup(ds => ds.GetTrainingSamplesCount()).Returns(1);
    dataSetMock.Setup(ds => ds.GetData()).Returns(new[] { TrainingSample.Create(true, new double[1]) });
    var classifier = new LogisticRegressionClassifier(1, 0.0);

    // act
    classifier.Train(dataSetMock.Object);

    // assert
}
public void CostFunctionThrowsArgumentNullExceptionIfThetasIsNull()
{
    // arrange
    var dataSetMock = new Mock<IDataSet<bool, double>>();
    dataSetMock.Setup(ds => ds.GetTrainingSamplesCount()).Returns(1);
    dataSetMock.Setup(ds => ds.GetData()).Returns(new[] { TrainingSample.Create(true, new double[1]) });
    var trainer = new LogisticRegressionClassifierTraining(1, 0.0, dataSetMock.Object);

    // act
    var cost = trainer.CostFunction(null, new double[0]);

    // assert
    Assert.Fail();
}
public void GetCategoryProbabilityThrowsInvalidOperationExceptionIfClassifierIsNotTrained()
{
    // arrange
    var dataSetMock = new Mock<IDataSet<bool, double>>();
    dataSetMock.Setup(ds => ds.GetTrainingSamplesCount()).Returns(1);
    dataSetMock.Setup(ds => ds.GetData()).Returns(new[] { TrainingSample.Create(true, new double[1]) });
    var classifier = new LogisticRegressionClassifier(1, 0.0);

    // act
    classifier.GetCategoryProbability(true, new double[1]);

    // assert
    Assert.Fail();
}
public static Bitmap Draw(TrainingSample sample)
{
    var frames = sample.Frames;
    int height = frames[0].Frame.Values.Length;

    Debug.Assert(height % 6 == 0);
    int bands = height / 6;

    Bitmap result = new Bitmap(frames.Length, bands * 6 + 5);
    for (int x = 0; x < frames.Length; x++)
    {
        TrainingFrame frame = frames[x];
        DrawRow(result, x, frame);
    }

    return result;
}
public void CorrectlyClassifiesTestDataSetFromExample13Dot1()
{
    var trainingData = InMemoryDataSet.Create(new[]
    {
        TrainingSample.Create("China", new[] { "Chinese", "Beijing", "Chinese" }),
        TrainingSample.Create("China", new[] { "Chinese", "Chinese", "Shanghai" }),
        TrainingSample.Create("China", new[] { "Chinese", "Makao" }),
        TrainingSample.Create("Not China", new[] { "Tokio", "Japan", "Chinese" }),
    });

    var classifier = MultinomialNaiveBayesClassifier.Create(trainingData);

    Assert.AreEqual("China", classifier.Classify(new[] { "Chinese", "Chinese", "Chinese", "Tokio", "Japan" }));
    Assert.AreEqual("Not China", classifier.Classify(new[] { "Tokio" }));
    Assert.AreEqual("China", classifier.Classify(new[] { "Chinese", "Tokio" }));
    Assert.AreEqual("China", classifier.Classify(new[] { "Unknown", "Chinese", "Tokio" }));
    Assert.AreEqual("Not China", classifier.Classify(new[] { "Chinese", "Tokio", "Japan" }));
}
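// The test name refers to Example 13.1 of Manning, Raghavan & Schütze,
// "Introduction to Information Retrieval". For the first assertion the
// arithmetic works out as follows (add-one smoothing, vocabulary size 6,
// 8 tokens in class "China", 3 in "Not China"):
//   P(Chinese | China) = (5 + 1) / (8 + 6) = 3/7
//   P(Tokio | China)   = P(Japan | China) = (0 + 1) / (8 + 6) = 1/14
//   P(Chinese | Not)   = P(Tokio | Not) = P(Japan | Not) = (1 + 1) / (3 + 6) = 2/9
// so for the document {Chinese, Chinese, Chinese, Tokio, Japan}:
//   P(China | d)     ∝ 3/4 * (3/7)^3 * 1/14 * 1/14 ≈ 0.0003
//   P(Not China | d) ∝ 1/4 * (2/9)^3 * (2/9)^2     ≈ 0.0001
// and "China" wins.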
private void runEpoch(NeuralNetwork net)
{
    // loop over batches
    foreach (var batch in TrainingSample.Batch(m_BatchSize))
    {
        runBatch(net, batch);
    }

    // update epoch stats
    m_Epoch++;
    m_Iteration = 0;
    m_Batch = 0;

    if (EpochEndedEvent != null)
    {
        EpochEndedEvent(this, EventArgs.Empty);
    }
}
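// Batch(...) is not defined in this section. If it follows the common
// "chunk an IEnumerable into fixed-size groups" pattern, a minimal sketch
// (name and placement assumed, not taken from the library) could be:
internal static class EnumerableBatchExtensions
{
    public static IEnumerable<IEnumerable<T>> Batch<T>(this IEnumerable<T> source, int size)
    {
        var bucket = new List<T>(size);
        foreach (var item in source)
        {
            bucket.Add(item);
            if (bucket.Count == size)
            {
                yield return bucket;
                bucket = new List<T>(size);
            }
        }

        if (bucket.Count > 0)
        {
            yield return bucket; // final, possibly smaller, batch
        }
    }
}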
/// <summary>
/// <para>
/// Trains the network for the given training sample (online training mode). Note that this
/// method trains the sample only once irrespective of the values of <c>currentIteration</c>
/// and <c>trainingEpochs</c>. Those arguments are just used to adjust training parameters
/// which are dependent on training progress.
/// </para>
/// </summary>
/// <param name="trainingSample">
/// Training sample to use
/// </param>
/// <param name="currentIteration">
/// Current training epoch
/// </param>
/// <param name="trainingEpochs">
/// Number of training epochs
/// </param>
/// <exception cref="ArgumentNullException">
/// If <c>trainingSample</c> is <c>null</c>
/// </exception>
/// <exception cref="ArgumentException">
/// If <c>trainingEpochs</c> is not positive, if <c>currentIteration</c> is negative, or if
/// <c>currentIteration</c> is not less than <c>trainingEpochs</c>
/// </exception>
public override void Learn(TrainingSample trainingSample, int currentIteration, int trainingEpochs)
{
    meanSquaredError = 0d;
    isValidMSE = true;
    base.Learn(trainingSample, currentIteration, trainingEpochs);
}
private void saveFileDialog1_FileOk(object sender, System.ComponentModel.CancelEventArgs e)
{
    // Extract data
    int rows = dataGridView1.Rows.Count;
    TrainingSample[] samples = new TrainingSample[rows];
    for (int i = 0; i < rows; i++)
    {
        samples[i] = new TrainingSample();
        samples[i].Output = (int)dataGridView1.Rows[i].Cells["colLabel"].Value - 1;
        samples[i].Sequence = (double[][])dataGridView1.Rows[i].Tag;
    }

    XmlSerializer serializer = new XmlSerializer(typeof(TrainingSample[]));
    using (var stream = saveFileDialog1.OpenFile())
    {
        serializer.Serialize(stream, samples);
    }
}