private void runBatch(ClassifiedSample<double[][,]> sampleBatch)
{
  // loop over batch
  if (m_UseBatchParallelization)
  {
    Parallel.ForEach(sampleBatch, pdata => m_BatchContext.Push(pdata.Key, pdata.Value));
  }
  else
  {
    foreach (var pdata in sampleBatch)
    {
      runIteration(pdata.Key, pdata.Value);
    }
  }

  // optimize and apply updates
  m_Optimizer.Push(Net.Weights, m_Gradient, m_LearningRate);

  // update batch stats
  m_Iteration += m_BatchSize;
  m_Batch++;
  m_Step2 = m_Optimizer.Step2;
  m_PrevLossValue = m_LossValue;
  m_LossValue = m_IterLossValue;
  m_LossDelta = m_LossValue - m_PrevLossValue;
  m_IterLossValue = 0.0D;

  if (BatchEndedEvent != null)
  {
    BatchEndedEvent(this, EventArgs.Empty);
  }
}
public void Gradient_1ConvLayer_1Iter_Euclidean()
{
  // arrange
  var net = new ConvNet(3, 1, 1) { IsTraining = true };
  net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 1, activation: Activation.Atan));
  net._Build();
  net.RandomizeParameters(seed: 0);

  var point1 = RandomPoint(3, 1, 1);
  var point2 = RandomPoint(3, 1, 1); // just for 2-dim output
  var sample = new ClassifiedSample<double[][,]>();
  sample[point1] = CLASSES[0];
  sample[point2] = CLASSES[1];

  var alg = new BackpropAlgorithm(net)
  {
    LearningRate = 0.1D,
    LossFunction = Loss.Euclidean
  };
  alg.Build();

  // act
  alg.RunIteration(point1, EXPECTED[0]);

  // assert
  AssertNetGradient(alg, point1, EXPECTED[0]);
}
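The AssertNetGradient helper is not shown in these snippets. Below is a minimal sketch of what such a numeric check could look like: it compares each analytic gradient entry against a central finite difference of the loss. The net.Calculate(point) and alg.LossFunction.Value(output, expected) calls are hypothetical names for the forward pass and loss evaluation, not confirmed API; only alg.Net, net.Weights and alg.Gradient appear in the surrounding code.

// Sketch of a finite-difference gradient check (hypothetical API, see note above):
// perturb each weight by +/-h, re-evaluate the loss, and compare the numeric
// slope with the analytic gradient accumulated by RunIteration.
private static void AssertNetGradient(BackpropAlgorithm alg, double[][,] point, double[] expected)
{
  const double h   = 1.0E-5D;
  const double eps = 1.0E-4D;
  var net = alg.Net;

  for (int l = 0; l < net.Weights.Length; l++)
  for (int w = 0; w < net.Weights[l].Length; w++)
  {
    var saved = net.Weights[l][w];

    net.Weights[l][w] = saved + h;
    var lossPlus = alg.LossFunction.Value(net.Calculate(point), expected);  // hypothetical calls

    net.Weights[l][w] = saved - h;
    var lossMinus = alg.LossFunction.Value(net.Calculate(point), expected); // hypothetical calls

    net.Weights[l][w] = saved; // restore before moving on

    var numeric = (lossPlus - lossMinus) / (2 * h);
    Assert.AreEqual(numeric, alg.Gradient[l][w], eps);
  }
}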
private void outputError(AlgorithmBase<double[][,]> alg)
{
  Console.WriteLine("Errors:");

  // convert raw data into the [dim][1,1] input format expected by the network
  var sample = new ClassifiedSample<double[][,]>();
  foreach (var obj in Data.Data)
  {
    var data = obj.Key;
    var key = new double[data.Length][,];
    for (int i = 0; i < data.Length; i++)
    {
      key[i] = new double[1, 1];
      key[i][0, 0] = data[i];
    }
    sample[key] = obj.Value;
  }

  var errors = alg.GetErrors(sample);
  var ec = errors.Count();
  var dc = Data.Data.Count;
  var pct = Math.Round(100.0F * ec / dc, 2);
  Console.WriteLine("{0} of {1} ({2}%)", ec, dc, pct);
}
/// <summary>
/// Creates a simple convolutional network for the MNIST demo,
/// trained with parallelized mini-batches.
/// </summary>
public static BackpropAlgorithm CreateMNISTSimpleDemoWithBatching(ClassifiedSample<double[][,]> training)
{
  Console.WriteLine("init CreateMNISTSimpleDemoWithBatching");

  var activation = Activation.ReLU;
  var net = new ConvNet(1, 28) { IsTraining = true };

  net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
  net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
  net.AddLayer(new ConvLayer(outputDepth: 18, windowSize: 5));
  net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
  net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));

  net._Build();
  net.RandomizeParameters(seed: 0);

  var lrate = 0.0001D;
  var alg = new BackpropAlgorithm(training, net)
  {
    EpochCount = 50,
    LearningRate = lrate,
    BatchSize = 8,
    UseBatchParallelization = true,
    MaxBatchThreadCount = 8,
    LossFunction = Loss.Euclidean,
    Optimizer = Optimizer.RMSProp,
    LearningRateScheduler = LearningRateScheduler.DropBased(lrate, 5, 0.5D)
  };

  return alg;
}
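A minimal usage sketch for the factory above. The parameterless alg.Train() call is an assumption (only Train(sample) overloads appear elsewhere in these snippets); EpochEndedEvent, LossValue and LossDelta are used the same way in the test helper further down.

// Hypothetical driver for the batching demo (Train() is an assumed entry point):
var alg = CreateMNISTSimpleDemoWithBatching(training);
alg.EpochEndedEvent += (o, e) =>
{
  Console.WriteLine("epoch ended: L={0}  DL={1}", alg.LossValue, alg.LossDelta);
};
alg.Train(); // assumed to iterate EpochCount epochs over the ctor-supplied sample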
public static BackpropAlgorithm CreateMNISTHardDemo(ClassifiedSample<double[][,]> training)
{
  Console.WriteLine("init CreateMNISTHardDemo");

  var activation = Activation.ReLU;
  var net = new ConvNet(1, 28) { IsTraining = true };

  net.AddLayer(new ConvLayer(outputDepth: 32, windowSize: 3, activation: activation));
  net.AddLayer(new ConvLayer(outputDepth: 64, windowSize: 3, activation: activation));
  net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2));
  net.AddLayer(new DropoutLayer(0.25));
  net.AddLayer(new FlattenLayer(outputDim: 128, activation: activation));
  net.AddLayer(new DropoutLayer(0.5));
  net.AddLayer(new FlattenLayer(outputDim: 10, activation: Activation.Logistic(1)));

  net._Build();
  net.RandomizeParameters(seed: 0);

  var lrate = 0.005D;
  var alg = new BackpropAlgorithm(training, net)
  {
    LossFunction = Loss.Euclidean,
    EpochCount = 50,
    LearningRate = lrate,
    BatchSize = 1,
    LearningRateScheduler = LearningRateScheduler.Constant(lrate)
  };

  return alg;
}
private ML.DeepMethods.Algorithms.BackpropAlgorithm createCNNAlg_NN_ForTest()
{
  var cnn = new ConvNet(2, 1) { IsTraining = true };

  cnn.AddLayer(new DenseLayer(15, activation: Activation.Logistic(1)));
  cnn.AddLayer(new MaxPoolingLayer(1, 1));
  //cnn.AddLayer(new _ActivationLayer(Activation.Logistic(1)));
  cnn.AddLayer(new DropoutLayer(0.1));
  cnn.AddLayer(new FlattenLayer(3, activation: Activation.Logistic(1)));
  //cnn.AddLayer(new _ActivationLayer(Activation.Logistic(1)));
  cnn.AddLayer(new MaxPoolingLayer(1, 1));

  cnn._Build();
  cnn.RandomizeParameters(0);

  // convert raw data into the [dim][1,1] input format expected by the network
  var sample = new ClassifiedSample<double[][,]>();
  foreach (var obj in Data.TrainingSample)
  {
    var data = obj.Key;
    var key = new double[data.Length][,];
    for (int i = 0; i < data.Length; i++)
    {
      key[i] = new double[1, 1];
      key[i][0, 0] = data[i];
    }
    sample[key] = obj.Value;
  }

  var alg = new ML.DeepMethods.Algorithms.BackpropAlgorithm(sample, cnn);
  alg.EpochCount = 6000;
  alg.LearningRate = 0.01D;
  alg.BatchSize = 1;
  alg.LossFunction = Loss.Euclidean;

  // log progress every 300 epochs
  int epoch = 0;
  alg.EpochEndedEvent += (o, e) =>
  {
    if (epoch++ % 300 != 0)
    {
      return;
    }
    Console.WriteLine("----------------Epoch #: {0}", epoch);
    Console.WriteLine("L:\t{0}", alg.LossValue);
    Console.WriteLine("DL:\t{0}", alg.LossDelta);
    Console.WriteLine("DW:\t{0}", alg.Step2);
  };

  return alg;
}
public void BernoulliDistribution_MaximumLikelihood_FromClassifiedSample()
{
  // arrange
  var distr = new BernoulliDistribution();
  var sample = new ClassifiedSample<double[]>
  {
    { new[] { 1.0D, 1.0D, 0.0D }, new Class("A", 0) },
    { new[] { 1.0D, 0.0D, 1.0D }, new Class("A", 0) },
    { new[] { 0.0D, 0.0D, 1.0D }, new Class("B", 1) },
    { new[] { 0.0D, 0.0D, 0.0D }, new Class("B", 1) },
  };

  // act
  var res = distr.FromSample(sample);
  var dA1 = res[0][0];
  var dA2 = res[0][1];
  var dA3 = res[0][2];
  var dB1 = res[1][0];
  var dB2 = res[1][1];
  var dB3 = res[1][2];

  // assert
  Assert.AreEqual(1.0D, dA1.P, EPS);
  Assert.AreEqual(0.5D, dA2.P, EPS);
  Assert.AreEqual(0.5D, dA3.P, EPS);
  Assert.AreEqual(0.0D, dB1.P, EPS);
  Assert.AreEqual(0.0D, dB2.P, EPS);
  Assert.AreEqual(0.5D, dB3.P, EPS);
}
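The expected values are just the per-class feature means, which is the maximum-likelihood estimate of a Bernoulli parameter: for class A, feature 1 is set in both of its two samples, so P = (1 + 1) / 2 = 1.0, while features 2 and 3 are each set once, so P = 0.5; for class B, features 1 and 2 never occur (P = 0) and feature 3 occurs once in two samples (P = 0.5).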
public void MultinomialPartDistribution_MaximumLikelihood_FromClassifiedSample_UseSmoothing()
{
  // arrange
  var sample = new ClassifiedSample<double[]>
  {
    { new[] { 1.0D, 2.0D, 0.0D }, new Class("A", 0) },
    { new[] { 3.0D, 0.0D, 2.0D }, new Class("A", 0) },
    { new[] { 0.0D, 3.0D, 1.0D }, new Class("B", 1) },
    { new[] { 0.0D, 2.0D, 0.0D }, new Class("B", 1) },
    { new[] { 0.0D, 2.0D, 2.0D }, new Class("B", 1) },
  };
  var n = 3; // sample[i].Key.Length - the length of the word dictionary
  var distr = new MultinomialPartDistribution
  {
    N = n,
    UseSmoothing = true,
    Alpha = 2
  };

  // act
  var res = distr.FromSample(sample);
  var dA1 = res[0][0];
  var dA2 = res[0][1];
  var dA3 = res[0][2];
  var dB1 = res[1][0];
  var dB2 = res[1][1];
  var dB3 = res[1][2];

  // assert
  Assert.AreEqual(6.0D / 14, dA1.P, EPS);
  Assert.AreEqual(4.0D / 14, dA2.P, EPS);
  Assert.AreEqual(4.0D / 14, dA3.P, EPS);
  Assert.AreEqual(2.0D / 16, dB1.P, EPS);
  Assert.AreEqual(9.0D / 16, dB2.P, EPS);
  Assert.AreEqual(5.0D / 16, dB3.P, EPS);
}
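With additive smoothing the estimate is P = (count + Alpha) / (classTotal + Alpha * N), which is what the smoothing branch of the FromSample implementation further down computes. For class A, feature 1: the per-class count is 1 + 3 = 4 and the class total is 1 + 2 + 0 + 3 + 0 + 2 = 8, giving P = (4 + 2) / (8 + 2 * 3) = 6/14. For class B, feature 1: the count is 0 and the class total is 10, giving P = (0 + 2) / (10 + 6) = 2/16; the remaining expected values follow the same pattern.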
private void doLoad(string path)
{
  var sample = new ClassifiedSample<string>();

  using (var srcFile = File.Open(path, FileMode.Open, FileAccess.Read))
  using (var srcReader = new StreamReader(srcFile))
  {
    while (true)
    {
      var line = srcReader.ReadLine();
      if (line == null)
      {
        break;
      }

      line = line.Replace('"', ' ').TrimEnd(SEPARATOR);
      var sIdx = line.IndexOf(SEPARATOR[0]);
      if (sIdx < 0)
      {
        continue;
      }

      var cls = m_Classes[line.Substring(0, sIdx).Trim()];
      var doc = line.Substring(sIdx + 1, line.Length - sIdx - 1).Trim();
      sample[doc] = cls;
    }
  }

  // 80/20 training/testing split
  var cnt = sample.Count;
  var tcnt = cnt * 4 / 5;
  m_TrainingSet = sample.Subset(0, tcnt);
  m_TestingSet = sample.Subset(tcnt, cnt - tcnt);
}
public void NaiveBayesianAlgorithm_CalculateClassScore()
{
  // arrange
  var kernel = new TriangularKernel();
  var alg = new NaiveBayesianKernelAlgorithm(kernel, 2.0D);
  var sample = new ClassifiedSample<double[]>
  {
    { new[] { 2.0, 1.0 }, new Class("A", 0) },
    { new[] { 0.0, 3.0 }, new Class("A", 0) },
    { new[] { 4.0, 3.0 }, new Class("B", 1) }
  };

  // act
  alg.Train(sample);
  var s11 = alg.CalculateClassScore(new[] { 1.0, 2.0 }, new Class("A", 0));
  var s12 = alg.CalculateClassScore(new[] { 1.0, 2.0 }, new Class("B", 1));
  var s21 = alg.CalculateClassScore(new[] { 2.0, 2.0 }, new Class("A", 0));
  var s22 = alg.CalculateClassScore(new[] { 2.0, 2.0 }, new Class("B", 1));
  var s31 = alg.CalculateClassScore(new[] { 3.0, 2.0 }, new Class("A", 0));
  var s32 = alg.CalculateClassScore(new[] { 3.0, 2.0 }, new Class("B", 1));

  // assert
  Assert.AreEqual(Math.Log(1 / 24.0D), s11, EPS);
  Assert.AreEqual(double.NegativeInfinity, s12);
  Assert.AreEqual(Math.Log(1 / 24.0D), s21, EPS);
  Assert.AreEqual(double.NegativeInfinity, s22);
  Assert.AreEqual(Math.Log(1 / 48.0D), s31, EPS);
  Assert.AreEqual(Math.Log(1 / 48.0D), s32, EPS);
}
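The expected scores can be reproduced by hand, assuming the usual kernel-density naive Bayes score log(P(y) * prod_j (1 / (m_y * h)) * sum_i K((x_j - x_ij) / h)) with the triangular kernel K(r) = max(0, 1 - |r|). For the point (1, 2) and class A (prior 2/3, m_A = 2, h = 2): feature 1 contributes (K(-0.5) + K(0.5)) / 4 = 1/4 and feature 2 likewise 1/4, so the score is log(2/3 * 1/16) = log(1/24). For class B, the single point (4, 3) lies farther than h from (1, 2) along feature 1, so the kernel sum is 0 and the log score is negative infinity.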
public void NaiveBayesianAlgorithm_Predict()
{
  // arrange
  var kernel = new TriangularKernel();
  var alg = new NaiveBayesianKernelAlgorithm(kernel, 0.3D);
  var sample = new ClassifiedSample<double[]>
  {
    { new[] { 0.2, 0.2 }, new Class("A", 0) },
    { new[] { 0.4, 0.6 }, new Class("A", 0) },
    { new[] { 0.6, 0.4 }, new Class("A", 0) },
    { new[] { 0.8, 0.6 }, new Class("B", 1) },
    { new[] { 0.8, 0.8 }, new Class("B", 1) }
  };

  // act
  alg.Train(sample);
  var res1 = alg.Predict(new[] { 0.4, 0.4 });
  var res2 = alg.Predict(new[] { 0.6, 0.6 });
  var res3 = alg.Predict(new[] { 0.9, 0.7 });

  // assert
  Assert.AreEqual(new Class("A", 0), res1);
  Assert.AreEqual(new Class("A", 0), res2);
  Assert.AreEqual(new Class("B", 1), res3);
}
public NearestKNeighboursAlgorithm(ClassifiedSample<double[]> classifiedSample, IMetric metric, int k)
  : base(classifiedSample, metric)
{
  K = k;
}
public void NormalDistribution_MaximumLikelihood_FromClassifiedSample()
{
  // arrange
  var distr = new NormalDistribution();
  var sample = new ClassifiedSample<double[]>
  {
    { new[] { -1.0D, 1.0D, 2.0D }, new Class("A", 0) },
    { new[] { 2.0D, 2.0D, 2.5D }, new Class("A", 0) },
    { new[] { 3.0D, 3.0D, 2.6D }, new Class("B", 1) },
    { new[] { 3.5D, 4.0D, 2.8D }, new Class("B", 1) },
  };

  // act
  var res = distr.FromSample(sample);
  var dA1 = res[0][0];
  var dA2 = res[0][1];
  var dA3 = res[0][2];
  var dB1 = res[1][0];
  var dB2 = res[1][1];
  var dB3 = res[1][2];

  // assert
  Assert.AreEqual(0.5D, dA1.Mu, EPS);
  Assert.AreEqual(1.5D, dA1.Sigma, EPS);
  Assert.AreEqual(1.5D, dA2.Mu, EPS);
  Assert.AreEqual(0.5D, dA2.Sigma, EPS);
  Assert.AreEqual(2.25D, dA3.Mu, EPS);
  Assert.AreEqual(0.25D, dA3.Sigma, EPS);
  Assert.AreEqual(3.25D, dB1.Mu, EPS);
  Assert.AreEqual(0.25D, dB1.Sigma, EPS);
  Assert.AreEqual(3.5D, dB2.Mu, EPS);
  Assert.AreEqual(0.5D, dB2.Sigma, EPS);
  Assert.AreEqual(2.7D, dB3.Mu, EPS);
  Assert.AreEqual(0.1D, dB3.Sigma, EPS);
}
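Note that FromSample returns the biased maximum-likelihood estimates: Mu is the per-class feature mean and Sigma the population (not sample) standard deviation. For class A, feature 1 the values are -1 and 2, so Mu = (-1 + 2) / 2 = 0.5 and Sigma = sqrt(((-1 - 0.5)^2 + (2 - 0.5)^2) / 2) = 1.5, matching the first two asserts.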
public ParzenVariableAlgorithm(ClassifiedSample<double[]> classifiedSample, IMetric metric, IFunction kernel, int k)
  : base(classifiedSample, metric, kernel)
{
  K = k;
}
public ParzenFixedAlgorithm(ClassifiedSample<double[]> classifiedSample, IMetric metric, IFunction kernel, double h)
  : base(classifiedSample, metric, kernel)
{
  H = h;
}
public NearestKWeighedNeighboursAlgorithm(ClassifiedSample<double[]> classifiedSample, IMetric metric, int k, double[] weights)
  : base(classifiedSample, metric)
{
  K = k;
  Weights = weights;
}
public BackpropAlgorithm(ClassifiedSample<double[][,]> classifiedSample, ConvNet net)
  : base(classifiedSample, net)
{
  m_EpochCount          = DFT_EPOCH_COUNT;
  m_LearningRate        = DFT_LEARNING_RATE;
  m_Stop                = DTF_STOP_CRITERIA;
  m_BatchSize           = DFT_BATCH_SIZE;
  m_MaxBatchThreadCount = DFT_BATCH_THREAD_COUNT;
}
protected MetricAlgorithmBase(ClassifiedSample<TObj> classifiedSample, IMetric metric)
  : base(classifiedSample)
{
  if (metric == null)
  {
    throw new MLException("MetricAlgorithmBase.ctor(metric=null)");
  }
  m_Metric = metric;
}
protected ConvNetAlgorithmBase(ClassifiedSample<double[][,]> trainingSample, ConvNet net)
  : base(trainingSample)
{
  if (net == null)
  {
    throw new MLException("Network can not be null");
  }
  m_Net = net;
}
protected NeuralNetworkAlgorithmBase(ClassifiedSample<double[]> classifiedSample, NeuralNetwork net)
  : base(classifiedSample)
{
  if (net == null)
  {
    throw new MLException("Network can not be null");
  }
  m_Result = net;
  net.IsTraining = true;
}
public void SimpleNet_Euclidean_OneIter()
{
  // arrange
  var net = Mocks.SimpleLinearNetwork();
  var sample = new ClassifiedSample<double[][,]>();
  var point = new double[1][,] { new[,] { { 1.0D } } };
  sample[point] = new Class("a", 0);

  var alg = new BackpropAlgorithm(net);
  alg.LearningRate = 2.0D;
  alg.LossFunction = Loss.Euclidean;
  alg.Build();

  // act
  alg.RunIteration(point, new double[] { 1.0D });

  // assert: forward-pass values
  Assert.AreEqual(12, alg.Values[0][0][0, 0]);
  Assert.AreEqual(33, alg.Values[1][0][0, 0]);
  Assert.AreEqual(-62, alg.Values[2][0][0, 0]);

  // assert: activation derivatives at the layer outputs
  Assert.AreEqual(3, net[0].ActivationFunction.DerivativeFromValue(alg.Values[0][0][0, 0]));
  Assert.AreEqual(3, net[1].ActivationFunction.DerivativeFromValue(alg.Values[1][0][0, 0]));
  Assert.AreEqual(2, net[2].ActivationFunction.DerivativeFromValue(alg.Values[2][0][0, 0]));

  // assert: backpropagated errors
  Assert.AreEqual(-126, alg.Errors[2][0][0, 0]);
  Assert.AreEqual(378, alg.Errors[1][0][0, 0]);
  Assert.AreEqual(1134, alg.Errors[0][0][0, 0]);

  // assert: accumulated gradient (error x layer input, then bias)
  Assert.AreEqual(-126 * 33, alg.Gradient[2][0]);
  Assert.AreEqual(-126, alg.Gradient[2][1]);
  Assert.AreEqual(378 * 12, alg.Gradient[1][0]);
  Assert.AreEqual(378, alg.Gradient[1][1]);
  Assert.AreEqual(1134 * 1, alg.Gradient[0][0]);
  Assert.AreEqual(1134, alg.Gradient[0][1]);

  // assert: weights after applying the gradient with learning rate 2
  alg.FlushGradient();
  Assert.AreEqual(-1 + 2 * 126 * 33, net[2].Weights[0]);
  Assert.AreEqual(2 + 2 * 126, net[2].Weights[1]);
  Assert.AreEqual(1 + 2 * (-378 * 12), net[1].Weights[0]);
  Assert.AreEqual(-1 + 2 * (-378), net[1].Weights[1]);
  Assert.AreEqual(3 + 2 * (-1134 * 1), net[0].Weights[0]);
  Assert.AreEqual(1 + 2 * (-1134), net[0].Weights[1]);
}
public KernelAlgorithmBase(ClassifiedSample<double[]> classifiedSample, IMetric metric, IFunction kernel)
  : base(classifiedSample, metric)
{
  if (kernel == null)
  {
    throw new MLException("KernelAlgorithmBase.ctor(kernel=null)");
  }
  m_Kernel = kernel;
}
public override Parameters[][] FromSample(ClassifiedSample<double[]> sample)
{
  var dim = sample.GetDimension();
  var classes = sample.CachedClasses;
  var ts = new double[classes.Count];
  var result = new Parameters[classes.Count][];
  var temp = new double[classes.Count][];
  foreach (var cls in classes)
  {
    result[cls.Value] = new Parameters[dim];
    temp[cls.Value] = new double[dim];
  }

  // accumulate per-class feature counts and per-class totals
  for (int i = 0; i < dim; i++)
  {
    foreach (var pData in sample)
    {
      var data = pData.Key;
      var cls = pData.Value;
      var p = data[i];
      temp[cls.Value][i] += p;
      ts[cls.Value] += p;
    }
  }

  // normalize, optionally with additive (Laplace) smoothing
  foreach (var cls in classes)
  {
    var tmps = temp[cls.Value];
    var rs = result[cls.Value];
    var bs = ts[cls.Value];
    if (UseSmoothing)
    {
      bs += m_Alpha * m_N;
    }

    for (int i = 0; i < dim; i++)
    {
      var p = tmps[i];
      if (UseSmoothing)
      {
        p += m_Alpha;
      }
      rs[i] = new Parameters(p / bs);
    }
  }

  return result;
}
public void Gradient_DifferentLayers_1Iter_CrossEntropy_Regularization()
{
  // arrange
  var activation = Activation.ReLU;
  var net = new ConvNet(1, 5) { IsTraining = true };

  net.AddLayer(new ConvLayer(outputDepth: 2, windowSize: 3, padding: 1));
  net.AddLayer(new MaxPoolingLayer(windowSize: 3, stride: 2, activation: Activation.Exp));
  net.AddLayer(new ActivationLayer(activation: Activation.Tanh));
  net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));
  net.AddLayer(new DropoutLayer(rate: 0.5D));
  net.AddLayer(new DenseLayer(outputDim: 3, activation: Activation.Exp));

  net._Build();
  net.RandomizeParameters(seed: 0);

  var sample = new ClassifiedSample<double[][,]>();
  for (int i = 0; i < 3; i++)
  {
    var point = RandomPoint(1, 5, 5);
    sample[point] = new Class(i.ToString(), i);
  }

  var regularizator = Regularizator.Composite(Regularizator.L1(0.1D), Regularizator.L2(0.3D));
  var alg = new BackpropAlgorithm(net)
  {
    LearningRate = 0.1D,
    LossFunction = Loss.CrossEntropySoftMax,
    Regularizator = regularizator
  };
  alg.Build();

  // act
  var data = sample.First();
  var expected = new double[3] { 1.0D, 0.0D, 0.0D };
  alg.RunIteration(data.Key, expected);
  regularizator.Apply(alg.Gradient, alg.Net.Weights);
  ((DropoutLayer)alg.Net[4]).ApplyCustomMask = true;

  // assert
  AssertNetGradient(alg, data.Key, expected);
}
private void doExport(string fpath, string opath)
{
  var sample = new ClassifiedSample<string>();

  using (var srcFile = File.Open(fpath, FileMode.Open, FileAccess.Read))
  using (var srcReader = new StreamReader(srcFile))
  {
    // read all documents (the original one-shot ReadLine imported only the first line)
    string line;
    while ((line = srcReader.ReadLine()) != null)
    {
      var segs = line.Split(SEPARATOR, StringSplitOptions.RemoveEmptyEntries);
      if (segs.Length < 2)
      {
        continue; // skip malformed or empty lines
      }
      var cls = m_Classes[segs[0]];
      var doc = segs[1];
      sample.Add(doc, cls);
    }
  }

  var vocabulary = Alg.ExtractVocabulary(sample);
  var dim = vocabulary.Count;
  var builder = new StringBuilder();

  using (var outFile = File.Open(opath, FileMode.CreateNew, FileAccess.Write))
  using (var outWriter = new StreamWriter(outFile))
  {
    // header: vocabulary tokens followed by the class columns
    for (int i = 0; i < dim; i++)
    {
      builder.AppendFormat("{0},", vocabulary[i]);
    }
    builder.Append("_class,_value,_training");
    outWriter.WriteLine(builder.ToString());

    foreach (var pData in sample)
    {
      var doc = pData.Key;
      var cls = pData.Value;

      bool isEmpty;
      var data = Alg.ExtractFeatureVector(doc, out isEmpty);
      if (isEmpty)
      {
        continue;
      }

      builder.Clear();
      for (int i = 0; i < dim; i++)
      {
        builder.AppendFormat("{0},", data[i]);
      }
      builder.AppendFormat("{0},{1},{2}", cls.Name, cls.Value, 1);
      outWriter.WriteLine(builder.ToString());
    }
  }
}
public PotentialFunctionAlgorithm(ClassifiedSample<double[]> classifiedSample, IMetric metric, IFunction kernel, KernelEquipment[] eqps)
  : base(classifiedSample, metric)
{
  if (kernel == null)
  {
    throw new MLException("PotentialAlgorithm.ctor(kernel=null)");
  }
  m_Kernel = kernel;
  Eqps = eqps;
}
public override IEnumerable<ErrorInfo> GetErrors(ClassifiedSample<double[][,]> classifiedSample)
{
  // temporarily switch the net to inference mode so that training-only layers
  // (e.g. dropout) do not distort the error estimate
  var isTraining = m_Net.IsTraining;
  m_Net.IsTraining = false;
  try
  {
    return base.GetErrors(classifiedSample);
  }
  finally
  {
    m_Net.IsTraining = isTraining;
  }
}
public virtual List<string> ExtractVocabulary(ClassifiedSample<string> corpus)
{
  var dict = new HashSet<string>();

  foreach (var doc in corpus)
  {
    var tokens = m_Preprocessor.Preprocess(doc.Key);
    foreach (var token in tokens)
    {
      dict.Add(token);
    }
  }

  return dict.ToList();
}
private DecisionNode<TObj> trainID3Core(IEnumerable<Predicate<TObj>> patterns, ClassifiedSample<TObj> sample, IInformativityIndex<TObj> informativity)
{
  if (!sample.Any())
  {
    throw new MLException("Empty sample");
  }

  // stop: all objects share one class -> leaf
  var cls = sample.First().Value;
  if (sample.All(kvp => kvp.Value.Equals(cls)))
  {
    return new LeafNode<TObj>(cls);
  }

  // split the sample by the most informative pattern
  var pattern = informativity.Max(patterns, sample);
  var negSample = new ClassifiedSample<TObj>();
  var posSample = new ClassifiedSample<TObj>();
  foreach (var pData in sample)
  {
    if (pattern(pData.Key))
    {
      posSample.Add(pData.Key, pData.Value);
    }
    else
    {
      negSample.Add(pData.Key, pData.Value);
    }
  }

  // stop: degenerate split -> leaf with the majority class
  if (!negSample.Any() || !posSample.Any())
  {
    var majorClass = sample.GroupBy(pd => pd.Value)
                           .Select(g => new KeyValuePair<Class, int>(g.Key, g.Count()))
                           .OrderByDescending(c => c.Value)
                           .First();
    return new LeafNode<TObj>(majorClass.Key);
  }

  // recurse into both branches
  var node = new InnerNode<TObj>(pattern);
  var negNode = trainID3Core(patterns, negSample, informativity);
  var posNode = trainID3Core(patterns, posSample, informativity);
  node.SetNegativeNode(negNode);
  node.SetPositiveNode(posNode);

  return node;
}
public void Gradient_MNISTSimple_1Iter()
{
  // arrange
  var activation = Activation.ReLU;
  var net = new ConvNet(1, 14) { IsTraining = true };

  net.AddLayer(new ConvLayer(outputDepth: 4, windowSize: 5));
  net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
  net.AddLayer(new ConvLayer(outputDepth: 8, windowSize: 5));
  net.AddLayer(new MaxPoolingLayer(windowSize: 2, stride: 2, activation: activation));
  net.AddLayer(new FlattenLayer(outputDim: 10, activation: activation));

  net._Build();
  Randomize(net.Weights, -1.0D, 1.0D);

  var sample = new ClassifiedSample<double[][,]>();
  for (int i = 0; i < 10; i++)
  {
    var point = RandomPoint(1, 14, 14);
    sample[point] = new Class(i.ToString(), i);
  }

  var alg = new BackpropAlgorithm(net)
  {
    LearningRate = 0.005D,
    LossFunction = Loss.Euclidean
  };
  alg.Build();

  // act
  var data = sample.First();
  var expected = new double[10] { 1.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D, 0.0D };
  alg.RunIteration(data.Key, expected);

  // assert
  AssertNetGradient(alg, data.Key, expected);
}