/// <summary>
/// Initializes trainer settings with the default hyper-parameters:
/// a single epoch, CPU Adam optimizer with learning rate 1e-3,
/// cross-entropy loss, and CPU classification accuracy as the metric.
/// </summary>
public TrainerSettings()
{
    EpochsCount = 1;
    Optimizer = new CpuAdam(1e-3f);
    LossFunction = new CrossEntropy();
    Metric = new CpuClassificationAccuracy();
}
/// <summary>
/// Verifies that <see cref="CrossEntropyOneHot"/> and <see cref="CrossEntropy"/>
/// report the same loss when the expected output is a one-hot vector.
/// </summary>
public void CrossEntropyTest()
{
    var crossEntropyOH = new CrossEntropyOneHot();
    var crossEntropy = new CrossEntropy();
    int nb = 10;
    double[] input = new double[nb];
    double[] output = new double[nb];
    // Random positive logits in [0, pi).
    for (int i = 0; i < nb; i++)
    {
        input[i] = Math.PI * rnd.NextDouble();
    }
    var softmax = new Softmax();
    softmax.Activate(input, output);
    // One-hot target placed at the arg-max of the softmax output.
    double[] expectedOutput = new double[nb];
    expectedOutput[output.ArgMax()] = 1;
    double[] errors = new double[nb];
    double entropyOH = crossEntropyOH.Evaluate(output, expectedOutput, errors);
    double entropy = crossEntropy.Evaluate(output, expectedOutput, errors);
    // Two independent implementations should agree only up to floating-point
    // rounding, so compare with a tolerance instead of exact double equality.
    Assert.AreEqual(entropyOH, entropy, 1e-9);
}
/// <summary>
/// Checks the Matrix-based <c>CrossEntropy</c> loss against a hand-computed
/// binary cross-entropy: forward value (mean over all elements) and the
/// backward pass, compared via Frobenius norms with a 0.01 tolerance.
/// NOTE(review): the manual formula adds <c>double.Epsilon</c> only inside the
/// second log term, presumably mirroring the library's own epsilon placement —
/// confirm against the CrossEntropy implementation.
/// </summary>
public void CrossEntropyTest() { var actual = new Matrix(4, 1); var expected = new Matrix(4, 1); actual.InRandomize(); expected.InRandomize(); var autoErr = new CrossEntropy().Evaluate(actual, expected); var error = 0.0; for (var i = 0; i < actual.Rows; i++) { for (var j = 0; j < actual.Columns; j++) { error += -expected[i, j] * Math.Log(actual[i, j]) - (1.0 - expected[i, j]) * Math.Log(1.0 - actual[i, j] + double.Epsilon); } } error /= actual.Rows * actual.Columns; Assert.IsTrue(Math.Abs(error - autoErr) < 0.01, new CrossEntropy().Type().ToString() + " Forward!"); var autoDErr = new CrossEntropy().Backward(actual, expected); var dErr = (actual - expected).HadamardDivision(actual.Hadamard(actual) - (actual + actual.Fill(double.Epsilon))); Assert.IsTrue(Math.Abs(dErr.FrobeniusNorm() - autoDErr.FrobeniusNorm()) < 0.01, new CrossEntropy().Type().ToString() + " Backward!"); }
/// <summary>
/// ComputeAccuracy over two 2x3 one-hot tensors in which exactly one of the
/// two rows matches must report an accuracy of 0.5.
/// </summary>
public void AccuracyTest2()
{
    var labels = new TensorOld(new double[] { 0, 0, 1, 0, 1, 0 }, 2, 3);
    var predictions = new TensorOld(new double[] { 0, 1, 0, 0, 1, 0 }, 2, 3);

    var accuracy = CrossEntropy.ComputeAccuracy(labels, predictions);

    Assert.Equal(0.5, accuracy);
}
/// <summary>
/// A CrossEntropy loss saved to an XML element and loaded back should
/// round-trip to an instance of the same type.
/// </summary>
public void LossStorageTest()
{
    var original = new CrossEntropy();
    var document = new XmlDocument();

    var element = XmlStorage.SaveToEl(document, original);
    var restored = XmlStorage.LoadFromNode<CrossEntropy>(element);

    Assert.True(restored is CrossEntropy);
}
/// <summary>
/// A prediction placing more probability mass on the true class must score a
/// strictly lower cross-entropy loss than one placing less mass on it.
/// </summary>
public void ForwardTest2()
{
    var target = new TensorOld(new double[] { 0, 0, 1, 0 }, 1, 4);
    // Confident prediction: 0.7 on the true class.
    var confident = new TensorOld(new double[] { 0.05, 0.15, 0.7, 0.1 }, 1, 4);
    // Less confident prediction: only 0.65 on the true class.
    var lessConfident = new TensorOld(new double[] { 0.1, 0.15, 0.65, 0.1 }, 1, 4);

    var ce = new CrossEntropy();
    ce.PrepareTrain(target, confident);

    ce.Compute(target, confident);
    var confidentLoss = ce.GetLoss();

    ce.Compute(target, lessConfident);
    var lessConfidentLoss = ce.GetLoss();

    Assert.True(confidentLoss < lessConfidentLoss);
}
/// <summary>
/// Simulates the sigmoid (binary) case with 4 samples: a perfect prediction
/// yields zero loss, and predictions closer to the labels yield lower loss.
/// </summary>
public void ForwardTest()
{
    // Binary labels for 4 samples, one output column (sigmoid-style).
    var labels = new TensorOld(new double[] { 1, 0, 1, 1 }, 4, 1);
    var prediction = new TensorOld(new double[] { 0.7, 0.2, 0.4, 0.9 }, 4, 1);
    // Identical except sample 3 is closer to its label (0.6 vs 0.4).
    var betterPrediction = new TensorOld(new double[] { 0.7, 0.2, 0.6, 0.9 }, 4, 1);

    var ce = new CrossEntropy();
    ce.PrepareTrain(labels, prediction);

    // A perfect prediction must produce the minimal (zero) loss.
    ce.Compute(labels, labels);
    var minLoss = ce.GetLoss();

    ce.Compute(labels, prediction);
    var loss = ce.GetLoss();

    ce.Compute(labels, betterPrediction);
    var betterLoss = ce.GetLoss();

    Assert.Equal(0, minLoss);
    Assert.True(loss > minLoss);
    Assert.True(loss > betterLoss);
}