/// <summary>
/// Verifies that Softmax.Backward returns the gradient predicted by the
/// closed-form derivation: when the loss is cross-entropy, feeding the
/// error -y / output through softmax backward must yield (output - y).
/// NOTE(review): method name has a typo ("Forwad"); kept as-is because the
/// name is part of the public surface — consider renaming to ForwardBackwardTest.
/// </summary>
public void ForwadBackwardTest()
{
    var softmax = new MLStudy.Deep.Softmax();
    softmax.PrepareTrain(new TensorOld(3, 4));

    // Run a few throwaway forward/backward passes first, so the real check
    // below also proves the layer carries no stale state between iterations.
    for (int pass = 0; pass < 3; pass++)
    {
        var randomInput = TensorOld.Rand(3, 4);
        var randomError = TensorOld.Rand(3, 4);
        softmax.Forward(randomInput);
        softmax.Backward(randomError);
    }

    // The real test starts here; a correct result shows the noise passes had no effect.
    var input = new TensorOld(new double[] { 7, 9, 1, -1, 2, -7, 2, 4, 7, 8, 4, -1 }, 3, 4);
    // One-hot labels: sample 1 is predicted correctly, samples 2 and 3 are not.
    var labels = new TensorOld(new double[] { 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0 }, 3, 4);

    var output = softmax.Forward(input);

    // Error propagated back from a cross-entropy loss: -y / output.
    var incomingError = TensorOld.DivideElementWise(labels, output) * -1;

    // Gradient produced by the implementation under test.
    var actual = softmax.Backward(incomingError);

    // Gradient from the analytic derivation. Softmax and the loss are kept
    // as separate components, so in real use each is computed independently.
    var expected = output - labels;

    // Floating-point rounding keeps some entries from matching exactly,
    // hence approximate comparison.
    MyAssert.ApproximatelyEqual(expected, actual);
}
/// <summary>
/// Verifies that TensorOld.Apply applies the given element-wise function
/// (here squaring) to every entry of the source tensor, checking each of
/// the 20x30 positions against the directly computed value.
/// </summary>
[Fact] // was missing — without it the xUnit runner never discovers or executes this test
public void ApplyTest()
{
    var source = TensorOld.Rand(20, 30);

    var squared = TensorOld.Apply(source, a => a * a);

    for (int i = 0; i < 20; i++)
    {
        for (int j = 0; j < 30; j++)
        {
            Assert.Equal(source[i, j] * source[i, j], squared[i, j]);
        }
    }
}
/// <summary>
/// Verifies that TensorOld.Reshape preserves element order (row-major
/// mapping from a length-100 vector to a 5x20 matrix) and that the result
/// is a view over the original storage: writes through the reshaped tensor
/// must be visible in the source tensor.
/// </summary>
[Fact] // was missing — without it the xUnit runner never discovers or executes this test
public void ReshapeTest()
{
    var flat = TensorOld.Rand(100);

    var reshaped = flat.Reshape(5, 20);

    // Row-major layout: element (i, j) of the 5x20 view maps to flat index i * 20 + j.
    for (int i = 0; i < 5; i++)
    {
        for (int j = 0; j < 20; j++)
        {
            Assert.Equal(flat[i * 20 + j], reshaped[i, j]);
        }
    }

    // Reshape must return a view, not a copy: mutations through the view
    // are observable in the original tensor.
    reshaped[0, 0] = 123;
    reshaped[0, 1] = 456;
    Assert.Equal(123, flat[0]);
    Assert.Equal(456, flat[1]);
}