/// <summary>
/// Verifies that <c>ZScoreNorm</c> computes the population mean and standard
/// deviation of its fitting data, and that <c>Normalize</c> maps values below
/// the mean to negative z-scores and values above it to positive z-scores.
/// </summary>
public void ZScoreTest()
{
    // Fit the normalizer on {1, 2, 3}: mean = 2, population std = sqrt(2/3).
    var data = new TensorOld(new double[] { 1, 2, 3 });
    var norm = new ZScoreNorm(data);

    var test = new TensorOld(new double[] { 1.5, 2.5 });
    var result = norm.Normalize(test);

    // Compare computed doubles with a precision tolerance instead of exact
    // equality: bit-for-bit == on floating-point results is fragile and can
    // break on a different runtime/JIT even when the math is correct.
    Assert.Equal(2d, norm.Mean, 10);
    Assert.Equal(Math.Sqrt(2d / 3d), norm.Delta, 10);

    // 1.5 lies below the fitted mean, 2.5 above it.
    Assert.True(result[0] < 0);
    Assert.True(result[1] > 0);
}
/// <summary>
/// End-to-end MNIST demo: loads the raw IDX image/label files, builds a
/// LeNet-style CNN (conv → pool → conv → pool → fully connected head), and
/// trains it with Adam on one-hot encoded labels and z-score normalized
/// inputs, evaluating against the 10k test split after each epoch.
/// </summary>
public void Play()
{
    // Standard MNIST split sizes.
    const int trainCount = 60000;
    const int testCount = 10000;

    // Build file paths with Path.Combine so the demo is not tied to the
    // Windows '\' separator (the original hard-coded "Data\\...").
    const string dataDir = "Data";
    var trainX = MNISTReader.ReadImagesToTensor4(System.IO.Path.Combine(dataDir, "train-images.idx3-ubyte"), trainCount);
    var trainY = MNISTReader.ReadLabelsToMatrix(System.IO.Path.Combine(dataDir, "train-labels.idx1-ubyte"), trainCount);
    var testX = MNISTReader.ReadImagesToTensor4(System.IO.Path.Combine(dataDir, "t10k-images.idx3-ubyte"), testCount);
    var testY = MNISTReader.ReadLabelsToMatrix(System.IO.Path.Combine(dataDir, "t10k-labels.idx1-ubyte"), testCount);

    // Two conv/pool stages followed by a 100-50-10 fully connected head;
    // softmax + cross-entropy output, optimized with Adam.
    var nn = new NeuralNetwork()
        .AddLayer(new ConvLayer(10, 5, 1))
        .AddReLU()
        .AddLayer(new MaxPooling(2))
        .AddLayer(new ConvLayer(20, 5, 1))
        .AddReLU()
        .AddLayer(new MaxPooling(2))
        .AddLayer(new FlattenLayer())
        .AddFullLayer(100)
        .AddSigmoid()
        .AddFullLayer(50)
        .AddSigmoid()
        .AddFullLayer(10)
        .AddSoftmaxWithCrossEntropyLoss()
        .UseAdam();

    // Derive the label categories from the training labels themselves
    // (sorted, distinct raw values) so the one-hot codec matches the data.
    var cate = trainY
        .GetRawValues()
        .Distinct()
        .OrderBy(a => a)
        .Select(a => a.ToString())
        .ToList();
    var codec = new OneHotCodec(cate);

    // Normalizer is fit on the training images only; the trainer applies
    // the same transform to the test set.
    var norm = new ZScoreNorm(trainX);

    // Batch size 64, 10 epochs, shuffle enabled.
    var trainer = new Trainer(nn, 64, 10, true)
    {
        LabelCodec = codec,
        Normalizer = norm,
    };
    trainer.StartTrain(trainX, trainY, testX, testY);
}