static void Main(string[] args)
{
    Global.ComputationType = ComputationType.Cpu;

    NeuralLayeredNetwork network = new NeuralLayeredNetwork(new Shape(BatchSize, 1, 28, 28));

    network
        .Conv(8, 3, 1, new HeInitializer())
        .Relu()
        .MaxPool(2, 2)
        .Flatten()
        .Fully(64, new HeInitializer())
        .Relu()
        .Fully(10, new HeInitializer())
        .Softmax();

    _trainExamples = Dataset.CreateTrainDataset(BatchSize);
    _testExamples = Dataset.CreateTestDataset(BatchSize);

    var optimizerFactory = ComponentsFactories.OptimizerFactory;
    var metricFactory = ComponentsFactories.MetricFactory;

    BaseTrainer trainer = new MiniBatchTrainer(_trainExamples, new MiniBatchTrainerSettings
    {
        EpochsCount = 1,
        BatchSize = BatchSize,
        LossFunction = new CrossEntropy(),
        Optimizer = optimizerFactory.CreateAdam(0.01f),
        Metric = metricFactory.CreateClassificationAccuracy() // Warning: GPU metrics have not been implemented yet
    });

    trainer.AddEventHandler(new ConsoleLogger());
    trainer.TrainModel(network);

    ITester tester = new ClassificationTester(_testExamples);

    Console.WriteLine("Testing...");
    var testResult = tester.TestModel(network);
    Console.WriteLine(testResult);
}
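The fluent chain above builds a small CNN for 28x28 MNIST images. The sketch below traces how the input shape shrinks on its way to the Flatten layer, assuming Conv(8, 3, 1) means 8 filters with a 3x3 kernel, stride 1 and no padding, and MaxPool(2, 2) means a 2x2 window with stride 2; these conventions are assumptions about the layer parameters, not something confirmed by the library.

// Minimal sketch of the shape arithmetic behind the network above.
// Assumes "valid" (unpadded) convolution and non-overlapping pooling.
static void PrintFeatureMapSizes()
{
    int size = 28;                                      // input is 28x28 (MNIST)
    int channels = 8;                                   // number of Conv filters

    int afterConv = size - 3 + 1;                       // 3x3 kernel, stride 1, no padding: 26
    int afterPool = afterConv / 2;                      // 2x2 pooling, stride 2: 13
    int flattened = channels * afterPool * afterPool;   // 8 * 13 * 13 = 1352

    Console.WriteLine($"Conv output: {channels}x{afterConv}x{afterConv}");
    Console.WriteLine($"Flatten feeds {flattened} values into Fully(64)");
}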
static void Main(string[] args)
{
    #region Reading Data

    string[] fields = new string[ParametersCount];
    float[][] parameters = new float[RowsCount][];
    float[] output = new float[RowsCount];

    for (int i = 0; i < parameters.Length; i++)
    {
        parameters[i] = new float[ParametersCount];
    }

    using (StreamReader streamReader = new StreamReader(DataFilePath))
    {
        using (CsvReader csvReader = new CsvReader(streamReader, new CultureInfo("en-US")))
        {
            csvReader.Configuration.Delimiter = ",";
            csvReader.Read();

            for (int i = 0; i < ParametersCount; i++)
            {
                fields[i] = csvReader.GetField<string>(i + 1);
            }

            var index = 0;
            while (csvReader.Read())
            {
                for (int i = 0; i < ParametersCount; i++)
                {
                    parameters[index][i] = csvReader.GetField<float>(i + 1);
                }

                output[index] = csvReader.GetField<float>(ParametersCount + 1);
                index++;
            }
        }
    }

    #endregion

    #region Data standardization

    float[] mean = new float[ParametersCount];
    float[] deviation = new float[ParametersCount];

    for (int i = 0; i < ParametersCount; i++)
    {
        // First pass: the column mean must be finalized before it is used below.
        for (int j = 0; j < RowsCount; j++)
        {
            mean[i] += parameters[j][i];
        }
        mean[i] /= RowsCount;

        // Second pass: population standard deviation of the column.
        for (int j = 0; j < RowsCount; j++)
        {
            deviation[i] += MathF.Pow(parameters[j][i] - mean[i], 2);
        }
        deviation[i] = MathF.Sqrt(deviation[i] / RowsCount);
    }

    for (int i = 0; i < RowsCount; i++)
    {
        for (int j = 0; j < ParametersCount; j++)
        {
            parameters[i][j] = (parameters[i][j] - mean[j]) / deviation[j];
        }
    }

    #endregion

    #region Training

    var examples = new List<Example>();
    var builder = TensorBuilder.Create();

    for (int i = 0; i < RowsCount; i++)
    {
        var inTensor = builder.OfShape(new Shape(1, 1, 1, ParametersCount));
        var outTensor = builder.OfShape(Shape.ForScalar());

        inTensor.Storage.Data = parameters[i];
        outTensor[0] = output[i];

        examples.Add(new Example
        {
            Input = inTensor,
            Output = outTensor
        });
    }

    var network = new NeuralLayeredNetwork(new Shape(1, 1, 1, ParametersCount));

    network
        .Fully(64)
        .Relu()
        .Fully(1);

    var optimizerFactory = ComponentsFactories.OptimizerFactory;
    var metricFactory = ComponentsFactories.MetricFactory;

    var trainer = new MiniBatchTrainer(examples, new MiniBatchTrainerSettings
    {
        BatchSize = 32,
        EpochsCount = 50,
        LossFunction = new MeanSquaredError(),
        Metric = metricFactory.CreateMAE(), // Warning: GPU metrics have not been implemented yet
        Optimizer = optimizerFactory.CreateAdam(0.001f)
    });

    trainer.AddEventHandler(new ConsoleLogger());
    trainer.TrainModel(network);

    #endregion
}
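For reference, the per-column z-score standardization from the "Data standardization" region is pulled out below into a standalone helper, computed in two passes (mean first, then deviation). This is a minimal sketch of the same preprocessing step; the helper name StandardizeColumns is hypothetical and is not part of the library's API.

// Standalone sketch of per-column z-score standardization: x' = (x - mean) / stddev.
static void StandardizeColumns(float[][] data)
{
    int rows = data.Length;
    int cols = data[0].Length;

    for (int c = 0; c < cols; c++)
    {
        // First pass: column mean.
        float mean = 0f;
        for (int r = 0; r < rows; r++)
            mean += data[r][c];
        mean /= rows;

        // Second pass: population standard deviation of the column.
        float variance = 0f;
        for (int r = 0; r < rows; r++)
            variance += MathF.Pow(data[r][c] - mean, 2);
        float deviation = MathF.Sqrt(variance / rows);

        // Shift and scale so the column has mean 0 and deviation 1.
        for (int r = 0; r < rows; r++)
            data[r][c] = (data[r][c] - mean) / deviation;
    }
}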