/// <summary>
/// Demo: trains a tiny dense network on the 4-row OR-gate truth table,
/// then saves the resulting model to disk.
/// </summary>
private static void ORGate()
{
    // Truth table inputs (4 samples, 2 features each).
    var features = new DataFrame(4, 2);
    features.AddData(0, 0);
    features.AddData(0, 1);
    features.AddData(1, 0);
    features.AddData(1, 1);

    // Expected OR outputs (4 samples, 1 label each).
    var labels = new DataFrame(4, 1);
    labels.AddData(0);
    labels.AddData(1);
    labels.AddData(1);
    labels.AddData(1);

    var trainingSet = new DataFrameIter(features, labels);

    // 2 inputs -> hidden Dense(4, ReLU) -> 1 output.
    var network = new Sequential(new Shape(2), 1);
    network.AddHidden(new Dense(4, ActivationType.ReLU, new GlorotUniform()));
    network.Compile(OptimizerType.SGD, LossType.BinaryCrossEntropy, "accuracy");

    // 100 epochs, minibatch size 2.
    network.Fit(trainingSet, 100, 2);
    network.SaveModel(@"C:\Users\bdkadmin\Desktop\SSHKeys\");
}
/// <summary>
/// Trains an LSTM-based recurrent model to predict the next point of a sine wave
/// from the preceding 9 points, then prints regression metrics for the
/// train/test splits.
/// </summary>
static void Main(string[] args)
{
    // Fixed seed for reproducibility: weight initialization in CNTK layers
    // depends on the CNTK random number generator.
    CNTKLib.SetFixedRandomSeed(0);

    // Create a simulated dataset of sequences describing a sinusoid.
    var dataset = Enumerable.Range(1, 2000)
        .Select(p => Math.Sin(p / 100.0)) // decrease the pitch so that the sine wave is smoother
        .Segment(10)                      // break the sinusoid into segments of 10 elements
        .Select(p => (featureSequence: p.Take(9).Select(q => new[] { q }).ToArray(), // sequence of 9 elements, each of dimension 1 (maybe: 1, 2, 3 ... n)
                      label: new[] { p[9] }))                                        // label for the sequence, dimension 1 (maybe: 1, 2, 3 ... n)
        .ToArray();

    dataset.Split(0.7, out var train, out var test);

    int minibatchSize = 16;
    int epochCount = 300;
    int inputDimension = 1;
    var device = DeviceDescriptor.GPUDevice(0);

    var model = new Sequential<double>(device, new[] { inputDimension }, inputName: "Input");
    model.Add(new LSTM(1, selfStabilizerLayer: new SelfStabilization()));
    model.Add(new Residual2(1, new Tanh()));
    // It is possible to join LSTM layers one after another as in the comment below:
    //var model = new Sequential<double>(device, new[] { inputDimension });
    //model.Add(new Dense(3, new Tanh()));
    //model.Add(new LSTM(10, isLastLstm: false)); // LSTM can also be the first layer in the model
    //model.Add(new LSTM(5, isLastLstm: false));
    //model.Add(new LSTM(2, selfStabilizerLayer: new SelfStabilization()));
    //model.Add(new Residual2(1, new Tanh()));

    // Uses one of several overloads that can train recurrent networks.
    var fitResult = model.Fit(features: train.Select(p => p.featureSequence).ToArray(),
        labels: train.Select(p => p.label).ToArray(),
        minibatchSize: minibatchSize,
        lossFunction: new AbsoluteError(),
        evaluationFunction: new AbsoluteError(),
        optimizer: new Adam(0.005, 0.9, minibatchSize),
        epochCount: epochCount,
        device: device,
        shuffleSampleInMinibatchesPerEpoch: true,
        // FIX: the original checked `learningRate % 50 == 0`, which is never true for a
        // fractional rate (0.005), so the decay never fired. The intent — matching the
        // sibling MNIST example, which uses `epoch % 10 == 0` — is to decay every 50 epochs.
        ruleUpdateLearningRate: (epoch, learningRate) => epoch % 50 == 0 ? 0.95 * learningRate : learningRate,
        actionPerEpoch: (epoch, loss, eval) =>
        {
            Console.WriteLine($"Loss: {loss:F10} Eval: {eval:F3} Epoch: {epoch}");
            if (loss < 0.05) // stopping criterion is reached, save the model to a file and finish training (approximately 112 epochs)
            {
                model.SaveModel($"{model}.model", saveArchitectureDescription: false);
                return (true);
            }
            return (false);
        },
        inputName: "Input");

    Console.WriteLine($"Duration train: {fitResult.Duration}");
    Console.WriteLine($"Epochs: {fitResult.EpochCount}");
    Console.WriteLine($"Loss error: {fitResult.LossError}");
    Console.WriteLine($"Eval error: {fitResult.EvaluationError}");

    var metricsTrain = model
        .Evaluate(train.Select(p => p.featureSequence), train.Select(p => p.label), device)
        .GetRegressionMetrics();
    var metricsTest = model
        .Evaluate(test.Select(p => p.featureSequence), test.Select(p => p.label), device)
        .GetRegressionMetrics();

    Console.WriteLine($"Train => MAE: {metricsTrain[0].MAE} RMSE: {metricsTrain[0].RMSE} R2: {metricsTrain[0].Determination}"); //R2 ~ 0,983
    Console.WriteLine($"Test => MAE: {metricsTest[0].MAE} RMSE: {metricsTest[0].RMSE} R2: {metricsTest[0].Determination}");     //R2 ~ 0,982

    Console.ReadKey();
}
/// <summary>
/// Trains a feed-forward network (two residual layers + dense softmax-like output)
/// on the MNIST dataset read from a zipped CSV, then prints per-class
/// classification metrics for the train and test sets.
/// </summary>
static void Main(string[] args)
{
    var datasetTrain = new List<double[]>();
    var datasetTest = new List<double[]>();

    // Local helper: parse one MNIST CSV stream into rows of
    // [784 pixel features .. 10 one-hot label values].
    // FIX: double.Parse now uses InvariantCulture — the CSV always uses '.' as the
    // decimal separator, so parsing with the machine's current culture (e.g. one
    // that expects ',') would throw or misparse.
    List<double[]> ParseMnistCsv(StreamReader reader) =>
        reader.ReadToEnd()
            .Split('\n')
            .Skip(1) // first row is the header
            .Select(p => p.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
            .Where(p => p.Length > 0)
            .Select(p => p.Select(q => double.Parse(q, System.Globalization.CultureInfo.InvariantCulture)).ToArray())
            .Select(p => p.Skip(1) // column 0 is the label — skip it here...
                .Concat(MnistOneHotEncoding((int)p[0])) // ...and append it to the end of the feature array, one-hot encoded
                .ToArray())
            .ToList();

    using (var zipToOpen = File.OpenRead(@"Dataset\mnist-in-csv.zip"))
    using (ZipArchive archive = new ZipArchive(zipToOpen, ZipArchiveMode.Read))
    {
        using (var train = new StreamReader(archive.GetEntry("mnist_train.csv").Open()))
        {
            datasetTrain = ParseMnistCsv(train);
        }
        using (var test = new StreamReader(archive.GetEntry("mnist_test.csv").Open()))
        {
            datasetTest = ParseMnistCsv(test);
        }
    }

    var device = DeviceDescriptor.GPUDevice(0);
    int minibatchSize = 512;
    int inputDimension = 784;
    int epochs = 50;

    var model = new Sequential<double>(device, new[] { inputDimension }, inputName: "Input");
    model.Add(new Residual2(784, new Tanh()));
    model.Add(new Residual2(300, new Tanh()));
    model.Add(new Dense(10, new Sigmoid()));

    var fitResult = model.Fit(datasetTrain,
        inputDimension,
        minibatchSize,
        new SquaredError(),
        new ClassificationError(),
        new Adam(0.1, 0.9, minibatchSize),
        epochs,
        shuffleSampleInMinibatchesPerEpoch: false,
        device: device,
        // Decay the learning rate by 5% every 10 epochs.
        ruleUpdateLearningRate: (epoch, learningRate) => epoch % 10 == 0 ? 0.95 * learningRate : learningRate,
        actionPerEpoch: (epoch, loss, eval) =>
        {
            Console.WriteLine($"Loss: {loss:F10} Eval: {eval:F3} Epoch: {epoch}");
            if (eval < 0.05) // classification error is below 5% — save the model to a file and finish training
            {
                model.SaveModel($"{model}.model", saveArchitectureDescription: false);
                return (true);
            }
            return (false);
        },
        inputName: "Input");

    Console.WriteLine($"Duration train: {fitResult.Duration}");
    Console.WriteLine($"Epochs: {fitResult.EpochCount}");
    Console.WriteLine($"Loss error: {fitResult.LossError}");
    Console.WriteLine($"Eval error: {fitResult.EvaluationError}");

    var metricsTrain = model
        .Evaluate(datasetTrain, inputDimension, device)
        .GetOneLabelClassificationMetrics();
    Console.WriteLine($"---Train---");
    Console.WriteLine($"Accuracy: {metricsTrain.Accuracy}");
    metricsTrain.ClassesDistribution.ForEach(p => Console.WriteLine($"Class: {p.Index} | Precision: {p.Precision:F5} | Recall: {p.Recall:F5} | Fraction: {p.Fraction * 100:F3}"));

    var metricsTest = model
        .Evaluate(datasetTest, inputDimension, device)
        .GetOneLabelClassificationMetrics();
    Console.WriteLine($"---Test---");
    Console.WriteLine($"Accuracy: {metricsTest.Accuracy}");
    metricsTest.ClassesDistribution.ForEach(p => Console.WriteLine($"Class: {p.Index} | Precision: {p.Precision:F5} | Recall: {p.Recall:F5} | Fraction: {p.Fraction * 100:F3}"));

    Console.Read();
}
/// <summary>
/// Trains an LSTM-based recurrent model to predict the next point of a sine wave
/// from the preceding 9 points, then prints regression metrics for the
/// train/test splits. (Comments translated from Russian.)
/// </summary>
static void Main(string[] args)
{
    // Fixed seed for reproducibility: weight initialization in CNTK layers
    // depends on the CNTK random number generator.
    CNTKLib.SetFixedRandomSeed(0);

    // Create a simulated dataset of sequences describing a sinusoid.
    var dataset = Enumerable.Range(1, 2000)
        .Select(p => Math.Sin(p / 100.0)) // decrease the step so the sinusoid is smoother
        .Segment(10)                      // split the sinusoid into segments of 10 elements
        .Select(p => (featureSequence: p.Take(9).Select(q => new[] { q }).ToArray(), // sequence of 9 elements, each of dimension 1 (can be: 1, 2, 3...n)
                      label: new[] { p[9] }))                                        // label for the sequence, dimension 1 (can be: 1, 2, 3...n)
        .ToArray();

    dataset.Split(0.7, out var train, out var test);

    int minibatchSize = 16;
    int epochCount = 300;
    int inputDimension = 1;
    var device = DeviceDescriptor.GPUDevice(0);

    var model = new Sequential<double>(device, new[] { inputDimension }, inputName: "Input");
    model.Add(new LSTM(1, selfStabilizerLayer: new SelfStabilization()));
    model.Add(new Residual2(1, new Tanh()));
    // LSTM layers can be stacked one after another as in the comment below:
    //var model = new Sequential<double>(device, new[] { inputDimension });
    //model.Add(new Dense(3, new Tanh()));
    //model.Add(new LSTM(10, isLastLstm: false)); // LSTM can also be the first layer in the model
    //model.Add(new LSTM(5, isLastLstm: false));
    //model.Add(new LSTM(2, selfStabilizerLayer: new SelfStabilization()));
    //model.Add(new Residual2(1, new Tanh()));

    // Uses one of several overloads that can train recurrent networks.
    var fitResult = model.Fit(features: train.Select(p => p.featureSequence).ToArray(),
        labels: train.Select(p => p.label).ToArray(),
        minibatchSize: minibatchSize,
        lossFunction: new AbsoluteError(),
        evaluationFunction: new AbsoluteError(),
        optimizer: new Adam(0.005, 0.9, minibatchSize),
        epochCount: epochCount,
        device: device,
        shuffleSampleInMinibatchesPerEpoch: true,
        // FIX: the original checked `learningRate % 50 == 0`, which is never true for a
        // fractional rate (0.005), so the decay never fired. The intent — matching the
        // sibling MNIST example, which uses `epoch % 10 == 0` — is to decay every 50 epochs.
        ruleUpdateLearningRate: (epoch, learningRate) => epoch % 50 == 0 ? 0.95 * learningRate : learningRate,
        actionPerEpoch: (epoch, loss, eval) =>
        {
            Console.WriteLine($"Loss: {loss:F10} Eval: {eval:F3} Epoch: {epoch}");
            if (loss < 0.05) // stopping criterion reached: save the model to a file and finish training (at approximately epoch 112)
            {
                model.SaveModel($"{model}.model", saveArchitectureDescription: false);
                return (true);
            }
            return (false);
        },
        inputName: "Input");

    Console.WriteLine($"Duration train: {fitResult.Duration}");
    Console.WriteLine($"Epochs: {fitResult.EpochCount}");
    Console.WriteLine($"Loss error: {fitResult.LossError}");
    Console.WriteLine($"Eval error: {fitResult.EvaluationError}");

    var metricsTrain = model
        .Evaluate(train.Select(p => p.featureSequence), train.Select(p => p.label), device)
        .GetRegressionMetrics();
    var metricsTest = model
        .Evaluate(test.Select(p => p.featureSequence), test.Select(p => p.label), device)
        .GetRegressionMetrics();

    Console.WriteLine($"Train => MAE: {metricsTrain[0].MAE} RMSE: {metricsTrain[0].RMSE} R2: {metricsTrain[0].Determination}"); //R2 ~ 0,983
    Console.WriteLine($"Test => MAE: {metricsTest[0].MAE} RMSE: {metricsTest[0].RMSE} R2: {metricsTest[0].Determination}");     //R2 ~ 0,982

    Console.ReadKey();
}