Example #1
        public static void SmallNetwork(List <Tuple <bool, float[]> > train, List <Tuple <bool, float[]> > test)
        {
            int vectorSize = train[0].Item2.Length;
            //Load train data
            var nTrain = ListToNDarrays(train, vectorSize);
            var nTest  = ListToNDarrays(test, vectorSize);

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(8, activation: "relu", input_shape: new Shape(vectorSize)));
            model.Add(new Dropout(0.5));
            model.Add(new Dense(16, activation: "relu"));
            model.Add(new Dropout(0.5));
            model.Add(new Dense(1, activation: "sigmoid"));

            //Compile and train
            //model.Compile(optimizer:"adam", loss:"sparse_categorical_crossentropy", metrics: new string[] { "accuracy" });
            model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            model.Fit(
                nTrain.Item2,
                nTrain.Item1,
                batch_size: 8,
                epochs: 50,
                verbose: 1,
                validation_data: new NDarray[] { nTest.Item2, nTest.Item1 });

            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("./models/sm_model.json", json);
            model.SaveWeight("./models/sm_model.h5");
        }
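The `ListToNDarrays` helper is defined elsewhere in this project. Below is a minimal sketch of what the call sites imply (Item1 = label vector, Item2 = feature matrix, matching the argument order in `model.Fit(nTrain.Item2, nTrain.Item1, ...)`), assuming the same Keras.NET/Numpy.NET usings as the example; treat it as a reconstruction, not the original helper.

        // Hypothetical reconstruction of the ListToNDarrays helper used above.
        public static Tuple<NDarray, NDarray> ListToNDarrays(List<Tuple<bool, float[]>> data, int vectorSize)
        {
            var labels   = new float[data.Count];
            var features = new float[data.Count, vectorSize];

            for (int i = 0; i < data.Count; i++)
            {
                labels[i] = data[i].Item1 ? 1f : 0f;
                for (int j = 0; j < vectorSize; j++)
                {
                    features[i, j] = data[i].Item2[j];
                }
            }

            return Tuple.Create(np.array(labels), np.array(features));
        }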
Example #2
        public static void Train()
        {
            var model = new Sequential();

            // embedding layer
            model.Add(new Embedding(output, 100, input_length: 32));

            model.Add(new Conv1D(64, 3, padding: "causal", activation: "tanh"));
            model.Add(new Dropout(0.2));
            model.Add(new MaxPooling1D(2));


            model.Add(new Conv1D(128, 3, activation: "relu", dilation_rate: 2, padding: "causal"));
            model.Add(new Dropout(0.2));
            model.Add(new MaxPooling1D(2));

            model.Add(new Conv1D(256, 3, activation: "relu", dilation_rate: 4, padding: "causal"));
            model.Add(new Dropout(0.2));
            model.Add(new MaxPooling1D(2));

            //model.Add(new Conv1D(256, 5, activation: "relu"));
            model.Add(new GlobalMaxPooling1D());

            model.Add(new Dense(256, activation: "relu"));
            model.Add(new Dense(output, activation: "softmax"));

            model.Compile(loss: "sparse_categorical_crossentropy", optimizer: new Adam());
            model.Summary();

            var mc      = new ModelCheckpoint("best_model.h5", monitor: "val_loss", mode: "min", save_best_only: true, verbose: 1);
            var history = model.Fit(train_x, train_y, batch_size: 32, epochs: 100, validation_split: 0.25f, verbose: 1, callbacks: new Callback[] { mc });

            model.Save("last_epoch.h5");
        }
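The `output`, `train_x`, and `train_y` fields come from the surrounding class. A hedged sketch of the setup the training code implies (vocabulary size plus 32-token sliding windows, with the next token as the label, matching `input_length: 32` and the sparse categorical loss); all names and shapes here are assumptions.

        // Hypothetical field setup matching the call sites above.
        static int output;          // vocabulary size
        static NDarray train_x;     // shape: (samples, 32), token ids
        static NDarray train_y;     // shape: (samples,), id of the next token

        static void PrepareData(int[] tokenIds)
        {
            output = tokenIds.Max() + 1;
            const int window = 32;
            int samples = tokenIds.Length - window;
            var x = new float[samples, window];
            var y = new float[samples];
            for (int i = 0; i < samples; i++)
            {
                for (int j = 0; j < window; j++)
                    x[i, j] = tokenIds[i + j];
                y[i] = tokenIds[i + window];
            }
            train_x = np.array(x);
            train_y = np.array(y);
        }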
Example #3
        public static void FitMnist()
        {
            var model = new Sequential();

            model.Add(new Conv2D(32, kernelSize: new int[] { 3, 3 }, inputShape: new int[] { 28, 28, 1 }, activation: "relu"));
            model.Add(new Conv2D(64, kernelSize: new int[] { 3, 3 }, activation: "relu"));
            // model.Add(new MaxPooling1D(poolSize: 2));
            model.Add(new MaxPooling2D(poolSize: new int[] { 2, 2 }));
            model.Add(new Dropout(0.25));
            model.Add(new Flatten());
            model.Add(new Dense(128, activation: "relu"));
            model.Add(new Dropout(0.5));
            model.Add(new Dense(10, activation: "softmax"));

            var optimizer = new SGD(lr: 0.01);

            model.Compile("categorical_crossentropy", optimizer, new string[] { "accuracy" });

            var xtrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_xtrain.nda"), FileMode.Open));
            var ytrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_ytrain.nda"), FileMode.Open));

            xtrain = xtrain.Cast(DType.Float32);
            xtrain = Ops.Div(null, xtrain, 255f);

            ytrain = ytrain.Cast(DType.Float32);

            model.Fit(xtrain, ytrain, batchSize: 128, epochs: 12);

            var stream = new FileStream("c:/ttt/mnist.model", FileMode.OpenOrCreate, FileAccess.Write);

            stream.SetLength(0);

            model.Save(stream);
        }
Example #4
        public static void Run()
        {
            //Load train data
            NDarray x = np.array(new float[, ] {
                { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
            });
            NDarray y = np.array(new float[] { 0, 1, 1, 0 });

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
            model.Add(new Dense(64, activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            //Compile and train
            model.Compile(optimizer: new Adam(), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            var history = model.Fit(x, y, batch_size: 2, epochs: 100, verbose: 1);
            var logs    = history.HistoryLogs;
            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("model.json", json);
            model.SaveWeight("model.h5");

            //Load model and weight
            var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));

            loaded_model.LoadWeight("model.h5");
        }
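A short usage sketch one could append at the end of Run(): predict with the reloaded model and read the recorded history. The log key "accuracy" follows the naming seen in Example #17 below; treat it as an assumption for your Keras version (older versions log "acc").

            // XOR inputs should map close to {0, 1, 1, 0} after training.
            var predictions = loaded_model.Predict(x);
            Console.WriteLine(predictions.ToString());

            // HistoryLogs is keyed by metric name.
            var accuracy = logs["accuracy"];
            Console.WriteLine("Final training accuracy: " + accuracy[accuracy.Length - 1]);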
Example #5
        public static void FitMnistSimple()
        {
            var model = new Sequential();

            model.Add(new Dense(512, activation: "relu", inputShape: new int[] { 784 }));
            model.Add(new Dropout(0.2));
            model.Add(new Dense(512, activation: "relu"));
            model.Add(new Dropout(0.2));
            model.Add(new Dense(10, activation: "softmax"));

            var optimizer = new SGD(lr: 0.01);

            model.Compile("categorical_crossentropy", optimizer, new string[] { "accuracy" });

            var xtrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_xtrain.nda"), FileMode.Open));
            var ytrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_ytrain.nda"), FileMode.Open));

            xtrain = xtrain.Cast(DType.Float32);
            xtrain = Ops.Div(null, xtrain, 255f);

            ytrain = ytrain.Cast(DType.Float32);

            model.Fit(xtrain, ytrain, batchSize: 128, epochs: 20);

            var stream = new FileStream("c:/ttt/mnist-simple.model", FileMode.OpenOrCreate, FileAccess.Write);

            stream.SetLength(0);

            model.Save(stream);
        }
Example #6
        public static void Run()
        {
            //var ((x_train, y_train), (x_test, y_test)) = IMDB.LoadData(num_words: top_words);
            var((x_train, y_train), (x_test, y_test)) = LoadDataRussianLanguageToxicComments(
                trainCount: train_count, testCount: test_count, numWords: top_words, maxWords: max_words);

            //No need to pad the arrays to 500 elements, since they are already 500 elements long
            //x_train = SequenceUtil.PadSequences(x_train, maxlen: max_words);
            //x_test = SequenceUtil.PadSequences(x_test, maxlen: max_words);

            //Create model
            Sequential model = new Sequential();

            model.Add(new Embedding(top_words, 32, input_length: max_words));
            model.Add(new Conv1D(filters: 32, kernel_size: 3, padding: "same", activation: "relu"));
            model.Add(new MaxPooling1D(pool_size: 2));
            model.Add(new Flatten());
            model.Add(new Dense(250, activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            model.Compile(loss: "binary_crossentropy", optimizer: "adam", metrics: new string[] { "accuracy" });
            model.Summary();

            // Fit the model
            model.Fit(x_train, y_train, validation_data: new NDarray[] { x_test, y_test },
                      epochs: 2 /*10*/, batch_size: 128, verbose: 2);
            // Final evaluation of the model
            var scores = model.Evaluate(x_test, y_test, verbose: 0);

            Console.WriteLine("Accuracy: " + (scores[1] * 100));

            model.Save("model.h5");
            File.WriteAllText("model.json", model.ToJson());    //save model
            //model.SaveTensorflowJSFormat("./");   //error - Cannot perform runtime binding on a null reference
        }
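To classify a new comment, it must be tokenized with the same vocabulary that produced x_train. Assuming you already have the word indices for one comment, a hedged inference sketch using the `SequenceUtil` API from this example:

        // Hypothetical inference step: 'indices' are word ids (< top_words)
        // for a single tokenized comment.
        static NDarray Classify(Sequential model, float[] indices)
        {
            var sequence = np.array(indices).reshape(1, indices.Length);
            var padded   = SequenceUtil.PadSequences(sequence, maxlen: max_words);
            return model.Predict(padded);    // sigmoid output: probability of the toxic class
        }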
Example #7
        private static void MNISTTraining()
        {
            uint batchSize = 32;
            var  trainIter = new MXDataIter("MNISTIter")
                             .SetParam("image", "./mnist_data/train-images-idx3-ubyte")
                             .SetParam("label", "./mnist_data/train-labels-idx1-ubyte")
                             .SetParam("batch_size", batchSize)
                             .SetParam("flat", 1)
                             .CreateDataIter();
            var valIter = new MXDataIter("MNISTIter")
                          .SetParam("image", "./mnist_data/t10k-images-idx3-ubyte")
                          .SetParam("label", "./mnist_data/t10k-labels-idx1-ubyte")
                          .SetParam("batch_size", batchSize)
                          .SetParam("flat", 1)
                          .CreateDataIter();

            var model = new Sequential(new Shape(28 * 28), 10);

            model.AddHidden(new Dense(28 * 28, ActivationType.ReLU, new GlorotUniform()));
            model.AddHidden(new Dropout(0.25f));
            model.AddHidden(new Dense(28 * 28, ActivationType.ReLU, new GlorotUniform()));

            model.Compile(OptimizerType.SGD, LossType.CategorialCrossEntropy, "accuracy");
            model.Fit(trainIter, 10, batchSize, valIter);
        }
Example #8
        static void Main(string[] args)
        {
            //Load train data
            NDarray x = np.array(new float[, ] {
                { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
            });
            NDarray y = np.array(new float[] { 0, 1, 1, 0 });

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
            model.Add(new Dense(64, activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            //Compile and train
            model.Compile(optimizer: "sgd", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("model.json", json);
            model.SaveWeight("model.h5");

            //Load model and weight
            var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));

            loaded_model.LoadWeight("model.h5");

            var result = loaded_model.Predict(x);

            Console.WriteLine("Prediction for [{0}] = [{1}]", x.ToString(), result.ToString());
        }
Example #9
        private static void ORGate()
        {
            DataFrame train_x = new DataFrame(4, 2);
            DataFrame train_y = new DataFrame(4, 1);

            train_x.AddData(0, 0);
            train_x.AddData(0, 1);
            train_x.AddData(1, 0);
            train_x.AddData(1, 1);

            train_y.AddData(0);
            train_y.AddData(1);
            train_y.AddData(1);
            train_y.AddData(1);

            DataFrameIter train = new DataFrameIter(train_x, train_y);

            Sequential model = new Sequential(new Shape(2), 1);

            model.AddHidden(new Dense(4, ActivationType.ReLU, new GlorotUniform()));

            model.Compile(OptimizerType.SGD, LossType.BinaryCrossEntropy, "accuracy");

            model.Fit(train, 100, 2);
            model.SaveModel(@"C:\Users\bdkadmin\Desktop\SSHKeys\");
        }
Example #10
        public void Dense_CustomKRegularizerAndKInitParams()
        {
            NDarray x = np.array(new float[, ] {
                { 1, 0 }, { 1, 1 }, { 1, 0 }, { 1, 1 }
            });
            NDarray y = np.array(new float[] { 0, 1, 1, 0 });

            var model = new Sequential();

            model.Add(new Dense(1, activation: "sigmoid", input_shape: new Shape(x.shape[1]), kernel_regularizer: new L1L2(1000, 2000), kernel_initializer: new Constant(100)));

            var modelAsJson = JsonConvert.DeserializeObject <dynamic>(model.ToJson());

            Assert.AreEqual("Sequential", modelAsJson.class_name.Value);
            int i = 0;

            while (i < 3 && modelAsJson.config.layers[i].config.kernel_initializer == null)
            {
                i++;
            }
            Assert.AreEqual(100, modelAsJson.config.layers[i].config.kernel_initializer.config.value.Value);
            Assert.AreEqual("Constant", modelAsJson.config.layers[i].config.kernel_initializer.class_name.Value);

            Assert.AreEqual("L1L2", modelAsJson.config.layers[i].config.kernel_regularizer.class_name.Value);
            Assert.AreEqual(1000, modelAsJson.config.layers[i].config.kernel_regularizer.config.l1.Value);
            Assert.AreEqual(2000, modelAsJson.config.layers[i].config.kernel_regularizer.config.l2.Value);

            // Compile and train
            model.Compile(optimizer: new Adam(lr: 0.001F), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            model.Fit(x, y, batch_size: x.shape[0], epochs: 100, verbose: 0);
            Assert.AreEqual(2, model.GetWeights().Count);
        }
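For reference, `L1L2(1000, 2000)` adds the penalty l1*Σ|w| + l2*Σw² to the loss for the layer's kernel weights. A tiny sketch of the computation being configured:

        // The penalty contributed by L1L2(l1: 1000, l2: 2000) for a weight vector w.
        static double L1L2Penalty(double[] w, double l1 = 1000, double l2 = 2000)
        {
            double penalty = 0;
            foreach (var v in w)
                penalty += l1 * Math.Abs(v) + l2 * v * v;
            return penalty;
        }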
Example #11
        // Performs convolutional neural network model training:
        // Incorporated parameters include relu and softmax
        // Adds fixed preprocessing layers and pooling: could use further development with exposed parameters
        private static Sequential ProcessCnnModel(Shape input_shape, NDarray x_train, NDarray y_train, NDarray x_test, NDarray y_test,
                                                  int num_classes, string logname, Config config)
        {
            // Build CNN model
            Sequential model = new Sequential();

            model.Add(new Conv2D(16, kernel_size: (3, 3).ToTuple(), activation: "relu", input_shape: input_shape));
            model.Add(new Conv2D(32, (3, 3).ToTuple(), activation: "relu"));
            model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
            model.Add(new Flatten());

            Callback[] callbacks = GetCallbacks(config.isEarlyStop, logname);

            AddNodes(model, config);

            model.Add(new Dense(num_classes, activation: "softmax"));

            // Compile with loss, metrics and optimizer
            model.Compile(loss: "categorical_crossentropy",
                          optimizer: new Adam(lr: (float)config.LearnRate, decay: (float)config.LearnDecay), metrics: new[] { "accuracy" });

            // Train the model
            model.Fit(x_train, y_train, batch_size: config.Batch, epochs: config.Epochs, verbose: 1,
                      validation_data: new[] { x_test, y_test }, callbacks: callbacks);

            return(model);
        }
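`GetCallbacks` and `AddNodes` are project helpers not shown here. A plausible minimal `GetCallbacks`, assuming Keras.NET's EarlyStopping and TensorBoard callbacks (the parameter choices are guesses, not the original project's values):

        // Hypothetical reconstruction: early stopping on validation loss,
        // plus TensorBoard logging under the given log name.
        private static Callback[] GetCallbacks(bool isEarlyStop, string logname)
        {
            var callbacks = new List<Callback> { new TensorBoard(log_dir: "./logs/" + logname) };
            if (isEarlyStop)
            {
                callbacks.Add(new EarlyStopping(monitor: "val_loss", patience: 3, verbose: 1));
            }
            return callbacks.ToArray();
        }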
Example #12
        public void TrainXOR()
        {
            try {
                //Load train data
                float[,] testX = new float[, ] {
                    { 0, 1 },
                };
                float[,] x = new float[, ] {
                    { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
                };
                float[] y = new float[] { 0, 1, 1, 0 };

                //Build sequential model
                var model = new Sequential();
                model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
                model.Add(new Dense(32, activation: "relu"));
                model.Add(new Dropout(0.1d));
                model.Add(new Dense(1, activation: "sigmoid"));

                //Compile and train
                var optimizer = new Adam();
                model.Compile(optimizer: optimizer, loss: "mse", metrics: new string[] { "accuracy" });
                model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

                float[] predicts;
                predicts = model.Predict(x).GetData <float>();
                predicts = model.PredictOnBatch(x).GetData <float>();
                predicts = model.Predict(x).GetData <float>();
                predicts = model.PredictOnBatch(x).GetData <float>();
                predicts = model.Predict(x).GetData <float>();
                predicts = model.PredictOnBatch(x).GetData <float>();

                Stopwatch watch = new Stopwatch();
                watch.Restart();
                for (int i = 0; i < 5; ++i)
                {
                    predicts = model.PredictOnBatch(testX).GetData <float>();
                }
                watch.Stop();
                string batchMs = watch.GetElapsedMilliseconds().ToString();
                watch.Restart();
                for (int i = 0; i < 5; ++i)
                {
                    predicts = model.Predict(testX).GetData <float>();
                }
                watch.Stop();

                //MainWindow.Instance.Dispatcher.BeginInvoke(new Action(() => {
                //	MainWindow.Instance.DebugTextBox.Text = batchMs + " / " + watch.GetElapsedMilliseconds().ToString();
                //}));
            } catch (Exception ex) {
                //MainWindow.Instance.Dispatcher.BeginInvoke(new Action(() => {
                //	MainWindow.Instance.DebugTextBox.Text = ex.ToString();
                //}));
            }
        }
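`GetElapsedMilliseconds()` is not a member of System.Diagnostics.Stopwatch, so the example presumably relies on a project extension; a guess at that helper:

        // Hypothetical extension method matching the call sites above.
        public static class StopwatchExtensions
        {
            // Elapsed time in milliseconds with sub-millisecond resolution.
            public static double GetElapsedMilliseconds(this Stopwatch watch)
            {
                return watch.Elapsed.TotalMilliseconds;
            }
        }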
Example #13
        /// <summary>
        /// Teaches an agent whose model is a feedforward (non-recurrent) network. Used when the model operates only on the current state of the environment, without taking previous states into account.
        /// </summary>
        /// <param name="agent">Agent to train, a network of the given architecture</param>
        /// <param name="iterationCount">Number of training iterations (epochs)</param>
        /// <param name="rolloutCount">Number of rollouts (in the case of a game - playing the level until the game ends, <seealso cref="Environment.IsTerminated"/>) to complete before the weights are updated.
        /// Can be interpreted as the amount of training data for one epoch.</param>
        /// <param name="minibatchSize">Minibatch size for training</param>
        /// <param name="actionPerIteration">An arbitrary action to run each epoch. Allows interrupting the training process. Input parameters: epoch, loss error, evaluation error.
        /// Output: true - interrupt the training process, false - continue training.
        /// Used for logging, displaying training progress, saving intermediate model checkpoints, etc.</param>
        /// <param name="gamma">Discount factor applied to rewards when computing the discounted reward</param>
        /// <returns></returns>
        public Sequential <T> Teach(Sequential <T> agent, int iterationCount, int rolloutCount, int minibatchSize, Func <int, double, double, bool> actionPerIteration = null, double gamma = 0.99)
        {
            for (int iteration = 0; iteration < iterationCount; iteration++)
            {
                var data = new LinkedList <(int rollout, int actionNumber, T[] state, T[] action, T reward)>();
                for (int rolloutNumber = 0; rolloutNumber < rolloutCount; rolloutNumber++)
                {
                    int actionNumber = 0;
                    while (!Environment.IsTerminated)
                    {
                        var currentState = Environment.GetCurrentState <T>();
                        var action       = agent.Predict(currentState, Device);
                        var reward       = Environment.PerformAction(action);
                        data.AddLast((rolloutNumber, ++actionNumber, currentState, action, reward));
                    }
                    Environment.Reset();
                }
                var discountedRewards = new T[data.Count];
                int rewardIndex       = 0; // global position across all rollouts
                foreach (var rollout in data.GroupBy(p => p.rollout))
                {
                    var steps = rollout.ToList();
                    steps.Sort((a, b) => a.actionNumber.CompareTo(b.actionNumber)); //ascending actionNumber
                    for (int i = 0; i < steps.Count; i++)
                    {
                        var remainingRewards = steps.GetRange(i, steps.Count - i)
                                               .Select(p => Environment.HasRewardOnlyForRollout ? steps[steps.Count - 1].reward : p.reward)
                                               .ToArray();
                        //index by the global step position so later rollouts do not overwrite the rewards of earlier ones
                        discountedRewards[rewardIndex++] = CalculateDiscountedReward(remainingRewards, gamma);
                    }
                }

                var features = data.Select(p => p.state);
                var labels   = data.Zip(discountedRewards, (d, reward) => Multiply(d.action, reward));
                var dataset  = features.Zip(labels, (f, l) => f.Concat(l).ToArray()).ToArray();
                var inputDim = features.FirstOrDefault().Length;

                var fitResult = agent.Fit(dataset,
                                          inputDim,
                                          minibatchSize,
                                          GetLoss()[0],
                                          GetEvalLoss()[0],
                                          GetOptimizer()[0],
                                          1,
                                          false,
                                          Device);
                data.Clear();
                var needStop = actionPerIteration?.Invoke(iteration, fitResult.LossError, fitResult.EvaluationError);
                if (needStop.HasValue && needStop.Value)
                {
                    break;
                }
            }
            return(agent);
        }
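`CalculateDiscountedReward` lives elsewhere in the class; the quantity it names is the standard discounted return G = r0 + γ·r1 + γ²·r2 + …. A sketch for double-valued rewards (the real implementation is generic over T):

        // Discounted return over the remaining rewards of a rollout.
        private static double CalculateDiscountedReward(double[] rewards, double gamma)
        {
            double g = 0, discount = 1;
            foreach (var r in rewards)
            {
                g        += discount * r;
                discount *= gamma;
            }
            return g;
        }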
Example #14
        public static void Run()
        {
            //Load IMDb dataset
            var((x_train, y_train), (x_test, y_test)) = IMDB.LoadData();

            var X = np.concatenate(new NDarray[] { x_train, x_test }, axis: 0);
            var Y = np.concatenate(new NDarray[] { y_train, y_test }, axis: 0);

            Console.WriteLine("Shape of X: " + X.shape);
            Console.WriteLine("Shape of Y: " + Y.shape);

            //We can get an idea of the total number of unique words in the dataset.
            Console.WriteLine("Number of words: ");
            var hstack = np.hstack(new NDarray[] { X });
            //var unique = hstack.unique();
            //Console.WriteLine(np.unique(np.hstack(new NDarray[] { X })).Item1);

            // Load the dataset but only keep the top n words, zero the rest
            int top_words = 1000;// 5000;

            ((x_train, y_train), (x_test, y_test)) = IMDB.LoadData(num_words: top_words);

            int max_words = 500;

            x_train = SequenceUtil.PadSequences(x_train, maxlen: max_words);
            x_test  = SequenceUtil.PadSequences(x_test, maxlen: max_words);

            //Create model
            Sequential model = new Sequential();

            model.Add(new Embedding(top_words, 32, input_length: max_words));
            model.Add(new Conv1D(filters: 32, kernel_size: 3, padding: "same", activation: "relu"));
            model.Add(new MaxPooling1D(pool_size: 2));
            model.Add(new Flatten());
            model.Add(new Dense(250, activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            model.Compile(loss: "binary_crossentropy", optimizer: "adam", metrics: new string[] { "accuracy" });
            model.Summary();

            // Fit the model
            model.Fit(x_train, y_train, validation_data: new NDarray[] { x_test, y_test },
                      epochs: 1 /*10*/, batch_size: 128, verbose: 2);
            // Final evaluation of the model
            var scores = model.Evaluate(x_test, y_test, verbose: 0);

            Console.WriteLine("Accuracy: " + (scores[1] * 100));

            model.Save("model.h5");
            File.WriteAllText("model.json", model.ToJson());    //save model
            //model.SaveTensorflowJSFormat("./");   //error - Cannot perform runtime binding on a null reference
        }
Example #15
        static void Main(string[] args)
        {
            NDArray x = new NDArray(new float[] { 0, 0, 0, 1, 1, 0, 1, 1 }).Reshape(4, 2);
            NDArray y = new NDArray(new float[] { 0, 1, 1, 0 }).Reshape(4, 1);

            var model = new Sequential();

            model.Add(new Dense(4, "relu", input_dim: 2));
            model.Add(new Dense(4, "relu"));
            model.Add(new Dense(1, "sigmoid"));    //sigmoid output to match the binary_crossentropy loss

            model.Compile(new SGD(), "binary_crossentropy", new string[] { "binary_accuracy" });
            model.Fit(x, y, 2, 100);
        }
Example #16
        public static void MainFunc(string[] args)
        {
            Console.WriteLine("Execution begins...");

            Console.WriteLine("Loading data...");
            var train =
                File.ReadAllLines(@"c:\data\mnist\train.csv")
                .Skip(1)
                .Select(x => x.Split(',').Select(double.Parse).ToArray())
                .Take(3000)
                .ToArray();
            var test =
                File.ReadAllLines(@"c:\data\mnist\train.csv")
                .Skip(1)
                .Select(x => x.Split(',').Select(double.Parse).ToArray())
                .Skip(3000)
                .Take(500)
                .ToArray();

            string envPythonHome = "C:\\winapp\\Miniconda3\\envs\\py36\\";
            string envPythonLib  = envPythonHome + "Lib;" + envPythonHome + "Lib\\site-packages";

            Environment.SetEnvironmentVariable("PATH", @"c:\winapp\Miniconda3\envs\py36;c:\winapp\Miniconda3\envs\py36\Library\mingw-w64\bin;c:\winapp\Miniconda3\envs\py36\Library\usr\bin;c:\winapp\Miniconda3\envs\py36\Library\bin;c:\winapp\Miniconda3\envs\py36\Scripts;c:\winapp\Miniconda3\envs\py36\bin;c:\winapp\Miniconda3\condabin;c:\winapp\miniconda\bin;c:\winapp\miniconda\scripts", EnvironmentVariableTarget.Process);
            PythonEngine.PythonHome = envPythonHome;

            Console.WriteLine("One-hot encoding...");
            var train_labels = Util.ToCategorical(np.array(train.Select(x => (int)x[0]).ToArray()));
            var test_labels  = Util.ToCategorical(np.array(test.Select(x => (int)x[0]).ToArray()));

            Console.WriteLine("Normalizing...");
            var train_norm = np.array(flatten(train)).reshape(-1, 785)[":,1:"] / 255.0;
            var test_norm  = np.array(flatten(test)).reshape(-1, 785)[":,1:"] / 255.0;

            var model = new Sequential();

            model.Add(new Dense(100, 784, activation: "relu"));
            model.Add(new Dense(10, activation: "softmax"));
            model.Compile(loss: "categorical_crossentropy",
                          optimizer: new Adadelta(), metrics: new string[] { "accuracy" });
            model.Fit(train_norm, train_labels,
                      batch_size: 32,
                      epochs: 5,
                      validation_data: new NDarray[] { test_norm, test_labels });

            Console.WriteLine("Thanks for all the fish");
            Console.ReadKey();
        }
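The `flatten` helper used before `reshape(-1, 785)` is not shown; given that each CSV row holds a label plus 784 pixels, it presumably concatenates the rows into one flat array, e.g.:

        // Hypothetical helper: concatenates all rows (785 values each) into a
        // single array, which reshape(-1, 785) then restores to 2-D.
        static double[] flatten(double[][] data)
        {
            return data.SelectMany(row => row).ToArray();
        }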
Example #17
        private Tuple<double[], double[], double[]> TrainKerasModel(ModelSettings settings)
        {
            var data = GetData();
            var xTrain = data.Item1.Take((int)Math.Floor(0.8 * data.Item1.Count));
            var yTrain = data.Item2.Take((int)Math.Floor(0.8 * data.Item2.Count));
            var xVal = data.Item1.TakeLast(data.Item1.Count - (int)Math.Floor(0.8 * data.Item1.Count));
            var yVal = data.Item2.TakeLast(data.Item2.Count - (int)Math.Floor(0.8 * data.Item2.Count));
            var xValidation = xVal.Take(xVal.Count() / 2);
            var yValidation = yVal.Take(yVal.Count() / 2);
            var history = _model.Fit(np.array(xTrain), np.array(yTrain), epochs: settings.NoEpochs, validation_data: new NDarray[]
                { np.array(xValidation), np.array(yValidation) });
            var accuracies = history.HistoryLogs["accuracy"];
            var valAccuracies = history.HistoryLogs["val_accuracy"];
            var epochs = new double[history.Epoch.Length];
            for (var i = 0; i < history.Epoch.Length; i++)
                epochs[i] = history.Epoch[i] + 1;
            return new Tuple<double[], double[], double[]>(epochs, accuracies, valAccuracies);
        }
Example #18
        private static void train()
        {
            try
            {
                Console.WriteLine("Starting neural network training");
                var rows = File.ReadAllLines(FILE_PATH).Skip(1).Take(TRAIN_ROWS_COUNT).ToList();

                Console.WriteLine("Filling the dataset with data");
                float[,] inputArray  = new float[rows.Count, INPUT_LAYER_SIZE];
                float[,] outputArray = new float[rows.Count, RESULT_LAYER_SIZE];
                for (int i = 0; i < rows.Count; i++)
                {
                    Console.WriteLine("Iteration {0} of {1}", i + 1, TRAIN_ROWS_COUNT);
                    var values        = rows[i].Split(',');
                    var correctNumber = byte.Parse(values[0]);

                    byte[] inputValues = values.Skip(1).Select(x => byte.Parse(x)).ToArray();
                    for (int j = 0; j < inputValues.Length; j++)
                    {
                        inputArray[i, j] = inputValues[j];
                    }

                    outputArray[i, correctNumber] = 1;
                }

                var input = new NDarray(inputArray);
                input  = input.astype(np.float32);
                input /= 255;
                var output = new NDarray(outputArray);

                Console.WriteLine("Starting training");
                model.Fit(input, output, batch_size: batchSize, epochs: epochs, verbose: 2);

                Console.WriteLine("Saving the model");
                File.WriteAllText("model.json", model.ToJson());
                model.SaveWeight("model.h5");
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
                throw;
            }
        }
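The `model` field and the size constants are defined outside this method. A hedged sketch of the setup the training code implies (784 pixel inputs, 10 one-hot output classes to match `outputArray`, categorical cross-entropy); every value below is an assumption:

        // Hypothetical constants and model setup matching the method above.
        const string FILE_PATH      = "train.csv";
        const int INPUT_LAYER_SIZE  = 784;  // 28x28 pixels per MNIST row
        const int RESULT_LAYER_SIZE = 10;   // digits 0-9
        const int TRAIN_ROWS_COUNT  = 3000;
        static int batchSize = 128;
        static int epochs    = 10;
        static Sequential model = BuildModel();

        static Sequential BuildModel()
        {
            var m = new Sequential();
            m.Add(new Dense(512, activation: "relu", input_shape: new Shape(INPUT_LAYER_SIZE)));
            m.Add(new Dense(RESULT_LAYER_SIZE, activation: "softmax"));
            m.Compile(optimizer: "adam", loss: "categorical_crossentropy", metrics: new string[] { "accuracy" });
            return m;
        }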
Example #19
        public static void Run()
        {
            //Load train data
            NDarray x = np.array(new float[, ] {
                { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
            });
            NDarray y = np.array(new float[] { 0, 1, 1, 0 });

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(4, activation: "relu", input_shape: new Shape(2)));
            model.Add(new Dense(1));

            var stoploss = Callback.Custom("AccHistory", "AccHistory.py");

            //Compile and train
            model.Compile(optimizer: new SGD(), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            var history = model.Fit(x, y, batch_size: 2, epochs: 10, verbose: 1, callbacks: new Callback[] { stoploss });
        }
Example #20
        public void Train()
        {
            // Build CNN model
            _model.Add(new Conv2D(32, kernel_size: (3, 3).ToTuple(),
                                  padding: Settings.PaddingMode,
                                  input_shape: new Shape(Settings.ImgWidth, Settings.ImgHeight, Settings.Channels)));
            _model.Add(new Activation(Settings.ActivationFunction));
            _model.Add(new Conv2D(32, (3, 3).ToTuple()));
            _model.Add(new Activation(Settings.ActivationFunction));
            _model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
            _model.Add(new Dropout(0.25));

            _model.Add(new Conv2D(64, kernel_size: (3, 3).ToTuple(),
                                  padding: Settings.PaddingMode));
            _model.Add(new Activation(Settings.ActivationFunction));
            _model.Add(new Conv2D(64, (3, 3).ToTuple()));
            _model.Add(new Activation(Settings.ActivationFunction));
            _model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
            _model.Add(new Dropout(0.25));

            _model.Add(new Flatten());
            _model.Add(new Dense(Settings.FullyConnectedNodes));
            _model.Add(new Activation(Settings.ActivationFunction));
            _model.Add(new Dropout(0.5));
            _model.Add(new Dense(_dataset.NumberClasses));
            _model.Add(new Softmax());

            _model.Compile(loss: Settings.LossFunction,
                           optimizer: Settings.Optimizer,
                           metrics: new string[] { Settings.Accuracy });

            _model.Fit(_dataset.TrainX, _dataset.TrainY,
                       batch_size: Settings.BatchSize,
                       epochs: Settings.Epochs,
                       validation_data: new NDarray[] { _dataset.ValidationX, _dataset.ValidationY });

            var score = _model.Evaluate(_dataset.ValidationX, _dataset.ValidationY, verbose: 0);

            Console.WriteLine("Test loss:" + score[0]);
            Console.WriteLine("Test accuracy:" + score[1]);
        }
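`Settings` and `_dataset` are project objects; a minimal stand-in for `Settings` with values consistent with the layer stack above (all assumptions, not the original project's configuration):

        // Hypothetical configuration class backing the training code above.
        static class Settings
        {
            public const string PaddingMode        = "same";
            public const string ActivationFunction = "relu";
            public const int    ImgWidth = 32, ImgHeight = 32, Channels = 3;
            public const int    FullyConnectedNodes = 512;
            public const string LossFunction = "categorical_crossentropy";
            public const string Optimizer    = "adam";
            public const string Accuracy     = "accuracy";
            public const int    BatchSize = 32;
            public const int    Epochs    = 25;
        }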
Example #21
        public static void BuildAndTrain()
        {
            //Model to hold the neural network architecture which in this case is WaveNet
            var model = new Sequential();

            // Starts with embedding layer
            model.Add(new Embedding(output, 100, input_length: 32));

            model.Add(new Conv1D(64, 3, padding: "causal", activation: "tanh"));
            model.Add(new Dropout(0.2));
            model.Add(new MaxPooling1D(2));

            model.Add(new Conv1D(128, 3, activation: "relu", dilation_rate: 2, padding: "causal"));
            model.Add(new Dropout(0.2));
            model.Add(new MaxPooling1D(2));

            model.Add(new Conv1D(256, 3, activation: "relu", dilation_rate: 4, padding: "causal"));
            model.Add(new Dropout(0.2));
            model.Add(new MaxPooling1D(2));

            //model.Add(new Conv1D(256, 5, activation: "relu"));
            model.Add(new GlobalMaxPooling1D());

            model.Add(new Dense(256, activation: "relu"));
            model.Add(new Dense(output, activation: "softmax"));

            // Compile with Adam optimizer
            model.Compile(loss: "sparse_categorical_crossentropy", optimizer: new Adam());
            model.Summary();

            // Callback to store the best trained model
            var mc = new ModelCheckpoint("best_model.h5", monitor: "val_loss", mode: "min", save_best_only: true, verbose: 1);

            //Method to actually train the model for 100 iteration
            var history = model.Fit(train_x, train_y, batch_size: 32, epochs: 100, validation_split: 0.25f, verbose: 1, callbacks: new Callback[] { mc });

            // Save the final trained model which we are going to use for prediction
            model.Save("last_epoch.h5");
        }
Example #22
        public static void Run()
        {
            //Load train data
            NDarray dataset = np.loadtxt(fname: "C:/Project/LSTMCoreApp/pima-indians-diabetes.data.csv", delimiter: ",");
            var     X       = dataset[":,0:8"];
            var     Y       = dataset[":,8"];

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(12, input_dim: 8, kernel_initializer: "uniform", activation: "relu"));
            model.Add(new Dense(8, kernel_initializer: "uniform", activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            //Compile and train
            model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            model.Fit(X, Y, batch_size: 10, epochs: 150, verbose: 1);

            //Evaluate model
            var scores = model.Evaluate(X, Y, verbose: 1);

            Console.WriteLine("Accuracy: {0}", scores[1] * 100);

            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("model.json", json);
            model.SaveWeight("model.h5");
            Console.WriteLine("Saved model to disk");
            //Load model and weight
            var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));

            loaded_model.LoadWeight("model.h5");
            Console.WriteLine("Loaded model from disk");

            loaded_model.Compile(optimizer: "rmsprop", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            scores = loaded_model.Evaluate(X, Y, verbose: 1);    //evaluate the reloaded model, not the original
            Console.WriteLine("Accuracy: {0}", scores[1] * 100);
        }
Example #23
        public static bool TrainNetwork(string path)
        {
            Keras.Keras.DisablePySysConsoleLog = true;
            try
            {
                var records = ReadCsv(path);

                NDArray data = records.Select(x =>
                                              new[] {
                    x.Pregnancies,
                    x.Glucose,
                    x.BloodPressure,
                    x.SkinThickness,
                    x.Insulin,
                    x.BMI,
                    x.DiabetesPedigreeFunction,
                    x.Age
                }).ToArray();
                NDArray outcome = records.Select(x => (double)x.Outcome).ToArray();

                NDarray datanew    = data.ToNumpyNET();
                NDarray outcomenew = outcome.ToNumpyNET();

                var model = new Sequential();
                model.Add(new Dense(7, activation: "tanh", input_dim: 8, kernel_initializer: "uniform"));
                model.Add(new Dense(6, activation: "tanh", kernel_initializer: "uniform"));
                model.Add(new Dense(1, activation: "sigmoid", kernel_initializer: "uniform"));

                model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
                model.Fit(datanew, outcomenew, batch_size: 10, epochs: 800, verbose: 1);

                Utilities.Cache.Model = model;
                return(true);
            }catch (Exception e)
            {
                Console.WriteLine(e.Message);
                return(false);
            }
        }
Example #24
        static void Main(string[] args)
        {
            // the data, split between train and test sets
            var((x_train, y_train), (x_test, y_test)) = BostonHousing.LoadData();

            //Explore the data structure using basic C# Commands
            WriteLine($"Type of the Dataset: {y_train.GetType()}");
            WriteLine($"Shape of Training Data : {x_train.shape}");
            WriteLine($"Shape of training label: {y_train.shape}");
            WriteLine($"Shape of Testing Data : {x_test.shape}");
            WriteLine($"Shape of testing Labels : {y_test.shape}");

            //Check the Contents of the training dataset Using the slicing notation
            WriteLine(x_train[":3,:"]);

            // Hold out the rows from index 300 onward as a validation dataset
            var x_val = x_train["300:,"];
            var y_val = y_train["300:,"];

            //Define the model architecture
            var model = new Sequential();

            model.Add(new Dense(13, kernel_initializer: "normal", activation: "relu", input_dim: 13));
            model.Add(new Dense(6, input_dim: 6, activation: "relu", kernel_initializer: "normal"));
            model.Add(new Dense(1, input_dim: 1, kernel_initializer: "normal"));

            //Compile model
            model.Compile(loss: "mean_squared_error", optimizer: "adam", metrics: new string[] { "mean_absolute_percentage_error" });

            //Train the Model
            model.Fit(x_train, y_train, batch_size: 32, epochs: 10, validation_data: new NDarray[] { x_val, y_val });

            var results = model.Evaluate(x_test, y_test, verbose: 0);

            WriteLine("\n\n\n\n===========================================================");
            WriteLine($"Loss : {results[0]}");
            WriteLine($"Mean Absolute Percentage Error : {results[1]}");
        }
Example #25
        public static void Run()
        {
            //Load train data
            NDarray x = np.array(new float[, ] {
                { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
            });
            NDarray y = np.array(new float[] { 0, 1, 1, 0 });

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
            model.Add(new Dense(64, activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            var lossHistory = Callback.Custom("LossHistory", "LossHistory.py");

            //Compile and train
            model.Compile(optimizer: new Adam(), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            var history = model.Fit(x, y, batch_size: 2, epochs: 10, verbose: 1, callbacks: new Callback[] { lossHistory });

            var customLosses = lossHistory.Get <double[]>("losses");
        }
Example #26
        static void Main(string[] args)
        {
            //Mock data fix the file address
            NDarray dataset = Numpy.np.loadtxt(fname: "C:\\Natan\\csharp\\trial11\\pima-indians-diabetes.csv", delimiter: ",");
            var     X       = dataset[":,0:8"];
            var     Y       = dataset[":,8"];
            var     model   = new Sequential();

            model.Add(new Dense(12, input_dim: 8, kernel_initializer: "uniform", activation: "relu"));
            model.Add(new Dense(8, kernel_initializer: "uniform", activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));
            model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            model.Fit(X, Y, batch_size: 10, epochs: 150, verbose: 1);
            model.Save("modelAA.h5");
            double[] scores = model.Evaluate(X, Y);
            foreach (double sc in scores)
            {
                Console.WriteLine(sc);
                //Console.WriteLine(scmodel..metrics_names[1], scores[1] * 100))
            }

            Console.WriteLine("Hello World! we learned");
            Console.ReadKey();
        }
Example #27
        public static void Run()
        {
            int batch_size  = 128;
            int num_classes = 10;
            int epochs      = 12;

            // input image dimensions
            int img_rows = 28, img_cols = 28;

            Shape input_shape = null;

            // the data, split between train and test sets
            var((x_train, y_train), (x_test, y_test)) = MNIST.LoadData();

            if (K.ImageDataFormat() == "channels_first")
            {
                x_train     = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols);
                x_test      = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols);
                input_shape = (1, img_rows, img_cols);
            }
            else
            {
                x_train     = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1);
                x_test      = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1);
                input_shape = (img_rows, img_cols, 1);
            }

            x_train  = x_train.astype(np.float32);
            x_test   = x_test.astype(np.float32);
            x_train /= 255;
            x_test  /= 255;
            Console.WriteLine("x_train shape: " + x_train.shape);
            Console.WriteLine(x_train.shape[0] + " train samples");
            Console.WriteLine(x_test.shape[0] + " test samples");

            // convert class vectors to binary class matrices
            y_train = Util.ToCategorical(y_train, num_classes);
            y_test  = Util.ToCategorical(y_test, num_classes);

            // Build CNN model
            var model = new Sequential();

            model.Add(new Conv2D(32, kernel_size: (3, 3).ToTuple(),
                                 activation: "relu",
                                 input_shape: input_shape));
            model.Add(new Conv2D(64, (3, 3).ToTuple(), activation: "relu"));
            model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
            model.Add(new Dropout(0.25));
            model.Add(new Flatten());
            model.Add(new Dense(128, activation: "relu"));
            model.Add(new Dropout(0.5));
            model.Add(new Dense(num_classes, activation: "softmax"));

            model.Compile(loss: "categorical_crossentropy",
                          optimizer: new Adadelta(), metrics: new string[] { "accuracy" });

            model.Fit(x_train, y_train,
                      batch_size: batch_size,
                      epochs: epochs,
                      verbose: 1,
                      validation_data: new NDarray[] { x_test, y_test });


            var score = model.Evaluate(x_test, y_test, verbose: 0);

            Console.WriteLine("Test loss:" + score[0]);
            Console.WriteLine("Test accuracy:" + score[1]);
        }
Example #28
        public static void Run()
        {
            int batch_size  = 128;
            int num_classes = 10;
            int epochs      = 100;

            // the data, split between train and test sets
            var((x_train, y_train), (x_test, y_test)) = Cifar10.LoadData();

            Console.WriteLine("x_train shape: " + x_train.shape);
            Console.WriteLine(x_train.shape[0] + " train samples");
            Console.WriteLine(x_test.shape[0] + " test samples");

            // convert class vectors to binary class matrices
            y_train = Util.ToCategorical(y_train, num_classes);
            y_test  = Util.ToCategorical(y_test, num_classes);

            // Build CNN model
            var model = new Sequential();

            model.Add(new Conv2D(32, kernel_size: (3, 3).ToTuple(),
                                 padding: "same",
                                 input_shape: new Shape(32, 32, 3)));
            model.Add(new Activation("relu"));
            model.Add(new Conv2D(32, (3, 3).ToTuple()));
            model.Add(new Activation("relu"));
            model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
            model.Add(new Dropout(0.25));

            model.Add(new Conv2D(64, kernel_size: (3, 3).ToTuple(),
                                 padding: "same"));
            model.Add(new Activation("relu"));
            model.Add(new Conv2D(64, (3, 3).ToTuple()));
            model.Add(new Activation("relu"));
            model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
            model.Add(new Dropout(0.25));

            model.Add(new Flatten());
            model.Add(new Dense(512));
            model.Add(new Activation("relu"));
            model.Add(new Dropout(0.5));
            model.Add(new Dense(num_classes));
            model.Add(new Activation("softmax"));

            model.Compile(loss: "categorical_crossentropy",
                          optimizer: new RMSprop(lr: 0.0001f, decay: 1e-6f), metrics: new string[] { "accuracy" });

            x_train  = x_train.astype(np.float32);
            x_test   = x_test.astype(np.float32);
            x_train /= 255;
            x_test  /= 255;

            model.Fit(x_train, y_train,
                      batch_size: batch_size,
                      epochs: epochs,
                      verbose: 1,
                      validation_data: new NDarray[] { x_test, y_test },
                      shuffle: true);

            //Save model and weights
            //string model_path = "./model.json";
            //string weight_path = "./weights.h5";
            //string json = model.ToJson();
            //File.WriteAllText(model_path, json);
            //model.SaveWeight(weight_path);
            model.Save("model.h5");
            model.SaveTensorflowJSFormat("./");

            //Score trained model.
            var score = model.Evaluate(x_test, y_test, verbose: 0);

            Console.WriteLine("Test loss:" + score[0]);
            Console.WriteLine("Test accuracy:" + score[1]);
        }
Example #29
        static void Main(string[] args)
        {
            int batch_size  = 128; //Training batch size
            int num_classes = 10;  //No. of classes
            int epochs      = 12;  //No. of epoches we will train

            // input image dimensions
            int img_rows = 28, img_cols = 28;

            // Declare the input shape for the network
            Shape input_shape = null;

            // Load the MNIST dataset into Numpy array
            var((x_train, y_train), (x_test, y_test)) = MNIST.LoadData();

            //Check if its channel fist or last and rearrange the dataset accordingly
            if (K.ImageDataFormat() == "channels_first")
            {
                x_train     = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols);
                x_test      = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols);
                input_shape = (1, img_rows, img_cols);
            }
            else
            {
                x_train     = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1);
                x_test      = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1);
                input_shape = (img_rows, img_cols, 1);
            }

            //Normalize the input data
            x_train  = x_train.astype(np.float32);
            x_test   = x_test.astype(np.float32);
            x_train /= 255;
            x_test  /= 255;
            Console.WriteLine("x_train shape: " + x_train.shape);
            Console.WriteLine(x_train.shape[0] + " train samples");
            Console.WriteLine(x_test.shape[0] + " test samples");

            // Convert class vectors to binary class matrices
            y_train = Util.ToCategorical(y_train, num_classes);
            y_test  = Util.ToCategorical(y_test, num_classes);

            // Build CNN model
            var model = new Sequential();

            model.Add(new Conv2D(32, kernel_size: (3, 3).ToTuple(),
                                 activation: "relu",
                                 input_shape: input_shape));
            model.Add(new Conv2D(64, (3, 3).ToTuple(), activation: "relu"));
            model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
            model.Add(new Dropout(0.25));
            model.Add(new Flatten());
            model.Add(new Dense(128, activation: "relu"));
            model.Add(new Dropout(0.5));
            model.Add(new Dense(num_classes, activation: "softmax"));

            //Compile with loss, metrics and optimizer
            model.Compile(loss: "categorical_crossentropy",
                          optimizer: new Adadelta(), metrics: new string[] { "accuracy" });

            //Train the model
            model.Fit(x_train, y_train,
                      batch_size: batch_size,
                      epochs: epochs,
                      verbose: 1,
                      validation_data: new NDarray[] { x_test, y_test });


            //Score the model for performance
            var score = model.Evaluate(x_test, y_test, verbose: 0);

            Console.WriteLine("Test loss:" + score[0]);
            Console.WriteLine("Test accuracy:" + score[1]);

            // Save the model to HDF5 format which can be loaded later or ported to other application
            model.Save("model.h5");
            // Save it to Tensorflow JS format and we will test it in browser.
            var v = K.Instance;

            //model.SaveTensorflowJSFormat(@"C:\_temp\");
            //model.SaveOnnx(@"C:\_temp\");
            Console.ReadLine();
        }
Example #30
        private (History, Sequential, Dictionary <string, int>) LearnNeuralNetwork(string trainCsvPath, int num_words, int max_news_len, int nb_classes)
        {
            NDarray x_train = null;
            NDarray y_train = null;

            var trainCSV          = Frame.ReadCsv(trainCsvPath, false, separators: ";");
            var trainYFloat       = trainCSV.Rows.Select(kvp => { return(kvp.Value.GetAs <float>("Column1")); }).ValuesAll.ToList();
            var trainXString      = trainCSV.Rows.Select(kvp => { return(kvp.Value.GetAs <string>("Column2")); }).ValuesAll.ToList();
            var trainXStringArray = trainXString.ToArray();

            //x_train = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
            y_train = np.array(trainYFloat.ToArray());

            y_train = Util.ToCategorical(y_train, nb_classes);

            string[][] tokens = trainXStringArray.Tokenize();

            var dictionaryLikeIMDB = FrequencyDictionary.Learn(tokens);

            var bow = FrequencyDictionary.Transform(tokens, dictionaryLikeIMDB);

            // Create a new TF-IDF with options:

            /*var codebook = new Accord.MachineLearning.TFIDF()
             * {
             *  Tf = TermFrequency.Log,
             *  Idf = InverseDocumentFrequency.Default
             * };
             *
             * codebook.Learn(tokens);
             *
             * double[][] bow = codebook.Transform(tokens);*/

            var list = new List <NDarray>();

            foreach (var item in bow)
            {
                //var newItem = item.Take(max_news_len).ToArray();
                //var ndarray = np.array(newItem);
                var ndarray = np.array(item);
                list.Add(ndarray);
            }

            var sequences = np.array(list);

            //x_train = SequenceUtil.PadSequences(sequences, maxlen: max_news_len, dtype: "double");
            x_train = SequenceUtil.PadSequences(sequences, maxlen: max_news_len);

            var model = new Sequential();

            model.Add(new Embedding(num_words, 32, null, null, null, null, false, max_news_len));
            model.Add(new GRU(138));//16
            model.Add(new Dense(12, activation: "softmax"));

            model.Compile(optimizer: "adam", loss: "categorical_crossentropy", metrics: new string[] { "accuracy" });

            model.Summary();

            var model_gru_save_path     = "best_model_gru.h5";
            var checkpoint_callback_gru = new ModelCheckpoint(
                model_gru_save_path,
                "val_accuracy",
                1,
                true
                );

            var callbacks = new List <Callback>()
            {
                checkpoint_callback_gru
            };

            float validation_split = 0.1f;

            var history_gru = model.Fit(x_train,
                                        y_train,
                                        batch_size: 128,
                                        epochs: 10,
                                        validation_split: validation_split,
                                        callbacks: callbacks.ToArray());

            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("model.json", json);

            return(history_gru, model, dictionaryLikeIMDB);
        }
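`FrequencyDictionary.Learn`/`Transform` come from the surrounding project and mimic the Keras IMDB word-index format (rank 1 = most frequent word). A hedged sketch of the contract the call sites imply:

        // Hypothetical reconstruction: builds a frequency-ranked word index and
        // maps token arrays to arrays of word ids (0 = unknown word).
        static class FrequencyDictionary
        {
            public static Dictionary<string, int> Learn(string[][] tokens)
            {
                return tokens.SelectMany(t => t)
                             .GroupBy(w => w)
                             .OrderByDescending(g => g.Count())
                             .Select((g, rank) => (g.Key, rank))
                             .ToDictionary(p => p.Key, p => p.rank + 1);
            }

            public static float[][] Transform(string[][] tokens, Dictionary<string, int> index)
            {
                return tokens.Select(doc => doc.Select(w => (float)(index.TryGetValue(w, out var id) ? id : 0))
                                               .ToArray())
                             .ToArray();
            }
        }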