Example #1
        public static void Run()
        {
            //Load train data
            NDarray x = np.array(new float[, ] {
                { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
            });
            NDarray y = np.array(new float[] { 0, 1, 1, 0 });

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
            model.Add(new Dense(64, activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            //Compile and train
            model.Compile(optimizer: new Adam(), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            var history = model.Fit(x, y, batch_size: 2, epochs: 100, verbose: 1);
            var logs    = history.HistoryLogs;
            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("model.json", json);
            model.SaveWeight("model.h5");

            //Load model and weight
            var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));

            loaded_model.LoadWeight("model.h5");
        }
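Example #1 stores the training history in logs but never reads it. A minimal companion sketch, assuming History.HistoryLogs exposes the per-epoch metrics as a Dictionary<string, double[]> keyed by metric name ("loss", "accuracy"):

        //Hypothetical companion to Run() above: print the last-epoch value of each
        //tracked metric. Assumes HistoryLogs is a Dictionary<string, double[]>.
        public static void PrintFinalMetrics(Dictionary<string, double[]> logs)
        {
            foreach (var entry in logs)
            {
                Console.WriteLine("{0} after final epoch: {1}", entry.Key, entry.Value[entry.Value.Length - 1]);
            }
        }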
Example #2
        static void Main(string[] args)
        {
            //Load train data
            NDarray x = np.array(new float[, ] {
                { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
            });
            NDarray y = np.array(new float[] { 0, 1, 1, 0 });

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
            model.Add(new Dense(64, activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            //Compile and train
            model.Compile(optimizer: "sgd", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("model.json", json);
            model.SaveWeight("model.h5");

            //Load model and weight
            var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));

            loaded_model.LoadWeight("model.h5");

            var result = loaded_model.Predict(x);

            Console.WriteLine("Предсказание для [{0}] = [{1}]", x.ToString(), result.ToString());
        }
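The sigmoid output in result is a probability per sample. A hedged post-processing sketch that thresholds it at 0.5 to recover hard XOR labels (GetData<T>() is the Numpy.NET call for copying an NDarray buffer back into managed memory; the rest is illustrative):

        //Hypothetical post-processing for the XOR demo above: threshold the
        //sigmoid outputs at 0.5 to get hard class labels.
        static void PrintClasses(NDarray result)
        {
            float[] probs = result.GetData<float>();
            for (int i = 0; i < probs.Length; i++)
            {
                Console.WriteLine("Sample {0}: p={1:F3} -> class {2}", i, probs[i], probs[i] >= 0.5f ? 1 : 0);
            }
        }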
Example #3
        public static void SmallNetwork(List <Tuple <bool, float[]> > train, List <Tuple <bool, float[]> > test)
        {
            int vectorSize = train[0].Item2.Length;
            //Load train data
            var nTrain = ListToNDarrays(train, vectorSize);
            var nTest  = ListToNDarrays(test, vectorSize);

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(8, activation: "relu", input_shape: new Shape(vectorSize)));
            model.Add(new Dropout(0.5));
            model.Add(new Dense(16, activation: "relu"));
            model.Add(new Dropout(0.5));
            model.Add(new Dense(1, activation: "sigmoid"));

            //Compile and train
            //model.Compile(optimizer:"adam", loss:"sparse_categorical_crossentropy", metrics: new string[] { "accuracy" });
            model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            model.Fit(
                nTrain.Item2,
                nTrain.Item1,
                batch_size: 8,
                epochs: 50,
                verbose: 1,
                validation_data: new NDarray[] { nTest.Item2, nTest.Item1 });

            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("./models/sm_model.json", json);
            model.SaveWeight("./models/sm_model.h5");
        }
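Example #3 depends on a ListToNDarrays helper that is not shown. A plausible sketch, inferred from the call sites (Item1 carries the labels fed to Fit as y, Item2 the feature matrix fed as x); the original may differ:

        //Hypothetical reconstruction of the helper used above: packs the samples
        //into (labels, features) NDarrays.
        private static Tuple<NDarray, NDarray> ListToNDarrays(List<Tuple<bool, float[]>> data, int vectorSize)
        {
            float[]  labels   = new float[data.Count];
            float[,] features = new float[data.Count, vectorSize];
            for (int i = 0; i < data.Count; i++)
            {
                labels[i] = data[i].Item1 ? 1f : 0f;
                for (int j = 0; j < vectorSize; j++)
                {
                    features[i, j] = data[i].Item2[j];
                }
            }
            return Tuple.Create(np.array(labels), np.array(features));
        }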
Example #4
        public static void Run()
        {
            //var ((x_train, y_train), (x_test, y_test)) = IMDB.LoadData(num_words: top_words);
            var ((x_train, y_train), (x_test, y_test)) = LoadDataRussianLanguageToxicComments(
                trainCount: train_count, testCount: test_count, numWords: top_words, maxWords: max_words);

            //No need to pad the arrays to 500 elements, since they are already 500 elements long
            //x_train = SequenceUtil.PadSequences(x_train, maxlen: max_words);
            //x_test = SequenceUtil.PadSequences(x_test, maxlen: max_words);

            //Create model
            Sequential model = new Sequential();

            model.Add(new Embedding(top_words, 32, input_length: max_words));
            model.Add(new Conv1D(filters: 32, kernel_size: 3, padding: "same", activation: "relu"));
            model.Add(new MaxPooling1D(pool_size: 2));
            model.Add(new Flatten());
            model.Add(new Dense(250, activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            model.Compile(loss: "binary_crossentropy", optimizer: "adam", metrics: new string[] { "accuracy" });
            model.Summary();

            // Fit the model
            model.Fit(x_train, y_train, validation_data: new NDarray[] { x_test, y_test },
                      epochs: 2 /*10*/, batch_size: 128, verbose: 2);
            // Final evaluation of the model
            var scores = model.Evaluate(x_test, y_test, verbose: 0);

            Console.WriteLine("Accuracy: " + (scores[1] * 100));

            model.Save("model.h5");
            File.WriteAllText("model.json", model.ToJson());    //save model
            //model.SaveTensorflowJSFormat("./");   //error - Cannot perform runtime binding on a null reference
        }
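LoadDataRussianLanguageToxicComments is project-specific and not shown. Judging from the call site and the padding comment, its contract is roughly the following; the signature below is a hypothetical sketch, not the original:

        //Hypothetical signature inferred from the call site: returns ((x_train, y_train),
        //(x_test, y_test)) with token-id sequences already padded to maxWords, which is
        //why the PadSequences calls above are commented out.
        private static ((NDarray, NDarray), (NDarray, NDarray)) LoadDataRussianLanguageToxicComments(
            int trainCount, int testCount, int numWords, int maxWords)
        {
            //...tokenize comments, keep the numWords most frequent tokens,
            //pad/truncate each sequence to maxWords, split into train/test...
            throw new NotImplementedException();
        }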
Example #5
        public void Dense_CustomKRegularizerAndKInitParams()
        {
            NDarray x = np.array(new float[, ] {
                { 1, 0 }, { 1, 1 }, { 1, 0 }, { 1, 1 }
            });
            NDarray y = np.array(new float[] { 0, 1, 1, 0 });

            var model = new Sequential();

            model.Add(new Dense(1, activation: "sigmoid", input_shape: new Shape(x.shape[1]), kernel_regularizer: new L1L2(1000, 2000), kernel_initializer: new Constant(100)));

            var modelAsJson = JsonConvert.DeserializeObject <dynamic>(model.ToJson());

            Assert.AreEqual("Sequential", modelAsJson.class_name.Value);
            int i = 0;

            while (i < 3 && modelAsJson.config.layers[i].config.kernel_initializer == null)
            {
                i++;
            }
            Assert.AreEqual(100, modelAsJson.config.layers[i].config.kernel_initializer.config.value.Value);
            Assert.AreEqual("Constant", modelAsJson.config.layers[i].config.kernel_initializer.class_name.Value);

            Assert.AreEqual("L1L2", modelAsJson.config.layers[i].config.kernel_regularizer.class_name.Value);
            Assert.AreEqual(1000, modelAsJson.config.layers[i].config.kernel_regularizer.config.l1.Value);
            Assert.AreEqual(2000, modelAsJson.config.layers[i].config.kernel_regularizer.config.l2.Value);

            // Compile and train
            model.Compile(optimizer: new Adam(lr: 0.001F), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            model.Fit(x, y, batch_size: x.shape[0], epochs: 100, verbose: 0);
            Assert.AreEqual(2, model.GetWeights().Count); // one kernel matrix + one bias vector for the single Dense layer
        }
Example #6
        public static void Run()
        {
            //Load IMDb dataset
            var ((x_train, y_train), (x_test, y_test)) = IMDB.LoadData();

            var X = np.concatenate(new NDarray[] { x_train, x_test }, axis: 0);
            var Y = np.concatenate(new NDarray[] { y_train, y_test }, axis: 0);

            Console.WriteLine("Shape of X: " + X.shape);
            Console.WriteLine("Shape of Y: " + Y.shape);

            //We could get an idea of the total number of unique words in the dataset,
            //but the unique-count step itself is left commented out here.
            Console.WriteLine("Number of words: ");
            var hstack = np.hstack(new NDarray[] { X });
            //var unique = hstack.unique();
            //Console.WriteLine(np.unique(np.hstack(new NDarray[] { X })).Item1);

            // Load the dataset but only keep the top n words, zero the rest
            int top_words = 1000; // 5000;

            ((x_train, y_train), (x_test, y_test)) = IMDB.LoadData(num_words: top_words);

            int max_words = 500;

            x_train = SequenceUtil.PadSequences(x_train, maxlen: max_words);
            x_test  = SequenceUtil.PadSequences(x_test, maxlen: max_words);

            //Create model
            Sequential model = new Sequential();

            model.Add(new Embedding(top_words, 32, input_length: max_words));
            model.Add(new Conv1D(filters: 32, kernel_size: 3, padding: "same", activation: "relu"));
            model.Add(new MaxPooling1D(pool_size: 2));
            model.Add(new Flatten());
            model.Add(new Dense(250, activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            model.Compile(loss: "binary_crossentropy", optimizer: "adam", metrics: new string[] { "accuracy" });
            model.Summary();

            // Fit the model
            model.Fit(x_train, y_train, validation_data: new NDarray[] { x_test, y_test },
                      epochs: 1 /*10*/, batch_size: 128, verbose: 2);
            // Final evaluation of the model
            var scores = model.Evaluate(x_test, y_test, verbose: 0);

            Console.WriteLine("Accuracy: " + (scores[1] * 100));

            model.Save("model.h5");
            File.WriteAllText("model.json", model.ToJson());    //save model
            //model.SaveTensorflowJSFormat("./");   //error - Cannot perform runtime binding on a null reference
        }
Example #7
        public void SaveFiles()
        {
            if (model == null || string.IsNullOrEmpty(starttime) || string.IsNullOrEmpty(config.Guid))
            {
                throw new Exception("Model Save Error");
            }

            config.StartTime = starttime;
            config.Model     = model.ToJson();

            string filename = Utils.GetFileWithoutExtension(model_dir, starttime);

            FileIO.SerializeXml(config, filename + config_ext);
            model.SaveWeight(filename + model_ext);
        }
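Utils.GetFileWithoutExtension and FileIO.SerializeXml are project helpers, not part of Keras.NET. A minimal stand-in for the XML step, assuming config is a plain serializable object:

        //Hypothetical stand-in for the project-specific FileIO.SerializeXml,
        //built on System.Xml.Serialization from the BCL.
        public static void SerializeXml<T>(T obj, string path)
        {
            var serializer = new System.Xml.Serialization.XmlSerializer(typeof(T));
            using (var stream = File.Create(path))
            {
                serializer.Serialize(stream, obj);
            }
        }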
Example #8
        private static void train()
        {
            try
            {
                Console.WriteLine("Начало тренировки нейросети");
                var rows = File.ReadAllLines(FILE_PATH).Skip(1).Take(TRAIN_ROWS_COUNT).ToList();

                Console.WriteLine("Заполняем датасет данными");
                float[,] inputArray  = new float[rows.Count, INPUT_LAYER_SIZE];
                float[,] outputArray = new float[rows.Count, RESULT_LAYER_SIZE];
                for (int i = 0; i < rows.Count; i++)
                {
                    Console.WriteLine("Итерация {0} из {1}", i + 1, TRAIN_ROWS_COUNT);
                    var values        = rows[i].Split(',');
                    var correctNumber = byte.Parse(values[0]);

                    byte[] inputValues = values.Skip(1).Select(x => byte.Parse(x)).ToArray();
                    for (int j = 0; j < inputValues.Length; j++)
                    {
                        inputArray[i, j] = inputValues[j];
                    }

                    outputArray[i, correctNumber] = 1;
                }

                var input = new NDarray(inputArray);
                input  = input.astype(np.float32);
                input /= 255;
                var output = new NDarray(outputArray);

                Console.WriteLine("Запускаем обучение");
                model.Fit(input, output, batch_size: batchSize, epochs: epochs, verbose: 2);

                Console.WriteLine("Сохраняем модель");
                File.WriteAllText("model.json", model.ToJson());
                model.SaveWeight("model.h5");
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
                throw;
            }
        }
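train() references a model field and several constants defined elsewhere in the class. A plausible sketch of that surrounding state for an MNIST-style digits CSV (label first, then 784 pixel values); every value here is an assumption:

        //Hypothetical class-level state implied by train() above; all values are
        //illustrative assumptions for an MNIST-style digits CSV.
        private const string FILE_PATH         = "train.csv";
        private const int    TRAIN_ROWS_COUNT  = 60000;
        private const int    INPUT_LAYER_SIZE  = 784;
        private const int    RESULT_LAYER_SIZE = 10;
        private const int    batchSize         = 128;
        private const int    epochs            = 10;
        private static Sequential model = BuildModel();

        private static Sequential BuildModel()
        {
            var m = new Sequential();
            m.Add(new Dense(512, activation: "relu", input_shape: new Shape(INPUT_LAYER_SIZE)));
            m.Add(new Dense(RESULT_LAYER_SIZE, activation: "softmax"));
            m.Compile(optimizer: "adam", loss: "categorical_crossentropy", metrics: new string[] { "accuracy" });
            return m;
        }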
Example #9
        public static void Run()
        {
            //Load train data
            NDarray dataset = np.loadtxt(fname: "C:/Project/LSTMCoreApp/pima-indians-diabetes.data.csv", delimiter: ",");
            var     X       = dataset[":,0:8"];
            var     Y       = dataset[":,8"];

            //Build sequential model
            var model = new Sequential();

            model.Add(new Dense(12, input_dim: 8, kernel_initializer: "uniform", activation: "relu"));
            model.Add(new Dense(8, kernel_initializer: "uniform", activation: "relu"));
            model.Add(new Dense(1, activation: "sigmoid"));

            //Compile and train
            model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            model.Fit(X, Y, batch_size: 10, epochs: 150, verbose: 1);

            //Evaluate model
            var scores = model.Evaluate(X, Y, verbose: 1);

            Console.WriteLine("Accuracy: {0}", scores[1] * 100);

            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("model.json", json);
            model.SaveWeight("model.h5");
            Console.WriteLine("Saved model to disk");
            //Load model and weight
            var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));

            loaded_model.LoadWeight("model.h5");
            Console.WriteLine("Loaded model from disk");

            loaded_model.Compile(optimizer: "rmsprop", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            scores = loaded_model.Evaluate(X, Y, verbose: 1);
            Console.WriteLine("Accuracy: {0}", scores[1] * 100);
        }
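Beyond re-computing accuracy, the reloaded model can be spot-checked directly. A hedged sketch using the string-based NDarray slicing seen elsewhere in these examples:

        //Hypothetical follow-up: predict on the first five rows with the reloaded model.
        private static void SpotCheck(Sequential loaded_model, NDarray X)
        {
            var sample = X["0:5,:"];
            var preds  = loaded_model.Predict(sample);
            Console.WriteLine("Predictions for first 5 rows: {0}", preds.ToString());
        }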
Example #10
        private (History, Sequential, Dictionary <string, int>) LearnNeuralNetwork(string trainCsvPath, int num_words, int max_news_len, int nb_classes)
        {
            NDarray x_train = null;
            NDarray y_train = null;

            var trainCSV          = Frame.ReadCsv(trainCsvPath, false, separators: ";");
            var trainYFloat       = trainCSV.Rows.Select(kvp => { return(kvp.Value.GetAs <float>("Column1")); }).ValuesAll.ToList();
            var trainXString      = trainCSV.Rows.Select(kvp => { return(kvp.Value.GetAs <string>("Column2")); }).ValuesAll.ToList();
            var trainXStringArray = trainXString.ToArray();

            //x_train = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
            y_train = np.array(trainYFloat.ToArray());

            y_train = Util.ToCategorical(y_train, nb_classes);

            string[][] tokens = trainXStringArray.Tokenize();

            var dictionaryLikeIMDB = FrequencyDictionary.Learn(tokens);

            var bow = FrequencyDictionary.Transform(tokens, dictionaryLikeIMDB);

            // Create a new TF-IDF with options:

            /*var codebook = new Accord.MachineLearning.TFIDF()
             * {
             *  Tf = TermFrequency.Log,
             *  Idf = InverseDocumentFrequency.Default
             * };
             *
             * codebook.Learn(tokens);
             *
             * double[][] bow = codebook.Transform(tokens);*/

            var list = new List <NDarray>();

            foreach (var item in bow)
            {
                //var newItem = item.Take(max_news_len).ToArray();
                //var ndarray = np.array(newItem);
                var ndarray = np.array(item);
                list.Add(ndarray);
            }

            var sequences = np.array(list);

            //x_train = SequenceUtil.PadSequences(sequences, maxlen: max_news_len, dtype: "double");
            x_train = SequenceUtil.PadSequences(sequences, maxlen: max_news_len);

            var model = new Sequential();

            model.Add(new Embedding(num_words, 32, null, null, null, null, false, max_news_len));
            model.Add(new GRU(138));//16
            model.Add(new Dense(12, activation: "softmax"));

            model.Compile(optimizer: "adam", loss: "categorical_crossentropy", metrics: new string[] { "accuracy" });

            model.Summary();

            var model_gru_save_path     = "best_model_gru.h5";
            var checkpoint_callback_gru = new ModelCheckpoint(
                model_gru_save_path,
                "val_accuracy",
                1,
                true
                );

            var callbacks = new List <Callback>()
            {
                checkpoint_callback_gru
            };

            float validation_split = 0.1f;

            var history_gru = model.Fit(x_train,
                                        y_train,
                                        batch_size: 128,
                                        epochs: 10,
                                        validation_split: validation_split,
                                        callbacks: callbacks.ToArray());

            //Save model and weights
            string json = model.ToJson();

            File.WriteAllText("model.json", json);

            return(history_gru, model, dictionaryLikeIMDB);
        }
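Tokenize() comes from Accord.NET, but FrequencyDictionary is a custom type that is not shown. A hypothetical reconstruction consistent with the IMDB convention the variable name hints at (rank 1 = most frequent word, unknown words dropped):

        //Hypothetical reconstruction of the custom FrequencyDictionary used above.
        public static class FrequencyDictionary
        {
            //Rank words by frequency: rank 1 = most frequent, as in the IMDB dataset.
            public static Dictionary<string, int> Learn(string[][] tokens)
            {
                var counts = new Dictionary<string, int>();
                foreach (var doc in tokens)
                {
                    foreach (var word in doc)
                    {
                        counts[word] = counts.TryGetValue(word, out var c) ? c + 1 : 1;
                    }
                }
                return counts.OrderByDescending(kv => kv.Value)
                       .Select((kv, idx) => new { kv.Key, Rank = idx + 1 })
                       .ToDictionary(a => a.Key, a => a.Rank);
            }

            //Map each token to its rank, dropping words missing from the dictionary.
            public static float[][] Transform(string[][] tokens, Dictionary<string, int> dict)
            {
                return tokens.Select(doc => doc.Where(dict.ContainsKey)
                                            .Select(w => (float)dict[w])
                                            .ToArray())
                       .ToArray();
            }
        }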
Example #11
        //https://github.com/SciSharp/Keras.NET#mnist-cnn-example
        //Python example taken from: https://keras.io/examples/mnist_cnn/
        static void Main(string[] args)
        {
            int batch_size  = 128;
            int num_classes = 10;
            int epochs      = 12;

            // input image dimensions
            int img_rows = 28, img_cols = 28;

            Shape input_shape = null;

            // the data, split between train and test sets
            var ((x_train, y_train), (x_test, y_test)) = MNIST.LoadData();

            if (Backend.ImageDataFormat() == "channels_first")
            {
                x_train     = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols);
                x_test      = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols);
                input_shape = (1, img_rows, img_cols);
            }
            else
            {
                x_train     = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1);
                x_test      = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1);
                input_shape = (img_rows, img_cols, 1);
            }

            x_train  = x_train.astype(np.float32);
            x_test   = x_test.astype(np.float32);
            x_train /= 255;
            x_test  /= 255;
            Console.WriteLine($"x_train shape: {x_train.shape}");
            Console.WriteLine($"{x_train.shape[0]} train samples");
            Console.WriteLine($"{x_test.shape[0]} test samples");

            // convert class vectors to binary class matrices
            y_train = Util.ToCategorical(y_train, num_classes);
            y_test  = Util.ToCategorical(y_test, num_classes);

            // Build CNN model
            var model = new Sequential();

            model.Add(new Conv2D(32, kernel_size: (3, 3).ToTuple(),
                                 activation: "relu",
                                 input_shape: input_shape));
            model.Add(new Conv2D(64, (3, 3).ToTuple(), activation: "relu"));
            model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
            model.Add(new Dropout(0.25));
            model.Add(new Flatten());
            model.Add(new Dense(128, activation: "relu"));
            model.Add(new Dropout(0.5));
            model.Add(new Dense(num_classes, activation: "softmax"));

            model.Compile(loss: "categorical_crossentropy",
                          optimizer: new Adadelta(), metrics: new string[] { "accuracy" });

            File.WriteAllText("model.json", model.ToJson());    //save model

            model.Fit(x_train, y_train,
                      batch_size: batch_size,
                      epochs: epochs,
                      verbose: 1,
                      validation_data: new NDarray[] { x_test, y_test });
            var score = model.Evaluate(x_test, y_test, verbose: 0);

            Console.WriteLine($"Test loss: {score[0]}");
            Console.WriteLine($"Test accuracy: {score[1]}");
        }
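This example writes model.json before training but never persists the trained weights. A small follow-up sketch mirroring the save/load round-trip used in the earlier examples:

        //Hypothetical follow-up, mirroring the round-trip from Examples #1 and #2:
        //persist the trained weights and rebuild the model from disk.
        static void SaveAndReload(Sequential model)
        {
            model.SaveWeight("model.h5");
            var reloaded = Sequential.ModelFromJson(File.ReadAllText("model.json"));
            reloaded.LoadWeight("model.h5");
        }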