public static void Run()
{
    // Load train data (XOR truth table)
    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: new Adam(), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    var history = model.Fit(x, y, batch_size: 2, epochs: 100, verbose: 1);
    var logs = history.HistoryLogs;

    // Save model architecture and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");
}
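These snippets are written against the Keras.NET and Numpy.NET NuGet packages. As a sketch of the context they assume, a typical set of using directives would look roughly like the following (namespace names are taken from current Keras.NET releases and may differ between package versions):

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Keras;              // Shape
using Keras.Datasets;     // Cifar10
using Keras.Layers;       // Dense, Dropout, Conv2D, ...
using Keras.Models;       // Sequential, BaseModel
using Keras.Optimizers;   // Adam, RMSprop
using Keras.Utils;        // Util.ToCategorical
using Numpy;              // NDarray, np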
static void Main(string[] args)
{
    // Load train data (XOR truth table)
    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: "sgd", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

    // Save model architecture and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");

    // Load model and weights, then predict
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");

    var result = loaded_model.Predict(x);
    Console.WriteLine("Prediction for [{0}] = [{1}]", x.ToString(), result.ToString());
}
public static void SmallNetwork(List<Tuple<bool, float[]>> train, List<Tuple<bool, float[]>> test)
{
    int vectorSize = train[0].Item2.Length;

    // Convert the (label, feature-vector) lists into (labels, features) NDarrays;
    // a sketch of ListToNDarrays follows this method
    var nTrain = ListToNDarrays(train, vectorSize);
    var nTest = ListToNDarrays(test, vectorSize);

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(8, activation: "relu", input_shape: new Shape(vectorSize)));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(16, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    //model.Compile(optimizer: "adam", loss: "sparse_categorical_crossentropy", metrics: new string[] { "accuracy" });
    model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(
        nTrain.Item2,
        nTrain.Item1,
        batch_size: 8,
        epochs: 50,
        verbose: 1,
        validation_data: new NDarray[] { nTest.Item2, nTest.Item1 });

    // Save model architecture and weights
    string json = model.ToJson();
    File.WriteAllText("./models/sm_model.json", json);
    model.SaveWeight("./models/sm_model.h5");
}
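The ListToNDarrays helper is not included in the snippet above. The implementation below is a hypothetical sketch, consistent with how nTrain.Item1 (labels) and nTrain.Item2 (features) are used in SmallNetwork; the actual helper may differ.

// Hypothetical helper: converts (label, feature-vector) pairs into a
// (labels, features) pair of NDarrays matching nTrain.Item1 / nTrain.Item2 above.
private static Tuple<NDarray, NDarray> ListToNDarrays(List<Tuple<bool, float[]>> data, int vectorSize)
{
    float[] labels = new float[data.Count];
    float[,] features = new float[data.Count, vectorSize];

    for (int i = 0; i < data.Count; i++)
    {
        labels[i] = data[i].Item1 ? 1f : 0f;
        for (int j = 0; j < vectorSize; j++)
        {
            features[i, j] = data[i].Item2[j];
        }
    }

    return Tuple.Create(np.array(labels), np.array(features));
}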
public void SaveFiles()
{
    if (model == null || string.IsNullOrEmpty(starttime) || string.IsNullOrEmpty(config.Guid))
    {
        throw new Exception("Model Save Error");
    }

    config.StartTime = starttime;
    config.Model = model.ToJson();

    string filename = Utils.GetFileWithoutExtension(model_dir, starttime);
    FileIO.SerializeXml(config, filename + config_ext);
    model.SaveWeight(filename + model_ext);
}
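The matching load path is not shown. The sketch below is hypothetical: it assumes FileIO exposes a DeserializeXml counterpart to SerializeXml and that the config object is of some Config type, neither of which appears in the snippet above.

// Hypothetical counterpart to SaveFiles: rebuild the model from the saved
// config XML and weight file. FileIO.DeserializeXml<T> and the Config type
// are assumed names, not taken from the original code.
public void LoadFiles(string starttime)
{
    string filename = Utils.GetFileWithoutExtension(model_dir, starttime);
    config = FileIO.DeserializeXml<Config>(filename + config_ext);
    model = Sequential.ModelFromJson(config.Model);
    model.LoadWeight(filename + model_ext);
}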
private static void train()
{
    try
    {
        Console.WriteLine("Starting neural network training");
        var rows = File.ReadAllLines(FILE_PATH).Skip(1).Take(TRAIN_ROWS_COUNT).ToList();

        Console.WriteLine("Filling the dataset with data");
        float[,] inputArray = new float[rows.Count, INPUT_LAYER_SIZE];
        float[,] outputArray = new float[rows.Count, RESULT_LAYER_SIZE];
        for (int i = 0; i < rows.Count; i++)
        {
            Console.WriteLine("Iteration {0} of {1}", i + 1, TRAIN_ROWS_COUNT);
            var values = rows[i].Split(',');
            var correctNumber = byte.Parse(values[0]);
            byte[] inputValues = values.Skip(1).Select(x => byte.Parse(x)).ToArray();
            for (int j = 0; j < inputValues.Length; j++)
            {
                inputArray[i, j] = inputValues[j];
            }

            // One-hot encode the expected class
            outputArray[i, correctNumber] = 1;
        }

        // Scale pixel values to [0, 1]
        var input = new NDarray(inputArray);
        input = input.astype(np.float32);
        input /= 255;
        var output = new NDarray(outputArray);

        Console.WriteLine("Starting training");
        model.Fit(input, output, batch_size: batchSize, epochs: epochs, verbose: 2);

        Console.WriteLine("Saving the model");
        File.WriteAllText("model.json", model.ToJson());
        model.SaveWeight("model.h5");
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
        throw;
    }
}
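The model field used by train() is constructed elsewhere. As a hypothetical sketch only: for an MNIST-style CSV (assuming INPUT_LAYER_SIZE = 784 pixels and RESULT_LAYER_SIZE = 10 classes), a construction consistent with the one-hot outputArray above could look like this; the layer sizes are illustrative.

// Hypothetical construction of the model field used in train() above.
// INPUT_LAYER_SIZE and RESULT_LAYER_SIZE must match the constants in train().
private static Sequential BuildModel()
{
    var model = new Sequential();
    model.Add(new Dense(128, activation: "relu", input_shape: new Shape(INPUT_LAYER_SIZE)));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(RESULT_LAYER_SIZE, activation: "softmax"));

    // categorical_crossentropy matches the one-hot targets built in train()
    model.Compile(optimizer: "adam", loss: "categorical_crossentropy", metrics: new string[] { "accuracy" });
    return model;
}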
public static void Run()
{
    // Load train data (Pima Indians diabetes dataset)
    NDarray dataset = np.loadtxt(fname: "C:/Project/LSTMCoreApp/pima-indians-diabetes.data.csv", delimiter: ",");
    var X = dataset[":, 0:8"];
    var Y = dataset[":, 8"];

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(12, input_dim: 8, kernel_initializer: "uniform", activation: "relu"));
    model.Add(new Dense(8, kernel_initializer: "uniform", activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(X, Y, batch_size: 10, epochs: 150, verbose: 1);

    // Evaluate model
    var scores = model.Evaluate(X, Y, verbose: 1);
    Console.WriteLine("Accuracy: {0}", scores[1] * 100);

    // Save model architecture and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");
    Console.WriteLine("Saved model to disk");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");
    Console.WriteLine("Loaded model from disk");

    // Re-compile and evaluate the loaded model
    loaded_model.Compile(optimizer: "rmsprop", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    scores = loaded_model.Evaluate(X, Y, verbose: 1);
    Console.WriteLine("Accuracy: {0}", scores[1] * 100);
}
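A short follow-on sketch for inspecting the reloaded model's outputs. It only assumes what the snippet above already uses: Predict returns per-row sigmoid probabilities as an NDarray, and string slicing works as in the dataset indexing.

// Predict on the same feature matrix and print the first few probabilities;
// values above 0.5 correspond to the positive class in this setup.
var probabilities = loaded_model.Predict(X);
Console.WriteLine("Predictions for the first five rows: " + probabilities["0:5, :"].ToString());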
public static void Run()
{
    int batch_size = 128;
    int num_classes = 10;
    int epochs = 100;

    // The data, split between train and test sets
    var ((x_train, y_train), (x_test, y_test)) = Cifar10.LoadData();
    Console.WriteLine("x_train shape: " + x_train.shape);
    Console.WriteLine(x_train.shape[0] + " train samples");
    Console.WriteLine(x_test.shape[0] + " test samples");

    // Convert class vectors to binary class matrices
    y_train = Util.ToCategorical(y_train, num_classes);
    y_test = Util.ToCategorical(y_test, num_classes);

    // Build CNN model
    var model = new Sequential();
    model.Add(new Conv2D(32, kernel_size: (3, 3).ToTuple(), padding: "same", input_shape: new Shape(32, 32, 3)));
    model.Add(new Activation("relu"));
    model.Add(new Conv2D(32, (3, 3).ToTuple()));
    model.Add(new Activation("relu"));
    model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
    model.Add(new Dropout(0.25));

    model.Add(new Conv2D(64, kernel_size: (3, 3).ToTuple(), padding: "same"));
    model.Add(new Activation("relu"));
    model.Add(new Conv2D(64, (3, 3).ToTuple()));
    model.Add(new Activation("relu"));
    model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
    model.Add(new Dropout(0.25));

    model.Add(new Flatten());
    model.Add(new Dense(512));
    model.Add(new Activation("relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(num_classes));
    model.Add(new Activation("softmax"));

    model.Compile(loss: "categorical_crossentropy",
        optimizer: new RMSprop(lr: 0.0001f, decay: 1e-6f),
        metrics: new string[] { "accuracy" });

    // Scale pixel values to [0, 1]
    x_train = x_train.astype(np.float32);
    x_test = x_test.astype(np.float32);
    x_train /= 255;
    x_test /= 255;

    model.Fit(x_train, y_train,
        batch_size: batch_size,
        epochs: epochs,
        verbose: 1,
        validation_data: new NDarray[] { x_test, y_test },
        shuffle: true);

    // Save model and weights
    //string model_path = "./model.json";
    //string weight_path = "weights.h5";
    //string json = model.ToJson();
    //File.WriteAllText(model_path, json);
    model.SaveWeight("weights.h5");
    model.Save("model.h5");
    model.SaveTensorflowJSFormat("./");

    // Score trained model
    var score = model.Evaluate(x_test, y_test, verbose: 0);
    Console.WriteLine("Test loss:" + score[0]);
    Console.WriteLine("Test accuracy:" + score[1]);
}
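To restore the full model saved with model.Save, the sketch below assumes the Keras.NET version in use exposes a BaseModel.LoadModel wrapper around keras.models.load_model; if it does not, save the architecture as JSON (as in the commented-out lines above) and reload via Sequential.ModelFromJson plus LoadWeight, as the earlier snippets do.

// Assumption: BaseModel.LoadModel exists in this Keras.NET version;
// otherwise fall back to ModelFromJson + LoadWeight.
var restored = BaseModel.LoadModel("model.h5");
var restoredScore = restored.Evaluate(x_test, y_test, verbose: 0);
Console.WriteLine("Restored test accuracy:" + restoredScore[1]);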