// Requires: using System; using System.IO; using Keras; using Keras.Layers; using Keras.Models; using Numpy;
static void Main(string[] args)
{
    // Load training data (XOR truth table)
    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: "sgd", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

    // Save model architecture and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");

    var result = loaded_model.Predict(x);
    Console.WriteLine("Prediction for [{0}] = [{1}]", x.ToString(), result.ToString());
}
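// Hedged addition, not part of the original example: a small helper that converts the
// sigmoid outputs returned by Predict into 0/1 labels. It assumes Numpy.NET's
// NDarray.GetData<float>() accessor and requires "using System.Linq;".
static int[] ToLabels(NDarray predictions, float threshold = 0.5f)
{
    float[] probabilities = predictions.GetData<float>();
    return probabilities.Select(p => p >= threshold ? 1 : 0).ToArray();
}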
public void Load_model(string name)
{
    // Rebuild the architecture from JSON, then load the trained weights from the given .h5 file
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight(name);
    // loaded_model.LoadModel(name); // redundant: the model is already reconstructed from JSON + weights
}
public static void Run()
{
    // Load training data (XOR truth table)
    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: new Adam(), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    var history = model.Fit(x, y, batch_size: 2, epochs: 100, verbose: 1);
    var logs = history.HistoryLogs;

    // Save model architecture and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");
}
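// Hedged addition, not part of the original example: print the per-epoch metrics captured
// in "logs" above. This assumes History.HistoryLogs is a Dictionary<string, double[]>
// keyed by metric name (e.g. "loss", "accuracy"); verify against your Keras.NET version.
// Usage after Fit: PrintHistory(history.HistoryLogs);
static void PrintHistory(Dictionary<string, double[]> logs)
{
    foreach (var metric in logs)
    {
        Console.WriteLine("{0}: {1}", metric.Key, string.Join(", ", metric.Value));
    }
}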
public static void Run()
{
    // Load training data (XOR truth table)
    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    // Build functional model
    var input = new Input(shape: new Keras.Shape(2));
    var hidden1 = new Dense(32, activation: "relu").Set(input);
    var hidden2 = new Dense(64, activation: "relu").Set(hidden1);
    var output = new Dense(1, activation: "sigmoid").Set(hidden2);
    var model = new Keras.Models.Model(new Input[] { input }, new BaseLayer[] { output });

    // Compile and train
    model.Compile(optimizer: new Adam(), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    var history = model.Fit(x, y, batch_size: 2, epochs: 10, verbose: 1);
    //var weights = model.GetWeights();
    //model.SetWeights(weights);
    var logs = history.HistoryLogs;

    // Save model architecture and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");
}
private static void test()
{
    Console.WriteLine("Starting neural network testing");
    var index = 1;
    var rows = File.ReadAllLines(TEST_PATH).Skip(1).Take(TEST_ROWS_COUNT).ToList();

    BaseModel loadedModel;
    if (model == null)
    {
        //model = (Sequential)BaseModel.ModelFromJson(File.ReadAllText("model.json")); // cannot cast BaseModel to Sequential
        loadedModel = Sequential.ModelFromJson(File.ReadAllText("model.json"));
        loadedModel.LoadWeight("model.h5");
        loadedModel.Compile(loss: "mean_squared_error", optimizer: new SGD(lr: learningRate), metrics: new string[] { "accuracy" });
    }
    else
    {
        loadedModel = model;
    }

    foreach (var row in rows)
    {
        Console.WriteLine("Iteration {0} of {1}", index++, TEST_ROWS_COUNT);
        var values = row.Split(',');
        var correctNumber = byte.Parse(values[0]);

        // Build the input vector and scale pixel values to [0, 1]
        float[,] inputArray = new float[1, INPUT_LAYER_SIZE];
        byte[] inputValues = values.Skip(1).Select(x => byte.Parse(x)).ToArray();
        for (int i = 0; i < inputValues.Length; i++)
        {
            inputArray[0, i] = inputValues[i];
        }
        var input = new NDarray(inputArray);
        input = input.astype(np.float32);
        input /= 255;

        // One-hot encode the expected digit
        float[,] outputArray = new float[1, RESULT_LAYER_SIZE];
        outputArray[0, correctNumber] = 1;
        var output = new NDarray(outputArray);

        var score = loadedModel.Evaluate(input, output, verbose: 0);
        Console.WriteLine($"Test loss: {score[0]}");
        Console.WriteLine($"Test accuracy: {score[1]}");

        var outputActual = loadedModel.Predict(input, verbose: 0);
        var x = outputActual.argmax();
        calculateStatistics(correctNumber, int.Parse(x.str));
    }
}
private void TestNeuralNetwork(string testCsvPath, int nb_classes, Dictionary<string, int> dictionaryLikeIMDB, int max_news_len)
{
    NDarray x_test = null;
    NDarray y_test = null;

    // Read the test set (label;text) with Deedle
    var testCSV = Frame.ReadCsv(testCsvPath, false, separators: ";");
    var testYFloat = testCSV.Rows.Select(kvp => kvp.Value.GetAs<float>("Column1")).ValuesAll.ToList();
    var testXString = testCSV.Rows.Select(kvp => kvp.Value.GetAs<string>("Column2")).ValuesAll.ToList();
    var testXStringArray = testXString.ToArray();

    y_test = np.array(testYFloat.ToArray());
    y_test = Util.ToCategorical(y_test, nb_classes);

    // Tokenize and encode the texts with the same dictionary used during training
    string[][] tokens_test = testXStringArray.Tokenize();
    int[][] bow_test = FrequencyDictionary.Transform(tokens_test, dictionaryLikeIMDB);
    //double[][] bow_test = codebook.Transform(tokens_test);

    var list_test = new List<NDarray>();
    foreach (var item in bow_test)
    {
        //var newItem = item.Take(100).ToArray();
        //var ndarray = np.array(newItem);
        var ndarray = np.array(item);
        list_test.Add(ndarray);
    }
    var sequences_test = np.array(list_test);
    x_test = SequenceUtil.PadSequences(sequences_test, maxlen: max_news_len);

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("best_model_gru.h5");
    loaded_model.Compile(optimizer: "adam", loss: "categorical_crossentropy", metrics: new string[] { "accuracy" });
    loaded_model.Summary();

    var scores = loaded_model.Evaluate(x_test, y_test, verbose: 0);
    Console.WriteLine("Test loss: " + scores[0]);
    Console.WriteLine("Test accuracy: " + scores[1] * 100);
}
public bool LoadModel()
{
    if (File.Exists(ModelSettingFilename))
    {
        generatorModel = Sequential.ModelFromJson(File.ReadAllText(ModelSettingFilename));
        if (File.Exists(ModelWeightFilename))
        {
            generatorModel.LoadWeight(ModelWeightFilename);
        }
        return true;
    }
    else
    {
        return false;
    }
}
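// Hedged usage sketch, not part of the original class: run the generator only if the model
// files could be loaded. Assumes generatorModel is the Sequential field populated by
// LoadModel() above and that the caller supplies an already-prepared NDarray.
public NDarray GenerateIfLoaded(NDarray input)
{
    if (!LoadModel())
    {
        return null; // ModelSettingFilename not found on disk
    }
    return generatorModel.Predict(input);
}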
public static void Init()
{
    try
    {
        using (Py.GIL())
        {
            if (model == null)
            {
                string path = Directory.GetCurrentDirectory();
                model = Sequential.ModelFromJson(File.ReadAllText(path + MODEL_PATH));
                model.LoadWeight(path + WEIGHTS_PATH);
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        throw;
    }
}
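// Hedged usage sketch, not part of the original class: ensure Init() has run before
// predicting, mirroring the Py.GIL() pattern used above. MODEL_PATH, WEIGHTS_PATH and the
// static "model" field are the ones declared in the surrounding class.
public static NDarray PredictSafely(NDarray input)
{
    Init();
    using (Py.GIL())
    {
        return model.Predict(input);
    }
}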
public static void Run()
{
    // Load training data
    NDarray dataset = np.loadtxt(fname: "C:/Project/LSTMCoreApp/pima-indians-diabetes.data.csv", delimiter: ",");
    var X = dataset[":, 0:8"];
    var Y = dataset[":, 8"];

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(12, input_dim: 8, kernel_initializer: "uniform", activation: "relu"));
    model.Add(new Dense(8, kernel_initializer: "uniform", activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(X, Y, batch_size: 10, epochs: 150, verbose: 1);

    // Evaluate model
    var scores = model.Evaluate(X, Y, verbose: 1);
    Console.WriteLine("Accuracy: {0}", scores[1] * 100);

    // Save model architecture and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");
    Console.WriteLine("Saved model to disk");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");
    Console.WriteLine("Loaded model from disk");

    // Evaluate the reloaded model (not the original) to confirm the round trip
    loaded_model.Compile(optimizer: "rmsprop", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    scores = loaded_model.Evaluate(X, Y, verbose: 1);
    Console.WriteLine("Accuracy: {0}", scores[1] * 100);
}
public void ReadModel(int player_version)
{
    model = Sequential.ModelFromJson(File.ReadAllText("player_NN.json"));
    model.LoadWeight("version" + player_version.ToString().PadLeft(4, '0') + ".h5");
}
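// Hedged usage sketch, not part of the original class: load a specific player version and
// run one prediction with it. "model" is the field set by ReadModel above; the input
// NDarray is whatever board encoding the caller already uses.
public NDarray PredictWithVersion(int player_version, NDarray boardState)
{
    ReadModel(player_version);          // player_NN.json + the versionNNNN.h5 selected above
    return model.Predict(boardState);
}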