public void Test_TanH_KerasModel()
{
    // Load the tanh activation test model and run one 1x8 input through it.
    string path = @"tests\test_tanh_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    var inp = new Data2D(1, 8, 1, 1);
    double[] values = { 1, 2, -1, 0, 3, 1, 1, 2 };
    for (int w = 0; w < values.Length; ++w)
    {
        inp[0, w, 0, 0] = values[w];
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    // Output should be 4 channels wide, 1 timestep long.
    Assert.AreEqual(ou.GetDimension().c, 4);
    Assert.AreEqual(ou.GetDimension().w, 1);

    // Expected values produced by the reference Keras model.
    double[] expected = { -0.7615941762924194, 0.9999667406082153, 1.0, 1.0 };
    for (int c = 0; c < expected.Length; ++c)
    {
        Assert.AreEqual(ou[0, 0, c, 0], expected[c], 0.00001);
    }
}
public void Test_SoftSign_KerasModel()
{
    // Load the softsign activation test model and run one 1x8 input through it.
    string path = @"tests\test_softsign_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    var inp = new Data2D(1, 8, 1, 1);
    double[] values = { 1, 2, -1, 0, 3, 1, 1, 2 };
    for (int w = 0; w < values.Length; ++w)
    {
        inp[0, w, 0, 0] = values[w];
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    // Output should be 4 channels wide, 1 timestep long.
    Assert.AreEqual(ou.GetDimension().c, 4);
    Assert.AreEqual(ou.GetDimension().w, 1);

    // Expected values produced by the reference Keras model.
    double[] expected = { -0.5, 0.8461538553237915, 0.9487179517745972, 0.8999999761581421 };
    for (int c = 0; c < expected.Length; ++c)
    {
        Assert.AreEqual(ou[0, 0, c, 0], expected[c], 0.00001);
    }
}
public ReaderKerasModel(string fname)
{
    // Parse the exported JSON and verify it describes a Sequential model;
    // other Keras topologies are not supported by this reader.
    JObject model = JObject.Parse(File.ReadAllText(fname));
    var modelType = (String)model.SelectToken("model_type");
    if (!modelType.Equals("Sequential"))
    {
        throw new Exception("This reader only supports Sequential type models!");
    }

    // Build the executable network from the layer descriptors...
    var seq = new SequentialModel();
    List<IKernelDescriptor> descriptors = ReadDescriptors(model);
    foreach (var descriptor in descriptors)
    {
        seq.Add(descriptor);
    }
    seq.Compile(new DefaultExecutor());

    // ...then load the trained weights into the compiled model.
    List<IData> weights = ReadWeights(model, descriptors);
    seq.SetWeights(weights);

    sequential = seq;
}
public void Test_SoftPlus_KerasModel()
{
    // Load the softplus activation test model and run one 1x8 input through it.
    string path = @"tests\test_softplus_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    var inp = new Data2D(1, 8, 1, 1);
    double[] values = { 1, 2, -1, 0, 3, 1, 1, 2 };
    for (int w = 0; w < values.Length; ++w)
    {
        inp[0, w, 0, 0] = values[w];
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    // Output should be 4 channels wide, 1 timestep long.
    Assert.AreEqual(ou.GetDimension().c, 4);
    Assert.AreEqual(ou.GetDimension().w, 1);

    // Expected values produced by the reference Keras model.
    double[] expected = { 0.31326162815093994, 5.504078388214111, 18.5, 9.000123023986816 };
    for (int c = 0; c < expected.Length; ++c)
    {
        Assert.AreEqual(ou[0, 0, c, 0], expected[c], 0.00001);
    }
}
public void Test_AvgPool1D_1_KerasModel()
{
    // Load the 1D average-pooling test model.
    string path = @"tests\test_avgpool_1D_1_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // 5 timesteps x 2 channels, single batch. Rows = timesteps, cols = channels.
    var inp = new Data2D(1, 5, 2, 1);
    double[,] values = { { 0, 1 }, { 2, 1 }, { 0, 0 }, { 2, 1 }, { 2, 1 } };
    for (int w = 0; w < 5; ++w)
    {
        for (int c = 0; c < 2; ++c)
        {
            inp[0, w, c, 0] = values[w, c];
        }
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    Assert.AreEqual(ou.GetDimension().c, 2);
    Assert.AreEqual(ou.GetDimension().w, 3);

    // Expected pooled averages from the reference Keras model.
    double[,] expected =
    {
        { 0.6666666865348816, 0.6666666865348816 },
        { 1.3333333730697632, 0.6666666865348816 },
        { 1.3333333730697632, 0.6666666865348816 },
    };
    for (int w = 0; w < 3; ++w)
    {
        for (int c = 0; c < 2; ++c)
        {
            Assert.AreEqual(ou[0, w, c, 0], expected[w, c], 0.00001);
        }
    }
}
public void Test_MaxPool1D_2_KerasModel()
{
    // Load the 1D max-pooling test model.
    string path = @"tests\test_maxpool_1D_2_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // 5 timesteps x 2 channels, single batch. Rows = timesteps, cols = channels.
    var inp = new Data2D(1, 5, 2, 1);
    double[,] values = { { 0, 1 }, { 2, 1 }, { 0, 0 }, { 2, 1 }, { 2, 1 } };
    for (int w = 0; w < 5; ++w)
    {
        for (int c = 0; c < 2; ++c)
        {
            inp[0, w, c, 0] = values[w, c];
        }
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    Assert.AreEqual(ou.GetDimension().c, 2);
    Assert.AreEqual(ou.GetDimension().w, 2);

    // Expected pooled maxima from the reference Keras model.
    double[,] expected = { { 2.0, 1.0 }, { 2.0, 1.0 } };
    for (int w = 0; w < 2; ++w)
    {
        for (int c = 0; c < 2; ++c)
        {
            Assert.AreEqual(ou[0, w, c, 0], expected[w, c], 0.00001);
        }
    }
}
/// <summary>
/// Runs the digit-classifier CNN over the given bitmap and returns the
/// 10 raw class scores (one per digit 0-9).
/// </summary>
public double[] Evaluate(Bitmap img)
{
    // Lazily load the Keras model the first time Evaluate is called.
    if (model == null)
    {
        var reader = new ReaderKerasModel(cnn_nn);
        model = reader.GetSequentialExecutor();
    }

    // NOTE(review): assumes img is exactly 28x28 — a larger bitmap would index
    // outside the 28x28 input tensor. TODO confirm callers guarantee this.
    var array = new Data2D(28, 28, 1, 1);
    for (int i = 0; i < img.Height; i++)
    {
        for (int j = 0; j < img.Width; j++)
        {
            Color pixel = img.GetPixel(j, i);
            // Inverts the alpha channel and normalizes to [0, 1]; presumably
            // opaque strokes map to 0 and blank pixels to 1 — verify against
            // the convention the model was trained with.
            double value = 255 - pixel.A;
            value = value / 255;
            array[i, j, 0, 0] = value;
        }
    }

    var result = model.ExecuteNetwork(array) as Data2D;

    // Copy the 10 class scores out of the output tensor into a plain array.
    double[] toreturn = new double[10];
    for (int i = 0; i < 10; i++)
    {
        toreturn[i] = result[0, 0, i, 0];
    }
    return(toreturn);
}
/// <summary>
/// Builds a brain from an explicit weight vector, using the trainer's
/// default model topology.
/// </summary>
/// <param name="weights">Flat weight vector applied to the default model.</param>
private Brain(double[] weights)
{
    _model = TrainManager.Instance.DefaultModel;
    // Layer sizes must be resolved before the weights can be assigned.
    _layers = _model.GetLayers();
    _weights = weights;
    _model.SetWeights(_layers, _weights);
}
public void Test_ReLu_KerasModel()
{
    // Load the ReLU activation test model and run one 1x8 input through it.
    string path = @"tests\test_relu_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    var inp = new Data2D(1, 8, 1, 1);
    double[] values = { 1, 2, -1, 0, 3, 1, 1, 2 };
    for (int w = 0; w < values.Length; ++w)
    {
        inp[0, w, 0, 0] = values[w];
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    // Output should be 4 channels wide, 1 timestep long.
    Assert.AreEqual(ou.GetDimension().c, 4);
    Assert.AreEqual(ou.GetDimension().w, 1);

    // Expected values produced by the reference Keras model.
    double[] expected = { 0.0, 5.5, 18.5, 9.0 };
    for (int c = 0; c < expected.Length; ++c)
    {
        Assert.AreEqual(ou[0, 0, c, 0], expected[c], 0.00001);
    }
}
/// <summary>
/// Loads a brain from disk: ".json" files are parsed as exported Keras
/// models, ".txt" files as binary-serialized SequentialModels.
/// Returns null when the extension is not recognized — callers must null-check.
/// </summary>
public static Brain Import(string path)
{
    SequentialModel model = null;
    Brain brain = null;

    if (path.EndsWith(".json"))
    {
        model = new ReaderKerasModel(path).GetSequentialExecutor();
    }
    else if (path.EndsWith(".txt"))
    {
        model = PersistSequentialModel.DeserializeModel(path);
    }

    if (model != null)
    {
        // Fixed: the statement terminator was misplaced after the if-block
        // ("} } ;"), leaving the initializer assignment unterminated plus a
        // stray empty statement.
        brain = new Brain(model)
        {
            _weights = model.GetWeights()
        };
    }

    return(brain);
}
}
public void Test_Sigmoid_KerasModel()
{
    // Load the sigmoid activation test model and run one 1x8 input through it.
    string path = @"tests\test_sigmoid_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    var inp = new Data2D(1, 8, 1, 1);
    double[] values = { 1, 2, -1, 0, 3, 1, 1, 2 };
    for (int w = 0; w < values.Length; ++w)
    {
        inp[0, w, 0, 0] = values[w];
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    // Output should be 4 channels wide, 1 timestep long.
    Assert.AreEqual(ou.GetDimension().c, 4);
    Assert.AreEqual(ou.GetDimension().w, 1);

    // Expected values produced by the reference Keras model.
    double[] expected = { 0.2689414322376251, 0.9959298968315125, 1.0, 0.9998766183853149 };
    for (int c = 0; c < expected.Length; ++c)
    {
        Assert.AreEqual(ou[0, 0, c, 0], expected[c], 0.00001);
    }
}
public void Test_Softmax_KerasModel()
{
    // Load the softmax activation test model and run one 1x8 input through it.
    string path = @"tests\test_softmax_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    var inp = new Data2D(1, 8, 1, 1);
    double[] values = { 1, 2, -1, 0, 3, 1, 1, 2 };
    for (int w = 0; w < values.Length; ++w)
    {
        inp[0, w, 0, 0] = values[w];
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    // Output should be 4 channels wide, 1 timestep long.
    Assert.AreEqual(ou.GetDimension().c, 4);
    Assert.AreEqual(ou.GetDimension().w, 1);

    // Expected probabilities from the reference Keras model; each element
    // has a tolerance scaled to its magnitude.
    double[] expected = { 3.3980058766758248e-09, 2.26015504267707e-06, 0.9999228715896606, 7.484605885110795e-05 };
    double[] tolerance = { 1e-10, 1e-7, 0.00001, 1e-6 };
    for (int c = 0; c < expected.Length; ++c)
    {
        Assert.AreEqual(ou[0, 0, c, 0], expected[c], tolerance[c]);
    }
}
/// <summary>
/// Serializes the given model to disk in binary form.
/// </summary>
/// <param name="model">Model to persist.</param>
/// <param name="fileName">Destination file (created or overwritten).</param>
public static void SerializeModel(SequentialModel model, string fileName)
{
    // NOTE(security): BinaryFormatter is obsolete and unsafe for untrusted
    // data; kept for compatibility with the existing on-disk format.
    // 'using' guarantees the stream is closed even if Serialize throws
    // (the original leaked the file handle on failure).
    using (Stream stream = File.Create(fileName))
    {
        var serializer = new BinaryFormatter();
        serializer.Serialize(stream, model);
    }
}
public void Test_RepeatVector_KerasModel()
{
    // Load the RepeatVector test model.
    string path = @"tests\test_repeatvector_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // Two batches of a length-4 vector; batch 1 is the negation of batch 0.
    var data = new Data2D(1, 1, 4, 2);
    for (int c = 0; c < 4; ++c)
    {
        data[0, 0, c, 0] = c + 1;
        data[0, 0, c, 1] = -(c + 1);
    }

    Data2D output = model.ExecuteNetwork(data) as Data2D;

    // Checking sizes: the vector is repeated 3 times along the width axis.
    Dimension dim = output.GetDimension();
    Assert.AreEqual(dim.b, 2);
    Assert.AreEqual(dim.c, 4);
    Assert.AreEqual(dim.h, 1);
    Assert.AreEqual(dim.w, 3);

    // Checking calculation: every repetition must equal the input vector.
    for (int w = 0; w < 3; ++w)
    {
        for (int c = 0; c < 4; ++c)
        {
            Assert.AreEqual(output[0, w, c, 0], c + 1, 0.0000001);
            Assert.AreEqual(output[0, w, c, 1], -(c + 1), 0.0000001);
        }
    }
}
public void Test_Conv1D_2_KerasModel()
{
    // Load the 1D convolution test model.
    string path = @"tests\test_conv_1D_2_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // 6 timesteps x 4 channels, single batch. Rows = timesteps, cols = channels.
    var inp = new Data2D(1, 6, 4, 1);
    double[,] values =
    {
        { 0, 1, 2, 1.5 },
        { 1, 0, 0, 0.6 },
        { 2, 1, 2, 2.5 },
        { 1, 0, -1, 0 },
        { 1, -2, 3, 3.5 },
        { 2, 1, 4, 3.5 },
    };
    for (int w = 0; w < 6; ++w)
    {
        for (int c = 0; c < 4; ++c)
        {
            inp[0, w, c, 0] = values[w, c];
        }
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    Assert.AreEqual(ou.GetDimension().c, 3);
    Assert.AreEqual(ou.GetDimension().w, 3);

    // Expected convolution outputs from the reference Keras model.
    double[,] expected =
    {
        { 9.399999618530273, -1.6999998092651367, 4.550000190734863 },
        { 8.5, -4.0, 12.25 },
        { 23.0, 7.5, 14.5 },
    };
    for (int w = 0; w < 3; ++w)
    {
        for (int c = 0; c < 3; ++c)
        {
            Assert.AreEqual(ou[0, w, c, 0], expected[w, c], 0.00001);
        }
    }
}
public void Test_SimpleRNN_KerasModel()
{
    // Load the SimpleRNN test model.
    string path = @"tests\test_simplernn_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // Initialize data: 4 batches of 3 timesteps x 3 channels, values cycling 1..5.
    var data = new Data2D(1, 3, 3, 4);
    int l = 0;
    for (int b = 0; b < 4; ++b)
    {
        for (int w = 0; w < 3; ++w)
        {
            for (int c = 0; c < 3; ++c)
            {
                l += 1;
                data[0, w, c, b] = l % 5 + 1;
            }
        }
    }

    Data2D output = model.ExecuteNetwork(data) as Data2D;

    // Checking sizes: one 4-channel output vector per batch.
    Dimension dim = output.GetDimension();
    Assert.AreEqual(dim.b, 4);
    Assert.AreEqual(dim.c, 4);
    Assert.AreEqual(dim.h, 1);
    Assert.AreEqual(dim.w, 1);

    // Expected outputs (rows = batches) from the reference Keras model.
    double[,] expected =
    {
        { -54, -39, 36, 72 },
        { 12, -19, -10, 10 },
        { -72, 16, 74, 68 },
        { -161, -14, 158, 141 },
    };
    for (int b = 0; b < 4; ++b)
    {
        for (int c = 0; c < 4; ++c)
        {
            Assert.AreEqual(output[0, 0, c, b], expected[b, c], 0.000001);
        }
    }
}
public void Test_Reshape2D_KerasModel()
{
    // Load the Reshape test model.
    string path = @"tests\test_reshape_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // Fill a 3x3x2 input with the consecutive values 2..19 in h, w, c order.
    var data = new Data2D(3, 3, 2, 1);
    int l = 0;
    for (int h = 0; h < 3; ++h)
    {
        for (int w = 0; w < 3; ++w)
        {
            for (int c = 0; c < 2; ++c)
            {
                l += 1;
                data[h, w, c, 0] = l + 1;
            }
        }
    }

    Data2D output = model.ExecuteNetwork(data) as Data2D;

    // Checking sizes: reshaped from (3, 3, 2) to (3, 2, 3).
    Dimension dim = output.GetDimension();
    Assert.AreEqual(dim.b, 1);
    Assert.AreEqual(dim.c, 3);
    Assert.AreEqual(dim.h, 3);
    Assert.AreEqual(dim.w, 2);

    // Checking calculation: the reshaped tensor enumerates 2..19 in h, w, c order.
    int expected = 2;
    for (int h = 0; h < 3; ++h)
    {
        for (int w = 0; w < 2; ++w)
        {
            for (int c = 0; c < 3; ++c)
            {
                Assert.AreEqual(output[h, w, c, 0], expected++, 0.0000001);
            }
        }
    }
}
public void Test_LSTM_KerasModel()
{
    // Load the LSTM test model.
    string path = @"tests\test_lstm_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // Initialize data: 5 batches of 3 timesteps x 3 channels, values in (0, 1].
    var data = new Data2D(1, 3, 3, 5);
    int l = 0;
    for (int b = 0; b < 5; ++b)
    {
        for (int w = 0; w < 3; ++w)
        {
            for (int c = 0; c < 3; ++c)
            {
                l += 1;
                data[0, w, c, b] = (l % 5 + 1) / 10.0;
            }
        }
    }

    Data2D output = model.ExecuteNetwork(data) as Data2D;

    // Checking sizes: one 2-channel output vector per batch.
    Dimension dim = output.GetDimension();
    Assert.AreEqual(dim.b, 5);
    Assert.AreEqual(dim.c, 2);
    Assert.AreEqual(dim.h, 1);
    Assert.AreEqual(dim.w, 1);

    // Expected outputs: channel 0 varies per batch, channel 1 is always zero.
    double[] expected = { 0.015777, 0.01605, 0.016398, 0.006314, 0.016303 };
    for (int b = 0; b < 5; ++b)
    {
        Assert.AreEqual(output[0, 0, 0, b], expected[b], 0.00001);
        Assert.AreEqual(output[0, 0, 1, b], 0.0, 0.00001);
    }
}
public void Test_GRU_KerasModel()
{
    // Load the GRU test model.
    string path = @"tests\test_gru_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // Initialize data: 5 batches of 3 timesteps x 3 channels, values in (0, 1].
    var data = new Data2D(1, 3, 3, 5);
    int l = 0;
    for (int b = 0; b < 5; ++b)
    {
        for (int w = 0; w < 3; ++w)
        {
            for (int c = 0; c < 3; ++c)
            {
                l += 1;
                data[0, w, c, b] = (l % 5 + 1) / 10.0;
            }
        }
    }

    Data2D output = model.ExecuteNetwork(data) as Data2D;

    // Checking sizes: one 2-channel output vector per batch.
    Dimension dim = output.GetDimension();
    Assert.AreEqual(dim.b, 5);
    Assert.AreEqual(dim.c, 2);
    Assert.AreEqual(dim.h, 1);
    Assert.AreEqual(dim.w, 1);

    // Expected outputs (rows = batches) from the reference Keras model.
    double[,] expected =
    {
        { 0.19632, 0.37259 },
        { 0.21991, 0.37473 },
        { 0.24834, 0.38176 },
        { 0.18727, 0.35267 },
        { 0.166619, 0.35275 },
    };
    for (int b = 0; b < 5; ++b)
    {
        for (int c = 0; c < 2; ++c)
        {
            Assert.AreEqual(output[0, 0, c, b], expected[b, c], 0.00001);
        }
    }
}
/// <summary>
/// Creates a brain either from a passed pretrained model (inference mode)
/// or from the TrainManager's default model (training mode).
/// </summary>
/// <param name="model">Optional pretrained model; required when not training.</param>
public Brain(SequentialModel model = null)
{
    if (!TrainManager.Instance) // when not training, just load the passed model
    {
        if (model == null)
        {
            throw new NullReferenceException(
                "When not training, there should always a model be passed! But model is null!");
        }
        Debug.Log("Running passed model.");
        _model = model;
        _weights = _model.GetWeights();
    }
    else // when training, load default model
    {
        _model = TrainManager.Instance.DefaultModel;
        if (model != null) // if model not null, try to get the weights from pretrained model
        {
            _weights = model.GetWeights();
            var defaultWeights = _model.GetWeights();
            // Pretrained weights must fit the default topology exactly.
            if (defaultWeights.Length != _weights.Length)
            {
                throw new ArgumentOutOfRangeException(nameof(ArgumentOutOfRangeException),
                    $"The length of the default model {defaultWeights.Length} and the length of " +
                    $"the pretrained model {_weights.Length} should be identical.");
            }
            //Debug.Log("Training with pretrained models.");
        }
        else // if model null, get weights from default model and train from scratch
        {
            _weights = _model.GetWeights();
            // Random initialization in [-1, 1).
            for (var i = 0; i < _weights.Length; i++)
            {
                _weights[i] = Random.value * 2 - 1f;
            }
            //Debug.Log("Training from scratch models.");
        }
    }
    // Resolve layer sizes and push the chosen weights into the model.
    _layers = _model.GetLayers();
    _model.SetWeights(_layers, _weights);
}
/// <summary>
/// Minimal usage example: load an exported Keras model, build an input
/// tensor, and run the network once.
/// </summary>
/// <param name="filePath">Path to the exported Keras model JSON.</param>
public static void Sample1(string filePath)
{
    // Read the previously created json.
    var reader = new ReaderKerasModel(filePath);
    SequentialModel model = reader.GetSequentialExecutor();

    // Then create the data to run the executer on.
    // batch: should be set in the Keras model.
    // NOTE(review): the zero sizes below are placeholders — replace them with
    // the dimensions the loaded model actually expects.
    int height = 0;
    int width = 0;
    int channel = 0;
    int batch = 0;
    Data2D input = new Data2D(height, width, channel, batch);

    // Calculate the network's output.
    IData output = model.ExecuteNetwork(input);
}
/// <summary>
/// Generic driver: runs the given model over stored input data and compares
/// the produced tensor against the stored expected output.
/// </summary>
static public void KerasModelTest(string pathIn, string pathModel, string pathOut, double accuracy = 0.00001)
{
    SequentialModel model = new ReaderKerasModel(pathModel).GetSequentialExecutor();

    // Load the input and the expected result, then execute the network.
    Data2D data = Utils.ReadDataFromFile(pathIn);
    Data2D expected = Utils.ReadDataFromFile(pathOut);
    Data2D output = model.ExecuteNetwork(data) as Data2D;

    // Dimensions first, then element-wise values within the given accuracy.
    Utils.CheckDimensions(output, expected);
    Utils.CheckResults(output, expected, accuracy);
}
/// <summary>
/// Loads a sequential model that was previously serialized to disk.
/// Returns null (after reporting via ThrowException) if loading fails.
/// </summary>
/// <param name="pathToModel">Path to file with saved model</param>
/// <returns>Loaded model, or null on failure</returns>
public static SequentialModel LoadModel(string pathToModel)
{
    SequentialModel model = null;
    try
    {
        // 'using' releases the file handle even when Deserialize throws
        // (the original only closed the stream on the success path).
        // NOTE(security): BinaryFormatter is obsolete and unsafe for
        // untrusted data; kept for compatibility with the saved format.
        using (FileStream stream = File.OpenRead(pathToModel))
        {
            var formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
            model = (SequentialModel)formatter.Deserialize(stream);
        }
    }
    catch (Exception ex)
    {
        ThrowException("Loading model failed with following error message " + ex.Message);
    }
    return(model);
}
/// <summary>
/// Deserializes a SequentialModel previously written by SerializeModel.
/// </summary>
/// <param name="fileName">Path to the serialized model file.</param>
/// <exception cref="Exception">Thrown when the file does not exist.</exception>
public static SequentialModel DeserializeModel(string fileName)
{
    SequentialModel model = null;
    if (File.Exists(fileName))
    {
        // 'using' releases the file handle even when Deserialize throws
        // (the original only closed the stream on the success path).
        // NOTE(security): BinaryFormatter is obsolete and unsafe for
        // untrusted data; kept for compatibility with the saved format.
        using (Stream stream = File.OpenRead(fileName))
        {
            var deserializer = new BinaryFormatter();
            model = (SequentialModel)deserializer.Deserialize(stream);
        }
    }
    else
    {
        // Fixed message: this path deserializes, it does not serialize.
        throw new Exception("Trying to deserialize a non-existing file.");
    }
    return(model);
}
public void Test_GlobalMaxPool2D_KerasModel()
{
    // Load the 2D global max-pooling test model.
    string path = @"tests\test_globalmaxpool_2D_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // 3x3 grid with 2 channels; values[h, w, c].
    var inp = new Data2D(3, 3, 2, 1);
    double[,,] values =
    {
        { { 1, 0 }, { 3, 1 }, { 2, -3 } },
        { { 2, 3 }, { 4, 1 }, { 2, -1 } },
        { { 0, 1 }, { 0, -1 }, { 0, 0 } },
    };
    for (int h = 0; h < 3; ++h)
    {
        for (int w = 0; w < 3; ++w)
        {
            for (int c = 0; c < 2; ++c)
            {
                inp[h, w, c, 0] = values[h, w, c];
            }
        }
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    Assert.AreEqual(ou.GetDimension().c, 2);
    Assert.AreEqual(ou.GetDimension().w, 1);

    // Global max per channel: max of channel 0 is 4, max of channel 1 is 3.
    Assert.AreEqual(ou[0, 0, 0, 0], 4.0, 0.00001);
    Assert.AreEqual(ou[0, 0, 1, 0], 3.0, 0.00001);
}
/// <summary>
/// Pushes a flat weight vector into the model: for each consecutive layer
/// pair it builds a weight tensor, a bias array, and an (unused) empty
/// array, consuming the vector in weights-then-biases order.
/// </summary>
/// <param name="sequentialModel">Model whose weights are replaced.</param>
/// <param name="layers">Layer sizes, input first (must match the model's input channel count).</param>
/// <param name="weights">Flat vector of weights followed by biases, per layer pair.</param>
public static void SetWeights(this SequentialModel sequentialModel, int[] layers, double[] weights)
{
    var dim = sequentialModel.GetInputDimension();
    if (dim.c != layers[0])
    {
        // Fixed: report the offending parameter (was nameof(ArgumentOutOfRangeException)),
        // reuse the already-fetched dimension, and add the missing space in the message.
        throw new ArgumentOutOfRangeException(nameof(layers),
            "Size of input layer should be " +
            $"{dim.c} but is {layers[0]}.");
    }
    var data = new List<IData>();
    var count = 0;
    for (var i = 0; i < layers.Length - 1; i++)
    {
        // create weight list: weight tensor, bias array, and an empty placeholder
        data.Add(new Data2D(1, 1, layers[i], layers[i + 1]));
        data.Add(new DataArray(layers[i + 1]));
        data.Add(new DataArray(0));

        // assign weights to weight list (layers[i] x layers[i+1] matrix)
        for (var j = 0; j < layers[i]; j++)
        {
            for (var k = 0; k < layers[i + 1]; k++)
            {
                ((Data2D)data[i * 3])[0, 0, j, k] = weights[count++];
            }
        }
        // assign biases to weight list (one per output channel)
        for (var j = 0; j < layers[i + 1]; j++)
        {
            ((DataArray)data[i * 3 + 1])[j] = weights[count++];
        }
    }
    sequentialModel.SetWeights(data);
}
/// <summary>
/// Derives the layer-size chain of the model: the input size of every dense
/// layer, followed by the output size of the last dense layer.
/// </summary>
public static int[] GetLayers(this SequentialModel sequentialModel)
{
    var sizes = new List<int>();
    // Reflection: the compiled executor's layer list is not publicly exposed.
    var allLayers = sequentialModel.GetFieldValue<DefaultExecutor>("compiled")
        .GetFieldValue<List<ILayer>>("layers");

    var lastOutput = 0;
    foreach (var layer in allLayers)
    {
        var dense = layer as Dense2DLayer;
        if (dense == null)
        {
            continue;
        }
        var summary = dense.GetLayerSummary();
        sizes.Add(summary.InputChannel);
        lastOutput = summary.OutputChannel;
    }

    sizes.Add(lastOutput);
    return sizes.ToArray();
}
/// <summary>
/// Tiny regression check: trains a single dense layer to learn
/// f(a, b, c) = a + b + c on randomly generated triples.
/// </summary>
private static void AdditionTest()
{
    var model = new SequentialModel(
        new DenseLayer(3, 1)
        );

    var rng = new Random();
    // Fixed: this local was created but never used — the original allocated a
    // second MeanSquareError inline. Pass the shared instance instead.
    var errorFunction = new MeanSquareError();

    model.Train(
        dataSource: () =>
        {
            var a = rng.NextDouble() * 10;
            var b = rng.NextDouble() * 10;
            var c = rng.NextDouble() * 10;
            return (new double[] { a, b, c }, new double[] { a + b + c });
        },
        epochs: 10000,
        batchSize: 1000,
        learningRate: 0.0001d,
        errorFunction: errorFunction,
        callback: (i, error, _) => Console.WriteLine("[" + i + "] Error: " + error));
}
/// <summary>
/// Extracts all dense-layer weights and bias-layer biases of the compiled
/// model into one flat array, in layer order (weights before biases per layer).
/// </summary>
public static double[] GetWeights(this SequentialModel sequentialModel)
{
    // reflection used because there is no other way to get the weights of a network in nnsharp
    var weights = new List<double>();
    var compiled = sequentialModel.GetFieldValue<DefaultExecutor>("compiled");
    var layers = compiled.GetFieldValue<IList<ILayer>>("layers");
    var dim = sequentialModel.GetInputDimension();
    // set the initInput field otherwise the error: input is not Data2D (Dense2DLayer)
    compiled.SetFieldValue<Data2D>("initInput", new Data2D(dim.h, dim.w, dim.c, dim.b));
    foreach (var layer in layers)
    {
        if (layer is Dense2DLayer)
        {
            // Flatten the 4-D weight tensor of each dense layer in enumeration order.
            var t = (layer as Dense2DLayer).GetFieldValue<IData>("weights").GetFieldValue<double[, , , ]>("tensor")
                .GetEnumerator();
            while (t.MoveNext())
            {
                if (t.Current != null)
                {
                    weights.Add((double)t.Current);
                }
            }
        }
        else if (layer is Bias2DLayer)
        {
            // Biases are stored as a flat double[] and appended after the weights.
            var a = layer.GetFieldValue<IData>("biases").GetFieldValue<double[]>("array");
            weights.AddRange(a);
        }
    }
    return (weights.ToArray());
}
public void Test_Cropping1D_KerasModel()
{
    // Load the 1D cropping test model.
    string path = @"tests\test_crop_1D_model.json";
    SequentialModel model = new ReaderKerasModel(path).GetSequentialExecutor();

    // Channel 0 holds 1..5, channel 1 holds the negated sequence.
    var inp = new Data2D(1, 5, 2, 1);
    for (int w = 0; w < 5; ++w)
    {
        inp[0, w, 0, 0] = w + 1;
        inp[0, w, 1, 0] = -(w + 1);
    }

    Data2D ou = model.ExecuteNetwork(inp) as Data2D;

    Assert.AreEqual(ou.GetDimension().c, 2);
    Assert.AreEqual(ou.GetDimension().w, 2);

    // Cropping keeps the middle timesteps: values 2 and 3 (and their negatives).
    for (int w = 0; w < 2; ++w)
    {
        Assert.AreEqual(ou[0, w, 0, 0], w + 2.0, 0.00001);
        Assert.AreEqual(ou[0, w, 1, 0], -(w + 2.0), 0.00001);
    }
}