private static void MNISTTraining()
{
    uint batchSize = 32;

    var trainIter = new MXDataIter("MNISTIter")
        .SetParam("image", "./mnist_data/train-images-idx3-ubyte")
        .SetParam("label", "./mnist_data/train-labels-idx1-ubyte")
        .SetParam("batch_size", batchSize)
        .SetParam("flat", 1)
        .CreateDataIter();

    var valIter = new MXDataIter("MNISTIter")
        .SetParam("image", "./mnist_data/t10k-images-idx3-ubyte")
        .SetParam("label", "./mnist_data/t10k-labels-idx1-ubyte")
        .SetParam("batch_size", batchSize)
        .SetParam("flat", 1)
        .CreateDataIter();

    var model = new Sequential(new Shape(28 * 28), 10);
    model.AddHidden(new Dense(28 * 28, ActivationType.ReLU, new GlorotUniform()));
    model.AddHidden(new Dropout(0.25f));
    model.AddHidden(new Dense(28 * 28, ActivationType.ReLU, new GlorotUniform()));

    model.Compile(OptimizerType.SGD, LossType.CategorialCrossEntropy, "accuracy");
    model.Fit(trainIter, 10, batchSize, valIter);
}
private static void ORGate()
{
    // Truth table for the OR gate
    DataFrame train_x = new DataFrame(4, 2);
    DataFrame train_y = new DataFrame(4, 1);
    train_x.AddData(0, 0);
    train_x.AddData(0, 1);
    train_x.AddData(1, 0);
    train_x.AddData(1, 1);
    train_y.AddData(0);
    train_y.AddData(1);
    train_y.AddData(1);
    train_y.AddData(1);

    DataFrameIter train = new DataFrameIter(train_x, train_y);

    Sequential model = new Sequential(new Shape(2), 1);
    model.AddHidden(new Dense(4, ActivationType.ReLU, new GlorotUniform()));
    model.Compile(OptimizerType.SGD, LossType.BinaryCrossEntropy, "accuracy");
    model.Fit(train, 100, 2);
    model.SaveModel(@"C:\Users\bdkadmin\Desktop\SSHKeys\");
}
private static Sequential CreateCNN()
{
    var seq = new Sequential();

    // First convolution block
    seq.Add(new Conv2D(32, new Tuple<int, int>(3, 3), activation: "relu", input_shape: new Keras.Shape(19, 19, 1)));
    seq.Add(new MaxPooling2D(new Tuple<int, int>(2, 2)));

    // Second convolution block
    seq.Add(new Conv2D(64, new Tuple<int, int>(3, 3), activation: "relu"));
    seq.Add(new MaxPooling2D(new Tuple<int, int>(2, 2)));
    seq.Add(new Flatten());

    // Fully connected layer
    seq.Add(new Dense(219, activation: "relu"));

    // Output layer
    seq.Add(new Dense(2, activation: "softmax"));

    seq.Compile(new Keras.Optimizers.SGD(lr: 0.001f), "categorical_crossentropy", new string[] { "accuracy" });
    return seq;
}
public void Dense_CustomKRegularizerAndKInitParams()
{
    NDarray x = np.array(new float[,] { { 1, 0 }, { 1, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    var model = new Sequential();
    model.Add(new Dense(1, activation: "sigmoid", input_shape: new Shape(x.shape[1]),
                        kernel_regularizer: new L1L2(1000, 2000),
                        kernel_initializer: new Constant(100)));

    var modelAsJson = JsonConvert.DeserializeObject<dynamic>(model.ToJson());
    Assert.AreEqual("Sequential", modelAsJson.class_name.Value);

    // Find the first layer that carries a kernel_initializer.
    // The bounds check comes first so the indexer cannot run past the layer list.
    int i = 0;
    while (i < 3 && modelAsJson.config.layers[i].config.kernel_initializer == null)
    {
        i++;
    }

    Assert.AreEqual(100, modelAsJson.config.layers[i].config.kernel_initializer.config.value.Value);
    Assert.AreEqual("Constant", modelAsJson.config.layers[i].config.kernel_initializer.class_name.Value);
    Assert.AreEqual("L1L2", modelAsJson.config.layers[i].config.kernel_regularizer.class_name.Value);
    Assert.AreEqual(1000, modelAsJson.config.layers[i].config.kernel_regularizer.config.l1.Value);
    Assert.AreEqual(2000, modelAsJson.config.layers[i].config.kernel_regularizer.config.l2.Value);

    // Compile and train
    model.Compile(optimizer: new Adam(lr: 0.001F), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(x, y, batch_size: x.shape[0], epochs: 100, verbose: 0);
    Assert.AreEqual(2, model.GetWeights().Count);
}
public void perceptron_should_learn_all_except_xor_nor(
    [ValueSource("Backends")] string backend,
    [ValueSource("Targets")] float[] y,
    [Values(false, true)] bool useBias)
{
    KerasSharp.Backends.Current.Switch(backend);

    var model = new Sequential();
    model.Add(new Dense(1, input_dim: 2,
                        kernel_initializer: new GlorotUniform(),
                        bias_initializer: new GlorotUniform(),
                        use_bias: useBias,
                        activation: new Sigmoid()));
    model.Compile(loss: new MeanSquareError(), optimizer: new SGD(lr: 1), metrics: new[] { new Accuracy() });

    model.fit(x, y, epochs: 1000, batch_size: y.Length);

    Array yy = model.predict(x, batch_size: y.Length)[0];
    float[] pred = MatrixEx.Round(yy.To<float[,]>()).GetColumn(0);

    // A single perceptron cannot represent XOR, and without a bias term it
    // also fails on NOR and AND; every other target should be learned.
    if ((useBias && (y == xor)) || (!useBias && (y == xor || y == nor || y == and)))
    {
        Assert.AreNotEqual(y, pred);
    }
    else
    {
        Assert.AreEqual(y, pred);
    }
}
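// The fixtures x, xor, nor, and used by the perceptron and MLP tests in this
// collection are defined elsewhere in the test class; a sketch of the standard
// truth-table data they are assumed to hold (field names taken from the call sites):
static float[,] x = { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
static float[] and = { 0, 0, 0, 1 };
static float[] or = { 0, 1, 1, 1 };
static float[] xor = { 0, 1, 1, 0 };
static float[] nor = { 1, 0, 0, 0 };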
public static void Train()
{
    var model = new Sequential();

    // Embedding layer
    model.Add(new Embedding(output, 100, input_length: 32));

    // Stacked causal convolutions with growing dilation
    model.Add(new Conv1D(64, 3, padding: "causal", activation: "tanh"));
    model.Add(new Dropout(0.2));
    model.Add(new MaxPooling1D(2));

    model.Add(new Conv1D(128, 3, activation: "relu", dilation_rate: 2, padding: "causal"));
    model.Add(new Dropout(0.2));
    model.Add(new MaxPooling1D(2));

    model.Add(new Conv1D(256, 3, activation: "relu", dilation_rate: 4, padding: "causal"));
    model.Add(new Dropout(0.2));
    model.Add(new MaxPooling1D(2));
    //model.Add(new Conv1D(256, 5, activation: "relu"));

    model.Add(new GlobalMaxPooling1D());
    model.Add(new Dense(256, activation: "relu"));
    model.Add(new Dense(output, activation: "softmax"));

    model.Compile(loss: "sparse_categorical_crossentropy", optimizer: new Adam());
    model.Summary();

    // Keep only the checkpoint with the lowest validation loss
    var mc = new ModelCheckpoint("best_model.h5", monitor: "val_loss", mode: "min", save_best_only: true, verbose: 1);
    var history = model.Fit(train_x, train_y, batch_size: 32, epochs: 100,
                            validation_split: 0.25f, verbose: 1, callbacks: new Callback[] { mc });
    model.Save("last_epoch.h5");
}
// Builds and trains a convolutional neural network model.
// Uses relu activations in the convolutional layers and a softmax output.
// The preprocessing and pooling layers are fixed; exposing their parameters is a candidate for further development.
private static Sequential ProcessCnnModel(Shape input_shape, NDarray x_train, NDarray y_train,
                                          NDarray x_test, NDarray y_test, int num_classes,
                                          string logname, Config config)
{
    // Build CNN model
    Sequential model = new Sequential();
    model.Add(new Conv2D(16, kernel_size: (3, 3).ToTuple(), activation: "relu", input_shape: input_shape));
    model.Add(new Conv2D(32, (3, 3).ToTuple(), activation: "relu"));
    model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
    model.Add(new Flatten());

    Callback[] callbacks = GetCallbacks(config.isEarlyStop, logname);
    AddNodes(model, config);
    model.Add(new Dense(num_classes, activation: "softmax"));

    // Compile with loss, metrics and optimizer
    model.Compile(loss: "categorical_crossentropy",
                  optimizer: new Adam(lr: (float)config.LearnRate, decay: (float)config.LearnDecay),
                  metrics: new[] { "accuracy" });

    // Train the model
    model.Fit(x_train, y_train, batch_size: config.Batch, epochs: config.Epochs, verbose: 1,
              validation_data: new[] { x_test, y_test }, callbacks: callbacks);

    return model;
}
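// GetCallbacks and AddNodes above are project helpers that are not shown. A
// minimal sketch of what GetCallbacks might look like with Keras.NET's
// EarlyStopping and TensorBoard callbacks; the monitor, patience and log-dir
// choices below are assumptions, not the project's actual settings:
private static Callback[] GetCallbacks(bool isEarlyStop, string logname)
{
    var callbacks = new List<Callback>
    {
        new TensorBoard(log_dir: "logs/" + logname)   // per-run training logs
    };
    if (isEarlyStop)
    {
        // Stop once validation loss plateaus instead of running all epochs
        callbacks.Add(new EarlyStopping(monitor: "val_loss", patience: 3, verbose: 1));
    }
    return callbacks.ToArray();
}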
static void Main(string[] args)
{
    // Load train data (XOR truth table)
    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: "sgd", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

    // Save model and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");
    var result = loaded_model.Predict(x);
    Console.WriteLine("Prediction for [{0}] = [{1}]", x.ToString(), result.ToString());
}
static void Main(string[] args)
{
    // Setup engine
    Global.UseEngine(SiaNet.Backend.MxNetLib.SiaNetBackend.Instance, DeviceType.CPU);

    // Prepare data
    var (x, y) = PrepDataset();
    x.Head();
    DataFrameIter trainSet = new DataFrameIter(x, y);

    // Build model with simple fully connected layers
    var model = new Sequential();
    model.EpochEnd += Model_EpochEnd;
    model.Add(new Dense(64, ActType.ReLU));
    model.Add(new Dense(1, ActType.Sigmoid));

    // Compile with optimizer, loss and metric
    model.Compile(OptimizerType.SGD, LossType.MeanSquaredError, MetricType.BinaryAccurary);

    // Train for 1000 epochs with a batch size of 2
    model.Train(trainSet, 1000, 2);

    // Create prediction data to evaluate; the results should be 0 and 1
    DataFrame2D predX = new DataFrame2D(2);
    predX.Load(0, 0, 0, 1);

    var rawPred = model.Predict(predX);
    Console.ReadLine();
}
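// PrepDataset is not shown above. A minimal sketch that would satisfy the
// call site, building an OR-gate truth table with SiaNet's DataFrame2D;
// the gate choice is an assumption based on the expected 0/1 predictions:
static (DataFrame2D, DataFrame2D) PrepDataset()
{
    var x = new DataFrame2D(2);       // two input features per row
    x.Load(0, 0, 0, 1, 1, 0, 1, 1);   // four rows: (0,0) (0,1) (1,0) (1,1)
    var y = new DataFrame2D(1);       // one label per row
    y.Load(0, 1, 1, 1);               // OR-gate targets (assumed)
    return (x, y);
}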
public static void Run()
{
    //var ((x_train, y_train), (x_test, y_test)) = IMDB.LoadData(num_words: top_words);
    var ((x_train, y_train), (x_test, y_test)) = LoadDataRussianLanguageToxicComments(
        trainCount: train_count, testCount: test_count, numWords: top_words, maxWords: max_words);

    // No need to pad the arrays to 500 elements: they are already 500 elements long
    //x_train = SequenceUtil.PadSequences(x_train, maxlen: max_words);
    //x_test = SequenceUtil.PadSequences(x_test, maxlen: max_words);

    // Create model
    Sequential model = new Sequential();
    model.Add(new Embedding(top_words, 32, input_length: max_words));
    model.Add(new Conv1D(filters: 32, kernel_size: 3, padding: "same", activation: "relu"));
    model.Add(new MaxPooling1D(pool_size: 2));
    model.Add(new Flatten());
    model.Add(new Dense(250, activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    model.Compile(loss: "binary_crossentropy", optimizer: "adam", metrics: new string[] { "accuracy" });
    model.Summary();

    // Fit the model
    model.Fit(x_train, y_train, validation_data: new NDarray[] { x_test, y_test },
              epochs: 2 /*10*/, batch_size: 128, verbose: 2);

    // Final evaluation of the model
    var scores = model.Evaluate(x_test, y_test, verbose: 0);
    Console.WriteLine("Accuracy: " + (scores[1] * 100));

    // Save the model
    model.Save("model.h5");
    File.WriteAllText("model.json", model.ToJson());
    //model.SaveTensorflowJSFormat("./"); // error: Cannot perform runtime binding on a null reference
}
public static void Run()
{
    // Load train data (XOR truth table)
    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: new Adam(), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    var history = model.Fit(x, y, batch_size: 2, epochs: 100, verbose: 1);
    var logs = history.HistoryLogs;

    // Save model and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");
}
public static void SmallNetwork(List<Tuple<bool, float[]>> train, List<Tuple<bool, float[]>> test)
{
    int vectorSize = train[0].Item2.Length;

    // Load train data
    var nTrain = ListToNDarrays(train, vectorSize);
    var nTest = ListToNDarrays(test, vectorSize);

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(8, activation: "relu", input_shape: new Shape(vectorSize)));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(16, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    //model.Compile(optimizer: "adam", loss: "sparse_categorical_crossentropy", metrics: new string[] { "accuracy" });
    model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(
        nTrain.Item2,
        nTrain.Item1,
        batch_size: 8,
        epochs: 50,
        verbose: 1,
        validation_data: new NDarray[] { nTest.Item2, nTest.Item1 });

    // Save model and weights
    string json = model.ToJson();
    File.WriteAllText("./models/sm_model.json", json);
    model.SaveWeight("./models/sm_model.h5");
}
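// ListToNDarrays is a project helper that is not shown. A plausible sketch,
// assuming Item1 of the returned tuple is the label NDarray and Item2 the
// feature NDarray (matching the Fit(nTrain.Item2, nTrain.Item1, ...) call):
private static Tuple<NDarray, NDarray> ListToNDarrays(List<Tuple<bool, float[]>> data, int vectorSize)
{
    var labels = new float[data.Count];
    var features = new float[data.Count, vectorSize];
    for (int i = 0; i < data.Count; i++)
    {
        labels[i] = data[i].Item1 ? 1f : 0f;       // bool label -> 0/1
        for (int j = 0; j < vectorSize; j++)
        {
            features[i, j] = data[i].Item2[j];     // copy feature vector
        }
    }
    return Tuple.Create(np.array(labels), np.array(features));
}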
static void Main(string[] args)
{
    SaveRateToFileTrainData("USD");
    SaveRateToFileTestData("USD");

    Global.UseEngine(SiaNet.Backend.ArrayFire.SiaNetBackend.Instance, DeviceType.CUDA, true);
    var train = PreparingExchangeRateData.LoadTrain();
    var test = PreparingExchangeRateData.LoadTest();

    var model = new Sequential();
    model.EpochEnd += Model_EpochEnd;
    model.Add(new Dense(60, ActType.Sigmoid));
    model.Add(new Dense(60, ActType.Sigmoid));
    model.Add(new Dense(1, ActType.Linear));

    // Compile with optimizer, loss and metric
    model.Compile(OptimizerType.SGD, LossType.MeanSquaredError, MetricType.MSE);

    // Train for 1000 epochs with a batch size of 32
    model.Train(train, epochs: 1000, batchSize: 32);

    // XOR-style probe data left over from the sample this was based on;
    // the prediction below actually runs on the exchange-rate test set.
    DataFrame2D predX = new DataFrame2D(2);
    predX.Load(0, 0, 0, 1, 1, 0, 1, 1);

    var rawPred = model.Predict(test);
    Console.ReadLine();
}
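// The Model_EpochEnd handlers wired up in the two SiaNet examples above are
// not shown. A minimal logging sketch; the event-args type and property names
// are assumptions about this SiaNet version:
private static void Model_EpochEnd(object sender, EpochEndEventArgs e)
{
    // Print per-epoch progress as training runs
    Console.WriteLine($"Epoch: {e.Epoch}, Loss: {e.Loss}, Metric: {e.Metric}");
}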
public static void FitMnist()
{
    var model = new Sequential();
    model.Add(new Conv2D(32, kernelSize: new int[] { 3, 3 }, inputShape: new int[] { 28, 28, 1 }, activation: "relu"));
    model.Add(new Conv2D(64, kernelSize: new int[] { 3, 3 }, activation: "relu"));
    // model.Add(new MaxPooling1D(poolSize: 2));
    model.Add(new MaxPooling2D(poolSize: new int[] { 2, 2 }));
    model.Add(new Dropout(0.25));
    model.Add(new Flatten());
    model.Add(new Dense(128, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(10, activation: "softmax"));

    var optimizer = new SGD(lr: 0.01);
    model.Compile("categorical_crossentropy", optimizer, new string[] { "accuracy" });

    // Load the serialized MNIST tensors and scale pixel values to [0, 1]
    var xtrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_xtrain.nda"), FileMode.Open));
    var ytrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_ytrain.nda"), FileMode.Open));
    xtrain = xtrain.Cast(DType.Float32);
    xtrain = Ops.Div(null, xtrain, 255f);
    ytrain = ytrain.Cast(DType.Float32);

    model.Fit(xtrain, ytrain, batchSize: 128, epochs: 12);

    var stream = new FileStream("c:/ttt/mnist.model", FileMode.OpenOrCreate, FileAccess.Write);
    stream.SetLength(0);
    model.Save(stream);
}
public void sequential_guide_stacked_lstm()
{
    int data_dim = 16;
    int timesteps = 8;
    int num_classes = 10;

    // expected input data shape: (batch_size, timesteps, data_dim)
    var model = new Sequential();
    model.Add(new LSTM(32, return_sequences: true, input_shape: new[] { timesteps, data_dim })); // returns a sequence of vectors of dimension 32
    model.Add(new LSTM(32, return_sequences: true)); // returns a sequence of vectors of dimension 32
    model.Add(new LSTM(32)); // returns a single vector of dimension 32
    model.Add(new Dense(10, activation: "softmax"));

    model.Compile(loss: "categorical_crossentropy", optimizer: "rmsprop", metrics: new[] { "accuracy" });

    // Generate dummy training data
    double[][][] x_train = null; // Accord.Math.Jagged.Random(1000, timesteps, data_dim); // TODO: Add better method in Accord
    int[] y_train = Accord.Math.Vector.Random(1000, min: 0, max: num_classes);

    // Generate dummy validation data
    double[,,] x_val = null; // Accord.Math.Jagged.Random(1000, timesteps, data_dim); // TODO: Add better method in Accord
    int[] y_val = Accord.Math.Vector.Random(1000, min: 0, max: num_classes);

    model.fit(x_train, y_train, batch_size: 64, epochs: 5, validation_data: new Array[] { x_val, y_val });
}
public void sequential_guide_stateful_stacked_lstm()
{
    int data_dim = 16;
    int timesteps = 8;
    int num_classes = 10;
    int batch_size = 32;

    // Expected input batch shape: (batch_size, timesteps, data_dim).
    // Note that we have to provide the full batch_input_shape since the network is stateful:
    // the sample of index i in batch k is the follow-up for the sample i in batch k-1.
    var model = new Sequential();
    model.Add(new LSTM(32, return_sequences: true, stateful: true,
                       batch_input_shape: new int?[] { batch_size, timesteps, data_dim }));
    model.Add(new LSTM(32, return_sequences: true, stateful: true));
    model.Add(new LSTM(32, stateful: true));
    model.Add(new Dense(10, activation: "softmax"));

    model.Compile(loss: "categorical_crossentropy", optimizer: "rmsprop", metrics: new[] { "accuracy" });

    // Generate dummy training data
    double[][][] x_train = null; // Accord.Math.Jagged.Random(1000, timesteps, data_dim); // TODO: Add better method in Accord
    int[] y_train = Accord.Math.Vector.Random(1000, min: 0, max: num_classes);

    // Generate dummy validation data
    double[,,] x_val = null; // Accord.Math.Jagged.Random(1000, timesteps, data_dim); // TODO: Add better method in Accord
    int[] y_val = Accord.Math.Vector.Random(1000, min: 0, max: num_classes);

    model.fit(x_train, y_train, batch_size: batch_size, epochs: 5,
              shuffle: Shuffle.False, validation_data: new Array[] { x_val, y_val });
}
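// Both LSTM guides above leave x_train/x_val as null pending a jagged random
// helper in Accord.Math. A plain-C# stand-in that fills the same
// (samples, timesteps, data_dim) shape with uniform noise; this is a
// hypothetical helper, not part of Accord:
private static double[][][] RandomJagged(int samples, int timesteps, int dataDim)
{
    var rng = new Random();
    var data = new double[samples][][];
    for (int i = 0; i < samples; i++)
    {
        data[i] = new double[timesteps][];
        for (int t = 0; t < timesteps; t++)
        {
            data[i][t] = new double[dataDim];
            for (int d = 0; d < dataDim; d++)
            {
                data[i][t][d] = rng.NextDouble();
            }
        }
    }
    return data;
}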
public void sequential_guide_convnet()
{
    // Generate dummy data
    double[,,,] x_train = (double[,,,])Accord.Math.Matrix.Zeros<double>(new int[] { 100, 100, 100, 3 }); // TODO: Add a better overload in Accord
    int[] y_train = Accord.Math.Vector.Random(100, min: 0, max: 10);
    double[,,,] x_test = (double[,,,])Accord.Math.Matrix.Zeros<double>(new int[] { 20, 100, 100, 3 }); // TODO: Add a better overload in Accord
    int[] y_test = Accord.Math.Vector.Random(20, min: 0, max: 10); // one label per test sample

    var model = new Sequential();

    // input: 100x100 images with 3 channels -> (100, 100, 3) tensors.
    // this applies 32 convolution filters of size 3x3 each.
    model.Add(new Conv2D(32, new[] { 3, 3 }, activation: "relu", input_shape: new int?[] { 100, 100, 3 }));
    model.Add(new Conv2D(32, new[] { 3, 3 }, activation: "relu"));
    model.Add(new MaxPooling2D(pool_size: new[] { 2, 2 }));
    model.Add(new Dropout(0.25));

    model.Add(new Conv2D(64, new[] { 3, 3 }, activation: "relu"));
    model.Add(new Conv2D(64, new[] { 3, 3 }, activation: "relu"));
    model.Add(new MaxPooling2D(pool_size: new[] { 2, 2 }));
    model.Add(new Dropout(0.25));

    model.Add(new Flatten());
    model.Add(new Dense(256, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(10, activation: "softmax"));

    var sgd = new SGD(lr: 0.01, decay: 1e-6, momentum: 0.9, nesterov: true);
    model.Compile(loss: "categorical_crossentropy", optimizer: sgd);

    model.fit(x_train, y_train, batch_size: 32, epochs: 10);
    var score = model.evaluate(x_test, y_test, batch_size: 32);
}
public void sequential_guide_mlp_binary()
{
    // Generate dummy data (binary labels for binary_crossentropy)
    double[,] x_train = Accord.Math.Matrix.Random(1000, 20);
    int[] y_train = Accord.Math.Vector.Random(1000, min: 0, max: 2);
    double[,] x_test = Accord.Math.Matrix.Random(1000, 20);
    int[] y_test = Accord.Math.Vector.Random(1000, min: 0, max: 2);

    var model = new Sequential();
    model.Add(new Dense(64, input_dim: 20, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(1, activation: "sigmoid"));

    model.Compile(loss: "binary_crossentropy", optimizer: "rmsprop", metrics: new[] { "accuracy" });

    model.fit(x_train, y_train, epochs: 20, batch_size: 128);
    var score = model.evaluate(x_test, y_test, batch_size: 128);
}
public void sequential_guide_mlp_multiclass()
{
    // Generate dummy data
    double[,] x_train = Accord.Math.Matrix.Random(1000, 20);
    int[] y_train = Accord.Math.Vector.Random(1000, min: 0, max: 10);
    double[,] x_test = Accord.Math.Matrix.Random(1000, 20);
    int[] y_test = Accord.Math.Vector.Random(1000, min: 0, max: 10);

    var model = new Sequential();

    // Dense(64) is a fully-connected layer with 64 hidden units.
    // In the first layer, you must specify the expected input data shape:
    // here, 20-dimensional vectors.
    model.Add(new Dense(64, activation: "relu", input_dim: 20));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(10, activation: "softmax"));

    var sgd = new SGD(lr: 0.01, decay: 1e-6, momentum: 0.9, nesterov: true);
    model.Compile(loss: "categorical_crossentropy", optimizer: sgd, metrics: new[] { "accuracy" });

    model.fit(x_train, y_train, epochs: 20, batch_size: 128);
    var score = model.evaluate(x_test, y_test, batch_size: 128);
}
static Sequential DefineDiscriminator()
{
    var model = new Sequential();
    model.Add(new Conv2D(64, new Tuple<int, int>(3, 3), padding: "same", input_shape: (imageWidth, imageHeight, 3)));
    model.Add(new LeakyReLU(0.2f));
    model.Add(new Conv2D(128, new Tuple<int, int>(3, 3), strides: new Tuple<int, int>(2, 2), padding: "same"));
    model.Add(new LeakyReLU(0.2f));
    model.Add(new Conv2D(128, new Tuple<int, int>(3, 3), strides: new Tuple<int, int>(2, 2), padding: "same"));
    model.Add(new LeakyReLU(0.2f));
    model.Add(new Conv2D(256, new Tuple<int, int>(3, 3), strides: new Tuple<int, int>(2, 2), padding: "same"));
    model.Add(new LeakyReLU(0.2f));
    model.Add(new Conv2D(256, new Tuple<int, int>(3, 3), strides: new Tuple<int, int>(2, 2), padding: "same"));
    model.Add(new LeakyReLU(0.2f));
    model.Add(new Conv2D(256, new Tuple<int, int>(3, 3), strides: new Tuple<int, int>(2, 2), padding: "same"));
    model.Add(new LeakyReLU(0.2f));
    model.Add(new Flatten());
    model.Add(new Dropout(0.4f));
    model.Add(new Dense(1, activation: "sigmoid"));

    var opt = new Adam(0.0002f, 0.5f);
    model.Compile(opt, loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    return model;
}
public void mlp_should_learn_all(
    [ValueSource("Backends")] string backend,
    [ValueSource("Targets")] float[] y,
    [Values(false, true)] bool useBias)
{
    KerasSharp.Backends.Current.Switch(backend);

    var model = new Sequential();
    model.Add(new Dense(5, input_dim: 2,
                        kernel_initializer: new GlorotUniform(),
                        bias_initializer: new GlorotUniform(),
                        use_bias: useBias,
                        activation: new Sigmoid()));
    model.Add(new Dense(1,
                        kernel_initializer: new GlorotUniform(),
                        bias_initializer: new GlorotUniform(),
                        use_bias: useBias,
                        activation: new Sigmoid()));
    model.Compile(loss: new MeanSquareError(), optimizer: new SGD(lr: 1), metrics: new[] { new Accuracy() });

    model.fit(x, y, epochs: 1000, batch_size: y.Length);

    double[] pred = Matrix.Round(model.predict(x, batch_size: y.Length)[0].To<double[,]>()).GetColumn(0);
    Assert.AreEqual(y, pred);
}
private Sequential CreateModel()
{
    int imgHeight = 75;
    Shape inputShape;

    if (Backend.ImageDataFormat() == "channels_first")
    {
        inputShape = new Shape(3, imgHeight, imgHeight);
    }
    else
    {
        inputShape = new Shape(imgHeight, imgHeight, 3);
    }

    Sequential newModel = new Sequential();
    newModel.Add(new Conv2D(5, new Tuple<int, int>(5, 5), input_shape: inputShape, padding: "same", activation: "relu"));
    newModel.Add(new MaxPooling2D());
    newModel.Add(new Conv2D(5, new Tuple<int, int>(3, 3), input_shape: inputShape, padding: "same", activation: "relu"));
    newModel.Add(new MaxPooling2D());
    newModel.Add(new Dropout(0.2));
    newModel.Add(new Flatten());
    newModel.Add(new Dropout(0.5));
    newModel.Add(new Dense(26, activation: "softmax"));

    newModel.Compile(loss: "categorical_crossentropy", optimizer: "adam", metrics: new string[] { "accuracy" });
    return newModel;
}
public static void Train()
{
    var compiledModel = model.Compile();
    compiledModel.EpochEnd += CompiledModel_EpochEnd;
    compiledModel.Fit(train, 10, 32,
                      optimizer: new SiaNet.Optimizers.SGD(learningRate: 0.01),
                      lossMetric: new SiaNet.Metrics.CrossEntropy(),
                      evaluationMetric: new SiaNet.Metrics.Accuracy(),
                      shuffle: false);
}
public static void FitMnistSimple()
{
    var model = new Sequential();
    model.Add(new Dense(512, activation: "relu", inputShape: new int[] { 784 }));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(512, activation: "relu"));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(10, activation: "softmax"));

    var optimizer = new SGD(lr: 0.01);
    model.Compile("categorical_crossentropy", optimizer, new string[] { "accuracy" });

    // Load the serialized MNIST tensors and scale pixel values to [0, 1]
    var xtrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_xtrain.nda"), FileMode.Open));
    var ytrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_ytrain.nda"), FileMode.Open));
    xtrain = xtrain.Cast(DType.Float32);
    xtrain = Ops.Div(null, xtrain, 255f);
    ytrain = ytrain.Cast(DType.Float32);

    model.Fit(xtrain, ytrain, batchSize: 128, epochs: 20);

    var stream = new FileStream("c:/ttt/mnist-simple.model", FileMode.OpenOrCreate, FileAccess.Write);
    stream.SetLength(0);
    model.Save(stream);
}
public static void Train()
{
    var compiledModel = model.Compile();
    compiledModel.TrainingEnd += CompiledModel_TrainingEnd;
    compiledModel.Fit(traintest, 100, 32,
                      optimizer: new Model.Optimizers.Adam(),
                      lossMetric: new Model.Metrics.MeanSquaredError(),
                      evaluationMetric: new Model.Metrics.MeanAbsoluteError(),
                      shuffle: true);
}
public static void Run()
{
    var (time, series) = GenerateData();
    //plot_series(time, series);

    var split_time = 3000;
    // var time_train = time[..split_time];
    // var time_valid = time[split_time..];
    // var x_train = series[..split_time];
    // var x_valid = series[split_time..];
    var time_train = time[$":{split_time}"];
    var time_valid = time[$"{split_time}:"];
    var x_train = series[$":{split_time}"];
    var x_valid = series[$"{split_time}:"];

    var window_size = 20;
    var batch_size = 32;
    var shuffle_buffer_size = 1000;

    var dataset = windowed_dataset(/*x_train*/ np.arange(100), window_size, batch_size, shuffle_buffer_size);

    var hidden1 = new Dense(100, input_shape: new Keras.Shape(window_size), activation: "relu");
    var hidden2 = new Dense(10, activation: "relu");
    var model = new Sequential(new BaseLayer[] { hidden1, hidden2, new Dense(1) });
    model.Compile(loss: "mse", optimizer: new SGD(lr: 1e-6f, momentum: 0.9f));
    //model.Fit(x: dataset, epochs: 100, verbose: 0);

    // //Load train data
    // var x = np.array(new float[,] {{0, 0}, {0, 1}, {1, 0}, {1, 1}});
    // var y = np.array(new float[] {0, 1, 1, 0});
    //
    // //Build functional model
    // var input = new Input(shape: new Shape(2));
    // var hidden1 = new Dense(32, activation: "relu").Set(input);
    // var hidden2 = new Dense(64, activation: "relu").Set(hidden1);
    // var output = new Dense(1, activation: "sigmoid").Set(hidden2);
    // var model = new Model(new BaseLayer[] {input}, new[] {output});
    //
    // //Compile and train
    // model.Compile(optimizer: new Adam(), loss: "binary_crossentropy", metrics: new[] {"accuracy"});
    // var history = model.Fit(x, y, batch_size: 2, epochs: 10);
    // //var weights = model.GetWeights();
    // //model.SetWeights(weights);
    // var logs = history.HistoryLogs;
    //
    // //Save model and weights
    // string json = model.ToJson();
    // File.WriteAllText("model.json", json);
    // model.SaveWeight("model.h5");
    //
    // //Load model and weight
    // var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    // loaded_model.LoadWeight("model.h5");
}
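// windowed_dataset above is not shown; it is assumed to slice the series into
// (window, next-value) training pairs. An array-based sketch of that idea
// (the original likely builds a shuffled, batched tf.data-style pipeline;
// shuffling and batching are omitted here):
static (float[,] X, float[] y) WindowedDataset(float[] series, int windowSize)
{
    int n = series.Length - windowSize;
    var X = new float[n, windowSize];   // each row is one sliding window
    var y = new float[n];               // the value that follows the window
    for (int i = 0; i < n; i++)
    {
        for (int j = 0; j < windowSize; j++)
        {
            X[i, j] = series[i + j];
        }
        y[i] = series[i + windowSize];
    }
    return (X, y);
}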
private static void createModel()
{
    model = new Sequential();
    model.Add(new Dense(INPUT_LAYER_SIZE, activation: "sigmoid" /*, input_dim: 1*/)); // relu works better
    model.Add(new Dense(ASSOCIATIONS_LAYER_SIZE /* *5 better */, activation: "sigmoid")); // relu works better
    model.Add(new Dense(RESULT_LAYER_SIZE, activation: "sigmoid"));
    model.Compile(loss: "mean_squared_error" /* binary_crossentropy works better */,
                  optimizer: new SGD(lr: learningRate),
                  metrics: new string[] { "accuracy" });
}
public static void Train()
{
    var compiledModel = model.Compile();
    compiledModel.EpochEnd += CompiledModel_EpochEnd;
    compiledModel.TrainingEnd += CompiledModel_TrainingEnd;
    compiledModel.Fit(train, 100, 5, new Model.Optimizers.Adam(), new Model.Metrics.MeanSquaredError());
}
public static void Train()
{
    //model.Compile(OptOptimizers.SGD, OptLosses.CrossEntropy, OptMetrics.Accuracy);
    var compiledModel = model.Compile();
    compiledModel.EpochEnd += CompiledModel_EpochEnd;
    compiledModel.Fit(trainData, 100, 2, new Model.Optimizers.SGD(), new Model.Metrics.BinaryCrossEntropy(), new Model.Metrics.Accuracy());
}
public void TrainXOR()
{
    try
    {
        // Load train data (XOR truth table)
        float[,] testX = new float[,] { { 0, 1 } };
        float[,] x = new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
        float[] y = new float[] { 0, 1, 1, 0 };

        // Build sequential model
        var model = new Sequential();
        model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
        model.Add(new Dense(32, activation: "relu"));
        model.Add(new Dropout(0.1d));
        model.Add(new Dense(1, activation: "sigmoid"));

        // Compile and train
        var optimizer = new Adam();
        model.Compile(optimizer: optimizer, loss: "mse", metrics: new string[] { "accuracy" });
        model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

        // Warm up both prediction paths before timing them
        float[] predicts;
        predicts = model.Predict(x).GetData<float>();
        predicts = model.PredictOnBatch(x).GetData<float>();
        predicts = model.Predict(x).GetData<float>();
        predicts = model.PredictOnBatch(x).GetData<float>();
        predicts = model.Predict(x).GetData<float>();
        predicts = model.PredictOnBatch(x).GetData<float>();

        // Time PredictOnBatch against Predict on a single sample
        Stopwatch watch = new Stopwatch();
        watch.Restart();
        for (int i = 0; i < 5; ++i)
        {
            predicts = model.PredictOnBatch(testX).GetData<float>();
        }
        watch.Stop();
        string batchMs = watch.GetElapsedMilliseconds().ToString();

        watch.Restart();
        for (int i = 0; i < 5; ++i)
        {
            predicts = model.Predict(testX).GetData<float>();
        }
        watch.Stop();

        //MainWindow.Instance.Dispatcher.BeginInvoke(new Action(() => {
        //    MainWindow.Instance.DebugTextBox.Text = batchMs + " / " + watch.GetElapsedMilliseconds().ToString();
        //}));
    }
    catch (Exception ex)
    {
        //MainWindow.Instance.Dispatcher.BeginInvoke(new Action(() => {
        //    MainWindow.Instance.DebugTextBox.Text = ex.ToString();
        //}));
    }
}