public static void SmallNetwork(List<Tuple<bool, float[]>> train, List<Tuple<bool, float[]>> test)
{
    int vectorSize = train[0].Item2.Length;

    // Load train data
    var nTrain = ListToNDarrays(train, vectorSize);
    var nTest = ListToNDarrays(test, vectorSize);

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(8, activation: "relu", input_shape: new Shape(vectorSize)));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(16, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train (binary_crossentropy matches the single sigmoid output)
    model.Compile(optimizer: "adam", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(
        nTrain.Item2,
        nTrain.Item1,
        batch_size: 8,
        epochs: 50,
        verbose: 1,
        validation_data: new NDarray[] { nTest.Item2, nTest.Item1 });

    // Save model and weights
    string json = model.ToJson();
    File.WriteAllText("./models/sm_model.json", json);
    model.SaveWeight("./models/sm_model.h5");
}
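// --- Hedged usage sketch (not part of the original source) ---
// Shows how the model saved by SmallNetwork above could be reloaded for inference, using only
// Keras.NET calls that already appear elsewhere in this collection (ModelFromJson, LoadWeight,
// Predict). The method name is illustrative.
public static NDarray PredictWithSmallNetwork(NDarray samples)
{
    var loaded = Sequential.ModelFromJson(File.ReadAllText("./models/sm_model.json"));
    loaded.LoadWeight("./models/sm_model.h5");
    return loaded.Predict(samples); // sigmoid outputs in [0, 1]; threshold at 0.5 for the bool label
}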
public static void BuildModel()
{
    model = new Sequential(new Shape(10));
    model.Add(new Dense(dim: 20, activation: new Model.Layers.Activations.ReLU()));
    model.Add(new Dense(dim: 20, activation: new Model.Layers.Activations.ReLU()));
    model.Add(new Dense(dim: 1)); // linear single-unit output
}
static void Main(string[] args)
{
    SaveRateToFileTrainData("USD");
    SaveRateToFileTestData("USD");

    // Setup engine: ArrayFire backend on CUDA
    Global.UseEngine(SiaNet.Backend.ArrayFire.SiaNetBackend.Instance, DeviceType.CUDA, true);

    var train = PreparingExchangeRateData.LoadTrain();
    var test = PreparingExchangeRateData.LoadTest();

    var model = new Sequential();
    model.EpochEnd += Model_EpochEnd;
    model.Add(new Dense(60, ActType.Sigmoid));
    model.Add(new Dense(60, ActType.Sigmoid));
    model.Add(new Dense(1, ActType.Linear));

    // Compile with Optimizer, Loss and Metric
    model.Compile(OptimizerType.SGD, LossType.MeanSquaredError, MetricType.MSE);

    // Train for 1000 epochs with a batch size of 32
    model.Train(train, epochs: 1000, batchSize: 32);

    // Note: predX below is unused leftover from an XOR example; prediction actually runs on the test set.
    DataFrame2D predX = new DataFrame2D(2);
    predX.Load(0, 0, 0, 1, 1, 0, 1, 1);

    var rawPred = model.Predict(test);
    Console.ReadLine();
}
public static void BuildModel()
{
    model = new Sequential(new Shape(lookback));
    model.Add(new LSTM(dim: 4, returnSequence: true)); // returnSequence: true feeds a full sequence to the next LSTM
    model.Add(new LSTM(dim: 4));
    model.Add(new Dense(dim: 1));
}
public static void FitMnistSimple()
{
    var model = new Sequential();
    model.Add(new Dense(512, activation: "relu", inputShape: new int[] { 784 }));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(512, activation: "relu"));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(10, activation: "softmax"));

    var optimizer = new SGD(lr: 0.01);
    model.Compile("categorical_crossentropy", optimizer, new string[] { "accuracy" });

    var xtrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_xtrain.nda"), FileMode.Open));
    var ytrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_ytrain.nda"), FileMode.Open));

    // Scale pixel values from [0, 255] to [0, 1]
    xtrain = xtrain.Cast(DType.Float32);
    xtrain = Ops.Div(null, xtrain, 255f);
    ytrain = ytrain.Cast(DType.Float32);

    model.Fit(xtrain, ytrain, batchSize: 128, epochs: 20);

    var stream = new FileStream("c:/ttt/mnist-simple.model", FileMode.OpenOrCreate, FileAccess.Write);
    stream.SetLength(0);
    model.Save(stream);
}
private static void BuildMLP(int[] imageDim, int numClasses)
{
    model.Add(new Dense(3072, imageDim[0], OptActivations.ReLU));
    model.Add(new Dense(2000, OptActivations.ReLU));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(numClasses));
}
static void Main(string[] args)
{
    // Load train data (XOR truth table)
    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: "sgd", loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

    // Save model and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");

    var result = loaded_model.Predict(x);
    Console.WriteLine("Prediction for [{0}] = [{1}]", x.ToString(), result.ToString());
}
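// --- Hedged helper sketch (assumption, not in the original) ---
// The XOR model above ends in a single sigmoid unit, so Predict returns probabilities.
// A small helper like this (name and 0.5 threshold are illustrative) turns them into 0/1 labels;
// GetData<float>() is used the same way in TrainXOR later in this collection. Requires System.Linq.
static int[] ToBinaryLabels(NDarray predictions)
{
    float[] raw = predictions.GetData<float>();
    return raw.Select(p => p >= 0.5f ? 1 : 0).ToArray();
}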
// Performs convolutional neural network model training.
// Incorporated parameters include relu and softmax.
// Adds fixed preprocessing layers and pooling; could use further development with exposed parameters.
private static Sequential ProcessCnnModel(Shape input_shape, NDarray x_train, NDarray y_train,
    NDarray x_test, NDarray y_test, int num_classes, string logname, Config config)
{
    // Build CNN model
    Sequential model = new Sequential();
    model.Add(new Conv2D(16, kernel_size: (3, 3).ToTuple(), activation: "relu", input_shape: input_shape));
    model.Add(new Conv2D(32, (3, 3).ToTuple(), activation: "relu"));
    model.Add(new MaxPooling2D(pool_size: (2, 2).ToTuple()));
    model.Add(new Flatten());

    Callback[] callbacks = GetCallbacks(config.isEarlyStop, logname);
    AddNodes(model, config);
    model.Add(new Dense(num_classes, activation: "softmax"));

    // Compile with loss, metrics and optimizer
    model.Compile(loss: "categorical_crossentropy",
        optimizer: new Adam(lr: (float)config.LearnRate, decay: (float)config.LearnDecay),
        metrics: new[] { "accuracy" });

    // Train the model
    model.Fit(x_train, y_train, batch_size: config.Batch, epochs: config.Epochs, verbose: 1,
        validation_data: new[] { x_test, y_test }, callbacks: callbacks);

    return model;
}
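// --- Hedged follow-up sketch (assumption, not in the original) ---
// A natural next step for ProcessCnnModel's caller: score the trained CNN on the held-out set.
// This assumes Keras.NET's Evaluate returns the loss followed by each compiled metric; the method
// name is illustrative.
private static void ReportTestScore(Sequential model, NDarray x_test, NDarray y_test)
{
    var score = model.Evaluate(x_test, y_test);
    Console.WriteLine($"Test loss: {score[0]}, test accuracy: {score[1]}");
}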
public void sequential_guide_2()
{
    var model = new Sequential();
    model.Add(new Dense(32, input_dim: 784));
    model.Add(new Activation("relu"));
}
public RNN(uint n_inputs, uint n_mem, uint[] shape, IActivation act)
{
    n_memory = n_mem;
    n_outputs = shape[shape.Length - 1];
    mem_stack = new float[n_memory, n_outputs + n_inputs];
    flat_mem_stack = Utils.FZerosArray(mem_stack);
    activation = act;

    // Hidden layers: the first layer sees the raw inputs concatenated with the flattened memory
    h_layers = new Sequential();
    h_layers.Add(new Layer((uint)(n_inputs + flat_mem_stack.Length), shape[0], Activations.Tanh));
    for (int i = 1; i < shape.Length; i++)
    {
        h_layers.Add(new Layer(shape[i - 1], shape[i], Activations.Tanh));
    }

    // Set the output layer's activation to the given activation
    Layer lastLayer = (Layer)h_layers.NeuralNetworks[h_layers.NeuralNetworks.Count - 1];
    lastLayer.SetActivation(activation);
    h_layers.NeuralNetworks[h_layers.CountLayer() - 1] = lastLayer;
}
public static void BuildModel()
{
    model = new Sequential(new Shape(trainData.Features.DataShape[0]));
    model.Add(new Dense(dim: 20, activation: new SiaNet.Layers.Activations.ReLU()));
    model.Add(new Dense(dim: 20, activation: new SiaNet.Layers.Activations.ReLU()));
    model.Add(new Dense(dim: trainData.Labels.DataShape[0]));
}
public static void Run()
{
    // Load train data (XOR truth table)
    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    // Build sequential model
    var model = new Sequential();
    model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dense(1, activation: "sigmoid"));

    // Compile and train
    model.Compile(optimizer: new Adam(), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
    var history = model.Fit(x, y, batch_size: 2, epochs: 100, verbose: 1);
    var logs = history.HistoryLogs;

    // Save model and weights
    string json = model.ToJson();
    File.WriteAllText("model.json", json);
    model.SaveWeight("model.h5");

    // Load model and weights
    var loaded_model = Sequential.ModelFromJson(File.ReadAllText("model.json"));
    loaded_model.LoadWeight("model.h5");
}
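// --- Hedged follow-up sketch (assumption, not in the original) ---
// Run() above captures history.HistoryLogs but never reads it. Assuming Keras.NET's HistoryLogs
// maps each metric name to its per-epoch values, the final numbers could be printed like this.
// Requires System.Linq and System.Collections.Generic; the method name is illustrative.
static void PrintFinalMetrics(Dictionary<string, double[]> logs)
{
    foreach (var entry in logs)
        Console.WriteLine($"{entry.Key}: final epoch = {entry.Value.Last()}");
}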
public void mlp_should_learn_all(
    [ValueSource("Backends")] string backend,
    [ValueSource("Targets")] float[] y,
    [Values(false, true)] bool useBias)
{
    KerasSharp.Backends.Current.Switch(backend);

    var model = new Sequential();
    model.Add(new Dense(5, input_dim: 2,
        kernel_initializer: new GlorotUniform(),
        bias_initializer: new GlorotUniform(),
        use_bias: useBias,
        activation: new Sigmoid()));
    model.Add(new Dense(1,
        kernel_initializer: new GlorotUniform(),
        bias_initializer: new GlorotUniform(),
        use_bias: useBias,
        activation: new Sigmoid()));

    model.Compile(loss: new MeanSquareError(), optimizer: new SGD(lr: 1), metrics: new[] { new Accuracy() });
    model.fit(x, y, epochs: 1000, batch_size: y.Length);

    double[] pred = Matrix.Round(model.predict(x, batch_size: y.Length)[0].To<double[,]>()).GetColumn(0);
    Assert.AreEqual(y, pred);
}
public void sequential_guide_stateful_stacked_lstm()
{
    int data_dim = 16;
    int timesteps = 8;
    int num_classes = 10;
    int batch_size = 32;

    // Expected input batch shape: (batch_size, timesteps, data_dim).
    // Note that we have to provide the full batch_input_shape since the network is stateful:
    // the sample of index i in batch k is the follow-up for the sample i in batch k-1.
    var model = new Sequential();
    model.Add(new LSTM(32, return_sequences: true, stateful: true,
        batch_input_shape: new int?[] { batch_size, timesteps, data_dim }));
    model.Add(new LSTM(32, return_sequences: true, stateful: true));
    model.Add(new LSTM(32, stateful: true));
    model.Add(new Dense(10, activation: "softmax"));

    model.Compile(loss: "categorical_crossentropy", optimizer: "rmsprop", metrics: new[] { "accuracy" });

    // Generate dummy training data (see the RandomSequences sketch below for one way to fill these)
    double[][][] x_train = null; // TODO: Add better method in Accord
    int[] y_train = Accord.Math.Vector.Random(1000, min: 0, max: num_classes);

    // Generate dummy validation data
    double[,,] x_val = null; // TODO: Add better method in Accord
    int[] y_val = Accord.Math.Vector.Random(1000, min: 0, max: num_classes);

    model.fit(x_train, y_train, batch_size: batch_size, epochs: 5,
        shuffle: Shuffle.False, validation_data: new Array[] { x_val, y_val });
}
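// --- Hedged helper sketch (assumption, not in the original) ---
// One way to fill the jagged (samples, timesteps, data_dim) x_train placeholder that the TODO
// above leaves null, using plain System.Random rather than a not-yet-existing Accord helper.
// The name and seed parameter are illustrative. Requires System.Linq.
static double[][][] RandomSequences(int samples, int timesteps, int dataDim, int seed = 0)
{
    var rng = new Random(seed);
    return Enumerable.Range(0, samples)
        .Select(_ => Enumerable.Range(0, timesteps)
            .Select(__ => Enumerable.Range(0, dataDim)
                .Select(___ => rng.NextDouble())
                .ToArray())
            .ToArray())
        .ToArray();
}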
public static void Main(string[] args)
{
    Sequential Seq = new Sequential();
    Seq.Add(new Dense(32, activation: "relu", input_shape: new Shape(250, 250, 3)));
    Seq.Add(new Dense(64, activation: "relu"));
    Seq.Add(new Dense(1, activation: "sigmoid"));

    Console.WriteLine(Backend.GetBackend());

    // Build a backend function from the first layer's input to the second layer's output
    var function = Backend.Function(Seq.Layers(0), Seq.Layers(1));
    Console.WriteLine(function);

    NDarray x = np.array(new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } });
    NDarray y = np.array(new float[] { 0, 1, 1, 0 });

    var data = Backend.ones();
    KerasIterator iter = new KerasIterator(data);
    var z = new PyIter(iter.PyObject);
    z.MoveNext();
    var output = z.Current;

    var res = function(data);
    Console.WriteLine("function Results:");
    Console.WriteLine(res);
}
public void sequential_guide_stacked_lstm()
{
    int data_dim = 16;
    int timesteps = 8;
    int num_classes = 10;

    // Expected input data shape: (batch_size, timesteps, data_dim)
    var model = new Sequential();
    model.Add(new LSTM(32, return_sequences: true, input_shape: new[] { timesteps, data_dim })); // returns a sequence of vectors of dimension 32
    model.Add(new LSTM(32, return_sequences: true)); // returns a sequence of vectors of dimension 32
    model.Add(new LSTM(32)); // returns a single vector of dimension 32
    model.Add(new Dense(10, activation: "softmax"));

    model.Compile(loss: "categorical_crossentropy", optimizer: "rmsprop", metrics: new[] { "accuracy" });

    // Generate dummy training data (see the RandomSequences sketch above for one way to fill these)
    double[][][] x_train = null; // TODO: Add better method in Accord
    int[] y_train = Accord.Math.Vector.Random(1000, min: 0, max: num_classes);

    // Generate dummy validation data
    double[,,] x_val = null; // TODO: Add better method in Accord
    int[] y_val = Accord.Math.Vector.Random(1000, min: 0, max: num_classes);

    model.fit(x_train, y_train, batch_size: 64, epochs: 5, validation_data: new Array[] { x_val, y_val });
}
public void sequential_guide_mlp_binary()
{
    // Generate dummy data.
    // Note: these labels span 0..9, though the sigmoid + binary_crossentropy head below expects 0/1 targets.
    double[,] x_train = Accord.Math.Matrix.Random(1000, 20);
    int[] y_train = Accord.Math.Vector.Random(1000, min: 0, max: 10);
    double[,] x_test = Accord.Math.Matrix.Random(1000, 20);
    int[] y_test = Accord.Math.Vector.Random(1000, min: 0, max: 10);

    var model = new Sequential();
    model.Add(new Dense(64, input_dim: 20, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(1, activation: "sigmoid"));

    model.Compile(loss: "binary_crossentropy", optimizer: "rmsprop", metrics: new[] { "accuracy" });

    model.fit(x_train, y_train, epochs: 20, batch_size: 128);
    var score = model.evaluate(x_test, y_test, batch_size: 128);
}
public void sequential_guide_mlp_multiclass()
{
    // Generate dummy data
    double[,] x_train = Accord.Math.Matrix.Random(1000, 20);
    int[] y_train = Accord.Math.Vector.Random(1000, min: 0, max: 10);
    double[,] x_test = Accord.Math.Matrix.Random(1000, 20);
    int[] y_test = Accord.Math.Vector.Random(1000, min: 0, max: 10);

    var model = new Sequential();
    // Dense(64) is a fully-connected layer with 64 hidden units.
    // In the first layer, you must specify the expected input data shape:
    // here, 20-dimensional vectors.
    model.Add(new Dense(64, activation: "relu", input_dim: 20));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(64, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(10, activation: "softmax"));

    var sgd = new SGD(lr: 0.01, decay: 1e-6, momentum: 0.9, nesterov: true);
    model.Compile(loss: "categorical_crossentropy", optimizer: sgd, metrics: new[] { "accuracy" });

    model.fit(x_train, y_train, epochs: 20, batch_size: 128);
    var score = model.evaluate(x_test, y_test, batch_size: 128);
}
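// --- Hedged helper sketch (assumption, not in the original) ---
// categorical_crossentropy in Keras conventionally expects one-hot targets, while the guide above
// passes integer class ids. If KerasSharp follows the same convention, labels can be encoded first;
// the helper name is illustrative.
static double[,] OneHot(int[] labels, int numClasses)
{
    var encoded = new double[labels.Length, numClasses];
    for (int i = 0; i < labels.Length; i++)
        encoded[i, labels[i]] = 1.0; // a single 1 per row, at the class index
    return encoded;
}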
private static void BuildMLP(int[] imageDim, int numClasses)
{
    model.Add(new Dense(dim: 3072, shape: imageDim[0], act: OptActivations.ReLU));
    model.Add(new Dense(dim: 2000, act: OptActivations.ReLU));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(dim: numClasses));
}
static void Main(string[] args)
{
    // Setup Engine
    Global.UseEngine(SiaNet.Backend.MxNetLib.SiaNetBackend.Instance, DeviceType.CPU);

    // Prep Data
    var (x, y) = PrepDataset();
    x.Head();
    DataFrameIter trainSet = new DataFrameIter(x, y);

    // Build model with simple fully connected layers
    var model = new Sequential();
    model.EpochEnd += Model_EpochEnd;
    model.Add(new Dense(64, ActType.ReLU));
    model.Add(new Dense(1, ActType.Sigmoid));

    // Compile with Optimizer, Loss and Metric
    model.Compile(OptimizerType.SGD, LossType.MeanSquaredError, MetricType.BinaryAccurary);

    // Train for 1000 epochs with a batch size of 2
    model.Train(trainSet, 1000, 2);

    // Create prediction data to evaluate; result should be 0 and 1
    DataFrame2D predX = new DataFrame2D(2);
    predX.Load(0, 0, 0, 1);

    var rawPred = model.Predict(predX);
    Console.ReadLine();
}
public static void BuildModel()
{
    model = new Sequential();
    model.Add(new Dense(dim: 2, shape: 2, act: OptActivations.Sigmoid));
    model.Add(new Dense(dim: 2));
    model.OnEpochEnd += Model_OnEpochEnd;
}
public static void BuildModel()
{
    model = new Sequential();
    model.Add(new Dense(dim: 2, shape: 2, act: OptActivations.Sigmoid,
        weightInitializer: new Model.Initializers.Xavier()));
    model.Add(new Dense(dim: 2));
    model.OnEpochEnd += Model_OnEpochEnd;
}
private static void createModel()
{
    model = new Sequential();
    model.Add(new Dense(INPUT_LAYER_SIZE, activation: "sigmoid" /*, input_dim: 1*/)); // relu works better
    model.Add(new Dense(ASSOCIATIONS_LAYER_SIZE /* *5 better */, activation: "sigmoid")); // relu works better
    model.Add(new Dense(RESULT_LAYER_SIZE, activation: "sigmoid"));
    model.Compile(loss: "mean_squared_error" /* binary_crossentropy works better */,
        optimizer: new SGD(lr: learningRate),
        metrics: new string[] { "accuracy" });
}
private static void BuildMLP()
{
    model = new Sequential(new Shape(imgDim));
    model.Add(new Dense(dim: 200, activation: new SiaNet.Layers.Activations.ReLU()));
    model.Add(new Dense(dim: 400, activation: new SiaNet.Layers.Activations.ReLU()));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(dim: labelDim));
}
public static void Run()
{
    // Create the XOR dataset: four 2-d inputs and their labels
    var trainX = new NDArray(new float[] { 0, 0, 0, 1, 1, 0, 1, 1 }).Reshape(4, 2);
    var trainY = new NDArray(new float[] { 0, 1, 1, 0 });

    var batch_size = 2;
    var train_data = new NDArrayIter(trainX, trainY, batch_size);
    var val_data = new NDArrayIter(trainX, trainY, batch_size);

    var net = new Sequential();
    net.Add(new Dense(64, ActivationType.Relu));
    net.Add(new Dense(1));

    // Use every available GPU, falling back to CPU
    var gpus = TestUtils.ListGpus();
    var ctxList = gpus.Count > 0
        ? gpus.Select(x => Context.Gpu(x)).ToArray()
        : new[] { Context.Cpu() };

    net.Initialize(new Uniform(), ctxList.ToArray());
    var trainer = new Trainer(net.CollectParams(), new Adam());

    var epoch = 1000;
    var metric = new BinaryAccuracy();
    var binary_crossentropy = new LogisticLoss();
    float lossVal = 0;

    for (var iter = 0; iter < epoch; iter++)
    {
        train_data.Reset();
        lossVal = 0;
        while (!train_data.End())
        {
            var batch = train_data.Next();
            var data = Utils.SplitAndLoad(batch.Data[0], ctxList);
            var label = Utils.SplitAndLoad(batch.Label[0], ctxList);
            NDArrayList outputs = null;

            // Record the forward pass so gradients can flow back through the loss
            using (var ag = Autograd.Record())
            {
                outputs = Enumerable.Zip(data, label, (x, y) =>
                {
                    var z = net.Call(x);
                    NDArray loss = binary_crossentropy.Call(z, y);
                    loss.Backward();
                    lossVal += loss.Mean();
                    return z;
                }).ToList();
            }

            metric.Update(label, outputs.ToArray());
            trainer.Step(batch.Data[0].Shape[0]);
        }

        var (name, acc) = metric.Get();
        metric.Reset();
        Console.WriteLine($"Loss: {lossVal}");
        Console.WriteLine($"Training acc at epoch {iter}: {name}={acc * 100}%");
    }
}
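// --- Hedged follow-up sketch (assumption, not in the original) ---
// Scores the trained XOR net with a single forward pass. net.Call is invoked the same way as
// inside Autograd.Record() in Run() above; since LogisticLoss was used, the raw outputs are
// logits, so positive values correspond to class 1. Passing trainX directly, rather than routing
// batches through Utils.SplitAndLoad, assumes a single-context setup.
private static void PrintXorPredictions(Sequential net, NDArray trainX)
{
    var logits = net.Call(trainX);
    Console.WriteLine(logits); // expect signs matching the labels 0, 1, 1, 0
}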
private static Sequential BuildFCModel()
{
    Sequential model = new Sequential();
    model.Add(new Dense(dim: 784, activation: ActType.ReLU));
    model.Add(new Dense(dim: 10, activation: ActType.Softmax));
    return model;
}
public void TrainXOR()
{
    try
    {
        // Load train data
        float[,] testX = new float[,] { { 0, 1 } };
        float[,] x = new float[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
        float[] y = new float[] { 0, 1, 1, 0 };

        // Build sequential model
        var model = new Sequential();
        model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
        model.Add(new Dense(32, activation: "relu"));
        model.Add(new Dropout(0.1d));
        model.Add(new Dense(1, activation: "sigmoid"));

        // Compile and train
        var optimizer = new Adam();
        model.Compile(optimizer: optimizer, loss: "mse", metrics: new string[] { "accuracy" });
        model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

        // Warm up both prediction paths before timing them
        float[] predicts;
        predicts = model.Predict(x).GetData<float>();
        predicts = model.PredictOnBatch(x).GetData<float>();
        predicts = model.Predict(x).GetData<float>();
        predicts = model.PredictOnBatch(x).GetData<float>();
        predicts = model.Predict(x).GetData<float>();
        predicts = model.PredictOnBatch(x).GetData<float>();

        // Time PredictOnBatch vs Predict over five calls each
        Stopwatch watch = new Stopwatch();
        watch.Restart();
        for (int i = 0; i < 5; ++i)
        {
            predicts = model.PredictOnBatch(testX).GetData<float>();
        }
        watch.Stop();
        string batchMs = watch.GetElapsedMilliseconds().ToString();

        watch.Restart();
        for (int i = 0; i < 5; ++i)
        {
            predicts = model.Predict(testX).GetData<float>();
        }
        watch.Stop();
        // Timings were originally reported to the UI via a commented-out dispatcher call
    }
    catch (Exception ex)
    {
        // Exception was originally reported to the UI via a commented-out dispatcher call
    }
}
public static void BuildModel()
{
    model = new Sequential();
    model.Add(new Reshape(targetshape: Shape.Create(1, train.XFrame.Shape[1]), shape: Shape.Create(lookback)));
    model.Add(new LSTM(dim: 5, shape: Shape.Create(1, train.XFrame.Shape[1])));
    model.Add(new Dense(dim: 1));
    model.OnEpochEnd += Model_OnEpochEnd;
    model.OnTrainingEnd += Model_OnTrainingEnd;
}
public static void BuildModel()
{
    model = new Sequential();
    model.Add(new Dense(dim: 20, shape: 13, act: OptActivations.LeakyReLU));
    model.Add(new Dense(dim: 13, act: OptActivations.LeakyReLU));
    model.Add(new Dropout(rate: 0.2));
    model.Add(new Dense(dim: 1, act: OptActivations.LeakyReLU));
    model.OnEpochEnd += Model_OnEpochEnd;
    model.OnTrainingEnd += Model_OnTrainingEnd;
}
public static void BuildModel()
{
    model = new Sequential();
    model.Add(new LSTM(dim: 4, shape: Shape.Create(lookback), returnSequence: true));
    model.Add(new LSTM(dim: 4, shape: Shape.Create(lookback)));
    model.Add(new Dense(dim: 1));
    model.OnEpochEnd += Model_OnEpochEnd;
    model.OnTrainingEnd += Model_OnTrainingEnd;
}