Example #1
        static void Main(string[] args)
        {
            //Setup Engine

            Global.UseEngine(SiaNet.Backend.MxNetLib.SiaNetBackend.Instance, DeviceType.CPU);

            //Prep Data
            var(x, y) = PrepDataset();
            x.Head();
            DataFrameIter trainSet = new DataFrameIter(x, y);

            //Build model with simple fully connected layers
            var model = new Sequential();

            model.EpochEnd += Model_EpochEnd;
            model.Add(new Dense(64, ActType.ReLU));
            model.Add(new Dense(1, ActType.Sigmoid));

            //Compile with Optimizer, Loss and Metric
            model.Compile(OptimizerType.SGD, LossType.MeanSquaredError, MetricType.BinaryAccurary);

            // Train for 1000 epochs with a batch size of 2
            model.Train(trainSet, 1000, 2);

            //Create prediction data to evaluate
            DataFrame2D predX = new DataFrame2D(2);

            predX.Load(0, 0, 0, 1); //Result should be 0 and 1

            var rawPred = model.Predict(predX);

            Console.ReadLine();
        }
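The snippet references two helpers that are not shown: PrepDataset() and the Model_EpochEnd handler. Below is a minimal sketch of what they could look like, assuming the dataset is the XOR truth table (consistent with the predX comment above) and assuming the EpochEnd event delivers an args object exposing Epoch, Loss and Metric; both the names and the shapes are assumptions, not the original code.

        // Hypothetical helpers, not part of the original snippet
        static (DataFrame2D, DataFrame2D) PrepDataset()
        {
            // XOR truth table: two input columns, one target column
            DataFrame2D x = new DataFrame2D(2);
            x.Load(0, 0, 0, 1, 1, 0, 1, 1);

            DataFrame2D y = new DataFrame2D(1);
            y.Load(0, 1, 1, 0);

            return (x, y);
        }

        // Assumed event-args shape: Epoch, Loss and Metric properties
        private static void Model_EpochEnd(object sender, EpochEndEventArgs e)
        {
            Console.WriteLine($"Epoch: {e.Epoch}, Loss: {e.Loss}, Metric: {e.Metric}");
        }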
Example #2
        static void Main(string[] args)
        {
            SaveRateToFileTrainData("USD");
            SaveRateToFileTestData("USD");
            Global.UseEngine(SiaNet.Backend.ArrayFire.SiaNetBackend.Instance, DeviceType.CUDA, true);


            var train = PreparingExchangeRateData.LoadTrain();
            var test  = PreparingExchangeRateData.LoadTest();

            var model = new Sequential();

            model.EpochEnd += Model_EpochEnd;
            model.Add(new Dense(60, ActType.Sigmoid));
            model.Add(new Dense(60, ActType.Sigmoid));
            model.Add(new Dense(1, ActType.Linear));

            //Compile with Optimizer, Loss and Metric
            model.Compile(OptimizerType.SGD, LossType.MeanSquaredError, MetricType.MSE);
            // Train for 1000 epochs with a batch size of 32
            model.Train(train, epochs: 1000, batchSize: 32);

            //Unused XOR prediction frame (prediction below runs on the test set)
            DataFrame2D predX = new DataFrame2D(2);

            predX.Load(0, 0, 0, 1, 1, 0, 1, 1); //XOR result would be 0, 1, 1, 0

            var rawPred = model.Predict(test);

            Console.ReadLine();
        }
Example #3
        public static void Run()
        {
            var(x, y) = LoadTrain();
            var        test  = LoadTest();
            Sequential model = new Sequential();

            model.Add(new Dense(32, activation: SuperNeuro.Engine.ActType.ReLU));
            model.Add(new Dense(32, activation: SuperNeuro.Engine.ActType.ReLU));
            model.Add(new Dense(1));

            model.EpochEnd += Model_EpochEnd;
            model.Compile(SuperNeuro.Engine.OptimizerType.Adam, SuperNeuro.Engine.LossType.MeanSquaredError, SuperNeuro.Engine.MetricType.MAE);
            model.Train(new SuperNeuro.Data.DataFrameIter(x, y), 100, 32);
        }
Example #4
        public static void Run()
        {
            var(x, y) = LoadTrain();
            Sequential model = new Sequential();

            model.Add(new Dense(32, activation: SuperNeuro.Engine.ActType.ReLU));
            model.Add(new Dense(32, activation: SuperNeuro.Engine.ActType.ReLU));
            model.Add(new Dense(1, activation: SuperNeuro.Engine.ActType.Sigmoid));

            model.EpochEnd += Model_EpochEnd;
            model.Compile(SuperNeuro.Engine.OptimizerType.Adam, SuperNeuro.Engine.LossType.BinaryCrossEntropy, SuperNeuro.Engine.MetricType.BinaryAccurary);
            model.Train(new SuperNeuro.Data.DataFrameIter(x, y), 100, 32);

            var test = model.Predict(LoadTest());

            test.Head();
        }
Example #5
        private static void RunTest()
        {
            Random    Rnd      = new Random();
            DataFrame trnX_fin = new DataFrame();
            DataFrame trnY_fin = new DataFrame();

            for (int cc = 0; (cc < 100); cc++)
            {
                float[] sngLst = new float[100];
                for (int indx = 0; (indx < 100); indx++)
                {
                    sngLst[indx] = (float)Rnd.NextDouble();
                }

                trnX_fin.Add(sngLst);
            }

            for (int cc = 0; (cc < 100); cc++)
            {
                float[] sngLst = new float[3];
                //  fake one-hot label, just for the check
                sngLst[0] = 0;
                sngLst[1] = 1;
                sngLst[2] = 0;
                trnY_fin.Add(sngLst);
            }

            XYFrame XYfrm = new XYFrame();

            XYfrm.XFrame = trnX_fin;
            XYfrm.YFrame = trnY_fin;
            //  Split
            TrainTestFrame trainTestFrame = XYfrm.SplitTrainTest(0.3);
            //  init some values
            int        shape_of_input = XYfrm.XFrame.Shape[1];
            int        embval         = 100;
            int        seed           = 2;
            Sequential model          = new Sequential();

            model.Add(new Reshape(Shape.Create(1, embval), Shape.Create(shape_of_input)));
            model.Add(new LSTM(64,
                               returnSequence: false,
                               cellDim: 4,
                               weightInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed),
                               recurrentInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed),
                               biasInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed)));
            model.Add(new Dense(3, act: "sigmoid", useBias: true, weightInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed)));
            model.Compile(OptOptimizers.Adam, OptLosses.MeanSquaredError, OptMetrics.Accuracy);
            model.Train(trainTestFrame.Train, 200, 8, trainTestFrame.Test);
        }
Example #6
        public void ConvolutionalMnist()
        {
            // Load Data
            var lines = File.ReadAllLines("..\\..\\..\\..\\..\\datasets\\mnist_train.csv").ToList();

            var mnistLables = new List <Matrix>();
            var mnistData   = new List <Matrix>();

            for (var j = 1; j < lines.Count; j++)
            {
                var t    = lines[j];
                var data = t.Split(',').ToList();
                mnistLables.Add(new Matrix(1, 1).Fill(int.Parse(data[0])));

                var mnist = new Matrix(784, 1);
                for (var i = 1; i < data.Count; i++)
                {
                    mnist[i - 1, 0] = double.Parse(data[i]);
                }

                mnistData.Add(mnist);
            }

            // Create Network
            var net = new Sequential(new QuadraticCost(), new NesterovMomentum(0.03));

            net.CreateLayer(new Convolutional(784, new Average(), new Tanh()));
            net.CreateLayer(new Convolutional(100, new Average(), new Tanh()));
            net.CreateLayer(new Dense(100, new Tanh()));
            net.CreateLayer(new Output(10, new Softmax()));
            net.InitNetwork();

            // Train Network
            for (var i = 0; i < 800; i++)
            {
                net.Train(mnistData[i % mnistData.Count], mnistLables[i % mnistData.Count]);
            }

            // Write Acc Result
            // Trace.WriteLine(" Metrics Accuracy: " + acc);
            // Assert.IsTrue(acc > 80.0, "Network did not learn MNIST");
        }
Example #7
        static void Main(string[] args)
        {
            Global.UseGpu();

            Tensor x = Tensor.FromArray(Global.Device, new float[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 });

            x = x.Reshape(3, 3);

            var result = TOps.Diag(x);

            result.Print();

            string datasetFolder = @"C:\dataset\MNIST";
            bool   useDenseModel = false;

            var((trainX, trainY), (valX, valY)) = MNISTParser.LoadDataSet(datasetFolder, trainCount: 60000, testCount: 10000, flatten: useDenseModel);
            Console.WriteLine("Train and Test data loaded");
            DataFrameIter trainIter = new DataFrameIter(trainX, trainY);
            DataFrameIter valIter   = new DataFrameIter(valX, valY);

            Sequential model = null;

            if (useDenseModel)
            {
                model = BuildFCModel();
            }
            else
            {
                model = BuildConvModel();
            }

            model.Compile(OptimizerType.Adam, LossType.CategorialCrossEntropy, MetricType.Accuracy);
            Console.WriteLine("Model compiled.. initiating training");

            model.EpochEnd += Model_EpochEnd;

            model.Train(trainIter, 10, 32, valIter);

            Console.ReadLine();
        }
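BuildFCModel() and BuildConvModel() are not shown in this snippet. A sketch of the dense variant follows, using only the Dense layer and ActType values that appear in the other examples and assuming ActType also exposes a Softmax activation for the 10-class output; the convolutional variant is omitted because it depends on convolution layer types not shown anywhere in these samples.

        // Hypothetical dense MNIST model: 784 flattened pixels in, 10 class scores out
        private static Sequential BuildFCModel()
        {
            Sequential model = new Sequential();

            model.Add(new Dense(128, ActType.ReLU));
            model.Add(new Dense(64, ActType.ReLU));
            model.Add(new Dense(10, ActType.Softmax));

            return model;
        }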
Example #8
        static void Main(string[] args)
        {
            //Setup Engine
            Global.UseEngine(SiaNet.Backend.ArrayFire.SiaNetBackend.Instance, DeviceType.CPU);

            //Prep Data
            var(x, y) = PrepDataset();
            DataFrameIter trainSet = new DataFrameIter(x, y);

            //Build model with simple fully connected layers
            var model = new Sequential();

            model.EpochEnd += Model_EpochEnd;
            model.Add(new Dense(100, ActType.ReLU));
            model.Add(new Dense(50, ActType.ReLU));
            model.Add(new Dense(1, ActType.Sigmoid));

            //Compile with Optimizer, Loss and Metric
            model.Compile(OptimizerType.Adam, LossType.MeanSquaredError, MetricType.MAE);

            // Train for 25 epochs with a batch size of 2
            model.Train(trainSet, 25, 2);
        }
Example #9
        static void Main(string[] args)
        {
            //Setup Engine
            Global.UseEngine(SiaNet.Backend.ArrayFire.SiaNetBackend.Instance, DeviceType.Default);

            var train = LoadTrain(); //Load train data
            var test  = LoadTest();  //Load test data

            var model = new Sequential();

            model.EpochEnd += Model_EpochEnd;
            model.Add(new Dense(128, ActType.ReLU));
            model.Add(new Dense(64, ActType.ReLU));
            model.Add(new Dense(1, ActType.Sigmoid));

            //Compile with Optimizer, Loss and Metric
            model.Compile(OptimizerType.Adam, LossType.BinaryCrossEntropy, MetricType.BinaryAccurary);

            // Train for 100 epochs with a batch size of 200
            model.Train(train, epochs: 100, batchSize: 200);

            var prediction = model.Predict(test);
        }
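LoadTrain() and LoadTest() are not shown either. One possible shape for them, written here as a single hypothetical LoadCsv helper, assuming a comma-separated file with a header row, featureCount feature columns followed by a 0/1 label column, the usual System.IO / System.Linq / System.Collections.Generic usings, and that DataFrame2D.Load accepts a float array just like the params-style calls above:

        // Hypothetical CSV loader producing the DataFrameIter consumed by model.Train
        static DataFrameIter LoadCsv(string path, int featureCount)
        {
            var xValues = new List<float>();
            var yValues = new List<float>();

            foreach (var line in File.ReadAllLines(path).Skip(1)) // skip the header row
            {
                var cols = line.Split(',').Select(float.Parse).ToArray();
                xValues.AddRange(cols.Take(featureCount)); // feature columns
                yValues.Add(cols[featureCount]);           // label column
            }

            DataFrame2D x = new DataFrame2D(featureCount);
            x.Load(xValues.ToArray());

            DataFrame2D y = new DataFrame2D(1);
            y.Load(yValues.ToArray());

            return new DataFrameIter(x, y);
        }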
Example #10
        static void Main(string[] args)
        {
            //Setup Engine
            Global.UseEngine(SiaNet.Backend.ArrayFire.SiaNetBackend.Instance, DeviceType.CPU);

            //Load Train CSV data
            var ds = LoadTrain("./train.csv");

            //Build Model
            var model = new Sequential();

            model.EpochEnd += Model_EpochEnd;
            model.Add(new Dense(64, activation: ActType.ReLU));
            model.Add(new Dense(32, activation: ActType.ReLU));
            model.Add(new Dense(1, activation: ActType.Sigmoid));

            //Compile with Optimizer, Loss and Metric
            model.Compile(OptimizerType.Adam, LossType.MeanSquaredError, MetricType.MAE);

            // Train for 25 epochs with a batch size of 32
            model.Train(ds, 25, 32);

            Console.ReadLine();
        }
Example #11
 public static void Train()
 {
     model.Compile(OptOptimizers.Adam, OptLosses.MeanSquaredError, OptMetrics.MSLE);
     model.Train(traintest.Train, 500, 32, traintest.Test);
 }
Example #12
 public static void Train()
 {
     //model.Compile(OptOptimizers.SGD, OptLosses.CrossEntropy, OptMetrics.Accuracy);
     model.Compile(new SGD(0.01), OptLosses.CrossEntropy, OptMetrics.Accuracy, Regulizers.RegL2(0.01));
     model.Train(train, 25, 64, null);
 }
Example #13
 public static void Train()
 {
     model.Compile(OptOptimizers.SGD, OptLosses.CrossEntropy, OptMetrics.Accuracy);
     model.Train(trainData, 100, 2);
 }
Example #14
 public static void Train()
 {
     //model.Compile(OptOptimizers.SGD, OptLosses.CrossEntropy, OptMetrics.Accuracy);
     model.Compile(new SGD(0.003125), OptLosses.CrossEntropy, OptMetrics.Accuracy);
     model.Train(train, 5, 32, null);
 }
Example #15
 public static void Train()
 {
     model.Compile(OptOptimizers.Adam, OptLosses.MeanSquaredError, OptMetrics.MSE);
     model.Train(train, 10, 64);
 }
Example #16
        public void AdaDeltaTest()
        {
            var net = new Sequential(new QuadraticCost(), new AdaDelta(0.03));

            net.CreateLayer(new Dense(3, new Tanh()));
            net.CreateLayer(new Dense(3, new Tanh()));
            net.CreateLayer(new Dense(3, new Tanh()));
            net.CreateLayer(new Output(1, new Tanh()));

            net.InitNetwork();

            var inputs  = new List <Matrix>();
            var outputs = new List <Matrix>();

            // 0 0 0    => 0
            inputs.Add(new Matrix(new[, ] {
                { 0.0 }, { 0.0 }, { 0.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 0.0 }
            }));

            // 0 0 1    => 1
            inputs.Add(new Matrix(new[, ] {
                { 0.0 }, { 0.0 }, { 1.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 1.0 }
            }));

            // 0 1 0    => 1
            inputs.Add(new Matrix(new[, ] {
                { 0.0 }, { 1.0 }, { 0.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 1.0 }
            }));

            // 0 1 1    => 0
            inputs.Add(new Matrix(new[, ] {
                { 0.0 }, { 1.0 }, { 1.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 0.0 }
            }));

            // 1 0 0    => 1
            inputs.Add(new Matrix(new[, ] {
                { 1.0 }, { 0.0 }, { 0.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 1.0 }
            }));

            // 1 0 1    => 0
            inputs.Add(new Matrix(new[, ] {
                { 1.0 }, { 0.0 }, { 1.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 0.0 }
            }));

            // 1 1 0    => 0
            inputs.Add(new Matrix(new[, ] {
                { 1.0 }, { 1.0 }, { 0.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 0.0 }
            }));

            // 1 1 1    => 1
            inputs.Add(new Matrix(new[, ] {
                { 1.0 }, { 1.0 }, { 1.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 1.0 }
            }));

            for (var i = 0; i < 8; i++)
            {
                net.Train(inputs[i % 8], outputs[i % 8]);
            }

            var correct = 0;

            for (var i = 0; i < 10; i++)
            {
                // each output should be within 0.1 of the expected 3-bit XOR result
                correct += Math.Abs(net.Forward(inputs[0])[0, 0]) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[1])[0, 0] - 1) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[2])[0, 0] - 1) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[3])[0, 0]) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[4])[0, 0] - 1) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[5])[0, 0]) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[6])[0, 0]) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[7])[0, 0] - 1) < 0.1 ? 1 : 0;
            }
            var acc = correct / 80.0 * 100.0;

            Trace.WriteLine(" Acc: " + acc);
            Assert.IsTrue(acc > 80.0, "Network did not learn XOR");
        }
Example #17
        public void DenseMnist()
        {
            // Load Train Data
            var lines = File.ReadAllLines("..\\..\\..\\..\\..\\datasets\\mnist_train.csv").ToList();

            var mnistLables = new List <Matrix>();
            var mnistData   = new List <Matrix>();

            for (var j = 1; j < lines.Count; j++)
            {
                var t    = lines[j];
                var data = t.Split(',').ToList();
                mnistLables.Add(new Matrix(10, 1).Fill(0));
                mnistLables[j - 1][int.Parse(data[0]), 0] = 1.0;

                var mnist = new Matrix(784, 1);
                for (var i = 1; i < data.Count; i++)
                {
                    mnist[i - 1, 0] = double.Parse(data[i]);
                }

                mnistData.Add(mnist);
            }

            // Load Test Data
            var testlines = File.ReadAllLines("..\\..\\..\\..\\..\\datasets\\mnist_test.csv").ToList();

            var mnistTestLables = new List <Matrix>();
            var mnistTestData   = new List <Matrix>();

            for (var j = 1; j < testlines.Count; j++)
            {
                var t    = testlines[j];
                var data = t.Split(',').ToList();
                mnistTestLables.Add(new Matrix(10, 1).Fill(0));
                mnistTestLables[j - 1][int.Parse(data[0]), 0] = 1.0;

                var mnist = new Matrix(784, 1);
                for (var i = 1; i < data.Count; i++)
                {
                    mnist[i - 1, 0] = double.Parse(data[i]);
                }

                mnistTestData.Add(mnist);
            }

            // Create Network
            var net = new Sequential(new CategoricalCrossEntropy(), new Adam(0.3), null, 128);

            net.CreateLayer(new Dense(784, new Relu()));
            net.CreateLayer(new Dense(128, new Relu()));
            net.CreateLayer(new Output(10, new Softmax()));
            net.InitNetwork();

            // Train Network
            for (var i = 0; i < mnistData.Count / 100; i++)
            {
                net.Train(mnistData[i % mnistData.Count], mnistLables[i % mnistData.Count]);
            }

            // Test Network
            for (var i = 0; i < mnistTestData.Count / 100; i++)
            {
                var mat  = net.Forward(mnistTestData[i % mnistTestData.Count]);
                var matx = mnistTestLables[i % mnistTestData.Count];
            }

            Trace.WriteLine(" Metrics Accuracy: ");
            //Assert.IsTrue(acc > 80.0, "Network did not learn MNIST");
        }
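The test loop above computes the network output (mat) and the expected one-hot label (matx) but never compares them, so the accuracy trace stays empty. Below is a sketch of the missing comparison, using only the Matrix indexer and net.Forward seen above (argmax of the prediction vs. argmax of the label); the 80% assertion is kept commented out, as in the original.

            // Hypothetical completion of the test loop: count argmax matches
            var correct = 0;
            var total   = mnistTestData.Count / 100;

            for (var i = 0; i < total; i++)
            {
                var mat  = net.Forward(mnistTestData[i]);
                var matx = mnistTestLables[i];

                int predicted = 0, expected = 0;
                for (var r = 1; r < 10; r++)
                {
                    if (mat[r, 0] > mat[predicted, 0]) predicted = r;
                    if (matx[r, 0] > matx[expected, 0]) expected = r;
                }

                correct += predicted == expected ? 1 : 0;
            }

            var acc = correct * 100.0 / total;
            Trace.WriteLine(" Metrics Accuracy: " + acc);
            //Assert.IsTrue(acc > 80.0, "Network did not learn MNIST");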