Code Example #1
File: Training.cs Project: sportbilly21/MxNet.Sharp
        public NDArray Predict(NDArray x, uint? batchSize = null)
        {
            List <float> preds    = new List <float>();
            NDArrayIter  dataIter = new NDArrayIter(new NDArray[] { x }, null);

            // Default to a single batch covering every row when no batch size is given.
            if (!batchSize.HasValue)
            {
                batchSize = x.Shape[0];
            }

            NDArrayDict predictArgs = new NDArrayDict();

            // Infer the remaining argument arrays from the trained symbol, then
            // add placeholders for the input and label before binding the executor.
            Model.InferArgsMap(mx.Device, predictArgs, args);
            predictArgs["X"]     = new NDArray(x.Shape);
            predictArgs["label"] = new NDArray(new Shape(batchSize.Value));
            using (var exec = Model.SimpleBind(mx.Device, predictArgs))
            {
                dataIter.BatchSize = batchSize.Value;
                dataIter.Reset();
                while (dataIter.IterNext())
                {
                    // Copy the batch into the bound input, run a gradient-free
                    // forward pass, and collect the outputs.
                    var batch = dataIter.Next();
                    batch.Data[0].CopyTo(predictArgs["X"]);
                    exec.Forward(false);
                    preds.AddRange(exec.Output.GetValues <float>());
                }
            }

            return(new NDArray(preds.ToArray()).Reshape((int)x.Shape[0], -1));
        }
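
A minimal usage sketch of the method above (hypothetical caller: trainedModel stands in for an instance of the defining class and is not part of the original code):

            // Hypothetical: a 4x2 feature matrix predicted in batches of 2.
            var x     = new NDArray(new float[] { 0, 0, 0, 1, 1, 0, 1, 1 }).Reshape(4, 2);
            var preds = trainedModel.Predict(x, 2);    // one output row per input row
            var vals  = preds.GetValues <float>();     // flatten to managed floats
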
Code Example #2
File: XORGate.cs Project: quakemaster/MxNet.Sharp
        public static void Run()
        {
            // Create the XOR truth table: four input pairs and their labels
            var trainX = new NDArray(new float[] { 0, 0, 0, 1, 1, 0, 1, 1 }).Reshape(4, 2);
            var trainY = new NDArray(new float[] { 0, 1, 1, 0 });

            var batch_size = 2;
            var train_data = new NDArrayIter(trainX, trainY, batch_size);
            var val_data   = new NDArrayIter(trainX, trainY, batch_size);

            var net = new Sequential();

            net.Add(new Dense(64, ActivationType.Relu));
            net.Add(new Dense(1));

            var gpus    = TestUtils.ListGpus();
            var ctxList = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu() };

            net.Initialize(new Uniform(), ctxList);
            var   trainer             = new Trainer(net.CollectParams(), new Adam());
            var   epoch               = 1000;
            var   metric              = new BinaryAccuracy();
            var   binary_crossentropy = new LogisticLoss(); // serves as binary cross-entropy on the raw logit
            float lossVal             = 0;

            for (var iter = 0; iter < epoch; iter++)
            {
                train_data.Reset();
                lossVal = 0;
                while (!train_data.End())
                {
                    var         batch   = train_data.Next();
                    var         data    = Utils.SplitAndLoad(batch.Data[0], ctxList);
                    var         label   = Utils.SplitAndLoad(batch.Label[0], ctxList);
                    NDArrayList outputs = null;
                    using (var ag = Autograd.Record())
                    {
                        outputs = Enumerable.Zip(data, label, (x, y) =>
                        {
                            var z        = net.Call(x);
                            NDArray loss = binary_crossentropy.Call(z, y);
                            loss.Backward();
                            lossVal += loss.Mean();
                            return(z);
                        }).ToList();
                    }

                    metric.Update(label, outputs.ToArray());
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var(name, acc) = metric.Get();
                metric.Reset();
                Console.WriteLine($"Loss: {lossVal}");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={acc * 100}%");
            }
        }
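
A quick post-training sanity check could forward the truth table through net directly; this is a sketch that assumes the result of net.Call converts to an NDArray (as the training loop's z does) and that the network lives on CPU, since trainX is never copied to a GPU context:

            // Sketch: forward the four XOR rows and threshold the raw logits.
            // LogisticLoss trains an unactivated output, so positive => class 1.
            NDArray logits = net.Call(trainX);
            foreach (var p in logits.GetValues <float>())
                Console.WriteLine(p > 0 ? 1 : 0);
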
Code Example #3
        public void LoadDataBatch(DataBatch data_batch)
        {
            if (sym_gen != null)
            {
                var key = data_batch.BucketKey.Value;

                // Build and cache an executor group the first time this bucket
                // key is seen; later batches with the same key reuse it.
                if (!execgrp_bucket.ContainsKey(key))
                {
                    symbol  = sym_gen(key);
                    execgrp = new DataParallelExecutorGroup(symbol, arg_names, param_names, contexts, slices,
                                                            NDArrayIter.FromBatch(data_batch), execgrp);
                    execgrp_bucket[key] = execgrp;
                }

                curr_execgrp = execgrp_bucket[key];
            }
            else
            {
                curr_execgrp = execgrp;
            }

            curr_execgrp.LoadDataBatch(data_batch);
        }
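
This follows MXNet's bucketing scheme: sym_gen generates a symbol per bucket key (typically a padded sequence length), execgrp_bucket caches one executor group per key, and the current execgrp is passed to the DataParallelExecutorGroup constructor so memory can be shared between buckets.
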
Code Example #4
File: Program.cs Project: sportbilly21/MxNet.Sharp
        static void Main(string[] args)
        {
            //Environment.SetEnvironmentVariable("MXNET_ENGINE_TYPE", "NaiveEngine");
            mx.SetDevice(DeviceType.CPU);
            //uint batchSize = 200;
            //uint numFeatures = 13;
            //var x = Symbol.Variable("x");

            //var trainData = ReadCsv("./data/train.csv");
            //var x_train = trainData.SliceAxis(1, 1, 14);
            //var y_train = trainData.SliceAxis(1, 14, 15);

            //NDArrayIter dataIter = new NDArrayIter(x_train, y_train);

            //var fc1 = sym.FullyConnected(x, Symbol.Variable("fc1_w"), null, 64, no_bias: true, symbol_name: "fc1");
            //var fc2 = sym.Relu(sym.FullyConnected(fc1, Symbol.Variable("fc2_w"), null, 32, no_bias: true, symbol_name: "fc2"), "relu2");
            //var fc3 = sym.FullyConnected(fc2, Symbol.Variable("fc3_w"), null, 1, no_bias: true, symbol_name: "fc3");
            //var output = sym.LinearRegressionOutput(fc3, Symbol.Variable("label"), symbol_name: "model");

            //NDArrayDict parameters = new NDArrayDict();
            //parameters["x"] = new NDArray(new Shape(batchSize, numFeatures));
            //parameters["label"] = new NDArray(new Shape(batchSize));
            //output.InferArgsMap(MXNet.Device, parameters, parameters);

            //foreach (var item in parameters.ToList())
            //{
            //    if (item.Key == "x" || item.Key == "label")
            //        continue;

            //    item.Value.SampleUniform();
            //}

            //var opt = new Adam();
            //BaseMetric metric = new MAE();
            //using (var exec = output.SimpleBind(MXNet.Device, parameters))
            //{
            //    dataIter.SetBatch(batchSize);
            //    var argNames = output.ListArguments();
            //    DataBatch batch;
            //    for (int iter = 1; iter <= 1000; iter++)
            //    {
            //        dataIter.Reset();
            //        metric.Reset();

            //        while (dataIter.Next())
            //        {
            //            batch = dataIter.GetDataBatch();
            //            batch.Data.CopyTo(parameters["x"]);
            //            batch.Label.CopyTo(parameters["label"]);
            //            exec.Forward(true);
            //            exec.Backward();

            //            for (var i = 0; i < argNames.Count; ++i)
            //            {
            //                if (argNames[i] == "x" || argNames[i] == "label")
            //                    continue;

            //                opt.Update(iter, i, exec.ArgmentArrays[i], exec.GradientArrays[i]);
            //            }

            //            metric.Update(parameters["label"], exec.Output);
            //        }

            //        Console.WriteLine("Iteration: {0}, Metric: {1}", iter, metric.Get());
            //    }
            //}

            //Global.Device = Context.Cpu();

            //Read Data
            CsvDataFrame trainReader = new CsvDataFrame("./data/train.csv", true);

            trainReader.ReadCsv();
            var trainX = trainReader[1, 14];
            var trainY = trainReader[14, 15];

            CsvDataFrame valReader = new CsvDataFrame("./data/test.csv", true);

            valReader.ReadCsv();

            var valX = valReader[1, 14];

            NDArrayIter train = new NDArrayIter(trainX, trainY);

            //Build Model
            var model = new Module(13);

            model.Add(new Dense(64, ActivationType.ReLU));
            model.Add(new Dense(32, ActivationType.ReLU));
            model.Add(new Dense(1));

            model.Compile(OptimizerRegistry.Adam(), LossType.MeanSquaredError, new MSE());
            model.Fit(train, 1000, 32);

            Console.ReadLine();
        }
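
valX is loaded but never consumed in this Main. A minimal follow-up sketch, assuming the Module class exposes a Keras-style Predict method (no such call appears in these examples, so the name is an assumption):

            // Hypothetical: score the validation features with the fitted model.
            var valPreds = model.Predict(valX);
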
Code Example #5
        public static void RunSimple()
        {
            var mnist      = TestUtils.GetMNIST(); //Get the MNIST dataset, it will download if not found
            var batch_size = 100;                  //Set training batch size
            var train_data = new NDArrayIter(mnist["train_data"], mnist["train_label"], batch_size);
            var val_data   = new NDArrayIter(mnist["test_data"], mnist["test_label"], batch_size);

            // Define simple network with dense layers
            var net = new Sequential();

            net.Add(new Dense(128, ActivationType.Relu));
            net.Add(new Dense(64, ActivationType.Relu));
            net.Add(new Dense(10));

            //Set context, multi-gpu supported
            var gpus = TestUtils.ListGpus();
            var ctx  = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu(0) };

            //Initialize the weights
            net.Initialize(new Xavier(magnitude: 2.24f), ctx);

            //Create the trainer with all the network parameters and set the optimizer
            var trainer = new Trainer(net.CollectParams(), new Adam());

            var   epoch  = 10;
            var   metric = new Accuracy(); //Use Accuracy as the evaluation metric.
            var   softmax_cross_entropy_loss = new SoftmaxCrossEntropyLoss();
            float lossVal = 0;             //For loss calculation

            for (var iter = 0; iter < epoch; iter++)
            {
                var tic = DateTime.Now;
                // Reset the train data iterator.
                train_data.Reset();
                lossVal = 0;

                // Loop over the train data iterator.
                while (!train_data.End())
                {
                    var batch = train_data.Next();

                    // Splits train data into multiple slices along batch_axis
                    // and copy each slice into a context.
                    var data = Utils.SplitAndLoad(batch.Data[0], ctx, batch_axis: 0);

                    // Splits train labels into multiple slices along batch_axis
                    // and copy each slice into a context.
                    var label = Utils.SplitAndLoad(batch.Label[0], ctx, batch_axis: 0);

                    var outputs = new NDArrayList();

                    // Record the forward pass with autograd so gradients can be
                    // computed in the backward step below.
                    NDArray loss = null;
                    for (int i = 0; i < data.Length; i++)
                    {
                        using (var ag = Autograd.Record())
                        {
                            var x = data[i];
                            var y = label[i];
                            var z = net.Call(x);
                            // Computes softmax cross entropy loss.
                            loss = softmax_cross_entropy_loss.Call(z, y);
                            outputs.Add(z);
                        }

                        // Backpropagate the error for one iteration.
                        loss.Backward();
                        lossVal += loss.Mean();
                    }

                    // Updates internal evaluation
                    metric.Update(label, outputs.ToArray());

                    // Make one step of parameter update. Trainer needs to know the
                    // batch size of data to normalize the gradient by 1/batch_size.
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var toc = DateTime.Now;

                // Gets the evaluation result.
                var(name, acc) = metric.Get();

                // Reset evaluation result to initial state.
                metric.Reset();
                Console.Write($"Loss: {lossVal} ");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={(acc * 100).ToString("0.##")}%, Duration: {(toc - tic).TotalSeconds.ToString("0.#")}s");
            }
        }
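
val_data is constructed in RunSimple but never consumed. A matching evaluation pass, sketched only from APIs already used above (forward-only, so no Autograd.Record and no Backward), could run after the epoch loop:

            // Sketch: evaluate on the held-out MNIST split with a fresh metric.
            var valMetric = new Accuracy();
            val_data.Reset();
            while (!val_data.End())
            {
                var batch = val_data.Next();
                var data  = Utils.SplitAndLoad(batch.Data[0], ctx, batch_axis: 0);
                var label = Utils.SplitAndLoad(batch.Label[0], ctx, batch_axis: 0);

                var outputs = new NDArrayList();
                for (var i = 0; i < data.Length; i++)
                    outputs.Add(net.Call(data[i]));   // forward pass only

                valMetric.Update(label, outputs.ToArray());
            }

            var(valName, valAcc) = valMetric.Get();
            Console.WriteLine($"Validation: {valName}={(valAcc * 100).ToString("0.##")}%");
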
Code Example #6
        public static void RunConv()
        {
            var mnist      = TestUtils.GetMNIST();
            var batch_size = 128;
            var train_data = new NDArrayIter(mnist["train_data"], mnist["train_label"], batch_size, true);
            var val_data   = new NDArrayIter(mnist["test_data"], mnist["test_label"], batch_size);

            var net = new Sequential();

            net.Add(new Conv2D(20, kernel_size: (5, 5), activation: ActivationType.Tanh));
            net.Add(new MaxPool2D(pool_size: (2, 2), strides: (2, 2)));
            net.Add(new Conv2D(50, kernel_size: (5, 5), activation: ActivationType.Tanh));
            net.Add(new MaxPool2D(pool_size: (2, 2), strides: (2, 2)));
            net.Add(new Flatten());
            net.Add(new Dense(500, ActivationType.Tanh));
            net.Add(new Dense(10));

            var gpus = TestUtils.ListGpus();
            var ctx  = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu(0) };

            net.Initialize(new Xavier(magnitude: 2.24f), ctx);
            var trainer = new Trainer(net.CollectParams(), new SGD(learning_rate: 0.02f));

            var   epoch  = 10;
            var   metric = new Accuracy();
            var   softmax_cross_entropy_loss = new SoftmaxCELoss();
            float lossVal = 0;

            for (var iter = 0; iter < epoch; iter++)
            {
                var tic = DateTime.Now;
                train_data.Reset();
                lossVal = 0;
                while (!train_data.End())
                {
                    var batch = train_data.Next();
                    var data  = Utils.SplitAndLoad(batch.Data[0], ctx, batch_axis: 0);
                    var label = Utils.SplitAndLoad(batch.Label[0], ctx, batch_axis: 0);

                    var outputs = new NDArrayList();
                    using (var ag = Autograd.Record())
                    {
                        for (var i = 0; i < data.Length; i++)
                        {
                            var x = data[i];
                            var y = label[i];

                            var     z    = net.Call(x);
                            NDArray loss = softmax_cross_entropy_loss.Call(z, y);
                            loss.Backward();
                            lossVal += loss.Mean();
                            outputs.Add(z);
                        }

                        //outputs = Enumerable.Zip(data, label, (x, y) =>
                        //{
                        //    var z = net.Call(x);
                        //    NDArray loss = softmax_cross_entropy_loss.Call(z, y);
                        //    loss.Backward();
                        //    lossVal += loss.Mean();
                        //    return z;
                        //}).ToList();
                    }

                    metric.Update(label, outputs.ToArray());
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var toc = DateTime.Now;

                var(name, acc) = metric.Get();
                metric.Reset();
                Console.Write($"Loss: {lossVal} ");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={(acc * 100).ToString("0.##")}%, Duration: {(toc - tic).TotalSeconds.ToString("0.#")}s");
            }
        }
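
val_data is likewise unused here; the validation sketch after Code Example #5 applies essentially unchanged, with the same ctx splitting and a fresh Accuracy metric.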