Example #1
        public static NDArrayList SplitData(NDArray data, int num_slice, int batch_axis = 0, bool even_split = true)
        {
            var size = data.Shape[batch_axis];

            if (even_split && size % num_slice != 0)
            {
                throw new ArgumentException(string.Format(
                                                "data with shape {0} cannot be evenly split into {1} slices along axis {2}. " +
                                                "Use a batch size that's a multiple of {3} or set even_split=false to allow " +
                                                "uneven partitioning of data.", data.Shape, num_slice, batch_axis, num_slice));
            }

            var step = (int)Math.Truncate((double)size / num_slice);

            if (!even_split && size < num_slice)
            {
                step      = 1;
                num_slice = size;
            }

            var slices = new NDArrayList();

            if (batch_axis == 0)
            {
                for (var i = 0; i < num_slice; i++)
                {
                    if (i < num_slice - 1)
                    {
                        slices.Add(data[string.Format("{0}:{1}", i * step, (i + 1) * step)]);
                    }
                    else
                    {
                        // The last slice runs to the end of the axis so remainder rows are kept.
                        slices.Add(data[string.Format("{0}:{1}", i * step, size)]);
                    }
                }
            }
            else if (even_split)
            {
                slices.Add(nd.Split(data, num_slice, batch_axis));
            }
            else
            {
                for (var i = 0; i < num_slice; i++)
                {
                    if (i < num_slice - 1)
                    {
                        slices.Add(data[string.Format("{0}:{1}", i * step, (i + 1) * step)]);
                    }
                    else
                    {
                        // The last slice runs to the end of the axis so remainder rows are kept.
                        slices.Add(nd.SliceAxis(data, batch_axis, i * step, size));
                    }
                }
            }

            return(slices.ToArray());
        }
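A minimal usage sketch of the splitting rules above (the batch and smallBatch arrays are hypothetical stand-ins, not part of the example):

        // 100 rows split evenly into 4 slices of 25 along axis 0.
        var even = SplitData(batch, num_slice: 4);

        // With even_split: false, 10 rows still yield 4 slices: step = 10 / 4 = 2,
        // and the last slice absorbs the remainder, giving sizes 2, 2, 2 and 4.
        var uneven = SplitData(smallBatch, num_slice: 4, even_split: false);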
Example #2
        public override NDArrayOrSymbol Call(NDArrayOrSymbol x, NDArrayOrSymbol[] args)
        {
            if (args != null)
            {
                // Apply the wrapped function to x, then carry the extra arguments along.
                var list = new NDArrayList();
                list.Add(_fn.Call(x));
                list.Add(args.ToList().ToNDArrays());
                return(new NDArrayOrSymbol(list));
            }

            return(_fn.Call(x, null));
        }
Example #3
        public void RowSparsePull(string key, NDArrayList @out, int priority = 0, NDArrayList row_ids = null)
        {
            if (@out == null)
            {
                throw new ArgumentNullException(nameof(@out));
            }

            if (row_ids == null)
            {
                throw new ArgumentNullException(nameof(row_ids));
            }

            var first_out = new NDArrayList();

            if (row_ids.Length == 1)
            {
                first_out.Add(@out[0]);
            }
            else
            {
                first_out = @out.ToList();
            }

            NativeMethods.MXKVStorePullRowSparseEx(handle, 1, new[] { key },
                                                   MxUtil.GetNDArrayHandles(first_out.ToArray()), MxUtil.GetNDArrayHandles(row_ids), priority);
        }
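A hedged call-site sketch (kv, dest and rowIds are hypothetical names; the NDArrayList(params NDArray[]) constructor is the one used in Example #6):

        // Pull only the rows listed in rowIds from the "weight" entry
        // into a preallocated destination array.
        kv.RowSparsePull("weight", new NDArrayList(dest), priority: 0, row_ids: new NDArrayList(rowIds));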
Example #4
        public NDArrayList Invoke(NDArrayList outputs)
        {
            var paramKeys   = new List <string>();
            var paramValues = new List <string>();

            foreach (var data in _params)
            {
                paramKeys.Add(data.Key);
                paramValues.Add(data.Value);
            }

            var numInputs  = _inputNDArrays.Count;
            var numOutputs = outputs.Count;

            var      outputHandles   = outputs.Select(s => s.Handle).ToArray();
            var      outputsReceiver = IntPtr.Zero;
            GCHandle? gcHandle      = null;

            try
            {
                if (outputs.Count > 0)
                {
                    // Pin the preallocated output handles so the native call can write into them.
                    gcHandle        = GCHandle.Alloc(outputHandles, GCHandleType.Pinned);
                    outputsReceiver = gcHandle.Value.AddrOfPinnedObject();
                }

                CheckCall(_LIB.MXImperativeInvoke(_handle, numInputs, _inputNDArrays.ToArray(), ref numOutputs,
                                                  ref outputsReceiver,
                                                  paramKeys.Count, paramKeys.ToArray(), paramValues.ToArray()));

                if (outputs.Count > 0)
                {
                    return(outputs);
                }

                // No outputs were preallocated: wrap the handles MXNet allocated for us.
                outputHandles = new NDArrayHandle[numOutputs];

                Marshal.Copy(outputsReceiver, outputHandles, 0, numOutputs);

                foreach (var outputHandle in outputHandles)
                {
                    outputs.Add(new NDArray(outputHandle));
                }
            }
            finally
            {
                // Release the pinned handle exactly once.
                gcHandle?.Free();
            }

            return(outputs);
        }
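A hypothetical call site (op stands for an already-configured operator instance): an empty list lets MXNet allocate the outputs, which Invoke then wraps and returns, while preallocated arrays are written in place.

        // Outputs allocated by MXNet and returned as new NDArray wrappers.
        var fresh = op.Invoke(new NDArrayList());

        // Outputs written in place into preallocated arrays.
        var inPlace = op.Invoke(new NDArrayList(outA, outB));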
Example #5
        public NDArrayIter(NDArrayList data, NDArrayList label = null, int batch_size    = 1, bool shuffle           = false,
                           string last_batch_handle            = "pad", string data_name = "data", string label_name = "softmax_label")
        {
            this.data              = IOUtils.InitData(data, false, data_name);
            this.label             = IOUtils.InitData(label, true, label_name); // Labels are optional, so empty is allowed.
            BatchSize              = batch_size;
            Cursor                 = -batch_size; // Start one batch before the data so the first advance lands on row 0.
            num_data               = data[0].Shape[0];
            this.last_batch_handle = last_batch_handle;
            this.shuffle           = shuffle;

            Reset();
            data_list.Add(data);
            data_list.Add(label);
            _cache_data  = null;
            _cache_label = null;
        }
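A short sketch of the iteration pattern this constructor sets up, mirroring the training loops in Examples #9 and #10 (mnist comes from TestUtils.GetMNIST()):

        var iter = new NDArrayIter(mnist["train_data"], mnist["train_label"], batch_size: 100, shuffle: true);
        while (!iter.End())
        {
            var batch = iter.Next();
            // batch.Data[0] and batch.Label[0] hold one mini-batch each.
        }

        iter.Reset(); // Rewind before the next epoch.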
Example #6
        private NDArrayList _concat(NDArrayList first_data, NDArrayList second_data)
        {
            if (first_data.Length != second_data.Length)
            {
                throw new Exception("Data source should be of same size.");
            }

            var result = new NDArrayList();

            for (var i = 0; i < first_data.Length; i++)
            {
                result.Add(
                    nd.Concat(new NDArrayList(first_data[i], second_data[i]), 0)
                    );
            }

            return(result.ToArray());
        }
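Pairing sources of per-array shapes (n1, d) and (n2, d) yields arrays of shape (n1 + n2, d). A hypothetical in-class sketch (cachedBatch and freshBatch are stand-in names):

        // Stitch a cached left-over batch onto freshly read data, array by array.
        var merged = _concat(new NDArrayList(cachedBatch), new NDArrayList(freshBatch));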
Example #7
        private NDArrayList _getdata(NDArrayDict data_source, int?start = null, int?end = null)
        {
            if (!start.HasValue && !end.HasValue)
            {
                throw new ArgumentException("Should atleast specify start or end");
            }

            start = start ?? 0;
            end   = end ?? data_source.First().Value.Shape[0];

            var result = new NDArrayList();

            foreach (var x in data_source)
            {
                result.Add(x.Value.Slice(start.Value, end));
            }

            return(result.ToArray());
        }
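A sketch of the two typical calls (batch_size is a hypothetical local here):

        // First batch_size rows of every source array.
        var head = _getdata(data_source, start: 0, end: batch_size);

        // Everything from batch_size onward; end defaults to the first array's row count.
        var tail = _getdata(data_source, start: batch_size);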
Example #8
        public IEnumerable <(NDArrayList, int, DataBatch)> IterPredict(DataIter eval_data, int?num_batch = null,
                                                                       bool reset = true, int epoch = 0, Func <DataBatch, NDArrayDict> sparse_row_id_fn = null)
        {
            if (!Binded && !ParamsInitialized)
            {
                throw new Exception("Module not binded and param initialized");
            }

            if (reset)
            {
                eval_data.Reset();
            }

            while (!eval_data.End())
            {
                if (num_batch.HasValue && eval_data.Cursor == num_batch.Value)
                {
                    break;
                }

                var eval_batch = eval_data.Next();
                Prepare(eval_batch, sparse_row_id_fn);
                Forward(eval_batch, false);
                var pad     = eval_batch.Pad.Value;
                var outputs = new NDArrayList();
                foreach (var list in GetOutputs())
                {
                    foreach (var @out in list)
                    {
                        outputs.Add(@out[$"0:{@out.Shape[0] - pad}"]);
                    }
                }

                yield return(outputs.ToArray(), eval_data.Cursor, eval_batch);
            }
        }
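Because IterPredict is lazy, it can be consumed with foreach and tuple deconstruction; a hedged sketch (module and val_data are assumed to exist):

        foreach (var (outputs, index, batch) in module.IterPredict(val_data, num_batch: 10))
        {
            // outputs holds this mini-batch's predictions with the padded rows trimmed off.
        }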
Example #9
        public static void RunSimple()
        {
            var mnist      = TestUtils.GetMNIST(); //Get the MNIST dataset; downloads it if not found locally
            var batch_size = 100;                  //Set training batch size
            var train_data = new NDArrayIter(mnist["train_data"], mnist["train_label"], batch_size);
            var val_data   = new NDArrayIter(mnist["test_data"], mnist["test_label"], batch_size);

            // Define simple network with dense layers
            var net = new Sequential();

            net.Add(new Dense(128, ActivationType.Relu));
            net.Add(new Dense(64, ActivationType.Relu));
            net.Add(new Dense(10));

            //Set context, multi-gpu supported
            var gpus = TestUtils.ListGpus();
            var ctx  = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu(0) };

            //Initialize the weights
            net.Initialize(new Xavier(magnitude: 2.24f), ctx);

            //Create the trainer with all the network parameters and set the optimizer
            var trainer = new Trainer(net.CollectParams(), new Adam());

            var   epoch  = 10;
            var   metric = new Accuracy(); //Use Accuracy as the evaluation metric.
            var   softmax_cross_entropy_loss = new SoftmaxCrossEntropyLoss();
            float lossVal = 0;             //For loss calculation

            for (var iter = 0; iter < epoch; iter++)
            {
                var tic = DateTime.Now;
                // Reset the train data iterator.
                train_data.Reset();
                lossVal = 0;

                // Loop over the train data iterator.
                while (!train_data.End())
                {
                    var batch = train_data.Next();

                    // Splits train data into multiple slices along batch_axis
                    // and copy each slice into a context.
                    var data = Utils.SplitAndLoad(batch.Data[0], ctx, batch_axis: 0);

                    // Splits train labels into multiple slices along batch_axis
                    // and copy each slice into a context.
                    var label = Utils.SplitAndLoad(batch.Label[0], ctx, batch_axis: 0);

                    var outputs = new NDArrayList();

                    // Inside training scope
                    NDArray loss = null;
                    for (int i = 0; i < data.Length; i++)
                    {
                        using (var ag = Autograd.Record())
                        {
                            var x = data[i];
                            var y = label[i];
                            var z = net.Call(x);
                            // Computes softmax cross entropy loss.
                            loss = softmax_cross_entropy_loss.Call(z, y);
                            outputs.Add(z);
                        }

                        // Backpropagate the error for one iteration.
                        loss.Backward();
                        lossVal += loss.Mean();
                    }

                    // Updates internal evaluation
                    metric.Update(label, outputs.ToArray());

                    // Make one step of parameter update. Trainer needs to know the
                    // batch size of data to normalize the gradient by 1/batch_size.
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var toc = DateTime.Now;

                // Gets the evaluation result.
                var (name, acc) = metric.Get();

                // Reset evaluation result to initial state.
                metric.Reset();
                Console.Write($"Loss: {lossVal} ");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={(acc * 100).ToString("0.##")}%, Duration: {(toc - tic).TotalSeconds.ToString("0.#")}s");
            }
        }
Example #10
        public static void RunConv()
        {
            var mnist      = TestUtils.GetMNIST();
            var batch_size = 128;
            var train_data = new NDArrayIter(mnist["train_data"], mnist["train_label"], batch_size, true);
            var val_data   = new NDArrayIter(mnist["test_data"], mnist["test_label"], batch_size);

            var net = new Sequential();

            net.Add(new Conv2D(20, kernel_size: (5, 5), activation: ActivationType.Tanh));
            net.Add(new MaxPool2D(pool_size: (2, 2), strides: (2, 2)));
            net.Add(new Conv2D(50, kernel_size: (5, 5), activation: ActivationType.Tanh));
            net.Add(new MaxPool2D(pool_size: (2, 2), strides: (2, 2)));
            net.Add(new Flatten());
            net.Add(new Dense(500, ActivationType.Tanh));
            net.Add(new Dense(10));

            var gpus = TestUtils.ListGpus();
            var ctx  = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu(0) };

            net.Initialize(new Xavier(magnitude: 2.24f), ctx);
            var trainer = new Trainer(net.CollectParams(), new SGD(learning_rate: 0.02f));

            var   epoch  = 10;
            var   metric = new Accuracy();
            var   softmax_cross_entropy_loss = new SoftmaxCELoss();
            float lossVal = 0;

            for (var iter = 0; iter < epoch; iter++)
            {
                var tic = DateTime.Now;
                train_data.Reset();
                lossVal = 0;
                while (!train_data.End())
                {
                    var batch = train_data.Next();
                    var data  = Utils.SplitAndLoad(batch.Data[0], ctx, batch_axis: 0);
                    var label = Utils.SplitAndLoad(batch.Label[0], ctx, batch_axis: 0);

                    var outputs = new NDArrayList();
                    using (var ag = Autograd.Record())
                    {
                        for (var i = 0; i < data.Length; i++)
                        {
                            var x = data[i];
                            var y = label[i];

                            var     z    = net.Call(x);
                            NDArray loss = softmax_cross_entropy_loss.Call(z, y);
                            loss.Backward();
                            lossVal += loss.Mean();
                            outputs.Add(z);
                        }

                        //outputs = Enumerable.Zip(data, label, (x, y) =>
                        //{
                        //    var z = net.Call(x);
                        //    NDArray loss = softmax_cross_entropy_loss.Call(z, y);
                        //    loss.Backward();
                        //    lossVal += loss.Mean();
                        //    return z;
                        //}).ToList();
                    }

                    metric.Update(label, outputs.ToArray());
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var toc = DateTime.Now;

                var (name, acc) = metric.Get();
                metric.Reset();
                Console.Write($"Loss: {lossVal} ");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={(acc * 100).ToString("0.##")}%, Duration: {(toc - tic).TotalSeconds.ToString("0.#")}s");
            }
        }
Example #11
        public List <NDArrayList> Predict(DataIter eval_data, int?num_batch = null, bool merge_batches = true,
                                          bool reset = true, bool always_output_list = true, Func <DataBatch, NDArrayDict> sparse_row_id_fn = null)
        {
            if (!Binded && !ParamsInitialized)
            {
                throw new Exception("Module not binded and param initialized");
            }

            if (reset)
            {
                eval_data.Reset();
            }

            var output_list  = new List <NDArrayList>();
            var output_list2 = new NDArrayList();

            while (!eval_data.End())
            {
                if (num_batch.HasValue && eval_data.Cursor == num_batch.Value)
                {
                    break;
                }

                var eval_batch = eval_data.Next();
                Prepare(eval_batch, sparse_row_id_fn);
                Forward(eval_batch, false);
                var pad     = eval_batch.Pad.Value;
                var outputs = new NDArrayList();
                foreach (var list in GetOutputs())
                {
                    foreach (var @out in list)
                    {
                        outputs.Add(@out[$"0:{@out.Shape[0] - pad}"].Copy());
                    }
                }

                output_list.Add(outputs.ToArray());
            }

            if (output_list.Count == 0)
            {
                return(output_list);
            }

            if (merge_batches)
            {
                var num_outputs = output_list[0].Length;
                foreach (var @out in output_list)
                {
                    if (@out.Length != num_outputs)
                    {
                        throw new Exception("Cannot merge batches, as the number of outputs is not the same " +
                                            "across mini-batches. Maybe bucketing is used?");
                    }
                }

                // Concatenate each output across all mini-batches along the batch axis.
                for (var i = 0; i < num_outputs; i++)
                {
                    var batches = new NDArrayList();
                    foreach (var @out in output_list)
                    {
                        batches.Add(@out[i]);
                    }

                    output_list2.Add(nd.Concat(batches, 0));
                }

                return(new List <NDArrayList> {
                    output_list2.ToArray()
                });
            }

            return(output_list);
        }
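A hedged call-site sketch (module and val_data are assumed to exist): with merge_batches the per-batch outputs are concatenated along the batch axis into a single NDArrayList; without it, one NDArrayList per mini-batch is returned.

        // One merged NDArrayList: one NDArray per network output, spanning all batches.
        var merged = module.Predict(val_data, merge_batches: true)[0];

        // Per-mini-batch outputs instead.
        var perBatch = module.Predict(val_data, merge_batches: false);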