Example #1
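Basic NDArray usage on the CPU: building a 3x3 array, round-tripping it through a serialized buffer, slice assignment, flip/square operations, element-wise and scalar comparisons, and repeated uniform sampling in a loop.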
        private static void Main(string[] args)
        {
            mx.SetDevice(DeviceType.CPU);
            var feat = Runtime.FeatureList();
            var x    = new NDArray(new float[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, new Shape(3, 3));
            var buff = x.GetBuffer();
            var x1   = NDArray.LoadFromBuffer(buff);

            var zeros = nd.Zeros(new Shape(3, 3));

            x[":,3"] = x[":,2"];
            var data1 = x.AsArray <float>();

            x = nd.Flip(x, 1);
            x = nd.Square(x);
            var a = Autograd.GetSymbol(x);
            var y = nd.EqualScalar(x, 3);
            //var acc = new Accuracy();
            var data = y.GetValues<float>();
            //acc.Update(x, y);
            var eq = nd.Equal(x, y);

            for (var i = 1; i <= 100000; i++)
            {
                x.SampleUniform();
                x = nd.Square(x);
                Console.WriteLine(i);
            }

            Console.ReadLine();
        }
Example #2
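The same NDArray walkthrough as Example #1, but the data originates as a NumPy array created with np.random().uniform and is converted to an NDArray via nd.Array.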
        private static void Main(string[] args)
        {
            var npx = new np.random().uniform(newdims: new shape(3, 3));
            var x   = new NDArray(new float[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, new Shape(3, 3));
            var ndx = nd.Array(npx.astype(np.Int8));

            var buff = x.GetBuffer();
            var x1   = NDArray.LoadFromBuffer(buff);

            var zeros = nd.Zeros(new Shape(3, 3));

            x[":,3"] = x[":,2"];
            var data1 = x.AsArray <float>();

            x = nd.Flip(x, 1);
            x = nd.Square(x);
            var a = Autograd.GetSymbol(x);
            var y = nd.EqualScalar(x, 3);
            //var acc = new Accuracy();
            var data = y.GetValues<float>();
            //acc.Update(x, y);
            var eq = nd.Equal(x, y);

            for (var i = 1; i <= 100000; i++)
            {
                x.SampleUniform();
                x = nd.Square(x);
                Console.WriteLine(i);
            }

            Console.ReadLine();
        }
Example #3
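Trains a small Sequential network on the XOR truth table with Adam, LogisticLoss as the binary cross-entropy, and BinaryAccuracy as the metric, recording gradients with Autograd.Record.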
        public static void Run()
        {
            // Create
            var trainX = new NDArray(new float[] { 0, 0, 0, 1, 1, 0, 1, 1 }).Reshape(4, 2);
            var trainY = new NDArray(new float[] { 0, 1, 1, 0 });

            var batch_size = 2;
            var train_data = new NDArrayIter(trainX, trainY, batch_size);
            var val_data   = new NDArrayIter(trainX, trainY, batch_size);

            var net = new Sequential();

            net.Add(new Dense(64, ActivationType.Relu));
            net.Add(new Dense(1));

            var gpus    = TestUtils.ListGpus();
            var ctxList = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu() };

            net.Initialize(new Uniform(), ctxList);
            var   trainer             = new Trainer(net.CollectParams(), new Adam());
            var   epoch               = 1000;
            var   metric              = new BinaryAccuracy();
            var   binary_crossentropy = new LogisticLoss();
            float lossVal             = 0;

            for (var iter = 0; iter < epoch; iter++)
            {
                train_data.Reset();
                lossVal = 0;
                while (!train_data.End())
                {
                    var         batch   = train_data.Next();
                    var         data    = Utils.SplitAndLoad(batch.Data[0], ctxList);
                    var         label   = Utils.SplitAndLoad(batch.Label[0], ctxList);
                    NDArrayList outputs = null;
                    using (var ag = Autograd.Record())
                    {
                        outputs = Enumerable.Zip(data, label, (x, y) =>
                        {
                            var z        = net.Call(x);
                            NDArray loss = binary_crossentropy.Call(z, y);
                            loss.Backward();
                            lossVal += loss.Mean();
                            return z;
                        }).ToList();
                    }

                    metric.Update(label, outputs.ToArray());
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var (name, acc) = metric.Get();
                metric.Reset();
                Console.WriteLine($"Loss: {lossVal}");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={acc * 100}%");
            }
        }
Example #4
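Builds YOLO training targets under Autograd.Pause: dynamic targets are overridden by fixed ones where a ground truth is present, with optional label smoothing, and everything is returned through sym.StopGradient.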
        private NDArrayOrSymbol F(Symbol box_preds, Symbol gt_boxes, Symbol obj_t, Symbol centers_t, Symbol scales_t, Symbol weights_t, Symbol clas_t)
        {
            using (var ag = Autograd.Pause())
            {
                SymbolList dynamic_t = this._dynamic_target.Call(box_preds, gt_boxes).SymXList;
                // use fixed target to override dynamic targets
                var _tup_1 = Enumerable.Zip(dynamic_t, new List <Symbol> {
                    obj_t,
                    centers_t,
                    scales_t,
                    weights_t,
                    clas_t
                }, (i0, i1) => {
                    return (i0, i1);
                }).ToList();
                var obj            = _tup_1[0];
                var centers        = _tup_1[1];
                var scales         = _tup_1[2];
                var weights        = _tup_1[3];
                var clas           = _tup_1[4];
                var mask           = obj.i1 > 0;
                var objectness     = sym.Where(mask, obj.i1, obj.i0);
                var mask2          = mask.Tile(reps: new Shape(2));
                var center_targets = sym.Where(mask2, centers.i1, centers.i0);
                var scale_targets  = sym.Where(mask2, scales.i1, scales.i0);
                var weights1       = sym.Where(mask2, weights.i1, weights.i0);
                var mask3          = mask.Tile(reps: new Shape(this._num_class));
                var class_targets  = sym.Where(mask3, clas.i1, clas.i0);
                var smooth_weight = 1f / this._num_class;   // float division; integer division would always yield 0
                if (this._label_smooth)
                {
                    smooth_weight = Math.Min(1f / this._num_class, 1f / 40);
                    class_targets = sym.Where(class_targets > 0.5f, class_targets - smooth_weight, class_targets);
                    class_targets = sym.Where((class_targets < -0.5f) + (class_targets > 0.5f), class_targets, sym.OnesLike(class_targets) * smooth_weight);
                }


                var class_mask = mask.Tile(reps: new Shape(this._num_class)) * (class_targets >= 0);
                return (new NDArrayOrSymbol((from x in new List<Symbol> {
                    objectness,
                    center_targets,
                    scale_targets,
                    weights1,
                    class_targets,
                    class_mask
                }
                                            select sym.StopGradient(x)).ToArray()));
            }
        }
Example #5
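Two snippets: a group-normalization forward pass implemented on top of nd.BatchNorm under Autograd.TrainMode, and a helper that runs one training epoch and returns the cumulative loss.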
        private NDArrayOrSymbol F(NDArray x, NDArray gamma, NDArray beta)
        {
            NDArray y = null;

            // normalization
            using (var ag = Autograd.TrainMode())
            {
                y = x.ExpandDims(0).Reshape(0, 0, this.Ngroups, -1);
                y = y.Reshape(1, -3, -1);
                var batch = x.Shape[0];
                y = nd.BatchNorm(y,
                                 nd.Ones(new Shape(batch * this.Ngroups), ctx: x.Context),
                                 nd.Zeros(new Shape(batch * this.Ngroups), ctx: x.Context),
                                 nd.Zeros(new Shape(batch * this.Ngroups), ctx: x.Context),
                                 nd.Ones(new Shape(batch * this.Ngroups), ctx: x.Context),
                                 eps: Epsilon, axis: Axis);
            }

            // scale and shift
            y = nd.ReshapeLike(y, x).Reshape(0, 0, -1);
            y = y * gamma.Reshape(1, -1, 1) + beta.Reshape(1, -1, 1);
            return nd.ReshapeLike(y, x);
        }
        private static float TrainModel()
        {
            float cumulative_train_loss = 0;

            foreach (var (data, label) in train_dataloader)
            {
                NDArray loss_result = null;
                using (var ag = Autograd.Record())
                {
                    var output = net.Call(data);
                    loss_result = loss.Call(output, label);
                    loss_result.Backward();
                }

                trainer.Step(batch_size);
                cumulative_train_loss += nd.Sum(loss_result).AsScalar <float>();
            }

            return cumulative_train_loss;
        }
Example #7
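A PowerShell cmdlet that (re)opens an Autograd.Record scope, followed by a target generator that zero-initializes YOLO targets under Autograd.Pause and flags anchors whose best IOU exceeds the ignore threshold.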
        protected override void BeginProcessing()
        {
            if (Scope != null)
            {
                try
                {
                    Scope.Dispose();
                }
                finally
                {
                    Scope = null;
                }
            }

            Scope = Autograd.Record();
            if (PassThru)
            {
                WriteObject(Scope);
            }
        }
        private NDArrayOrSymbol F(Symbol box_preds, Symbol gt_boxes)
        {
            Symbol objness_t = null;
            Symbol center_t  = null;
            Symbol scale_t   = null;
            Symbol weight_t  = null;
            Symbol class_t   = null;

            using (var ag = Autograd.Pause())
            {
                box_preds = box_preds.Reshape(0, -1, 4);
                objness_t = sym.ZerosLike(box_preds.SliceAxis(axis: -1, begin: 0, end: 1));
                center_t  = sym.ZerosLike(box_preds.SliceAxis(axis: -1, begin: 0, end: 2));
                scale_t   = sym.ZerosLike(box_preds.SliceAxis(axis: -1, begin: 0, end: 2));
                weight_t  = sym.ZerosLike(box_preds.SliceAxis(axis: -1, begin: 0, end: 2));
                class_t   = sym.OnesLike(objness_t.Tile(reps: new Shape(this._num_class))) * -1;
                var batch_ious = this._batch_iou.Call(box_preds, gt_boxes);
                var ious_max   = sym.Max(batch_ious, axis: new Shape(-1), keepdims: true);
                objness_t = (ious_max > this._ignore_iou_thresh) * -1;
            }

            return new NDArrayOrSymbol(objness_t, center_t, scale_t, weight_t, class_t);
        }
Example #9
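End-to-end MNIST training of a dense network: multi-GPU context splitting with Utils.SplitAndLoad, Xavier initialization, Adam, and softmax cross-entropy loss.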
        public static void RunSimple()
        {
            var mnist      = TestUtils.GetMNIST(); //Get the MNIST dataset, it will download if not found
            var batch_size = 100;                  //Set training batch size
            var train_data = new NDArrayIter(mnist["train_data"], mnist["train_label"], batch_size);
            var val_data   = new NDArrayIter(mnist["test_data"], mnist["test_label"], batch_size);

            // Define simple network with dense layers
            var net = new Sequential();

            net.Add(new Dense(128, ActivationType.Relu));
            net.Add(new Dense(64, ActivationType.Relu));
            net.Add(new Dense(10));

            //Set context, multi-gpu supported
            var gpus = TestUtils.ListGpus();
            var ctx  = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu(0) };

            //Initialize the weights
            net.Initialize(new Xavier(magnitude: 2.24f), ctx);

            //Create the trainer with all the network parameters and set the optimizer
            var trainer = new Trainer(net.CollectParams(), new Adam());

            var   epoch  = 10;
            var   metric = new Accuracy(); //Use Accuracy as the evaluation metric.
            var   softmax_cross_entropy_loss = new SoftmaxCrossEntropyLoss();
            float lossVal = 0;             //For loss calculation

            for (var iter = 0; iter < epoch; iter++)
            {
                var tic = DateTime.Now;
                // Reset the train data iterator.
                train_data.Reset();
                lossVal = 0;

                // Loop over the train data iterator.
                while (!train_data.End())
                {
                    var batch = train_data.Next();

                    // Splits train data into multiple slices along batch_axis
                    // and copy each slice into a context.
                    var data = Utils.SplitAndLoad(batch.Data[0], ctx, batch_axis: 0);

                    // Splits train labels into multiple slices along batch_axis
                    // and copy each slice into a context.
                    var label = Utils.SplitAndLoad(batch.Label[0], ctx, batch_axis: 0);

                    var outputs = new NDArrayList();

                    // Inside training scope
                    NDArray loss = null;
                    for (int i = 0; i < data.Length; i++)
                    {
                        using (var ag = Autograd.Record())
                        {
                            var x = data[i];
                            var y = label[i];
                            var z = net.Call(x);
                            // Computes softmax cross entropy loss.
                            loss = softmax_cross_entropy_loss.Call(z, y);
                            outputs.Add(z);
                        }

                        // Backpropagate the error for one iteration.
                        loss.Backward();
                        lossVal += loss.Mean();
                    }

                    // Updates internal evaluation
                    metric.Update(label, outputs.ToArray());

                    // Make one step of parameter update. Trainer needs to know the
                    // batch size of data to normalize the gradient by 1/batch_size.
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var toc = DateTime.Now;

                // Gets the evaluation result.
                var (name, acc) = metric.Get();

                // Reset evaluation result to initial state.
                metric.Reset();
                Console.Write($"Loss: {lossVal} ");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={(acc * 100).ToString("0.##")}%, Duration: {(toc - tic).TotalSeconds.ToString("0.#")}s");
            }
        }
Example #10
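The convolutional (LeNet-style) variant of the MNIST example, trained with SGD and SoftmaxCELoss.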
        public static void RunConv()
        {
            var mnist      = TestUtils.GetMNIST();
            var batch_size = 128;
            var train_data = new NDArrayIter(mnist["train_data"], mnist["train_label"], batch_size, true);
            var val_data   = new NDArrayIter(mnist["test_data"], mnist["test_label"], batch_size);

            var net = new Sequential();

            net.Add(new Conv2D(20, kernel_size: (5, 5), activation: ActivationType.Tanh));
            net.Add(new MaxPool2D(pool_size: (2, 2), strides: (2, 2)));
            net.Add(new Conv2D(50, kernel_size: (5, 5), activation: ActivationType.Tanh));
            net.Add(new MaxPool2D(pool_size: (2, 2), strides: (2, 2)));
            net.Add(new Flatten());
            net.Add(new Dense(500, ActivationType.Tanh));
            net.Add(new Dense(10));

            var gpus = TestUtils.ListGpus();
            var ctx  = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu(0) };

            net.Initialize(new Xavier(magnitude: 2.24f), ctx);
            var trainer = new Trainer(net.CollectParams(), new SGD(learning_rate: 0.02f));

            var   epoch  = 10;
            var   metric = new Accuracy();
            var   softmax_cross_entropy_loss = new SoftmaxCELoss();
            float lossVal = 0;

            for (var iter = 0; iter < epoch; iter++)
            {
                var tic = DateTime.Now;
                train_data.Reset();
                lossVal = 0;
                while (!train_data.End())
                {
                    var batch = train_data.Next();
                    var data  = Utils.SplitAndLoad(batch.Data[0], ctx, batch_axis: 0);
                    var label = Utils.SplitAndLoad(batch.Label[0], ctx, batch_axis: 0);

                    var outputs = new NDArrayList();
                    using (var ag = Autograd.Record())
                    {
                        for (var i = 0; i < data.Length; i++)
                        {
                            var x = data[i];
                            var y = label[i];

                            var     z    = net.Call(x);
                            NDArray loss = softmax_cross_entropy_loss.Call(z, y);
                            loss.Backward();
                            lossVal += loss.Mean();
                            outputs.Add(z);
                        }
                    }

                    metric.Update(label, outputs.ToArray());
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var toc = DateTime.Now;

                var (name, acc) = metric.Get();
                metric.Reset();
                Console.Write($"Loss: {lossVal} ");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={(acc * 100).ToString("0.##")}%, Duration: {(toc - tic).TotalSeconds.ToString("0.#")}s");
            }
        }
Example #11
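FashionMNIST training with the dataset/DataLoader API: ToTensor and Normalize transforms, a small CNN, per-epoch train and validation accuracy, and saving the trained parameters at the end.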
        public static void Run()
        {
            var mnist_train = new FashionMNIST(train: true);

            var (x, y) = mnist_train[0];
            Console.WriteLine($"X shape: {x.Shape}, X dtype: {x.DataType}, Y shape: {y.Shape}, Y dtype: {y.DataType}");

            var transformer = new Compose(
                new ToTensor(),
                new Normalize(new MxNet.Tuple<float>(0.13f, 0.31f))
                );

            var train      = mnist_train.TransformFirst(transformer);
            int batch_size = 256;
            var train_data = new DataLoader(train, batch_size: batch_size, shuffle: true);

            foreach (var (data, label) in train_data)
            {
                Console.WriteLine(data.Shape + ", " + label.Shape);
                break;
            }

            var mnist_valid = new FashionMNIST(train: false);
            var valid_data  = new DataLoader(mnist_valid, batch_size: batch_size, shuffle: true);

            var net = new Sequential();

            net.Add(new Conv2D(channels: 6, kernel_size: (5, 5), activation: ActivationType.Relu),
                    new MaxPool2D(pool_size: (2, 2), strides: (2, 2)),
                    new Conv2D(channels: 16, kernel_size: (3, 3), activation: ActivationType.Relu),
                    new MaxPool2D(pool_size: (2, 2), strides: (2, 2)),
                    new Flatten(),
                    new Dense(120, activation: ActivationType.Relu),
                    new Dense(84, activation: ActivationType.Relu),
                    new Dense(10));

            net.Initialize(new Xavier());

            var softmax_cross_entropy = new SoftmaxCrossEntropyLoss();
            var trainer = new Trainer(net.CollectParams(), new SGD(learning_rate: 0.1f));

            for (int epoch = 0; epoch < 10; epoch++)
            {
                var   tic        = DateTime.Now;
                float train_loss = 0;
                float train_acc  = 0;
                float valid_acc  = 0;

                foreach (var (data, label) in train_data)
                {
                    NDArray loss   = null;
                    NDArray output = null;
                    // forward + backward
                    using (Autograd.Record())
                    {
                        output = net.Call(data);
                        loss   = softmax_cross_entropy.Call(output, label);
                    }

                    loss.Backward();

                    //update parameters
                    trainer.Step(batch_size);

                    //calculate training metrics
                    train_loss += loss.Mean();
                    train_acc  += Acc(output, label);
                }

                // calculate validation accuracy
                foreach (var (data, label) in valid_data)
                {
                    valid_acc += Acc(net.Call(data), label);
                }

                Console.WriteLine($"Epoch {epoch}: loss {train_loss / train_data.Length}," +
                                  $" train acc {train_acc / train_data.Length}, " +
                                  $"test acc {train_acc / train_data.Length} " +
                                  $"in {(DateTime.Now - tic).TotalMilliseconds} ms");
            }

            net.SaveParameters("net.params");
        }
Example #12
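A generic training cmdlet: it loops over TrainingData for MaxEpoch epochs, records gradients and steps the Trainer, and periodically writes a TrainingStatus with training/validation loss and an optional metric.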
        protected override void BeginProcessing()
        {
            if (Context == null)
            {
                Context = Context.CurrentContext;
            }

            var stopWatch = new Stopwatch();

            stopWatch.Start();

            for (var epoch = 1; epoch <= MaxEpoch; ++epoch)
            {
                TrainingData.Reset();
                var totalLoss = 0.0f;
                var dataSize  = 0;

                while (!TrainingData.End())
                {
                    var batch = TrainingData.Next();
                    var data  = batch.Data[0].AsInContext(Context);
                    var label = batch.Label[0].AsInContext(Context);

                    using (Autograd.Record())
                    {
                        var output = Model.Call(data);
                        var loss   = (NDArray)LossFunction.Call(output, label);
                        loss.Backward();
                        totalLoss += loss.Sum();
                        dataSize  += data.Shape[0];
                    }

                    // step the optimizer outside the recording scope
                    Trainer.Step(batch.Data[0].Shape[0]);
                }

                if (epoch % DisplayStep == 0 || epoch == MaxEpoch)
                {
                    totalLoss /= dataSize;

                    ValidationData.Reset();
                    var totalValidLoss = 0.0f;
                    var validDataSize  = 0;

                    if (MetricFunction != null)
                    {
                        MetricFunction.Reset();
                    }

                    while (!ValidationData.End())
                    {
                        var batch = ValidationData.Next();
                        var data  = batch.Data[0].AsInContext(Context);
                        var label = batch.Label[0].AsInContext(Context);

                        var output    = Model.Call(data);
                        var validLoss = (NDArray)LossFunction.Call(output, label);
                        totalValidLoss += validLoss.Sum();
                        validDataSize  += data.Shape[0];

                        if (MetricFunction != null)
                        {
                            MetricFunction.Update(label, output);
                        }
                    }
                    totalValidLoss /= validDataSize;

                    string metricName = null;
                    float  metric     = float.NaN;

                    if (MetricFunction != null)
                    {
                        (metricName, metric) = MetricFunction.Get();
                    }

                    var status = new TrainingStatus(epoch,
                                                    (float)Math.Round(totalLoss, DisplayDigits),
                                                    (float)Math.Round(totalValidLoss, DisplayDigits),
                                                    metricName,
                                                    (float)Math.Round(metric, DisplayDigits),
                                                    stopWatch.Elapsed);

                    WriteObject(status);
                }
            }
        }
Example #13
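A YOLO target prefetcher: matches ground-truth boxes to the best anchors by IOU, writes center/scale/weight/objectness/class targets in NumPy, and slices them per feature-map stage, all under Autograd.Pause.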
        public override NDArrayOrSymbol Forward(NDArrayOrSymbol x, params NDArrayOrSymbol[] args)
        {
            NDArray     img         = x;
            NDArrayList xs          = args[0].NdXList;
            NDArray     anchors     = args[1];
            NDArray     offsets     = args[2];
            NDArray     gt_boxes    = args[3];
            NDArray     gt_ids      = args[4];
            NDArray     gt_mixratio = args[5];

            var all_anchors = nd.Concat((from i in Enumerable.Range(0, anchors.Shape[0])
                                         select anchors[i].Reshape(-1, 2)).ToList(), dim: 0);

            var all_offsets = nd.Concat((from i in Enumerable.Range(0, anchors.Shape[0])
                                         select offsets[i].Reshape(-1, 2)).ToList(), dim: 0);

            var l = (from i in Enumerable.Range(0, anchors.Shape[0])
                     select anchors[i].Size / 2).ToArray();
            var num_anchors = np.cumsum(np.array(l));
            var num_offsets = np.cumsum(np.array((from i in Enumerable.Range(0, offsets.Shape[0])
                                                  select offsets[i].Size / 2).ToArray()));

            var _offsets = new List <int> {
                0
            };

            _offsets.AddRange(num_offsets.AsInt32Array());

            Debug.Assert((xs.Length == anchors.Shape[0]) && (anchors.Shape[0] == offsets.Shape[0]));
            // orig image size
            var     orig_height    = img.Shape[2];
            var     orig_width     = img.Shape[3];
            ndarray center_targets = null;
            ndarray scale_targets  = null;
            ndarray weights        = null;
            ndarray objectness     = null;
            ndarray class_targets  = null;

            using (var ag = Autograd.Pause())
            {
                // outputs
                var shape_like = all_anchors.Reshape(1, -1, 2) * all_offsets.Reshape(-1, 1, 2).ExpandDims(0).Repeat(repeats: gt_ids.Shape[0], axis: 0);
                center_targets     = nd.ZerosLike(shape_like);
                scale_targets      = nd.ZerosLike(shape_like);
                weights            = nd.ZerosLike(shape_like);
                objectness         = nd.ZerosLike(nd.Split(weights, axis: -1, num_outputs: 2)[0]);
                class_targets      = nd.OneHot(nd.Squeeze(new NDArrayList(objectness), axis: new Shape(-1)), depth: this._num_class);
                class_targets[":"] = -1;
                // for each ground-truth, find the best matching anchor within the particular grid
                // for instance, center of object 1 reside in grid (3, 4) in (16, 16) feature map
                // then only the anchor in (3, 4) is going to be matched
                var     _tup_1             = this.bbox2center.Call(gt_boxes);
                NDArray gtx                = _tup_1[0];
                NDArray gty                = _tup_1[1];
                NDArray gtw                = _tup_1[2];
                NDArray gth                = _tup_1[3];
                var     shift_gt_boxes     = nd.Concat(new NDArrayList(-0.5f * gtw, -0.5f * gth, 0.5f * gtw, 0.5f * gth), dim: -1);
                var     anchor_boxes       = nd.Concat(new NDArrayList(0 * all_anchors, all_anchors), dim: -1);
                var     shift_anchor_boxes = this.bbox2corner.Call(anchor_boxes);
                var     ious               = nd.Contrib.BoxIou(shift_anchor_boxes, shift_gt_boxes).Transpose(new Shape(1, 0, 2));
                // real value is required to process, convert to Numpy
                var matches   = ious.Argmax(axis: 1).AsNumpy();
                var valid_gts = (gt_boxes >= 0).Prod(axis: -1).ArrayData;

                var np_gtx          = gtx.AsNumpy();
                var np_gty          = gty.AsNumpy();
                var np_gtw          = gtw.AsNumpy();
                var np_gth          = gth.AsNumpy();
                var np_anchors      = all_anchors.AsNumpy();
                var np_gt_ids       = gt_ids.AsNumpy();
                var np_gt_mixratios = gt_mixratio != null ? gt_mixratio.AsNumpy() : null;

                // TODO(zhreshold): the number of valid gt is not a big number, therefore for loop
                // should not be a problem right now. Switch to better solution is needed.
                foreach (var b in Enumerable.Range(0, (int)matches.shape.iDims[0]))
                {
                    foreach (var m in Enumerable.Range(0, (int)matches.shape.iDims[1]))
                    {
                        if ((int)valid_gts.GetValue(b, m) < 1)
                        {
                            break;
                        }

                        var match  = Convert.ToInt32(matches[b, m]);
                        var nlayer = (int)np.nonzero(num_anchors > match)[0][0];
                        var height = xs[nlayer].Shape[2];
                        var width  = xs[nlayer].Shape[3];
                        var (gtx_bm, gty_bm, gtw_bm, gth_bm) = ((ndarray)np_gtx[b, m, 0], (ndarray)np_gty[b, m, 0], (ndarray)np_gtw[b, m, 0], (ndarray)np_gth[b, m, 0]);
                        // compute the location of the gt centers from the per-box scalars
                        var loc_x = Convert.ToInt32(gtx_bm / orig_width * width);
                        var loc_y = Convert.ToInt32(gty_bm / orig_height * height);
                        // write back to targets
                        var index = _offsets[nlayer] + loc_y * width + loc_x;
                        center_targets[b, index, match, 0]  = gtx_bm / orig_width * width - loc_x;
                        center_targets[b, index, match, 1]  = gty_bm / orig_height * height - loc_y;
                        scale_targets[b, index, match, 0]   = np.log(np.maximum(gtw_bm, 1) / np_anchors[match, 0]);
                        scale_targets[b, index, match, 1]   = np.log(np.maximum(gth_bm, 1) / np_anchors[match, 1]);
                        weights[b, index, match, ":"]       = 2 - gtw_bm * gth_bm / orig_width / orig_height;
                        objectness[b, index, match, 0]      = np_gt_mixratios != null ? np_gt_mixratios[b, m, 0] : 1;
                        class_targets[b, index, match, ":"] = 0;
                        class_targets[b, index, match, Convert.ToInt32(np_gt_ids[b, m, 0])] = 1;
                    }
                }
                // some stages won't see all of the anchors, so slice out the correct targets
                objectness     = this.Slice(objectness, num_anchors, num_offsets);
                center_targets = this.Slice(center_targets, num_anchors, num_offsets);
                scale_targets  = this.Slice(scale_targets, num_anchors, num_offsets);
                weights        = this.Slice(weights, num_anchors, num_offsets);
                class_targets  = this.Slice(class_targets, num_anchors, num_offsets);
            }

            return new NDArrayOrSymbol(objectness, center_targets, scale_targets, weights, class_targets);
        }