Example No. 1
        public NDArrayIter(NDArrayList data, NDArrayList label = null, int batch_size    = 1, bool shuffle           = false,
                           string last_batch_handle            = "pad", string data_name = "data", string label_name = "softmax_label")
        {
            this.data              = IOUtils.InitData(data, false, data_name);
            this.label             = IOUtils.InitData(label, false, label_name);
            BatchSize              = batch_size;
            Cursor                 = -batch_size; // start one batch before the data; Reset() below re-establishes this
            num_data               = data[0].Shape[0];
            this.last_batch_handle = last_batch_handle;
            this.shuffle           = shuffle;

            Reset();
            data_list.Add(data);
            data_list.Add(label);
            _cache_data  = null;
            _cache_label = null;
        }
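A minimal usage sketch for this constructor, assuming NDArray converts implicitly to NDArrayList (as when Example No. 17 passes single arrays) and the nd.Ones factory seen in Example No. 5; all names and shapes below are illustrative:

        // Build a toy 100-sample, 4-feature dataset and walk it in batches of 10.
        var features = nd.Ones(new Shape(100, 4));
        var labels   = nd.Ones(new Shape(100));
        var iter     = new NDArrayIter(features, labels, batch_size: 10, shuffle: true);
        while (!iter.End())
        {
            var batch = iter.Next();
            Console.WriteLine(batch.Data[0].Shape);   // (10, 4) for each full batch
        }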
Example No. 2
        private NDArrayList _concat(NDArrayList first_data, NDArrayList second_data)
        {
            if (first_data.Length != second_data.Length)
            {
                throw new Exception("Data source should be of same size.");
            }

            var result = new NDArrayList();

            for (var i = 0; i < first_data.Length; i++)
            {
                result.Add(
                    nd.Concat(new NDArrayList(first_data[i], second_data[i]), 0)
                    );
            }

            return(result.ToArray());
        }
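The helper is a thin wrapper over nd.Concat along axis 0; a standalone sketch of the same call, with illustrative shapes:

        // Concatenating two (2, 3) arrays along axis 0 yields a (4, 3) array.
        var a      = nd.Ones(new Shape(2, 3));
        var b      = nd.Ones(new Shape(2, 3));
        var merged = nd.Concat(new NDArrayList(a, b), 0);
        Console.WriteLine(merged.Shape);   // expected: (4, 3)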
Example No. 3
        public override void Forward(bool is_train, OpGradReq[] req, NDArrayList in_data, NDArrayList out_data, NDArrayList aux)
        {
            ndarray disable_indices;
            var     matches = in_data[0];
            var     ious    = in_data[1];
            var     max_pos = Convert.ToInt32(Math.Round(this._pos_ratio * this._num_sample));
            var     max_neg = Convert.ToInt32(this._neg_ratio * this._num_sample);

            foreach (var i in Enumerable.Range(0, matches.Shape[0]))
            {
                // init with 0s, which are ignored
                var result = nd.ZerosLike(matches[i]);
                // negative samples with label -1
                var ious_max = ious.Max(axis: -1)[i];
                var neg_mask = ious_max < this._neg_thresh_high;
                neg_mask = neg_mask * (ious_max >= this._neg_thresh_low);
                result   = nd.Where(neg_mask, nd.OnesLike(result) * -1, result);
                // positive samples
                result = nd.Where(matches[i] >= 0, nd.OnesLike(result), result);
                result = nd.Where(ious_max >= this._pos_thresh, nd.OnesLike(result), result);
                // re-balance if number of positive or negative exceed limits
                var np_result = result.AsNumpy();
                var num_pos   = Convert.ToInt32((result > 0).Sum());
                if (num_pos > max_pos)
                {
                    disable_indices            = new np.random().choice(np.where(np_result > 0), size: num_pos - max_pos, replace: false);
                    np_result[disable_indices] = 0;
                }
                var num_neg = Convert.ToInt32((result < 0).Sum());
                if (this._fill_negative)
                {
                    // if pos_sample is less than quota, we can have negative samples filling the gap
                    max_neg = Math.Max(this._num_sample - Math.Min(num_pos, max_pos), max_neg);
                }
                if (num_neg > max_neg)
                {
                    disable_indices            = new np.random().choice(np.where(np_result < 0), size: num_neg - max_neg, replace: false);
                    np_result[disable_indices] = 0;
                }

                this.Assign(out_data[0][i], req[0], nd.Array(np_result));
            }
        }
Example No. 4
        private NDArrayList _getdata(NDArrayDict data_source, int? start = null, int? end = null)
        {
            if (!start.HasValue && !end.HasValue)
            {
                throw new ArgumentException("Should at least specify start or end");
            }

            start = start.HasValue ? start : 0;
            end   = end.HasValue ? end : data_source.First().Value.Shape[0];

            var result = new NDArrayList();

            foreach (var x in data_source)
            {
                result.Add(x.Value.Slice(start.Value, end));
            }

            return(result.ToArray());
        }
Example No. 5
        public static NDArray clip_global_norm(NDArrayList arrays, float max_norm, bool check_isfinite = true)
        {
            Func <NDArray, NDArray> norm = array =>
            {
                if (array.SType == StorageStype.Default)
                {
                    var x = array.Reshape(-1);
                    return(nd.Dot(x, x));
                }

                return(array.Norm().Square());
            };

            if (arrays.Length == 0)
            {
                throw new ArgumentException("arrays.Length == 0");
            }

            var ctx        = arrays[0].Context;
            var total_norm = nd.AddN(arrays.Select(x => norm(x).AsInContext(ctx)).ToArray()); // sum of per-array squared norms

            total_norm = total_norm.Sqrt();
            if (check_isfinite)
            {
                var total_norm_scalar = total_norm.AsScalar <float>();
                if (float.IsNaN(total_norm_scalar) || float.IsInfinity(total_norm_scalar))
                {
                    Logger.Warning("nan or inf is detected. " +
                                   "Clipping results will be undefined.");
                }
            }

            var scale = max_norm / (total_norm + 1e-8f);

            scale = nd.Min(nd.Concat(new NDArrayList(scale, nd.Ones(new Shape(1), ctx)), 0));
            for (var i = 0; i < arrays.Length; i++)
            {
                arrays[i] *= scale.AsInContext(arrays[i].Context);
            }

            return(total_norm);
        }
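A usage sketch for clip_global_norm, assuming the method is in scope and the arrays stand in for gradients; values are illustrative:

        // Rescale two gradient arrays so their combined L2 norm is at most 1.0.
        var grads     = new NDArrayList(nd.Ones(new Shape(3, 3)), nd.Ones(new Shape(10)));
        var totalNorm = clip_global_norm(grads, max_norm: 1.0f);
        Console.WriteLine(totalNorm.AsScalar<float>());   // norm measured before clipping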
Example No. 6
        public static void CheckArrayLengthConsistency(NDArrayList inputs, NDArrayList targets, NDArrayList weights)
        {
            Func <NDArrayList, int[]> set_of_lengths = x => {
                if (x == null)
                {
                    return(new int[0]);
                }

                return(x.Select(i => i.Shape[0]).Distinct().ToArray()); // distinct sample counts across the list
            };

            var set_x = set_of_lengths(inputs);
            var set_y = set_of_lengths(targets);
            var set_w = set_of_lengths(weights);

            if (set_x.Length > 1)
            {
                throw new Exception("All input arrays (x) should have the same number of samples. Got array shapes: " + string.Join("|", (from x in inputs
                                                                                                                                          select x.Shape.ToString())));
            }
            if (set_y.Length > 1)
            {
                throw new Exception("All target arrays (y) should have the same number of samples. Got array shapes: " + string.Join("|", (from y in inputs
                                                                                                                                           select y.Shape.ToString())));
            }
            if (set_x.Length > 0 && set_y.Length > 0 && set_x[0] != set_y[0])
            {
                throw new Exception("Input arrays should have the same number of samples as target arrays. Found " + set_x[0] + " input samples and " + set_y[0] + " target samples.");
            }
            if (set_w.Length > 1)
            {
                throw new Exception("All sample_weight arrays should have the same number of samples. Got array shapes: " + string.Join("|", (from w in inputs
                                                                                                                                              select w.Shape.ToString())));
            }
            if (set_y.Length > 0 && set_w.Length > 0 && set_y[0] != set_w[0])
            {
                throw new Exception("Sample_weight arrays should have the same number of samples as target arrays. Got " + set_y[0] + " input samples and " + set_w[0] + " target samples.");
            }
        }
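A sketch of the failure path, assuming NDArray converts implicitly into the list arguments; mismatched sample counts (8 vs. 10) trip the third check above:

        var x = new NDArrayList(nd.Ones(new Shape(8, 2)));
        var y = new NDArrayList(nd.Ones(new Shape(10, 1)));
        try
        {
            CheckArrayLengthConsistency(x, y, null);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);   // "Input arrays should have the same number of samples..."
        }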
Example No. 7
        public override DataBatch Next()
        {
            if (!IterNext())
            {
                throw new Exception("Stop Iteration");
            }

            var d = GetData();
            var l = GetLabel();

            // iter should stop when last batch is not complete
            if (d[0].Shape[0] != BatchSize)
            {
                //in this case, cache it for next epoch
                _cache_data  = d;
                _cache_label = l;
                throw new Exception("Stop Iteration");
            }


            return(new DataBatch(d, l, GetPad()));
        }
Example No. 8
        public static NDArrayList StandardizeSampleOrClassWeight(NDArrayList x_weight, string[] output_names, string weight_type)
        {
            if (x_weight == null || x_weight.Length == 0)
            {
                return((from _ in output_names
                        select new NDArray()).ToList());
            }
            if (output_names.Length == 1)
            {
                if (x_weight.Length == 1)
                {
                    return(x_weight);
                }
            }

            if (x_weight.Length != output_names.Length)
            {
                throw new Exception("Provided `" + weight_type + "` was a list of " + x_weight.Length + " elements, but the model has " + output_names.Length + " outputs. You should provide one `" + weight_type + "`array per model output.");
            }

            return(x_weight);
        }
Example No. 9
        public override void OnEpochEnd(int epoch, Dictionary <string, float> logs = null)
        {
            var current = this.GetMonitorValue(logs);

            if (current == null)
            {
                return;
            }

            if (this.monitor_op(current.Value - this.min_delta, this.best))
            {
                this.best = current.Value;
                this.wait = 0;
                if (this.restore_best_weights)
                {
                    this.best_weights = this.model.GetWeights();
                }
            }
            else
            {
                this.wait += 1;
                if (this.wait >= this.patience)
                {
                    this.stopped_epoch       = epoch;
                    this.model.stop_training = true;
                    if (this.restore_best_weights)
                    {
                        if (this.verbose > 0)
                        {
                            Console.WriteLine("Restoring model weights from the end of the best epoch");
                        }

                        this.model.SetWeights(this.best_weights);
                    }
                }
            }
        }
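The decision above reduces to comparing the monitored value, offset by min_delta, against the best value seen so far, plus a patience counter. A self-contained sketch with plain floats ("max" mode and all values are illustrative):

        Func<float, float, bool> monitorOp = (a, b) => a > b;   // "max" mode: larger is better
        float best = float.MinValue, minDelta = 1e-4f;
        int   wait = 0, patience = 2;
        foreach (var current in new[] { 0.50f, 0.60f, 0.60f, 0.60f, 0.60f })
        {
            if (monitorOp(current - minDelta, best)) { best = current; wait = 0; }
            else if (++wait >= patience) { Console.WriteLine($"stopping at {current}"); break; }
        }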
Example No. 10
        public IEnumerable <(NDArrayList, int, DataBatch)> IterPredict(DataIter eval_data, int?num_batch = null,
                                                                       bool reset = true, int epoch = 0, Func <DataBatch, NDArrayDict> sparse_row_id_fn = null)
        {
            if (!Binded && !ParamsInitialized)
            {
                throw new Exception("Module not binded and param initialized");
            }

            if (reset)
            {
                eval_data.Reset();
            }

            while (!eval_data.End())
            {
                if (num_batch.HasValue && eval_data.Cursor == num_batch.Value)
                {
                    break;
                }

                var eval_batch = eval_data.Next();
                Prepare(eval_batch, sparse_row_id_fn);
                Forward(eval_batch, false);
                var pad     = eval_batch.Pad.Value;
                var outputs = new NDArrayList();
                foreach (var list in GetOutputs())
                {
                    foreach (var @out in list)
                    {
                        outputs.Add(@out[$"0:{@out.Shape[0] - pad}"]);
                    }
                }

                yield return(outputs.ToArray(), eval_data.Cursor, eval_batch);
            }
        }
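A consumption sketch for IterPredict; module and val_data are placeholders for a bound, parameter-initialized module and an evaluation DataIter like the val_data in Example No. 17:

        foreach (var (outputs, cursor, _) in module.IterPredict(val_data, num_batch: 5))
        {
            // outputs carries this batch's predictions with padded rows already trimmed
            Console.WriteLine($"cursor {cursor}: {outputs.Length} output array(s)");
        }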
Example No. 11
 public (NDArray, NDArray, NDArrayList, NDArrayList, NDArrayList) Call(NDArrayList data)
 {
     throw new NotImplementedException();
 }
Example No. 12
 public override void UpdateMetric(EvalMetric eval_metric, NDArrayList labels, bool pre_sliced = false)
 {
     _exec_group.UpdateMetric(eval_metric, labels, pre_sliced);
 }
Example No. 13
 public static float[] TestLoop(Model model, Func <NDArrayList, float[]> f, NDArrayList ins, int batch_size = 32, int verbose = 0, int?steps = null)
 {
     throw new NotImplementedException();
 }
Example No. 14
        public static History FitLoop(Model model, Func <NDArrayList, float[]> fit_function, NDArrayList fit_inputs, string[] out_labels = null, int?batch_size = null, int epochs = 100,
                                      int verbose = 1, CallbackList callbacks             = null, Func <NDArrayList, float[]> val_function = null, NDArrayList val_inputs = null, bool shuffle = true,
                                      string[] callback_metrics = null, int initial_epoch = 0, int?steps_per_epoch                         = null, int?validation_steps = null)
        {
            NDArrayList ins_batch = null;

            float[] val_outs = null;
            float[] outs     = null;
            Dictionary <string, float> batch_logs;
            Model   callback_model;
            string  count_mode;
            NDArray index_array   = null;
            var     do_validation = false;

            if (val_function != null && val_inputs != null)
            {
                do_validation = true;
                if (verbose > 0 && fit_inputs != null)
                {
                    Console.WriteLine($"Train on {fit_inputs[0].Shape[0]} samples, validate on {val_inputs[0].Shape[0]} samples");
                }
            }

            if (validation_steps != null)
            {
                do_validation = true;
                if (steps_per_epoch == null)
                {
                    throw new Exception("Can only use `validation_steps` when doing step-wise training, i.e. `steps_per_epoch` must be set.");
                }
            }
            else if (do_validation)
            {
                if (steps_per_epoch.HasValue)
                {
                    throw new Exception("Must specify `validation_steps` to perform validation when doing step-wise training.");
                }
            }

            var num_train_samples = TrainingUtils.CheckNumSamples(fit_inputs, batch_size: batch_size, steps: steps_per_epoch, steps_name: "steps_per_epoch");

            if (num_train_samples > 0)
            {
                index_array = nd.Arange(num_train_samples);
            }

            model.history = new History();
            var _callbacks = new List <Callback> {
                new BaseLogger(stateful_metrics: model.stateful_metric_names.ToArray())
            };

            if (verbose > 0)
            {
                if (steps_per_epoch != null)
                {
                    count_mode = "steps";
                }
                else
                {
                    count_mode = "samples";
                }
                _callbacks.Add(new ProgbarLogger(count_mode, stateful_metrics: model.stateful_metric_names.ToArray()));
            }

            _callbacks.Add(model.history);

            // merge any caller-supplied callbacks instead of discarding them
            var all_callbacks = new List <Callback>(_callbacks);
            if (callbacks != null)
            {
                all_callbacks.AddRange(callbacks.callbacks);
            }

            callbacks  = new CallbackList(all_callbacks.ToArray());
            out_labels = out_labels ?? new string[0];
            // it's possible to callback a different model than itself
            // (used by Sequential models)
            callback_model = model;

            callbacks.SetModel(callback_model);
            callbacks.SetParams(new Dictionary <string, object> {
                {
                    "batch_size",
                    batch_size
                },
                {
                    "epochs",
                    epochs
                },
                {
                    "steps",
                    steps_per_epoch
                },
                {
                    "samples",
                    num_train_samples
                },
                {
                    "verbose",
                    verbose
                },
                {
                    "do_validation",
                    do_validation
                },
                {
                    "metrics",
                    callback_metrics ?? new string[0]
                }
            });

            callbacks.OnTrainBegin();
            callback_model.stop_training = false;
            foreach (var cbk in callbacks.callbacks)
            {
                cbk.validation_data = val_inputs;
            }

            // To prevent a slowdown,
            // we find beforehand the arrays that need conversion.
            List <KerasSymbol> feed = new List <KerasSymbol>();

            feed.AddRange(model._feed_inputs);
            feed.AddRange(model._feed_targets);
            feed.AddRange(model._feed_sample_weights);

            var indices_for_conversion_to_dense = new List <int>();

            foreach (var i in Enumerable.Range(0, feed.Count))
            {
                if (!K.IsSparse(feed[i]))
                {
                    indices_for_conversion_to_dense.Add(i);
                }
            }

            foreach (var epoch in Enumerable.Range(initial_epoch, epochs - initial_epoch))
            {
                // Reset stateful metrics
                //ToDo: Recheck code
                //foreach (var m in model.stateful_metric_functions)
                //{
                //    m.reset_states();
                //}

                callbacks.OnEpochBegin(epoch);
                var epoch_logs = new Dictionary <string, float>
                {
                };
                if (steps_per_epoch != null)
                {
                    foreach (var step_index in Enumerable.Range(0, steps_per_epoch.Value))
                    {
                        batch_logs = new Dictionary <string, float>
                        {
                        };

                        batch_logs["batch"] = step_index;
                        batch_logs["size"]  = 1;
                        callbacks.OnBatchBegin(step_index, batch_logs);
                        outs = fit_function(fit_inputs);

                        for (int i = 0; i < out_labels.Length; i++)
                        {
                            // the indexer adds the key or overwrites an existing value
                            batch_logs[out_labels[i]] = outs[i];
                        }

                        callbacks.OnBatchEnd(step_index, batch_logs);
                        if (callback_model.stop_training)
                        {
                            break;
                        }
                    }
                    if (do_validation)
                    {
                        val_outs = TestLoop(model, val_function, val_inputs, steps: validation_steps, verbose: 0);
                        // Same labels assumed.
                        for (int i = 0; i < out_labels.Length; i++)
                        {
                            epoch_logs["val_" + out_labels[i]] = val_outs[i];
                        }
                    }
                }
                else
                {
                    if (shuffle && batch_size.HasValue)
                    {
                        index_array = TrainingUtils.BatchShuffle(index_array, batch_size.Value);
                    }
                    else if (shuffle)
                    {
                        nd.Shuffle(index_array);
                    }

                    var batches = TrainingUtils.MakeBatches(num_train_samples, batch_size.Value);
                    foreach (var _tup_3 in batches.Select((_p_1, _p_2) => Tuple.Create(_p_2, _p_1)))
                    {
                        var batch_index = _tup_3.Item1;
                        var(batch_start, batch_end) = _tup_3.Item2;
                        var batch_ids = index_array[$"{batch_start}:{batch_end}"];
                        try
                        {
                            ins_batch = GenericUtils.SliceArrays(fit_inputs, batch_ids.ArrayData.Cast <int>().ToArray());
                        }
                        catch (Exception ex)
                        {
                            throw new Exception("TypeError while preparing batch. If using HDF5 input data, pass shuffle=\"batch\".", ex);
                        }

                        batch_logs          = new Dictionary <string, float>();
                        batch_logs["batch"] = batch_index;
                        batch_logs["size"]  = batch_ids.Shape[0];
                        callbacks.OnBatchBegin(batch_index, batch_logs);
                        foreach (var i in indices_for_conversion_to_dense)
                        {
                            // mirrors the sparse-to-dense conversion step in the original Keras loop (a no-op in this port)
                            ins_batch[i] = ins_batch[i];
                        }

                        outs = fit_function(ins_batch);
                        for (int i = 0; i < out_labels.Length; i++)
                        {
                            batch_logs[out_labels[i]] = outs[i];
                        }

                        callbacks.OnBatchEnd(batch_index, batch_logs);
                        if (callback_model.stop_training)
                        {
                            break;
                        }
                        if (batch_index == batches.Length - 1)
                        {
                            // Last batch.
                            if (do_validation)
                            {
                                val_outs = TestLoop(model, val_function, val_inputs, batch_size: batch_size.Value, verbose: 0);
                                // Same labels assumed.
                                for (int i = 0; i < out_labels.Length; i++)
                                {
                                    epoch_logs["val_" + out_labels[i]] = val_outs[i];
                                }
                            }
                        }
                    }
                }
                callbacks.OnEpochEnd(epoch, epoch_logs);
                if (callback_model.stop_training)
                {
                    break;
                }
            }

            callbacks.OnTrainEnd();
            return(model.history);
        }
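A hedged sketch of driving FitLoop directly; model and trainingInputs are placeholders, and the stub delegate stands in for a real train step that returns one float per out_labels entry:

        Func<NDArrayList, float[]> trainStep = ins => new[] { 0.0f };   // stub loss
        var history = FitLoop(model, trainStep, trainingInputs,
                              out_labels: new[] { "loss" }, batch_size: 32, epochs: 5);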
Example No. 15
 public virtual void UpdateMultiPrecision(int[] indices, NDArrayList weights, NDArrayList grads, (NDArrayDict, NDArray)[] states)
Example No. 16
 public void Pull(string key, NDArrayList @out, int priority = 0, bool ignore_sparse = true)
 {
     NativeMethods.MXKVStorePullWithSparseEx(handle, 1, new[] { key }, MxUtil.GetNDArrayHandles(@out), priority,
                                             ignore_sparse);
 }
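Pull is the read half of the key-value store; paired with Push (Example No. 18) it round-trips a value. A sketch, assuming kv is a constructed KVStore whose key "w" has already been initialized:

 // Push an update, then read the current value for key "w" into recv.
 kv.Push("w", new NDArrayList(nd.Ones(new Shape(4))));
 var recv = new NDArrayList(nd.Ones(new Shape(4)));
 kv.Pull("w", recv);   // recv is overwritten with the stored value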
Example No. 17
        public static void RunSimple()
        {
            var mnist      = TestUtils.GetMNIST(); //Get the MNIST dataset, it will download if not found
            var batch_size = 100;                  //Set training batch size
            var train_data = new NDArrayIter(mnist["train_data"], mnist["train_label"], batch_size);
            var val_data   = new NDArrayIter(mnist["test_data"], mnist["test_label"], batch_size);

            // Define simple network with dense layers
            var net = new Sequential();

            net.Add(new Dense(128, ActivationType.Relu));
            net.Add(new Dense(64, ActivationType.Relu));
            net.Add(new Dense(10));

            //Set context, multi-gpu supported
            var gpus = TestUtils.ListGpus();
            var ctx  = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu(0) };

            //Initialize the weights
            net.Initialize(new Xavier(magnitude: 2.24f), ctx);

            //Create the trainer with all the network parameters and set the optimizer
            var trainer = new Trainer(net.CollectParams(), new Adam());

            var   epoch  = 10;
            var   metric = new Accuracy(); //Use Accuracy as the evaluation metric.
            var   softmax_cross_entropy_loss = new SoftmaxCrossEntropyLoss();
            float lossVal = 0;             //For loss calculation

            for (var iter = 0; iter < epoch; iter++)
            {
                var tic = DateTime.Now;
                // Reset the train data iterator.
                train_data.Reset();
                lossVal = 0;

                // Loop over the train data iterator.
                while (!train_data.End())
                {
                    var batch = train_data.Next();

                    // Splits train data into multiple slices along batch_axis
                    // and copy each slice into a context.
                    var data = Utils.SplitAndLoad(batch.Data[0], ctx, batch_axis: 0);

                    // Splits train labels into multiple slices along batch_axis
                    // and copy each slice into a context.
                    var label = Utils.SplitAndLoad(batch.Label[0], ctx, batch_axis: 0);

                    var outputs = new NDArrayList();

                    // Inside training scope
                    NDArray loss = null;
                    for (int i = 0; i < data.Length; i++)
                    {
                        using (var ag = Autograd.Record())
                        {
                            var x = data[i];
                            var y = label[i];
                            var z = net.Call(x);
                            // Computes softmax cross entropy loss.
                            loss = softmax_cross_entropy_loss.Call(z, y);
                            outputs.Add(z);
                        }

                        // Backpropagate the error for one iteration.
                        loss.Backward();
                        lossVal += loss.Mean();
                    }

                    // Updates internal evaluation
                    metric.Update(label, outputs.ToArray());

                    // Make one step of parameter update. Trainer needs to know the
                    // batch size of data to normalize the gradient by 1/batch_size.
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var toc = DateTime.Now;

                // Gets the evaluation result.
                var(name, acc) = metric.Get();

                // Reset evaluation result to initial state.
                metric.Reset();
                Console.Write($"Loss: {lossVal} ");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={(acc * 100).ToString("0.##")}%, Duration: {(toc - tic).TotalSeconds.ToString("0.#")}s");
            }
        }
Example No. 18
 public void Push(string key, NDArrayList value, int priority = 0)
 {
     NativeMethods.MXKVStorePushEx(handle, 1, new[] { key }, MxUtil.GetNDArrayHandles(value), priority);
 }
Example No. 19
 public static NDArrayList MsBatchifyFn(NDArrayList data) => throw new NotImplementedException();
Example No. 20
 public Callback()
 {
     this.validation_data = null;
     this.model           = null;
 }
Example No. 21
 public static (NDArrayList, string[]) TransformTest(NDArrayList imgs, int @short = 416, int max_size = 1024, (float, float, float)?mean = null, (float, float, float)?std = null)
Example No. 22
 public static NDArrayList TransformEval(NDArrayList imgs, int resize_short = 256, int crop_size = 224, (float, float, float)?mean = null, (float, float, float)?std = null)
Example No. 23
        public static void RunConv()
        {
            var mnist      = TestUtils.GetMNIST();
            var batch_size = 128;
            var train_data = new NDArrayIter(mnist["train_data"], mnist["train_label"], batch_size, true);
            var val_data   = new NDArrayIter(mnist["test_data"], mnist["test_label"], batch_size);

            var net = new Sequential();

            net.Add(new Conv2D(20, kernel_size: (5, 5), activation: ActivationType.Tanh));
            net.Add(new MaxPool2D(pool_size: (2, 2), strides: (2, 2)));
            net.Add(new Conv2D(50, kernel_size: (5, 5), activation: ActivationType.Tanh));
            net.Add(new MaxPool2D(pool_size: (2, 2), strides: (2, 2)));
            net.Add(new Flatten());
            net.Add(new Dense(500, ActivationType.Tanh));
            net.Add(new Dense(10));

            var gpus = TestUtils.ListGpus();
            var ctx  = gpus.Count > 0 ? gpus.Select(x => Context.Gpu(x)).ToArray() : new[] { Context.Cpu(0) };

            net.Initialize(new Xavier(magnitude: 2.24f), ctx);
            var trainer = new Trainer(net.CollectParams(), new SGD(learning_rate: 0.02f));

            var   epoch  = 10;
            var   metric = new Accuracy();
            var   softmax_cross_entropy_loss = new SoftmaxCELoss();
            float lossVal = 0;

            for (var iter = 0; iter < epoch; iter++)
            {
                var tic = DateTime.Now;
                train_data.Reset();
                lossVal = 0;
                while (!train_data.End())
                {
                    var batch = train_data.Next();
                    var data  = Utils.SplitAndLoad(batch.Data[0], ctx, batch_axis: 0);
                    var label = Utils.SplitAndLoad(batch.Label[0], ctx, batch_axis: 0);

                    var outputs = new NDArrayList();
                    using (var ag = Autograd.Record())
                    {
                        for (var i = 0; i < data.Length; i++)
                        {
                            var x = data[i];
                            var y = label[i];

                            var     z    = net.Call(x);
                            NDArray loss = softmax_cross_entropy_loss.Call(z, y);
                            loss.Backward();
                            lossVal += loss.Mean();
                            outputs.Add(z);
                        }

                        //outputs = Enumerable.Zip(data, label, (x, y) =>
                        //{
                        //    var z = net.Call(x);
                        //    NDArray loss = softmax_cross_entropy_loss.Call(z, y);
                        //    loss.Backward();
                        //    lossVal += loss.Mean();
                        //    return z;
                        //}).ToList();
                    }

                    metric.Update(label, outputs.ToArray());
                    trainer.Step(batch.Data[0].Shape[0]);
                }

                var toc = DateTime.Now;

                var(name, acc) = metric.Get();
                metric.Reset();
                Console.Write($"Loss: {lossVal} ");
                Console.WriteLine($"Training acc at epoch {iter}: {name}={(acc * 100).ToString("0.##")}%, Duration: {(toc - tic).TotalSeconds.ToString("0.#")}s");
            }
        }
Example No. 24
 public override void Broadcast(string key, NDArray value, NDArrayList @out, int priority = 0)
 {
     Init(key, value);
     Pull(key, @out, priority);
 }
Example No. 25
 public abstract void UpdateMetric(EvalMetric eval_metric, NDArrayList labels, bool pre_sliced = false);
Example No. 26
 public override void PushPull(string key, NDArray value, NDArrayList @out, int priority = 0)
 {
     NativeMethods.MXKVStorePushPullEx(handle, 1, new[] { key }, @out.Length, new[] { key }, new[] { value.NativePtr },
                                       MxUtil.GetNDArrayHandles(@out), priority);
 }
Example No. 27
        public List <NDArrayList> Predict(DataIter eval_data, int?num_batch = null, bool merge_batches = true,
                                          bool reset = true, bool always_output_list = true, Func <DataBatch, NDArrayDict> sparse_row_id_fn = null)
        {
            if (!Binded && !ParamsInitialized)
            {
                throw new Exception("Module not binded and param initialized");
            }

            if (reset)
            {
                eval_data.Reset();
            }

            var output_list  = new List <NDArrayList>();
            var output_list2 = new NDArrayList();

            while (!eval_data.End())
            {
                if (num_batch.HasValue && eval_data.Cursor == num_batch.Value)
                {
                    break;
                }

                var eval_batch = eval_data.Next();
                Prepare(eval_batch, sparse_row_id_fn);
                Forward(eval_batch, false);
                var pad     = eval_batch.Pad.Value;
                var outputs = new NDArrayList();
                foreach (var list in GetOutputs())
                {
                    foreach (var @out in list)
                    {
                        outputs.Add(@out[$"0:{@out.Shape[0] - pad}"].Copy());
                    }
                }

                output_list.Add(outputs.ToArray());
            }

            if (output_list.Count == 0)
            {
                return(output_list);
            }

            if (merge_batches)
            {
                var num_outputs = output_list[0].Length;
                foreach (var @out in output_list)
                {
                    if (@out.Length != num_outputs)
                    {
                        throw new Exception("Cannot merge batches, as num of outputs is not the same " +
                                            "in mini-batches. Maybe bucketing is used?");
                    }
                }

                // concatenate each output across batches along the batch axis
                for (var i = 0; i < num_outputs; i++)
                {
                    output_list2.Add(nd.Concat(new NDArrayList(output_list.Select(x => x[i]).ToArray()), 0));
                }

                return(new List <NDArrayList> {
                    output_list2.ToArray()
                });
            }

            return(output_list);
        }
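Typical use of Predict with batch merging; module and val_data are placeholders set up as in Example No. 17:

        var preds = module.Predict(val_data, merge_batches: true);
        // with merge_batches, the single returned entry holds one NDArray per model
        // output, each concatenated across batches along axis 0
        Console.WriteLine(preds[0][0].Shape);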
Example No. 28
 private NDArray SliceWeight(NDArrayList arr, int li, int lh)
 {
     throw new NotImplementedException();
 }
Example No. 29
 public static (NDArray, NDArray) TransformTest(NDArrayList imgs, int @short, int max_size = 1024, (float, float, float)?mean = null, (float, float, float)?std = null)
Example No. 30
 public abstract void Backward(NDArrayList out_grads = null);