public static ((NDArray, NDArray), (NDArray, NDArray)) LoadData(string path = "boston_housing.npz", float test_split = 0.2f, int seed = 113) { Debug.Assert(0 <= test_split && test_split < 1); path = DataUtils.GetFile(path, origin: "https://s3.amazonaws.com/keras-datasets/boston_housing.npz", file_hash: "f553886a1f8d56431e820c5b82552d9d95cfcb96d1e678153f8839538947dff5"); var arrays = NDArray.LoadNpz(path); var x = arrays[0]; var y = arrays[1]; mx.Seed(seed); NDArray indices = np.arange(x.Shape[0]); indices = nd.Shuffle(indices.AsType(DType.Int32)); x = x[indices]; y = y[indices]; int n = x.Shape[0]; int test_n = Convert.ToInt32(test_split * n); var x_train = x[$":{test_n}"]; var y_train = y[$":{test_n}"]; var x_test = x[$"{test_n}:"]; var y_test = y[$"{test_n}:"]; return((x_train, y_train), (x_test, y_test)); }
public override void Update(NDArray labels, NDArray preds)
{
    CheckLabelShapes(labels, preds);

    // TODO: use an argpartition-style top-k selection instead of a full argsort.
    var pred_label = preds.Argsort().AsType(DType.Int32);
    var label = labels.AsType(DType.Int32);
    var num_samples = pred_label.Shape[0];
    var num_dims = pred_label.Shape.Dimension;

    if (num_dims == 1)
    {
        float num_correct = nd.Equal(pred_label.Ravel(), label.Ravel()).Sum();
        sum_metric += num_correct;
        global_sum_metric += num_correct;
    }
    else if (num_dims == 2)
    {
        // Argsort is ascending, so the top-k predictions sit in the last k columns.
        var num_classes = pred_label.Shape[1];
        TopK = Math.Min(num_classes, TopK);
        for (var j = 0; j < TopK; j++)
        {
            float num_correct = nd.Equal(pred_label[$":,{num_classes - 1 - j}"].Ravel(), label.Ravel()).Sum();
            sum_metric += num_correct;
            global_sum_metric += num_correct;
        }
    }

    num_inst += num_samples;
    global_num_inst += num_samples;
}
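// Usage sketch (hypothetical metric class name; shapes are illustrative):
//   var metric = new TopKAccuracy();   // assumed to expose the TopK property used above
//   metric.Update(labels, preds);      // labels: (batch,), preds: (batch, num_classes)
// A prediction counts as correct if the true label appears among the TopK highest scores.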
public override NDArrayDict CreateState(int index, NDArray weight)
{
    var state = new NDArrayDict();
    state["weight_master_copy"] = null;
    state["momentum"] = null;

    // With multi-precision enabled, float16 weights keep a float32 master copy
    // that the update is applied to; the momentum buffer is kept in float32 to
    // match the master copy (mirroring Python MXNet's SGD).
    if (MultiPrecision && weight.DataType.Name == DType.Float16.Name)
    {
        state["weight_master_copy"] = weight.AsType(DType.Float32);
        if (Momentum != 0)
            state["momentum"] = nd.Zeros(weight.Shape, weight.Context, DType.Float32).ToSType(weight.SType);

        return state;
    }

    if (!MultiPrecision && weight.DataType.Name == DType.Float16.Name)
        Logger.Warning("Accumulating with float16 in optimizer can lead to " +
                       "poor accuracy or slow convergence. " +
                       "Consider using multi_precision=True option of the " +
                       "SGD optimizer");

    if (Momentum != 0)
        state["momentum"] = nd.Zeros(weight.Shape, weight.Context, weight.DataType).ToSType(weight.SType);

    return state;
}
public void UpdateBinaryStats(NDArray label, NDArray pred)
{
    label = label.AsType(DType.Int32);
    var pred_label = nd.Argmax(pred, 1);
    CheckLabelShapes(label, pred);
    // TODO: check unique values and throw an error for non-binary labels.

    // Indicator vectors for the positive (1) and negative (0) classes.
    var pred_true = nd.EqualScalar(pred_label, 1);
    var pred_false = 1 - pred_true;
    var label_true = nd.EqualScalar(label, 1);
    var label_false = 1 - label_true;

    // Entries of the 2x2 confusion matrix for this batch.
    var true_pos = (pred_true * label_true).Sum();
    var false_pos = (pred_true * label_false).Sum();
    var false_neg = (pred_false * label_true).Sum();
    var true_neg = (pred_false * label_false).Sum();

    true_positives += (int)true_pos;
    global_true_positives += (int)true_pos;
    false_positives += (int)false_pos;
    global_false_positives += (int)false_pos;
    false_negatives += (int)false_neg;
    global_false_negatives += (int)false_neg;
    true_negatives += (int)true_neg;
    global_true_negatives += (int)true_neg;
}
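// Worked example (hypothetical values): with pred_label = [1, 0, 1, 0] and label = [1, 1, 0, 0],
// the update above accumulates TP = 1, FN = 1, FP = 1, TN = 1; precision TP/(TP+FP) and
// recall TP/(TP+FN) then follow from the running counters.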
public override void Update(NDArray labels, NDArray preds)
{
    var label = labels.AsType(DType.Int32).AsNumpy();
    var pred = np.argmax(preds.AsNumpy(), 1).astype(NPTypeCode.Int32);

    // Grow the k x k confusion matrix if a previously unseen class id shows up.
    var n = np.max(pred.max(), label.max()).Data<int>()[0];
    if (n >= k)
        Grow(n + 1 - k);

    // Accumulate the batch confusion matrix: rows index the prediction, columns
    // the ground-truth label. LINQ's Zip is lazily evaluated, so the pairs are
    // enumerated with foreach to guarantee the accumulation actually runs.
    var bcm = np.zeros(k, k);
    foreach (var (i, j) in pred.Data<int>().Zip(label.Data<int>(), (i, j) => (i, j)))
        bcm[i, j] += 1;

    lcm += bcm;
    gcm += bcm;
    num_inst += 1;
    global_num_inst += 1;
}
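// Worked example (hypothetical values): with pred = [0, 2, 1] and label = [0, 1, 1],
// the batch matrix bcm receives bcm[0,0] += 1, bcm[2,1] += 1 and bcm[1,1] += 1,
// i.e. rows index the predicted class and columns the ground-truth class.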
public void TestAsType()
{
    NDArray nd = new NDArray(1, 2, 3);
    int[] i = nd.AsType<int>();
    int[] t = new int[] { 1, 2, 3 };
    Assert.True(Enumerable.SequenceEqual(i, t), "Arrays are not equal.");
}
public virtual (NDArrayDict, NDArray) CreateStateMultiPrecision(int index, NDArray weight)
{
    NDArray weight_master_copy = null;
    if (MultiPrecision && weight.DataType.Name == DType.Float16.Name)
    {
        // Keep a float32 master copy of float16 weights and build the optimizer
        // state against it.
        weight_master_copy = weight.AsType(DType.Float32);
        return (CreateState(index, weight_master_copy), weight_master_copy);
    }

    if (!MultiPrecision && weight.DataType.Name == DType.Float16.Name)
        Logger.Warning("Accumulating with float16 in optimizer can lead to " +
                       "poor accuracy or slow convergence. " +
                       "Consider using multi_precision=True option of the " +
                       "optimizer");

    return (CreateState(index, weight), weight);
}
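// Usage note (sketch): an optimizer is assumed to call CreateStateMultiPrecision once per
// parameter; for float16 weights with MultiPrecision enabled it receives both the state and
// a float32 master copy, applies the update to the master copy, then casts back to float16.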
public override void Update(NDArray labels, NDArray preds)
{
    float loss = 0;
    var num = 0;
    labels = labels.AsInContext(preds.Context).Reshape(preds.Size);

    // Pick the predicted probability assigned to each true label.
    preds = nd.Pick(preds, labels.AsType(DType.Int32), Axis);

    if (IgnoreLabel.HasValue)
    {
        // Ignored positions are excluded from the count and replaced with a
        // probability of 1 so they contribute -log(1) = 0 to the loss.
        var ignore = nd.EqualScalar(labels, IgnoreLabel.Value).AsType(preds.DataType);
        num -= nd.Sum(ignore).AsScalar<int>();
        preds = preds * (1 - ignore) + ignore;
    }

    // Clamp probabilities away from zero before taking the log.
    loss -= nd.Sum(nd.Log(nd.MaximumScalar(preds, 1e-10f))).AsScalar<float>();
    num += preds.Size;

    sum_metric += loss;
    global_sum_metric += loss;
    num_inst += num;
    global_num_inst += num;
}
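// Worked example (hypothetical values): for a preds row [0.1, 0.7, 0.2] with label 1,
// nd.Pick selects 0.7 and the row contributes -log(0.7) ≈ 0.357 to the running loss;
// a position equal to IgnoreLabel is replaced by 1, adding -log(1) = 0 and reducing num by one.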
public static void ImShow(NDArray x, string winname = "", bool wait = true)
{
    if (winname == "")
        winname = "test";

    bool transpose = true;

    // A 4-D input is assumed to be a single-image batch (1, C, H, W); drop the
    // batch axis and display as 8-bit.
    if (x.Shape.Dimension == 4)
        x = x.Reshape(x.Shape[1], x.Shape[2], x.Shape[3]).AsType(DType.UInt8);
    else
        x = x.AsType(DType.UInt8);

    // A leading axis longer than 3 is taken to be the height of an already
    // channel-last (H, W, C) image; otherwise convert (C, H, W) to channel-last
    // for OpenCV.
    if (x.Shape[0] > 3)
        transpose = false;

    if (transpose)
        x = x.Transpose(new Shape(1, 2, 0));

    NDArray.WaitAll();
    Mat mat = x;
    Cv2.ImShow(winname, mat);
    NDArray.WaitAll();
    if (wait)
        Cv2.WaitKey();
}
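// Usage sketch (assumes OpenCvSharp is installed; "image" stands for a (1, C, H, W),
// (C, H, W) or (H, W, C) tensor):
//   ImShow(image, "preview");              // blocks until a key is pressed
//   ImShow(image, "preview", wait: false); // returns immediately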
public override NDArray Call(NDArray src)
{
    // Cast the input array to the transform's target dtype.
    return src.AsType(DataType);
}
public static ((NDArray, NDArray), (NDArray, NDArray)) LoadData(string path = "imdb.npz", int?num_words = null, int skip_top = 0, int?maxlen = null, int seed = 113, int start_char = 1, int oov_char = 2, int index_from = 3) { path = DataUtils.GetFile(path, origin: "https://s3.amazonaws.com/text-datasets/imdb.npz", file_hash: "599dadb1135973df5b59232a0e9a887c"); var arrays = NDArray.LoadNpz(path); var x_train = arrays[0]; var labels_train = arrays[1]; var x_test = arrays[2]; var labels_test = arrays[4]; mx.Seed(seed); NDArray indices = nd.Arange(0, x_train.Shape[0]); indices = nd.Shuffle(indices.AsType(DType.Int32)); x_train = x_train[indices]; labels_train = labels_train[indices]; indices = nd.Arange(0, x_test.Shape[0]); indices = nd.Shuffle(indices.AsType(DType.Int32)); x_test = x_test[indices]; labels_test = labels_test[indices]; ndarray xs = nd.Concat(new List <NDArray> { x_train, x_test }); ndarray labels = nd.Concat(new List <NDArray> { labels_train, labels_test }); //if (start_char != 0) //{ // xs = (from x in xs // select (new List<int> { // start_char // } + (from w in x // select (w + index_from)).ToList())).ToList(); //} //else if (index_from) //{ // xs = (from x in xs // select (from w in x // select (w + index_from)).ToList()).ToList(); //} //if (maxlen) //{ // var _tup_1 = _remove_long_seq(maxlen, xs, labels); // xs = _tup_1.Item1; // labels = _tup_1.Item2; // if (!xs) // { // throw new ValueError("After filtering for sequences shorter than maxlen=" + maxlen.ToString() + ", no sequence was kept. Increase maxlen."); // } //} //if (!num_words) //{ // num_words = max((from x in xs // select max(x)).ToList()); //} //// by convention, use 2 as OOV word //// reserve 'index_from' (=3 by default) characters: //// 0 (padding), 1 (start), 2 (OOV) //if (oov_char != null) //{ // xs = (from x in xs // select (from w in x // select skip_top <= w < num_words ? w : oov_char).ToList()).ToList(); //} //else //{ // xs = (from x in xs // select (from w in x // where skip_top <= w < num_words // select w).ToList()).ToList(); //} //var idx = x_train.Count; //x_train = np.array(xs[::idx]); //var y_train = np.array(labels[::idx]); //x_test = np.array(xs[idx]); //var y_test = np.array(labels[idx]); //return Tuple.Create((x_train, y_train), (x_test, y_test)); throw new NotImplementedException(); }
public static float Acc(NDArray output, NDArray label)
{
    // output: (batch, num_output) float32 scores; label: (batch,) int32 class ids.
    // Accuracy is the fraction of rows whose argmax matches the label.
    return nd.Equal(output.Argmax(axis: 1), label.AsType(DType.Float32)).Mean();
}
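// Usage sketch (array construction is application-specific):
//   NDArray output = ...;                 // (batch, num_output) float32 scores
//   NDArray label  = ...;                 // (batch,) class ids
//   float accuracy = Acc(output, label);  // e.g. 0.75f if 3 of 4 rows match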
public static NDArray CastToFloatX(NDArray x)
{
    return x.AsType(_FLOATX);
}