/// <summary>
/// Lazily splits <paramref name="data"/> into minibatches of at most
/// <paramref name="minibatch_size"/> sentences. Each yielded tuple pairs the
/// batched inputs (per-sentence char-id arrays and word-id array, split into
/// parallel arrays) with the batched label sequences.
/// </summary>
/// <param name="data">Dataset to iterate; enumerated once, lazily.</param>
/// <param name="minibatch_size">Maximum sentences per batch; the final batch may be smaller.</param>
/// <returns>A lazy sequence of (inputs, labels) batches.</returns>
private IEnumerable<((int[][], int[])[], int[][])> minibatches(CoNLLDataset data, int minibatch_size)
{
    var x_batch = new List<(int[][], int[])>();
    var y_batch = new List<int[]>();

    foreach (var (x, y) in data.GetItems())
    {
        // Flush the accumulated batch once it reaches the requested size.
        // (.Count is the idiomatic form; the original used a Python-style len() helper.)
        if (y_batch.Count == minibatch_size)
        {
            yield return (x_batch.ToArray(), y_batch.ToArray());
            x_batch.Clear();
            y_batch.Clear();
        }

        // Split each token's (char ids, word id) pair into two parallel arrays.
        var x3 = (x.Select(x1 => x1.Item1).ToArray(), x.Select(x2 => x2.Item2).ToArray());
        x_batch.Add(x3);
        y_batch.Add(y);
    }

    // Emit the trailing, possibly short, batch.
    if (y_batch.Count > 0)
    {
        yield return (x_batch.ToArray(), y_batch.ToArray());
    }
}
/// <summary>
/// Runs one training epoch: feeds every minibatch of <paramref name="train"/>
/// through the training op and loss fetch.
/// </summary>
/// <param name="sess">Active TensorFlow session.</param>
/// <param name="train">Training dataset.</param>
/// <param name="dev">Development dataset; not read in this body.</param>
/// <param name="epoch">Epoch index; not read in this body.</param>
/// <returns>The value fetched alongside the train op for the last batch
/// (0 when the dataset yields no batches). NOTE(review): despite the local
/// name, this is the second fetch (`loss`) — confirm whether it is meant to
/// be an accuracy metric.</returns>
// NOTE(review): a run_epoch with an identical signature appears immediately
// below; both cannot coexist in one class — confirm which variant is live.
private float run_epoch(Session sess, CoNLLDataset train, CoNLLDataset dev, int epoch)
{
    float accuracy = 0;

    // Iterate over the dataset one minibatch at a time.
    foreach (var (words, labels) in minibatches(train, hp.batch_size))
    {
        var (fd, _) = get_feed_dict(words, labels, hp.lr, hp.dropout);
        (_, accuracy) = sess.run((train_op, loss), feed_dict: fd);
    }

    return accuracy;
}
/// <summary>
/// Runs one training epoch: feeds every minibatch of <paramref name="train"/>
/// through the session, fetching the train op and the loss as an array.
/// </summary>
/// <param name="sess">Active TensorFlow session.</param>
/// <param name="train">Training dataset.</param>
/// <param name="dev">Development dataset; not read in this body.</param>
/// <param name="epoch">Epoch index; not read in this body.</param>
/// <returns>The loss fetched for the last batch (index 1 of the fetch results).
/// NOTE(review): assumes at least one batch — `results` stays null otherwise
/// and the indexer would throw; confirm callers never pass an empty dataset.</returns>
// NOTE(review): a run_epoch with an identical signature appears immediately
// above; both cannot coexist in one class — confirm which variant is live.
private float run_epoch(Session sess, CoNLLDataset train, CoNLLDataset dev, int epoch)
{
    NDArray results = null;

    // Iterate over the dataset one minibatch at a time.
    foreach (var (words, labels) in minibatches(train, hp.batch_size))
    {
        var (fd, _) = get_feed_dict(words, labels, hp.lr, hp.dropout);

        // Fetch both ops; slot 0 is the (valueless) train op, slot 1 the loss.
        var fetches = new ITensorOrOperation[] { train_op, loss };
        results = sess.run(fetches, feed_dict: fd);
    }

    return results[1];
}