public override Tensor log_prob(Tensor value)
{
    // Gather the log-PMF entries that correspond to the given category indices.
    value = value.@long().unsqueeze(-1);
    var valLogPmf = torch.broadcast_tensors(value, logits);
    value = valLogPmf[0][TensorIndex.Ellipsis, TensorIndex.Slice(null, 1)];
    return valLogPmf[1].gather(-1, value).squeeze(-1);
}
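// A minimal usage sketch (not part of the distribution class): it assumes this log_prob belongs to a
// Categorical-style distribution created from a [batch, classes] logits tensor via the
// torch.distributions.Categorical factory, and shows that the result is one log-probability per row.
// All names and shapes below are illustrative.
static void LogProbExample()
{
    var logits = torch.randn(new long[] { 4, 10 });                 // 4 samples, 10 classes
    var dist = torch.distributions.Categorical(logits: logits);
    var classes = torch.tensor(new long[] { 1, 3, 5, 7 });          // one class index per sample
    var lp = dist.log_prob(classes);                                // shape: [4]
}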
static (Tensor, Tensor) GetBatch(Tensor source, int index, int bptt)
{
    // Slice out up to `bptt` time steps; the targets are the same tokens shifted one step ahead.
    var len = Math.Min(bptt, source.shape[0] - 1 - index);
    var data = source[TensorIndex.Slice(index, index + len)];
    var target = source[TensorIndex.Slice(index + 1, index + 1 + len)].reshape(-1);
    return (data, target);
}
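// A sketch of how GetBatch might be driven during training, assuming `source` is an already
// batchified [seq_len, batch] tensor of token ids. The tensor and the bptt value below are made up.
static void IterateBatchesExample()
{
    const int bptt = 32;
    var source = torch.zeros(new long[] { 1024, 20 }, ScalarType.Int64);   // placeholder token ids
    for (var i = 0; i < source.shape[0] - 1; i += bptt) {
        var (data, targets) = GetBatch(source, i, bptt);
        // data: [len, 20]; targets: [len * 20], the next token for every position in `data`.
    }
}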
private static List<Tensor> LoadImages(IList<string> images, int batchSize, int channels, int height, int width)
{
    List<Tensor> tensors = new List<Tensor>();

    var imgSize = channels * height * width;
    bool shuffle = false;

    Random rnd = new Random();
    var indices = !shuffle ?
        Enumerable.Range(0, images.Count).ToArray() :
        Enumerable.Range(0, images.Count).OrderBy(c => rnd.Next()).ToArray();

    // Go through the data and create tensors, one batch of `batchSize` images at a time.
    for (var i = 0; i < images.Count;) {

        var take = Math.Min(batchSize, Math.Max(0, images.Count - i));
        if (take < 1) break;

        var dataTensor = torch.zeros(new long[] { take, imgSize }, ScalarType.Byte);

        // Decode each image in the batch, resizing it to the target dimensions when needed.
        for (var j = 0; j < take; j++) {
            var idx = indices[i++];

            using (var stream = new SKManagedStream(File.OpenRead(images[idx])))
            using (var bitmap = SKBitmap.Decode(stream))
            using (var inputTensor = torch.tensor(GetBytesWithoutAlpha(bitmap))) {

                Tensor finalized = inputTensor;

                if (bitmap.Width != width || bitmap.Height != height) {
                    var t = inputTensor.reshape(1, channels, bitmap.Height, bitmap.Width);
                    finalized = torchvision.transforms.functional.resize(t, height, width).reshape(imgSize);
                }

                // Copy this image's flattened bytes into row `j` of the batch tensor.
                dataTensor.index_put_(finalized, TensorIndex.Single(j));
            }
        }

        tensors.Add(dataTensor.reshape(take, channels, height, width));
        dataTensor.Dispose();
    }

    return tensors;
}
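// A hypothetical call site: turn a directory of RGB image files into batched CHW byte tensors and
// normalize them before feeding a model. The path, extension, and sizes are placeholders.
static void LoadImagesExample()
{
    var files = Directory.EnumerateFiles("assets/images", "*.jpg").ToList();
    var batches = LoadImages(files, batchSize: 32, channels: 3, height: 256, width: 256);
    foreach (var batch in batches) {
        using var input = batch.to_type(ScalarType.Float32).div(255);   // bytes -> floats in [0, 1]
        // ... run `input` through a network here ...
        batch.Dispose();
    }
}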
public PositionalEncoding(long dmodel, double dropout, int maxLen = 5000) : base("PositionalEncoding")
{
    this.dropout = Dropout(dropout);

    // Pre-compute the sinusoidal position table: sine on even feature indices, cosine on odd ones.
    var pe = torch.zeros(new long[] { maxLen, dmodel });
    var position = torch.arange(0, maxLen, 1).unsqueeze(1);
    var divTerm = (torch.arange(0, dmodel, 2) * (-Math.Log(10000.0) / dmodel)).exp();
    pe[TensorIndex.Ellipsis, TensorIndex.Slice(0, null, 2)] = (position * divTerm).sin();
    pe[TensorIndex.Ellipsis, TensorIndex.Slice(1, null, 2)] = (position * divTerm).cos();

    // Shape [maxLen, 1, dmodel] so the table broadcasts over the batch dimension in forward().
    this.pe = pe.unsqueeze(0).transpose(0, 1);

    RegisterComponents();
}
public static Tensor crop(this Tensor image, int top, int left, int height, int width)
{
    var dims = image.Dimensions;
    var hoffset = dims - 2;
    long h = image.shape[hoffset], w = image.shape[hoffset + 1];

    var right = left + width;
    var bottom = top + height;

    if (left < 0 || top < 0 || right > w || bottom > h) {
        // The crop window extends beyond the image: take the in-bounds part and zero-pad the rest.
        var slice = image.index(
            TensorIndex.Ellipsis,
            TensorIndex.Slice(Math.Max(top, 0), bottom),
            TensorIndex.Slice(Math.Max(left, 0), right));
        // torch.nn.functional.pad expects the last-dimension pair first: (left, right, top, bottom).
        var padding = new long[] { Math.Max(-left, 0), Math.Max(right - w, 0), Math.Max(-top, 0), Math.Max(bottom - h, 0) };
        return TorchSharp.torch.nn.functional.pad(slice, padding);
    }

    return image.index(TensorIndex.Ellipsis, TensorIndex.Slice(top, bottom), TensorIndex.Slice(left, right));
}
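// A small sketch of the crop semantics: a window fully inside the image is a plain slice, while a
// window that spills over the border is padded back out to the requested size. The tensor is made up.
static void CropExample()
{
    var img = torch.rand(new long[] { 3, 100, 100 });
    var inside = img.crop(10, 10, 50, 50);      // fully in bounds: [3, 50, 50]
    var clipped = img.crop(-8, 90, 32, 32);     // crosses the top and right edges, padded to [3, 32, 32]
}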
public override Tensor forward(Tensor t)
{
    using var x = t + pe[TensorIndex.Slice(null, t.shape[0]), TensorIndex.Slice()];
    return dropout.forward(x);
}
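// A usage sketch for the PositionalEncoding module above, assuming inputs are laid out as
// [seq_len, batch, d_model]. The shapes and hyper-parameters here are illustrative only.
static void PositionalEncodingExample()
{
    var posEnc = new PositionalEncoding(dmodel: 200, dropout: 0.1);
    var embedded = torch.randn(new long[] { 35, 20, 200 });   // token embeddings, [seq, batch, d_model]
    var withPositions = posEnc.forward(embedded);             // same shape, position signal added
}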