// TODO: try whitening-like techniques
public override torch.Tensor forward(torch.Tensor features)
{
    // Pool by taking the <s> token (equivalent to [CLS]):
    // select sequence position 0 for every batch element and every hidden dim.
    // assumes features is (batch, seq, hidden) — TODO confirm against caller
    using var clsToken = features[torch.TensorIndex.Colon, torch.TensorIndex.Single(0), torch.TensorIndex.Colon];
    return Classifier.forward(clsToken);
}
public override TorchTensor forward(TorchTensor input)
{
    // Pipeline: feature extractor -> average pooling -> flatten -> classifier head.
    using var featureMaps = features.forward(input);
    using var pooled = avgPool.forward(featureMaps);
    // Flatten to (batch, 256 * 2 * 2) so the fully-connected classifier can consume it.
    using var flattened = pooled.view(new long[] { pooled.shape[0], 256 * 2 * 2 });
    return classifier.forward(flattened);
}
static void Main(string[] args)
{
    // Two-layer MLP (2 -> 100 -> 1) trained with SGD + MSE to learn XOR.
    Sequential xornet = new Sequential(
        new Linear(2, 100),
        new ReLU(),
        new Linear(100, 1));

    // XOR truth table: four input pairs and their expected outputs.
    double[,] x = { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
    double[,] y = { { 0 }, { 1 }, { 1 }, { 0 } };

    NDArray a_np = np.array(x);
    NDArray b_np = np.array(y);
    Tensor input = new Tensor(a_np);
    Tensor label = new Tensor(b_np);

    int epoch = 1000;
    SGD optim = new SGD(xornet.parameters(), 0.05);
    MSELoss mse = new MSELoss();

    for (int i = 1; i <= epoch; i++)
    {
        Tensor output = xornet.forward(input);
        Tensor loss = mse.forward(output, label);
        optim.zero_grad();  // clear gradients from the previous iteration before backprop
        loss.backward();
        optim.step();
        Console.WriteLine("[+] Epoch: " + i + " Loss: " + loss);
    }

    // Evaluate on the training inputs. Reuse `input` directly: the original code
    // rebuilt an identical tensor (`new Tensor(new NDArray(x))`) from the same
    // `x` data it had already wrapped, which was a redundant allocation.
    Tensor outputs = xornet.forward(input);
    Console.WriteLine("Result: " + outputs.data.flatten().ToString());
    Console.ReadLine();
}
public override torch.Tensor forward(torch.Tensor x)
{
    // Move axis 0 to the end before the convolution, then restore the original
    // axis order on the way out (permute(1,2,0) is undone by permute(2,0,1)).
    using var rearranged = x.permute(1, 2, 0);
    using var convolved = Conv.forward(rearranged);
    return convolved.permute(2, 0, 1);
}
public override torch.Tensor forward(torch.Tensor x, Dictionary<string, object> param)
{
    using var layerOutput = FullConnects.forward(x);
    // Residual connection: add_ mutates layerOutput in place and returns an alias
    // of that same tensor. The original bound the return value to a SECOND `using`
    // variable (misspelled "layerOuptutIntermediate"), scheduling Dispose twice on
    // one underlying tensor — a double-free risk. Discarding the alias fixes that.
    layerOutput.add_(x);
    return FinalLayerNorm.forward(layerOutput);
}