Example #1
        private torch.Tensor ForwardEmbedding(torch.Tensor tokens, torch.Tensor segmentLabels, torch.Tensor positions)
        {
            using var disposeScope = torch.NewDisposeScope();

            // Token embedding lookup, with optional in-place scaling.
            var x = TokenEmbedding.forward(tokens);

            if (EmbedScale != null)
            {
                x.mul_(EmbedScale);
            }
            if (PositionalEmbedding != null)
            {
                var positionalEmbedding = PositionalEmbedding.forward(tokens,
                    new Dictionary<string, object>
                    {
                        { PositionalEmbedding.PositionKey, positions }
                    });
                x.add_(positionalEmbedding);
            }
            if (SegmentEmbedding != null && segmentLabels.IsNotNull())
            {
                var segmentEmbedding = SegmentEmbedding.forward(segmentLabels);
                x.add_(segmentEmbedding);
            }
            if (EmbeddingLayerNorm != null)
            {
                x = EmbeddingLayerNorm.forward(x);
            }
            // Pass the embedding dimension (the size of the last axis) to the transfer layer.
            x = EmbedTransfer.forward(x, (int)x.size()[x.size().Length - 1]);
            x = DropoutLayer.forward(x);

            // Keep the result alive beyond this method's dispose scope.
            return x.MoveToOuterDisposeScope();
        }
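A minimal usage sketch for ForwardEmbedding, assuming the method lives inside a TorchSharp module whose input is a (batch, sequence) tensor of token ids; the Encode wrapper, the shapes, and the all-zero segment labels are illustrative assumptions, not part of the original class.

        // Hypothetical caller for ForwardEmbedding; the name Encode and the
        // (batch, sequence) shape assumption are not from the original source.
        private torch.Tensor Encode(torch.Tensor tokens)
        {
            // One position id per token, broadcast across the batch.
            var positions = torch.arange(tokens.size(1), dtype: torch.int64)
                .unsqueeze(0)
                .expand(tokens.size(0), -1);

            // Single-segment input: all segment labels are zero.
            var segmentLabels = torch.zeros_like(tokens);

            return ForwardEmbedding(tokens, segmentLabels, positions);
        }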
Example #2
        public override torch.Tensor forward(torch.Tensor x, Dictionary<string, object> param)
        {
            using var disposeScope = torch.NewDisposeScope();

            if (!ParseArguments(param, out var selfAttentionMask, out var selfAttentionPaddingMask))
            {
                throw new ArgumentException("Invalid arguments.");
            }

            // Self-attention where the query, key, and value are all the input x.
            var attention = SelfAttention.forward(query: x, key: x, value: x,
                                                  out _,
                                                  keyPaddingMask: selfAttentionPaddingMask,
                                                  needWeights: false,
                                                  attentionMask: selfAttentionMask);
            var dropout = DropoutLayer.forward(attention);

            // Residual connection (in place), then layer normalization.
            dropout.add_(x);
            var norm = LayerNorm.forward(dropout);

            return norm.MoveToOuterDisposeScope();
        }
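The ParseArguments helper referenced above is not shown in the listing. A plausible sketch follows, assuming the two masks are passed under string keys in the param dictionary; the key names SelfAttentionMask and SelfAttentionPaddingMask are assumptions for illustration.

        // Hedged sketch of the ParseArguments helper, which the listing does
        // not show. The dictionary key names here are assumptions.
        private static bool ParseArguments(Dictionary<string, object> param,
            out torch.Tensor selfAttentionMask, out torch.Tensor selfAttentionPaddingMask)
        {
            selfAttentionMask = null;
            selfAttentionPaddingMask = null;
            if (param == null)
            {
                return false;
            }
            if (param.TryGetValue("SelfAttentionMask", out var mask))
            {
                selfAttentionMask = (torch.Tensor)mask;
            }
            if (param.TryGetValue("SelfAttentionPaddingMask", out var paddingMask))
            {
                selfAttentionPaddingMask = (torch.Tensor)paddingMask;
            }
            return true;
        }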
Example #3
        public override torch.Tensor forward(torch.Tensor x, Dictionary<string, object> param)
        {
            // Position-wise feed-forward block, then a residual connection
            // (in-place add of the input) and a final layer normalization.
            using var layerOutput = FullConnects.forward(x);
            layerOutput.add_(x);
            return FinalLayerNorm.forward(layerOutput);
        }
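Taken together, Examples #2 and #3 are the two sublayers of a standard post-norm transformer encoder layer: self-attention with residual and layer norm, then a feed-forward block with residual and layer norm. A hedged composition sketch, with field names invented for illustration:

        // Illustrative composition of the two sublayers above into one encoder
        // layer; AttentionSublayer and FeedForwardSublayer are assumed field
        // names, not identifiers from the original source.
        public torch.Tensor EncoderLayerForward(torch.Tensor x, Dictionary<string, object> param)
        {
            var attentionOutput = AttentionSublayer.forward(x, param);   // Example #2
            return FeedForwardSublayer.forward(attentionOutput, param);  // Example #3
        }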