Ejemplo n.º 1
0
        /// <summary>
        /// Picks a random training pair and runs both encoders (forward and
        /// reversed) over the input sentence, collecting one concatenated
        /// bidirectional state per token into <paramref name="encoded"/>.
        /// </summary>
        /// <param name="r">Random source used to select the training sample.</param>
        /// <param name="OutputSentence">Receives the target sentence paired with the chosen input.</param>
        /// <param name="g">Receives a fresh compute graph for this forward pass.</param>
        /// <param name="cost">Initialized to 0; accumulated later by the decoder.</param>
        /// <param name="encoded">Receives one concatenated (forward + reverse) state per input token.</param>
        private void Encode(Random r, out List <string> OutputSentence, out ComputeGraph g, out double cost, List <WeightMatrix> encoded)
        {
            int sampleIdx  = r.Next(0, InputSequences.Count);
            var forwardSeq = InputSequences[sampleIdx];

            // Reversed copy of the same sentence, fed to the backward encoder.
            var backwardSeq = forwardSeq.ToList();
            backwardSeq.Reverse();

            OutputSentence = OutputSequences[sampleIdx];
            g    = new ComputeGraph();
            cost = 0.0;

            for (int t = 0; t < forwardSeq.Count; t++)
            {
                // Embed the t-th token of each direction and advance its encoder.
                var fwdEmbed = g.PeekRow(Embedding, wordToIndex[forwardSeq[t]]);
                var fwdState = encoder.Encode(fwdEmbed, g);

                var bwdEmbed = g.PeekRow(Embedding, wordToIndex[backwardSeq[t]]);
                var bwdState = ReversEncoder.Encode(bwdEmbed, g);

                // One bidirectional state per input position.
                encoded.Add(g.concatColumns(fwdState, bwdState));
            }
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Runs the decoder over the target sentence with teacher forcing,
        /// accumulating cross-entropy loss and writing the softmax gradient
        /// into each output node.
        /// </summary>
        /// <param name="OutputSentence">Target token sequence for the current sample.</param>
        /// <param name="g">Compute graph the forward pass is recorded into.</param>
        /// <param name="cost">Running loss to add this sentence's loss onto.</param>
        /// <param name="encoded">Encoder states; only the last one is fed to the decoder.</param>
        /// <returns>The updated accumulated cost.</returns>
        private double DecodeOutput(List <string> OutputSentence, ComputeGraph g, double cost, List <WeightMatrix> encoded)
        {
            // Token index 1 acts as the start-of-sentence symbol.
            int currentInput = 1;
            int steps        = OutputSentence.Count + 1;   // +1 final step targets index 0 (end token)

            for (int step = 0; step < steps; step++)
            {
                // Target is the next reference token, or 0 (end token) after the last word.
                int target = step == OutputSentence.Count
                    ? 0
                    : wordToIndex[OutputSentence[step]];

                var inputEmbed   = g.PeekRow(Embedding, currentInput);
                var decoderState = decoder.Decode(inputEmbed, encoded.LastOrDefault(), g);
                if (UseDropout)
                {
                    decoderState = g.Dropout(decoderState, 0.2);
                }

                // Project the decoder state onto the vocabulary.
                var logits = g.add(g.mul(decoderState, this.Whd), this.bd);
                if (UseDropout)
                {
                    logits = g.Dropout(logits, 0.2);
                }

                var probs = g.SoftmaxWithCrossEntropy(logits);

                // Negative log-likelihood of the reference token.
                cost += -Math.Log(probs.Weight[target]);

                // Cross-entropy gradient: softmax - one_hot(target).
                // NOTE(review): this aliases probs.Weight (no copy), so the in-place
                // "-= 1" also mutates probs — confirm downstream code never reads
                // probs.Weight after this point.
                logits.Gradient          = probs.Weight;
                logits.Gradient[target] -= 1;

                // Teacher forcing: next step sees the reference token, not the prediction.
                currentInput = target;
            }

            return(cost);
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Greedily decodes an output sequence for <paramref name="inputSeq"/>:
        /// encodes the sentence in both directions, then repeatedly emits the
        /// most probable next token until the end token (index 0) is predicted
        /// or the length cap is exceeded.
        /// </summary>
        /// <param name="inputSeq">Input token sequence; every token must exist in the vocabulary.</param>
        /// <returns>The predicted token sequence, without the end token.</returns>
        public List <string> Predict(List <string> inputSeq)
        {
            ReversEncoder.Reset();
            encoder.Reset();
            decoder.Reset();

            var output = new List <string>();

            // false: inference only — no gradient bookkeeping in the graph.
            var graph = new ComputeGraph(false);

            var reversedSeq = inputSeq.ToList();
            reversedSeq.Reverse();

            // Bidirectional encoding: one concatenated state per input token.
            var encoded = new List <WeightMatrix>();
            for (int t = 0; t < inputSeq.Count; t++)
            {
                var fwdEmbed = graph.PeekRow(Embedding, wordToIndex[inputSeq[t]]);
                var fwdState = encoder.Encode(fwdEmbed, graph);

                var bwdEmbed = graph.PeekRow(Embedding, wordToIndex[reversedSeq[t]]);
                var bwdState = ReversEncoder.Encode(bwdEmbed, graph);

                encoded.Add(graph.concatColumns(fwdState, bwdState));
            }

            // Token index 1 acts as the start-of-sentence symbol.
            int current = 1;
            while (true)
            {
                var inputEmbed   = graph.PeekRow(Embedding, current);
                var decoderState = decoder.Decode(inputEmbed, encoded.LastOrDefault(), graph);
                if (UseDropout)
                {
                    // NOTE(review): inference scales by the drop rate (0.2) rather than
                    // the keep probability (0.8) — confirm this matches g.Dropout's
                    // convention used at training time.
                    for (int i = 0; i < decoderState.Weight.Length; i++)
                    {
                        decoderState.Weight[i] *= 0.2;
                    }
                }

                var logits = graph.add(graph.mul(decoderState, this.Whd), this.bd);
                if (UseDropout)
                {
                    for (int i = 0; i < logits.Weight.Length; i++)
                    {
                        logits.Weight[i] *= 0.2;
                    }
                }

                var probs = graph.SoftmaxWithCrossEntropy(logits);

                // Greedy argmax over the vocabulary (ties keep the first maximum).
                int best = 0;
                for (int i = 1; i < probs.Weight.Length; i++)
                {
                    if (probs.Weight[i] > probs.Weight[best])
                    {
                        best = i;
                    }
                }

                if (best == 0)
                {
                    break;                      // end token predicted
                }
                if (output.Count > max_word)
                {
                    break;                      // runaway decode — safety length cap
                }

                output.Add(indexToWord[best]);
                current = best;
            }

            return(output);
        }