Example No. 1
        public List <Matrix> activate(List <Matrix> x, ForwdBackwdProp g)
        {
            // List<int> input = x.inputs;
            Matrix        final   = new Matrix(Global.hiddenDim, 1);
            List <Matrix> outputs = new List <Matrix>();
            Matrix        _h_tm1  = Matrix.newMatrix_0(_hiddenDim, 1);
            Matrix        _s_tm1  = Matrix.newMatrix_0(_hiddenDim, 1);

            for (int i = 0; i < x.Count; i++)
            {
                //input gate
                Matrix sum0      = g.Mul(_wix, x[i]);
                Matrix sum1      = g.Mul(_wih, _h_tm1);
                Matrix inputGate = g.sigNonlin(g.Add(g.Add(sum0, sum1), _iBias));

                //forget gate
                Matrix sum2       = g.Mul(_wfx, x[i]);
                Matrix sum3       = g.Mul(_wfh, _h_tm1);
                Matrix forgetGate = g.sigNonlin(g.Add(g.Add(sum2, sum3), _fBias));

                //output gate
                Matrix sum4       = g.Mul(_wox, x[i]);
                Matrix sum5       = g.Mul(_woh, _h_tm1);
                Matrix outputGate = g.sigNonlin(g.Add(g.Add(sum4, sum5), _Bias));

                //write operation on cells
                Matrix sum6      = g.Mul(_wcx, x[i]);
                Matrix sum7      = g.Mul(_wch, _h_tm1);
                Matrix cellInput = g.tanhNonlin(g.Add(g.Add(sum6, sum7), _cBias));

                //compute new cell activation
                Matrix retainCell = g.Elmul(forgetGate, _s_tm1);
                Matrix writeCell  = g.Elmul(inputGate, cellInput);
                Matrix cellAct    = g.Add(retainCell, writeCell);

                //compute hidden state as gated, saturated cell activations
                Matrix output = g.Elmul(outputGate, g.tanhNonlin(cellAct));
                //if (i == 0)
                //{
                //    final = output;
                //}
                //else
                //{
                //    final = g.ConcatVectors(final, output);
                //}
                //final = g.Add(final, output);

                outputs.Add(output);
                //rollover activations for next iteration
                _h_tm1 = output;
                _s_tm1 = cellAct;
                //_h = g.Add(output, _h);
            }

            return(outputs);
        }
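
For reference, and using the field names from the code, each iteration of the loop above performs the standard LSTM update (⊙ denotes element-wise multiplication):

        inputGate  = sigmoid(_wix * x[i] + _wih * _h_tm1 + _iBias)
        forgetGate = sigmoid(_wfx * x[i] + _wfh * _h_tm1 + _fBias)
        outputGate = sigmoid(_wox * x[i] + _woh * _h_tm1 + _Bias)
        cellInput  = tanh(_wcx * x[i] + _wch * _h_tm1 + _cBias)
        cellAct    = forgetGate ⊙ _s_tm1 + inputGate ⊙ cellInput
        output     = outputGate ⊙ tanh(cellAct)

The hidden state _h_tm1 and cell state _s_tm1 are then rolled over to the next time step, and the per-step outputs are returned as a list.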
Example No. 2
        public Matrix Activate_1(Matrix input, ForwdBackwdProp g)
        {
            // affine transform followed by a tanh nonlinearity: tanh(_w * input + _b)
            Matrix sum       = g.Add(g.Mul(_w, input), _b);
            Matrix returnObj = g.tanhNonlin(sum);

            return(returnObj);
        }
Example No. 3
        public Matrix Activate(Matrix input, ForwdBackwdProp g)
        {
            // purely affine layer (no nonlinearity): _w * input + _b
            Matrix sum       = g.Add(g.Mul(_w, input), _b);
            Matrix returnObj = sum;

            return(returnObj);
        }
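
Examples No. 2 and No. 3 differ only in the output nonlinearity: Activate_1 applies a tanh to the affine transform, while Activate returns the raw affine output (useful when a loss such as softmax is applied afterwards, as in Example No. 4):

        Activate_1: y = tanh(_w * input + _b)
        Activate:   y = _w * input + _b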
Example No. 4
        public void run(object param)
        {
            // unpack the thread argument (a 'param' carrier object) and build a
            // fresh forward/backward propagation graph for this training sequence
            param           pa   = (param)param;
            List <DataStep> x1   = pa.datastep;
            List <Matrix>   xpro = new List <Matrix>();
            ForwdBackwdProp g    = new ForwdBackwdProp(_train);

            for (int i = 0; i < x1.Count; i++)
            {
                DataStep x = x1[i];

                // look up the word embeddings of the 5-token context window for this step
                List <Matrix> add = new List <Matrix>();
                for (int k = 0; k < 5; k++)
                {
                    add.Add(Global.wordEmbedding[x.inputs[k]]);
                }
                // the stacked GRNN layers condense the 5-vector window step by step;
                // only the first output of the last layer is kept as this step's representation
                List <Matrix> returnObj2 = Global.GRNNLayer1.activate(add, g);
                List <Matrix> returnObj3 = Global.GRNNLayer2.activate(returnObj2, g);
                List <Matrix> returnObj4 = Global.GRNNLayer3.activate(returnObj3, g);
                List <Matrix> returnObj5 = Global.GRNNLayer4.activate(returnObj4, g);

                xpro.Add(returnObj5[0]);
            }

            // bidirectional pass: one LSTM over the sequence and one over its reverse
            List <Matrix> returnObj6 = Global.upLSTMLayer.activate(xpro, g);
            List <Matrix> returnObj7 = Global.upLSTMLayerr.activate(reverse(xpro), g);

            // combine the two directions by summing the states aligned per position
            List <Matrix> sum = new List <Matrix>();

            for (int inde = 0; inde < returnObj6.Count; inde++)
            {
                sum.Add(g.Add(returnObj6[inde], returnObj7[returnObj7.Count - inde - 1]));
            }

            // score each position, accumulating the softmax loss and its gradient
            for (int i = 0; i < returnObj6.Count; i++)
            {
                Matrix returnObj9 = Global.feedForwardLayer.Activate(sum[i], g);
                double loss       = LossSoftmax.getLoss(returnObj9, x1[i].goldOutput);
                if (double.IsNaN(loss) || double.IsInfinity(loss))
                {
                    Console.WriteLine("WARNING!!!");
                    Global.swLog.WriteLine("WARNING!!!");
                    pa.mre.Set();
                    return;
                }
                LossSoftmax.getGrad(returnObj9, x1[i].goldOutput);
            }
            // backpropagate through the recorded graph, then signal the caller
            g.backwardProp();
            pa.mre.Set();
        }
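
The run worker above (and runtest in Example No. 6) calls a reverse(...) helper that is not shown in these examples. A minimal sketch consistent with how it is used, assuming it simply returns a reversed copy of the list (the real implementation may differ), could look like this:

        // Hypothetical sketch of the reverse(...) helper used above: return a
        // reversed copy so the original list is left untouched.
        private static List <Matrix> reverse(List <Matrix> input)
        {
            List <Matrix> reversed = new List <Matrix>(input);
            reversed.Reverse();
            return reversed;
        }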
Example No. 5
        public List <Matrix> activate(List <Matrix> input, ForwdBackwdProp g)
        {
            List <Matrix> temp = new List <Matrix>();

            // combine each adjacent pair of input vectors into a single output vector
            for (int i = 1; i < input.Count; i++)
            {
                Matrix concanate = g.ConcatVectors(input[i - 1], input[i]);
                //Matrix rl = g.sigNonlin(g.Add(g.Mul(rl1w, input[i - 1]), g.Mul(rl2w, input[i])));
                // left and right reset gates computed from the concatenated pair
                Matrix rl = g.sigNonlin(g.Mul(_gl, concanate));
                Matrix rr = g.sigNonlin(g.Mul(_gr, concanate));
                //Matrix rr = g.sigNonlin(g.Add(g.Mul(rr1w, input[i - 1]), g.Mul(rr2w, input[i])));
                //Matrix hh = g.tanhNonlin(g.Add(g.Mul(wi, input[i - 1]), g.Mul(wx, input[i])));
                // candidate vector built from the gated left and right children
                Matrix hh = g.tanhNonlin(g.Mul(hh1w, g.ConcatVectors(g.Elmul(rl, input[i - 1]), g.Elmul(rr, input[i]))));

                //Matrix hhh = g.ConcatVectors(g.ConcatVectors(hh, input[i - 1]), input[i]);
                //Matrix zh1 = g.sigNonlin(g.Mul(hh2w, hhh));
                //Matrix zh2 = g.sigNonlin(g.Mul(hh3w, hhh));
                //Matrix zh3 = g.tanhNonlin(g.Mul(hh4w, hhh));
                //Matrix z1=g.Exp()


                // update gates weighting the candidate against the two original inputs
                Matrix concanate1 = g.ConcatVectors(g.ConcatVectors(hh, input[i - 1]), input[i]);

                Matrix z1 = g.sigNonlin(g.Mul(_u1, concanate1));
                Matrix z2 = g.sigNonlin(g.Mul(_u2, concanate1));
                Matrix z3 = g.sigNonlin(g.Mul(_u3, concanate1));


                //Matrix zl = g.SumDivid1(zh1, zh2, zh3);
                // Matrix zm = g.SumDivid2(zh1, zh2, zh3);
                //Matrix zr = g.SumDivid3(zh1, zh2, zh3);

                // gated mixture of the candidate and the two child vectors
                Matrix output = g.Add(g.Add(g.Elmul(z1, hh), g.Elmul(z2, input[i - 1])), g.Elmul(z3, input[i]));

                temp.Add(output);
            }


            return(temp);
        }
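
Using the names from the code, each pair of adjacent inputs is merged as follows (⊙ denotes element-wise multiplication, [a; b] vertical concatenation):

        rl     = sigmoid(_gl * [input[i-1]; input[i]])                (left reset gate)
        rr     = sigmoid(_gr * [input[i-1]; input[i]])                (right reset gate)
        hh     = tanh(hh1w * [rl ⊙ input[i-1]; rr ⊙ input[i]])        (candidate)
        z1     = sigmoid(_u1 * [hh; input[i-1]; input[i]])
        z2     = sigmoid(_u2 * [hh; input[i-1]; input[i]])
        z3     = sigmoid(_u3 * [hh; input[i-1]; input[i]])
        output = z1 ⊙ hh + z2 ⊙ input[i-1] + z3 ⊙ input[i]

Note that the three gates z1, z2, z3 are computed independently with sigmoids rather than being normalized to sum to one (the commented-out SumDivid calls hint at a normalized variant).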
Example No. 6
        public void runtest(object param)
        {
            // unpack the thread argument for this test sequence
            param           pa = (param)param;
            List <DataStep> x1 = pa.datastep;

            int[]  ires4, igold4, wordindeis;
            string str = "", str1 = "";

            igold4     = new int[x1.Count];
            ires4      = new int[x1.Count];
            wordindeis = new int[x1.Count];
            int             index = 0, arraynum = 0;
            ForwdBackwdProp g = new ForwdBackwdProp(_train);

            int dim = x1.Count;

            Matrix[]   xpro       = new Matrix[dim];
            List <int> ires_model = new List <int>();


            //Parallel.For(0, temp.Count, i =>
            for (int i = 0; i < x1.Count; i++)
            {
                // look up the word embeddings of the 5-token context window
                List <Matrix> add = new List <Matrix>();

                for (int k = 0; k < 5; k++)
                {
                    add.Add(Global.wordEmbedding[x1[i].inputs[k]]);
                }
                // the stacked GRNN layers condense the window; only the first output
                // of the last layer is kept as this step's representation
                List <Matrix> returnObj2 = Global.GRNNLayer1.activate(add, g);
                List <Matrix> returnObj3 = Global.GRNNLayer2.activate(returnObj2, g);
                List <Matrix> returnObj4 = Global.GRNNLayer3.activate(returnObj3, g);
                List <Matrix> returnObj5 = Global.GRNNLayer4.activate(returnObj4, g);
                xpro[i] = returnObj5[0];
            }//);
            // bidirectional LSTM pass; sum the two directions per aligned position
            List <Matrix> returnObj6 = Global.upLSTMLayer.activate(xpro.ToList(), g);
            List <Matrix> returnObj7 = Global.upLSTMLayerr.activate(reverse(xpro.ToList()), g);
            List <Matrix> sum        = new List <Matrix>();

            for (int inde = 0; inde < returnObj6.Count; inde++)
            {
                sum.Add(g.Add(returnObj6[inde], returnObj7[returnObj7.Count - inde - 1]));
            }

            // predict a label for each position and record it next to the gold label
            for (int i = 0; i < xpro.Length; i++)
            {
                Matrix returnObj9 = Global.feedForwardLayer.Activate(sum[i], g);
                igold4[i] = LossSoftmax.getMax(x1[i].goldOutput);
                ires4[i]  = LossSoftmax.getMax(returnObj9);
            }

            //);


            //fscore.backprocess(wordindeis, ires4);

            // emit the sequence as "word predicted gold" lines framed by BOS/EOS
            pa.seq.write_string = "BOS O O" + "\n";
            //pa.sw.WriteLine("BOS O O");
            for (int i = 0; i < ires4.Length; i++)
            {
                pa.seq.write_string += (Global.word[x1[i].wordindex] + " " + ires4[i] + " " + igold4[i] + "\n");
            }
            pa.seq.write_string += ("EOS O O" + "\n");
            pa.seq.write_string += "\n";


            // chunk-level evaluation: compare predicted chunks against gold chunks
            List <string> res = fscore.getChunks4(ires4);

            str1 = fscore.calcorrect(fscore.getChunks4(igold4), res);

            // accumulate the three counts reported by calcorrect under a shared lock
            lock (thislock)
            {
                string[] strs1 = str1.Split();
                _total4   += Int32.Parse(strs1[0]);
                _prTotal4 += Int32.Parse(strs1[1]);
                _correct4 += Int32.Parse(strs1[2]);
            }
        }
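
The run and runtest workers both cast their argument to a 'param' carrier object whose definition is not included in these examples. A minimal sketch consistent with the members referenced above (field types and the Sequence class name are assumptions) might look like this:

        // Hypothetical sketch of the 'param' carrier passed to run/runtest; only the
        // members used in the examples are included, and their types are assumed.
        public class param
        {
            public List <DataStep> datastep;                  // the sequence of steps to process
            public System.Threading.ManualResetEvent mre;     // signalled when the worker finishes
            public Sequence seq;                              // holds the accumulated output text
        }

        public class Sequence                                 // assumed name for the type behind pa.seq
        {
            public string write_string;                       // "word predicted gold" lines for this sequence
        }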