Example #1
0
 public void Process(CMNN nn, int index)
 {
     // Forward pass of the encoder: maps this encoder's slice of the network
     // input into the latent vector Y.
     if (H1Dim <= 0)
     {
         // No hidden layers configured: the input is copied straight into the
         // latent vector, which requires matching dimensionalities.
         if (XDim != H2Dim)
         {
             Debug.Log("XDim need to be equal to H2Dim");
             return;
         }
         for (int feature = 0; feature < XDim; feature++)
         {
             Y.SetValue(feature, 0, nn.GetInput(EncodePivot + feature));
         }
         return;
     }

     // Read the encoder's input slice into X...
     for (int feature = 0; feature < XDim; feature++)
     {
         X.SetValue(feature, 0, nn.GetInput(EncodePivot + feature));
     }
     // ...and push it through two ELU-activated layers into Y.
     nn.Layer(X, W0, b0, Y).ELU();
     nn.Layer(Y, W1, b1, Y).ELU();
 }
Example #2
0
        public void Process(CMNN nn, int index)
        {
            // Forward pass of one gating/expert component: first resolve the
            // effective weights, then build the input vector, then evaluate
            // the three-layer network.

            // Resolve weights.
            if (index == 0)
            {
                // First component: there is no preceding gate output, so the
                // first expert's parameters are used directly.
                W0 = Experts[0][0];
                b0 = Experts[1][0];
                W1 = Experts[2][0];
                b1 = Experts[3][0];
                W2 = Experts[4][0];
                b2 = Experts[5][0];
            }
            else
            {
                // The previous component's output acts as blending
                // coefficients over the expert parameter sets.
                float[] coefficients = nn.Components[index - 1].Y.Flatten();
                if (nn.Threading)
                {
                    // Blend the six parameter matrices concurrently and block
                    // until all of them are ready.
                    Task[] jobs =
                    {
                        Task.Factory.StartNew(() => nn.BlendAll(W0, Experts[0], coefficients, coefficients.Length)),
                        Task.Factory.StartNew(() => nn.BlendAll(b0, Experts[1], coefficients, coefficients.Length)),
                        Task.Factory.StartNew(() => nn.BlendAll(W1, Experts[2], coefficients, coefficients.Length)),
                        Task.Factory.StartNew(() => nn.BlendAll(b1, Experts[3], coefficients, coefficients.Length)),
                        Task.Factory.StartNew(() => nn.BlendAll(W2, Experts[4], coefficients, coefficients.Length)),
                        Task.Factory.StartNew(() => nn.BlendAll(b2, Experts[5], coefficients, coefficients.Length))
                    };
                    Task.WaitAll(jobs);
                }
                else
                {
                    nn.BlendAll(W0, Experts[0], coefficients, coefficients.Length);
                    nn.BlendAll(b0, Experts[1], coefficients, coefficients.Length);
                    nn.BlendAll(W1, Experts[2], coefficients, coefficients.Length);
                    nn.BlendAll(b1, Experts[3], coefficients, coefficients.Length);
                    nn.BlendAll(W2, Experts[4], coefficients, coefficients.Length);
                    nn.BlendAll(b2, Experts[5], coefficients, coefficients.Length);
                }
            }

            // Build the input vector X.
            if (index == nn.Components.Length - 1 && nn.encoders.Length > 0)
            {
                // Last component with encoders present: the input is the
                // concatenation of all encoder latent vectors.
                int offset = 0;
                for (int e = 0; e < nn.encoders.Length; e++)
                {
                    int latentDim = nn.encoders[e].H2Dim;
                    for (int k = 0; k < latentDim; k++)
                    {
                        X.SetValue(offset + k, 0, nn.encoders[e].GetLatent(k));
                    }
                    offset += latentDim;
                }
            }
            else
            {
                // Otherwise read this component's slice of the raw network input.
                for (int k = 0; k < XDim; k++)
                {
                    X.SetValue(k, 0, nn.GetInput(GatingPivot + k));
                }
            }

            // Evaluate the network.
            if (!Bias)
            {
                // Bias disabled: zero the (possibly blended) bias vectors before use.
                b0.SetZero();
                b1.SetZero();
                b2.SetZero();
            }
            nn.Layer(X, W0, b0, Y).ELU();
            nn.Layer(Y, W1, b1, Y).ELU();
            nn.Layer(Y, W2, b2, Y);
            // Every component except the last normalizes its output so it can
            // serve as blending coefficients for the next component.
            if (index < nn.Components.Length - 1)
            {
                Y.SoftMax();
            }
        }