Пример #1
0
 public void matrixXvectorADD(LSTMCell[] dest, neuron[] srcvec, LSTMWeight[][] srcmatrix, int from, int to, int from2, int to2)
 {
     //ac mod
     // Accumulates srcmatrix * srcvec (using the wInputInputGate component)
     // into dest[from..to).netIn. Rows run in parallel; each parallel index
     // writes a distinct dest element, so the += needs no synchronization.
     int rowCount = to - from;
     int colCount = to2 - from2;
     Parallel.For(0, rowCount, parallelOption, row =>
     {
         LSTMWeight[] weightRow = srcmatrix[row];
         for (int col = 0; col < colCount; col++)
         {
             dest[row + from].netIn += srcvec[col + from2].ac * weightRow[col].wInputInputGate;
         }
     });
 }
Пример #2
0
        private void saveLSTMWeight(LSTMWeight[][] weight, BinaryWriter fo)
        {
            // Persists a 2-D LSTM weight matrix as: width, height, then the four
            // gate weights of each cell as 32-bit floats in row-major order.
            // Format must stay in sync with loadLSTMWeight.
            if (weight == null || weight.Length == 0)
            {
                // Empty-matrix marker: width = 0, height = 0.
                fo.Write(0);
                fo.Write(0);
                // BUG FIX: without this return the code below dereferences a
                // null/empty array and throws.
                return;
            }

            fo.Write(weight.Length);
            fo.Write(weight[0].Length);

            int w = weight.Length;
            int h = weight[0].Length;

            for (int i = 0; i < w; i++)
            {
                for (int j = 0; j < h; j++)
                {
                    // Fixed gate order: cell, forget, input, output.
                    fo.Write((float)weight[i][j].wInputCell);
                    fo.Write((float)weight[i][j].wInputForgetGate);
                    fo.Write((float)weight[i][j].wInputInputGate);
                    fo.Write((float)weight[i][j].wInputOutputGate);
                }
            }
        }
Пример #3
0
        public LSTMWeight[][] loadLSTMWeight(BinaryReader br)
        {
            // Reads a weight matrix in the format written by saveLSTMWeight:
            // width, height, then four 32-bit floats (one per gate) per cell.
            int width = br.ReadInt32();
            int height = br.ReadInt32();

            var matrix = new LSTMWeight[width][];
            for (int row = 0; row < width; row++)
            {
                var cells = new LSTMWeight[height];
                for (int col = 0; col < height; col++)
                {
                    // Fixed gate order: cell, forget, input, output.
                    cells[col].wInputCell = br.ReadSingle();
                    cells[col].wInputForgetGate = br.ReadSingle();
                    cells[col].wInputInputGate = br.ReadSingle();
                    cells[col].wInputOutputGate = br.ReadSingle();
                }
                matrix[row] = cells;
            }

            return matrix;
        }
Пример #4
0
        public override void initWeights()
        {
            // Initializes all weight matrices of the network:
            // sparse-input->hidden, optional dense-feature->hidden, and hidden->output.
            int INPUT = m_TrainingSet.GetSparseDimension();

            // Create and initialise the weights from input to hidden layer.
            // Each of the L1 hidden rows has INPUT + 1 columns (j runs to <= INPUT);
            // the extra column is presumably a bias term — TODO confirm against callers.
            mat_input2hidden = new LSTMWeight[L1][];
            for (int i = 0; i < L1; i++)
            {
                mat_input2hidden[i] = new LSTMWeight[INPUT + 1];
                for (int j = 0; j <= INPUT; j++)
                {
                    mat_input2hidden[i][j] = LSTMWeightInit(INPUT);
                }
            }

            // Dense-feature weights exist only when dense features are configured.
            if (fea_size > 0)
            {
                mat_feature2hidden = new LSTMWeight[L1][];
                for (int i = 0; i < L1; i++)
                {
                    mat_feature2hidden[i] = new LSTMWeight[fea_size];
                    for (int j = 0; j < fea_size; j++)
                    {
                        mat_feature2hidden[i][j] = LSTMWeightInit(INPUT);
                    }
                }
            }

            // Create and intialise the weights from hidden to output layer, these
            // are just normal weights scaled by 1/sqrt(L1). The matrix has L1 + 1
            // rows (i runs to <= L1) — presumably an extra bias row; TODO confirm.
            double hiddenOutputRand = 1 / Math.Sqrt((double)L1);
            mat_hidden2output = new Matrix(L1 + 1, L2);

            for (int i = 0; i <= L1; i++)
            {
                for (int j = 0; j < L2; j++)
                {
                    // Draws from ((rand() % 100) + 1) / 100 mapped onto
                    // (-hiddenOutputRand, hiddenOutputRand] — assumes rand() is
                    // non-negative; verify against the rand() implementation.
                    mat_hidden2output[i][j] = (((double)((rand() % 100) + 1) / 100) * 2 * hiddenOutputRand) - hiddenOutputRand;
                }
            }
        }
Пример #5
0
        public LSTMWeight[][] loadLSTMWeight(BinaryReader br)
        {
            // Reads a vector-quantized weight matrix: header (width, height,
            // codebook size), the codebook of doubles, then one codebook-index
            // byte per gate weight in row-major order.
            int width = br.ReadInt32();
            int height = br.ReadInt32();
            int codeBookSize = br.ReadInt32();

            Logger.WriteLine("Loading LSTM-Weight: width:{0}, height:{1}, vqSize:{2}...", width, height, codeBookSize);

            var codeBook = new List<double>();
            for (int k = 0; k < codeBookSize; k++)
            {
                codeBook.Add(br.ReadDouble());
            }

            var matrix = new LSTMWeight[width][];
            for (int row = 0; row < width; row++)
            {
                var cells = new LSTMWeight[height];
                for (int col = 0; col < height; col++)
                {
                    // Each stored byte indexes the codebook; reads occur in the
                    // fixed gate order cell, forget, input, output.
                    cells[col].wInputCell = (float)codeBook[br.ReadByte()];
                    cells[col].wInputForgetGate = (float)codeBook[br.ReadByte()];
                    cells[col].wInputInputGate = (float)codeBook[br.ReadByte()];
                    cells[col].wInputOutputGate = (float)codeBook[br.ReadByte()];
                }
                matrix[row] = cells;
            }

            return matrix;
        }
Пример #6
0
        public override void initWeights()
        {
            // Create and initialise the weights from input to hidden layer:
            // L1 rows, each with L0 randomly initialized LSTM weight cells.
            input2hidden = new LSTMWeight[L1][];
            for (int row = 0; row < L1; row++)
            {
                var cells = new LSTMWeight[L0];
                for (int col = 0; col < L0; col++)
                {
                    cells[col] = LSTMWeightInit();
                }
                input2hidden[row] = cells;
            }

            // Dense-feature weights are allocated only when dense features exist.
            if (DenseFeatureSize > 0)
            {
                feature2hidden = new LSTMWeight[L1][];
                for (int row = 0; row < L1; row++)
                {
                    var cells = new LSTMWeight[DenseFeatureSize];
                    for (int col = 0; col < DenseFeatureSize; col++)
                    {
                        cells[col] = LSTMWeightInit();
                    }
                    feature2hidden[row] = cells;
                }
            }

            // Create and intialise the weights from hidden to output layer,
            // these are just normal (non-LSTM) weights.
            Hidden2OutputWeight = new Matrix<double>(L2, L1);

            for (int row = 0; row < Hidden2OutputWeight.GetHeight(); row++)
            {
                for (int col = 0; col < Hidden2OutputWeight.GetWidth(); col++)
                {
                    Hidden2OutputWeight[row][col] = RandInitWeight();
                }
            }
        }
Пример #7
0
        private void saveLSTMWeight(LSTMWeight[][] weight, BinaryWriter fo, int vqSize = 256)
        {
            // Persists an LSTM weight matrix with vector quantization:
            // header (width, height), codebook size, the codebook (vqSize doubles),
            // then one codebook-index byte per gate weight in row-major order.
            // Format must stay in sync with the VQ loadLSTMWeight.
            //
            // vqSize is generalized from the previous hard-coded 256; it defaults
            // to 256 because each index is written as a single byte — values above
            // 256 would overflow the (byte) casts below.
            int w = weight.Length;
            int h = weight[0].Length;

            Logger.WriteLine("Saving LSTM weight matrix. width:{0}, height:{1}, vqSize:{2}", w, h, vqSize);

            // Use the cached dimensions rather than re-reading the arrays.
            fo.Write(w);
            fo.Write(h);

            //Build vector quantization model from every gate weight in the matrix.
            VectorQuantization vq = new VectorQuantization();
            for (int i = 0; i < w; i++)
            {
                for (int j = 0; j < h; j++)
                {
                    vq.Add(weight[i][j].wInputCell);
                    vq.Add(weight[i][j].wInputForgetGate);
                    vq.Add(weight[i][j].wInputInputGate);
                    vq.Add(weight[i][j].wInputOutputGate);
                }
            }

            double distortion = vq.BuildCodebook(vqSize);
            Logger.WriteLine("Distortion: {0}", distortion);

            //Save VQ codebook into file so the loader can reconstruct the weights.
            fo.Write(vqSize);
            for (int j = 0; j < vqSize; j++)
            {
                fo.Write(vq.CodeBook[j]);
            }

            // Emit each gate weight as a one-byte codebook index, in the fixed
            // order cell, forget, input, output.
            for (int i = 0; i < w; i++)
            {
                for (int j = 0; j < h; j++)
                {
                    fo.Write((byte)vq.ComputeVQ(weight[i][j].wInputCell));
                    fo.Write((byte)vq.ComputeVQ(weight[i][j].wInputForgetGate));
                    fo.Write((byte)vq.ComputeVQ(weight[i][j].wInputInputGate));
                    fo.Write((byte)vq.ComputeVQ(weight[i][j].wInputOutputGate));
                }
            }

        }