Example #1
        private static LSTMGateWeight LoadLSTMGateWeights(BinaryReader br)
        {
            var            w          = br.ReadInt32();
            var            h          = br.ReadInt32();
            var            vqSize     = br.ReadInt32();
            LSTMGateWeight gateWeight = new LSTMGateWeight();

            Logger.WriteLine("Loading LSTM-Weight: width:{0}, height:{1}, vqSize:{2}...", w, h, vqSize);

            var m = new float[w][];

            gateWeight.weights          = m;
            gateWeight.layerSize        = w;
            gateWeight.denseFeatureSize = h;

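            // Read the w x h weight matrix values row by row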
            for (var i = 0; i < w; i++)
            {
                m[i] = new float[h];
                for (var j = 0; j < h; j++)
                {
                    m[i][j] = br.ReadSingle();
                }
            }

            return gateWeight;
        }
Example #2
        private void UpdateOutputGateWeights(LSTMGateWeight gateWeight, int i, float err)
        {
            var j = 0;

            float[] learningrate_i = gateWeight.learningRate[i];
            float[] weights_i      = gateWeight.weightsDelta[i];

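            // Accumulate error-scaled dense features into this row's weight deltas:
            // a SIMD pass over whole Vector<float>.Count chunks, then a scalar remainder pass.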
            foreach (var denseFeature in DenseFeatureGroups)
            {
                int k = 0;
                var denseFeatureSize = denseFeature.Length;
                var moreItems        = (denseFeatureSize % Vector<float>.Count);
                while (k < denseFeatureSize - moreItems)
                {
                    Vector<float> vecDelta = new Vector<float>(denseFeature, k);
                    vecDelta = vecDelta * err;

                    var w_i = new Vector<float>(weights_i, j);
                    w_i += vecDelta;
                    w_i.CopyTo(weights_i, j);

                    j += Vector<float>.Count;
                    k += Vector<float>.Count;
                }

                while (k < denseFeatureSize)
                {
                    float delta = denseFeature[k] * err;
                    weights_i[j] += delta;

                    j++;
                    k++;
                }
            }
        }
Example #3
        private void UpdateOutputGateWeights(LSTMGateWeight gateWeight, int i, float err)
        {
            var j = 0;

            float[] learningrate_i = gateWeight.learningRate[i];
            float[] weights_i      = gateWeight.weightsDelta[i];

            var moreItems = (DenseFeatureSize % Vector<float>.Count);

            while (j < DenseFeatureSize - moreItems)
            {
                Vector<float> vecDelta = new Vector<float>(DenseFeature, j);
                vecDelta = vecDelta * err;

                var w_i = new Vector<float>(weights_i, j);
                w_i += vecDelta;
                w_i.CopyTo(weights_i, j);

                j += Vector<float>.Count;
            }

            while (j < DenseFeatureSize)
            {
                float delta = DenseFeature[j] * err;
                weights_i[j] += delta;

                j++;
            }
        }
Example #4
        public override void Load(BinaryReader br)
        {
            LayerSize         = br.ReadInt32();
            SparseFeatureSize = br.ReadInt32();
            DenseFeatureSize  = br.ReadInt32();

            AllocateMemoryForCells();
            AllocateMemoryForLSTMCells();

            //Create cells of each layer
            CreateCell(br);

            //Load weight matrix between each two layer pairs
            //weight input->hidden
            if (SparseFeatureSize > 0)
            {
                Logger.WriteLine("Loading sparse feature weights...");
                sparseFeatureWeights = LoadLSTMWeights(br);
            }

            if (DenseFeatureSize > 0)
            {
                //weight fea->hidden
                Logger.WriteLine("Loading dense feature weights...");
                wDenseInputGate  = LoadLSTMGateWeights(br);
                wDenseCellGate   = LoadLSTMGateWeights(br);
                wDenseForgetGate = LoadLSTMGateWeights(br);
                wDenseOutputGate = LoadLSTMGateWeights(br);
            }
        }
Example #5
        private void UpdateGateWeights(LSTMGateWeight gateWeight, int i, float featureDerivate, float c_yForget, float err)
        {
            var j = 0;

            float[] deri_i         = gateWeight.deri[i];
            float[] learningrate_i = gateWeight.learningRate[i];
            float[] weights_i      = gateWeight.weights[i];
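            // For each SIMD chunk: update the running derivative, adapt the learning
            // rate, then apply the update to the shared weight row under its lock.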
            while (j < DenseFeatureSize)
            {
                var feature = new Vector<float>(DenseFeature, j);
                var wd      = feature * featureDerivate;
                var wd_i    = new Vector<float>(deri_i, j);
                wd += wd_i * c_yForget;
                wd.CopyTo(deri_i, j);

                Vector<float> vecDelta = wd * err;
                //    vecDelta = RNNHelper.NormalizeGradient(vecDelta);

                //Computing learning rate and update it
                var wlr_i           = new Vector<float>(learningrate_i, j);
                var vecLearningRate = ComputeLearningRate(vecDelta, ref wlr_i);
                wlr_i.CopyTo(learningrate_i, j);

                lock (gateWeight.locker[i])
                {
                    var w_i = new Vector<float>(weights_i, j);
                    w_i += vecLearningRate * vecDelta;
                    w_i.CopyTo(weights_i, j);
                }

                j += Vector<float>.Count;
            }
        }
Example #6
        private void UpdateOutputGateWeights(LSTMGateWeight gateWeight, int i, float err)
        {
            var j = 0;

            float[] learningrate_i = gateWeight.learningRate[i];
            float[] weights_i      = gateWeight.weights[i];
            while (j < DenseFeatureSize)
            {
                Vector<float> vecDelta = new Vector<float>(DenseFeature, j);
                vecDelta = vecDelta * err;
                //           vecDelta = RNNHelper.NormalizeGradient(vecDelta);

                var wlr_i           = new Vector<float>(learningrate_i, j);
                var vecLearningRate = ComputeLearningRate(vecDelta, ref wlr_i);
                wlr_i.CopyTo(learningrate_i, j);

                lock (gateWeight.locker[i])
                {
                    var w_i = new Vector<float>(weights_i, j);
                    w_i += vecLearningRate * vecDelta;
                    w_i.CopyTo(weights_i, j);
                }

                j += Vector<float>.Count;
            }
        }
Example #7
        public LSTMGateWeight CloneSharedWeights()
        {
            LSTMGateWeight gateWeight = new LSTMGateWeight();

            gateWeight.InitWeights(layerSize, denseFeatureSize);
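            // Re-point the clone at this instance's arrays so weights, weight deltas
            // and learning rates are shared between the original and the clone.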
            gateWeight.weights      = weights;
            gateWeight.weightsDelta = weightsDelta;
            gateWeight.learningRate = learningRate;

            return gateWeight;
        }
Example #8
        private void UpdateGateWeights(int curState, LSTMGateWeight gateWeight, int i, float featureDerivate, float c_yForget, float err)
        {
            var j = 0;

            float[] deri_i         = gateWeight.deri[i];
            float[] learningrate_i = gateWeight.learningRate[i];
            float[] weights_i      = gateWeight.weights[i];
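            // SIMD pass; the previous derivative feeds the recurrence only after the
            // first state (curState > 0). The scalar loop below handles the remainder.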
            while (j < DenseFeatureSize - Vector<float>.Count)
            {
                var feature = new Vector<float>(DenseFeature, j);
                var wd      = feature * featureDerivate;
                if (curState > 0)
                {
                    var wd_i = new Vector<float>(deri_i, j);
                    wd += wd_i * c_yForget;
                }
                wd.CopyTo(deri_i, j);

                Vector<float> vecDelta = wd * err;
                vecDelta = RNNHelper.NormalizeGradient(vecDelta);
                var wlr_i           = new Vector<float>(learningrate_i, j);
                var vecLearningRate = ComputeLearningRate(vecDelta, ref wlr_i);

                var w_i = new Vector<float>(weights_i, j);
                w_i += vecLearningRate * vecDelta;

                w_i.CopyTo(weights_i, j);
                wlr_i.CopyTo(learningrate_i, j);

                j += Vector<float>.Count;
            }

            while (j < DenseFeatureSize)
            {
                var wd = DenseFeature[j] * featureDerivate;
                if (curState > 0)
                {
                    wd += deri_i[j] * c_yForget;
                }
                deri_i[j] = wd;

                float delta = wd * err;
                delta = RNNHelper.NormalizeGradient(delta);
                var wlr_i        = learningrate_i[j];
                var learningRate = ComputeLearningRate(delta, ref wlr_i);

                weights_i[j]     += learningRate * delta;
                learningrate_i[j] = wlr_i;

                j++;
            }
        }
Example #9
        private void UpdateGateWeights(LSTMGateWeight gateWeight, int i, float featureDerivate, float c_yForget, float err)
        {
            var j = 0;

            float[] deri_i         = gateWeight.deri[i];
            float[] learningrate_i = gateWeight.learningRate[i];
            float[] weights_i      = gateWeight.weightsDelta[i];

            foreach (var denseFeature in DenseFeatureGroups)
            {
                int k = 0;
                var denseFeatureSize = denseFeature.Length;
                var moreItems        = (denseFeatureSize % Vector<float>.Count);
                while (k < denseFeatureSize - moreItems)
                {
                    var feature = new Vector<float>(denseFeature, k);
                    var wd      = feature * featureDerivate;
                    var wd_i    = new Vector<float>(deri_i, j);
                    wd += wd_i * c_yForget;
                    wd.CopyTo(deri_i, j);

                    Vector<float> vecDelta = wd * err;

                    var w_i = new Vector<float>(weights_i, j);
                    w_i += vecDelta;
                    w_i.CopyTo(weights_i, j);

                    j += Vector<float>.Count;
                    k += Vector<float>.Count;
                }

                while (k < denseFeatureSize)
                {
                    var wd = denseFeature[k] * featureDerivate;
                    wd           += deri_i[j] * c_yForget;
                    deri_i[j]     = wd;
                    weights_i[j] += wd * err;

                    j++;
                    k++;
                }
            }
        }
Example #10
        public override void InitializeWeights(int sparseFeatureSize, int denseFeatureSize)
        {
            SparseFeatureSize = sparseFeatureSize;
            DenseFeatureSize  = denseFeatureSize;

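            // Round the dense feature size up to a multiple of Vector<float>.Count so
            // the SIMD update loops need no scalar remainder.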
            if (DenseFeatureSize % Vector<float>.Count != 0)
            {
                DenseFeatureSize += (Vector<float>.Count - (DenseFeatureSize % Vector<float>.Count));
            }

            InitializeCellWeights(null);

            if (SparseFeatureSize > 0)
            {
                sparseFeatureWeights = new Vector4[LayerSize][];
                for (var i = 0; i < LayerSize; i++)
                {
                    sparseFeatureWeights[i] = new Vector4[SparseFeatureSize];
                    for (var j = 0; j < SparseFeatureSize; j++)
                    {
                        sparseFeatureWeights[i][j] = InitializeLSTMWeight();
                    }
                }
            }

            if (DenseFeatureSize > 0)
            {
                wDenseInputGate  = new LSTMGateWeight();
                wDenseForgetGate = new LSTMGateWeight();
                wDenseCellGate   = new LSTMGateWeight();
                wDenseOutputGate = new LSTMGateWeight();
                wDenseInputGate.InitWeights(LayerSize, DenseFeatureSize);
                wDenseForgetGate.InitWeights(LayerSize, DenseFeatureSize);
                wDenseCellGate.InitWeights(LayerSize, DenseFeatureSize);
                wDenseOutputGate.InitWeights(LayerSize, DenseFeatureSize);
            }

            InitializeInternalTrainingParameters();

            Logger.WriteLine(
                "Initializing weights, sparse feature size: {0}, dense feature size: {1}, random value is {2}",
                SparseFeatureSize, DenseFeatureSize, RNNHelper.rand.NextDouble());
        }
Example #11
        private void SaveLSTMweights(LSTMGateWeight gateWeight, BinaryWriter fo, bool bVQ = false)
        {
            float[][] weights = gateWeight.weights;
            var       w       = weights.Length;
            var       h       = weights[0].Length;

            Logger.WriteLine($"Saving LSTM gate weight matrix. width: {w}, height: {h}");

            fo.Write(w);
            fo.Write(h);

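            // vqSize field: this method always writes unquantized weights, so store 0
            // (LoadLSTMGateWeights reads this value back as vqSize).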
            fo.Write(0);

            for (var i = 0; i < w; i++)
            {
                for (var j = 0; j < h; j++)
                {
                    fo.Write(weights[i][j]);
                }
            }
        }
Example #12
        private void UpdateGateWeights(LSTMGateWeight gateWeight, int i, float featureDerivate, float c_yForget, float err)
        {
            var j = 0;

            float[] deri_i         = gateWeight.deri[i];
            float[] learningrate_i = gateWeight.learningRate[i];
            float[] weights_i      = gateWeight.weightsDelta[i];

            var moreItems = (DenseFeatureSize % Vector<float>.Count);

            while (j < DenseFeatureSize - moreItems)
            {
                var feature = new Vector<float>(DenseFeature, j);
                var wd      = feature * featureDerivate;
                var wd_i    = new Vector<float>(deri_i, j);
                wd += wd_i * c_yForget;
                wd.CopyTo(deri_i, j);

                Vector<float> vecDelta = wd * err;

                var w_i = new Vector<float>(weights_i, j);
                w_i += vecDelta;
                w_i.CopyTo(weights_i, j);

                j += Vector<float>.Count;
            }

            while (j < DenseFeatureSize)
            {
                var wd = DenseFeature[j] * featureDerivate;
                wd       += deri_i[j] * c_yForget;
                deri_i[j] = wd;

                float delta = wd * err;
                weights_i[j] += delta;

                j++;
            }
        }
Example #13
        public override void InitializeWeights(int sparseFeatureSize, int denseFeatureSize)
        {
            SparseFeatureSize = sparseFeatureSize;
            DenseFeatureSize  = denseFeatureSize;

            CreateCell(null);

            if (SparseFeatureSize > 0)
            {
                sparseFeatureWeights      = new Vector4[LayerSize][];
                sparseFeatureToHiddenDeri = new Vector3[LayerSize][];
                for (var i = 0; i < LayerSize; i++)
                {
                    sparseFeatureWeights[i]      = new Vector4[SparseFeatureSize];
                    sparseFeatureToHiddenDeri[i] = new Vector3[SparseFeatureSize];
                    for (var j = 0; j < SparseFeatureSize; j++)
                    {
                        sparseFeatureWeights[i][j] = InitializeLSTMWeight();
                    }
                }
            }

            if (DenseFeatureSize > 0)
            {
                wDenseInputGate  = new LSTMGateWeight();
                wDenseForgetGate = new LSTMGateWeight();
                wDenseCellGate   = new LSTMGateWeight();
                wDenseOutputGate = new LSTMGateWeight();
                wDenseInputGate.Init(LayerSize, DenseFeatureSize);
                wDenseForgetGate.Init(LayerSize, DenseFeatureSize);
                wDenseCellGate.Init(LayerSize, DenseFeatureSize);
                wDenseOutputGate.Init(LayerSize, DenseFeatureSize, false);
            }

            Logger.WriteLine(
                "Initializing weights, sparse feature size: {0}, dense feature size: {1}, random value is {2}",
                SparseFeatureSize, DenseFeatureSize, RNNHelper.rand.NextDouble());
        }
Example #14
        private void UpdateOutputGateWeights(LSTMGateWeight gateWeight, int i, float err)
        {
            var j = 0;

            float[] learningrate_i = gateWeight.learningRate[i];
            float[] weights_i      = gateWeight.weights[i];
            while (j < DenseFeatureSize - Vector<float>.Count)
            {
                Vector<float> vecDelta = new Vector<float>(DenseFeature, j);
                vecDelta = vecDelta * err;
                vecDelta = RNNHelper.NormalizeGradient(vecDelta);
                var wlr_i           = new Vector<float>(learningrate_i, j);
                var vecLearningRate = ComputeLearningRate(vecDelta, ref wlr_i);

                var w_i = new Vector<float>(weights_i, j);
                w_i += vecLearningRate * vecDelta;

                w_i.CopyTo(weights_i, j);
                wlr_i.CopyTo(learningrate_i, j);

                j += Vector<float>.Count;
            }

            while (j < DenseFeatureSize)
            {
                float delta = DenseFeature[j] * err;
                delta = RNNHelper.NormalizeGradient(delta);
                var wlr_i        = learningrate_i[j];
                var learningRate = ComputeLearningRate(delta, ref wlr_i);

                weights_i[j]     += learningRate * delta;
                learningrate_i[j] = wlr_i;

                j++;
            }
        }