/// <summary>
/// Returns a new <see cref="LSTMGateWeight"/> wrapper whose weight, delta and
/// learning-rate state is shared by reference with this instance, so updates
/// made through either object are visible through both.
/// </summary>
public LSTMGateWeight CloneSharedWeights()
{
    var shared = new LSTMGateWeight();
    // Set up the clone's internal state first, then swap in references to
    // this instance's matrices so the underlying storage is shared.
    shared.InitWeights(layerSize, denseFeatureSize);
    shared.weights = weights;
    shared.weightsDelta = weightsDelta;
    shared.learningRate = learningRate;
    return shared;
}
/// <summary>
/// Allocates and randomly initializes the layer's sparse feature weights and
/// the four dense gate weight matrices (input, forget, cell, output).
/// </summary>
/// <param name="sparseFeatureSize">Number of sparse input features; 0 skips sparse weight allocation.</param>
/// <param name="denseFeatureSize">
/// Number of dense input features; rounded up to a multiple of
/// <c>Vector&lt;float&gt;.Count</c> so SIMD code can work in whole vector chunks.
/// 0 skips dense gate allocation.
/// </param>
public override void InitializeWeights(int sparseFeatureSize, int denseFeatureSize)
{
    SparseFeatureSize = sparseFeatureSize;
    DenseFeatureSize = denseFeatureSize;

    // Pad the dense size up to the next multiple of the SIMD vector width.
    var vectorWidth = Vector<float>.Count;
    var remainder = DenseFeatureSize % vectorWidth;
    if (remainder != 0)
    {
        DenseFeatureSize += vectorWidth - remainder;
    }

    InitializeCellWeights(null);

    if (SparseFeatureSize > 0)
    {
        // One row of randomly initialized Vector4 weights per cell in the layer.
        sparseFeatureWeights = new Vector4[LayerSize][];
        for (var row = 0; row < LayerSize; row++)
        {
            var rowWeights = new Vector4[SparseFeatureSize];
            for (var col = 0; col < SparseFeatureSize; col++)
            {
                rowWeights[col] = InitializeLSTMWeight();
            }
            sparseFeatureWeights[row] = rowWeights;
        }
    }

    if (DenseFeatureSize > 0)
    {
        // One dense weight matrix per LSTM gate, all sized identically.
        wDenseInputGate = new LSTMGateWeight();
        wDenseForgetGate = new LSTMGateWeight();
        wDenseCellGate = new LSTMGateWeight();
        wDenseOutputGate = new LSTMGateWeight();
        foreach (var gate in new[] { wDenseInputGate, wDenseForgetGate, wDenseCellGate, wDenseOutputGate })
        {
            gate.InitWeights(LayerSize, DenseFeatureSize);
        }
    }

    InitializeInternalTrainingParameters();

    Logger.WriteLine(
        "Initializing weights, sparse feature size: {0}, dense feature size: {1}, random value is {2}",
        SparseFeatureSize, DenseFeatureSize, RNNHelper.rand.NextDouble());
}