/// <summary>
/// Creates a copy of this bi-directional network whose layers are built via
/// CreateLayerSharedWegiths (presumably weight-sharing copies — confirm against
/// that method). CRF settings and sequence limits are carried over.
/// </summary>
/// <returns>A new <see cref="BiRNN{T}"/> mirroring this network's configuration.</returns>
public override RNN<T> Clone()
{
    // Shared-weight copies of every forward and backward hidden layer.
    List<SimpleLayer> clonedForward =
        forwardHiddenLayers.Select(layer => layer.CreateLayerSharedWegiths()).ToList();
    List<SimpleLayer> clonedBackward =
        backwardHiddenLayers.Select(layer => layer.CreateLayerSharedWegiths()).ToList();

    BiRNN<T> clone = new BiRNN<T>();
    clone.InitCache(clonedForward, clonedBackward, OutputLayer.CreateLayerSharedWegiths());
    clone.CRFWeights = CRFWeights;
    clone.MaxSeqLength = MaxSeqLength;
    clone.bVQ = bVQ;
    clone.IsCRFTraining = IsCRFTraining;

    // CRF training state is per-instance, so the clone initializes its own.
    if (clone.IsCRFTraining)
    {
        clone.InitializeCRFVariablesForTraining();
    }

    return clone;
}
/// <summary>
/// Creates a copy of this seq2seq network: hidden layers come from
/// CreateLayerSharedWegiths (presumably weight-sharing copies — confirm against
/// that method), while the dense/sparse feature caches are fresh empty buffers.
/// </summary>
/// <returns>A new <see cref="ForwardRNNSeq2Seq{T}"/> mirroring this network's configuration.</returns>
public override RNN<T> Clone()
{
    int layerCount = HiddenLayerList.Count;
    List<SimpleLayer> sharedLayers = new List<SimpleLayer>();
    // One (initially empty) dense-feature group list per hidden layer.
    List<float[]>[] denseGroupsPerLayer = new List<float[]>[layerCount];

    for (int idx = 0; idx < layerCount; idx++)
    {
        sharedLayers.Add(HiddenLayerList[idx].CreateLayerSharedWegiths());
        denseGroupsPerLayer[idx] = new List<float[]>();
    }

    List<float[]> outputDenseGroups = new List<float[]>();

    ForwardRNNSeq2Seq<T> clone = new ForwardRNNSeq2Seq<T>();
    clone.HiddenLayerList = sharedLayers;
    clone.OutputLayer = OutputLayer.CreateLayerSharedWegiths();
    clone.CRFWeights = CRFWeights;
    clone.denseFeatureGroupsList = denseGroupsPerLayer;
    clone.denseFeatureGroupsOutputLayer = outputDenseGroups;
    clone.sparseFeatureGorups = new List<SparseVector>();
    clone.MaxSeqLength = MaxSeqLength;
    clone.bVQ = bVQ;
    clone.IsCRFTraining = IsCRFTraining;

    // CRF training state is per-instance, so the clone initializes its own.
    if (clone.IsCRFTraining)
    {
        clone.InitializeCRFVariablesForTraining();
    }

    return clone;
}
/// <summary>
/// Creates a copy of this network with layers built via CreateLayerSharedWegiths
/// (presumably weight-sharing copies — confirm against that method).
/// The clone receives the same crfLocker reference, so all clones share one
/// lock object (presumably for CRF synchronization — verify against its users).
/// </summary>
/// <returns>A new <see cref="ForwardRNNSeq2Seq{T}"/> mirroring this network's configuration.</returns>
public override RNN<T> Clone()
{
    List<SimpleLayer> sharedLayers =
        HiddenLayerList.Select(layer => layer.CreateLayerSharedWegiths()).ToList();

    ForwardRNNSeq2Seq<T> clone = new ForwardRNNSeq2Seq<T>();
    clone.HiddenLayerList = sharedLayers;
    clone.OutputLayer = OutputLayer.CreateLayerSharedWegiths();
    clone.CRFTagTransWeights = CRFTagTransWeights;
    clone.MaxSeqLength = MaxSeqLength;
    clone.crfLocker = crfLocker;
    return clone;
}
/// <summary>
/// Creates a copy of this forward network whose layers come from
/// CreateLayerSharedWegiths (presumably weight-sharing copies — confirm against
/// that method). CRF settings and sequence limits are carried over.
/// </summary>
/// <returns>A new <see cref="ForwardRNN{T}"/> mirroring this network's configuration.</returns>
public override RNN<T> Clone()
{
    List<SimpleLayer> sharedLayers =
        HiddenLayerList.Select(layer => layer.CreateLayerSharedWegiths()).ToList();

    ForwardRNN<T> clone = new ForwardRNN<T>
    {
        HiddenLayerList = sharedLayers,
        OutputLayer = OutputLayer.CreateLayerSharedWegiths(),
        CRFWeights = CRFWeights,
        MaxSeqLength = MaxSeqLength,
        bVQ = bVQ,
        IsCRFTraining = IsCRFTraining
    };

    // CRF training state is per-instance, so the clone initializes its own.
    if (clone.IsCRFTraining)
    {
        clone.InitializeCRFVariablesForTraining();
    }

    return clone;
}
/// <summary>
/// Creates a copy of this averaging bi-directional network whose layers come
/// from CreateLayerSharedWegiths (presumably weight-sharing copies — confirm
/// against that method). The clone receives the same crfLocker reference, so
/// all clones share one lock object.
/// </summary>
/// <returns>A new <see cref="BiRNNAvg{T}"/> mirroring this network's configuration.</returns>
public override RNN<T> Clone()
{
    // Shared-weight copies of every forward and backward hidden layer.
    List<SimpleLayer> clonedForward =
        forwardHiddenLayers.Select(layer => layer.CreateLayerSharedWegiths()).ToList();
    List<SimpleLayer> clonedBackward =
        backwardHiddenLayers.Select(layer => layer.CreateLayerSharedWegiths()).ToList();

    BiRNNAvg<T> clone = new BiRNNAvg<T>();
    clone.InitCache(clonedForward, clonedBackward, OutputLayer.CreateLayerSharedWegiths());
    clone.CRFTagTransWeights = CRFTagTransWeights;
    clone.MaxSeqLength = MaxSeqLength;
    clone.crfLocker = crfLocker;
    return clone;
}
/// <summary>
/// Loads a bi-directional model from a binary file: CRF flag, hidden-layer
/// count, forward layers, backward layers, output layer, optional CRF tag
/// transition weights, and (when training) the per-sequence caches.
/// </summary>
/// <param name="filename">Path of the binary model file to read.</param>
/// <param name="bTrain">
/// True to configure layers for training (running mode + internal training
/// parameters) and build training caches; false for test/inference mode.
/// </param>
public override void LoadModel(string filename, bool bTrain = false)
{
    Logger.WriteLine(Logger.Level.info, "Loading bi-directional model: {0}", filename);

    // Open the file directly for binary reading. The previous code allocated a
    // StreamReader only to reach its BaseStream and never disposed the
    // BinaryReader; File.OpenRead avoids the pointless text reader.
    using (var fs = File.OpenRead(filename))
    using (var br = new BinaryReader(fs))
    {
        IsCRFTraining = br.ReadBoolean();
        var layerSize = br.ReadInt32();

        // Forward and backward stacks have the same layer count and the same
        // on-disk layout, so one helper loads both.
        forwardHiddenLayers = LoadLayerGroup(br, layerSize, bTrain);
        backwardHiddenLayers = LoadLayerGroup(br, layerSize, bTrain);

        Logger.WriteLine("Create output layer");
        OutputLayer = Load((LayerType)br.ReadInt32(), br);
        ApplyRunningMode(OutputLayer, bTrain);

        if (IsCRFTraining)
        {
            Logger.WriteLine("Loading CRF tag trans weights...");
            CRFWeights = RNNHelper.LoadMatrix(br);
        }

        if (bTrain)
        {
            InitCache(forwardHiddenLayers, backwardHiddenLayers, OutputLayer.CreateLayerSharedWegiths());
        }
    }
}

/// <summary>
/// Reads <paramref name="layerCount"/> serialized layers (each prefixed by its
/// <see cref="LayerType"/>) and configures each for training or test mode.
/// </summary>
private List<SimpleLayer> LoadLayerGroup(BinaryReader br, int layerCount, bool bTrain)
{
    var layers = new List<SimpleLayer>();
    for (var i = 0; i < layerCount; i++)
    {
        var layer = Load((LayerType)br.ReadInt32(), br);
        ApplyRunningMode(layer, bTrain);
        layers.Add(layer);
    }
    return layers;
}

/// <summary>
/// Puts a layer into training mode (with internal training parameters
/// initialized) or test mode.
/// </summary>
private static void ApplyRunningMode(SimpleLayer layer, bool bTrain)
{
    if (bTrain)
    {
        layer.SetRunningMode(RunningMode.Training);
        layer.InitializeInternalTrainingParameters();
    }
    else
    {
        layer.SetRunningMode(RunningMode.Test);
    }
}