public static RNN<T> CreateRNN(NETWORKTYPE networkType)
{
    // Factory: map the requested network type to a concrete RNN implementation.
    // An unrecognized value falls through the switch and null is returned.
    RNN<T> rnn = null;
    switch (networkType)
    {
        case NETWORKTYPE.Forward:
            rnn = new ForwardRNN<T>();
            break;

        case NETWORKTYPE.ForwardSeq2Seq:
            rnn = new ForwardRNNSeq2Seq<T>();
            break;

        case NETWORKTYPE.ForwardSeq2SeqLabeling:
            rnn = new ForwardRNNSeq2SeqLabeling<T>();
            break;

        case NETWORKTYPE.BiDirectional:
            rnn = new BiRNN<T>();
            break;

        case NETWORKTYPE.BiDirectionalAverage:
            rnn = new BiRNNAvg<T>();
            break;
    }

    return rnn;
}
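// Usage sketch (not from the original source; the enclosing class name "RNNFactory" and the
// element type "Sequence" are assumptions for illustration): callers pick a concrete network
// by NETWORKTYPE and should treat a null result as an unsupported type.
public static RNN<Sequence> BuildNetwork(NETWORKTYPE networkType)
{
    RNN<Sequence> network = RNNFactory<Sequence>.CreateRNN(networkType);
    if (network == null)
    {
        throw new NotSupportedException($"Unsupported network type: {networkType}");
    }

    return network;
}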
public override RNN<T> Clone()
{
    // Rebuild the forward/backward stacks as layers that share weights with this network.
    List<SimpleLayer> forwardLayers = new List<SimpleLayer>();
    List<SimpleLayer> backwardLayers = new List<SimpleLayer>();

    foreach (SimpleLayer layer in forwardHiddenLayers)
    {
        forwardLayers.Add(layer.CreateLayerSharedWegiths());
    }

    foreach (SimpleLayer layer in backwardHiddenLayers)
    {
        backwardLayers.Add(layer.CreateLayerSharedWegiths());
    }

    BiRNN<T> rnn = new BiRNN<T>();
    rnn.InitCache(forwardLayers, backwardLayers, OutputLayer.CreateLayerSharedWegiths());

    // Copy training configuration; CRF-specific buffers are re-initialized per clone.
    rnn.CRFWeights = CRFWeights;
    rnn.MaxSeqLength = MaxSeqLength;
    rnn.bVQ = bVQ;
    rnn.IsCRFTraining = IsCRFTraining;
    if (rnn.IsCRFTraining)
    {
        rnn.InitializeCRFVariablesForTraining();
    }

    return rnn;
}
public override RNN<T> Clone()
{
    // Alternate variant of Clone(): the same shared-weight layer copies, but the CRF state
    // is carried via CRFTagTransWeights and a shared locker instead of CRFWeights.
    List<SimpleLayer> forwardLayers = new List<SimpleLayer>();
    List<SimpleLayer> backwardLayers = new List<SimpleLayer>();

    foreach (SimpleLayer layer in forwardHiddenLayers)
    {
        forwardLayers.Add(layer.CreateLayerSharedWegiths());
    }

    foreach (SimpleLayer layer in backwardHiddenLayers)
    {
        backwardLayers.Add(layer.CreateLayerSharedWegiths());
    }

    BiRNN<T> rnn = new BiRNN<T>();
    rnn.InitCache(forwardLayers, backwardLayers, OutputLayer.CreateLayerSharedWegiths());

    rnn.CRFTagTransWeights = CRFTagTransWeights;
    rnn.MaxSeqLength = MaxSeqLength;
    rnn.crfLocker = crfLocker;

    return rnn;
}
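// Usage sketch (an assumption, not taken from the original source): because the cloned layers
// share weights with the originals (via CreateLayerSharedWegiths), Clone() is a natural way to
// give each training thread its own activation/state buffers while all threads update a single
// set of parameters. The element type "Sequence" and helper name are assumed for illustration.
public static List<RNN<Sequence>> CloneForWorkers(BiRNN<Sequence> master, int workerCount)
{
    var workers = new List<RNN<Sequence>>();
    for (int i = 0; i < workerCount; i++)
    {
        workers.Add(master.Clone());   // shared weights, private per-clone caches
    }

    return workers;
}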