Example 1
        public static RNN<T> CreateRNN(NETWORKTYPE networkType)
        {
            RNN<T> rnn = null;

            // Instantiate the concrete network class that matches the requested topology.
            switch (networkType)
            {
                case NETWORKTYPE.Forward:
                    rnn = new ForwardRNN<T>();
                    break;

                case NETWORKTYPE.ForwardSeq2Seq:
                    rnn = new ForwardRNNSeq2Seq<T>();
                    break;

                case NETWORKTYPE.ForwardSeq2SeqLabeling:
                    rnn = new ForwardRNNSeq2SeqLabeling<T>();
                    break;

                case NETWORKTYPE.BiDirectional:
                    rnn = new BiRNN<T>();
                    break;

                case NETWORKTYPE.BiDirectionalAverage:
                    rnn = new BiRNNAvg<T>();
                    break;
            }

            return rnn;
        }
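A minimal call-site sketch for the factory above. The class that declares CreateRNN is not visible in the snippet, so reaching it through RNN<Sequence> is an assumption, as is the Sequence type argument; only NETWORKTYPE and the returned RNN<T> come from the example itself.

        // Hypothetical call site (assumes CreateRNN is a static member of RNN<T>
        // and Sequence is an acceptable type argument for T).
        RNN<Sequence> network = RNN<Sequence>.CreateRNN(NETWORKTYPE.BiDirectional);
        if (network == null)
        {
            // Reached when none of the known NETWORKTYPE values matched.
            throw new ArgumentException("Unsupported network type");
        }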
Example 2
        public override RNN<T> Clone()
        {
            // Per-clone hidden layers that share weight matrices with this instance,
            // plus one empty dense-feature buffer per hidden layer.
            List<SimpleLayer> hiddenLayers = new List<SimpleLayer>();
            List<float[]>[] denseFeatureGroupsList = new List<float[]>[HiddenLayerList.Count];

            int i = 0;
            foreach (SimpleLayer layer in HiddenLayerList)
            {
                hiddenLayers.Add(layer.CreateLayerSharedWegiths());
                denseFeatureGroupsList[i] = new List<float[]>();
                i++;
            }

            List<float[]> denseFeatureGroupsOutputLayer = new List<float[]>();

            // The clone shares weights and CRF parameters but gets fresh feature buffers.
            ForwardRNNSeq2Seq<T> rnn = new ForwardRNNSeq2Seq<T>();
            rnn.HiddenLayerList = hiddenLayers;
            rnn.OutputLayer = OutputLayer.CreateLayerSharedWegiths();
            rnn.CRFWeights = CRFWeights;
            rnn.denseFeatureGroupsList = denseFeatureGroupsList;
            rnn.denseFeatureGroupsOutputLayer = denseFeatureGroupsOutputLayer;
            rnn.sparseFeatureGorups = new List<SparseVector>();
            rnn.MaxSeqLength = MaxSeqLength;
            rnn.bVQ = bVQ;
            rnn.IsCRFTraining = IsCRFTraining;
            if (rnn.IsCRFTraining)
            {
                rnn.InitializeCRFVariablesForTraining();
            }

            return rnn;
        }
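The shared-weights clone above looks intended for data-parallel training: each worker gets its own ForwardRNNSeq2Seq<T> whose layers reuse the master's weight matrices while the dense and sparse feature buffers are created fresh per instance. A rough sketch of that usage follows; Parallel.For comes from System.Threading.Tasks, and masterNetwork, shards, and TrainSequence are hypothetical names, not part of the library code shown here.

        // Sketch only: one clone per worker, so weights are shared but working buffers are not.
        // Requires: using System.Threading.Tasks;
        Parallel.For(0, shards.Length, threadId =>
        {
            RNN<Sequence> localCopy = masterNetwork.Clone();
            foreach (Sequence sequence in shards[threadId])
            {
                TrainSequence(localCopy, sequence);
            }
        });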
Example 3
        public override RNN<T> Clone()
        {
            List<SimpleLayer> hiddenLayers = new List<SimpleLayer>();

            foreach (SimpleLayer layer in HiddenLayerList)
            {
                hiddenLayers.Add(layer.CreateLayerSharedWegiths());
            }

            ForwardRNNSeq2Seq<T> rnn = new ForwardRNNSeq2Seq<T>();
            rnn.HiddenLayerList = hiddenLayers;
            rnn.OutputLayer = OutputLayer.CreateLayerSharedWegiths();
            rnn.CRFTagTransWeights = CRFTagTransWeights;
            rnn.MaxSeqLength = MaxSeqLength;
            rnn.crfLocker = crfLocker;

            return rnn;
        }
Example 4
        public override RNN<T> Clone()
        {
            List<SimpleLayer> hiddenLayers = new List<SimpleLayer>();

            foreach (SimpleLayer layer in HiddenLayerList)
            {
                hiddenLayers.Add(layer.CreateLayerSharedWegiths());
            }

            ForwardRNNSeq2Seq<T> rnn = new ForwardRNNSeq2Seq<T>();
            rnn.HiddenLayerList = hiddenLayers;
            rnn.OutputLayer = OutputLayer.CreateLayerSharedWegiths();
            rnn.CRFWeights = CRFWeights;
            rnn.MaxSeqLength = MaxSeqLength;
            rnn.bVQ = bVQ;
            rnn.IsCRFTraining = IsCRFTraining;
            if (rnn.IsCRFTraining)
            {
                rnn.InitializeCRFVariablesForTraining();
            }

            return rnn;
        }