Example #1
        public override RNN<T> Clone()
        {
            // Duplicate every hidden layer; CreateLayerSharedWegiths returns a new
            // layer object that shares its weight matrices with the original.
            List<SimpleLayer> forwardLayers  = new List<SimpleLayer>();
            List<SimpleLayer> backwardLayers = new List<SimpleLayer>();

            foreach (SimpleLayer layer in forwardHiddenLayers)
            {
                forwardLayers.Add(layer.CreateLayerSharedWegiths());
            }

            foreach (SimpleLayer layer in backwardHiddenLayers)
            {
                backwardLayers.Add(layer.CreateLayerSharedWegiths());
            }

            // Build the clone from the shared-weight layers and copy over the
            // remaining training state.
            BiRNN<T> rnn = new BiRNN<T>();

            rnn.InitCache(forwardLayers, backwardLayers, OutputLayer.CreateLayerSharedWegiths());
            rnn.CRFWeights    = CRFWeights;
            rnn.MaxSeqLength  = MaxSeqLength;
            rnn.bVQ           = bVQ;
            rnn.IsCRFTraining = IsCRFTraining;
            if (rnn.IsCRFTraining)
            {
                rnn.InitializeCRFVariablesForTraining();
            }

            return rnn;
        }
Example #2
        public override RNN<T> Clone()
        {
            // Duplicate every hidden layer with weight sharing, as in Example #1.
            List<SimpleLayer> forwardLayers  = new List<SimpleLayer>();
            List<SimpleLayer> backwardLayers = new List<SimpleLayer>();

            foreach (SimpleLayer layer in forwardHiddenLayers)
            {
                forwardLayers.Add(layer.CreateLayerSharedWegiths());
            }

            foreach (SimpleLayer layer in backwardHiddenLayers)
            {
                backwardLayers.Add(layer.CreateLayerSharedWegiths());
            }

            BiRNN<T> rnn = new BiRNN<T>();

            rnn.InitCache(forwardLayers, backwardLayers, OutputLayer.CreateLayerSharedWegiths());
            // This variant shares the CRF tag-transition weights and their lock object
            // instead of the per-instance CRF training state used in Example #1.
            rnn.CRFTagTransWeights = CRFTagTransWeights;
            rnn.MaxSeqLength       = MaxSeqLength;
            rnn.crfLocker          = crfLocker;

            return rnn;
        }
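For context, a Clone() like the two above is typically called to give each training thread its own network whose layers share the master network's weight matrices. A minimal usage sketch follows, assuming an RNNSharp-style RNN<T>; the Sequence type argument, the BuildNetwork helper, and threadCount are placeholders, not confirmed library API:

        // Sketch only: Sequence, BuildNetwork() and threadCount are assumed placeholders.
        RNN<Sequence> master = BuildNetwork();
        var workers = new List<RNN<Sequence>>();
        for (int i = 0; i < threadCount; i++)
        {
            // Each clone owns its layer objects but shares weight matrices with
            // the master, so all workers update the same parameters.
            workers.Add(master.Clone());
        }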