Example #1
        public override void LoadModel(string filename, bool bTrain = false)
        {
            Logger.WriteLine("Loading SimpleRNN model: {0}", filename);

            using (var sr = new StreamReader(filename))
            {
                var br = new BinaryReader(sr.BaseStream);

                IsCRFTraining = br.ReadBoolean();

                //Create cells of each layer
                var       layerSize = br.ReadInt32();
                LayerType layerType = LayerType.None;

                HiddenLayerList = new List<SimpleLayer>();
                for (var i = 0; i < layerSize; i++)
                {
                    layerType = (LayerType)br.ReadInt32();
                    HiddenLayerList.Add(Load(layerType, br));

                    SimpleLayer layer = HiddenLayerList[HiddenLayerList.Count - 1];
                    if (bTrain)
                    {
                        layer.SetRunningMode(RunningMode.Training);
                        layer.InitializeInternalTrainingParameters();
                    }
                    else
                    {
                        layer.SetRunningMode(RunningMode.Test);
                    }
                }

                Logger.WriteLine("Create output layer");
                layerType   = (LayerType)br.ReadInt32();
                OutputLayer = Load(layerType, br);

                if (bTrain)
                {
                    OutputLayer.SetRunningMode(RunningMode.Training);
                    OutputLayer.InitializeInternalTrainingParameters();
                }
                else
                {
                    OutputLayer.SetRunningMode(RunningMode.Test);
                }

                if (IsCRFTraining)
                {
                    //Load CRF transition weights and create the lock used during CRF decoding
                    Logger.WriteLine("Loading CRF tag trans weights...");
                    CRFTagTransWeights = RNNHelper.LoadMatrix(br);
                    crfLocker          = new object();
                }
            }
        }
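The two load modes differ only in the running mode pushed into each layer. A minimal usage sketch, assuming a hypothetical concrete class `SimpleRNN` that carries the override above:

            // Hedged usage sketch; `SimpleRNN` is an assumed class name.
            var rnn = new SimpleRNN();

            // Inference: every layer is switched to RunningMode.Test.
            rnn.LoadModel("model.bin");

            // Resuming training: layers switch to RunningMode.Training and
            // re-create their internal training parameters.
            rnn.LoadModel("model.bin", bTrain: true);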
Example #2
        /// <summary>
        /// Create network for forward sequence-to-sequence
        /// Sparse feature size = source sparse feature size + target sparse feature size
        /// Dense feature size: for the first layer, (source dense feature size + target dense feature size); for other layers, (source dense feature size + previous hidden layer size)
        /// </summary>
        /// <param name="hiddenLayersConfig"></param>
        /// <param name="outputLayerConfig"></param>
        /// <param name="TrainingSet"></param>
        /// <param name="featurizer"></param>
        public override void CreateNetwork(List<LayerConfig> hiddenLayersConfig, LayerConfig outputLayerConfig, DataSet<T> TrainingSet, Config featurizer)
        {
            var srcDenseFeatureSize = featurizer.Seq2SeqAutoEncoder.GetTopHiddenLayerSize() * 2;
            var sparseFeatureSize   = TrainingSet.SparseFeatureSize;

            sparseFeatureSize += featurizer.Seq2SeqAutoEncoder.Config.SparseFeatureSize;
            Logger.WriteLine("Sparse Feature Format: [{0}][{1}] = {2}", TrainingSet.SparseFeatureSize, featurizer.Seq2SeqAutoEncoder.Config.SparseFeatureSize, sparseFeatureSize);

            HiddenLayerList = CreateLayers(hiddenLayersConfig);

            for (var i = 0; i < HiddenLayerList.Count; i++)
            {
                SimpleLayer layer = HiddenLayerList[i];
                layer.InitializeWeights(sparseFeatureSize, i == 0 ? (srcDenseFeatureSize + TrainingSet.DenseFeatureSize) : (srcDenseFeatureSize + HiddenLayerList[i - 1].LayerSize));
                layer.SetRunningMode(RunningMode.Training);

                Logger.WriteLine($"Create hidden layer {i}: size = {layer.LayerSize}, sparse feature size = {layer.SparseFeatureSize}, dense feature size = {layer.DenseFeatureSize}");
            }

            outputLayerConfig.LayerSize = TrainingSet.TagSize;
            OutputLayer = CreateOutputLayer(outputLayerConfig, 0, (srcDenseFeatureSize + HiddenLayerList[HiddenLayerList.Count - 1].LayerSize));
            OutputLayer.SetRunningMode(RunningMode.Training);

            Logger.WriteLine($"Create a Forward recurrent neural sequence-to-sequence network with {HiddenLayerList.Count} hidden layers");
        }
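To make the sizing rule from the summary concrete, a small worked sketch; every number below is an assumed value, not taken from the source:

            int srcDenseFeatureSize = 200 * 2;                   // assumed top hidden layer size 200, doubled as in the code
            int sparseFeatureSize   = 10000 + 5000;              // assumed training-set + auto-encoder sparse sizes = 15000
            int firstLayerDense     = srcDenseFeatureSize + 50;  // + assumed target dense feature size  = 450
            int laterLayerDense     = srcDenseFeatureSize + 300; // + assumed previous hidden layer size = 700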
Example #3
        public override void CreateNetwork(List<LayerConfig> hiddenLayersConfig, LayerConfig outputLayerConfig, DataSet<T> TrainingSet, Config featurizer)
        {
            var forwardHiddenLayers  = CreateLayers(hiddenLayersConfig);
            var backwardHiddenLayers = CreateLayers(hiddenLayersConfig);

            for (var i = 0; i < hiddenLayersConfig.Count; i++)
            {
                SimpleLayer forwardLayer  = forwardHiddenLayers[i];
                SimpleLayer backwardLayer = backwardHiddenLayers[i];

                var denseFeatureSize = TrainingSet.DenseFeatureSize;
                if (i > 0)
                {
                    denseFeatureSize = forwardHiddenLayers[i - 1].LayerSize * 2;
                }

                forwardLayer.InitializeWeights(TrainingSet.SparseFeatureSize, denseFeatureSize);
                backwardLayer.InitializeWeights(TrainingSet.SparseFeatureSize, denseFeatureSize);

                forwardLayer.SetRunningMode(RunningMode.Training);
                backwardLayer.SetRunningMode(RunningMode.Training);

                Logger.WriteLine($"Create hidden layer {i}: size = {forwardLayer.LayerSize}, sparse feature size = {forwardLayer.SparseFeatureSize}, dense feature size = {forwardLayer.DenseFeatureSize}");
            }

            outputLayerConfig.LayerSize = TrainingSet.TagSize;
            SimpleLayer outputLayer = CreateOutputLayer(outputLayerConfig, TrainingSet.SparseFeatureSize, forwardHiddenLayers[forwardHiddenLayers.Count - 1].LayerSize * 2);

            outputLayer.SetRunningMode(RunningMode.Training);

            Logger.WriteLine($"Create a bi-directional recurrent neural network with {forwardHiddenLayers.Count} hidden layers. Forward and backward layers are concatnated.");
            InitCache(forwardHiddenLayers, backwardHiddenLayers, outputLayer);
        }
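The `* 2` factors come from concatenating forward and backward layer outputs before they feed the next layer. A hedged size walk-through under assumed layer sizes:

            int denseInputSize = 50;              // assumed raw dense feature size
            int[] layerSizes   = { 300, 200 };    // assumed hidden layer sizes
            int layer0Dense = denseInputSize;     // first layer (each direction) sees raw dense input
            int layer1Dense = layerSizes[0] * 2;  // forward + backward concatenated = 600
            int outputDense = layerSizes[1] * 2;  // output layer input             = 400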
Example #4
        public override void CreateNetwork(List<LayerConfig> hiddenLayersConfig, LayerConfig outputLayerConfig, DataSet<T> TrainingSet, Config featurizer)
        {
            HiddenLayerList = CreateLayers(hiddenLayersConfig);
            for (var i = 0; i < HiddenLayerList.Count; i++)
            {
                SimpleLayer layer = HiddenLayerList[i];
                layer.InitializeWeights(TrainingSet.SparseFeatureSize, i == 0 ? TrainingSet.DenseFeatureSize : HiddenLayerList[i - 1].LayerSize);
                layer.SetRunningMode(RunningMode.Training);

                Logger.WriteLine($"Create hidden layer {i}: size = {layer.LayerSize}, sparse feature size = {layer.SparseFeatureSize}, dense feature size = {layer.DenseFeatureSize}");
            }

            outputLayerConfig.LayerSize = TrainingSet.TagSize;
            OutputLayer = CreateOutputLayer(outputLayerConfig, TrainingSet.SparseFeatureSize, HiddenLayerList[HiddenLayerList.Count - 1].LayerSize);
            OutputLayer.SetRunningMode(RunningMode.Training);

            Logger.WriteLine($"Create a Forward recurrent neural network with {HiddenLayerList.Count} hidden layers");
        }
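A hedged setup sketch for this forward network; the instance names and the `LayerType` members used here are assumptions, not taken from the source:

            // Hypothetical configuration; enum members and instances assumed.
            var hiddenConfigs = new List<LayerConfig>
            {
                new LayerConfig { LayerSize = 300, LayerType = LayerType.LSTM },
                new LayerConfig { LayerSize = 200, LayerType = LayerType.LSTM }
            };
            var outputConfig = new LayerConfig { LayerType = LayerType.Softmax };

            // CreateNetwork overwrites outputConfig.LayerSize with
            // TrainingSet.TagSize, so it is left unset here.
            network.CreateNetwork(hiddenConfigs, outputConfig, trainingSet, featurizer);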
Example #5
        public override void LoadModel(string filename, bool bTrain = false)
        {
            Logger.WriteLine(Logger.Level.info, "Loading bi-directional model: {0}", filename);

            using (var sr = new StreamReader(filename))
            {
                var br = new BinaryReader(sr.BaseStream);

                IsCRFTraining = br.ReadBoolean();
                var       layerSize = br.ReadInt32();
                LayerType layerType = LayerType.None;

                //Load forward layers from file
                forwardHiddenLayers = new List<SimpleLayer>();
                for (var i = 0; i < layerSize; i++)
                {
                    layerType = (LayerType)br.ReadInt32();
                    forwardHiddenLayers.Add(Load(layerType, br));

                    SimpleLayer layer = forwardHiddenLayers[forwardHiddenLayers.Count - 1];
                    if (bTrain)
                    {
                        layer.SetRunningMode(RunningMode.Training);
                        layer.InitializeInternalTrainingParameters();
                    }
                    else
                    {
                        layer.SetRunningMode(RunningMode.Test);
                    }
                }

                //Load backward layers from file
                backwardHiddenLayers = new List<SimpleLayer>();
                for (var i = 0; i < layerSize; i++)
                {
                    layerType = (LayerType)br.ReadInt32();
                    backwardHiddenLayers.Add(Load(layerType, br));

                    SimpleLayer layer = backwardHiddenLayers[backwardHiddenLayers.Count - 1];
                    if (bTrain)
                    {
                        layer.SetRunningMode(RunningMode.Training);
                        layer.InitializeInternalTrainingParameters();
                    }
                    else
                    {
                        layer.SetRunningMode(RunningMode.Test);
                    }
                }

                Logger.WriteLine("Create output layer");
                layerType   = (LayerType)br.ReadInt32();
                OutputLayer = Load(layerType, br);

                if (bTrain)
                {
                    OutputLayer.SetRunningMode(RunningMode.Training);
                    OutputLayer.InitializeInternalTrainingParameters();
                }
                else
                {
                    OutputLayer.SetRunningMode(RunningMode.Test);
                }

                if (IsCRFTraining)
                {
                    Logger.WriteLine("Loading CRF tag trans weights...");
                    CRFWeights = RNNHelper.LoadMatrix(br);
                }

                if (bTrain)
                {
                    InitCache(forwardHiddenLayers, backwardHiddenLayers, OutputLayer.CreateLayerSharedWegiths());
                }
            }
        }
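Usage mirrors Example #1; a minimal sketch assuming a hypothetical `BiRNN` class exposing this override:

            // Hedged usage sketch; `BiRNN` is an assumed class name.
            var biRnn = new BiRNN();

            // Test mode: layers run as RunningMode.Test and no cache is built.
            biRnn.LoadModel("bi-model.bin");

            // Training mode additionally re-creates training parameters and
            // initializes the forward/backward cache with shared output weights.
            biRnn.LoadModel("bi-model.bin", bTrain: true);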