Exemple #1
0
        /// <summary>
        /// Creates a hidden layer trained with truncated back-propagation
        /// through time (BPTT), using the truncation depth from the settings.
        /// </summary>
        public BPTTLayer(int hiddenLayerSize, ModelSetting modelsetting) : base(hiddenLayerSize)
        {
            Logger.WriteLine("Initializing BPTT Layer...");
            Logger.WriteLine("Layer Size: {0}", hiddenLayerSize);

            // One step beyond the configured depth is kept in the history window.
            bptt = modelsetting.Bptt + 1;
            // Fixed chunk size for propagating gradients backwards.
            bptt_block = 10;

            Logger.WriteLine("BPTT Size: {0}", bptt);
            Logger.WriteLine("BPTT Block Size: {0}", bptt_block);
        }
        /// <summary>
        /// Creates an output layer trained with noise-contrastive estimation,
        /// taking the negative sample count from the model settings.
        /// </summary>
        public NCEOutputLayer(int hiddenLayerSize, ModelSetting modelSetting) : base(hiddenLayerSize)
        {
            Logger.WriteLine("Initializing NCEOutputLayer...");

            m_modelSetting = modelSetting;
            NegativeSampleSize = modelSetting.NCESampleSize;

            Logger.WriteLine("Hidden layer size: {0}", hiddenLayerSize);
            Logger.WriteLine("Negative sampling size: {0}", NegativeSampleSize);

            // NOTE(review): the sample count is validated against the hidden
            // layer size; confirm that is the intended bound (the vocabulary
            // size may be the real limit).
            if (NegativeSampleSize > hiddenLayerSize)
            {
                throw new ArgumentException(string.Format("The size of negative sampling('{0}') cannot be greater than the hidden layer size('{1}').", NegativeSampleSize, hiddenLayerSize));
            }
        }
Exemple #3
0
 /// <summary>
 /// Creates an encoder that will train an RNN model using the supplied settings.
 /// </summary>
 public RNNEncoder(ModelSetting modelSetting)
 {
     m_modelSetting = modelSetting;
 }
 /// <summary>
 /// Creates a dropout layer; the dropout probability itself is read from the
 /// settings elsewhere, this only captures them and seeds the RNG.
 /// </summary>
 public DropoutLayer(int hiddenLayerSize, ModelSetting modelSetting) : base(hiddenLayerSize)
 {
     m_modelSetting = modelSetting;
     // Time-seeded RNG used for drawing the dropout mask.
     rnd = new Random();
 }
Exemple #5
0
        /// <summary>
        /// Entry point for model training: validates the input files, builds the
        /// featurizer and data sets, configures the RNN and runs the encoder.
        /// Reads the static fields (strTagFile, strModelFile, layersize, ...)
        /// populated by command-line argument parsing.
        /// </summary>
        private static void Train()
        {
            Logger.LogFile = "RNNSharpConsole.log";

            if (!File.Exists(strTagFile))
            {
                // Message wording aligned with the other missing-file checks below.
                Logger.WriteLine(Logger.Level.err, "FAILED: The tag mapping file {0} doesn't exist.", strTagFile);
                UsageTrain();
                return;
            }

            //Load tag id and its name from file
            TagSet tagSet = new TagSet(strTagFile);

            //Create configuration instance and set parameters
            ModelSetting RNNConfig = new ModelSetting
            {
                ModelFile = strModelFile,
                NumHidden = layersize,
                IsCRFTraining = iCRF == 1,
                ModelDirection = iDir,
                ModelType = modelType,
                MaxIteration = maxIter,
                SaveStep = savestep,
                LearningRate = alpha,
                Dropout = dropout,
                Bptt = bptt
            };

            //Dump RNN setting on console
            RNNConfig.DumpSetting();

            if (!File.Exists(strFeatureConfigFile))
            {
                Logger.WriteLine(Logger.Level.err, "FAILED: The feature configuration file {0} doesn't exist.", strFeatureConfigFile);
                UsageTrain();
                return;
            }

            //Create feature extractors and load word embedding data from file
            Featurizer featurizer = new Featurizer(strFeatureConfigFile, tagSet);
            featurizer.ShowFeatureSize();

            //Run-time features are incompatible with a bi-directional model (iDir == 1).
            if (featurizer.IsRunTimeFeatureUsed() && iDir == 1)
            {
                Logger.WriteLine(Logger.Level.err, "FAILED: Run time feature is not available for bi-directional RNN model.");
                UsageTrain();
                return;
            }

            if (!File.Exists(strTrainFile))
            {
                Logger.WriteLine(Logger.Level.err, "FAILED: The training corpus doesn't exist.");
                UsageTrain();
                return;
            }

            if (!File.Exists(strValidFile))
            {
                Logger.WriteLine(Logger.Level.err, "FAILED: The validation corpus doesn't exist.");
                UsageTrain();
                return;
            }

            //Create RNN encoder and save necessary parameters
            RNNEncoder encoder = new RNNEncoder(RNNConfig);

            //Load training corpus and extract feature set
            encoder.TrainingSet = new DataSet(tagSet.GetSize());
            LoadDataset(strTrainFile, featurizer, encoder.TrainingSet);

            //Load validation corpus and extract feature set
            encoder.ValidationSet = new DataSet(tagSet.GetSize());
            LoadDataset(strValidFile, featurizer, encoder.ValidationSet);

            if (iCRF == 1)
            {
                Logger.WriteLine(Logger.Level.info, "Initialize output tag bigram transition probability...");
                //Build tag bigram transition matrix
                encoder.TrainingSet.BuildLabelBigramTransition();
            }

            //Start to train the model
            encoder.Train();
        }
Exemple #6
0
 /// <summary>
 /// Creates an encoder that will train an RNN model using the supplied
 /// settings and feature configuration.
 /// </summary>
 public RNNEncoder(ModelSetting modelSetting, Config featurizer)
 {
     this.featurizer = featurizer;
     ModelSettings = modelSetting;
 }
Exemple #7
0
 /// <summary>
 /// Creates an encoder that will train an RNN model using the supplied settings.
 /// </summary>
 public RNNEncoder(ModelSetting modelSetting)
 {
     m_modelSetting = modelSetting;
 }
Exemple #8
0
        /// <summary>
        /// Entry point for model training (legacy setter-based configuration):
        /// validates the input files, loads the corpora, configures the RNN and
        /// runs the encoder. Reads the static fields (strTagFile, strModelFile,
        /// layersize, ...) populated by command-line argument parsing.
        /// </summary>
        private static void Train()
        {
            if (!File.Exists(strTagFile))
            {
                Console.WriteLine("FAILED: The tag mapping file {0} doesn't exist.", strTagFile);
                UsageTrain();
                return;
            }

            //Load tag id and its name from file
            TagSet tagSet = new TagSet(strTagFile);

            //Create configuration instance and set parameters
            ModelSetting RNNConfig = new ModelSetting();
            RNNConfig.SetModelFile(strModelFile);
            RNNConfig.SetNumHidden(layersize);
            RNNConfig.SetCRFTraining(iCRF == 1);
            RNNConfig.SetDir(iDir);
            RNNConfig.SetModelType(modelType);
            RNNConfig.SetMaxIteration(maxIter);
            RNNConfig.SetSaveStep(savestep);
            RNNConfig.SetLearningRate(alpha);
            RNNConfig.SetRegularization(beta);
            RNNConfig.SetBptt(bptt);

            //Dump RNN setting on console
            RNNConfig.DumpSetting();

            if (!File.Exists(strFeatureConfigFile))
            {
                Console.WriteLine("FAILED: The feature configuration file {0} doesn't exist.", strFeatureConfigFile);
                UsageTrain();
                return;
            }

            //Create feature extractors and load word embedding data from file
            Featurizer featurizer = new Featurizer(strFeatureConfigFile, tagSet);
            featurizer.ShowFeatureSize();

            if (!File.Exists(strTrainFile))
            {
                Console.WriteLine("FAILED: The training corpus {0} doesn't exist.", strTrainFile);
                UsageTrain();
                return;
            }

            //Load training corpus and extract feature set
            DataSet dataSetTrain = new DataSet(tagSet.GetSize());
            LoadDataset(strTrainFile, featurizer, dataSetTrain);

            if (!File.Exists(strValidFile))
            {
                Console.WriteLine("FAILED: The validated corpus {0} doesn't exist.", strValidFile);
                UsageTrain();
                return;
            }

            //Load validated corpus and extract feature set
            DataSet dataSetValidation = new DataSet(tagSet.GetSize());
            LoadDataset(strValidFile, featurizer, dataSetValidation);

            //Create RNN encoder and save necessary parameters
            RNNEncoder encoder = new RNNEncoder(RNNConfig);
            encoder.SetTrainingSet(dataSetTrain);
            encoder.SetValidationSet(dataSetValidation);

            if (iCRF == 1)
            {
                Console.WriteLine("Initialize output tag bigram transition probability...");
                //Build tag bigram transition matrix
                dataSetTrain.BuildLabelBigramTransition();
                encoder.SetLabelBigramTransition(dataSetTrain.GetLabelBigramTransition());
            }

            //Start to train the model
            encoder.Train();
        }
Exemple #9
0
 /// <summary>
 /// Creates an LSTM hidden layer of the given size and allocates its cells.
 /// </summary>
 // NOTE(review): modelsetting is accepted but never read in this constructor;
 // confirm whether any LSTM hyper-parameters should be taken from it.
 public LSTMLayer(int layersize, ModelSetting modelsetting) : base(layersize)
 {
     LayerSize = layersize;
     AllocateMemoryForLSTMCells();
 }
Exemple #10
0
 /// <summary>
 /// Creates an encoder that will train an RNN model using the supplied settings.
 /// </summary>
 public RNNEncoder(ModelSetting modelSetting)
 {
     ModelSettings = modelSetting;
 }
        /// <summary>
        /// Builds an NCE output layer and precomputes an accumulated-frequency
        /// table over the training tags, so negative samples can later be drawn
        /// in proportion to how often each tag occurs in the training corpus.
        /// </summary>
        public NCEOutputLayer(int hiddenLayerSize, ModelSetting modelSetting) : base(hiddenLayerSize)
        {
            Logger.WriteLine("Initializing NCEOutputLayer...");
            m_modelSetting     = modelSetting;
            NegativeSampleSize = m_modelSetting.NCESampleSize;
            Logger.WriteLine("Hidden layer size: {0}", hiddenLayerSize);
            Logger.WriteLine("Negative sampling size: {0}", NegativeSampleSize);

            if (NegativeSampleSize > hiddenLayerSize)
            {
                throw new ArgumentException(String.Format("The size of negative sampling('{0}') cannot be greater than the hidden layer size('{1}').", NegativeSampleSize, hiddenLayerSize));
            }

            long train_words = 0;

            vocab_size = 0;
            // Count how often each tag id (state label) occurs across the
            // entire training set.
            Dictionary <int, int> tagId2Freq = new Dictionary <int, int>();

            foreach (Sequence seq in m_modelSetting.TrainDataSet.SequenceList)
            {
                foreach (State state in seq.States)
                {
                    if (tagId2Freq.ContainsKey(state.Label) == false)
                    {
                        tagId2Freq.Add(state.Label, 0);
                    }
                    tagId2Freq[state.Label]++;
                    train_words++;
                }
            }

            vocab_size = tagId2Freq.Keys.Count;
            Logger.WriteLine("Vocabulary size: {0}", vocab_size);
            Logger.WriteLine("Training words in total: {0}", train_words);

            // accFreqTable[i] holds the running total of (scaled) frequencies up
            // to and including tag accTagIdTable[i]; presumably the sampler
            // draws a uniform value in [0, accTotalFreq) and searches this table
            // to pick a tag — the sampling code is not visible here.
            accFreqTable  = new long[vocab_size];
            accTagIdTable = new int[vocab_size];
            accTotalFreq  = 0;

            //Keep accTotalFreq is less than int.MaxValue
            // NOTE(review): the integer division (pair.Key / accFactor) below
            // can round a low-frequency tag's contribution down to zero, giving
            // it a zero-width sampling bucket — confirm this is acceptable.
            int accFactor = 1 + (int)(train_words / int.MaxValue);

            // Invert tagId -> freq into freq -> list of tag ids; SortedDictionary
            // keeps frequencies in ascending order so Reverse() below can lay
            // the tags out most-frequent first.
            SortedDictionary <int, List <int> > freq2TagIdList = new SortedDictionary <int, List <int> >();

            foreach (KeyValuePair <int, int> pair in tagId2Freq)
            {
                if (freq2TagIdList.ContainsKey(pair.Value) == false)
                {
                    freq2TagIdList.Add(pair.Value, new List <int>());
                }
                freq2TagIdList[pair.Value].Add(pair.Key);
            }

            int i = 0;

            // Fill the parallel tables in descending frequency order.
            foreach (KeyValuePair <int, List <int> > pair in freq2TagIdList.Reverse())
            {
                foreach (int tagId in pair.Value)
                {
                    accTotalFreq    += (pair.Key / accFactor);
                    accFreqTable[i]  = accTotalFreq;
                    accTagIdTable[i] = tagId;
                    i++;
                }
            }
        }