Example #1
        public MLPVar(Config conf, int nInput, int nOutput, OptBase opt) : base(conf, nInput, nOutput, opt)
        {
            Layers = new DenseUnit[Conf.Layers];

            for (var i = 0; i < Layers.Length; i++)
            {
                if (i == 0 && i == Conf.Layers - 1)
                {
                    // single-layer net: map input directly to output
                    Layers[i] = new DenseUnit(this, InDim, OutDim);
                }
                else if (i == 0)
                {
                    // first layer: input -> hidden
                    Layers[i] = new DenseMaskedUnit(this, InDim, HidDim);
                }
                else if (i == Conf.Layers - 1)
                {
                    // last layer: hidden -> output
                    Layers[i] = new DenseUnit(this, HidDim, OutDim);
                }
                else
                {
                    // interior layer: hidden -> hidden
                    Layers[i] = new DenseMaskedUnit(this, HidDim, HidDim);
                }
            }
            _top = conf.TrainTop;
        }
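The branch chain above reduces to a single rule over (fan-in, fan-out) pairs; a self-contained sketch of that rule, using only the dimensions themselves (all names here are illustrative, not part of the repo):

        // Layer i of n maps InDim -> HidDim on entry, HidDim -> OutDim on exit,
        // HidDim -> HidDim in between, and InDim -> OutDim directly when n == 1.
        static (int fin, int fout) LayerDims(int i, int n, int inDim, int hidDim, int outDim)
        {
            var fin  = i == 0     ? inDim  : hidDim;
            var fout = i == n - 1 ? outDim : hidDim;
            return (fin, fout);
        }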
Example #2
File: MLP.cs Project: qicny/meProp
        public MLP(Config conf, int nInput, OptBase opt) : base(conf, nInput, opt)
        {
            var r       = new RNG(conf.RandomSeed);
            var layer   = conf.Layer;
            var nOutput = conf.LabelCount;
            var nHidden = conf.HiddenSize;

            _h = new DenseUnit[layer];
            if (layer == 1)
            {
                _h[0] = new DenseUnit(this, nInput, nOutput, r);
            }
            else
            {
                for (var i = 0; i < layer; i++)
                {
                    if (i == 0)
                    {
                        _h[i] = new DenseUnit(this, nInput, nHidden, r);
                    }
                    else if (i == layer - 1)
                    {
                        _h[i] = new DenseUnit(this, nHidden, nOutput, r);
                    }
                    else
                    {
                        _h[i] = new DenseUnit(this, nHidden, nHidden, r);
                    }
                }
            }
            keep = 1.0 - conf.DropProb;   // probability of keeping a unit under dropout
        }
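The keep value computed above is the survival probability for dropout. A minimal, self-contained sketch of how such a value is typically applied (inverted dropout; an assumption for illustration, not code from the repo):

        // At train time, zero each unit with probability 1 - keep and scale the
        // survivors by 1 / keep, so the expected activation matches inference,
        // where dropout is a no-op.
        static void DropoutInPlace(double[] h, double keep, Random rng, bool training)
        {
            if (!training || keep >= 1.0) return;
            for (var i = 0; i < h.Length; i++)
                h[i] = rng.NextDouble() < keep ? h[i] / keep : 0.0;
        }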
Example #3
        public void Recover(Config conf, DataSet train)
        {
            _train = train;
            _conf  = conf;
            FixedParams.AddRange(_surface.SubmitParameters());
            FixedParams.AddRange(_act.SubmitParameters());
            FixedParams.AddRange(_label.SubmitParameters());
            FixedParams.AddRange(_actOutput.SubmitParameters());
            FixedParams.AddRange(_labelOutput.SubmitParameters());
            FixedParams.Add(_non);
            AllParams.AddRange(FixedParams);
            AllParams.AddRange(_formemb);
            AllParams.AddRange(_posemb);
            switch (_conf.OptType)
            {
            case OptType.sgd:
                _opt = new Sgd(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, _conf.DecayRate,
                               train.Count);
                break;

            case OptType.adagrad:
                _opt = new AdaGrad(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, _conf.Eps);
                break;

            case OptType.adam:
                _opt = new Adam(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, 0.999f, 0.9f, 1e-8f);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(_conf.OptType), "unknown opt type");
            }
        }
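The sgd branch is the only one handed DecayRate together with the dataset size. A plausible reading (an assumption about this Sgd, not shown in the snippet) is a step-size anneal with time measured in passes over the data:

        // lr_t = lr0 / (1 + decay * t), with t = examples seen / dataset size
        static double DecayedLr(double lr0, double decay, long seen, long datasetSize)
            => lr0 / (1.0 + decay * (seen / (double)datasetSize));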
Example #4
 protected NetBase(Config conf, int idim, OptBase opt)
 {
     _opt       = opt;
     InDim      = idim;
     OutDim     = conf.LabelCount;
     HidDim     = conf.HiddenSize;
     FixedParam = new List<Tensor>();
     _seed      = conf.RandomSeed;
 }
Example #5
        public MLP(Config conf, DataSet train)
        {
            _train     = train;
            _conf      = conf;
            _rng       = new RandomNumberGenerator(conf.Seed);
            _rngChoice = new RandomNumberGenerator(conf.Seed);
            _formemb   = File.Exists(conf.EmbedFile)
                ? InitEmbed(conf.EmbedFile, train.Form, conf.InitRange)
                : InitEmbed(train.Form, conf.InitRange);
            _posemb      = InitEmbed(train.PosTag, conf.InitRange);
            _non         = new Tensor(1, conf.HiddenSize * 2, true);
            _surface     = new BiLstmUnit(conf.EmbeddingSize * 2, conf.HiddenSize);
            _act         = new DenseUnit(conf.HiddenSize * 6, conf.HiddenSize);
            _actOutput   = new DenseUnit(conf.HiddenSize, 3);
            _label       = new DenseUnit(conf.HiddenSize * 6, conf.HiddenSize);
            _labelOutput = new DenseUnit(conf.HiddenSize * 3, train.DepLabel.Count);
            FixedParams.AddRange(_surface.SubmitParameters());
            FixedParams.AddRange(_act.SubmitParameters());
            FixedParams.AddRange(_label.SubmitParameters());
            FixedParams.AddRange(_actOutput.SubmitParameters());
            FixedParams.AddRange(_labelOutput.SubmitParameters());
            FixedParams.Add(_non);
            AllParams.AddRange(FixedParams);
            AllParams.AddRange(_formemb);
            AllParams.AddRange(_posemb);

            _surface.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);
            _act.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);
            _label.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);
            _actOutput.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);
            _labelOutput.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);

            switch (conf.OptType)
            {
            case OptType.sgd:
                _opt = new Sgd(conf.LearningRate, conf.L2RegFactor, conf.ClipBound, conf.DecayRate,
                               train.Count);
                break;

            case OptType.adagrad:
                _opt = new AdaGrad(conf.LearningRate, conf.L2RegFactor, conf.ClipBound, conf.Eps);
                break;

            case OptType.adam:
                _opt = new Adam(conf.LearningRate, conf.L2RegFactor, conf.ClipBound, 0.999f, 0.9f, 1e-8f);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(conf.OptType), "unknown opt type");
            }
            _opt.Prepare(AllParams);

            G.SetRng(conf.Seed);
        }
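GlorotNormalInit(fin, fout) is used by name only. Assuming it follows the standard Glorot (Xavier) normal scheme, each weight is drawn from N(0, 2 / (fin + fout)); a self-contained sketch via the Box-Muller transform:

        static double GlorotNormal(int fin, int fout, Random rng)
        {
            var std = Math.Sqrt(2.0 / (fin + fout));
            var u1  = 1.0 - rng.NextDouble();   // in (0, 1], safe for Log
            var u2  = rng.NextDouble();
            return std * Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Sin(2.0 * Math.PI * u2);
        }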
Example #6
        private bool _isTraining;                   // training-mode flag; when false, no gradient operations are recorded

        protected NetBase(Config conf, int idim, int odim, OptBase opt)
        {
            _opt        = opt;
            Conf        = conf;
            InDim       = idim;
            OutDim      = odim;
            HidDim      = Conf.HiddenSize;
            FixedParam  = new List<Tensor>();
            Rand        = new RandomNumberGenerator(conf.RandomSeed);
            _isTraining = false;
        }
Example #7
        private void InitNet(NetType netType, OptType optType)
        {
            switch (optType)
            {
            case OptType.sgd:
                Opt = new Sgd(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, _conf.DecayRate,
                              _dataset.Examples.Count);
                break;

            case OptType.adagrad:
                Opt = new AdaGrad(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, _conf.Eps);
                break;

            case OptType.adam:
                Opt = new Adam(l2RegFactor: _conf.L2RegFactor, clipRange: _conf.ClipBound);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(optType), optType, null);
            }

            // hidden and output sizes are read from _conf inside the nets themselves,
            // so only the input dimension is needed here, taken from the first example
            var eDim = _dataset.Examples[0].Feature.Length;

            switch (netType)
            {
            case NetType.mlp:
                Net = new MLP(_conf, eDim, Opt);
                break;

            case NetType.mlptop:
                Net = new MLPTop(_conf, eDim, Opt);
                break;

            case NetType.mlprand:
                Net = new MLPRand(_conf, eDim, Opt);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(netType), netType, null);
            }
        }
Example #8
        private void InitNet(NetType netType, OptType optType)
        {
            switch (optType)
            {
            case OptType.sgd:
                Opt = new Sgd(Global.Config.LearningRate, Global.Config.L2RegFactor, Global.Config.ClipBound, Global.Config.DecayRate,
                              _dataset.Examples.Count);
                break;

            case OptType.adagrad:
                Opt = new AdaGrad(Global.Config.LearningRate, Global.Config.L2RegFactor, Global.Config.ClipBound, Global.Config.Eps);
                break;

            case OptType.adam:
                Opt = new Adam(Global.Config.LearningRate, Global.Config.L2RegFactor, Global.Config.ClipBound, 0.999f, 0.9f, 1e-8f);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(optType), optType, null);
            }

            var eDim = Global.Config.TokenCount * Global.Config.EmbeddingSize;
            var hDim = Global.Config.HiddenSize;
            var oDim = _labelCount;

            switch (netType)
            {
            case NetType.ffnn:
                Net = new FFNN(eDim, hDim, oDim, Opt);
                break;

            case NetType.blstm:
                Net = new Blstm(eDim, hDim, oDim, Opt);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(netType), netType, null);
            }

            Net.InitEmbed(_embedd);
        }
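A hypothetical call site for these factories: the lower-case enum members (ffnn, blstm, sgd, ...) parse directly from config strings, and unknown names reach the same explicit error path the default branches throw on.

        if (!Enum.TryParse<NetType>("ffnn", out var netType))
            throw new ArgumentException("unknown net type");
        if (!Enum.TryParse<OptType>("adagrad", out var optType))
            throw new ArgumentException("unknown opt type");
        InitNet(netType, optType);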
Example #9
        // build an optimizer
        private void InitOpt(OptType optType)
        {
            switch (optType)
            {
            case OptType.sgd:
                Opt = new Sgd(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, _conf.DecayRate,
                              _dataset.Examples.Count);
                break;

            case OptType.adagrad:
                Opt = new AdaGrad(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, _conf.Eps);
                break;

            case OptType.adam:
                Opt = new Adam(l2RegFactor: _conf.L2RegFactor, clipRange: _conf.ClipBound);
                break;

            default:
                // should never reach this
                throw new ArgumentOutOfRangeException(nameof(optType), optType, null);
            }
        }
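For reference, the per-parameter update a call like Adam(lr, l2, clip, 0.999f, 0.9f, 1e-8f) in the earlier examples would perform; pairing the literals with beta2, beta1, and epsilon is an assumption about Adam's signature, and L2 regularization and clipping are omitted:

        // t is the 1-based step count; m and v carry the running moment estimates
        static double AdamStep(double w, double g, ref double m, ref double v, int t,
                               double lr, double b1 = 0.9, double b2 = 0.999, double eps = 1e-8)
        {
            m = b1 * m + (1 - b1) * g;              // first-moment (mean) estimate
            v = b2 * v + (1 - b2) * g * g;          // second-moment estimate
            var mHat = m / (1 - Math.Pow(b1, t));   // bias correction
            var vHat = v / (1 - Math.Pow(b2, t));
            return w - lr * mHat / (Math.Sqrt(vHat) + eps);
        }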
Example #10
 public MLPTop(Config conf, int nInput, OptBase opt) : base(conf, nInput, opt)
 {
     _k = conf.TrainTop;
 }
Example #11
 public MLPRand(Config conf, int nInput, int nOutput, OptBase opt) : base(conf, nInput, nOutput, opt)
 {
     _k = conf.TrainTop;
 }
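The TrainTop setting stored above (as _k here and in Example #10, _top in Example #1) presumably controls how many gradient components meProp keeps during backprop: the k largest by magnitude in the top variants, a random k in MLPRand. A minimal top-k selection sketch (illustrative, not the repo's code; requires using System.Linq, and ties at the threshold may keep a few extra entries):

        static void KeepTopK(double[] grad, int k)
        {
            if (k <= 0 || k >= grad.Length) return;
            var threshold = grad.Select(Math.Abs)      // k-th largest magnitude
                                .OrderByDescending(a => a)
                                .ElementAt(k - 1);
            for (var i = 0; i < grad.Length; i++)
                if (Math.Abs(grad[i]) < threshold) grad[i] = 0.0;
        }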