// Re-attaches a deserialized model to its configuration and training data:
// parameters are re-registered and the optimizer is rebuilt, while the
// learned weights themselves are left untouched.
public void Recover(Config conf, DataSet train)
{
    _train = train;
    _conf = conf;

    // Re-register the fixed (non-embedding) parameters of every sub-unit.
    FixedParams.AddRange(_surface.SubmitParameters());
    FixedParams.AddRange(_act.SubmitParameters());
    FixedParams.AddRange(_label.SubmitParameters());
    FixedParams.AddRange(_actOutput.SubmitParameters());
    FixedParams.AddRange(_labelOutput.SubmitParameters());
    FixedParams.Add(_non);

    // The full parameter set adds the form and POS-tag embeddings.
    AllParams.AddRange(FixedParams);
    AllParams.AddRange(_formemb);
    AllParams.AddRange(_posemb);

    // Rebuild the optimizer selected by the configuration.
    switch (_conf.OptType)
    {
        case OptType.sgd:
            _opt = new Sgd(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, _conf.DecayRate, train.Count);
            break;
        case OptType.adagrad:
            _opt = new AdaGrad(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, _conf.Eps);
            break;
        case OptType.adam:
            _opt = new Adam(_conf.LearningRate, _conf.L2RegFactor, _conf.ClipBound, 0.999f, 0.9f, 1e-8f);
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(_conf.OptType), "unknown opt type");
    }
}
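// Note: Recover and the constructor below duplicate the optimizer switch.
// A possible shared helper, shown as a sketch only: "BuildOptimizer" and
// the "IOptimizer" return type are assumed names, not part of the original
// code; substitute the project's actual optimizer base class.
private static IOptimizer BuildOptimizer(Config conf, int trainCount)
{
    switch (conf.OptType)
    {
        case OptType.sgd:
            return new Sgd(conf.LearningRate, conf.L2RegFactor, conf.ClipBound, conf.DecayRate, trainCount);
        case OptType.adagrad:
            return new AdaGrad(conf.LearningRate, conf.L2RegFactor, conf.ClipBound, conf.Eps);
        case OptType.adam:
            // Fixed Adam hyper-parameters, copied from the call sites above/below.
            return new Adam(conf.LearningRate, conf.L2RegFactor, conf.ClipBound, 0.999f, 0.9f, 1e-8f);
        default:
            throw new ArgumentOutOfRangeException(nameof(conf.OptType), "unknown opt type");
    }
}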
public MLP(Config conf, DataSet train)
{
    _train = train;
    _conf = conf;
    _rng = new RandomNumberGenerator(conf.Seed);
    _rngChoice = new RandomNumberGenerator(conf.Seed);

    // Word-form embeddings: load pre-trained vectors when an embedding file
    // exists, otherwise initialize them randomly within conf.InitRange.
    _formemb = File.Exists(conf.EmbedFile)
        ? InitEmbed(conf.EmbedFile, train.Form, conf.InitRange)
        : InitEmbed(train.Form, conf.InitRange);
    _posemb = InitEmbed(train.PosTag, conf.InitRange);

    // Placeholder for positions with no BiLSTM state (sized to one BiLSTM output).
    _non = new Tensor(1, conf.HiddenSize * 2, true);

    // Layers: a BiLSTM over concatenated form and POS embeddings (hence
    // EmbeddingSize * 2), feeding a dense action head with three output
    // classes and a dense label head sized to the dependency label set.
    _surface = new BiLstmUnit(conf.EmbeddingSize * 2, conf.HiddenSize);
    _act = new DenseUnit(conf.HiddenSize * 6, conf.HiddenSize);
    _actOutput = new DenseUnit(conf.HiddenSize, 3);
    _label = new DenseUnit(conf.HiddenSize * 6, conf.HiddenSize);
    _labelOutput = new DenseUnit(conf.HiddenSize * 3, train.DepLabel.Count);

    // Register the fixed (non-embedding) parameters of every sub-unit.
    FixedParams.AddRange(_surface.SubmitParameters());
    FixedParams.AddRange(_act.SubmitParameters());
    FixedParams.AddRange(_label.SubmitParameters());
    FixedParams.AddRange(_actOutput.SubmitParameters());
    FixedParams.AddRange(_labelOutput.SubmitParameters());
    FixedParams.Add(_non);

    // The full parameter set adds the form and POS-tag embeddings.
    AllParams.AddRange(FixedParams);
    AllParams.AddRange(_formemb);
    AllParams.AddRange(_posemb);

    // Glorot (Xavier) normal initialization for every trainable unit.
    _surface.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);
    _act.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);
    _label.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);
    _actOutput.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);
    _labelOutput.Init((fin, fout) => new GlorotNormalInit(fin, fout, _rng).Next);

    // Build the optimizer selected by the configuration.
    switch (conf.OptType)
    {
        case OptType.sgd:
            _opt = new Sgd(conf.LearningRate, conf.L2RegFactor, conf.ClipBound, conf.DecayRate, train.Count);
            break;
        case OptType.adagrad:
            _opt = new AdaGrad(conf.LearningRate, conf.L2RegFactor, conf.ClipBound, conf.Eps);
            break;
        case OptType.adam:
            _opt = new Adam(conf.LearningRate, conf.L2RegFactor, conf.ClipBound, 0.999f, 0.9f, 1e-8f);
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(conf.OptType), "unknown opt type");
    }
    _opt.Prepare(AllParams);
    G.SetRng(conf.Seed);
}
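// Minimal usage sketch (comment-only so this file stays compilable):
// LoadConfig and LoadDataSet are hypothetical helpers, not part of this
// code base; only the MLP constructor and Recover signatures above are real.
//
//     var conf = LoadConfig("parser.conf");      // hypothetical helper
//     var train = LoadDataSet("train.conll");    // hypothetical helper
//
//     // Fresh run: the constructor initializes embeddings, layers, and
//     // the optimizer from scratch.
//     var model = new MLP(conf, train);
//
//     // After deserializing a saved model ('restored' here), Recover
//     // re-registers its parameters and rebuilds the optimizer without
//     // re-initializing the learned weights.
//     restored.Recover(conf, train);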