public LevenbergMarquardt(objective_func obj_func, List<double> inputs, List<Value> modelParams,
                          model_func model, model_func model_jac, double lambda = 0.001,
                          double obj_error = 0.00001, int max_iter = 10000, int rnd_seed = 0)
{
    if (inputs.Count == 0)
        throw new ApplicationException("Number of input data points must be > 0");
    if (modelParams.Count == 0)
        throw new ApplicationException("Number of model parameters must be > 0");

    _obj_func = obj_func;
    _model = model;
    _jac_func = model_jac;
    _lambda = lambda;
    _max_iter = max_iter;

    // Copy the inputs into an n x 1 column matrix and evaluate the objective on it.
    _inputs = new NRealMatrix(inputs.Count, 1);
    _inputs.SetArray((from input in inputs select new NDouble[] { new NDouble(input) }).ToArray());
    _outputs = _obj_func(_inputs);
    _modelParams = modelParams;

    // Seed for the random number generator used to initialize the weights.
    var seed = new MLapack.MCJIMatrix(4, 1);
    var rndSeed = rnd_seed == 0 ? 321 : rnd_seed;
    seed.setAt(0, 0, rndSeed);
    seed.setAt(1, 0, rndSeed);
    seed.setAt(2, 0, rndSeed);
    seed.setAt(3, 0, rndSeed);

    // Check whether an initial guess has been provided (any non-zero parameter).
    bool modelParamInitialized = false;
    foreach (var weight in modelParams)
    {
        if (weight.X != 0)
            modelParamInitialized = true;
    }

    if (modelParamInitialized)
    {
        // Use the provided guess as the starting weights.
        _weights = new NRealMatrix(1, modelParams.Count);
        _weights.SetArray(new NDouble[][] { (from param in modelParams select new NDouble(param.X)).ToArray() });
    }
    else
    {
        // Otherwise initialize the weights from a uniform random distribution in [0, 1),
        // rescaled to [-1, 1) and divided by sqrt(n), where n is the number of input points.
        _weights = LapackLib.Instance.RandomMatrix(RandomDistributionType.Uniform_0_1, seed, 1, modelParams.Count);
        for (int idxWeight = 0; idxWeight < _weights.Columns; idxWeight++)
            _weights[0, idxWeight] = (_weights[0, idxWeight] * 2.0 - 1.0) / Math.Sqrt(inputs.Count);
    }

    _obj_error = obj_error;
    _error = calcError(_weights);
    _totalError = calcTotalError(_error);
    _startError = _totalError;
}
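For context, after this setup a Levenberg-Marquardt solver typically iterates the damped update w ← w − (JᵀJ + λI)⁻¹Jᵀr, where J is the Jacobian of the model, r the residual vector and λ the damping factor passed to the constructor above. The following is a minimal, self-contained sketch of that step using plain arrays; the LmSketch class and its Model, Jacobian and Solve helpers are illustrative stand-ins and are not part of the NRealMatrix/LapackLib wrappers used by the class above.

using System;

static class LmSketch
{
    // Example model: y = w0 * x + w1 (two parameters, linear in x).
    static double Model(double x, double[] w) => w[0] * x + w[1];

    // Partial derivatives of the model with respect to each parameter.
    static double[] Jacobian(double x, double[] w) => new[] { x, 1.0 };

    // Solve A * x = b by Gaussian elimination with partial pivoting.
    static double[] Solve(double[,] A, double[] b)
    {
        int n = b.Length;
        for (int k = 0; k < n; k++)
        {
            int p = k;
            for (int i = k + 1; i < n; i++)
                if (Math.Abs(A[i, k]) > Math.Abs(A[p, k])) p = i;
            for (int j = 0; j < n; j++) { var t = A[k, j]; A[k, j] = A[p, j]; A[p, j] = t; }
            { var t = b[k]; b[k] = b[p]; b[p] = t; }
            for (int i = k + 1; i < n; i++)
            {
                double f = A[i, k] / A[k, k];
                for (int j = k; j < n; j++) A[i, j] -= f * A[k, j];
                b[i] -= f * b[k];
            }
        }
        var x = new double[n];
        for (int i = n - 1; i >= 0; i--)
        {
            double s = b[i];
            for (int j = i + 1; j < n; j++) s -= A[i, j] * x[j];
            x[i] = s / A[i, i];
        }
        return x;
    }

    // One damped LM step: accumulate J^T J + lambda * I and J^T r over the
    // data points, then solve for the parameter increment.
    static double[] LmStep(double[] xs, double[] ys, double[] w, double lambda)
    {
        int n = w.Length;
        var JtJ = new double[n, n];
        var Jtr = new double[n];
        for (int k = 0; k < xs.Length; k++)
        {
            double r = Model(xs[k], w) - ys[k];   // residual at point k
            double[] j = Jacobian(xs[k], w);
            for (int a = 0; a < n; a++)
            {
                Jtr[a] += j[a] * r;
                for (int b = 0; b < n; b++) JtJ[a, b] += j[a] * j[b];
            }
        }
        for (int a = 0; a < n; a++) JtJ[a, a] += lambda;   // damping term
        double[] delta = Solve(JtJ, Jtr);
        var wNew = new double[n];
        for (int a = 0; a < n; a++) wNew[a] = w[a] - delta[a];
        return wNew;
    }

    static void Main()
    {
        var xs = new[] { 0.0, 1.0, 2.0, 3.0 };
        var ys = new[] { 1.0, 3.1, 4.9, 7.2 };            // roughly y = 2x + 1
        var w = new[] { 0.0, 0.0 };
        for (int iter = 0; iter < 20; iter++) w = LmStep(xs, ys, w, 0.001);
        Console.WriteLine($"fitted: {w[0]:F3}, {w[1]:F3}"); // expect ~2 and ~1
    }
}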