/// <summary>
/// Adds copies of the parameters in the list to the parameters contained in this component.
/// Parameters already present (same id, or same name/unit/propagation) are skipped.
/// </summary>
/// <param name="_parameters">parameters to merge into this component; may be null or empty</param>
private void AddParametersFromList(List<Parameter.Parameter> _parameters)
{
    if (_parameters == null || _parameters.Count == 0)
        return;

    foreach (Parameter.Parameter candidate in _parameters)
    {
        // already contained under the same id
        if (this.ContainedParameters.ContainsKey(candidate.ID))
            continue;

        // identical name, unit and propagation counts as a duplicate
        // (happens if this method was called at least once before)
        Parameter.Parameter duplicate = this.ContainedParameters
            .FirstOrDefault(x => x.Value.Name == candidate.Name &&
                                 x.Value.Unit == candidate.Unit &&
                                 x.Value.Propagation == candidate.Propagation).Value;
        if (duplicate != null)
            continue;

        candidate.PropertyChanged += param_PropertyChanged;
        this.ContainedParameters.Add(candidate.ID, candidate);
        this.Category |= candidate.Category;
    }
}
/**
 * <summary> Training algorithm for auto encoders. An auto encoder is a neural network which
 * attempts to replicate its input at its output.</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm.</param>
 * <param name="parameters">Parameters of the auto encoder.</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    // hold out 20 percent of the data as a cross-validation set, seeded for reproducibility
    var split = trainSet.StratifiedPartition(0.2, new Random(parameters.GetSeed()));
    var mlpParameters = (MultiLayerPerceptronParameter)parameters;
    model = new AutoEncoderModel(split.Get(1), split.Get(0), mlpParameters);
}
/**
 * <summary> Training algorithm for deep network classifier.</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm.</param>
 * <param name="parameters">Parameters of the deep network algorithm. crossValidationRatio and seed are used as parameters.</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    var deepNetParameters = (DeepNetworkParameter)parameters;
    // split off a seeded, stratified cross-validation portion for weight selection
    var split = trainSet.StratifiedPartition(deepNetParameters.GetCrossValidationRatio(),
        new Random(parameters.GetSeed()));
    model = new DeepNetworkModel(split.Get(1), split.Get(0), deepNetParameters);
}
/// <summary>
/// <para>Recursive extraction of all parameters, including all sub-components.</para>
/// <para>Parameters with Propagation CALC_IN (calculated from NW) are not included.</para>
/// <para>They are used for cumulative values including all instances of the component.</para>
/// </summary>
/// <returns>parameter name mapped to its current value; first occurrence of a name wins</returns>
protected Dictionary<string, double> ExtractParameterValues()
{
    var values = new Dictionary<string, double>();
    foreach (var entry in this.GetFlatParamsList())
    {
        Parameter.Parameter p = entry.Value;
        if (p == null || p.Propagation == InfoFlow.CALC_IN)
            continue;
        // only the first parameter with a given name is recorded
        if (!values.ContainsKey(p.Name))
            values.Add(p.Name, p.ValueCurrent);
    }
    return values;
}
/**
 * <summary> Training algorithm for the linear discriminant analysis classifier (Introduction to Machine Learning, Alpaydin, 2015).</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm.</param>
 * <param name="parameters">-</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    Vector averageVector;
    // per-class discriminant parameters: w0[c] is the bias, w[c] the weight vector
    var w0 = new Dictionary<string, double>();
    var w = new Dictionary<string, Vector>();
    var priorDistribution = trainSet.ClassDistribution();
    var classLists = trainSet.DivideIntoClasses();
    // pooled covariance, accumulated over all classes (LDA assumes a shared covariance)
    var covariance = new Matrix(trainSet.Get(0).ContinuousAttributeSize(), trainSet.Get(0).ContinuousAttributeSize());
    for (var i = 0; i < classLists.Size(); i++)
    {
        averageVector = new Vector(classLists.Get(i).ContinuousAttributeAverage());
        var classCovariance = classLists.Get(i).Covariance(averageVector);
        // weight each class covariance by its degrees of freedom (n_i - 1)
        classCovariance.MultiplyWithConstant(classLists.Get(i).Size() - 1);
        covariance.Add(classCovariance);
    }
    // normalize by total degrees of freedom (N - K), then invert in place
    covariance.DivideByConstant(trainSet.Size() - classLists.Size());
    covariance.Inverse();
    for (var i = 0; i < classLists.Size(); i++)
    {
        var ci = ((InstanceListOfSameClass)classLists.Get(i)).GetClassLabel();
        averageVector = new Vector(classLists.Get(i).ContinuousAttributeAverage());
        // w_i = S^-1 * m_i
        var wi = covariance.MultiplyWithVectorFromRight(averageVector);
        w[ci] = wi;
        // w0_i = -1/2 * w_i . m_i + log(P(C_i))
        var w0i = -0.5 * wi.DotProduct(averageVector) + System.Math.Log(priorDistribution.GetProbability(ci));
        w0[ci] = w0i;
    }
    model = new LdaModel(priorDistribution, w, w0);
}
/// <summary>
/// Instantiates the repeater formatter.
/// </summary>
/// <param name="startTagParameter">start tag parameter</param>
/// <param name="endTagParameter">end tag parameter</param>
/// <param name="dataSource">data source</param>
/// <param name="formatters">collection of formatters</param>
public RepeaterFormatter(Parameter.Parameter startTagParameter, Parameter.Parameter endTagParameter, IEnumerable<TSource> dataSource, params EmbeddedFormatter<TSource>[] formatters) : base(dataSource, formatters)
{
    StartTagParameter = startTagParameter;
    EndTagParameter = endTagParameter;
}
/// <summary>
/// For calculator components: To be called only once, after the first mapping as a calculator.
/// Snapshots the current value of every (flat) parameter into the first instance.
/// </summary>
internal void SaveDefaultValuesBeforeCalculation()
{
    // already saved once -> nothing to do
    if (this.R2GInstances[0].InstanceParamValues.Count > 0)
        return;

    var defaults = new Dictionary<string, double>();
    foreach (var entry in this.GetFlatParamsList())
    {
        Parameter.Parameter p = entry.Value;
        if (p == null)
            continue;
        // first occurrence of a parameter name wins
        if (!defaults.ContainsKey(p.Name))
            defaults.Add(p.Name, p.ValueCurrent);
    }
    this.R2GInstances[0].InstanceParamValues = defaults;
}
/**
 * <summary> Training algorithm for the quadratic discriminant analysis classifier (Introduction to Machine Learning, Alpaydin, 2015).</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm.</param>
 * <param name="parameters">-</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    // per-class discriminant parameters: bias w0[c], linear term w[c], quadratic term W[c]
    var w0 = new Dictionary<string, double>();
    var w = new Dictionary<string, Vector>();
    var W = new Dictionary<string, Matrix>();
    var classLists = trainSet.DivideIntoClasses();
    var priorDistribution = trainSet.ClassDistribution();
    for (var i = 0; i < classLists.Size(); i++)
    {
        var ci = ((InstanceListOfSameClass)classLists.Get(i)).GetClassLabel();
        var averageVector = new Vector(classLists.Get(i).ContinuousAttributeAverage());
        var classCovariance = classLists.Get(i).Covariance(averageVector);
        // determinant must be taken BEFORE Inverse() mutates the matrix in place
        var determinant = classCovariance.Determinant();
        classCovariance.Inverse();
        // W_i = -1/2 * S_i^-1 (cloned so the inverse can also be used for w_i below)
        var Wi = (Matrix)classCovariance.Clone();
        Wi.MultiplyWithConstant(-0.5);
        W[ci] = Wi;
        // w_i = S_i^-1 * m_i
        var wi = classCovariance.MultiplyWithVectorFromLeft(averageVector);
        w[ci] = wi;
        // w0_i = -1/2 * (w_i . m_i + log|S_i|) + log(P(C_i))
        var w0i = -0.5 * (wi.DotProduct(averageVector) + System.Math.Log(determinant)) + System.Math.Log(priorDistribution.GetProbability(ci));
        w0[ci] = w0i;
    }
    model = new QdaModel(priorDistribution, W, w, w0);
}
/// <summary>
/// Extracts the setting for each parameter - to be displayed in the instance or not.
/// Parameters with Propagation CALC_IN are not included.
/// </summary>
/// <returns>parameter name mapped to its display flag; first occurrence of a name wins</returns>
protected Dictionary<string, bool> ExtractParamDisplayInInstance()
{
    var displayFlags = new Dictionary<string, bool>();
    foreach (var entry in this.GetFlatParamsList())
    {
        Parameter.Parameter p = entry.Value;
        if (p == null || p.Propagation == InfoFlow.CALC_IN)
            continue;
        // only the first parameter with a given name is recorded
        if (!displayFlags.ContainsKey(p.Name))
            displayFlags.Add(p.Name, p.ShowInCompInstDisplay);
    }
    return displayFlags;
}
/// <summary>
/// Searches the flattened component list for the first parameter for which this
/// structure node has a match (matched by id and name).
/// </summary>
/// <param name="_comps">components to search, including their sub-components</param>
/// <param name="_comp_parent">set to the component owning the matched parameter, or null</param>
/// <returns>the matched parameter, or null if none was found</returns>
public Parameter.Parameter FindParameterMatchIn(List<Component.Component> _comps, out Component.Component _comp_parent)
{
    _comp_parent = null;
    if (_comps == null || _comps.Count == 0)
        return null;

    foreach (Component.Component c in Component.Component.GetFlattenedListOf(_comps))
    {
        foreach (var entry in c.ContainedParameters)
        {
            Parameter.Parameter p = entry.Value;
            if (p == null)
                continue;
            StructureNode hit = this.FindMatchFor(p.ID, -1, -1, p.Name, typeof(ParameterStructure.Parameter.Parameter));
            if (hit == null)
                continue;
            _comp_parent = c;
            return p;
        }
    }
    return null;
}
/**
 * <summary> Training algorithm for the linear perceptron algorithm. 20 percent of the data is separated as cross-validation
 * data used for selecting the best weights. 80 percent of the data is used for training the linear perceptron with
 * gradient descent.</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm</param>
 * <param name="parameters">Parameters of the linear perceptron.</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    var perceptronParameters = (LinearPerceptronParameter)parameters;
    // seeded, stratified split: part 1 trains, part 0 validates the weights
    var split = trainSet.StratifiedPartition(perceptronParameters.GetCrossValidationRatio(),
        new Random(parameters.GetSeed()));
    model = new LinearPerceptronModel(split.Get(1), split.Get(0), perceptronParameters);
}
/// <summary>
/// Runs K train/test rounds: trains the classifier on each cross-validation train fold
/// and accumulates its performance on the fixed test set.
/// </summary>
/// <param name="classifier">classifier to train and evaluate</param>
/// <param name="parameter">training parameters passed to the classifier</param>
/// <param name="experimentPerformance">accumulator for the per-fold test results</param>
/// <param name="crossValidation">provider of the K train folds</param>
/// <param name="testSet">fixed test set evaluated after each fold's training</param>
protected void RunExperiment(Classifier.Classifier classifier, Parameter.Parameter parameter, ExperimentPerformance experimentPerformance, CrossValidation<Instance.Instance> crossValidation, InstanceList.InstanceList testSet)
{
    for (var fold = 0; fold < K; fold++)
    {
        var foldTrainSet = new InstanceList.InstanceList(crossValidation.GetTrainFold(fold));
        classifier.Train(foldTrainSet, parameter);
        experimentPerformance.Add(classifier.Test(testSet));
    }
}
/**
 * <summary> Training algorithm for K-Means classifier. K-Means finds the mean of each class for training.</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm.</param>
 * <param name="parameters">distanceMetric: distance metric used to calculate the distance between two instances.</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    var priorDistribution = trainSet.ClassDistribution();
    var classLists = trainSet.DivideIntoClasses();
    // one prototype (mean instance) per class
    var classMeans = new InstanceList.InstanceList();
    for (var classIndex = 0; classIndex < classLists.Size(); classIndex++)
    {
        classMeans.Add(classLists.Get(classIndex).Average());
    }
    model = new KMeansModel(priorDistribution, classMeans, ((KMeansParameter)parameters).GetDistanceMetric());
}
/// <summary>
/// Checks whether any automatically generated sub-component carries ALL parameters
/// required for the given auto function (matched by name, unit and propagation).
/// </summary>
/// <param name="_fct">the auto function whose required parameter set is checked</param>
/// <returns>true if at least one auto-generated sub-component contains all required parameters</returns>
private bool ContainsValidAutoSubcomponentFor(ComponentAutoFunction _fct)
{
    List<Parameter.Parameter> requiredParams =
        (_fct == ComponentAutoFunction.CUMULATION)
            ? Parameter.Parameter.GetCumulativeParametersForInstancing()
            : Parameter.Parameter.GetSizeParametersForInstancing();

    foreach (var entry in this.ContainedComponents)
    {
        Component c = entry.Value;
        if (c == null || !c.IsAutomaticallyGenerated)
            continue;

        // check the specific parameters
        // NOTE: an empty required list leaves this flag true, i.e. counts as "not all contained"
        bool missedAtLeastOne = true;
        foreach (Parameter.Parameter required in requiredParams)
        {
            missedAtLeastOne = false;
            Parameter.Parameter corresponding = c.ContainedParameters
                .FirstOrDefault(x => x.Value.Name == required.Name &&
                                     x.Value.Unit == required.Unit &&
                                     x.Value.Propagation == required.Propagation).Value;
            if (corresponding == null)
            {
                missedAtLeastOne = true;
                break;
            }
        }
        if (!missedAtLeastOne)
            return true;
    }
    return false;
}
/**
 * <summary> Bagging bootstrap ensemble method that creates individuals for its ensemble by training each classifier on a random
 * redistribution of the training set. The number of trees (ensembleSize) is a parameter, and the
 * method learns an ensemble of decision trees as a model, each grown on its own bootstrap resample.</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm.</param>
 * <param name="parameters">Parameters of the bagging trees algorithm. ensembleSize returns the number of trees in the bagged forest.</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    var ensembleSize = ((BaggingParameter)parameters).GetEnsembleSize();
    var forest = new List<DecisionTree>();
    for (var treeIndex = 0; treeIndex < ensembleSize; treeIndex++)
    {
        // tree index doubles as the bootstrap seed so each tree sees a different resample
        var bootstrap = trainSet.Bootstrap(treeIndex);
        var bootstrapList = new InstanceList.InstanceList(bootstrap.GetSample());
        forest.Add(new DecisionTree(new DecisionNode(bootstrapList, null, null, false)));
    }
    model = new TreeEnsembleModel(forest);
}
/**
 * <summary> Training algorithm for Naive Bayes algorithm. It basically calls trainContinuousVersion for continuous data sets,
 * trainDiscreteVersion for discrete data sets.</summary>
 * <param name="trainSet">Training data given to the algorithm</param>
 * <param name="parameters">-</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    var priorDistribution = trainSet.ClassDistribution();
    var classLists = trainSet.DivideIntoClasses();
    // inspect the first attribute of the first instance to pick the training variant
    var firstAttribute = classLists.Get(0).Get(0).GetAttribute(0);
    if (firstAttribute is DiscreteAttribute)
    {
        TrainDiscreteVersion(priorDistribution, classLists);
    }
    else
    {
        TrainContinuousVersion(priorDistribution, classLists);
    }
}
/**
 * <summary> Training algorithm for C4.5 univariate decision tree classifier. If pruning is enabled,
 * a cross-validation portion of the data is left aside for pruning and the rest is used for
 * constructing the tree; otherwise the whole training set builds the tree.</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm.</param>
 * <param name="parameters">-</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    var c45Parameters = (C45Parameter)parameters;
    DecisionTree tree;
    if (c45Parameters.IsPrune())
    {
        // seeded, stratified split: part 1 grows the tree, part 0 prunes it
        var split = trainSet.StratifiedPartition(c45Parameters.GetCrossValidationRatio(),
            new Random(parameters.GetSeed()));
        tree = new DecisionTree(new DecisionNode(split.Get(1), null, null, false));
        tree.Prune(split.Get(0));
    }
    else
    {
        tree = new DecisionTree(new DecisionNode(trainSet, null, null, false));
    }
    model = tree;
}
/// <summary>
/// <para>Returns the value, as a formatted string, in the parameter slot of the instance placed in the NW element.</para>
/// <para>The parameter can be contained in this component or ANY of its sub-components.</para>
/// </summary>
/// <param name="_container">the NW element whose instance is queried</param>
/// <param name="_param_suffix">suffix identifying the parameter</param>
/// <returns>the value formatted with "F2", or an empty string if not resolvable</returns>
internal string GetParamValueOfInstance(FlNetElement _container, string _param_suffix)
{
    if (_container == null || string.IsNullOrEmpty(_param_suffix))
        return string.Empty;

    Parameter.Parameter param = this.GetFirstParamBySuffix(_param_suffix);
    if (param == null)
        return string.Empty;

    foreach (GeometricRelationship gr in this.R2GInstances)
    {
        if (gr.InstanceNWElementID != _container.ID)
            continue;

        // lazily populate the instance values on first access
        if (gr.InstanceParamValues == null || gr.InstanceParamValues.Count == 0)
            this.UpdateInstanceIn(_container, new Point(0, 0), false);

        // only the first instance matching the container is considered
        if (gr.InstanceParamValues.ContainsKey(param.Name))
            return Parameter.Parameter.ValueToString(gr.InstanceParamValues[param.Name], "F2");
        return string.Empty;
    }
    return string.Empty;
}
/// <summary>
/// Creates a structure node for a parameter. A parameter node must have a parent
/// node whose content type is a component; otherwise null is returned.
/// </summary>
/// <param name="_node_source">the parameter to wrap; may be null</param>
/// <param name="_sn_parent">the parent node, required and of component content type</param>
/// <returns>the new node, or null if the preconditions are not met</returns>
protected static StructureNode CreateFrom(Parameter.Parameter _node_source, StructureNode _sn_parent)
{
    if (_node_source == null)
        return null;
    // a parameter node cannot be w/o parent component
    if (_sn_parent == null)
        return null;
    if (!_sn_parent.ContentType_Used || _sn_parent.ContentType == null)
        return null;
    if (_sn_parent.ContentType != typeof(Component.Component))
        return null;

    // create the node: content from the parameter, structure from the parent
    var node = new StructureNode
    {
        IDAsLong = _node_source.ID,
        IDAsLong_Used = true,
        IDAsString = _node_source.Name,
        IDAsString_Used = true,
        ContentType = typeof(Parameter.Parameter),
        ContentType_Used = true,
        ParentNode = _sn_parent
    };
    return node;
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="parameter">the parameter</param>
/// <param name="value">the value</param>
protected SimpleFormatter(Parameter.Parameter parameter, object value)
{
    Parameter = parameter;
    Value = value;
}
/**
 * <summary> Training algorithm for the dummy classifier. Actually dummy classifier returns the maximum occurring class in
 * the training data, there is no training.</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm.</param>
 * <param name="parameters">-</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    // no optimisation takes place; the model is built directly from the training data
    model = new DummyModel(trainSet);
}
/// <summary>
/// Runs a single train/test round of the classifier using fold 0 of the given
/// cross-validation as the train/test split.
/// </summary>
/// <param name="classifier">classifier to train and evaluate</param>
/// <param name="parameter">training parameters passed to the classifier</param>
/// <param name="crossValidation">provider of the train and test folds</param>
/// <returns>the performance of the trained classifier on the test fold</returns>
protected Performance.Performance runExperiment(Classifier.Classifier classifier, Parameter.Parameter parameter, CrossValidation<Instance.Instance> crossValidation)
{
    var foldTrainSet = new InstanceList.InstanceList(crossValidation.GetTrainFold(0));
    var foldTestSet = new InstanceList.InstanceList(crossValidation.GetTestFold(0));
    return classifier.SingleRun(parameter, foldTrainSet, foldTestSet);
}
/// <summary>
/// Recomputes the cumulative total parameters (length, area, volume min/max and count)
/// from the sizes of all geometric instances. Each total is written only if the
/// corresponding parameter exists on this component.
/// </summary>
protected void UpdateCumulativeValuesFromInstances()
{
    // cumulative target parameters; any of them may be absent on this component
    Parameter.Parameter pLengthMin = this.GetFirstParamByName(Parameter.Parameter.RP_LENGTH_MIN_TOTAL);
    Parameter.Parameter pLengthMax = this.GetFirstParamByName(Parameter.Parameter.RP_LENGTH_MAX_TOTAL);
    Parameter.Parameter pAreaMin = this.GetFirstParamByName(Parameter.Parameter.RP_AREA_MIN_TOTAL);
    Parameter.Parameter pAreaMax = this.GetFirstParamByName(Parameter.Parameter.RP_AREA_MAX_TOTAL);
    Parameter.Parameter pVolumeMin = this.GetFirstParamByName(Parameter.Parameter.RP_VOLUME_MIN_TOTAL);
    Parameter.Parameter pVolumeMax = this.GetFirstParamByName(Parameter.Parameter.RP_VOLUME_MAX_TOTAL);
    Parameter.Parameter pCount = this.GetFirstParamByName(Parameter.Parameter.RP_COUNT);

    double sumLengthMin = 0, sumLengthMax = 0;
    double sumAreaMin = 0, sumAreaMax = 0;
    double sumVolumeMin = 0, sumVolumeMax = 0;

    foreach (GeometricRelationship gr in this.R2GInstances)
    {
        // require a full size record; a zero first entry marks an instance without usable size
        if (gr.InstanceSize == null || gr.InstanceSize.Count < 6)
            continue;
        if (gr.InstanceSize[0] == 0)
            continue;

        // indices 0..2 feed the MIN totals, indices 3..5 the MAX totals
        sumLengthMin += gr.InstanceSize[2];
        sumLengthMax += gr.InstanceSize[5];
        sumAreaMin += gr.InstanceSize[0] * gr.InstanceSize[1];
        sumAreaMax += gr.InstanceSize[3] * gr.InstanceSize[4];
        sumVolumeMin += gr.InstanceSize[0] * gr.InstanceSize[1] * gr.InstanceSize[2];
        sumVolumeMax += gr.InstanceSize[3] * gr.InstanceSize[4] * gr.InstanceSize[5];
    }

    if (pLengthMin != null) pLengthMin.ValueCurrent = sumLengthMin;
    if (pLengthMax != null) pLengthMax.ValueCurrent = sumLengthMax;
    if (pAreaMin != null) pAreaMin.ValueCurrent = sumAreaMin;
    if (pAreaMax != null) pAreaMax.ValueCurrent = sumAreaMax;
    if (pVolumeMin != null) pVolumeMin.ValueCurrent = sumVolumeMin;
    if (pVolumeMax != null) pVolumeMax.ValueCurrent = sumVolumeMax;
    if (pCount != null) pCount.ValueCurrent = this.R2GInstances.Count;
}
/// <summary>
/// Applies the given input values to this component's parameters, executes all
/// calculation chains, and writes the resulting parameter values back into the
/// output dictionary. Both dictionaries map parameter id to value. The operation
/// is all-or-nothing: if ANY input or output entry is invalid, nothing happens.
/// </summary>
/// <param name="_input_values">values to apply before calculating; keys must be non-OUTPUT parameter ids</param>
/// <param name="_output_values">slots to fill after calculating; keys must be writable (non-input) parameter ids</param>
private void CalculateAndMap(Dictionary <long, double> _input_values, ref Dictionary <long, double> _output_values)
{
    // check the applicability of the value lists
    if (_input_values == null || _output_values == null) { return; }
    if (_input_values.Count == 0 || _output_values.Count == 0) { return; }
    Dictionary <long, Parameter.Parameter> all_params = this.GetFlatParamsList();
    // every input key must resolve to a known, non-output parameter
    foreach (var entry in _input_values)
    {
        if (!all_params.ContainsKey(entry.Key)) { return; }
        Parameter.Parameter p = all_params[entry.Key];
        if (p == null) { return; }
        // NOTE(review): "OUPUT" spelling comes from the InfoFlow enum declared elsewhere
        if (p.Propagation == InfoFlow.OUPUT) { return; }
    }
    // every output key must resolve to a known parameter that is not input-only
    foreach (var entry in _output_values)
    {
        if (!all_params.ContainsKey(entry.Key)) { return; }
        Parameter.Parameter p = all_params[entry.Key];
        if (p == null) { return; }
        if (p.Propagation == InfoFlow.INPUT || p.Propagation == InfoFlow.CALC_IN || p.Propagation == InfoFlow.REF_IN) { return; }
    }
    // apply input values:
    foreach (var entry in _input_values)
    {
        all_params[entry.Key].ValueCurrent = entry.Value;
    }
    // calculate
    this.ExecuteAllCalculationChains();
    // apply output values:
    // (collected into a separate dictionary first, since _output_values cannot be
    // modified while it is being enumerated)
    Dictionary <long, double> results = new Dictionary <long, double>();
    foreach (var entry in _output_values)
    {
        results.Add(entry.Key, all_params[entry.Key].ValueCurrent);
    }
    foreach (var entry in results)
    {
        _output_values[entry.Key] = entry.Value;
    }
}
/// <summary>
/// Trains the classifier on the given training set. Implemented by each concrete classifier.
/// </summary>
/// <param name="trainSet">Training data given to the algorithm.</param>
/// <param name="parameters">Algorithm-specific training parameters.</param>
public abstract void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters);
/**
 * <summary> Runs current classifier with the given train and test data.</summary>
 *
 * <param name="parameter">Parameter of the classifier to be trained.</param>
 * <param name="trainSet"> Training data to be used in training the classifier.</param>
 * <param name="testSet"> Test data to be tested after training the model.</param>
 * <returns>The accuracy (and error) of the trained model as an instance of Performance class.</returns>
 */
public Performance.Performance SingleRun(Parameter.Parameter parameter, InstanceList.InstanceList trainSet, InstanceList.InstanceList testSet)
{
    // fit on the training data, then score on the held-out test data
    Train(trainSet, parameter);
    return Test(testSet);
}
/**
 * <summary> Training algorithm for K-nearest neighbor classifier.</summary>
 *
 * <param name="trainSet"> Training data given to the algorithm.</param>
 * <param name="parameters">K: k parameter of the K-nearest neighbor algorithm
 * distanceMetric: distance metric used to calculate the distance between two instances.</param>
 */
public override void Train(InstanceList.InstanceList trainSet, Parameter.Parameter parameters)
{
    var knnParameters = (KnnParameter)parameters;
    // lazy learner: the model stores the training set together with k and the metric
    model = new KnnModel(trainSet, knnParameters.GetK(), knnParameters.GetDistanceMetric());
}
/// <summary>
/// Instantiates the table formatter.
/// </summary>
/// <param name="tagParameter">tag parameter</param>
/// <param name="dataSource">data source</param>
/// <param name="formatters">collection of formatters</param>
public TableFormatter(Parameter.Parameter tagParameter, IEnumerable <TSource> dataSource, params EmbeddedFormatter <TSource>[] formatters) : base(dataSource, formatters)
{
    TagParameter = tagParameter;
}
/// <summary>
/// Instantiates the part formatter.
/// </summary>
/// <param name="parameter">the parameter</param>
/// <param name="value">the value</param>
public PartFormatter(Parameter.Parameter parameter, object value) : base(parameter, value)
{
}
/// <summary>
/// Creates a structure node for a component, including child nodes for its parameters,
/// its geometric relationships and (recursively) its sub-components.
/// </summary>
/// <param name="_node_source">the component to wrap; may be null</param>
/// <param name="_sn_parent">optional parent node; must be of component content type and not the component itself</param>
/// <returns>the new node, or null if the preconditions are not met</returns>
public static StructureNode CreateFrom(Component.Component _node_source, StructureNode _sn_parent)
{
    if (_node_source == null)
        return null;

    // create the node and fill in the content
    var node = new StructureNode();
    node.IDAsLong = _node_source.ID;
    node.IDAsLong_Used = true;
    node.ContentType = typeof(Component.Component);
    node.ContentType_Used = true;

    // structure: if a parent is given it has to be a component, and no self-parenting
    if (_sn_parent != null)
    {
        if (!_sn_parent.ContentType_Used || _sn_parent.ContentType == null)
            return null;
        if (_sn_parent.ContentType != typeof(Component.Component))
            return null;
        if (_sn_parent.IDAsLong == _node_source.ID)
            return null;
        node.ParentNode = _sn_parent;
    }

    // children: parameters first ...
    foreach (var entry in _node_source.ContainedParameters)
    {
        Parameter.Parameter p = entry.Value;
        if (p == null)
            continue;
        StructureNode paramNode = StructureNode.CreateFrom(p, node);
        if (paramNode != null)
            node.children_nodes.Add(paramNode);
    }
    // ... then geometric relationships ...
    foreach (GeometricRelationship gr in _node_source.R2GInstances)
    {
        StructureNode grNode = StructureNode.CreateFrom(gr, node);
        if (grNode != null)
            node.children_nodes.Add(grNode);
    }
    // ... then sub-components (recursion)
    foreach (var entry in _node_source.ContainedComponents)
    {
        Component.Component subComponent = entry.Value;
        if (subComponent == null)
            continue;
        StructureNode subNode = StructureNode.CreateFrom(subComponent, node);
        if (subNode != null)
            node.children_nodes.Add(subNode);
    }
    return node;
}