/// <summary>
/// Returns a new settings object with the settings specified in the file as key value pairs.
/// Settings not being specified in this file will have the default value.
/// </summary>
/// <param name="settingLocation">Full qualified name of the settings file.</param>
/// <returns>A settings object with the values specified in the file.</returns>
public static ML_Settings readSettingsFromFile(string settingLocation)
{
    ML_Settings mls = new ML_Settings();

    if (System.IO.File.Exists(settingLocation) == false)
    {
        // Fixed message typo ("does not exit" -> "does not exist").
        GlobalState.logError.logLine("Could not load ML settings file! File (" + settingLocation + ") does not exist.");
        return mls;
    }

    // using guarantees the reader is disposed even if setSetting or logging throws
    // (the original leaked the StreamReader on any exception).
    using (System.IO.StreamReader file = new System.IO.StreamReader(settingLocation))
    {
        string line;
        while ((line = file.ReadLine()) != null)
        {
            // Each line is expected to be "<name> <value>"; skip blank or malformed
            // lines instead of crashing with an IndexOutOfRangeException.
            string[] nameAndValue = line.Split(new char[] { ' ' }, 2);
            if (nameAndValue.Length < 2)
                continue;
            if (!mls.setSetting(nameAndValue[0], nameAndValue[1]))
            {
                GlobalState.logError.logLine("MlSetting " + nameAndValue[0] + " not found!");
            }
        }
    }

    // Blacklisted options can only be validated once a variability model is loaded.
    if (GlobalState.varModel != null && mls.blacklisted.Count > 0)
    {
        mls.checkAndCleanBlacklisted();
    }
    return mls;
}
/// <summary>
/// Populates the ML-settings panel with one label/text-box pair per public field of
/// ML_Settings, each text box pre-filled with the field's default value.
/// </summary>
private void addMlSettingsBoxContent()
{
    MachineLearning.Learning.ML_Settings defaults = new MachineLearning.Learning.ML_Settings();
    FieldInfo[] fields = defaults.GetType().GetFields();

    for (int row = 0; row < fields.Length; row++)
    {
        FieldInfo field = fields[row];
        // Vertical position of this row of controls.
        int top = 5 + ML_FIELDS_OFFSET * row;

        Label caption = new Label();
        mlSettingsPanel.Controls.Add(caption);
        caption.AutoSize = true;
        caption.Location = new System.Drawing.Point(5, top);
        caption.Name = field.Name + "_label";
        caption.Size = new System.Drawing.Size(35, 15);
        caption.TabIndex = row * 2;
        caption.Text = field.Name;

        TextBox input = new TextBox();
        mlSettingsPanel.Controls.Add(input);
        input.Location = new System.Drawing.Point(150, top);
        input.Name = field.Name + "_textBox";
        input.Size = new System.Drawing.Size(150, 20);
        input.TabIndex = row * 2 + 1;
        // Show the default value of the setting as initial text.
        input.Text = field.GetValue(defaults).ToString();
    }
}
/// <summary>
/// Collects the values of all setting text boxes from the ML-settings panel, builds an
/// ML_Settings object from them, and appends it to the list of added elements.
/// </summary>
/// <param name="sender">The control that raised the event (unused).</param>
/// <param name="e">Event arguments (unused).</param>
private void AddMlSetting_Click(object sender, EventArgs e)
{
    MachineLearning.Learning.ML_Settings setting = new MachineLearning.Learning.ML_Settings();

    foreach (Control control in mlSettingsPanel.Controls)
    {
        // Only the text boxes carry setting values; labels are skipped.
        if (!control.Name.EndsWith("_textBox"))
            continue;

        // Strip the "_textBox" suffix to recover the ML_Settings field name.
        string fieldName = control.Name.Substring(0, control.Name.Length - "_textBox".Length);
        setting.setSetting(fieldName, ((TextBox)control).Text);
    }

    addedElementsList.Items.Add(new Container(CONTAINERKEY_MLSETTING, setting));
}
/// <summary>
/// Returns a new settings object with the settings specified in the string as "name:value"
/// pairs. Settings not being specified in the string will have the default value.
/// </summary>
/// <param name="settings">All settings to be changed in a string with whitespaces as separator.</param>
/// <returns>A settings object with the values specified in the string.</returns>
public static ML_Settings readSettings(string settings)
{
    ML_Settings mls = new ML_Settings();
    String[] settingArray = settings.Split(' ');

    for (int i = 0; i < settingArray.Length; i++)
    {
        string[] nameAndValue = settingArray[i].Split(new char[] { ':' }, 2);
        // Guard against empty or malformed tokens (no ':') instead of crashing
        // with an IndexOutOfRangeException.
        if (nameAndValue.Length < 2)
        {
            GlobalState.logError.logLine("Ignoring malformed MlSetting token: " + settingArray[i]);
            continue;
        }
        if (!mls.setSetting(nameAndValue[0], nameAndValue[1]))
        {
            // logLine (not log) for consistency with the other readSettings overloads
            // and readSettingsFromFile.
            GlobalState.logError.logLine("MlSetting " + nameAndValue[0] + " not found!");
        }
    }
    return mls;
}
/// <summary>
/// Returns a new settings object with the settings given as "name:value" pairs.
/// Settings not mentioned in the string keep their default value.
/// </summary>
/// <param name="settings">All settings to be changed in a string with whitespaces as separator.</param>
/// <returns>A settings object with the values specified in the string.</returns>
public static ML_Settings readSettings(string settings)
{
    // Normalize the input: strip surrounding whitespace and embedded line breaks.
    settings = settings.Trim();
    settings = settings.Replace(System.Environment.NewLine, "");

    ML_Settings mls = new ML_Settings();

    foreach (string pair in settings.Split(' '))
    {
        string[] nameAndValue = pair.Split(new char[] { ':' }, 2);
        if (!mls.setSetting(nameAndValue[0], nameAndValue[1]))
        {
            GlobalState.logError.logLine("MlSetting " + nameAndValue[0] + " not found!");
        }
    }

    // Blacklisted options can only be validated once a variability model is loaded.
    if (GlobalState.varModel != null && mls.blacklisted.Count > 0)
    {
        mls.checkAndCleanBlacklisted();
    }
    return mls;
}
/// <summary>
/// Clears the binary and numeric selections and the machine learning settings
/// stored in this object.
/// </summary>
public void clear()
{
    // Reset the ML settings to their defaults, then drop all sampling selections.
    mlSettings = new ML_Settings();
    clearSampling();
}
/// <summary>
/// Parses the given parameter definitions and fills the per-type dictionaries that span
/// the parameter space (setting name -> list of candidate values). Unknown setting names
/// and values that do not match the field's declared type are ignored with a log message.
/// </summary>
/// <param name="parameters">One setting definition per entry; name and candidate values are extracted via extractSettings.</param>
/// <param name="boolSettings">Receives candidate values for boolean settings.</param>
/// <param name="intSettings">Receives candidate values for integer settings.</param>
/// <param name="doubleSettings">Receives candidate values for floating-point settings.</param>
/// <param name="lossFuncInterval">Receives candidate values for LossFunction settings.</param>
/// <param name="scoreMeasureInterval">Receives candidate values for ScoreMeasure settings.</param>
/// <param name="learnTimeLimitInterval">Receives candidate values for TimeSpan settings.</param>
private static void defineParameterSpace(string[] parameters, Dictionary<string, List<bool>> boolSettings,
    Dictionary<string, List<int>> intSettings, Dictionary<string, List<double>> doubleSettings,
    Dictionary<string, List<LossFunction>> lossFuncInterval, Dictionary<string, List<ScoreMeasure>> scoreMeasureInterval,
    Dictionary<string, List<TimeSpan>> learnTimeLimitInterval)
{
    // Reference object used only to reflect on the available ML_Settings fields;
    // hoisted out of the loop because it never changes.
    ML_Settings referenceSetting = new ML_Settings();

    foreach (string parameter in parameters)
    {
        // dummies for the TryParse type probes below
        int y;
        double x;
        TimeSpan z;

        // setting name and values that should be within the parameter space
        Tuple<string, string[]> nameAndValues = extractSettings(parameter);

        System.Reflection.FieldInfo fi = referenceSetting.GetType().GetField(nameAndValues.Item1);
        if (fi == null)
        {
            GlobalState.logInfo.logLine("Invalid variable name: " + nameAndValues.Item1 + ". This setting will be ignored.");
        }
        else if (isBool(nameAndValues.Item2[0]) && fi.FieldType.FullName.Equals("System.Boolean"))
        {
            List<bool> toAdd = new List<bool>();
            foreach (string value in nameAndValues.Item2)
            {
                toAdd.Add(toBool(value));
            }
            // Indexer instead of Add: consistent with the enum/TimeSpan branches below and
            // tolerant of a setting name appearing twice (last definition wins instead of throwing).
            boolSettings[nameAndValues.Item1] = toAdd;
        }
        else if (int.TryParse(nameAndValues.Item2[0], out y) && (fi.FieldType.FullName.Equals("System.Int32") || fi.FieldType.FullName.Equals("System.Int64")))
        {
            List<int> toAdd = new List<int>();
            foreach (string value in nameAndValues.Item2)
            {
                toAdd.Add(int.Parse(value));
            }
            intSettings[nameAndValues.Item1] = toAdd;
        }
        else if (Double.TryParse(nameAndValues.Item2[0], NumberStyles.Float, CultureInfo.InvariantCulture, out x) && fi.FieldType.FullName.Equals("System.Double"))
        {
            // The probe now uses the invariant culture, matching the Parse call below;
            // previously the probe used the current culture and could accept values
            // (e.g. "1,5") that the invariant-culture Parse would then reject or misread.
            List<double> toAdd = new List<double>();
            foreach (string value in nameAndValues.Item2)
            {
                toAdd.Add(Double.Parse(value, CultureInfo.InvariantCulture));
            }
            doubleSettings[nameAndValues.Item1] = toAdd;
        }
        else if (isLossFunction(nameAndValues.Item2[0]) && fi.FieldType.FullName.Equals("MachineLearning.Learning.ML_Settings+LossFunction"))
        {
            List<LossFunction> toAdd = new List<LossFunction>();
            foreach (string value in nameAndValues.Item2)
            {
                toAdd.Add(toLossFunction(value));
            }
            lossFuncInterval[nameAndValues.Item1] = toAdd;
        }
        else if (isScoreMeasure(nameAndValues.Item2[0]) && fi.FieldType.FullName.Equals("MachineLearning.Learning.ML_Settings+ScoreMeasure"))
        {
            List<ScoreMeasure> toAdd = new List<ScoreMeasure>();
            foreach (string value in nameAndValues.Item2)
            {
                toAdd.Add(toScoreMeasure(value));
            }
            scoreMeasureInterval[nameAndValues.Item1] = toAdd;
        }
        else if (TimeSpan.TryParse(nameAndValues.Item2[0], out z) && fi.FieldType.FullName.Equals("System.TimeSpan"))
        {
            List<TimeSpan> toAdd = new List<TimeSpan>();
            foreach (string value in nameAndValues.Item2)
            {
                toAdd.Add(TimeSpan.Parse(value));
            }
            learnTimeLimitInterval[nameAndValues.Item1] = toAdd;
        }
        else
        {
            GlobalState.logInfo.logLine("Invalid setting-value pair: " + nameAndValues.Item1 + " " + string.Join(",", nameAndValues.Item2) + ". This setting will be ignored.");
        }
    }
}
/// <summary>
/// Performs the functionality of one command. If no functionality is found for the command,
/// the command is returned by this method.
/// </summary>
/// <param name="line">One command with its parameters.</param>
/// <returns>Returns an empty string if the command could be performed by the method. If the command could not be performed by the method, the original command is returned.</returns>
public string performOneCommand(string line)
{
    GlobalState.logInfo.logLine(COMMAND + line);
    // remove comment part of the line (the comment starts with an #)
    line = line.Split(new Char[] { '#' }, 2)[0];
    if (line.Length == 0)
        return "";
    // split line in command and parameters of the command
    string[] components = line.Split(new Char[] { ' ' }, 2);
    string command = components[0];
    string task = "";
    if (components.Length > 1)
        task = components[1];
    // individual whitespace-separated parameters, used to detect flags like COMMAND_VALIDATION
    string[] taskAsParameter = task.Split(new Char[] { ' ' });
    switch (command.ToLower())
    {
        case COMMAND_START_ALLMEASUREMENTS:
            {
                // Learn on every measured configuration that has a value for the current NFP.
                InfluenceModel infMod = new InfluenceModel(GlobalState.varModel, GlobalState.currentNFP);
                List<Configuration> configurations_Learning = new List<Configuration>();
                foreach (Configuration config in GlobalState.allMeasurements.Configurations)
                {
                    if (config.nfpValues.ContainsKey(GlobalState.currentNFP))
                        configurations_Learning.Add(config);
                }
                if (configurations_Learning.Count == 0)
                {
                    GlobalState.logInfo.logLine("The learning set is empty! Cannot start learning!");
                    break;
                }
                GlobalState.logInfo.logLine("Learning: " + "NumberOfConfigurationsLearning:" + configurations_Learning.Count);
                // prepare the machine learning; learning set doubles as validation set here
                exp = new MachineLearning.Learning.Regression.Learning(configurations_Learning, configurations_Learning);
                exp.metaModel = infMod;
                exp.mLsettings = this.mlSettings;
                exp.learn();
            }
            break;
        case COMMAND_TRUEMODEL:
            // Load a ground-truth influence function from the first line of the given file.
            // NOTE(review): reader is not wrapped in using; leaks if ReadLine throws.
            StreamReader readModel = new StreamReader(task);
            String model = readModel.ReadLine().Trim();
            readModel.Close();
            this.trueModel = new InfluenceFunction(model.Replace(',', '.'), GlobalState.varModel);
            NFProperty artificalProp = new NFProperty("artificial");
            GlobalState.currentNFP = artificalProp;
            //computeEvaluationDataSetBasedOnTrueModel();
            break;
        case COMMAND_SUBSCRIPT:
            {
                // Execute another script file line by line via a fresh Commands instance
                // that shares the current learning experiment.
                FileInfo fi = new FileInfo(task);
                StreamReader reader = null;
                if (!fi.Exists)
                    throw new FileNotFoundException(@"Automation script not found. ", fi.ToString());
                reader = fi.OpenText();
                Commands co = new Commands();
                co.exp = this.exp;
                while (!reader.EndOfStream)
                {
                    String oneLine = reader.ReadLine().Trim();
                    co.performOneCommand(oneLine);
                }
            }
            break;
        case COMMAND_EVALUATION_SET:
            {
                GlobalState.evalutionSet.Configurations = ConfigurationReader.readConfigurations(task, GlobalState.varModel);
                GlobalState.logInfo.logLine("Evaluation set loaded.");
            }
            break;
        case COMMAND_CLEAR_GLOBAL:
            // Reset all global state plus local sampling selections.
            SPLConqueror_Core.GlobalState.clear();
            toSample.Clear();
            toSampleValidation.Clear();
            break;
        case COMMAND_CLEAR_SAMPLING:
            exp.clearSampling();
            toSample.Clear();
            toSampleValidation.Clear();
            break;
        case COMMAND_CLEAR_LEARNING:
            exp.clear();
            toSample.Clear();
            toSampleValidation.Clear();
            break;
        case COMMAND_LOAD_CONFIGURATIONS:
            // Merge newly read configurations with those already loaded (set union).
            GlobalState.allMeasurements.Configurations = (GlobalState.allMeasurements.Configurations.Union(ConfigurationReader.readConfigurations(task, GlobalState.varModel))).ToList();
            GlobalState.logInfo.logLine(GlobalState.allMeasurements.Configurations.Count + " configurations loaded.");
            break;
        case COMMAND_SAMPLE_ALLBINARY:
            {
                // COMMAND_VALIDATION flag routes the strategy to the validation set.
                if (taskAsParameter.Contains(COMMAND_VALIDATION))
                {
                    this.toSampleValidation.Add(SamplingStrategies.ALLBINARY);
                    this.exp.info.binarySamplings_Validation = "ALLBINARY";
                }
                else
                {
                    this.toSample.Add(SamplingStrategies.ALLBINARY);
                    this.exp.info.binarySamplings_Learning = "ALLBINARY";
                }
                break;
            }
        case COMMAND_ANALYZE_LEARNING:
            {
                //TODO: Analyzation is not supported in the case of bagging
                // Logs the learning history of each learned model, including a relative
                // error per learning round computed against the evaluation set when one
                // is loaded, otherwise against all measurements.
                GlobalState.logInfo.logLine("Models:");
                if (this.mlSettings.bagging)
                {
                    for (int i = 0; i < this.exp.models.Count; i++)
                    {
                        FeatureSubsetSelection learnedModel = exp.models[i];
                        if (learnedModel == null)
                        {
                            GlobalState.logError.logLine("Error... learning was not performed!");
                            break;
                        }
                        GlobalState.logInfo.logLine("Termination reason: " + learnedModel.LearningHistory.Last().terminationReason);
                        foreach (LearningRound lr in learnedModel.LearningHistory)
                        {
                            double relativeError = 0;
                            if (GlobalState.evalutionSet.Configurations.Count > 0)
                            {
                                double relativeErro2r = learnedModel.computeError(lr.FeatureSet, GlobalState.evalutionSet.Configurations, out relativeError);
                            }
                            else
                            {
                                double relativeErro2r = learnedModel.computeError(lr.FeatureSet, GlobalState.allMeasurements.Configurations, out relativeError);
                            }
                            GlobalState.logInfo.logLine(lr.ToString() + relativeError);
                        }
                    }
                }
                else
                {
                    // Non-bagging: only the first model is analyzed.
                    FeatureSubsetSelection learnedModel = exp.models[0];
                    if (learnedModel == null)
                    {
                        GlobalState.logError.logLine("Error... learning was not performed!");
                        break;
                    }
                    GlobalState.logInfo.logLine("Termination reason: " + learnedModel.LearningHistory.Last().terminationReason);
                    foreach (LearningRound lr in learnedModel.LearningHistory)
                    {
                        double relativeError = 0;
                        if (GlobalState.evalutionSet.Configurations.Count > 0)
                        {
                            double relativeErro2r = learnedModel.computeError(lr.FeatureSet, GlobalState.evalutionSet.Configurations, out relativeError);
                        }
                        else
                        {
                            double relativeErro2r = learnedModel.computeError(lr.FeatureSet, GlobalState.allMeasurements.Configurations, out relativeError);
                        }
                        GlobalState.logInfo.logLine(lr.ToString() + relativeError);
                    }
                }
                break;
            }
        case COMMAND_EXERIMENTALDESIGN:
            performOneCommand_ExpDesign(task);
            break;
        case COMMAND_SAMPLING_OPTIONORDER:
            parseOptionOrder(task);
            break;
        case COMMAND_VARIABILITYMODEL:
            GlobalState.varModel = VariabilityModel.loadFromXML(task);
            if (GlobalState.varModel == null)
                GlobalState.logError.logLine("No variability model found at " + task);
            break;
        case COMMAND_SET_NFP:
            GlobalState.currentNFP = GlobalState.getOrCreateProperty(task.Trim());
            break;
        case COMMAND_SAMPLE_OPTIONWISE:
            if (taskAsParameter.Contains(COMMAND_VALIDATION))
            {
                this.toSampleValidation.Add(SamplingStrategies.OPTIONWISE);
                this.exp.info.binarySamplings_Validation = "OPTIONSWISE";
            }
            else
            {
                this.toSample.Add(SamplingStrategies.OPTIONWISE);
                this.exp.info.binarySamplings_Learning = "OPTIONSWISE";
            }
            break;
        case COMMAND_LOG:
            // Redirect info and error logging to the given location ("<location>_error" for errors).
            string location = task.Trim();
            GlobalState.logInfo.close();
            GlobalState.logInfo = new InfoLogger(location);
            GlobalState.logError.close();
            GlobalState.logError = new ErrorLogger(location + "_error");
            break;
        case COMMAND_SET_MLSETTING:
            this.mlSettings = ML_Settings.readSettings(task);
            break;
        case COMMAND_LOAD_MLSETTINGS:
            this.mlSettings = ML_Settings.readSettingsFromFile(task);
            break;
        case COMMAND_SAMPLE_PAIRWISE:
            if (taskAsParameter.Contains(COMMAND_VALIDATION))
            {
                this.toSampleValidation.Add(SamplingStrategies.PAIRWISE);
                this.exp.info.binarySamplings_Validation = "PAIRWISE";
            }
            else
            {
                this.toSample.Add(SamplingStrategies.PAIRWISE);
                this.exp.info.binarySamplings_Learning = "PAIRWISE";
            }
            break;
        case COMMAND_PRINT_MLSETTINGS:
            GlobalState.logInfo.logLine(this.mlSettings.ToString());
            break;
        case COMMAND_PRINT_CONFIGURATIONS:
            {
                /* List<Dictionary<NumericOption, double>> numericSampling = exp.NumericSelection_Learning; List<List<BinaryOption>> binarySampling = exp.BinarySelections_Learning; List<Configuration> configurations = new List<Configuration>(); foreach (Dictionary<NumericOption, double> numeric in numericSampling) { foreach (List<BinaryOption> binary in binarySampling) { Configuration config = Configuration.getConfiguration(binary, numeric); if (!configurations.Contains(config) && GlobalState.varModel.configurationIsValid(config)) { configurations.Add(config); } } }*/
                var configs = ConfigurationBuilder.buildConfigs(GlobalState.varModel, this.toSample);
                string[] para = task.Split(new char[] { ' ' });
                // TODO very error prone.. (assumes exactly three whitespace-separated parameters)
                ConfigurationPrinter printer = new ConfigurationPrinter(para[0], para[1], para[2], GlobalState.optionOrder);
                printer.print(configs);
                break;
            }
        case COMMAND_SAMPLE_BINARY_RANDOM:
            {
                // First two parameters set the random-sampling threshold and modulus.
                string[] para = task.Split(new char[] { ' ' });
                ConfigurationBuilder.binaryThreshold = Convert.ToInt32(para[0]);
                ConfigurationBuilder.binaryModulu = Convert.ToInt32(para[1]);
                // NOTE(review): vg appears unused in this branch — confirm whether it can be removed.
                VariantGenerator vg = new VariantGenerator(null);
                if (taskAsParameter.Contains(COMMAND_VALIDATION))
                {
                    this.toSampleValidation.Add(SamplingStrategies.BINARY_RANDOM);
                    this.exp.info.binarySamplings_Validation = "BINARY_RANDOM";
                }
                else
                {
                    this.toSample.Add(SamplingStrategies.BINARY_RANDOM);
                    this.exp.info.binarySamplings_Learning = "BINARY_RANDOM " + task;
                }
                break;
            }
        case COMMAND_START_LEARNING:
            {
                InfluenceModel infMod = new InfluenceModel(GlobalState.varModel, GlobalState.currentNFP);
                List<Configuration> configurationsLearning = buildSet(this.toSample);
                List<Configuration> configurationsValidation = buildSet(this.toSampleValidation);
                // Fall back to the other set when either learning or validation set is empty.
                if (configurationsLearning.Count == 0)
                {
                    configurationsLearning = configurationsValidation;
                }
                if (configurationsLearning.Count == 0)
                {
                    GlobalState.logInfo.logLine("The learning set is empty! Cannot start learning!");
                    break;
                }
                if (configurationsValidation.Count == 0)
                {
                    configurationsValidation = configurationsLearning;
                }
                GlobalState.logInfo.logLine("Learning: " + "NumberOfConfigurationsLearning:" + configurationsLearning.Count + " NumberOfConfigurationsValidation:" + configurationsValidation.Count);
                //+ " UnionNumberOfConfigurations:" + (configurationsLearning.Union(configurationsValidation)).Count()); too costly to compute
                // We have to reuse the list of models because of NotifyCollectionChangedEventHandlers that might be attached to the list of models.
                exp.models.Clear();
                var mod = exp.models;
                exp = new MachineLearning.Learning.Regression.Learning(configurationsLearning, configurationsValidation);
                exp.models = mod;
                exp.metaModel = infMod;
                exp.mLsettings = this.mlSettings;
                exp.learn();
                GlobalState.logInfo.logLine("Average model: \n" + exp.metaModel.printModelAsFunction());
                // Relative error against the evaluation set when loaded, else against all measurements.
                double relativeError = 0;
                if (GlobalState.evalutionSet.Configurations.Count > 0)
                {
                    relativeError = FeatureSubsetSelection.computeError(exp.metaModel, GlobalState.evalutionSet.Configurations, ML_Settings.LossFunction.RELATIVE);
                }
                else
                {
                    relativeError = FeatureSubsetSelection.computeError(exp.metaModel, GlobalState.allMeasurements.Configurations, ML_Settings.LossFunction.RELATIVE);
                }
                GlobalState.logInfo.logLine("Error :" + relativeError);
            }
            break;
        case COMMAND_SAMPLE_NEGATIVE_OPTIONWISE:
            // TODO there are two different variants in generating NegFW configurations.
            if (taskAsParameter.Contains(COMMAND_VALIDATION))
            {
                this.toSampleValidation.Add(SamplingStrategies.NEGATIVE_OPTIONWISE);
                this.exp.info.binarySamplings_Validation = "NEGATIVE_OPTIONWISE";
            }
            else
            {
                this.toSample.Add(SamplingStrategies.NEGATIVE_OPTIONWISE);
                this.exp.info.binarySamplings_Learning = "NEGATIVE_OPTIONWISE";
            }
            break;
        default:
            // Unknown command: hand it back to the caller unchanged.
            return command;
    }
    return "";
}
/// <summary>
/// Serializes the given ML settings as a "load ML settings" command line.
/// </summary>
/// <param name="settings">The settings object to serialize.</param>
/// <returns>The command keyword followed by a space and the settings' string representation.</returns>
private string mlSettingsContent(ML_Settings settings)
{
    return string.Concat(CommandLine.Commands.COMMAND_LOAD_MLSETTINGS, " ", settings.ToString());
}
/// <summary>
/// Returns a new settings object with the settings specified in the file as key value pairs.
/// Settings not being specified in this file will have the default value.
/// </summary>
/// <param name="settingLocation">Full qualified name of the settings file.</param>
/// <returns>A settings object with the values specified in the file.</returns>
public static ML_Settings readSettingsFromFile(string settingLocation)
{
    ML_Settings mls = new ML_Settings();

    if (System.IO.File.Exists(settingLocation) == false)
    {
        // Fixed message typo ("does not exit" -> "does not exist").
        GlobalState.logError.logLine("Could not load ML settings file! File (" + settingLocation + ") does not exist.");
        return mls;
    }

    // using guarantees the reader is disposed even if setSetting or logging throws
    // (the original leaked the StreamReader on any exception).
    using (System.IO.StreamReader file = new System.IO.StreamReader(settingLocation))
    {
        string line;
        while ((line = file.ReadLine()) != null)
        {
            // Each line is expected to be "<name> <value>"; skip blank or malformed
            // lines instead of crashing with an IndexOutOfRangeException.
            string[] nameAndValue = line.Split(new char[] { ' ' }, 2);
            if (nameAndValue.Length < 2)
                continue;
            if (!mls.setSetting(nameAndValue[0], nameAndValue[1]))
            {
                GlobalState.logError.logLine("MlSetting " + nameAndValue[0] + " not found!");
            }
        }
    }
    return mls;
}
/// <summary>
/// Returns a new settings object with the settings given as "name:value" pairs.
/// Settings not mentioned in the string keep their default value.
/// </summary>
/// <param name="settings">All settings to be changed in a string with whitespaces as separator.</param>
/// <returns>A settings object with the values specified in the string.</returns>
public static ML_Settings readSettings(string settings)
{
    // Normalize the input: strip surrounding whitespace and embedded line breaks.
    settings = settings.Trim();
    settings = settings.Replace(System.Environment.NewLine, "");

    ML_Settings mls = new ML_Settings();

    foreach (string pair in settings.Split(' '))
    {
        string[] nameAndValue = pair.Split(new char[] { ':' }, 2);
        if (!mls.setSetting(nameAndValue[0], nameAndValue[1]))
        {
            GlobalState.logError.logLine("MlSetting " + nameAndValue[0] + " not found!");
        }
    }
    return mls;
}