/// <summary>
/// Menu: Rename column
/// </summary>
void _mnuRenameColumn_Click(object sender, EventArgs e)
{
    string newName = FrmInputSingleLine.Show(this._listView.FindForm(), "Rename column", this._clickedColumn.Id, "Enter a new name for this column", this._clickedColumn.OverrideDisplayName);

    if (newName != null)
    {
        this._clickedColumn.OverrideDisplayName = newName;
        this._clickedColumn.Header.Text = this._clickedColumn.ToString();
        this.SaveColumnUserPreferences();
    }
}
/// <summary>
/// Button: Edit ID
/// </summary>
private void _btnEditId_Click(object sender, EventArgs e)
{
    FrmMsgBox.ShowWarning(this, "Edit ID",
                          "The ID represents the identifier used when the data was first loaded. It must be unique. Changing the ID may have unintended consequences.",
                          FrmMsgBox.EDontShowAgainId.ChangeExperimentalGroupsId);

    string newId = FrmInputSingleLine.Show(this, this.Text, "Edit ID", this._group.DisplayName, this._txtId.Text);

    if (newId != null)
    {
        this._txtId.Text = newId;
    }
}
/// <summary>
/// Sets the display name of every peak from a user-specified format string (e.g. "{DisplayName}").
/// </summary>
void set_peak_names()
{
    string header = FrmInputSingleLine.Show(this, this.Text, "Peak names", "Enter the peak names", "{DisplayName}");

    if (header != null)
    {
        ParseElementCollection hc = new ParseElementCollection(header);

        foreach (Peak p in this._core.Peaks)
        {
            p.OverrideDisplayName = hc.ConvertToString(p, this._core);
        }
    }
}
/// <summary>
/// Finds the cutoff on a chosen statistic that best separates peaks flagged with one
/// of two user flags, reporting training, test and bootstrap results.
/// </summary>
void find_classifier()
{
    Core core = this._core;
    UserFlag type1;
    UserFlag type2;

    ConfigurationStatistic stat = DataSet.ForStatistics(this._core).ShowList(this, null);

    if (stat == null)
    {
        FrmMsgBox.ShowError(this, "No stat with this name");
        return;
    }

    string sign = FrmInputSingleLine.Show(this, "Classifier settings", "Find classifier", "Enter the cutoff, or 0 for automatic", "0");
    double manCutoff;

    type1 = DataSet.ForUserFlags(this._core).IncludeMessage("Specify the comment flag signifying the first type").ShowList(this, null);

    if (type1 == null)
    {
        return;
    }

    type2 = DataSet.ForUserFlags(this._core).IncludeMessage("Specify the comment flag signifying the second type").ShowList(this, null);

    if (type2 == null)
    {
        return;
    }

    if (!double.TryParse(sign, out manCutoff))
    {
        return;
    }

    StringBuilder sb = new StringBuilder();

    // TID0    = Full
    // TID1..5 = Test/training
    // TID6..9 = Bootstrap
    for (int tid = 0; tid < 10; tid++)
    {
        // Get all significances
        List<double> sigs = new List<double>(core.Peaks.Select(λ => λ.GetStatistic(stat)));
        List<bool> inTrainingSet = new List<bool>(core.Peaks.Count);
        int co;

        // For the training only include 75%
        if (tid >= 1 && tid <= 5)
        {
            co = (int)(core.Peaks.Count * 0.75d);
        }
        else
        {
            co = core.Peaks.Count;
        }

        for (int n = 0; n < core.Peaks.Count; n++)
        {
            inTrainingSet.Add(n < co);
        }

        inTrainingSet.Shuffle();

        // For the boot-strap shuffle the sigs
        if (tid >= 6)
        {
            sigs.Shuffle();
        }

        // Classification results, as reported below:
        // Item1 = score, Item2/Item3 = type1 correct/incorrect, Item4/Item5 = type2 correct/incorrect
        Tuple<double, int, int, int, int> best = null;
        Tuple<double, int, int, int, int> bestTest = null;
        double cutoff = 0;

        // Find the best variable cutoff
        if (manCutoff == 0.0d)
        {
            for (int n = 0; n < core.Peaks.Count; n++)
            {
                if (inTrainingSet[n])
                {
                    var success = this.SimpleClassify(sigs[n], type1, type2, sigs, inTrainingSet, true);

                    if (best == null || success.Item1 > best.Item1)
                    {
                        best = success;
                        bestTest = this.SimpleClassify(sigs[n], type1, type2, sigs, inTrainingSet, false);
                        cutoff = sigs[n];
                    }
                }
            }
        }
        else
        {
            best = this.SimpleClassify(manCutoff, type1, type2, sigs, inTrainingSet, true);
            bestTest = this.SimpleClassify(manCutoff, type1, type2, sigs, inTrainingSet, false);
            cutoff = manCutoff;
        }

        sb.AppendLine(tid == 0 ? "FULLDATA" : tid <= 5 ? "VALIDATION" : "BOOTSTRAP");
        sb.AppendLine();
        sb.AppendLine(" " + type1 + " <= " + cutoff + " < " + type2);
        sb.AppendLine();
        sb.AppendLine(" TRAINING SET (" + co + ")");
        sb.AppendLine(" " + type1 + " correct: " + StringHelper.AsFraction(best.Item2, best.Item2 + best.Item3));
        sb.AppendLine(" " + type2 + " correct: " + StringHelper.AsFraction(best.Item4, best.Item4 + best.Item5));
        sb.AppendLine(" Total correct: " + StringHelper.AsFraction(best.Item2 + best.Item4, best.Item2 + best.Item4 + best.Item3 + best.Item5));
        sb.AppendLine(" Variables used: " + StringHelper.AsFraction(best.Item2 + best.Item4 + best.Item3 + best.Item5, core.Peaks.Count));
        sb.AppendLine();

        if (co != core.Peaks.Count)
        {
            sb.AppendLine(" TEST SET (" + (core.Peaks.Count - co) + ")");
            sb.AppendLine(" " + type1 + " correct: " + StringHelper.AsFraction(bestTest.Item2, bestTest.Item2 + bestTest.Item3));
            sb.AppendLine(" " + type2 + " correct: " + StringHelper.AsFraction(bestTest.Item4, bestTest.Item4 + bestTest.Item5));
            sb.AppendLine(" Total correct: " + StringHelper.AsFraction(bestTest.Item2 + bestTest.Item4, bestTest.Item2 + bestTest.Item4 + bestTest.Item3 + bestTest.Item5));
            sb.AppendLine(" Variables used: " + StringHelper.AsFraction(bestTest.Item2 + bestTest.Item4 + bestTest.Item3 + bestTest.Item5, core.Peaks.Count));
            sb.AppendLine();
            sb.AppendLine(" SCORE: " + (bestTest.Item1 * 100).ToString("F02"));
        }
        else
        {
            sb.AppendLine(" SCORE: " + (best.Item1 * 100).ToString("F02"));
        }

        sb.AppendLine();
        sb.AppendLine("--------------------------------------------------------------------------------");
        sb.AppendLine();
    }

    FrmInputMultiLine.ShowFixed(this, "Find classifier", "Classifier results", "Best value to determine split between variables marked with \"" + type1 + "\" and \"" + type2 + "\" based on their significances", sb.ToString());
}