public List<ResultsGroup>[] evaluate(OpenFileDialog FileLinks, Feature featureData)
{
    // One result list per selected data file; each file is processed independently.
    List<ResultsGroup>[] allFinalResults = new List<ResultsGroup>[FileLinks.FileNames.Length];
    int count = 0;
    foreach (string filename in FileLinks.FileNames)
    {
        // Get the parameter settings for this run.
        ParametersForm parameterForm = new ParametersForm();
        ParametersForm.ParameterSettings parameters = parameterForm.GetParameters();

        // Perform the first and second grouping and collect the feature data via Groupings().
        List<ResultsGroup> groupedResults = Groupings(filename, parameters);

        // Generate scores from the caller-supplied feature weights.
        SupervisedLearner learner = new SupervisedLearner();
        allFinalResults[count] = learner.Scorings(groupedResults, featureData, parameters);
        count++;
    }
    return allFinalResults;
}
public List<ResultsGroup>[] run(OpenFileDialog FileLinks)
{
    // One result list per selected data file; each file is processed independently.
    List<ResultsGroup>[] allFinalResults = new List<ResultsGroup>[FileLinks.FileNames.Length];
    int count = 0;
    foreach (string filename in FileLinks.FileNames)
    {
        // Get the parameter settings for this run.
        ParametersForm parameterForm = new ParametersForm();
        ParametersForm.ParameterSettings parameters = parameterForm.GetParameters();

        // Perform the first and second grouping and collect the feature data via Groupings().
        List<ResultsGroup> groupedResults = Groupings(filename, parameters);

        // ---------------- Logistic regression ----------------
        Features featureReader = new Features();

        // Current (trained) feature weights.
        Feature featureData = featureReader.readFeature();

        // Default feature weights shipped with the application.
        string defaultPath = Application.StartupPath + "\\FeatureDefault.fea";
        Feature defaultData = featureReader.readFeature(defaultPath);

        // Feature weights actually used for scoring: the beta values of the logistic
        // regression, taken as the average of the current and default weights.
        Feature finalFeatureData = new Feature();
        finalFeatureData.Initial = featureData.Initial * 0.5 + defaultData.Initial * 0.5;
        finalFeatureData.numChargeStates = featureData.numChargeStates * 0.5 + defaultData.numChargeStates * 0.5;
        finalFeatureData.ScanDensity = featureData.ScanDensity * 0.5 + defaultData.ScanDensity * 0.5;
        finalFeatureData.numModiStates = featureData.numModiStates * 0.5 + defaultData.numModiStates * 0.5;
        finalFeatureData.totalVolume = featureData.totalVolume * 0.5 + defaultData.totalVolume * 0.5;
        finalFeatureData.ExpectedA = featureData.ExpectedA * 0.5 + defaultData.ExpectedA * 0.5;
        finalFeatureData.CentroidScan = featureData.CentroidScan * 0.5 + defaultData.CentroidScan * 0.5;
        finalFeatureData.numOfScan = featureData.numOfScan * 0.5 + defaultData.numOfScan * 0.5;
        finalFeatureData.avgSigNoise = featureData.avgSigNoise * 0.5 + defaultData.avgSigNoise * 0.5;

        // Generate scores with the blended feature weights.
        SupervisedLearner learner = new SupervisedLearner();
        allFinalResults[count] = learner.Scorings(groupedResults, finalFeatureData, parameters);
        count++;
    }
    return allFinalResults;
}
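// Usage sketch (illustrative assumption, not part of the original code): both entry points
// iterate over the files selected in an OpenFileDialog. evaluate() scores with feature
// weights supplied by the caller, while run() blends the saved weights 50/50 with the
// shipped defaults before scoring. The dialog setup below is hypothetical.
//
// OpenFileDialog fileLinks = new OpenFileDialog { Multiselect = true };
// if (fileLinks.ShowDialog() == DialogResult.OK)
// {
//     List<ResultsGroup>[] scored = run(fileLinks);                      // blended weights
//     // or, with caller-supplied weights:
//     // List<ResultsGroup>[] scored = evaluate(fileLinks, featureData);
// }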