/// <summary>
/// Offline multi-run processing entry point: validates the loaded runs, optionally clears
/// previous state, then dispatches to either real-time single-vector testing or the
/// offline Weka/SMO per-run training pipeline followed by run concatenation.
/// </summary>
/// <param name="dirList">One work directory per run, in run order (index i == run i).</param>
public static void ProcessMultiRuns(List<string> dirList)
{
    TrainingTesting_SharedVariables._trialProblem = null;

    // --- guard clauses: bail out early on missing or empty data ---
    if (Preferences.Instance.ProblemOriginalMulti == null)
    {
        GuiPreferences.Instance.setLog("Processing Failed: Problem null!");
        return;
    }
    if (Preferences.Instance.ProblemOriginalMulti.Count <= 0)
    {
        GuiPreferences.Instance.setLog("Processing Failed: Problem List is Empty");
        return;
    }
    for (int i = 0; i < Preferences.Instance.ProblemOriginalMulti.Count; i++)
    {
        if (Preferences.Instance.ProblemOriginalMulti[i].samples == null)
        {
            GuiPreferences.Instance.setLog("Processing problem " + i.ToString() + " Failed: samples empty!");
            return;
        }
    }

    Preferences.Instance.dirList = dirList;

    // We only clear all memory in offline analysis; in real-time the clear for the first
    // vector is done externally in testdll.
    if (!Preferences.Instance.RealTimeTestingSingleVector)
    {
        GuiPreferences.Instance.setLog("Starting Processing.");
        clearProblem();
        clearSVM();
        JobManager.clearJob();
        GC.Collect(); // deliberate: release the large per-run matrices before reprocessing
    }

    // If "unprocessed" is UNchecked then process all jobs.
    if (!GuiPreferences.Instance.UnprocessedChecked)
    {
        // NOTE: the feature-job pipeline (addFeatureJobs / processFeatureJobs / single-vector
        // prediction) is not recoded for multiple directories yet and is not needed in the
        // current pipeline; see version history for the single-run prototype.
    }
    else // process the original problem without PROCESSING JOBS
    {
        if (Preferences.Instance.RealTimeTestingSingleVector)
        {
            // NOTE: min/max loading for testing was moved to form1.cs before pipe async
            // creation; if you need it here please put it back.
            if (GuiPreferences.Instance.CbSMOChecked) // real-time testing using WEKA AND SMO
            {
                RealTimeProcessing.ExecuteRealtimeTest(TrainingTesting_SharedVariables.smo);
                Preferences.Instance.currentClassifiedVector += 1;
            }
            else if (GuiPreferences.Instance.CbSVMChecked) // weka + SVM, pipeline unfinished.
            {
                // NOTE: weka + SVM (not SMO) single-vector prediction is unfinished; see
                // version history for the partial implementation.
            }
        }
        else // TRAINING W/ WEKA + SMO
        {
            ProcessTrainingRunsOffline(dirList);
        }
    }
}

/// <summary>
/// Runs the offline Weka pipeline on every loaded run, concatenates the runs, and signals
/// completion. Extracted from <see cref="ProcessMultiRuns"/>; behavior unchanged.
/// </summary>
/// <param name="dirList">One work directory per run, in run order.</param>
private static void ProcessTrainingRunsOffline(List<string> dirList)
{
    // All of these must exist in a run's directory for it to count as already processed.
    string[] requiredFiles =
    {
        "TrainSet.libsvm",
        "TrainSet_3th_vectors.libsvm",
        "TrainSet_3th_vectors_scale_paramCS.libsvm",
        "TrainSet_3th_vectors_scaledCS.libsvm",
        "TrainSet_3th_vectors_scaledCS.libsvm.arff",
        "TrainSet_4th_vectors.libsvm",
        "TrainSet_4th_vectors_scale_paramCS.libsvm",
        "TrainSet_4th_vectors_scaledCS.libsvm",
        "TrainSet_4th_vectors_scaledCS.libsvm.arff"
    };

    // Go over all available and loaded RUNS and run the weka pipeline on them.
    for (int i = 0; i < Preferences.Instance.ProblemOriginalMulti.Count; i++)
    {
        TrainingTesting_SharedVariables._trialProblem = Preferences.Instance.ProblemOriginalMulti[i];
        Preferences.Instance.currentProcessedRun = i; // 0-based index (run 0 is the first run)
        GuiPreferences.Instance.WorkDirectory = dirList[i];
        TrainingTesting_SharedVariables.topIGFeatures = new double[][] { };

        bool createFiles;
        if (GuiPreferences.Instance.CbOverwriteProcessedFilesChecked)
        {
            // Overwrite: delete all non-relevant files in the directory (arff/libsvm)
            // and recreate everything from scratch.
            foreach (string fileName in Preferences.Instance.deleteFiles)
            {
                FileDirectoryOperations.DeleteFile(GuiPreferences.Instance.WorkDirectory + fileName);
            }
            createFiles = true;
        }
        else
        {
            // No overwrite: only create the files if any of them is missing.
            createFiles = !requiredFiles.All(f => File.Exists(GuiPreferences.Instance.WorkDirectory + f));
        }

        if (createFiles)
        {
            // Files are loaded, thresholded, vectorized (arff), normalized.
            // A false return means the samples were empty; IG and training are not done here.
            if (!ProcessSingleRunOffline(ref TrainingTesting_SharedVariables.topIGFeatures,
                                         TrainingTesting_SharedVariables._trialProblem))
            {
                GuiPreferences.Instance.setLog("Samples are empty");
            }
            Preferences.Instance.TrainingBaselineMediansRunningWindow[i].clearMatrixMemory();
        }
        else
        {
            GuiPreferences.Instance.setLog("Skipped Processing for directory: " + GuiPreferences.Instance.WorkDirectory);
        }
    }

    // Concatenate the per-run data into the final training set.
    WekaProcessingPipelineForMultiRuns(dirList);

    if (GuiPreferences.Instance.CbSVMChecked)
    {
        // Placeholder: the IG-filtered data set still needs to be loaded here for the
        // libSVM path (e.g. "TrainSet_3th_vectors_scaledCS_filteredIG.libsvm").
        Preferences.Instance.ProblemOriginal = new libSVM_ExtendedProblem();
    }

    TrainingTesting_SharedVariables._trialProblemMulti = Preferences.Instance.ProblemOriginalMulti;
    GuiPreferences.Instance.setLog("Finished Processing.");
    Sound.beep(Sound.beepType.Asterisk);
}
/// <summary>
/// Builds the final aggregated data set: creates a FinalData directory in C:\ named after the
/// current GUI settings, concatenates the per-run vectorized libsvm files, combines each run's
/// min/max scaling parameters into a single parameter file, and saves baseline-median range
/// files used as initial min/max values at the testing stage.
/// </summary>
/// <param name="directoryList">Work directories of the runs to aggregate.</param>
public static void WekaProcessingPipelineForMultiRuns(List<string> directoryList)
{
    // 204800 voxel features + 1 class column, as written into the libsvm param files.
    const int paramFileMaxIndex = 204801;

    // Build the final directory name in C:\ from the current GUI configuration.
    GuiPreferences.Instance.WorkDirectory = @"C:\FinalData_" + "TR" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "_" +
        GuiPreferences.Instance.NormalizedType.ToString() + GuiPreferences.Instance.NudMovingWindow.ToString() + "_";
    if (GuiPreferences.Instance.IgSelectionType == IGType.Threshold)
    {
        GuiPreferences.Instance.WorkDirectory = GuiPreferences.Instance.WorkDirectory + "IG_Thr" + GuiPreferences.Instance.NudIGThreshold.ToString() + "_";
    }
    else if (GuiPreferences.Instance.IgSelectionType == IGType.Voxels)
    {
        GuiPreferences.Instance.WorkDirectory = GuiPreferences.Instance.WorkDirectory + "IG_Vox" + GuiPreferences.Instance.NudIGVoxelAmount.ToString() + "_";
    }
    GuiPreferences.Instance.WorkDirectory = GuiPreferences.Instance.WorkDirectory + Preferences.Instance.events.EventListLastTr.ToString();
    if (GuiPreferences.Instance.CbPeekHigherTRsIGChecked == true)
    {
        GuiPreferences.Instance.WorkDirectory = GuiPreferences.Instance.WorkDirectory + "_Peeking";
    }
    GuiPreferences.Instance.setLog(@"Creating Final Directory in: " + GuiPreferences.Instance.WorkDirectory);
    FileDirectoryOperations.CreateDirectory(GuiPreferences.Instance.WorkDirectory);
    GuiPreferences.Instance.WorkDirectory += @"\";

    ConcatenateLibsvmVectorizedPerTR(directoryList);

    // NOTE: min/max values are taken from the param files of each run, giving N max values
    // and N min values per feature. A true median would require concatenating all raw values
    // across runs per feature before taking the median; a median of N per-run maxes/mins is
    // conceptually wrong, so median code belongs in the normalization class.
    // NOTE2: this code takes MAX(maxes) and MIN(mins) across the N param files; these are
    // saved as the initial min/max values for the testing stage.
    double[][] feature_max = new double[directoryList.Count][];
    double[][] feature_min = new double[directoryList.Count][];
    int i = 0;
    int max_index = -1;
    foreach (string directory in directoryList)
    {
        // Use the current TR's param file for min/max. (For median consideration the
        // previous TR's file, NudClassifyUsingTR - 1, was used in the past.)
        TrainingTesting_SharedVariables._svmscaleTraining.getConfigFileMinMaxValues(
            directory + "TrainSet_" + (GuiPreferences.Instance.NudClassifyUsingTR).ToString() + "th_vectors_scale_paramCS.libsvm",
            ref feature_max[i], ref feature_min[i], ref max_index);
        i++;
    }

    // Combine the per-run values and save a new min/max param file to C:\.
    double[] finalFeature_max = new double[feature_max[0].Length];
    double[] finalFeature_min = new double[feature_max[0].Length];
    double[] finalFeature_medianMax = new double[feature_max[0].Length];
    double[] finalFeature_medianMin = new double[feature_max[0].Length];

    // TEMP lists with one slot per run, used to gather each feature's values across runs.
    var values_max = new List<double>(feature_max.Length);
    var values_min = new List<double>(feature_max.Length);
    var values_medianMax = new List<double>(feature_max.Length);
    var values_medianMin = new List<double>(feature_max.Length);
    for (int k = 0; k < feature_max.Length; k++)
    {
        // init zeros
        values_max.Add(0);
        values_min.Add(0);
        values_medianMax.Add(0);
        values_medianMin.Add(0);
    }

    for (int j = 0; j < feature_max[0].Length; j++)
    {
        for (int k = 0; k < feature_max.Length; k++)
        {
            // For each feature, group all run-based values together.
            values_max[k] = feature_max[k][j];
            values_min[k] = feature_min[k][j];
        }
        // Median of maxes/mins is optional here (GetMedian(values_max/values_min));
        // we take the overall Max and Min instead.
        finalFeature_max[j] = values_max.Max();
        finalFeature_min[j] = values_min.Min();
    }

    // Save the combined min/max param file.
    TrainingTesting_SharedVariables._svmscaleTraining.saveConfigMinMax_CSharp(
        GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scale_paramCS.libsvm",
        finalFeature_min, finalFeature_max, paramFileMaxIndex, 0.0f, 1.0f);

    int lowRangeMinus = 0;
    int highRangeMinus = 0;

    // Calculate median ranges from baseline:
    // min = test baseline median - 2nd smallest (training baseline median - training min)
    // max = test baseline median + 2nd smallest (training max - training baseline median)
    for (int j = 1; j < feature_max[0].Length - 1; j++)
    {
        for (int k = 0; k < feature_max.Length; k++)
        {
            values_medianMax[k] = feature_max[k][j] - Preferences.Instance.TrainingBaselineMedians[k].median[j - 1];
            values_medianMin[k] = Preferences.Instance.TrainingBaselineMedians[k].median[j - 1] - feature_min[k][j];
        }
        double chosenLowRange;
        double chosenHighRange;
        // Alternatives tried here: getSecondHighest(...) and StatisticsFeatures.GetMedian(...).
        chosenLowRange = getSecondLowest(values_medianMin);
        chosenHighRange = getSecondLowest(values_medianMax);
        finalFeature_medianMin[j] = chosenLowRange;
        finalFeature_medianMax[j] = chosenHighRange;
        if (chosenLowRange <= 0)
        {
            lowRangeMinus++;
        }
        if (chosenHighRange <= 0)
        {
            highRangeMinus++;
        }
    }

    // For verification, save the ranges (this file is NOT to be used!).
    TrainingTesting_SharedVariables._svmscaleTraining.saveConfigMinMax_CSharp(
        GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scale_MedianRangeFromBaseline.params.txt",
        finalFeature_medianMin, finalFeature_medianMax, paramFileMaxIndex, 0.0f, 1.0f);

    Preferences.Instance.medianRange = new MinMax(finalFeature_medianMin, finalFeature_medianMax);
    XMLSerializer.serializeArrayToFile<MinMax>(
        GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_MedianRangeFromBaseline.xml",
        Preferences.Instance.medianRange);

    GuiPreferences.Instance.setLog("out of 204K features, low range <= 0: " + lowRangeMinus.ToString() + " && high range <= 0: " + highRangeMinus.ToString());
}
/// <summary>
/// WAS A BIG BUTTON: quickloads a range of commands to test the weka pipeline.
/// Loads protocol and data, processes for SMO, trick, IG, etc.; has java/python integration.
/// 1. Trick: QuickLoad, Export to Libsvm, separate to TRs files in libsvm, convert TR-3 and TR-4 to arff, use TR4 + IG to get 1000 features, filter TR-3 based on features from TR-4, save result to libsvm format, train using LibSvm (grid?), save model, test on training data - must get 100%, display 1000 on viewport
/// 2. No Trick: QuickLoad, Export to Libsvm, separate to TRs files in libsvm, convert TR-3 to arff, filter TR-3 based on 1000 top IG, save result to libsvm format, train using LibSvm (grid?), save model, test on training data - must get 100%, display 1000 on viewport
/// Uses a hard-coded list of run directories (moshe sherf classification, 4 aggregated to test on 1).
/// </summary>
/// <param name="from">First TR of the training range (assigned to GuiPreferences FromTR).</param>
/// <returns>Always true.</returns>
public bool QuickProcessWekaPipeline(int from)
{
    // --- from this point the loading data phase begins ---
    // (Older hard-coded data sets — "tirosh null movement", "magali classification" —
    // were removed; see version history if they are needed again.)
    string[] directoryList =
    {
        @"H:\My_Dropbox\VERE\Experiment1\Sherf_Moshe\20121010.movement.1\05_classification\rtp\",
        @"H:\My_Dropbox\VERE\Experiment1\Sherf_Moshe\20121010.movement.1\07_classification\rtp\",
        @"H:\My_Dropbox\VERE\Experiment1\Sherf_Moshe\20121010.movement.1\09_classification\rtp\",
        @"H:\My_Dropbox\VERE\Experiment1\Sherf_Moshe\20121010.movement.1\11_classification\rtp\"
    };
    GuiPreferences.Instance.ProtocolFile = @"H:\My_Dropbox\VERE\MRI_data\Tirosh\20113110.short.5th.exp.hands.legs.zscore.thought_LRF.prt";
    GuiPreferences.Instance.FileName = "tirosh-";
    GuiPreferences.Instance.FileType = OriBrainLearnerCore.DataType.rawValue;

    // Read the protocol file.
    Preferences.Instance.prot = new ProtocolManager();

    double[][] topIGFeatures = { };
    foreach (string directory in directoryList)
    {
        GuiPreferences.Instance.WorkDirectory = directory;

        // Delete all files that are going to be created, to prevent anomaly vectors.
        string[] deleteFiles =
        {
            "TrainSet.libsvm",
            "TrainSet_3th_vectors.libsvm",
            "TrainSet_3th_vectors_scale_paramCS.libsvm",
            "TrainSet_3th_vectors_scaledCS.libsvm",
            "TrainSet_3th_vectors_scaledCS.libsvm.arff",
            "TrainSet_3th_vectors_scaledCS_filteredIG.arff",
            "TrainSet_3th_vectors_scaledCS_filteredIG.model",
            "TrainSet_3th_vectors_scaledCS_filteredIG_indices.xml",
            "TrainSet_4th_vectors.libsvm",
            "TrainSet_4th_vectors_scale_paramCS.libsvm",
            "TrainSet_4th_vectors_scaledCS.libsvm",
            "TrainSet_4th_vectors_scaledCS.libsvm.arff"
        };
        foreach (string fileName in deleteFiles)
        {
            FileDirectoryOperations.DeleteFile(GuiPreferences.Instance.WorkDirectory + fileName);
        }

        // Get all files in the path with this extension, then update the paths.
        GuiManager.getFilePaths("*.vdat");
        GuiManager.updateFilePaths();

        // Class labels (normally assigned after the protocol is known).
        GuiPreferences.Instance.CmbClass1Selected = 1; // left
        GuiPreferences.Instance.CmbClass2Selected = 2; // right

        // NEED TO ADD A VARIABLE FOR EVERY OPTION IN THE GUI. RAW VALUES. UNPROCESSED.
        // MULTI CLASS. CROSS VALD, GRID, FOLDS, ETC... and for every button a function!
        // Training-set range (a test-set range of FromTR = 46 was used in the past).
        GuiPreferences.Instance.FromTR = from; // 264;

        // Finally load the raw data.
        TrainingTesting_SharedVariables.binary.loadRawData();
        topIGFeatures = new double[][] { };

        // Files are loaded, thresholded, vectorized, normalized.
        // A false return means the samples were empty; IG and training are not done here.
        if (!Training_MultiRunProcessing.ProcessSingleRunOffline(ref topIGFeatures, Preferences.Instance.ProblemOriginal))
        {
            GuiPreferences.Instance.setLog("Samples are empty");
        }
        // ++grab final vectors and concat them; grab min/max values for saving the median.
    }

    // Create a dir that holds the final DS in C:\.
    GuiPreferences.Instance.WorkDirectory = @"C:\FinalData_" + DateTime.Now.ToLongTimeString().Replace(':', '-');
    GuiPreferences.Instance.setLog(@"Creating Final Directory in: " + GuiPreferences.Instance.WorkDirectory);
    FileDirectoryOperations.CreateDirectory(GuiPreferences.Instance.WorkDirectory);
    GuiPreferences.Instance.WorkDirectory += @"\";

    // Concatenate the libsvm normalized and vectorized files.
    // 'using' guarantees the streams are closed even if a copy throws.
    using (FileStream outputFileStream = new FileStream(
        GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS.libsvm",
        FileMode.CreateNew, FileAccess.Write))
    {
        foreach (string directory in directoryList)
        {
            using (FileStream fileStream = new FileStream(
                directory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS.libsvm",
                FileMode.Open, FileAccess.Read))
            {
                Training_MultiRunProcessing.CopyStream(outputFileStream, fileStream);
            }
        }
    }

    // Save the concatenated TR file as arff.
    if (WekaCommonFileOperation.ConvertLIBSVM2ARFF(
        GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS.libsvm",
        204800))
    {
        GuiPreferences.Instance.setLog("Converted to ARFF: TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS.arff");
    }

    // Collect each run's min/max scaling parameters.
    double[][] feature_max = new double[directoryList.Length][];
    double[][] feature_min = new double[directoryList.Length][];
    int i = 0;
    int max_index = -1;
    foreach (string directory in directoryList)
    {
        TrainingTesting_SharedVariables._svmscaleTraining.getConfigFileMinMaxValues(
            directory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scale_paramCS.libsvm",
            ref feature_max[i], ref feature_min[i], ref max_index);
        i++;
    }

    // Combine per-run values and save a new min/max param file to C:\.
    double[] finalFeature_max = new double[feature_max[0].Length];
    double[] finalFeature_min = new double[feature_max[0].Length];

    // A list with one slot per run, used to gather each feature's values across runs.
    var values_max = new List<double>(feature_max.Length);
    var values_min = new List<double>(feature_max.Length);
    for (int k = 0; k < feature_max.Length; k++)
    {
        // init zeros
        values_max.Add(0);
        values_min.Add(0);
    }
    for (int j = 0; j < feature_max[0].Length; j++)
    {
        for (int k = 0; k < feature_max.Length; k++)
        {
            values_max[k] = feature_max[k][j];
            values_min[k] = feature_min[k][j];
        }
        // Median of maxes/mins (GetMedian) is an alternative; we take overall Max/Min.
        finalFeature_max[j] = values_max.Max();
        finalFeature_min[j] = values_min.Min();
    }
    TrainingTesting_SharedVariables._svmscaleTraining.saveConfigMinMax_CSharp(
        GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scale_paramCS.libsvm",
        finalFeature_min, finalFeature_max, 204801, 0.0f, 1.0f);
    // TODO: check max index in the file; check if 204801 needs removing so it doesn't affect the class.

    double[][] FinaltopIGFeatures = { };
    Instances finalData = Training_MultiRunProcessing.ConcatenationPipeLine(
        "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS.libsvm.arff",
        "TrainSet_4th_vectors_scaledCS.libsvm.arff");
    WekaTrainingMethods.TrainSMO(finalData);

    // Display top IG on the dicom view: the dicoms live in the sibling "master" directory
    // of the first run (replace the trailing "rtp\" with "master\").
    string dicomDir = directoryList[0];
    dicomDir = dicomDir.Substring(0, dicomDir.Length - 4) + @"master\";
    string[] files = System.IO.Directory.GetFiles(dicomDir, "*.dcm");
    string firstFile = Path.GetFileName(files[0]);

    bool thresholdOrVoxelAmount = GuiPreferences.Instance.IgSelectionType == IGType.Threshold;

    // NOTE: FinaltopIGFeatures may be empty; the IG values are NOT at
    // preferences.instance.attsel.selectedattributes or rankedattributes.
    Form plotForm = new DicomImageViewer.MainForm(
        dicomDir + firstFile, firstFile, FinaltopIGFeatures,
        Convert.ToDouble(GuiPreferences.Instance.NudIGThreshold),
        Convert.ToInt32(GuiPreferences.Instance.NudIGVoxelAmount),
        thresholdOrVoxelAmount,
        GuiPreferences.Instance.WorkDirectory + "brain");
    plotForm.StartPosition = FormStartPosition.CenterParent;
    plotForm.ShowDialog();
    plotForm.Close();
    return true;
}