Exemple #1
0
 /// <summary>
 /// Empties the job queue and resets the cached state (last index and current problem reference).
 /// </summary>
 public void clearJobQueue()
 {
     //NOTE(review): double check that clearing here cannot also clear the original problem held in preferences.
     jobs.Clear();
     lastIndex = 0;
     _problem  = null;
 }
Exemple #2
0
 /// <summary>
 /// Checks whether the given problem contains any samples so processing can proceed;
 /// logs an export-failure message when it does not.
 /// </summary>
 public static bool SamplesExist(libSVM_ExtendedProblem problem)
 {
     bool hasSamples = problem.samples != null;

     if (!hasSamples)
     {
         GuiPreferences.Instance.setLog("Export Failed: Problem has no samples!");
     }

     return hasSamples;
 }
Exemple #3
0
        /// <summary>
        /// "FULL" processing keeps every input vector, so the output problem simply becomes
        /// the input problem (reference assignment, no transformation).
        /// </summary>
        /// <remarks>
        /// Fixed: removed the no-op statement "K = K - 1 + 1;", the two array allocations that
        /// were immediately discarded when _oproblem was rebound to _iproblem, and the
        /// redundant trailing return. NOTE(review): the removed allocations mutated whatever
        /// object _oproblem previously referenced — assumed no other code observed that; confirm.
        /// </remarks>
        private void ProcessFULL()
        {
            _oproblem = _iproblem;
        }
Exemple #4
0
        //although SingleRun uses all the functions here, but this class is still for MULTICLASS
        /// <summary>
        /// Offline preprocessing for a single run: validates that samples exist, then exports,
        /// splits, normalizes and converts them. Returns false (after logging) when the
        /// problem has no samples.
        /// </summary>
        public static bool ProcessSingleRunOffline(ref double[][] topIGFeatures, libSVM_ExtendedProblem problem)
        {
            // --- from this point the preprocessing of a single run begins --- //

            // Abort early when there is nothing to export (SamplesExist also logs the failure).
            if (!SamplesExist(problem))
            {
                return false;
            }

            // Save to libsvm, split per TR, scale, and convert to ARFF.
            VectorizeAndNormalize(problem);

            return true;

            /*
             * //filter tr3 based on top 1000 from tr4 (the trick)
             * //load TR4 !!! NOTE: trick changed from tr4 to 3 because i didnt see any increase in % in real time.. + this line can be removed to speed up things.
             * Instances data = ThePipeline.loadDataSetFile("TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS.libsvm.arff");
             *
             * //select 1000 IG values, serialize to file
             * ThePipeline.selectIGSerialize(ref topIGFeatures, ref data);
             *
             * //load tr3
             * data = ThePipeline.loadDataSetFile("TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS.libsvm.arff");
             *
             * //filter top IG
             * data = WekaCommonMethods.useRemoveFilter(data, topIGFeatures, true);
             *
             * //after filtering last feature needs to be the class
             * //if (data.classIndex() == -1)
             * //    data.setClassIndex(data.numAttributes() - 1);
             *
             * //save filtered tr3 to a file
             * WekaCommonFileOperation.SaveArff(data, GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS_filteredIG");
             *
             * // --- from this point the training phase begins --- //
             * ThePipeline.trainSMO(data);
             *
             * return true;*/
        }
Exemple #5
0
        /// <summary>
        /// Counts how many samples belong to each condition label and logs the result as
        /// "label : count, " pairs. Assumes labels are 1-based and never exceed
        /// Preferences.Instance.numberOfConditions — TODO confirm against the protocol.
        /// </summary>
        public static void labelStats(libSVM_ExtendedProblem trainingProblem)
        {
            int[] stats = new int[Preferences.Instance.numberOfConditions];
            for (int i = 0; i < trainingProblem.labels.Length; i++)
            {
                // Labels are 1-based, so bucket 0 counts label 1.
                stats[(int)trainingProblem.labels[i] - 1]++;
            }

            // Build the log line with a StringBuilder instead of O(n^2) string concatenation.
            var line = new System.Text.StringBuilder();

            for (int i = 0; i < stats.Length; i++)
            {
                line.Append(GuiPreferences.Instance.getLabel(i)).Append(" : ").Append(stats[i]).Append(", ");
            }

            GuiPreferences.Instance.setLog(line.ToString());
        }
Exemple #6
0
        /// <summary>
        /// Splits the problem chronologically: the first NudTrainTestSplit fraction of samples
        /// becomes the training set; samples after a 10-sample gap become the testing set.
        /// </summary>
        /// <param name="problem">source problem to split</param>
        /// <param name="train_problem">receives the leading trainSize samples</param>
        /// <param name="test_problem">receives the samples from trainSize + 10 onwards</param>
        /// <returns>the original indices of the samples placed in the testing set</returns>
        public static int[] ChronologicalSplitTrainTest(libSVM_ExtendedProblem problem, ref libSVM_ExtendedProblem train_problem, ref libSVM_ExtendedProblem test_problem)
        {
            GuiPreferences.Instance.setLog("Chronological Training/Testing Split");

            int total     = problem.samples.Length;
            int trainSize = (int)Math.Floor(total * GuiPreferences.Instance.NudTrainTestSplit);

            // 10-sample gap between training and testing — presumably to reduce temporal
            // leakage between adjacent fMRI volumes; TODO confirm.
            int testStart = trainSize + 10;
            int testSize  = Math.Max(0, total - testStart);

            // Preallocate once instead of Array.Resize on every iteration (was O(n^2) copying).
            // Minimum length 1 preserves the original behavior when a partition is empty.
            train_problem.labels  = new double[Math.Max(1, trainSize)];
            train_problem.samples = new SortedDictionary <int, double> [Math.Max(1, trainSize)];

            test_problem.labels  = new double[Math.Max(1, testSize)];
            test_problem.samples = new SortedDictionary <int, double> [Math.Max(1, testSize)];
            int[] test_indices = new int[Math.Max(1, testSize)];

            for (int i = 0; i < trainSize; i++)
            {
                train_problem.labels[i]  = problem.labels[i];
                train_problem.samples[i] = problem.samples[i];
            }

            for (int i = testStart; i < total; i++)
            {
                int j = i - testStart;
                test_problem.labels[j]  = problem.labels[i];
                test_problem.samples[j] = problem.samples[i];
                test_indices[j]         = i;
            }

            return test_indices;
        }
Exemple #7
0
        /// <summary>
        /// Divides the training and testing set: every 10th sample (index divisible by 10) is
        /// assigned to the testing set, the rest go to the training set.
        /// </summary>
        /// <param name="problem">source problem to split</param>
        /// <param name="train_problem">receives the samples not selected for testing</param>
        /// <param name="test_problem">receives every 10th sample</param>
        /// <returns>the original indices of the samples placed in the testing set</returns>
        private static int[] PrecentileSplitTrainTest(libSVM_ExtendedProblem problem, ref libSVM_ExtendedProblem train_problem, ref libSVM_ExtendedProblem test_problem)
        {
            GuiPreferences.Instance.setLog("Training/Testing Split is conceptually wrong, it will not evenly take 90% of the data");
            GuiPreferences.Instance.setLog("out of the training test 75% can be oen class and 25% of the rest. we need to balance this somehow.");

            int total      = problem.samples.Length;
            int precentage = 10;

            // Indices with i % 10 == 0 go to the test set: that is ceil(total / 10) samples.
            int testSize  = (total + precentage - 1) / precentage;
            int trainSize = total - testSize;

            // Preallocate once instead of Array.Resize on every iteration (was O(n^2) copying).
            // Minimum length 1 preserves the original behavior on empty input.
            train_problem.labels  = new double[Math.Max(1, trainSize)];
            train_problem.samples = new SortedDictionary <int, double> [Math.Max(1, trainSize)];

            test_problem.labels  = new double[Math.Max(1, testSize)];
            test_problem.samples = new SortedDictionary <int, double> [Math.Max(1, testSize)];
            int[] test_indices = new int[Math.Max(1, testSize)];

            int j = 0;
            int k = 0;

            for (int i = 0; i < total; i++)
            {
                if (i % precentage == 0)
                {
                    test_problem.labels[j]  = problem.labels[i];
                    test_problem.samples[j] = problem.samples[i];
                    test_indices[j]         = i;
                    j++;
                }
                else
                {
                    train_problem.labels[k]  = problem.labels[i];
                    train_problem.samples[k] = problem.samples[i];
                    k++;
                }
            }
            return test_indices;
        }
Exemple #8
0
        /// <summary>
        /// Saves the problem to a libsvm file in the work directory, separates it into per-TR
        /// files (tr3/tr4, optionally tr5/tr6 depending on the python script), normalizes them,
        /// and converts the resulting libsvm files into ARFF.
        /// </summary>
        public static void VectorizeAndNormalize(libSVM_ExtendedProblem problem)
        {
            string trainFileName = GuiPreferences.Instance.WorkDirectory + "TrainSet";

            // 80*80*32 + 1 (= 204801) is passed as the maximum feature index for the export.
            //TODO: add proper names to saved files; check whether a null problem is possible here at all.
            problem.Save(trainFileName + ".libsvm", 80 * 80 * 32 + 1);
            GuiPreferences.Instance.setLog("saved Original Problem LibSVM file: " + trainFileName + ".libsvm");

            // Separate the data set into the 3rd/4th (and possibly 5th/6th) TR files.
            // example: ExecuteSelectKthVectorScript(@"TrainSet", @"H:\My_Dropbox\VERE\MRI_data\Tirosh\20120508.Rapid+NullClass.day2\4\rtp\");
            KthExtractionManager.ExecuteSelectKthVectorScript("TrainSet", GuiPreferences.Instance.WorkDirectory);

            // Normalize every TR file, then convert them all to ARFF.
            NormalizationManager.ScaleTrFiles(GuiPreferences.Instance.WorkDirectory);
            WekaCommonFileOperation.ConvertToArff(GuiPreferences.Instance.WorkDirectory);
        }
Exemple #9
0
        /// <summary>
        /// Returns a new problem containing only the samples whose label matches one of the two
        /// classes currently selected in the GUI (combo indices are 0-based, labels 1-based).
        /// </summary>
        public static libSVM_ExtendedProblem getBinaryClases(libSVM_ExtendedProblem trainingProblem)
        {
            libSVM_ExtendedProblem _trainingProblem = new libSVM_ExtendedProblem();

            // Hoist the selected class labels out of the loops.
            double class1 = GuiPreferences.Instance.CmbClass1Selected + 1;
            double class2 = GuiPreferences.Instance.CmbClass2Selected + 1;

            // First pass: count matches so the arrays are allocated exactly once
            // (was Array.Resize per match, i.e. O(n^2) copying).
            int count = 0;
            for (int i = 0; i < trainingProblem.labels.Length; i++)
            {
                if ((trainingProblem.labels[i] == class1) || (trainingProblem.labels[i] == class2))
                {
                    count++;
                }
            }

            // Minimum length 1 preserves the original behavior when nothing matches.
            _trainingProblem.labels  = new double[Math.Max(1, count)];
            _trainingProblem.samples = new SortedDictionary <int, double> [Math.Max(1, count)];

            // Second pass: copy the matching samples and labels.
            int j = 0;
            for (int i = 0; i < trainingProblem.labels.Length; i++)
            {
                if ((trainingProblem.labels[i] == class1) || (trainingProblem.labels[i] == class2))
                {
                    _trainingProblem.labels[j]  = trainingProblem.labels[i];
                    _trainingProblem.samples[j] = trainingProblem.samples[i];
                    j++;
                }
            }
            return _trainingProblem;
        }
Exemple #10
0
        /// <summary>
        /// Full offline Weka pipeline for an unprocessed problem: exports it to a libsvm file,
        /// separates it into per-TR files, scales them, converts them to ARFF, selects the top
        /// information-gain features from TR4, filters the training TR (NudClassifyUsingTR) with
        /// those features, saves the filtered set, and returns it as Weka Instances.
        /// Returns null (after logging) when the problem has no samples.
        /// </summary>
        public static Instances WekaPipeline_Unprocessed(libSVM_ExtendedProblem _trialProblem)
        {
            //export to libsvm file
            if (_trialProblem.samples == null)
            {
                GuiPreferences.Instance.setLog("Export Failed: Problem has no samples!");
                return(null);
            }

            string trainFileName = GuiPreferences.Instance.WorkDirectory /*+ GuiPreferences.Instance.FileName*/ + "TrainSet";


            //todo add proper named to saved files, check if null is logical at all.
            //NOTE(review): samples were already null-checked above, so this condition is always true.
            if ((_trialProblem.samples != null))
            {
                _trialProblem.Save(trainFileName + ".libsvm");
                GuiPreferences.Instance.setLog("saved Original Problem LibSVM file: " + trainFileName + ".libsvm");
            }

            //separate DS to 3rd and 4th TR
            ////example: ExecuteSelectKthVectorScript(@"TrainSet", @"H:\My_Dropbox\VERE\MRI_data\Tirosh\20120508.Rapid+NullClass.day2\4\rtp\");
            KthExtractionManager.ExecuteSelectKthVectorScript(/*GuiPreferences.Instance.FileName +*/ "TrainSet", GuiPreferences.Instance.WorkDirectory);
            GuiPreferences.Instance.setLog("Created TR3 & TR4 files");

            //normalize 3rd and 4th TR files.
            NormalizationManager.ScaleTrFiles(GuiPreferences.Instance.WorkDirectory);
            GuiPreferences.Instance.setLog("Normalized TR3 & TR4 files");

            //convert tr4 and tr3 to arff + REMOVE 204801 FAKE FEATURE, THAT WAS PLACES TO MAKE SURE WE GET 204800 FEATURES IN THE ARFF FILE.
            if (WekaCommonFileOperation.ConvertLIBSVM2ARFF(GuiPreferences.Instance.WorkDirectory + "TrainSet_3th_vectors_scaledCS.libsvm", 204800))
            {
                GuiPreferences.Instance.setLog("Converted to ARFF: TrainSet_3th_vectors_scaledCS.libsvm");
            }
            if (WekaCommonFileOperation.ConvertLIBSVM2ARFF(GuiPreferences.Instance.WorkDirectory + "TrainSet_4th_vectors_scaledCS.libsvm", 204800))
            {
                GuiPreferences.Instance.setLog("Converted to ARFF: TrainSet_4th_vectors_scaledCS.libsvm");
            }

            //---------------------------------- filter tr3 based on top 1000 from tr4 (the trick) -----------------------------
            //load TR4
            ConverterUtils.DataSource source = new ConverterUtils.DataSource(GuiPreferences.Instance.WorkDirectory + "TrainSet_4th_vectors_scaledCS.libsvm.arff");
            Instances data = source.getDataSet();

            //assign last as index.
            if (data.classIndex() == -1)
            {
                data.setClassIndex(data.numAttributes() - 1);
            }

            //if class not nominal, convert to
            if (!data.classAttribute().isNominal())
            {
                var filter = new weka.filters.unsupervised.attribute.NumericToNominal();

                filter.setOptions(weka.core.Utils.splitOptions("-R last"));
                //filter.setAttributeIndices("last");
                filter.setInputFormat(data);
                data = Filter.useFilter(data, filter);
            }

            //run ig and get top 1000 or up to 1000 bigger than zero, from tr4
            WekaTrainingMethods.useLowLevelInformationGainFeatureSelection(data);

            //selected attribute indices are read back from the shared attribute-selection object
            TrainingTesting_SharedVariables._trainTopIGFeatures = Preferences.Instance.attsel.selectedAttributes();

            //this should be done ONCE
            Preferences.Instance.fastvector = RealTimeProcessing.CreateFastVector(TrainingTesting_SharedVariables._trainTopIGFeatures.Length);
            GuiPreferences.Instance.setLog("created fast vector of length " + TrainingTesting_SharedVariables._trainTopIGFeatures.Length.ToString());

            //serialize (save) topIG indices to file.
            XMLSerializer.serializeArrayToFile(GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS_filteredIG_indices.xml", TrainingTesting_SharedVariables._trainTopIGFeatures);
            GuiPreferences.Instance.setLog("saved IG indices to a file (in the same order as IG gave it)");
            //int [] _trainTopIGFeatures_loaded = DeserializeArrayToFile(GuiPreferences.Instance.WorkDirectory + "TrainSet_3th_vectors_scaledCS_filteredIG_indices.xml");

            GuiPreferences.Instance.setLog(TrainingTesting_SharedVariables._trainTopIGFeatures.Length.ToString() + " features above zero value selected (including the Class feature)");

            //load tr3 (the TR selected for classification in the GUI)
            source = new ConverterUtils.DataSource(GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS.libsvm.arff");
            data   = source.getDataSet();

            //filter top IG
            data = WekaTrainingMethods.useRemoveFilter(data, TrainingTesting_SharedVariables._trainTopIGFeatures, true);

            //after filtering last feature needs to be the class
            if (data.classIndex() == -1)
            {
                data.setClassIndex(data.numAttributes() - 1);
            }

            //save filtered to a file
            WekaCommonFileOperation.SaveLIBSVM(data, GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS_filteredIG");

            return(data);
        }
Exemple #11
0
 /// <summary>
 /// Wraps the given problem: refreshes its maximum feature index and caches its last index.
 /// </summary>
 public JobsQueue(libSVM_ExtendedProblem problem)
 {
     _problem = problem;
     _problem.UpdateMaximumIndex();
     lastIndex = _problem.lastIndex;
 }
Exemple #12
0
 /// <summary>
 /// Releases the reference to the input problem so it can be garbage collected.
 /// </summary>
 public void clearInput()
 {
     _iproblem = null;
 }
Exemple #13
0
 /// <summary>
 /// Sets the input problem and caches its last (maximum) index.
 /// </summary>
 public void setProblem(libSVM_ExtendedProblem problem)
 {
     _iproblem  = problem;
     _lastIndex = _iproblem.GetLastIndex();
 }
Exemple #14
0
        /////////////////////////////////////////////////////////////////////////////////////////
        /// <summary>
        /// Initializes all runtime state of the pipeline: UDP endpoints, named pipes,
        /// directory-monitor paths, protocol/import flags, the list of intermediate files to
        /// delete between runs, and (optionally) the Matlab GLM engine.
        /// </summary>
        private void initVariables()
        {
            udp = new UDP();

            udpsim = new UDPSim();

            //holds min max values which prevents the min from being zeroed out after the threhold filter.
            MinMax    = new List <Dictionary <int, MinMax> >();
            BrainMean = new List <string>();                                                              //checking for global drift before and after IG filter
            TrainingEventStartLocationsPerTR = new Dictionary <int, Dictionary <string, List <int> > >(); //run, tr, list of indices
            validMaxBrainAverage             = 1000;
            corruptedVector = false;
            PipeServerName  = "OBL"; //in here the client uses OBL pipe to communicate to the server, if a communication is needed from server to client, i believe we should use another pipe - check this.
            PipeClientName  = "Unity";

            //NOTE(review): the client is constructed with PipeServerName rather than PipeClientName — confirm this is intentional.
            pipeServer = new NamedPipeServer(PipeServerName);
            pipeClient = new NamedPipeClient(PipeServerName);

            cumulativeTR        = 0;
            nextEvent           = 0;
            nextTRinEvent       = 0;
            currentRunningEvent = "";

            // DirMonitor variables
            //===========================
            masterPath        = "/My_Dropbox/VERE/MRI_data/Nir/";
            pathName          = masterPath + "NIR_run1_rtp/";
            dicomMasterPath   = masterPath + "NIR_run1_dcm_master/";
            dicomTbvWatchPath = masterPath + "NIR_run1_dcm_watch/";

            //configuration - 1b - protocol
            events         = new Events(new List <IntIntStr>());
            protocolLoaded = false;

            //configuration - 3 - import, radio buttons binary file types
            svmLoaded       = false;
            ProblemOriginal = new libSVM_ExtendedProblem();
            svmWrapper      = new libSVM_Extension();

            //intermediate files removed between runs (every format of the 3rd-6th TR variants)
            deleteFiles = new string[]
            {
                "TrainSet.libsvm",
                "TrainSet_3th_vectors.libsvm",
                "TrainSet_3th_vectors_scale_paramCS.libsvm",
                "TrainSet_3th_vectors_scaledCS.libsvm",
                "TrainSet_3th_vectors_scaledCS.libsvm.arff",
                "TrainSet_3th_vectors_scaledCS_filteredIG.arff",
                "TrainSet_3th_vectors_scaledCS_filteredIG.model",
                "TrainSet_3th_vectors_scaledCS_filteredIG_indices.xml",
                "TrainSet_4th_vectors.libsvm",
                "TrainSet_4th_vectors_scale_paramCS.libsvm",
                "TrainSet_4th_vectors_scaledCS.libsvm",
                "TrainSet_4th_vectors_scaledCS.libsvm.arff",
                "TrainSet_5th_vectors.libsvm",
                "TrainSet_5th_vectors_scale_paramCS.libsvm",
                "TrainSet_5th_vectors_scaledCS.libsvm",
                "TrainSet_5th_vectors_scaledCS.libsvm.arff",
                "TrainSet_6th_vectors.libsvm",
                "TrainSet_6th_vectors_scale_paramCS.libsvm",
                "TrainSet_6th_vectors_scaledCS.libsvm",
                "TrainSet_6th_vectors_scaledCS.libsvm.arff"
            };

            //memory capcity test, each array up to 2gb!!

            /*try
             * {
             *  int[] ar = new int[1];
             *  int[] ar2 = new int[1];
             *  for (int i = 1; i < 100000; i++)
             *  {
             *      Array.Resize(ref ar, ar.Length + 100000000);
             *      Array.Resize(ref ar2, ar.Length + 100000000);
             *      long l = PublicMethods.getRam();
             *  }
             * }
             * catch (Exception e)
             * {
             *  long l = PublicMethods.getRam();
             *  string s = e.ToString();
             * }*/


            classification = new List <string[]>();

            //7-plots: Matlab GLM initialization is currently disabled (caused problems on the MRI pc);
            //the catch keeps the application usable when Matlab is missing.
            try
            {
                //caused problems in mri's pc
                //glm = new MatlabGLM();
            }
            catch (Exception e)
            {
                MessageBox.Show(
                    "Warning: Matlab Initialization failed, please install matlab 2011a 64bit! (it is SAFE to CONTINUE!)" + e.ToString(),
                    "Warning", MessageBoxButtons.OK, MessageBoxIcon.Information,
                    MessageBoxDefaultButton.Button2);
            }
        }
Exemple #15
0
        /// <summary>
        /// When the train button is pushed: chooses the Weka or the libSVM C# wrapper pipeline,
        /// optionally keeps only the two selected binary classes, logs label statistics, and
        /// dispatches to the selected training scheme (chronological 90/10 split, cross
        /// validation, grid search, or RFE — not implemented).
        /// </summary>
        public static void Train()
        {
            //NOTE(review): explicit GC.Collect() is generally discouraged — presumably reclaiming memory from a previous run; confirm it is still needed.
            GC.Collect();
            if (TrainingTesting_SharedVariables._trialProblem != null)
            {
                if (TrainingTesting_SharedVariables._trialProblem.samples != null)
                {
                    //temprarily the weka pipeline is the first thing we want to do, as it doesnt need all the C# wrapper functions and configuration
                    if (GuiPreferences.Instance.TrainType == TrainingType.Weka)
                    {
                        if (GuiPreferences.Instance.CbSVMChecked)
                        {
                            GuiPreferences.Instance.setLog("SVM C# Wrapper Training..");
                            SVMWrapperTrainingPipeline();
                        }
                        else if (GuiPreferences.Instance.CbSMOChecked)
                        {
                            GuiPreferences.Instance.setLog("SMO Training..");
                            WekaTrainingPipeline(TrainingTesting_SharedVariables._trialWekaData);
                        }

                        //Weka path handled entirely above — skip the libSVM wrapper flow.
                        return;
                    }

                    GuiPreferences.Instance.setLog("TODO: if any models were loaded we ignore then as they would be replaced by training again.\n" +
                                                   "actually this should only be for rfe/grid/90-10, as cfv doesnt provide a model.");
                    Preferences.Instance.modelLoaded = false;

                    TrainingTesting_SharedVariables.training = new LibSVM_TrainTest();
                    TrainingTesting_SharedVariables.training.setSvmType();

                    GuiPreferences.Instance.setLog("NOTE: we shouldnt balance labels count. it should be balanced in the protocol");

                    //if not multiclass, keep only the two binary classes
                    if (!GuiPreferences.Instance.CbMultiClassChecked)
                    {
                        TrainingTesting_SharedVariables._trainingProblem = Training_MultiRunProcessing.getBinaryClases(TrainingTesting_SharedVariables._trialProblem);
                        GuiPreferences.Instance.setLog("Using only Binary Classes");
                    }
                    else
                    {
                        TrainingTesting_SharedVariables._trainingProblem = TrainingTesting_SharedVariables._trialProblem;
                    }

                    // show some statistics regarding the labels
                    Training_MultiRunProcessing.labelStats(TrainingTesting_SharedVariables._trainingProblem);

                    if (GuiPreferences.Instance.TrainType == TrainingType.TrainTestSplit)
                    {
                        GuiPreferences.Instance.setLog("Starting Training, splitting folds 90%/10%.");
                        libSVM_ExtendedProblem train_problem = new libSVM_ExtendedProblem();
                        libSVM_ExtendedProblem test_problem  = new libSVM_ExtendedProblem();
                        int[] test_indices = ChronologicalSplitTrainTest(TrainingTesting_SharedVariables._trainingProblem, ref train_problem, ref test_problem);
                        TrainingTesting_SharedVariables.training.TrainTestSplit(train_problem, test_problem, test_indices);
                        GuiPreferences.Instance.setLog("Finished Training.");
                    }
                    else if (GuiPreferences.Instance.TrainType == TrainingType.CrossValidation)
                    {
                        GuiPreferences.Instance.setLog("Starting Cross Validation, using " + GuiPreferences.Instance.NudCVFolds.ToString() + " folds.");
                        TrainingTesting_SharedVariables.training.TrainFolds((int)GuiPreferences.Instance.NudCVFolds, TrainingTesting_SharedVariables._trainingProblem, LibSVM_TrainTest.trainingType.cfv);
                        GuiPreferences.Instance.setLog("Finished Cross Validation.");
                    }
                    else if (GuiPreferences.Instance.TrainType == TrainingType.GridSearch)
                    {
                        GuiPreferences.Instance.setLog("Starting Grid Search, using " + GuiPreferences.Instance.NudGridFolds.ToString() + " folds.");
                        TrainingTesting_SharedVariables.training.TrainFolds((int)GuiPreferences.Instance.NudGridFolds, TrainingTesting_SharedVariables._trainingProblem, LibSVM_TrainTest.trainingType.bestTotal);
                        GuiPreferences.Instance.setLog("Finished Grid Search.");
                    }
                    else if (GuiPreferences.Instance.TrainType == TrainingType.RFE)
                    {
                        GuiPreferences.Instance.setLog("Not Implemented yet.");
                    }

                    ///copy model into main model variable (cross validation produces no model to keep)
                    if (GuiPreferences.Instance.TrainType != TrainingType.CrossValidation)
                    {
                        //training.
                        //NOTE(review): downcast assumes svmWrapper is-a libSVM; it will throw InvalidCastException otherwise.
                        Preferences.Instance.svmModel = (libSVM)Preferences.Instance.svmWrapper;
                        GuiPreferences.Instance.setLog("Model saved into memory");
                    }
                }
                else
                {
                    GuiPreferences.Instance.setLog("Training Failed: Original(unprocessed) or Final Problem(processed) Samples empty!");
                }
            }
            else
            {
                GuiPreferences.Instance.setLog("Training Failed: Original(unprocessed) or Final Problem(processed) null!");
            }
        }
Exemple #16
0
 /// <summary>
 /// Releases the reference to the output problem so it can be garbage collected.
 /// </summary>
 public void clearOutput()
 {
     _oproblem = null;
 }
Exemple #17
0
        /// <summary>
        /// Turns the latest K raw input vectors into one beta sample for single-trial testing:
        /// copies the last K vectors from the input problem, runs the Matlab GLM beta
        /// calculation over the selected feature indices, and writes the resulting beta vector
        /// plus a back-tracked label into the output problem.
        /// </summary>
        /// <remarks>
        /// Fixed: removed a leftover debug scaffold (an empty "currentUDPVector > 165" block
        /// declaring an unused local) and dead commented-out lines. Behavior is otherwise
        /// unchanged. All failures are logged and swallowed so the real-time flow keeps running.
        /// </remarks>
        public void ProcessForSingleTest()
        {
            // Not enough vectors received yet to fill a K-sized window — nothing to process.
            if (Preferences.Instance.currentUDPVector < Preferences.Instance.KInPreviousVectors[currentLocationInFlow])
            {
                return;
            }
            try
            {
                //TODO: init only once, later processing add + 1 array resize
                Array.Resize(ref _oproblem.labels, _oproblem.labels.Count() + 1);
                Array.Resize(ref _oproblem.samples, _oproblem.samples.Count() + 1);

                //temp that holds the last K vectors.
                //TODO: its possible that we dont need to reassign a new K every processing but its safe to clean it.
                libSVM_ExtendedProblem _iproblemKvec = new libSVM_ExtendedProblem();
                _iproblemKvec.labels  = new double[K];
                _iproblemKvec.samples = new SortedDictionary <int, double> [K];

                int[] _indices = getIndices();

                //get inverse model once - NOT USED IN THIS CODE

                /*MWArray[] invModel = new MWArray[1];
                 * invModel = Preferences.Instance.glm.getInverseModel(1, (MWArray)K);
                 * MWNumericArray invmodel = (MWNumericArray)invModel[0];
                 * double[,] invModelDoubleArray = (double[,])invmodel.ToArray();*/


                //copy the last K vectors (and their labels) from the input problem
                int l = 0;
                for (int i = Preferences.Instance.currentUDPVector - K; i < Preferences.Instance.currentUDPVector; i++)
                {
                    _iproblemKvec.samples[l] = _iproblem.samples[i];
                    _iproblemKvec.labels[l]  = _iproblem.labels[i];
                    l++;
                }


                //copy each sparse dictionary into the dense matrix fed to the beta calculation
                //(dictionary keys are 1-based feature indices, hence key - 1)
                float[,] matrix = new float[K, Preferences.Instance.dim_x *Preferences.Instance.dim_y *Preferences.Instance.dim_z];

                int mx = 0;
                foreach (SortedDictionary <int, double> d in _iproblemKvec.samples)
                {
                    foreach (int key in d.Keys)
                    {
                        matrix[mx, key - 1] = (float)d[key];
                    }
                    mx++;
                }

                //beta function is calculating for all the samples matrix
                MWArray[] Betas = new MWArray[1];

                //calculate betas
                try
                {
                    Betas = Preferences.Instance.glm.Beta_calculation_indices(1, (MWNumericArray)_indices, (MWNumericArray)matrix, (MWNumericArray)K, (MWNumericArray)Preferences.Instance.glm.getInverseModel(1, (MWArray)K)[0]);
                }
                catch (Exception e)
                {
                    GuiPreferences.Instance.setLog(e.ToString());
                }

                //null out large intermediates as soon as possible to reduce memory pressure
                matrix = null;

                MWNumericArray betas = (MWNumericArray)Betas[0];

                Betas = null;

                double[,] betasArray = (double[, ])betas.ToArray();

                betas = null;

                //stitch back betas and labels
                for (int i = 0; i < _iproblemKvec.samples.Length - K + 1; i++)
                {
                    double[] vec = new double[_indices.Length];
                    int      key = 0;
                    for (int j = 0; j < _indices.Length; j++)
                    {
                        vec[key] = betasArray[i, j];
                        key++;
                    }

                    //convert to dictionary: zip the selected indices with the new beta values into a sorted dictionary
                    SortedDictionary <int, double> dic = new SortedDictionary <int, double>(_indices.Zip(vec, (k, v) => new { k, v }).ToDictionary(x => x.k, x => x.v));

                    _oproblem.samples[Preferences.Instance.currentUDPVector - K] = dic;

                    //a label of 1 appears to mean baseline: walk backwards to the last non-baseline event label — TODO confirm.
                    double _label    = _iproblem.labels[Preferences.Instance.currentUDPVector - 1];
                    int    iBackward = Preferences.Instance.currentUDPVector - 2;
                    while (_label == 1 && iBackward >= 0)
                    {
                        iBackward--;
                        if (iBackward >= 0)
                        {
                            _label = _iproblem.labels[iBackward];
                        }
                    }
                    if (_label == 1)
                    {
                        ///TODO: what do we do when the label is 1 and there is no more to go back [1,1,1,1,1,1,1,1,1,1,1,2]
                        ///should we remove this beta from the problem ?
                        ///anyway prediction results should not be sent to unity if label is 1. this is done outside of this function
                    }
                    _oproblem.labels[Preferences.Instance.currentUDPVector - K] = _label; //!!! if baseline take the event that happened before
                }
                _indices   = null;
                betasArray = null;
                _oproblem.UpdateMaximumIndex();
            }
            catch (Exception e)
            {
                GuiPreferences.Instance.setLog(e.ToString());
            }
        }