Example #1
        public static List<DataReading> FaceDataSlice(List<DataReading> data, SAMDataPoint sam)
        {
            var d = data.SkipWhile(x => x.timestamp < sam.timeOffset + FACE_LATENCY).TakeWhile(x => x.timestamp < sam.timeOffset + FACE_LATENCY + FACE_DURATION).ToList();

            if (d.Count == 0)
            {
                int j = 0; //debug hook: left in so a breakpoint can catch an empty slice
            }
            return d;
        }
Example #2
        public List<PredictionResult> DoStacking(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizeFormat = Normalize.OneMinusOne)
        {
            List<PredictionResult> classifiers = new List<PredictionResult>();
            //For each classifier run a crossvalidation and find the best params
            foreach (StdClassifier classifier in standardClassifiers)
            {
                List<PredictionResult> results = classifier.OldCrossValidate(feelingsmodel, 1, useIAPSratings, normalizeFormat);
                classifiers.Add(results.OrderByDescending(x => x.GetAverageFScore()).First()); //keep the result with the highest average F-score
            }

            List<List<double>> featureList = new List<List<double>>();
            //Create, for each data point, the list of answers from each classifier (these become the stacking features)
            for (int i = 0; i < samData.dataPoints.Count; i++)
            {
                List<double> featuresToDataPoint = new List<double>();
                foreach (PredictionResult classifier in classifiers)
                {
                    featuresToDataPoint.Add(classifier.guesses[i]);
                }
                featureList.Add(featuresToDataPoint);
            }
            //Split into nfold problems
            List<Tuple<SVMProblem, SVMProblem>> problems = featureList.GetCrossValidationSets<double>(samData, feelingsmodel, nFold, useIAPSratings);

            //Get correct results
            int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();

            List<PredictionResult> finalResults = new List<PredictionResult>();

            //Run for each parameter setting
            int cnt = 1;
            foreach (SVMParameter SVMpara in Parameters)
            {
                if (UpdateCallback != null)
                {
                    UpdateCallback(cnt++, Parameters.Count);
                }
                List<double> guesses = new List<double>();
                //model and predict each nfold
                foreach (Tuple<SVMProblem, SVMProblem> tupleProblem in problems)
                {
                    guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List<double> pres = CalculatePrecision(confus, numberOfLabels);
                List<double> recall = CalculateRecall(confus, numberOfLabels);
                List<double> fscore = CalculateFScore(pres, recall);
                PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, new List<Feature> { }, answers.ToList(), guesses.ConvertAll(x => (int)x));
                finalResults.Add(pR);
            }
            return finalResults;
        }
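A minimal usage sketch for DoStacking (hypothetical: "metaClassifier" stands for an instance of the class that defines DoStacking, and the feeling model and fold size are placeholders). It runs the stacking over the parameter grid and keeps the parameter set with the highest average F-score:

        //Hypothetical usage sketch: run stacking and pick the best-scoring parameter set
        List<PredictionResult> stackedResults = metaClassifier.DoStacking(SAMDataPoint.FeelingModel.Arousal3, 10);
        PredictionResult bestStacked = stackedResults.OrderByDescending(r => r.GetAverageFScore()).First();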
Example #3
        public double GetValue(SAMDataPoint sam)
        {
            if (dataReadings == null)
            {
                throw new Exception("ERROR: Du skal fandme lige bruge Feature.SetData() først!!");
            }

            if (!cachedResults.ContainsKey(sam))
            {
                cachedResults.Add(sam, featureCalculator(dataReadings, sam));
            }
            return cachedResults[sam];
        }
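Example #3 (and the near-identical Example #4 below) is a per-SAMDataPoint memoization: the value is computed once via featureCalculator and then served from cachedResults on later calls. A hedged usage sketch (the feature and samPoint names are placeholders, and Feature.SetData() is assumed to have been called beforehand, as the error message demands):

            //Hypothetical usage sketch: the second lookup is served from cachedResults, no recomputation
            double first = feature.GetValue(samPoint);
            double second = feature.GetValue(samPoint);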
Example #4
        public double GetValue(SAMDataPoint sam)
        {
            if (dataReadings == null)
            {
                throw new Exception("ERROR: Du skal fandme lige bruge Feature.SetData() først!!");
            }

            if (!cachedResults.ContainsKey(sam))
            {
                cachedResults.Add(sam, featureCalculator(dataReadings, sam));
            }
            return cachedResults[sam];
        }
Example #5
        public static int GetNumberOfLabels(SAMDataPoint.FeelingModel feelingsModel)
        {
            switch (feelingsModel)
            {
                case SAMDataPoint.FeelingModel.Arousal2High:
                case SAMDataPoint.FeelingModel.Arousal2Low:
                case SAMDataPoint.FeelingModel.Valence2High:
                case SAMDataPoint.FeelingModel.Valence2Low:
                    return 2;

                case SAMDataPoint.FeelingModel.Valence3:
                case SAMDataPoint.FeelingModel.Arousal3:
                    //case SAMDataPoint.FeelingModel.ValenceArousal3:
                    return 3;

                //case SAMDataPoint.FeelingModel.Valence9:
                //case SAMDataPoint.FeelingModel.Arousal9:
                //    return 9;

                default:
                    return 0;
            }
        }
Example #6
        public PredictionResult DoBoosting(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizeFormat = Normalize.OneMinusOne)
        {
            if (boostingOrder.Count != standardClassifiers.Count)
            {
                //If boostingOrder and standardClassifiers are not the same size, an out-of-bounds access is inevitable
                Log.LogMessage("The Boosting order list is not the same size as the number of classifiers, returning null");
                return null;
            }

            PredictionResult prevResult = null;
            for (int i = 0; i < boostingOrder.Count; i++)
            {
                if (i == 0)
                {
                    prevResult = FindBestFScorePrediction(standardClassifiers[boostingOrder[i]].CrossValidate(feelingsmodel, useIAPSratings, normalizeFormat));
                }
                else
                {
                    prevResult = FindBestFScorePrediction(standardClassifiers[boostingOrder[i]].CrossValidateWithBoosting(feelingsmodel, nFold, prevResult.guesses.ConvertAll(x => (double)x).ToArray(), useIAPSratings, normalizeFormat));
                }
            }
            return prevResult;
        }
Example #7
        /// <summary>
        /// Run cross-validation for the feature setup of this machine
        /// </summary>
        /// <param name="feelingsmodel">The SAM feeling model to classify against</param>
        /// <param name="nFold">Size of each cross-validation fold (1 gives leave-one-out)</param>
        /// <param name="useIAPSratings">Use IAPS ratings instead of the recorded SAM ratings as ground truth</param>
        /// <param name="normalizationType">Normalization applied to the feature values before training</param>
        public List<PredictionResult> OldCrossValidate(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List<PredictionResult> predictedResults = new List<PredictionResult>();
            //Split into crossvalidation parts
            List<Tuple<SVMProblem, SVMProblem>> problems = GetFeatureValues(features, samData).NormalizeFeatureList<double>(normalizationType).GetCrossValidationSets<double>(samData, feelingsmodel, nFold, useIAPSratings);
            //Get correct results
            int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
            int progressCounter = 0;
            bool postedOneClassError = false;
            if(answers.Distinct().Count() <= 1)
            {
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(answers.ToList().ConvertAll(x=>(double)x).ToArray(), answers, numberOfLabels);
                List<double> pres = CalculatePrecision(confus, numberOfLabels);
                List<double> recall = CalculateRecall(confus, numberOfLabels);
                List<double> fscore = CalculateFScore(pres, recall);
                PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), features, answers.ToList(), answers.ToList().ConvertAll(x => (int)x));
                predictedResults.Add(pR);
                progressCounter++;
                Log.LogMessage(ONLY_ONE_CLASS);
                Log.LogMessage("");
                return predictedResults;
            }
            foreach (SVMParameter SVMpara in Parameters)
            {
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
                List<double> guesses = new List<double>();
                //model and predict each nfold
                try
                {
                    foreach (Tuple<SVMProblem, SVMProblem> tupleProblem in problems)
                    {
                        SVMModel trainingModel = tupleProblem.Item1.Train(SVMpara);
                        if (trainingModel.ClassCount <= 1)
                        {
                            if (!postedOneClassError)
                            {
                                Log.LogMessage(ONLY_ONE_CLASS_IN_TRAINING);
                                postedOneClassError = true;
                            }
                            guesses.AddRange(tupleProblem.Item1.Y.ToList().Take(tupleProblem.Item2.Y.Count()).ToList());
                        }
                        else
                        {
                            double[] d = tupleProblem.Item2.Predict(trainingModel);
                            guesses.AddRange(d);
                        }
                    }
                }
                catch (Exception)
                {
                    //If training or prediction throws, mark every guess as invalid (-1)
                    for (int i = 0; i < samData.dataPoints.Count; i++)
                    {
                        guesses.Add(-1);
                    }
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List<double> pres = CalculatePrecision(confus, numberOfLabels);
                List<double> recall = CalculateRecall(confus, numberOfLabels);
                List<double> fscore = CalculateFScore(pres, recall);
                PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), guesses.ConvertAll(x => (int)x));
                predictedResults.Add(pR);
                progressCounter++;
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
            }

            return predictedResults;
        }
Example #8
 public void AddDataToPerson(string name, Book book, PredictionResult predictResult, SAMDataPoint.FeelingModel model)
 {
     Log.LogMessage("Writing results from " + name + " to excel files");
     foreach (Excel.Worksheet ws in books[book].Sheets)
     {
         if (ws.Name == name)
         {
             WriteResult(ws, predictResult, model);
         }
     }
 }
Example #9
        public PredictionResult DoVoting(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizeFormat = Normalize.OneMinusOne)
        {
            List<PredictionResult> classifiers = new List<PredictionResult>();
            //For each classifier run a crossvalidation and find the best params
            int prg = 0;
            foreach (StdClassifier classifier in standardClassifiers)
            {
                if (UpdateCallback != null)
                {
                    UpdateCallback(prg++, standardClassifiers.Count);
                }
                List<PredictionResult> results = classifier.OldCrossValidate(feelingsmodel, 1, useIAPSratings, normalizeFormat);
                classifiers.Add(results.OrderByDescending(x => x.GetAverageFScore()).First()); //keep the result with the highest average F-score
            }
            if (UpdateCallback != null)
            {
                UpdateCallback(standardClassifiers.Count, standardClassifiers.Count);
            }
            int labelCount = SAMData.GetNumberOfLabels(feelingsmodel);

            //Full list of indices
            List<int> counter = new List<int>();
            for (int k = 0; k < samData.dataPoints.Count(); k++)
            {
                counter.Add(k);
            }
            //Divide the indices into train/predict sets of size nFold
            List<List<int>> trainIndicies = new List<List<int>>();
            List<List<int>> predictIndicies = new List<List<int>>();
            for (int i = 0; i < samData.dataPoints.Count(); i += nFold)
            {
                var temp = counter.Skip(i).Take(nFold).ToList();
                predictIndicies.Add(temp);
                trainIndicies.Add(counter.Except(temp).ToList());
            }

            List<Dictionary<int, double>> weightedGuesses = new List<Dictionary<int, double>>();
            //Fill up weightedGuesses List
            for (int nGuesses = 0; nGuesses < samData.dataPoints.Count; nGuesses++)
            {
                Dictionary<int, double> tempGuess = new Dictionary<int, double>();
                for (int indexClass = 0; indexClass < labelCount; indexClass++)
                {
                    tempGuess.Add(indexClass, 0);
                }
                weightedGuesses.Add(tempGuess);
            }

            //Split classifiers
            for (int i = 0; i < trainIndicies.Count; i++)
            {
                foreach (PredictionResult predictResult in classifiers)
                {
                    double correct = 0;
                    //calculate weights
                    for (int trainingIndex = 0; trainingIndex < trainIndicies[i].Count; trainingIndex++)
                    {
                        if (predictResult.guesses[trainIndicies[i][trainingIndex]] == samData.dataPoints[trainIndicies[i][trainingIndex]].ToAVCoordinate(feelingsmodel))
                        {
                            correct++;
                        }
                    }

                    //Add weight from the trainingset to each of the guesses
                    weightedGuesses[i][predictResult.guesses[i]] += (correct / trainIndicies.Count);
                }
            }

            //Calculate final answers
            List<double> guesses = new List<double>();

            foreach (Dictionary<int, double> answer in weightedGuesses)
            {
                int tempKey = -1;
                double tempMax = -1;
                foreach (int key in answer.Keys)
                {
                    if (answer[key] > tempMax)
                    {
                        tempKey = key;
                        tempMax = answer[key];
                    }
                }
                guesses.Add(tempKey);
            }

            //Get correct results
            int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
            int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);

            //Calculate scoring results
            double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
            List<double> pres = CalculatePrecision(confus, numberOfLabels);
            List<double> recall = CalculateRecall(confus, numberOfLabels);
            List<double> fscore = CalculateFScore(pres, recall);
            return new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), new List<Feature> { }, answers.ToList(), guesses.ConvertAll(x => (int)x));
        }
Example #10
        public static List<Feature> GetFeatures(string machine, SAMDataPoint.FeelingModel feel)
        {
            bool valence = ((int)feel) < 3; //feeling models with enum value below 3 are treated as valence models

            if (valence)
            {
                if (machine == "HR") return HRValenceOptimizationFeatures;
                if (machine == "EEG") return EEGValenceOptimizationFeatures;
                if (machine == "FACE") return FACEValenceOptimizationFeatures;
            }
            else
            {
                if (machine == "GSR") return GSRArousalOptimizationFeatures;
                if (machine == "HR") return HRArousalOptimizationFeatures;
                if (machine == "EEG") return EEGArousalOptimizationFeatures;
                if (machine == "FACE") return FACEArousalOptimizationFeatures;
            }

            return null;
        }
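A hedged usage sketch for GetFeatures (the machine name and feeling model are placeholders); note the method returns null for machine names it does not know, so callers should check for that:

        //Hypothetical usage sketch: fetch the tuned feature list for the HR machine on an arousal model
        List<Feature> hrFeatures = GetFeatures("HR", SAMDataPoint.FeelingModel.Arousal3);
        if (hrFeatures == null)
        {
            Log.LogMessage("No optimization features defined for this machine/model combination");
        }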
Example #11
 public static List<DataReading> EEGDataSlice(List<DataReading> data, SAMDataPoint sam)
 {
     return data.SkipWhile(x => x.timestamp < sam.timeOffset + EEG_LATENCY).TakeWhile(x => x.timestamp < sam.timeOffset + EEG_LATENCY + EEG_DURATION).ToList();
 }
Example #12
        public List<PredictionResult> CrossValidate(SAMDataPoint.FeelingModel feelingsmodel, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List<PredictionResult> predictedResults = new List<PredictionResult>();
            //Build the complete SVM problem from the normalized feature values
            SVMProblem problems = GetFeatureValues(features, samData).NormalizeFeatureList<double>(normalizationType).CreateCompleteProblem(samData, feelingsmodel);

            //Get correct results
            int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
            int progressCounter = 0;
            if (answers.Distinct().Count() <= 1)
            {
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(answers.ToList().ConvertAll(x => (double)x).ToArray(), answers, numberOfLabels);
                List<double> pres = CalculatePrecision(confus, numberOfLabels);
                List<double> recall = CalculateRecall(confus, numberOfLabels);
                List<double> fscore = CalculateFScore(pres, recall);
                PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), features, answers.ToList(), answers.ToList().ConvertAll(x => (int)x));
                predictedResults.Add(pR);
                progressCounter++;
                Log.LogMessage(ONLY_ONE_CLASS);
                Log.LogMessage("");
                return predictedResults;
            }
            else if (problems.X.Count == 0)
            {
                Log.LogMessage("Empty problem in " + Name);
                return null;
            }
            foreach (SVMParameter SVMpara in Parameters)
            {
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
                double[] guesses = new double[samData.dataPoints.Count];
                //Leave-one-out cross-validation over the complete problem (one fold per data point)
                try
                {
                    problems.CrossValidation(SVMpara, samData.dataPoints.Count, out guesses);
                }
                catch (Exception)
                {
                    //If cross-validation throws, mark every guess as invalid (-1)
                    for (int i = 0; i < samData.dataPoints.Count; i++)
                    {
                        guesses[i] = -1;
                    }
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List<double> pres = CalculatePrecision(confus, numberOfLabels);
                List<double> recall = CalculateRecall(confus, numberOfLabels);
                List<double> fscore = CalculateFScore(pres, recall);
                PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), Array.ConvertAll(guesses, (x=> (int)x)).ToList());
                predictedResults.Add(pR);
                progressCounter++;
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
            }

            return predictedResults;
        }
Example #13
 public static List<DataReading> FaceDataSlice(List<DataReading> data, SAMDataPoint sam)
 {
     var d = data.SkipWhile(x => x.timestamp < sam.timeOffset + FACE_LATENCY).TakeWhile(x => x.timestamp < sam.timeOffset + FACE_LATENCY + FACE_DURATION).ToList();
     if (d.Count == 0)
     {
         int j = 0; //debug hook: left in so a breakpoint can catch an empty slice
     }
     return d;
 }
Example #14
        private void WriteResult(Excel.Worksheet workSheet, PredictionResult pResult, SAMDataPoint.FeelingModel feelingModel)
        {
            int counter = 0;
            switch (feelingModel)
            {
                case SAMDataPoint.FeelingModel.Arousal2High:
                    counter = A2HighStart;
                    break;
                case SAMDataPoint.FeelingModel.Arousal2Low:
                    counter = A2LowStart;
                    break;
                case SAMDataPoint.FeelingModel.Arousal3:
                    counter = A3Start;
                    break;
                case SAMDataPoint.FeelingModel.Valence2High:
                    counter = V2HighStart;
                    break;
                case SAMDataPoint.FeelingModel.Valence2Low:
                    counter = V2LowStart;
                    break;
                case SAMDataPoint.FeelingModel.Valence3:
                    counter = V3Start;
                    break;
            }

            workSheet.Cells[counter, 3] = pResult.GetAccuracy();

            counter++;
            workSheet.Cells[counter, 3] = pResult.GetAverageFScore();

            for (int f = 0; f < pResult.fscores.Count; f++)
            {
                counter++;
                if (double.IsNaN(pResult.fscores[f]))
                {
                    workSheet.Cells[counter, 3] = "NaN";
                }
                else
                {
                    workSheet.Cells[counter, 3] = pResult.fscores[f];
                }
            }

            for (int p = 0; p < pResult.precisions.Count; p++)
            {
                counter++;
                if (double.IsNaN(pResult.precisions[p]))
                {
                    workSheet.Cells[counter, 3] = "NaN";
                }
                else
                {
                    workSheet.Cells[counter, 3] = pResult.precisions[p];
                }
            }

            for (int r = 0; r < pResult.recalls.Count; r++)
            {
                counter++;
                if (double.IsNaN(pResult.recalls[r]))
                {
                    workSheet.Cells[counter, 3] = "NaN";
                }
                else
                {
                    workSheet.Cells[counter, 3] = pResult.recalls[r];
                }
            }
            counter++;
            for (int i = 3; i < pResult.features.Count + 3; i++)
            {
                workSheet.Cells[counter, i] = pResult.features[i - 3].name;
            }

            counter++;
            workSheet.Cells[counter, 3] = pResult.svmParams.C;

            counter++;
            workSheet.Cells[counter, 3] = pResult.svmParams.Gamma;

            counter++;
            workSheet.Cells[counter, 3] = pResult.svmParams.Kernel;
        }
Example #15
        public List<PredictionResult> CrossValidateWithBoosting(SAMDataPoint.FeelingModel feelingsmodel, int nFold, double[] answersFromPrevious, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List<PredictionResult> predictedResults = new List<PredictionResult>();

            List<List<double>> tempFeatures = GetFeatureValues(features, samData);
            if (answersFromPrevious.Length != tempFeatures.Count)
            {
                //The guesses from the previous machine must match the number of data points in this machine, otherwise something is wrong
                Log.LogMessage("The number of guesses from the previous machine is not the same as the number of data points in this machine");
                return null;
            }
            //Normalize the features, append the previous machine's guess as an extra feature, and split into cross-validation parts
            tempFeatures = tempFeatures.NormalizeFeatureList<double>(normalizationType).ToList();
            for (int i = 0; i < tempFeatures.Count; i++)
            {
                tempFeatures[i].Add(answersFromPrevious[i]);
            }
            List<Tuple<SVMProblem, SVMProblem>> problems = tempFeatures.GetCrossValidationSets<double>(samData, feelingsmodel, nFold, useIAPSratings);

            //Get correct results
            int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();

            foreach (SVMParameter SVMpara in Parameters)
            {
                List<double> guesses = new List<double>();
                //model and predict each nfold
                foreach (Tuple<SVMProblem, SVMProblem> tupleProblem in problems)
                {
                    guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List<double> pres = CalculatePrecision(confus, numberOfLabels);
                List<double> recall = CalculateRecall(confus, numberOfLabels);
                List<double> fscore = CalculateFScore(pres, recall);
                PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), guesses.ConvertAll(x => (int)x).ToList());
                predictedResults.Add(pR);
            }
            return predictedResults;
        }
Example #16
        /// <summary>
        /// Run cross-validation for each combination of the features for this machine
        /// </summary>
        /// <param name="feelingsmodel">The SAM feeling model to classify against</param>
        /// <param name="nFold">Size of each cross-validation fold (1 gives leave-one-out)</param>
        /// <param name="useIAPSratings">Use IAPS ratings instead of the recorded SAM ratings as ground truth</param>
        /// <param name="normalizationType">Normalization applied to the feature values before training</param>
        public List<PredictionResult> CrossValidateCombinations(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List<List<bool>> combinations = CalculateCombinations(new List<bool>() { }, features.Count);

            //Get different combination of problems
            List<Tuple<List<Tuple<SVMProblem, SVMProblem>>, List<Feature>>> featureCombinationProblems = new List<Tuple<List<Tuple<SVMProblem, SVMProblem>>, List<Feature>>>();

            for (int i = 0; i < combinations.Count; i++)
            {
                List<Feature> tempFeatures = new List<Feature>();
                for (int j = 0; j < combinations[i].Count; j++)
                {
                    //Collect the features that are switched on in this combination
                    if (combinations[i][j])
                    {
                        tempFeatures.Add(features[j]);
                    }
                }
                featureCombinationProblems.Add
                                        (
                                            new Tuple<List<Tuple<SVMProblem, SVMProblem>>, List<Feature>>
                                            (
                                                GetFeatureValues(tempFeatures, samData).NormalizeFeatureList<double>(normalizationType).GetCrossValidationSets<double>(samData, feelingsmodel, nFold, useIAPSratings),
                                                tempFeatures
                                            )
                                         );
            }

            //Get correct results
            int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
            int progressCounter = 0;
            List<PredictionResult> predictionResults = new List<PredictionResult>();
            foreach (SVMParameter SVMpara in Parameters)
            {
                //For each feature setup
                for (int n = 0; n < featureCombinationProblems.Count; n++)
                {
                    if (UpdateCallback != null)
                    {
                        UpdateCallback(progressCounter, Parameters.Count * featureCombinationProblems.Count);
                    }
                    //PrintProgress(progressCounter, featureCombinationProblems.Count);
                    List<double> guesses = new List<double>();
                    //model and predict each nfold
                    foreach (var tupleProblem in featureCombinationProblems[n].Item1)
                    {
                        guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
                    }
                    int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                    //Calculate scoring results
                    double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                    List<double> pres = CalculatePrecision(confus, numberOfLabels);
                    List<double> recall = CalculateRecall(confus, numberOfLabels);
                    List<double> fscore = CalculateFScore(pres, recall);
                    PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, featureCombinationProblems[n].Item2, answers.ToList(), guesses.ConvertAll(x => (int)x));
                    predictionResults.Add(pR);
                    progressCounter++;
                }

            }
            if (UpdateCallback != null)
            {
                UpdateCallback(progressCounter, Parameters.Count * featureCombinationProblems.Count);
            }

            return predictionResults;
        }
Example #17
 Thread CreateMachineThread(string name, List<SVMParameter> pars, List<Feature> feats, SAMDataPoint.FeelingModel feels, Action<int, int> UpdateCallback, bool useControlSAM)
 {
     return new Thread(() =>
     {
         StdClassifier mac = new StdClassifier(name, pars, feats, samData);
         mac.UpdateCallback = UpdateCallback;
         var res = mac.OldCrossValidate(feels, 1, useControlSAM);
         SaveBestResult(res, mac.Name + "_" + feels);
     });
 }
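A hedged usage sketch for CreateMachineThread (the parameter and feature lists are placeholders; only members shown elsewhere on this page are assumed): build one thread per machine, start them, and wait for both to finish:

     //Hypothetical usage sketch: run the HR and EEG machines in parallel
     Thread hrThread = CreateMachineThread("HR", hrParameters, hrFeatures, SAMDataPoint.FeelingModel.Arousal3, (done, total) => Log.LogMessage(done + "/" + total), false);
     Thread eegThread = CreateMachineThread("EEG", eegParameters, eegFeatures, SAMDataPoint.FeelingModel.Arousal3, (done, total) => Log.LogMessage(done + "/" + total), false);
     hrThread.Start();
     eegThread.Start();
     hrThread.Join();
     eegThread.Join();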
Example #18
 public static List<DataReading> HRDataSlice(List<DataReading> data, SAMDataPoint sam)
 {
     return data.SkipWhile(x => x.timestamp < sam.timeOffset + HR_LATENCY).TakeWhile(x => x.timestamp < sam.timeOffset + HR_LATENCY + HR_DURATION).ToList();
 }