Example #1
        public List <PredictionResult> DoStacking(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizeFormat = Normalize.OneMinusOne)
        {
            List <PredictionResult> classifiers = new List <PredictionResult>();

            //For each classifier run a crossvalidation and find the best params
            foreach (StdClassifier classifier in standardClassifiers)
            {
                List <PredictionResult> results = classifier.OldCrossValidate(feelingsmodel, 1, useIAPSratings, normalizeFormat);
                //Keep the parameter setting with the highest average F-score for this classifier
                classifiers.Add(results.OrderByDescending(x => x.GetAverageFScore()).First());
            }


            List <List <double> > featureList = new List <List <double> >();

            //Create a List of list of answers from each machine
            for (int i = 0; i < samData.dataPoints.Count; i++)
            {
                List <double> featuresToDataPoint = new List <double>();
                foreach (PredictionResult classifier in classifiers)
                {
                    featuresToDataPoint.Add(classifier.guesses[i]);
                }
                featureList.Add(featuresToDataPoint);
            }
            //Split into nfold problems
            List <Tuple <SVMProblem, SVMProblem> > problems = featureList.GetCrossValidationSets <double>(samData, feelingsmodel, nFold, useIAPSratings);

            //Get correct results
            int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();

            List <PredictionResult> finalResults = new List <PredictionResult>();

            //Run for each parameter setting
            int cnt = 1;

            foreach (SVMParameter SVMpara in Parameters)
            {
                if (UpdateCallback != null)
                {
                    UpdateCallback(cnt++, Parameters.Count);
                }
                List <double> guesses = new List <double>();
                //model and predict each nfold
                foreach (Tuple <SVMProblem, SVMProblem> tupleProblem in problems)
                {
                    guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List <double>    pres   = CalculatePrecision(confus, numberOfLabels);
                List <double>    recall = CalculateRecall(confus, numberOfLabels);
                List <double>    fscore = CalculateFScore(pres, recall);
                PredictionResult pR     = new PredictionResult(confus, recall, pres, fscore, SVMpara, new List <Feature>(), answers.ToList(), guesses.ConvertAll(x => (int)x));
                finalResults.Add(pR);
            }
            return(finalResults);
        }
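
A minimal usage sketch for DoStacking, written as if it sits in the same class (so standardClassifiers and samData are already populated); the Arousal2High model and the 10-fold setting are placeholder choices, not values taken from the example.

        // Hypothetical caller: run the stacking crossvalidation and keep the strongest parameter setting
        List <PredictionResult> stackingResults = DoStacking(SAMDataPoint.FeelingModel.Arousal2High, 10);
        PredictionResult bestStacking = stackingResults
            .OrderByDescending(r => r.GetAverageFScore())
            .First();
        Log.LogMessage("Best stacking F-score: " + bestStacking.GetAverageFScore());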
Example #2
        public List <PredictionResult> CrossValidateWithBoosting(SAMDataPoint.FeelingModel feelingsmodel, int nFold, double[] answersFromPrevious, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List <PredictionResult> predictedResults = new List <PredictionResult>();

            List <List <double> > tempFeatures = GetFeatureValues(features, samData);

            if (answersFromPrevious.Length != tempFeatures.Count)
            {
                //answers from previous is not the same size as the current feature list, i.e. something is wrong
                Log.LogMessage("The number of guesses from the previous machine is not the same as the number of datapoints in this one");
                return(null);
            }
            //Normalize the features and append the previous machine's guess to each datapoint as an extra feature
            tempFeatures = tempFeatures.NormalizeFeatureList <double>(normalizationType).ToList();

            for (int i = 0; i < tempFeatures.Count; i++)
            {
                tempFeatures[i].Add(answersFromPrevious[i]);
            }
            //Split into crossvalidation parts
            List <Tuple <SVMProblem, SVMProblem> > problems = tempFeatures.GetCrossValidationSets <double>(samData, feelingsmodel, nFold, useIAPSratings);


            //Get correct results
            int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();

            foreach (SVMParameter SVMpara in Parameters)
            {
                List <double> guesses = new List <double>();
                //model and predict each nfold
                foreach (Tuple <SVMProblem, SVMProblem> tupleProblem in problems)
                {
                    guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List <double>    pres   = CalculatePrecision(confus, numberOfLabels);
                List <double>    recall = CalculateRecall(confus, numberOfLabels);
                List <double>    fscore = CalculateFScore(pres, recall);
                PredictionResult pR     = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), guesses.ConvertAll(x => (int)x).ToList());
                predictedResults.Add(pR);
            }
            return(predictedResults);
        }
Example #3
        public static List <Feature> GetFeatures(string machine, SAMDataPoint.FeelingModel feel)
        {
            // the valence models are assumed to occupy the first three values of the FeelingModel enum
            bool valence = ((int)feel) < 3;

            if (valence)
            {
                if (machine == "HR")
                {
                    return(HRValenceOptimizationFeatures);
                }
                if (machine == "EEG")
                {
                    return(EEGValenceOptimizationFeatures);
                }
                if (machine == "FACE")
                {
                    return(FACEValenceOptimizationFeatures);
                }
            }
            else
            {
                if (machine == "GSR")
                {
                    return(GSRArousalOptimizationFeatures);
                }
                if (machine == "HR")
                {
                    return(HRArousalOptimizationFeatures);
                }
                if (machine == "EEG")
                {
                    return(EEGArousalOptimizationFeatures);
                }
                if (machine == "FACE")
                {
                    return(FACEArousalOptimizationFeatures);
                }
            }

            return(null);
        }
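
A short sketch of a caller for GetFeatures, assuming the machine names used above ("HR", "EEG", "FACE", "GSR") and that Valence3 falls in the valence half of the enum; the null case still has to be handled.

        // Hypothetical lookup of the optimized feature set for one machine/model pair
        List <Feature> hrFeatures = GetFeatures("HR", SAMDataPoint.FeelingModel.Valence3);
        if (hrFeatures == null)
        {
            // e.g. "GSR" with a valence model falls through every branch above
            Log.LogMessage("No optimized feature set for this machine/model combination");
        }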
Example #4
        public static int GetNumberOfLabels(SAMDataPoint.FeelingModel feelingsModel)
        {
            switch (feelingsModel)
            {
            case SAMDataPoint.FeelingModel.Arousal2High:
            case SAMDataPoint.FeelingModel.Arousal2Low:
            case SAMDataPoint.FeelingModel.Valence2High:
            case SAMDataPoint.FeelingModel.Valence2Low:
                return(2);

            case SAMDataPoint.FeelingModel.Valence3:
            case SAMDataPoint.FeelingModel.Arousal3:
                //case SAMDataPoint.FeelingModel.ValenceArousal3:
                return(3);

            //case SAMDataPoint.FeelingModel.Valence9:
            //case SAMDataPoint.FeelingModel.Arousal9:
            //    return 9;

            default:
                return(0);
            }
        }
Example #5
        public PredictionResult DoBoosting(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizeFormat = Normalize.OneMinusOne)
        {
            if (boostingOrder.Count != standardClassifiers.Count)
            {
                //if boostingOrder and standardClassifiers are not the same size, an out-of-bounds access is inevitable
                Log.LogMessage("The Boosting order list is not the same as the number of classifiers, I'm giving you a null");
                return(null);
            }

            PredictionResult prevResult = null;

            for (int i = 0; i < boostingOrder.Count; i++)
            {
                if (i == 0)
                {
                    prevResult = FindBestFScorePrediction(standardClassifiers[boostingOrder[i]].CrossValidate(feelingsmodel, useIAPSratings, normalizeFormat));
                }
                else
                {
                    prevResult = FindBestFScorePrediction(standardClassifiers[boostingOrder[i]].CrossValidateWithBoosting(feelingsmodel, nFold, prevResult.guesses.ConvertAll(x => (double)x).ToArray(), useIAPSratings, normalizeFormat));
                }
            }
            return(prevResult);
        }
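
A usage sketch for DoBoosting, assuming boostingOrder is a field of the same class holding indices into standardClassifiers (the ordering 0, 1, 2 is purely illustrative):

        // Hypothetical setup: feed each machine's best guesses into the next machine in the chain
        boostingOrder = new List <int> { 0, 1, 2 };
        PredictionResult boosted = DoBoosting(SAMDataPoint.FeelingModel.Arousal3, 10);
        if (boosted != null)
        {
            Log.LogMessage("Boosted average F-score: " + boosted.GetAverageFScore());
        }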
Example #6
        public PredictionResult DoVoting(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizeFormat = Normalize.OneMinusOne)
        {
            List <PredictionResult> classifiers = new List <PredictionResult>();
            //For each classifier run a crossvalidation and find the best params
            int prg = 0;

            foreach (StdClassifier classifier in standardClassifiers)
            {
                if (UpdateCallback != null)
                {
                    UpdateCallback(prg++, standardClassifiers.Count);
                }
                List <PredictionResult> results = classifier.OldCrossValidate(feelingsmodel, 1, useIAPSratings, normalizeFormat);
                //Keep the parameter setting with the highest average F-score for this classifier
                classifiers.Add(results.OrderByDescending(x => x.GetAverageFScore()).First());
            }
            if (UpdateCallback != null)
            {
                UpdateCallback(standardClassifiers.Count, standardClassifiers.Count);
            }
            int labelCount = SAMData.GetNumberOfLabels(feelingsmodel);

            //Full list of indices
            List <int> counter = new List <int>();

            for (int k = 0; k < samData.dataPoints.Count(); k++)
            {
                counter.Add(k);
            }
            //Divide indices into correct nfold parts
            List <List <int> > trainIndicies   = new List <List <int> >();
            List <List <int> > predictIndicies = new List <List <int> >();

            for (int i = 0; i < samData.dataPoints.Count(); i += nFold)
            {
                var temp = counter.Skip(i).Take(nFold).ToList();
                predictIndicies.Add(temp);
                trainIndicies.Add(counter.Except(temp).ToList());
            }


            List <Dictionary <int, double> > weightedGuesses = new List <Dictionary <int, double> >();

            //Fill up weightedGuesses List
            for (int nGuesses = 0; nGuesses < samData.dataPoints.Count; nGuesses++)
            {
                Dictionary <int, double> tempGuess = new Dictionary <int, double>();
                for (int indexClass = 0; indexClass < labelCount; indexClass++)
                {
                    tempGuess.Add(indexClass, 0);
                }
                weightedGuesses.Add(tempGuess);
            }

            //Split classifiers
            for (int i = 0; i < trainIndicies.Count; i++)
            {
                foreach (PredictionResult predictResult in classifiers)
                {
                    double correct = 0;
                    //calculate weights
                    for (int trainingIndex = 0; trainingIndex < trainIndicies[i].Count; trainingIndex++)
                    {
                        if (predictResult.guesses[trainIndicies[i][trainingIndex]] == samData.dataPoints[trainIndicies[i][trainingIndex]].ToAVCoordinate(feelingsmodel))
                        {
                            correct++;
                        }
                    }

                    //Add weight from the trainingset to each of the guesses
                    weightedGuesses[i][predictResult.guesses[i]] += (correct / trainIndicies.Count);
                }
            }

            //Calculate final answers
            List <double> guesses = new List <double>();

            foreach (Dictionary <int, double> answer in weightedGuesses)
            {
                int    tempKey = -1;
                double tempMax = -1;
                foreach (int key in answer.Keys)
                {
                    if (answer[key] > tempMax)
                    {
                        tempKey = key;
                        tempMax = answer[key];
                    }
                }
                guesses.Add(tempKey);
            }


            //Get correct results
            int[] answers        = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
            int   numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);


            //Calculate scoring results
            double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
            List <double> pres   = CalculatePrecision(confus, numberOfLabels);
            List <double> recall = CalculateRecall(confus, numberOfLabels);
            List <double> fscore = CalculateFScore(pres, recall);

            return(new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), new List <Feature>(), answers.ToList(), guesses.ConvertAll(x => (int)x)));
        }
Example #7
        /// <summary>
        /// Item1 is the training set, Item2 is the prediction set.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="original"></param>
        /// <param name="nFold"></param>
        /// <returns>Returns null if the collection cannot be split evenly into folds of size nFold.</returns>
        public static List <Tuple <SVMProblem, SVMProblem> > GetCrossValidationSets <T>(this IEnumerable <List <double> > original, SAMData samData, SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool UseIAPSRatings = false)
        {
            //TODO: Needs to be tested, can't test before data can be loaded into the program
            List <Tuple <SVMProblem, SVMProblem> > allSets = new List <Tuple <SVMProblem, SVMProblem> >();

            if (original.Count() % nFold != 0)
            {
                return(null);
            }

            //Full list of indices
            List <int> counter = new List <int>();

            for (int k = 0; k < original.Count(); k++)
            {
                counter.Add(k);
            }
            //Divide indices into correct nfold parts
            List <List <int> > trainIndicies   = new List <List <int> >();
            List <List <int> > predictIndicies = new List <List <int> >();

            for (int i = 0; i < original.Count(); i += nFold)
            {
                var temp = counter.Skip(i).Take(nFold).ToList();
                predictIndicies.Add(temp);
                trainIndicies.Add(counter.Except(temp).ToList());
            }


            //Build one (training, prediction) problem pair per fold
            for (int j = 0; j < trainIndicies.Count; j++)
            {
                //Create training problem
                SVMProblem trainSVMProblem = new SVMProblem();
                //Foreach training index, add features to the problem
                foreach (int trainIndex in trainIndicies[j])
                {
                    SVMNode[] featureVector = new SVMNode[original.ElementAt(trainIndex).Count];
                    for (int w = 0; w < original.ElementAt(trainIndex).Count; w++)
                    {
                        featureVector[w] = new SVMNode(w + 1, original.ElementAt(trainIndex)[w]);
                    }
                    trainSVMProblem.Add(featureVector, samData.dataPoints[trainIndex].ToAVCoordinate(feelingsmodel, UseIAPSRatings));
                }


                //Create predict problem
                SVMProblem predictSVMProblem = new SVMProblem();
                //Foreach predict index, add features to the problem
                foreach (int predictIndex in predictIndicies[j])
                {
                    SVMNode[] featureVector = new SVMNode[original.ElementAt(predictIndex).Count];
                    for (int w = 0; w < original.ElementAt(predictIndex).Count; w++)
                    {
                        featureVector[w] = new SVMNode(w + 1, original.ElementAt(predictIndex)[w]);
                    }
                    predictSVMProblem.Add(featureVector, samData.dataPoints[predictIndex].ToAVCoordinate(feelingsmodel, UseIAPSRatings));
                }

                allSets.Add(new Tuple <SVMProblem, SVMProblem>(trainSVMProblem, predictSVMProblem));
            }

            return(allSets);
        }
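
The index bookkeeping above carves the data into consecutive blocks of nFold prediction points and trains on everything else. A small standalone sketch of that arithmetic, with twelve items and nFold = 3 chosen purely for illustration:

        // 12 items, nFold = 3  ->  4 folds; fold j predicts items 3j..3j+2 and trains on the remaining 9
        List <int> counter = Enumerable.Range(0, 12).ToList();
        for (int i = 0; i < counter.Count; i += 3)
        {
            List <int> predict = counter.Skip(i).Take(3).ToList();   // e.g. { 0, 1, 2 } for the first fold
            List <int> train   = counter.Except(predict).ToList();   // the nine indices left over
            Log.LogMessage("fold " + (i / 3) + " predicts { " + string.Join(", ", predict) + " }");
        }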
Example #8
        public static SVMProblem CreateCompleteProblem(this IEnumerable <List <double> > original, SAMData sam, SAMDataPoint.FeelingModel feelingModel)
        {
            SVMProblem completeProblem = new SVMProblem();

            for (int i = 0; i < original.Count(); i++)
            {
                SVMNode[] nodeSet = new SVMNode[original.ElementAt(i).Count];
                for (int j = 0; j < original.ElementAt(i).Count; j++)
                {
                    SVMNode currentNode = new SVMNode();
                    currentNode.Index = j + 1;
                    currentNode.Value = original.ElementAt(i)[j];
                    nodeSet[j]        = currentNode;
                }
                completeProblem.Add(nodeSet, sam.dataPoints[i].ToAVCoordinate(feelingModel));
            }

            return(completeProblem);
        }
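
A minimal sketch of the node layout CreateCompleteProblem builds, using the SVMNode(index, value) constructor seen in Example #7; the point to notice is that feature indices are 1-based, which is what LIBSVM expects.

        // One datapoint with three feature values becomes nodes with indices 1..3
        List <double> featureVector = new List <double> { 0.25, -0.5, 1.0 };   // illustrative values
        SVMNode[] nodeSet = new SVMNode[featureVector.Count];
        for (int j = 0; j < featureVector.Count; j++)
        {
            nodeSet[j] = new SVMNode(j + 1, featureVector[j]);   // 1-based index, as in the loop above
        }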
Example #9
        /// <summary>
        /// Run crossvalidation for each combination of the features for this machine
        /// </summary>
        /// <param name="feelingsmodel"></param>
        /// <param name="nFold"></param>
        /// <param name="useIAPSratings"></param>
        public List <PredictionResult> CrossValidateCombinations(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List <List <bool> > combinations = CalculateCombinations(new List <bool>(), features.Count);

            //Get different combination of problems
            List <Tuple <List <Tuple <SVMProblem, SVMProblem> >, List <Feature> > > featureCombinationProblems = new List <Tuple <List <Tuple <SVMProblem, SVMProblem> >, List <Feature> > >();

            for (int i = 0; i < combinations.Count; i++)
            {
                List <Feature> tempFeatures = new List <Feature>();
                for (int j = 0; j < combinations[i].Count; j++)
                {
                    // For each feature combination save the different problems for crossvalidation

                    if (combinations[i][j] == true)
                    {
                        tempFeatures.Add(features[j]);
                    }
                }
                featureCombinationProblems.Add
                (
                    new Tuple <List <Tuple <SVMProblem, SVMProblem> >, List <Feature> >
                    (
                        GetFeatureValues(tempFeatures, samData).NormalizeFeatureList <double>(normalizationType).GetCrossValidationSets <double>(samData, feelingsmodel, nFold, useIAPSratings),
                        tempFeatures
                    )
                );
            }

            //Get correct results
            int[] answers         = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
            int   progressCounter = 0;
            List <PredictionResult> predictionResults = new List <PredictionResult>();

            foreach (SVMParameter SVMpara in Parameters)
            {
                //For each feature setup
                for (int n = 0; n < featureCombinationProblems.Count; n++)
                {
                    if (UpdateCallback != null)
                    {
                        UpdateCallback(progressCounter, Parameters.Count * featureCombinationProblems.Count);
                    }
                    //PrintProgress(progressCounter, featureCombinationProblems.Count);
                    List <double> guesses = new List <double>();
                    //model and predict each nfold
                    foreach (var tupleProblem in featureCombinationProblems[n].Item1)
                    {
                        guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
                    }
                    int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                    //Calculate scoring results
                    double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                    List <double>    pres   = CalculatePrecision(confus, numberOfLabels);
                    List <double>    recall = CalculateRecall(confus, numberOfLabels);
                    List <double>    fscore = CalculateFScore(pres, recall);
                    PredictionResult pR     = new PredictionResult(confus, recall, pres, fscore, SVMpara, featureCombinationProblems[n].Item2, answers.ToList(), guesses.ConvertAll(x => (int)x));
                    predictionResults.Add(pR);
                    progressCounter++;
                }
            }
            if (UpdateCallback != null)
            {
                UpdateCallback(progressCounter, Parameters.Count * featureCombinationProblems.Count);
            }

            return(predictionResults);
        }
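
CalculateCombinations is not shown in these examples. A plausible stand-in is sketched below, purely as an assumption about its contract: it has to yield every true/false mask over the feature list so that the loop above can translate each mask into a feature subset.

        // Hypothetical stand-in: enumerate all 2^n boolean masks over n features
        private static List <List <bool> > EnumerateFeatureMasks(int featureCount)
        {
            List <List <bool> > masks = new List <List <bool> >();
            for (int bits = 0; bits < (1 << featureCount); bits++)
            {
                List <bool> mask = new List <bool>();
                for (int j = 0; j < featureCount; j++)
                {
                    mask.Add((bits & (1 << j)) != 0);
                }
                masks.Add(mask);
            }
            return masks;
        }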
Example #10
        public List <PredictionResult> CrossValidate(SAMDataPoint.FeelingModel feelingsmodel, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List <PredictionResult> predictedResults = new List <PredictionResult>();
            //Build one complete SVM problem from every datapoint's feature values
            SVMProblem problems = GetFeatureValues(features, samData).NormalizeFeatureList <double>(normalizationType).CreateCompleteProblem(samData, feelingsmodel);

            //Get correct results
            int[] answers         = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
            int   progressCounter = 0;

            if (answers.Distinct().Count() <= 1)
            {
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(answers.ToList().ConvertAll(x => (double)x).ToArray(), answers, numberOfLabels);
                List <double>    pres   = CalculatePrecision(confus, numberOfLabels);
                List <double>    recall = CalculateRecall(confus, numberOfLabels);
                List <double>    fscore = CalculateFScore(pres, recall);
                PredictionResult pR     = new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), features, answers.ToList(), answers.ToList().ConvertAll(x => (int)x));
                predictedResults.Add(pR);
                progressCounter++;
                Log.LogMessage(ONLY_ONE_CLASS);
                Log.LogMessage("");
                return(predictedResults);
            }
            else if (problems.X.Count == 0)
            {
                Log.LogMessage("Empty problem in " + Name);
                return(null);
            }
            foreach (SVMParameter SVMpara in Parameters)
            {
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
                double[] guesses = new double[samData.dataPoints.Count];
                //run leave-one-out crossvalidation over the complete problem (one fold per datapoint)
                try
                {
                    problems.CrossValidation(SVMpara, samData.dataPoints.Count, out guesses);
                }
                catch (Exception)
                {
                    //crossvalidation failed; mark every guess as invalid
                    for (int i = 0; i < samData.dataPoints.Count; i++)
                    {
                        guesses[i] = -1;
                    }
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List <double>    pres   = CalculatePrecision(confus, numberOfLabels);
                List <double>    recall = CalculateRecall(confus, numberOfLabels);
                List <double>    fscore = CalculateFScore(pres, recall);
                PredictionResult pR     = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), Array.ConvertAll(guesses, (x => (int)x)).ToList());
                predictedResults.Add(pR);
                progressCounter++;
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
            }

            return(predictedResults);
        }
Example #11
        /// <summary>
        /// Run crossvalidation for the feature setup for this machine
        /// </summary>
        /// <param name="feelingsmodel"></param>
        /// <param name="nFold"></param>
        /// <param name="useIAPSratings"></param>
        public List <PredictionResult> OldCrossValidate(SAMDataPoint.FeelingModel feelingsmodel, int nFold, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List <PredictionResult> predictedResults = new List <PredictionResult>();
            //Split into crossvalidation parts
            List <Tuple <SVMProblem, SVMProblem> > problems = GetFeatureValues(features, samData).NormalizeFeatureList <double>(normalizationType).GetCrossValidationSets <double>(samData, feelingsmodel, nFold, useIAPSratings);

            //Get correct results
            int[] answers             = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();
            int   progressCounter     = 0;
            bool  postedOneClassError = false;

            if (answers.Distinct().Count() <= 1)
            {
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(answers.ToList().ConvertAll(x => (double)x).ToArray(), answers, numberOfLabels);
                List <double>    pres   = CalculatePrecision(confus, numberOfLabels);
                List <double>    recall = CalculateRecall(confus, numberOfLabels);
                List <double>    fscore = CalculateFScore(pres, recall);
                PredictionResult pR     = new PredictionResult(confus, recall, pres, fscore, new SVMParameter(), features, answers.ToList(), answers.ToList().ConvertAll(x => (int)x));
                predictedResults.Add(pR);
                progressCounter++;
                Log.LogMessage(ONLY_ONE_CLASS);
                Log.LogMessage("");
                return(predictedResults);
            }
            foreach (SVMParameter SVMpara in Parameters)
            {
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
                List <double> guesses = new List <double>();
                //model and predict each nfold
                try
                {
                    foreach (Tuple <SVMProblem, SVMProblem> tupleProblem in problems)
                    {
                        SVMModel trainingModel = tupleProblem.Item1.Train(SVMpara);
                        if (trainingModel.ClassCount <= 1)
                        {
                            if (!postedOneClassError)
                            {
                                Log.LogMessage(ONLY_ONE_CLASS_IN_TRAINING);
                                postedOneClassError = true;
                            }
                            guesses.AddRange(tupleProblem.Item1.Y.ToList().Take(tupleProblem.Item2.Y.Count()).ToList());
                        }
                        else
                        {
                            double[] d = tupleProblem.Item2.Predict(trainingModel);
                            guesses.AddRange(d);
                        }
                    }
                }
                catch (Exception)
                {
                    //training or prediction failed; mark every guess as invalid
                    for (int i = 0; i < samData.dataPoints.Count; i++)
                    {
                        guesses.Add(-1);
                    }
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List <double>    pres   = CalculatePrecision(confus, numberOfLabels);
                List <double>    recall = CalculateRecall(confus, numberOfLabels);
                List <double>    fscore = CalculateFScore(pres, recall);
                PredictionResult pR     = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), guesses.ConvertAll(x => (int)x));
                predictedResults.Add(pR);
                progressCounter++;
                if (UpdateCallback != null)
                {
                    UpdateCallback(progressCounter, Parameters.Count);
                }
            }

            return(predictedResults);
        }
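
Example #5 selects from results like these with FindBestFScorePrediction, which is not shown here; a hedged sketch of what that selection presumably amounts to:

        // Hypothetical best-result selection over OldCrossValidate's output
        List <PredictionResult> cvResults = OldCrossValidate(SAMDataPoint.FeelingModel.Valence2High, 1);
        PredictionResult best = cvResults
            .OrderByDescending(r => r.GetAverageFScore())
            .First();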
Example #12
        private void WriteResult(Excel.Worksheet workSheet, PredictionResult pResult, SAMDataPoint.FeelingModel feelingModel)
        {
            int counter = 0;

            switch (feelingModel)
            {
            case SAMDataPoint.FeelingModel.Arousal2High:
                counter = A2HighStart;
                break;

            case SAMDataPoint.FeelingModel.Arousal2Low:
                counter = A2LowStart;
                break;

            case SAMDataPoint.FeelingModel.Arousal3:
                counter = A3Start;
                break;

            case SAMDataPoint.FeelingModel.Valence2High:
                counter = V2HighStart;
                break;

            case SAMDataPoint.FeelingModel.Valence2Low:
                counter = V2LowStart;
                break;

            case SAMDataPoint.FeelingModel.Valence3:
                counter = V3Start;
                break;
            }

            workSheet.Cells[counter, 3] = pResult.GetAccuracy();

            counter++;
            workSheet.Cells[counter, 3] = pResult.GetAverageFScore();

            for (int f = 0; f < pResult.fscores.Count; f++)
            {
                counter++;
                if (double.IsNaN(pResult.fscores[f]))
                {
                    workSheet.Cells[counter, 3] = "NaN";
                }
                else
                {
                    workSheet.Cells[counter, 3] = pResult.fscores[f];
                }
            }

            for (int p = 0; p < pResult.precisions.Count; p++)
            {
                counter++;
                if (double.IsNaN(pResult.precisions[p]))
                {
                    workSheet.Cells[counter, 3] = "NaN";
                }
                else
                {
                    workSheet.Cells[counter, 3] = pResult.precisions[p];
                }
            }

            for (int r = 0; r < pResult.recalls.Count; r++)
            {
                counter++;
                if (double.IsNaN(pResult.recalls[r]))
                {
                    workSheet.Cells[counter, 3] = "NaN";
                }
                else
                {
                    workSheet.Cells[counter, 3] = pResult.recalls[r];
                }
            }
            counter++;
            for (int i = 3; i < pResult.features.Count + 3; i++)
            {
                workSheet.Cells[counter, i] = pResult.features[i - 3].name;
            }


            counter++;
            workSheet.Cells[counter, 3] = pResult.svmParams.C;

            counter++;
            workSheet.Cells[counter, 3] = pResult.svmParams.Gamma;

            counter++;
            workSheet.Cells[counter, 3] = pResult.svmParams.Kernel;
        }
Example #13
 public void AddDataToPerson(string name, Book book, PredictionResult predictResult, SAMDataPoint.FeelingModel model)
 {
     Log.LogMessage("Writing results from " + name + " to excel files");
     foreach (Excel.Worksheet ws in books[book].Sheets)
     {
         if (ws.Name == name)
         {
             WriteResult(ws, predictResult, model);
         }
     }
 }