Example #1
        public List<PredictionResult> CrossValidateWithBoosting(SAMDataPoint.FeelingModel feelingsmodel, int nFold, double[] answersFromPrevious, bool useIAPSratings = false, Normalize normalizationType = Normalize.OneMinusOne)
        {
            List<PredictionResult> predictedResults = new List<PredictionResult>();

            List<List<double>> tempFeatures = GetFeatureValues(features, samData);

            if (answersFromPrevious.Length != tempFeatures.Count)
            {
                //the number of answers from the previous machine does not match the current feature list, i.e. something is wrong
                Log.LogMessage("The number of guesses from the previous machine does not match the number of datapoints in this data set");
                return null;
            }
            //Normalize the features before appending the previous machine's guesses
            tempFeatures = tempFeatures.NormalizeFeatureList<double>(normalizationType).ToList();

            //Append the previous machine's guess for each datapoint as an extra feature (the boosting step)
            for (int i = 0; i < tempFeatures.Count; i++)
            {
                tempFeatures[i].Add(answersFromPrevious[i]);
            }
            //Split into cross-validation parts
            List<Tuple<SVMProblem, SVMProblem>> problems = tempFeatures.GetCrossValidationSets<double>(samData, feelingsmodel, nFold, useIAPSratings);


            //Get correct results
            int[] answers = samData.dataPoints.Select(x => x.ToAVCoordinate(feelingsmodel, useIAPSratings)).ToArray();

            foreach (SVMParameter SVMpara in Parameters)
            {
                List<double> guesses = new List<double>();
                //Train a model on each fold's training set and predict the corresponding test set
                foreach (Tuple<SVMProblem, SVMProblem> tupleProblem in problems)
                {
                    guesses.AddRange(tupleProblem.Item2.Predict(tupleProblem.Item1.Train(SVMpara)));
                }
                int numberOfLabels = SAMData.GetNumberOfLabels(feelingsmodel);
                //Calculate scoring results
                double[,] confus = CalculateConfusion(guesses.ToArray(), answers, numberOfLabels);
                List<double> pres = CalculatePrecision(confus, numberOfLabels);
                List<double> recall = CalculateRecall(confus, numberOfLabels);
                List<double> fscore = CalculateFScore(pres, recall);
                PredictionResult pR = new PredictionResult(confus, recall, pres, fscore, SVMpara, features, answers.ToList(), guesses.ConvertAll(x => (int)x).ToList());
                predictedResults.Add(pR);
            }
            return predictedResults;
        }
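
For context, a minimal sketch of how this boosting step might be wired up: the per-datapoint guesses from an earlier machine are passed in as answersFromPrevious and appended to each feature vector before cross-validation. The svmMachine instance, the FeelingModel.Valence value and the placeholder guess array are assumptions for illustration; only CrossValidateWithBoosting itself comes from the code above.

        //Hypothetical caller; firstStageGuesses would normally come from a previous machine's
        //PredictionResult, one guess per datapoint in samData.
        double[] firstStageGuesses = new double[] { 1, 0, 2, 1 /* ... one value per datapoint */ };

        List<PredictionResult> boosted = svmMachine.CrossValidateWithBoosting(
            SAMDataPoint.FeelingModel.Valence,   //assumed enum value
            nFold: 10,
            answersFromPrevious: firstStageGuesses);

        if (boosted == null)
        {
            //the guess count did not match the number of datapoints
        }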
Example #2
        private IEnumerable<Tuple<List<double>, int>> GetTrainingData(SENSOR machine, int start, int trainingEnd)
        {
            //Extract windowed GSR features, then return only the training range
            List<List<double>> featureVectors = new List<List<double>>();
            List<int> timeStamps = new List<int>();

            if (machine == SENSOR.GSR)
            {
                int stepSize = 100;
                for (int i = 0; i < _fdAnomaly.gsrData.Last().timestamp - _fdAnomaly.gsrData.First().timestamp - GSR_DURATION + GSR_DELAY; i += stepSize)
                {
                    //All GSR samples whose offset from the first sample falls inside the current window
                    List<double> data = _fdAnomaly.gsrData
                        .SkipWhile(x => (x.timestamp - _fdAnomaly.gsrData.First().timestamp) < i + GSR_DELAY)
                        .TakeWhile(x => i + GSR_DURATION > (x.timestamp - _fdAnomaly.gsrData.First().timestamp))
                        .Select(x => (double)x.resistance)
                        .ToList();
                    if (data.Count == 0) continue;

                    //Window features: mean, max, min and standard deviation of the resistance
                    List<double> featureVector = new List<double>();
                    double avg = data.Average();
                    double sd = Math.Sqrt(data.Average(x => Math.Pow(x - avg, 2)));
                    featureVector.Add(avg);
                    featureVector.Add(data.Max());
                    featureVector.Add(data.Min());
                    featureVector.Add(sd);
                    featureVectors.Add(featureVector);
                    timeStamps.Add(i);
                }
            }
            //Normalize each feature to the [-1, 1] range and pair every feature vector with its window timestamp
            featureVectors = featureVectors.NormalizeFeatureList<double>(Normalize.OneMinusOne).ToList();
            var dataSet = featureVectors.Zip(timeStamps, (featureVector, timeStamp) => Tuple.Create(featureVector, timeStamp));

            //Keep only the windows whose timestamps fall inside [start, trainingEnd)
            var trainingSet = dataSet.SkipWhile(x => x.Item2 < start).TakeWhile(x => x.Item2 < trainingEnd);

            return trainingSet;
        }
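
A minimal sketch (not part of the original code) of how the training windows returned by GetTrainingData might feed a one-class SVM, reusing the OneClassClassifier and SVMParameter setup shown in Example #3. The start/trainingEnd values below are placeholders.

        //Assumed usage inside the same class; GetTrainingData already normalizes the features.
        var trainingWindows = GetTrainingData(SENSOR.GSR, start: 0, trainingEnd: 180000)
            .Select(x => x.Item1)
            .ToList();

        OneClassClassifier occ = new OneClassClassifier(trainingWindows);
        SVMParameter svmP = new SVMParameter
        {
            Kernel = SVMKernelType.RBF,
            C = 100,        //parameter values copied from Example #3
            Gamma = 0.01,
            Nu = 0.01,
            Type = SVMType.ONE_CLASS
        };
        occ.CreateModel(svmP);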
Example #3
        private void button1_Click(object sender, EventArgs e)
        {
            noveltyChart.ChartAreas.First().BackColor = Color.DarkGray;
            FolderBrowserDialog fbd = new FolderBrowserDialog();
            if (fbd.ShowDialog() == DialogResult.OK)
            {
                string path = fbd.SelectedPath;
                string[] pathParts = path.Split('\\');
                string testSubjectId = pathParts[pathParts.Length - 2];

                fdNovelty.LoadFromFile(new string[] { path + @"\EEG.dat", path + @"\GSR.dat", path + @"\HR.dat", path + @"\KINECT.dat" }, DateTime.Now, false);
                events = File.ReadAllLines(path + @"\SecondTest.dat");

                string[] tmpSevents = File.ReadAllLines(path + @"\sam.dat");
                foreach (string ev in tmpSevents)
                {
                    string[] parts = ev.Split(':');
                    sEvents.Add(new samEvents(int.Parse(parts[0]), int.Parse(parts[1]), int.Parse(parts[2])));
                }

            }
            if (events == null || events.Length == 0)
            {
                //no second test data was loaded (e.g. the dialog was cancelled)
                return;
            }
            int start = (useRestInTraining.Checked) ? 180000 : 0;
            int trainingEnd = int.Parse(events[2].Split('#')[0]);
            int windowSize = 5000;
            int stepSize = 100;
            int delay = 2000;

            //Extract windowed GSR features, then split into training & prediction sets
            List<List<double>> featureVectors = new List<List<double>>();
            List<int> timeStamps = new List<int>();

            for (int i = 0; i < fdNovelty.gsrData.Last().timestamp - fdNovelty.gsrData.First().timestamp - windowSize; i += stepSize)
            {
                //All GSR samples whose offset from the first sample falls inside the current window
                List<double> data = fdNovelty.gsrData
                    .SkipWhile(x => (x.timestamp - fdNovelty.gsrData.First().timestamp) < i)
                    .TakeWhile(x => i + windowSize > (x.timestamp - fdNovelty.gsrData.First().timestamp))
                    .Select(x => (double)x.resistance)
                    .ToList();
                if (data.Count == 0) continue;

                //Window features: mean, max, min and standard deviation of the resistance
                List<double> featureVector = new List<double>();
                double avg = data.Average();
                double sd = Math.Sqrt(data.Average(x => Math.Pow(x - avg, 2)));
                featureVector.Add(avg);
                featureVector.Add(data.Max());
                featureVector.Add(data.Min());
                featureVector.Add(sd);
                featureVectors.Add(featureVector);
                timeStamps.Add(i);
            }

            //Normalize each feature to the [-1, 1] range and pair every feature vector with its window timestamp
            featureVectors = featureVectors.NormalizeFeatureList<double>(Normalize.OneMinusOne).ToList();
            var dataSet = featureVectors.Zip(timeStamps, (featureVector, timeStamp) => Tuple.Create(featureVector, timeStamp));

            //Windows up to trainingEnd form the training set, the remaining windows the prediction set
            var trainingSet = dataSet.SkipWhile(x => x.Item2 < start).TakeWhile(x => x.Item2 < trainingEnd);
            var predictionSet = dataSet.SkipWhile(x => x.Item2 < trainingEnd);

            int count = predictionSet.Count();
            int firstPrediction = predictionSet.First().Item2;
            OneClassClassifier occ = new OneClassClassifier(trainingSet.Select(x => x.Item1).ToList());
            SVMParameter svmP = new SVMParameter
            {
                Kernel = SVMKernelType.RBF,
                C = 100,
                Gamma = 0.01,
                Nu = 0.01,
                Type = SVMType.ONE_CLASS
            };
            occ.CreateModel(svmP);
            /* List<int> indexes = occ.PredictOutliers(predictionSet.Select(x => x.Item1).ToList());

             foreach (int index in indexes)
             {
                 timestampsOutliers.Add(predictionSet.ElementAt(index).Item2 - firstPrediction + 180000 + 4000);
             }
             */

            updateChart();
        }
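
Examples #2 and #3 duplicate the windowed GSR feature extraction (mean, max, min and standard deviation of the resistance in each window). A possible shared helper is sketched below; the ExtractGsrWindowFeatures name and the GsrSample type are hypothetical, while the windowing logic and feature set are taken from the two examples (Example #3 corresponds to delay = 0, Example #2 to delay = GSR_DELAY and windowSize = GSR_DURATION). Normalization is left to the caller, as in both examples.

        //Hypothetical minimal shape of a GSR sample; the real project type may differ.
        public class GsrSample { public int timestamp; public int resistance; }

        private static Tuple<List<List<double>>, List<int>> ExtractGsrWindowFeatures(
            List<GsrSample> gsrData, int delay, int windowSize, int stepSize)
        {
            var featureVectors = new List<List<double>>();
            var timeStamps = new List<int>();
            long firstTimestamp = gsrData.First().timestamp;
            long end = gsrData.Last().timestamp - firstTimestamp - windowSize + delay;

            for (int i = 0; i < end; i += stepSize)
            {
                //All samples whose offset from the first sample falls inside [i + delay, i + windowSize)
                List<double> data = gsrData
                    .SkipWhile(x => (x.timestamp - firstTimestamp) < i + delay)
                    .TakeWhile(x => i + windowSize > (x.timestamp - firstTimestamp))
                    .Select(x => (double)x.resistance)
                    .ToList();
                if (data.Count == 0) continue;

                double avg = data.Average();
                double sd = Math.Sqrt(data.Average(x => Math.Pow(x - avg, 2)));
                featureVectors.Add(new List<double> { avg, data.Max(), data.Min(), sd });
                timeStamps.Add(i);
            }
            return Tuple.Create(featureVectors, timeStamps);
        }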