Example #1
        /**
         * Use this to average together confusion matrices
         * over different tests.
         */
        public void AddResult(ConfusionMatrices other)
        {
            if (confusion_matrices.Count != other.confusion_matrices.Count)
            {
                throw new ArgumentException("confusion_matrices.Count must equal other.confusion_matrices.Count");
            }

            entries++;

            for (int ii = 0; ii < other.confusion_matrices.Count; ii++)
            {
                double sum = other.confusion_matrices[ii].Sum();
                confusion_matrices[ii] += other.confusion_matrices[ii] / sum;
            }
        }
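
AddResult normalizes each incoming matrix to unit mass before accumulating it, so every test contributes equally regardless of how many samples it contains. Below is a minimal self-contained sketch of that idea; plain 2x2 arrays stand in for the ConfusionMatrices internals, which are not shown in these examples, so the semantics are an assumption.

        using System;

        class AveragingSketch
        {
            static double[,] acc = new double[2, 2];
            static int entries = 0;

            // Assumed semantics of AddResult: normalize the incoming
            // matrix to unit mass, then accumulate; a later averaging
            // step divides the accumulator by 'entries'.
            static void Add(double[,] other)
            {
                double sum = 0.0;
                foreach (double v in other) sum += v;

                for (int r = 0; r < 2; r++)
                    for (int c = 0; c < 2; c++)
                        acc[r, c] += other[r, c] / sum;

                entries++;
            }

            static void Main()
            {
                Add(new double[,] { { 8, 2 }, { 1, 9 } });   // 20 samples
                Add(new double[,] { { 4, 0 }, { 2, 4 } });   // 10 samples

                // Each test contributed mass 1.0, so the average is acc / entries.
                for (int r = 0; r < 2; r++)
                    for (int c = 0; c < 2; c++)
                        Console.Write($"{acc[r, c] / entries:F3} ");
                Console.WriteLine();
            }
        }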
Example #2
        /**
         * Evaluate all user study sessions for a given device type.
         */
        public static void evaluate_sessions(DeviceType device)
        {
            List <ConfusionMatrices> confusion_matrices = new List <ConfusionMatrices>();

            int[] participants = get_participant_list(device);

            for (int i = 0; i < participants.Length; i++)
            {
                ConfusionMatrices cm = evaluate_session(
                    device,
                    participants[i]);

                confusion_matrices.Add(cm);

                int idx = confusion_matrices.Count - 1;

                Console.WriteLine("Participant: " + participants[i]);

                ResultT results = confusion_matrices[idx].Results();
                results.Print();
                Console.WriteLine();
            }


            // put all results into first confusion
            // matrix
            for (int ii = 1; ii < confusion_matrices.Count; ii++)
            {
                confusion_matrices[0].AddResult(confusion_matrices[ii]);
            }

            Console.WriteLine("Aggregate results:");

            ResultT result = confusion_matrices[0].Results();

            result.Print();
            Console.WriteLine();
        }
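
A hypothetical driver (the Main method below is not part of the original code) would simply call evaluate_sessions once per device type, using the DeviceType values that appear in these examples:

        // Hypothetical entry point; DeviceType and evaluate_sessions
        // are the ones defined in these examples.
        public static void Main(string[] args)
        {
            evaluate_sessions(DeviceType.KINECT);
            evaluate_sessions(DeviceType.LEAP_MOTION);
        }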
Example #3
        /**
         * After the command is complete, update the matrices
         * with detected gestures.
         */
        public void update_confusion_matrices(ConfusionMatrices cm)
        {
            bool found = false;

            for (int ii = 0;
                 ii < detected_ids.Count;
                 ii++)
            {
                int detected_id = detected_ids[ii];

                if (found && expected_id == detected_id)
                {
                    // treat as false positive
                    // because we've already detected
                    // the gesture once
                    cm.AddResult(
                        expected_id,
                        -2);
                    continue;
                }

                cm.AddResult(
                    expected_id,
                    detected_id);

                // mark that we've found this gesture
                found = found || (detected_id == expected_id);
            }

            // false negative
            if (!found)
            {
                cm.AddResult(
                    expected_id,
                    -1);
            }
        }
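
The special IDs follow the convention used in these examples: -1 records a false negative (the expected gesture was never detected) and -2 records a repeat detection of the expected gesture, counted as a false positive. Here is a self-contained tally of one command's detections mirroring that logic; everything below is illustrative, not the ConfusionMatrices implementation.

        using System;
        using System.Collections.Generic;

        class SentinelSketch
        {
            static void Main()
            {
                int expected_id = 3;

                // Detections reported while this command was active.
                var detected_ids = new List<int> { 5, 3, 3 };

                int tp = 0, fp = 0, fn = 0;
                bool found = false;

                foreach (int id in detected_ids)
                {
                    if (id == expected_id && !found)
                    {
                        tp += 1;   // first correct detection
                        found = true;
                    }
                    else
                    {
                        fp += 1;   // wrong gesture, or the -2 repeat case
                    }
                }

                if (!found)
                {
                    fn += 1;       // the -1 false negative case
                }

                Console.WriteLine($"TP={tp} FP={fp} FN={fn}");
            }
        }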
Example #4
        /**
         * Load a participant's dataset and session.
         * Train the recognizer with the training data
         * and run the video through. See what happens...
         */
        public static ConfusionMatrices evaluate_session(DeviceType device, int subject_id)
        {
            // Load up the training dataset.
            Dataset ds = load_subject_dataset(
                device,
                subject_id);

            // Load up the session.
            List <Frame> frames = new List <Frame>();

            load_session(
                device,
                subject_id,
                frames);

            // Create a new recognizer.
            JackknifeBlades blades = new JackknifeBlades();

            blades.SetIPDefaults();
            Jackknife jk = new Jackknife(blades);

            // Train the recognizer, without 'bad' gestures.
            for (int ii = 0;
                 ii < ds.Samples.Count;
                 ii++)
            {
                int    gesture_id   = ds.Samples[ii].GestureId;
                string gesture_name = ds.Gestures[gesture_id];
                if (bad_gesture(gesture_name))
                {
                    continue;
                }
                jk.AddTemplate(ds.Samples[ii]);
            }


            // Get device and application parameters
            // based on the device type.
            configuartion_parameters_t parameters = new configuartion_parameters_t(device);

            // We originally used n=4, r=2 for Kinect data
            // and n=6, r=2 for Leap Motion data, but here
            // we just use one setting for both; it has barely
            // any effect on the results.
            jk.Train(6, 2, 1.00);

            // Play session video through
            // the recognizer.
            List <Vector>         buffer     = new List <Vector>();
            List <int>            detections = new List <int>();
            List <CommandResults> cmds       = new List <CommandResults>();
            int last_cmd_id = -1;
            int next_update = parameters.update_interval;

            int frame_no = 0;

            ExponentialMovingAverage filter = new ExponentialMovingAverage(frames[0].pt);
            Vector pt;

            for (int ii = 0;
                 ii < frames.Count;
                 ii++)
            {
                // Skip this frame if it's bad.
                if (frames[ii].bad_pt)
                {
                    continue;
                }

                // Low pass filter the input.
                // Note, we originally didn't smooth the data,
                // so results now are a little higher than in
                // the paper.
                pt = filter.Filter(
                    frames[ii].pt,
                    1 / (double)parameters.fps);

                //pt = frames[ii].pt;

                frame_no += 1;

                // start a new command
                if (frames[ii].cmd_id != last_cmd_id)
                {
                    last_cmd_id = frames[ii].cmd_id;

                    int gid = convert_gesture_id(
                        ds,
                        frames[ii].gesture_id);

                    CommandResults cmd = new CommandResults(
                        frames[ii].cmd_id,
                        gid);

                    if (bad_gesture(frames[ii].gesture_id))
                    {
                        cmd.ignore = true;
                    }

                    cmds.Add(cmd);
                }

                // This buffering approach is really
                // inefficient, but since this is off-line,
                // performance is not important.
                buffer.Add(pt);
                if (buffer.Count > parameters.sliding_window_frame_cnt)
                {
                    buffer.RemoveAt(0);
                }

                // We need to have a couple points before
                // calling the recognizer.
                if (buffer.Count < 2)
                {
                    continue;
                }

                // Wait a few frames before trying
                // to recognize again.
                if (frame_no < next_update)
                {
                    continue;
                }

                next_update = frame_no + parameters.update_interval;

                // Run the recognizer.
                int gesture_id = jk.Classify(buffer);

                // Add recognition result.
                detections.Add(gesture_id);
                if (detections.Count > parameters.repeat_cnt)
                {
                    detections.RemoveAt(0);
                }

                // Count how many times this gesture was recognized.
                int winner_cnt = 0;
                for (int jj = 0;
                     jj < detections.Count;
                     jj++)
                {
                    if (detections[jj] == gesture_id)
                    {
                        winner_cnt += 1;
                    }
                }

                // Ensure we have enough recognitions.
                if (winner_cnt < parameters.repeat_cnt)
                {
                    continue;
                }

                // If nothing was detected, skip rest.
                if (gesture_id == -1)
                {
                    continue;
                }

                // Hurray! A gesture is recognized!
                // Hopefully it's the right one too!!
                cmds[cmds.Count - 1].add(gesture_id);
                detections.Clear();
                buffer.Clear();
            }

            // Mark bad commands: situations where the participant
            // made a mistake or tracking was lost. We know the
            // command was bad because the proctor asked the
            // participant to repeat the gesture, and a new command
            // ID was assigned to the repeated sequence.
            for (int ii = 1;
                 ii < cmds.Count;
                 ii++)
            {
                if (cmds[ii].expected_id == cmds[ii - 1].expected_id)
                {
                    CommandResults temp = cmds[ii - 1];
                    temp.ignore  = true;
                    cmds[ii - 1] = temp;
                }
            }

            // Put all results in confusion matrices.
            ConfusionMatrices ret = new ConfusionMatrices(ds);

            for (int ii = 0; ii < cmds.Count; ii++)
            {
                if (cmds[ii].ignore)
                {
                    continue;
                }

                cmds[ii].update_confusion_matrices(ret);
            }

            return ret;
        }
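
The low-pass filter above is an exponential moving average; its implementation is not included in these examples. The standard recurrence is y += alpha * (x - y), with alpha derived from the frame interval, as in this single-channel sketch (the 1 Hz cutoff is an assumed parameter, not taken from the original code):

        using System;

        class EmaSketch
        {
            class ScalarEma
            {
                private double y;
                private readonly double cutoff;   // Hz; assumed parameter

                public ScalarEma(double initial, double cutoffHz = 1.0)
                {
                    y = initial;
                    cutoff = cutoffHz;
                }

                public double Filter(double x, double dt)
                {
                    // One-pole low-pass: alpha = dt / (dt + RC), RC = 1 / (2*pi*fc).
                    double rc = 1.0 / (2.0 * Math.PI * cutoff);
                    double alpha = dt / (dt + rc);
                    y += alpha * (x - y);
                    return y;
                }
            }

            static void Main()
            {
                var filter = new ScalarEma(0.0);
                double[] noisy = { 1.0, 0.8, 1.2, 0.9, 1.1 };

                foreach (double x in noisy)
                    Console.WriteLine(filter.Filter(x, 1.0 / 30.0));  // dt = 1 / fps
            }
        }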
Example #5
        /**
         * A simple user independent test.
         */
        public static void user_independent_test(DeviceType device)
        {
            // First, load all training data for one device type.
            string path = "";

            if (device == DeviceType.KINECT)
            {
                path = Global.GetRootDirectory() + "datasets/jk2017/kinect/training/";
            }
            else if (device == DeviceType.LEAP_MOTION)
            {
                path = Global.GetRootDirectory() + "datasets/jk2017/leap_motion/training/";
            }


            Dataset ds = Dataset.LoadDataset(path);

            int subject_cnt      = ds.Subjects.Count;
            int sample_cnt       = ds.Samples.Count;
            ConfusionMatrices cm = new ConfusionMatrices(ds);

            // iterate through all subjects
            for (int subject_id = 0; subject_id < subject_cnt; subject_id++)
            {
                ConfusionMatrices cm_individual = new ConfusionMatrices(ds);

                Console.WriteLine("Participant: " + subject_id);

                // train a recognizer with the selected subject
                JackknifeBlades blades = new JackknifeBlades();

                blades.SetIPDefaults();

                Jackknife jk = new Jackknife(blades);

                for (int sample_id = 0; sample_id < sample_cnt; sample_id++)
                {
                    Sample sample = ds.Samples[sample_id];

                    if (sample.SubjectId != subject_id)
                    {
                        continue;
                    }

                    jk.AddTemplate(ds.Samples[sample_id]);
                }

                // Only train the recognizer if you need rejection;
                // this closed-set test does not learn a rejection
                // threshold.

                // test the recognizer with all other samples
                for (int sample_id = 0; sample_id < sample_cnt; sample_id++)
                {
                    Sample sample = ds.Samples[sample_id];

                    if (sample.SubjectId == subject_id)
                    {
                        continue;
                    }

                    int gid = jk.Classify(sample);

                    cm_individual.AddResult(
                        sample.GestureId,
                        gid);
                }

                cm.AddResult(cm_individual);

                ResultT individual_results = cm_individual.Results();
                individual_results.Print();
            }

            Console.WriteLine("Aggregate Results: ");
            ResultT results = cm.Results();

            results.Print();
        }
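
ResultT is also not shown in these examples. As a rough illustration of the kind of statistic Results()/Print() presumably reports, here is a self-contained accuracy computation over a single confusion matrix; the rows-are-expected, columns-are-detected layout is an assumption.

        using System;

        class AccuracySketch
        {
            static void Main()
            {
                // Assumed layout: rows = expected gesture, columns = detected gesture.
                double[,] cm =
                {
                    { 18.0,  2.0 },
                    {  3.0, 17.0 },
                };

                double correct = 0.0, total = 0.0;

                for (int r = 0; r < 2; r++)
                {
                    for (int c = 0; c < 2; c++)
                    {
                        if (r == c)
                        {
                            correct += cm[r, c];   // diagonal = correct detections
                        }

                        total += cm[r, c];
                    }
                }

                Console.WriteLine($"Accuracy: {correct / total:P1}");
            }
        }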