Example #1
        /**
         * Returns true if the detection window contains at least one frame
         * with the same gesture id and attempt == -1. Callers skip such
         * detections when tallying false positives (see Examples #9 and #10).
         */
        public static bool IsBadCommand(
            List <Frame> frames,
            ContinuousResult detection)
        {
            int hits = 0;

            for (int frame_no = detection.startFrameNo;
                 frame_no < detection.endFrameNo;
                 frame_no++)
            {
                // Only count frames labeled with the detected gesture id
                // whose attempt field is -1.
                if (frames[frame_no].gid != detection.gid)
                {
                    continue;
                }

                if (frames[frame_no].attempt != -1)
                {
                    continue;
                }

                hits++;
            }

            return(hits > 0);
        }
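
Below is a minimal usage sketch, condensed from the false-positive tally in Examples #9 and #10; the TallyFalsePositive helper name is hypothetical.

        // Hypothetical helper: a detection that matched no ground-truth
        // command is counted as a false positive only when it is not a
        // "bad command".
        public static void TallyFalsePositive(
            List <Frame> frames,
            List <ConfisionMatrix> cm,
            ContinuousResult detection)
        {
            if (GestureCommand.IsBadCommand(frames, detection))
            {
                return;
            }

            // false positive
            cm[detection.gid].fp += 1.0f;
        }
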
Example #2
        /**
         * Call when a false positive has occurred.
         */
        public void FalsePositive(ContinuousResult result)
        {
            // Resetting internal state should handle everything.
            this.Reset();

            // Do not reset the boundary, because the issue may
            // just be poor segmentation. So allow new scores
            // to result in quick recognition.
        }
Example #3
        /**
         * Advance one frame: filter the next point, classify sliding
         * windows of each configured size ending at the current frame,
         * and merge overlapping detections into rresults.
         */
        private void StepWindow()
        {
            List <ContinuousResult> continuous_results = new List <ContinuousResult>();

            Vector pt = ema_filter.Filter(frames[frame_idx].pt, 1 / (double)parameters.fps);

            video.Add(pt);

            // Get the trajectory and pass it to window
            for (double size = window.minimum; size <= window.maximum; size += window.step_size)
            {
                List <Jackknife.Vector> trajectory;
                int start = frame_idx - (int)size + 1;
                int end   = frame_idx;

                if (start < 0)
                {
                    return;
                }

                trajectory = JackknifeConnector.GetJKBufferFromVideo(video, start, end);
                jackknife.ClassForWinCustTemplLen(trajectory, continuous_results, start, end);

                for (int rr = 0; rr < continuous_results.Count; rr++)
                {
                    ContinuousResult  result  = continuous_results[rr];
                    RecognitionResult rresult = new RecognitionResult();
                    rresult.gid   = result.gid;
                    rresult.start = start;
                    rresult.end   = end;
                    rresult.score = result.score;

                    bool match = false;

                    for (int ii = 0; ii < rresults.Count; ii++)
                    {
                        if (rresults[ii].Update(rresult))
                        {
                            match = true;
                            break;
                        }
                    }

                    if (match)
                    {
                        continue;
                    }

                    rresults.Add(rresult);
                }
            }
        }
Example #4
        /**
         * Returns true if the detection has this command's gesture id and
         * its frame interval overlaps this command's [start, end] interval.
         */
        public bool Hit(ContinuousResult detection)
        {
            if (gid != detection.gid)
            {
                return(false);
            }

            if (start > detection.endFrameNo)
            {
                return(false);
            }

            if (end < detection.startFrameNo)
            {
                return(false);
            }

            return(true);
        }
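
A minimal usage sketch of the overlap test above, mirroring the ground-truth matching loop in Example #9; the FindHitCommand helper name is hypothetical.

        // Hypothetical helper: return the first ground-truth command that
        // the detection overlaps, or null if none does.
        public static GestureCommand FindHitCommand(
            List <GestureCommand> cmds,
            ContinuousResult detection)
        {
            for (int ii = 0; ii < cmds.Count; ii++)
            {
                if (cmds[ii].Hit(detection))
                {
                    return(cmds[ii]);
                }
            }

            return(null);
        }
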
Example #5
        public MacheteTemplate(
            DeviceType device_id,
            ContinuousResultOptions cr_options,
            Sample sample,
            bool filtered = true
            )
        {
            trigger = new MacheteTrigger();
            dtw     = new List <MacheteElement>[2] {
                new List <MacheteElement>(),
                new List <MacheteElement>()
            };
            List <Vector> resampled = new List <Vector>();


            this.sample = sample;

            if (filtered == true)
            {
                this.minimumFrameCount = sample.FilteredTrajectory.Count / 2;
                this.maximumFrameCount = sample.FilteredTrajectory.Count * 2;
            }
            else
            {
                this.minimumFrameCount = sample.Trajectory.Count / 2;
                this.maximumFrameCount = sample.Trajectory.Count * 2;
            }

            Prepare(device_id, resampled, filtered);

            this.vectorCount = this.vectors.Count;

            this.cr_options = cr_options;
            this.result     = new ContinuousResult(
                cr_options,
                sample.GestureId,
                sample
                )
            {
                sample = sample
            };
        }
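
A minimal construction sketch, assuming the same ContinuousResultOptions setup used in Example #10; the BuildTemplate helper name is hypothetical, and device and sample stand in for the caller's DeviceType and training Sample.

        // Hypothetical helper: build one template per training sample.
        public static MacheteTemplate BuildTemplate(
            DeviceType device,
            Sample sample)
        {
            // latencyFrameCount follows Example #10; other option values
            // are left at their defaults.
            ContinuousResultOptions cr_options = new ContinuousResultOptions();
            cr_options.latencyFrameCount = 1;

            return(new MacheteTemplate(
                device,
                cr_options,
                sample,
                filtered: true));
        }
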
Example #6
    /**
     * Classify the current buffer. If a gesture is recognized, append a
     * ContinuousResult describing it to crs and return the gesture id;
     * otherwise return -1.
     */
    public int Segment(List <Machete.ContinuousResult> crs)
    {
        int ret = -1;

        ret = jk.ClassForWinCustTemplLen(
            buffer,
            out double score,
            out int st,
            out int end);

        if (ret != -1)
        {
            Machete.ContinuousResult cr = new Machete.ContinuousResult();
            cr.startFrameNo = st;
            cr.endFrameNo   = end;
            cr.score        = score;
            cr.gid          = ret;
            crs.Add(cr);
        }

        return(ret);
    }
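
A minimal per-frame driver sketch, assuming Segment above is the WindowSegmentor method that Example #9 drives; the UpdateAndSegment helper name is hypothetical.

    // Hypothetical helper: push one filtered point through the segmentor
    // and report whether it produced a detection.
    public static bool UpdateAndSegment(
        WindowSegmentor windowSegmentor,
        Vector pt,
        List <Machete.ContinuousResult> continuous_results)
    {
        Jackknife.Vector jkpt = JackknifeConnector.ToJKVector(pt);

        windowSegmentor.Update(jkpt);

        // Segment returns the recognized gesture id, or -1 if nothing was
        // detected; on success it appends a ContinuousResult to the list.
        return(windowSegmentor.Segment(continuous_results) != -1);
    }
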
Example #7
        public static ContinuousResult SelectResult(
            List <ContinuousResult> results,
            bool cancel_with_something_better
            )
        {
            List <ContinuousResult> triggered = new List <ContinuousResult>();
            List <ContinuousResult> remaining = new List <ContinuousResult>();

            // Get all triggered events
            for (int ii = 0; ii < results.Count; ii++)
            {
                ContinuousResult result = results[ii];

                if (!result.Triggered())
                {
                    continue;
                }

                triggered.Add(result);
            }

            // If none triggered, there is nothing to do
            if (triggered.Count == 0)
            {
                return(null);
            }

            for (int ii = 0; ii < triggered.Count; ii++)
            {
                for (int jj = 0; jj < results.Count; jj++)
                {
                    ContinuousResult result = results[jj];

                    if (triggered[ii] == result)
                    {
                        continue;
                    }

                    if (triggered[ii].minimum > result.minimum)
                    {
                        if (cancel_with_something_better == true)
                        {
                            triggered[ii].SetWaitForEnd(result);
                            break;
                        }
                    }
                }

                if (triggered[ii].Triggered())
                {
                    remaining.Add(triggered[ii]);
                }
            }

            // Get the best survivor
            if (remaining.Count == 0)
            {
                return(null);
            }

            return(remaining[0]);
        }
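
A minimal per-frame usage sketch, condensed from Examples #10 and #11; the TrySelectResult helper name is hypothetical, and Machete, Vector, and ContinuousResult are the types used throughout these examples.

        // Hypothetical helper: gather this frame's candidates and keep at
        // most one. Passing true for cancel_with_something_better makes a
        // triggered result wait when a better-scoring candidate exists.
        public static ContinuousResult TrySelectResult(
            Machete machete,
            Vector pt,
            int frame_idx)
        {
            List <ContinuousResult> continuous_results = new List <ContinuousResult>();

            machete.ProcessFrame(pt, frame_idx, continuous_results);

            return(ContinuousResult.SelectResult(
                continuous_results,
                cancel_with_something_better: false));
        }
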
Example #8
 /**
  * Called when another result is recognized or scores better: record its
  * end frame as the boundary and wait for that detection to end.
  */
 public void SetWaitForEnd(ContinuousResult result)
 {
     boundary = result.endFrameNo;
     state    = ResultStateT.WAIT_FOR_END;
 }
Example #9
        public static Results EvaluateSessionWindowed(DeviceType device, int subject_id)
        {
            configuartion_parameters_t parameneters = new configuartion_parameters_t(device);

            // Load subject dataset
            Dataset       ds        = Global.load_subject_dataset(device, subject_id);
            List <Sample> train_set = Global.GetTrainSet(ds, 1);

            // Convert the dataset to the format accepted by Jackknife
            List <Jackknife.Sample> jk_train_set = JackknifeConnector.GetJKTrainSet(train_set);

            // Load subject session
            List <Frame> frames = new List <Frame>();

            Global.load_session(device, subject_id, frames, ds);

            // Load ground truth
            List <GestureCommand> cmds = new List <GestureCommand>();

            GestureCommand.GetAllCommands(cmds, ds, device, subject_id);

            // Train the recognizer
            JackknifeBlades blades = new JackknifeBlades();

            blades.SetIPDefaults();

            blades.ResampleCnt = 20;

            Jackknife.Jackknife jk = new Jackknife.Jackknife(blades);
            foreach (Jackknife.Sample s in jk_train_set)
            {
                jk.AddTemplate(s);
            }

            // Set between 2.0 and 10.0 in steps of .25
            // to find the best result
            jk.SetRejectionThresholds(5.25f);

            // Set up filter for session points
            ExponentialMovingAverage ema_filter = new ExponentialMovingAverage(frames[0].pt);
            Vector pt;

            WindowSegmentor windowSegmentor = new WindowSegmentor(jk);

            //List<RecognitionResult> rresults = new List<RecognitionResult>();

            List <ContinuousResult> continuous_results = new List <ContinuousResult>();

            // Go through session
            for (int session_pt = 0; session_pt < frames.Count; session_pt++)
            {
                long ts1 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // at beginning

                pt = ema_filter.Filter(frames[session_pt].pt, 1 / (double)parameneters.fps);

                long ts2 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // after filter

                Jackknife.Vector jkpt = JackknifeConnector.ToJKVector(pt);

                windowSegmentor.Update(jkpt);
                windowSegmentor.Segment(continuous_results);

                if (session_pt % 2000 == 0)
                {
                    Debug.Log(string.Format("{0}% Done", (double)session_pt / (double)frames.Count * 100.0));
                }
            }

            foreach (ContinuousResult cr in continuous_results)
            {
                Debug.Log(string.Format("st {0}, en {1}, gid {2}", cr.startFrameNo, cr.endFrameNo, cr.gid));
            }
            // Per gesture confusion matrix
            List <ConfisionMatrix> cm = new List <ConfisionMatrix>();

            for (int ii = 0; ii < ds.Gestures.Count; ii++)
            {
                cm.Add(new ConfisionMatrix());
            }

            for (int ii = 0; ii < continuous_results.Count; ii++)
            {
                ContinuousResult result = continuous_results[ii];

                bool found = false;
                int  cidx  = 0;

                for (cidx = 0; cidx < cmds.Count; cidx++)
                {
                    found = cmds[cidx].Hit(result);

                    if (found == true)
                    {
                        break;
                    }
                }

                if (found == true)
                {
                    // true positive
                    if (cmds[cidx].detected == false)
                    {
                        cmds[cidx].detected = true;
                        cm[result.gid].tp  += 1.0f;
                    }
                }
                else
                {
                    bool bad = GestureCommand.IsBadCommand(
                        frames,
                        result);

                    if (bad == true)
                    {
                        continue;
                    }
                    // false positive
                    cm[result.gid].fp += 1.0f;
                }
            }

            // false negatives
            for (int cidx = 0; cidx < cmds.Count; cidx++)
            {
                if (cmds[cidx].detected == true)
                {
                    continue;
                }

                cm[cmds[cidx].gid].fn += 1.0;
            }

            Results ret = new Results();

            for (int ii = 0; ii < cm.Count; ii++)
            {
                ret.AppendResults(cm[ii]);
            }

            ret.PrintF();
            return(ret);
        }
Example #10
        public static Results EvaluateSession(DeviceType device, int subject_id)
        {
            configuartion_parameters_t parameneters = new configuartion_parameters_t(device);

            // Load subject dataset
            Dataset       ds        = Global.load_subject_dataset(device, subject_id);
            List <Sample> train_set = Global.GetTrainSet(ds, 1);
            // Convert the dataset to the format accepted by Jackknife
            List <Jackknife.Sample> jk_train_set = JackknifeConnector.GetJKTrainSet(train_set);

            // Load subject session
            List <Frame> frames = new List <Frame>();

            Global.load_session(device, subject_id, frames, ds);
            //Debug.Log("Frame_cnt = " + frames.Count());
            // Load ground truth
            List <GestureCommand> cmds = new List <GestureCommand>();

            GestureCommand.GetAllCommands(cmds, ds, device, subject_id);

            // Train the segmentor
            ContinuousResultOptions cr_options = new ContinuousResultOptions();

            //COREY FIX latency framecount
            cr_options.latencyFrameCount = 1;
            Machete yeah = new Machete(device, cr_options);

            foreach (Sample s in train_set)
            {
                yeah.AddSample(s, filtered: true);
            }

            //PrintYeahStats(yeah);

            // Train the recognizer
            JackknifeBlades blades = new JackknifeBlades();

            blades.SetIPDefaults();

            //COREY FIX RESAMPLECNT
            blades.ResampleCnt = 20;
            blades.LowerBound  = false;

            Jackknife.Jackknife jk = new Jackknife.Jackknife(blades);
            foreach (Jackknife.Sample s in jk_train_set)
            {
                jk.AddTemplate(s);
            }

            // Set between 2.0 and 10.0 in steps of .25
            // to find the best result
            // Best at 7.5 with around 66% :/
            jk.SetRejectionThresholds(7.0);


            // Set up filter for session points
            ExponentialMovingAverage ema_filter = new ExponentialMovingAverage(frames[0].pt, 5.0);
            Vector        pt;
            List <Vector> video = new List <Vector>();

            List <RecognitionResult> rresults = new List <RecognitionResult>();

            int triggered_count = 0;

            // Go through session
            for (int session_pt = 0; session_pt < frames.Count; session_pt++)
            {
                long ts1 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // at beginning

                List <ContinuousResult> continuous_results = new List <ContinuousResult>();

                pt = ema_filter.Filter(frames[session_pt].pt, 1 / (double)parameneters.fps);

                long ts2 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // after filter

                video.Add(pt);

                //Debug.Log(string.Format("Pt: {0} {1} {2}", pt.Data[0], pt.Data[1], pt.Data[2]));

                yeah.ProcessFrame(pt, session_pt, continuous_results);

                long ts3 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // after processing frame

                bool             cancel_if_better_score = false;
                ContinuousResult result = ContinuousResult.SelectResult(
                    continuous_results,
                    cancel_if_better_score);

                long ts4 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // after looking for result

                //Debug.Log(string.Format("{0} {1} {2}", ts2 - ts1, ts3 - ts2, ts4 - ts3));
                //Debug.Log(string.Format("FRAME NO: {0}", frame_no));

                if (result == null)
                {
                    continue;
                }


                //COREY FIX For comparing against the original code
                //if (result.sample.GestureId == 0)
                //    Debug.Log(string.Format("start {0}, end {1}", result.startFrameNo, result.endFrameNo + 1));

                triggered_count += 1;

                //Debug.Log(string.Format("Frame: {3} Result: {0}, Sample: {1}, Score: {2}", result.gid, result.sample.GestureName, result.score, frame_no));
                //Debug.Log(string.Format("Best result as of: {0} {1}", ii, yeah.bestScore));

                // Run recognizer on segmented result
                double recognizer_d = 0.0f;
                bool   match        = false;

                // Save a buffer to pass to recognizer
                List <Jackknife.Vector> jkbuffer = JackknifeConnector.GetJKBufferFromVideo(
                    video,
                    result.startFrameNo,
                    result.endFrameNo);

                long ts5 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // before passing to recognizer

                match = jk.IsMatch(jkbuffer, result.sample.GestureId, out recognizer_d);

                //COREY Fix print out scores

                if (result.sample.GestureId == 0)
                {
                    //Debug.Log(string.Format("start {0}, end {1} ", result.startFrameNo, result.endFrameNo + 1) + string.Format("Is match = {0}, score = {1}",
                    //    match,
                    //    recognizer_d));
                }
                // Skip candidates the recognizer did not match to a template with this gid
                if (match == false)
                {
                    continue;
                }

                long ts6 = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // after classifying

                // Gesture was accepted
                RecognitionResult rresult = new RecognitionResult();
                rresult.gid   = result.gid;
                rresult.start = result.startFrameNo;
                rresult.end   = result.endFrameNo;
                rresult.score = recognizer_d;

                match = false;

                for (int ii = 0; ii < rresults.Count; ii++)
                {
                    if (rresults[ii].Update(rresult) == true)
                    {
                        match = true;
                        break;
                    }
                }

                // If an existing result was updated with a better match, do not add a new one
                if (match == true)
                {
                    continue;
                }

                rresults.Add(rresult);
            }

            // Per gesture confusion matrix
            List <ConfisionMatrix> cm = new List <ConfisionMatrix>();

            for (int ii = 0; ii < ds.Gestures.Count; ii++)
            {
                cm.Add(new ConfisionMatrix());
            }

            for (int ii = 0; ii < rresults.Count; ii++)
            {
                RecognitionResult result = rresults[ii];

                bool found = false;
                int  cidx  = 0;

                for (cidx = 0; cidx < cmds.Count; cidx++)
                {
                    found = cmds[cidx].Hit(result);

                    if (found == true)
                    {
                        break;
                    }
                }

                if (found == true)
                {
                    // true positive
                    if (cmds[cidx].detected == false)
                    {
                        cmds[cidx].detected = true;
                        cm[result.gid].tp  += 1.0f;
                    }
                }
                else
                {
                    bool bad = GestureCommand.IsBadCommand(
                        frames,
                        result);

                    if (bad == true)
                    {
                        continue;
                    }
                    // false positive
                    cm[result.gid].fp += 1.0f;
                }
            }

            // false negatives
            for (int cidx = 0; cidx < cmds.Count; cidx++)
            {
                if (cmds[cidx].detected == true)
                {
                    continue;
                }

                cm[cmds[cidx].gid].fn += 1.0;
            }

            Results ret = new Results();

            for (int ii = 0; ii < cm.Count; ii++)
            {
                ret.AppendResults(cm[ii]);
                //temp += string.Format("{5}:\t A: {0:N6}, E: {1:N6}, P: {2:N6}, R: {3:N6}, F1: {4:N6}\n", ret.accuracy/ret.total, ret.error / ret.total, ret.precision / ret.total, ret.recall / ret.total, ret.f1_0 / ret.total, ii);
            }
            //Debug.Log(temp);

            return(ret);
        }
Example #11
        /**
         * Step forward one frame: get the next point, filter it, run
         * segmentation, and recognize any segmented candidate.
         */
        void Step()
        {
            List <ContinuousResult> continuousResults = new List <ContinuousResult>();

            Vector pt = ema_filter.Filter(frames[frame_idx].pt, 1 / (double)parameters.fps);

            video.Add(pt);

            if (frame_idx == 0)
            {
                last_video_vector = pt;
            }

            // For mouse input, skip this frame unless the cursor moved "far enough"
            if (frame_idx > 1 && deviceType == DeviceType.MOUSE)
            {
                Vector vec    = pt - last_video_vector;
                double weight = vec.Length();

                if (weight <= 2.0)
                {
                    return;
                }

                last_video_vector = pt;
            }


            machete.ProcessFrame(pt, frame_idx, continuousResults);

            bool             cancel_if_better_score = false;
            ContinuousResult result = ContinuousResult.SelectResult(
                continuousResults,
                cancel_if_better_score);

            // No trigger, return
            if (result == null)
            {
                return;
            }

            List <Jackknife.Vector> jkbuffer = JackknifeConnector.GetJKBufferFromVideo(
                video,
                result.startFrameNo,
                result.endFrameNo);

            // Check if there was a match
            double recognizer_d = 0.0f;
            bool   match;

            match = jackknife.IsMatch(jkbuffer, result.sample.GestureId, out recognizer_d);

            if (match == false)
            {
                return;
            }

            RecognitionResult rresult = new RecognitionResult();

            rresult.gid   = result.gid;
            rresult.start = result.startFrameNo;
            rresult.end   = result.endFrameNo;
            rresult.score = recognizer_d;

            match = false;

            for (int ii = 0; ii < rresults.Count; ii++)
            {
                if (rresults[ii].Update(rresult))
                {
                    match = true;
                    break;
                }
            }

            // If an existing result was updated with a better match, we are done
            if (match)
            {
                return;
            }

            rresults.Add(rresult);
        }