private float[,] DoTrain(ref GestureDetector gd)
    {
        // Collect per-joint training samples from every tagged key frame in
        // m_skRecorder, then train the posture templates of the given detector.
        m_postFeature.Reset();
        m_postFeature.RegisterJoints(m_jointIdx);       // configure feature type and joint indices

        int frameTotal = m_skRecorder.FrameCount;
        for (int frame = 0; frame < frameTotal; frame++)
        {
            int tag = m_skRecorder.m_tagList[frame];
            if (tag == 0)
            {
                // Untagged frame: carries no posture label, so contribute nothing.
                continue;
            }

            CopyCurFrameJointDataRef(frame);
            m_postFeature.UpdateFeatures();             // recompute features for this key frame
            for (int joint = 0; joint < JointCount; joint++)
            {
                // tag is 1-based here (0 means "untagged"), so tag - 1 selects the posture slot.
                gd.m_posts[tag - 1][joint].AddTrainData(m_postFeature.m_jointVec[(int)m_jointIdx[joint]]);
            }
        }

        // Train every posture/joint template; Train() returns a float per template
        // (presumably a standard deviation, going by the array name — confirm in GestureDetector).
        float[,] stdArray = new float[m_tagCount, JointCount];
        for (int post = 0; post < m_tagCount; post++)
        {
            for (int joint = 0; joint < JointCount; joint++)
            {
                stdArray[post, joint] = gd.m_posts[post][joint].Train();
            }
        }

        // Train the time-interval coefficients for the gesture as a whole.
        TrainTimeIntv(gd);

        return stdArray;
    }
    /// the main work flow. called once per frame
    void Update()
    {
        // Do nothing while paused.
        if (Pause)
        {
            return;
        }

        // With live sensor input, bail out when no fresh skeleton frame arrived.
        if (UseRealtimeData && !m_skReader.UpdateSkeletonData())
        {
            return;
        }

        // Recompute joint features once per frame, shared by all detectors.
        m_postFeature.UpdateFeatures();

        foreach (GestureDetector detector in m_gestures)
        {
            // Run detection only on confident position data; when a gesture fires,
            // raise the event for any subscribers.
            if (detector.CheckPositionConfidence() && detector.Detect())
            {
                m_gestureDetected?.Invoke(this, new GestureEventArgs(detector.m_name, detector.PostureCount == 1));
            }
        }
    }