Code Example #1
        static void processFrameData(NatNetML.FrameOfMocapData data)
        {
            var new_points = new List <Point3d>();
            var new_sizes  = new List <float>();

            for (int i = 0; i < data.nOtherMarkers; ++i)
            {
                var om = data.OtherMarkers[i];
                new_points.Add(new Point3d(om.x, om.y, om.z));
                new_sizes.Add(1.0f);
            }

            for (int i = 0; i < data.nMarkers; ++i)
            {
                var m = data.LabeledMarkers[i];
                new_points.Add(new Point3d(m.x, m.y, m.z));
                new_sizes.Add(1.0f);
            }

            if (new_points.Count > 0)
            {
                markers      = new_points;
                marker_sizes = new_sizes;
            }

            Rhino.RhinoDoc.ActiveDoc.Views.Redraw();
        }
Code Example #2
File: UniForm.cs  Project: zsyzgu/UniversalOptitrack
        void m_NatNet_OnFrameReady(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            // [optional] High-resolution frame arrival timing information
            Int64 currTime = timer.Value;

            if (lastTime != 0)
            {
                // Get time elapsed in tenths of a millisecond.
                Int64 timeElapsedInTicks = currTime - lastTime;
                Int64 timeElapseInTenthsOfMilliseconds = (timeElapsedInTicks * 10000) / timer.Frequency;
                // uncomment for timing info
                //OutputMessage("Frame Delivered: (" + timeElapseInTenthsOfMilliseconds.ToString() + ")  FrameTimestamp: " + data.fLatency);
            }

            // [NatNet] Add the incoming frame of mocap data to our frame queue,
            // Note: the frame queue is a shared resource with the UI thread, so lock it while writing
            lock (syncLock)
            {
                // [optional] clear the frame queue before adding a new frame
                m_FrameQueue.Clear();
                m_FrameQueue.Enqueue(data);
            }
            lastTime = currTime;
            // MessageBox.Show(data.ToString());
        }
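The handler above reads timer.Value and timer.Frequency from a high-resolution timer field whose declaration is not part of the listing. A minimal sketch of an equivalent helper, assuming System.Diagnostics.Stopwatch as the tick source (the HighResTimer name and its members are placeholders, not the project's original class):

using System;
using System.Diagnostics;

// Hypothetical stand-in for the high-resolution timer used by m_NatNet_OnFrameReady above.
// Stopwatch.GetTimestamp() returns a tick count and Stopwatch.Frequency gives ticks per second,
// so (elapsedTicks * 10000) / Frequency yields tenths of a millisecond, matching the handler.
class HighResTimer
{
    public long Value { get { return Stopwatch.GetTimestamp(); } }
    public long Frequency { get { return Stopwatch.Frequency; } }
}

class TimingDemo
{
    static void Main()
    {
        var timer = new HighResTimer();
        long lastTime = timer.Value;

        System.Threading.Thread.Sleep(5);   // stand-in for waiting on the next frame

        long currTime = timer.Value;
        long timeElapsedInTicks = currTime - lastTime;
        long timeElapsedInTenthsOfMilliseconds = (timeElapsedInTicks * 10000) / timer.Frequency;
        Console.WriteLine("Frame delivered after {0} tenths of a millisecond", timeElapsedInTenthsOfMilliseconds);
    }
}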
Code Example #3
        static void fetchFrameData(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            /*  Exception handler for cases where assets are added or removed.
             *  The data descriptions are re-obtained in the main function so that the
             *  content of the frame handler is kept minimal. */
            if ((data.bTrackingModelsChanged == true || data.nRigidBodies != mRigidBodies.Count || data.nSkeletons != mSkeletons.Count || data.nForcePlates != mForcePlates.Count))
            {
                mAssetChanged = true;
            }

            /*  Processing and outputting frame data every 200th frame.
             *  This conditional statement is included to simplify the program output. */
            if (data.iFrame % 200 == 0)
            {
                if (data.bRecording == false)
                {
                    Console.WriteLine("Frame #{0} Received:", data.iFrame);
                }
                else if (data.bRecording == true)
                {
                    Console.WriteLine("[Recording] Frame #{0} Received:", data.iFrame);
                }

                processFrameData(data);
            }
        }
Code Example #4
        void m_NatNet_OnFrameReady(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            // measure time between frame arrival (inter frame)
            m_FramePeriodTimer.Stop();
            interframeDuration = m_FramePeriodTimer.Duration();

            // measure processing time (intra frame)
            m_ProcessingTimer.Start();

            // process data
            // NOTE!  do as little as possible here as we're on the data servicing thread
            ProcessFrameOfData(ref data);

            // report if we are taking longer than a mocap frame time
            // which eventually will back up the network receive buffer and result in frame drop
            m_ProcessingTimer.Stop();
            double appProcessingTimeMSecs = m_ProcessingTimer.Duration();
            double mocapFramePeriodMSecs  = (1.0f / m_ServerFramerate) * 1000.0f;

            if (appProcessingTimeMSecs > mocapFramePeriodMSecs)
            {
                //changed for now
                Console.WriteLine("Warning : Frame handler taking longer than frame period: " + appProcessingTimeMSecs.ToString("F2"));
            }

            m_FramePeriodTimer.Start();
        }
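m_FramePeriodTimer and m_ProcessingTimer are instances of a QueryPerfCounter helper that is not shown in these listings. A rough stand-in built on System.Diagnostics.Stopwatch, assuming Duration() returns milliseconds (inferred from the comparison against mocapFramePeriodMSecs above):

using System.Diagnostics;

// Hypothetical Stopwatch-based stand-in for the QueryPerfCounter helper used above;
// the real class in the sample may differ.
public class QueryPerfCounter
{
    private readonly Stopwatch stopwatch = new Stopwatch();

    public void Start()
    {
        stopwatch.Restart();
    }

    public void Stop()
    {
        stopwatch.Stop();
    }

    // Assumed to report the elapsed time between Start() and Stop() in milliseconds.
    public double Duration()
    {
        return stopwatch.Elapsed.TotalMilliseconds;
    }
}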
Code Example #5
        static void fetchFrameData(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            if ((data.bTrackingModelsChanged == true || data.nRigidBodies != mRigidBodies.Count))
            {
                mAssetChanged = true;
            }

            /*  Processing */

            int index = 1;

            if (data.iFrame % 1 == 0)
            {
                if (data.bRecording == false)
                {
                    Console.WriteLine("Frame #{0} Received:", data.iFrame);
                }
                else if (data.bRecording == true)
                {
                    Console.WriteLine("[Recording] Frame #{0} Received:", data.iFrame);
                }

                index = NatNetClient.processFrameData(data, index);
            }
        }
Code Example #6
        static void fetchFrameData(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            if ((data.bTrackingModelsChanged == true || data.nRigidBodies != mRigidBodies.Count))
            {
                mAssetChanged = true;
            }

            /*  Processing */

            if (data.iFrame % 10 == 0)
            {
                if (data.bRecording == false)
                {
                    Console.WriteLine("Frame #{0} Received:", data.iFrame);
                }
                else if (data.bRecording == true)
                {
                    Console.WriteLine("[Recording] Frame #{0} Received:", data.iFrame);
                }

                //Console.WriteLine(index);
                index = NatNetClient.processFrameData(data, index);
                //Console.WriteLine(index);
                if (index > 10)
                {
                    Console.Write("helloworld");
                    mNatNet.OnFrameReady -= fetchFrameData;
                }
            }
        }
Code Example #7
        // UI update thread
        private void UpdateUI()
        {
            m_UIUpdateTimer.Stop();                                 // stop the UI update interval timer
            double interframeDuration = m_UIUpdateTimer.Duration(); // time since the previous UI update

            QueryPerfCounter uiIntraFrameTimer = new QueryPerfCounter();

            uiIntraFrameTimer.Start();         // start timing this UI update

            // the frame queue is a shared resource with the FrameOfMocap delivery thread, so lock it while reading
            // note this can block the frame delivery thread.  In a production application frame queue management would be optimized.
            // When several threads read and write the same resource, access has to be synchronized so that only one
            // thread touches it at a time; lock is the most common synchronization construct in C#.
            lock (syncLock)
            {
                while (m_FrameQueue.Count > 0)              // while frames are waiting in the queue
                {
                    m_FrameOfData = m_FrameQueue.Dequeue(); // take the next frame from the queue

                    if (m_FrameQueue.Count > 0)
                    {
                        continue;
                    }

                    if (m_FrameOfData != null)
                    {
                        // for servers that only use timestamps, not frame numbers, calculate a
                        // frame number from the time delta between frames
                        if (desc.HostApp.Contains("TrackingTools"))
                        {
                            m_fCurrentMocapFrameTimestamp = m_FrameOfData.fLatency;
                            if (m_fCurrentMocapFrameTimestamp == m_fLastFrameTimestamp)
                            {
                                continue;
                            }
                            if (m_fFirstMocapFrameTimestamp == 0.0f)
                            {
                                m_fFirstMocapFrameTimestamp = m_fCurrentMocapFrameTimestamp;
                            }
                            m_FrameOfData.iFrame = (int)((m_fCurrentMocapFrameTimestamp - m_fFirstMocapFrameTimestamp) * m_ServerFramerate);
                        }

                        // update the data grid
                        UpdateDataGrid();  // refresh the displayed data
                        // Mocap server timestamp (in seconds)
                        //m_fLastFrameTimestamp = m_FrameOfData.fTimestamp;
                        TimestampValue.Text         = m_FrameOfData.fTimestamp.ToString("F3"); // show the frame timestamp
                        DroppedFrameCountLabel.Text = mDroppedFrames.ToString();               // show the dropped-frame count
                    }
                }
            }

            uiIntraFrameTimer.Stop();                                   // stop timing this UI update
            double uiIntraFrameDuration = uiIntraFrameTimer.Duration(); // duration of this UI update

            m_UIUpdateTimer.Start();                                    // restart the UI update interval timer
        }
Code Example #8
 // [NatNet] m_NatNet_OnFrameReady will be called when a frame of Mocap
 // data has been received from the server application.
 //
 // Note: This callback is on the network service thread, so it is
 // important to return from this function as quickly as possible
 // to prevent incoming frames of data from buffering up on the
 // network socket.
 //
 // Note: "data" is a reference structure to the current frame of data.
 // NatNet re-uses this same instance for each incoming frame, so it should
 // not be kept (the values contained in "data" will become replaced after
 // this callback function has exited).
 void m_NatNet_OnFrameReady(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
 {
     lock (syncLock)
     {
         m_FrameQueue.Clear();
         m_FrameQueue.Enqueue(data);
     }
 }
Code Example #9
        public NatNetPoseData FetchFrameData()
        {
            NatNetPoseData poseData = new NatNetPoseData();

            if (connected_)
            {
                //Console.WriteLine("== NatNet Data Update ==");
                NatNetML.FrameOfMocapData data = mNatNet.GetLastFrameOfData();
                poseData = ProcessPoseData(data);
            }

            return(poseData);
        }
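NatNetPoseData is a project-specific type whose definition is not part of these listings. Judging from how ProcessPoseData (Code Example #19 below) fills it, it could be declared roughly as follows; the field names are taken from the examples, but the layout and the use of System.Numerics types are assumptions (the original project may use UnityEngine.Vector3 and UnityEngine.Quaternion instead):

using System.Numerics;

// Hypothetical container for the pose values extracted in ProcessPoseData;
// the original definition is not shown in these examples.
public struct NatNetPoseData
{
    public Vector3 mPos;        // position of the last labeled marker parsed from the frame
    public Vector3 rbPos;       // rigid body position
    public Quaternion rbRot;    // rigid body orientation (qx, qy, qz, qw)
}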
Code Example #10
File: Form1.cs  Project: otri/vive
        // UI refresher
        private void UpdateUITimer_Tick(object sender, EventArgs e)
        {
            lock (syncLock)
            {
                while (m_FrameQueue.Count > 0)
                {
                    m_FrameOfData = m_FrameQueue.Dequeue();

                    if (m_FrameQueue.Count > 0)
                    {
                        continue;
                    }

                    if (m_FrameOfData != null)
                    {
                        // for servers that only use timestamps, not frame numbers, calculate a
                        // frame number from the time delta between frames
                        if (desc.HostApp.Contains("TrackingTools") || desc.HostApp.Contains("Motive"))
                        {
                            m_fCurrentMocapFrameTimestamp = m_FrameOfData.fLatency;
                            if (m_fCurrentMocapFrameTimestamp == m_fLastFrameTimestamp)
                            {
                                continue;
                            }
                            if (m_fFirstMocapFrameTimestamp == 0.0f)
                            {
                                m_fFirstMocapFrameTimestamp = m_fCurrentMocapFrameTimestamp;
                            }
                            m_FrameOfData.iFrame = (int)((m_fCurrentMocapFrameTimestamp - m_fFirstMocapFrameTimestamp) * m_ServerFramerate);
                        }

                        // update the data grid
                        UpdateDataGrid();

                        // update the chart
                        UpdateChart(m_FrameCounter++);

                        // only redraw chart when necessary, not for every frame
                        if (m_FrameQueue.Count == 0)
                        {
                            chart1.ChartAreas[0].RecalculateAxesScale();
                            chart1.ChartAreas[0].AxisX.Minimum = 0;
                            chart1.ChartAreas[0].AxisX.Maximum = GraphFrames;
                            chart1.Invalidate();
                        }

                        m_fLastFrameTimestamp = m_FrameOfData.fLatency;
                    }
                }
            }
        }
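Several of the UI refreshers in these examples (Code Examples #7, #10, #24, #27) derive a pseudo frame number from timestamps when the server only reports time. The conversion, isolated as a small function with a worked value (a sketch; the standalone form and names are illustrative):

using System;

static class FrameNumberDemo
{
    // Derive a frame index from the timestamp delta and the server frame rate,
    // as done in the UpdateUITimer_Tick / UpdateUI handlers above.
    static int ToFrameNumber(double currentTimestamp, double firstTimestamp, double serverFramerate)
    {
        return (int)((currentTimestamp - firstTimestamp) * serverFramerate);
    }

    static void Main()
    {
        // With a 120 fps server, a frame stamped 2.5 s after the first frame maps to frame 300.
        Console.WriteLine(ToFrameNumber(12.5, 10.0, 120.0));   // prints 300
    }
}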
Code Example #11
        //called when data for any output pin is requested
        public void Evaluate(int SpreadMax)
        {
            if (FFrame.SliceCount > 0)
            {
                // There are available frames

                lock (syncLock) {
                    //FLogger.Log(LogType.Debug, "" + m_FrameQueue.Count);

                    mFrameOfData = FFrame[0];

                    FTracked.SliceCount   = mFrameOfData.nRigidBodies;
                    FTransform.SliceCount = mFrameOfData.nRigidBodies;
                    FLogger.Log(LogType.Debug, "There are " + mFrameOfData.nRigidBodies + " rigid bodies detected");

                    for (int i = 0; i < mFrameOfData.nRigidBodies; ++i)
                    {
                        NatNetML.RigidBodyData rb = mFrameOfData.RigidBodies[i];

                        // Is tracked
                        FTracked[i] = rb.Tracked;

                        // Positions
                        Vector3D newPos = new Vector3D(rb.x, rb.y, rb.z);

                        // Orientations (quaternions)
                        float qx, qy, qw, qz;
                        qx = rb.qx;
                        qy = rb.qy;
                        qz = rb.qz;
                        qw = rb.qw;
                        QuaternionNormalise(ref qx, ref qy, ref qz, ref qw);
                        Vector4D newQuat = new Vector4D(qx, qy, qz, qw); // use the normalised components

                        //Orientations (Euler)
                        Vector3D euler = VMath.QuaternionToEulerYawPitchRoll(newQuat);

                        //Matrix transform
                        FTransform[i] = ToVVVVMatrix(newPos, euler);

                        // Marker positions
                        FMarkerPosition.SliceCount = rb.nMarkers;
                        for (int j = 0; j < FMarkerPosition.SliceCount; ++j)
                        {
                            Marker marker = rb.Markers[j];
                            FMarkerPosition[j] = new Vector3D(-marker.x, marker.y, marker.z);
                        }
                    }
                }
            }
        }
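QuaternionNormalise is a plugin helper that is not included in the listing. A plausible implementation matching the call above (only the signature is taken from the example; the body is an assumption):

using System;

// Hypothetical implementation of the helper called from Evaluate above:
// scales the four components in place so the quaternion has unit length.
static void QuaternionNormalise(ref float qx, ref float qy, ref float qz, ref float qw)
{
    float magnitude = (float)Math.Sqrt(qx * qx + qy * qy + qz * qz + qw * qw);
    if (magnitude > 0.0f)
    {
        qx /= magnitude;
        qy /= magnitude;
        qz /= magnitude;
        qw /= magnitude;
    }
}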
Code Example #13
        //Constructor
        public DeviceOptiTrackNode()
        {
            //The client argument:
            //	0: Multicast
            //	1: Unicast
            mNatNet = new NatNetClientML(0);

            mFrameOfData = new NatNetML.FrameOfMocapData();
            m_FrameQueue = new Queue <NatNetML.FrameOfMocapData>();
            mDesc        = new NatNetML.ServerDescription();

            // [NatNet] set a "Frame Ready" callback function (event handler) that will be
            // called by NatNet when NatNet receives a frame of data from the server application
            mNatNet.OnFrameReady += new NatNetML.FrameReadyEventHandler(m_NatNet_OnFrameReady);
            //mNatNet.OnFrameReady2 += new FrameReadyEventHandler2(m_NatNet_OnFrameReady2);
        }
Code Example #14
        void m_NatNet_OnFrameReady(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            m_FrameQueue.Clear();
            FrameOfMocapData deepCopy = new FrameOfMocapData(data);

            m_FrameQueue.Enqueue(deepCopy);

            FOutputFrameData.SliceCount = 1;
            FOutputFrameData[0]         = deepCopy;

            FOutputTimestamp.SliceCount = 1;
            FOutputTimestamp[0]         = deepCopy.fTimestamp;

            FOutputFrameCount.SliceCount = 1;
            FOutputFrameCount[0]         = deepCopy.iFrame;
        }
Code Example #15
        // [NatNet] m_NatNet_OnFrameReady will be called when a frame of Mocap
        // data has been received from the server application.
        //
        // Note: This callback is on the network service thread, so it is
        // important to return from this function as quickly as possible
        // to prevent incoming frames of data from buffering up on the
        // network socket.
        //
        // Note: "data" is a reference structure to the current frame of data.
        // NatNet re-uses this same instance for each incoming frame, so it should
        // not be kept (the values contained in "data" will become replaced after
        // this callback function has exited).
        void m_NatNet_OnFrameReady(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            Int64 currTime = timer.Value;

            if (lastTime != 0)
            {
                // Get time elapsed in tenths of a millisecond.
                Int64 timeElapsedInTicks = currTime - lastTime;
                Int64 timeElapseInTenthsOfMilliseconds = (timeElapsedInTicks * 10000) / timer.Frequency;
                // uncomment for timing info
                //OutputMessage("Frame Delivered: (" + timeElapseInTenthsOfMilliseconds.ToString() + ")  FrameTimestamp: " + data.fLatency);
            }
            lock (syncLock)
            {
                m_FrameQueue.Clear();
                m_FrameQueue.Enqueue(data);
            }
            lastTime = currTime;
        }
Code Example #16
        /// <summary>
        /// [NatNet] m_NatNet_OnFrameReady will be called when a frame of Mocap
        /// data has been received from the server application.
        ///
        /// Note: This callback is on the network service thread, so it is
        /// important to return from this function as quickly as possible
        /// to prevent incoming frames of data from buffering up on the
        /// network socket.
        ///
        /// Note: "data" is a reference structure to the current frame of data.
        /// NatNet re-uses this same instance for each incoming frame, so it should
        /// not be kept (the values contained in "data" will become replaced after
        /// this callback function has exited).
        /// </summary>
        /// <param name="data">The actual frame of mocap data</param>
        /// <param name="client">The NatNet client instance</param>
        void m_NatNet_OnFrameReady(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            double           elapsedIntraMS = 0.0f;
            QueryPerfCounter intraTimer     = new QueryPerfCounter();

            intraTimer.Start();

            // check and report frame arrival period (time elapsed since previous frame arrived)
            m_FramePeriodTimer.Stop();
            double elapsedMS = m_FramePeriodTimer.Duration();

            if ((mLastFrame % 100) == 0)
            {
                OutputMessage("FrameID:" + data.iFrame + "   Timestamp: " + data.fTimestamp + "   Period:" + elapsedMS);
            }

            // check and report frame drop
            if ((mLastFrame != 0) && ((data.iFrame - mLastFrame) != 1))
            {
                OutputMessage("Frame Drop: ( ThisFrame: " + data.iFrame.ToString() + "  LastFrame: " + mLastFrame.ToString() + " )");
            }

            // [NatNet] Add the incoming frame of mocap data to our frame queue,
            // Note: the frame queue is a shared resource with the UI thread, so lock it while writing
            lock (syncLock)
            {
                // [optional] clear the frame queue before adding a new frame
                m_FrameQueue.Clear();
                FrameOfMocapData deepCopy = new FrameOfMocapData(data);
                m_FrameQueue.Enqueue(deepCopy);
            }

            intraTimer.Stop();
            elapsedIntraMS = intraTimer.Duration();
            if (elapsedIntraMS > 5.0f)
            {
                OutputMessage("Warning : Frame handler taking too long: " + elapsedIntraMS.ToString("F2"));
            }

            mLastFrame = data.iFrame;
            m_FramePeriodTimer.Start();
        }
Code Example #17
        private void HandleFrameReady(NatNetML.FrameOfMocapData mocapData)
        {
            // update last position of rigid bodies
            for (var i = 0; i < mocapData.nRigidBodies; i++)
            {
                var rigidBodyData = mocapData.RigidBodies[i];

                var rigidBody = _rigidBodies.Where(p => p.Key.ID == rigidBodyData.ID)
                                .Select(p => p.Key)
                                .DefaultIfEmpty(null)
                                .FirstOrDefault();

                if (rigidBody == null)
                {
                    continue;
                }

                _rigidBodies[rigidBody] = rigidBodyData;
            }
        }
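The _rigidBodies field used in HandleFrameReady is declared elsewhere in that class; from the lookup it appears to map rigid body descriptions to their most recent frame data. A sketch of the assumed declaration, together with a simpler equivalent of the Where/Select/DefaultIfEmpty/FirstOrDefault chain:

using System.Collections.Generic;
using System.Linq;

class RigidBodyTracker
{
    // Hypothetical declaration consistent with how _rigidBodies is used above:
    // rigid body descriptions as keys, the latest per-frame data as values.
    private readonly Dictionary<NatNetML.RigidBody, NatNetML.RigidBodyData> _rigidBodies =
        new Dictionary<NatNetML.RigidBody, NatNetML.RigidBodyData>();

    // Equivalent, shorter form of the LINQ lookup used in HandleFrameReady.
    private NatNetML.RigidBody FindDescription(int id)
    {
        return _rigidBodies.Keys.FirstOrDefault(rb => rb.ID == id);
    }
}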
Code Example #18
        /// <summary>
        /// Process an incoming frame of mocap data: detect dropped frames (by timestamp
        /// and by frame ID) and enqueue a deep copy of the frame for the UI thread.
        /// </summary>
        /// <param name="data">The current frame of mocap data</param>

        // process the frame data and save it
        void ProcessFrameOfData(ref NatNetML.FrameOfMocapData data)
        {
            // detect and report any 'reported' frame drop (as reported by the server)
            if (m_fLastFrameTimestamp != 0.0f)
            {
                double framePeriod = 1.0f / m_ServerFramerate;
                double thisPeriod  = data.fTimestamp - m_fLastFrameTimestamp;
                double fudgeFactor = 0.002f; // 2 ms
                if ((thisPeriod - framePeriod) > fudgeFactor)
                {
                    //OutputMessage("Frame Drop: ( ThisTS: " + data.fTimestamp.ToString("F3") + "  LastTS: " + m_fLastFrameTimestamp.ToString("F3") + " )");
                    mDroppedFrames++;
                }
            }

            // check and report frame drop (frame id based)
            if (mLastFrame != 0)
            {
                if ((data.iFrame - mLastFrame) != 1)
                {
                    //OutputMessage("Frame Drop: ( ThisFrame: " + data.iFrame.ToString() + "  LastFrame: " + mLastFrame.ToString() + " )");
                    //mDroppedFrames++;
                }
            }

            // recording : write packet to data file


            // [NatNet] Add the incoming frame of mocap data to our frame queue,
            // Note: the frame queue is a shared resource with the UI thread, so lock it while writing
            lock (syncLock)
            {
                // [optional] clear the frame queue before adding a new frame
                m_FrameQueue.Clear();
                FrameOfMocapData deepCopy = new FrameOfMocapData(data);
                m_FrameQueue.Enqueue(deepCopy);
            }

            mLastFrame            = data.iFrame;
            m_fLastFrameTimestamp = data.fTimestamp;
        }
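The timestamp-based drop detection in ProcessFrameOfData compares the gap between consecutive server timestamps against the nominal frame period plus a 2 ms fudge factor. Pulled out as a pure predicate with worked values (a sketch; the standalone form is illustrative):

using System;

static class FrameDropCheck
{
    // True when the timestamp gap exceeds the nominal frame period by more than the
    // fudge factor, mirroring the 'reported' frame drop test in ProcessFrameOfData above.
    static bool IsLikelyDrop(double thisTimestamp, double lastTimestamp,
                             double serverFramerate, double fudgeFactorSeconds = 0.002)
    {
        double framePeriod = 1.0 / serverFramerate;
        double thisPeriod  = thisTimestamp - lastTimestamp;
        return (thisPeriod - framePeriod) > fudgeFactorSeconds;
    }

    static void Main()
    {
        // At 120 fps the nominal period is about 8.3 ms: a 17 ms gap is flagged,
        // while a 9 ms gap stays within the 2 ms tolerance.
        Console.WriteLine(IsLikelyDrop(1.017, 1.000, 120.0));   // True
        Console.WriteLine(IsLikelyDrop(1.009, 1.000, 120.0));   // False
    }
}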
Code Example #19
        private NatNetPoseData ProcessPoseData(NatNetML.FrameOfMocapData data)
        {
            NatNetPoseData poseData = new NatNetPoseData();

            /*  Parsing Simple Marker Data   */
            for (int i = 0; i < data.nMarkers; i++)
            {
                Marker marker = data.LabeledMarkers[i];
                int    mID    = marker.ID;
                // Console.WriteLine("\tMarker ({0}): \t\tpos ({0:N3}, {1:N3}, {2:N3})", mID, marker.x, marker.y, marker.z);
                poseData.mPos = new Vector3(marker.x, marker.y, marker.z);
            }

            /*  Parsing Rigid Body Frame Data   */
            for (int i = 0; i < mRigidBodies.Count; i++)
            {
                int rbID = mRigidBodies[i].ID;              // Fetching rigid body IDs from the saved descriptions

                for (int j = 0; j < data.nRigidBodies; j++)
                {
                    if (rbID == data.RigidBodies[j].ID)                      // When rigid body ID of the descriptions matches rigid body ID of the frame data.
                    {
                        NatNetML.RigidBodyData rbData = data.RigidBodies[j]; // Received rigid body descriptions

                        if (rbData.Tracked == true)
                        {
                            poseData.rbPos = new Vector3(rbData.x, rbData.y, rbData.z);
                            poseData.rbRot = new Quaternion(rbData.qx, rbData.qy, rbData.qz, rbData.qw);
                        }
                        else
                        {
                            NatNetML.RigidBody rb = mRigidBodies[i];                // Saved rigid body descriptions
                            Console.WriteLine("\t{0} is not tracked in current frame", rb.Name);
                        }
                    }
                }
            }

            return(poseData);
        }
Code Example #20
        /// <summary>
        /// [NatNet] m_NatNet_OnFrameReady will be called when a frame of Mocap
        /// data has been received from the server application.
        ///
        /// Note: This callback is on the network service thread, so it is
        /// important to return from this function as quickly as possible
        /// to prevent incoming frames of data from buffering up on the
        /// network socket.
        ///
        /// Note: "data" is a reference structure to the current frame of data.
        /// NatNet re-uses this same instance for each incoming frame, so it should
        /// not be kept (the values contained in "data" will become replaced after
        /// this callback function has exited).
        /// </summary>
        /// <param name="data">The actual frame of mocap data</param>
        /// <param name="client">The NatNet client instance</param>
        void m_NatNet_OnFrameReady(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            double           elapsedIntraMS = 0.0f;
            QueryPerfCounter intraTimer     = new QueryPerfCounter();

            intraTimer.Start();

            // detect and report any 'measured' frame drop (as measured by the client)
            m_FramePeriodTimer.Stop();
            double elapsedMS = m_FramePeriodTimer.Duration();

            ProcessFrameOfData(ref data);

            // report if we are taking too long, which blocks packet receiving, which if long enough would result in socket buffer drop
            intraTimer.Stop();
            elapsedIntraMS = intraTimer.Duration();
            if (elapsedIntraMS > 5.0f)
            {
                OutputMessage("Warning : Frame handler taking too long: " + elapsedIntraMS.ToString("F2"));
            }
            m_FramePeriodTimer.Start();
        }
Code Example #21
        /// <summary>
        /// [NatNet] fetchFrameData will be called when a frame of Mocap
        /// data has been received from the server application.
        ///
        /// Note: This callback is on the network service thread, so it is
        /// important to return from this function as quickly as possible
        /// to prevent incoming frames of data from buffering up on the
        /// network socket.
        ///
        /// Note: "data" is a reference structure to the current frame of data.
        /// NatNet re-uses this same instance for each incoming frame, so it should
        /// not be kept (the values contained in "data" will become replaced after
        /// this callback function has exited).
        /// </summary>
        /// <param name="data">The actual frame of mocap data</param>
        /// <param name="client">The NatNet client instance</param>
        static void fetchFrameData(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
        {
            /*  Exception handler for cases where assets are added or removed.
             *  The data descriptions are re-obtained in the main function so that the
             *  content of the frame handler is kept minimal. */
            if ((data.bTrackingModelsChanged == true || data.nRigidBodies != mRigidBodies.Count || data.nSkeletons != mSkeletons.Count || data.nForcePlates != mForcePlates.Count))
            {
                mAssetChanged = true;
            }

            /*  Processing and outputting frame data every 200th frame.
             *  This conditional statement is included to simplify the program output. */
            //if (data.iFrame % 12 == 0) // camera runs at 120 fps, but ArduPilot limits the data rate to 70 ms
            // For the Parrot Bebop 2 it can be fed at 14 Hz, but for the Sky Viper v2450 5 Hz is the max; faster data seems to overload its CPU.
            {
                //if (data.bRecording == false)
                //    Console.WriteLine("Frame #{0} Received:", data.iFrame);
                //else if (data.bRecording == true)
                //    Console.WriteLine("[Recording] Frame #{0} Received:", data.iFrame);

                processFrameData(data);
            }
        }
Code Example #22
        static void processFrameData(NatNetML.FrameOfMocapData data)
        {
            /*  Parsing Rigid Body Frame Data   */
            for (int i = 0; i < mRigidBodies.Count; i++)
            {
                int rbID = mRigidBodies[i].ID;              // Fetching rigid body IDs from the saved descriptions

                for (int j = 0; j < data.nRigidBodies; j++)
                {
                    if (rbID == data.RigidBodies[j].ID)                      // When rigid body ID of the descriptions matches rigid body ID of the frame data.
                    {
                        NatNetML.RigidBody     rb     = mRigidBodies[i];     // Saved rigid body descriptions
                        NatNetML.RigidBodyData rbData = data.RigidBodies[j]; // Received rigid body descriptions

                        if (rbData.Tracked == true)
                        {
                            Console.WriteLine("\tRigidBody ({0}):", rb.Name);
                            Console.WriteLine("\t\tpos ({0:N3}, {1:N3}, {2:N3})", rbData.x, rbData.y, rbData.z);

                            // Rigid Body Euler Orientation
                            float[] quat = new float[4] {
                                rbData.qx, rbData.qy, rbData.qz, rbData.qw
                            };
                            float[] eulers = new float[3];

                            eulers = NatNetClientML.QuatToEuler(quat, NATEulerOrder.NAT_XYZr); //Converting quat orientation into XYZ Euler representation.
                            double xrot = RadiansToDegrees(eulers[0]);
                            double yrot = RadiansToDegrees(eulers[1]);
                            double zrot = RadiansToDegrees(eulers[2]);

                            Console.WriteLine("\t\tori ({0:N3}, {1:N3}, {2:N3})", xrot, yrot, zrot);
                        }
                        else
                        {
                            Console.WriteLine("\t{0} is not tracked in current frame", rb.Name);
                        }
                    }
                }
            }

            /* Parsing Skeleton Frame Data  */
            for (int i = 0; i < mSkeletons.Count; i++)      // Fetching skeleton IDs from the saved descriptions
            {
                int sklID = mSkeletons[i].ID;

                for (int j = 0; j < data.nSkeletons; j++)
                {
                    if (sklID == data.Skeletons[j].ID)                     // When skeleton ID of the description matches skeleton ID of the frame data.
                    {
                        NatNetML.Skeleton     skl     = mSkeletons[i];     // Saved skeleton descriptions
                        NatNetML.SkeletonData sklData = data.Skeletons[j]; // Received skeleton frame data

                        Console.WriteLine("\tSkeleton ({0}):", skl.Name);
                        Console.WriteLine("\t\tSegment count: {0}", sklData.nRigidBodies);

                        /*  Now, for each of the skeleton segments  */
                        for (int k = 0; k < sklData.nRigidBodies; k++)
                        {
                            NatNetML.RigidBodyData boneData = sklData.RigidBodies[k];

                            /*  Decoding skeleton bone ID   */
                            int skeletonID  = HighWord(boneData.ID);
                            int rigidBodyID = LowWord(boneData.ID);
                            int uniqueID    = skeletonID * 1000 + rigidBodyID;
                            int key         = uniqueID.GetHashCode();

                            NatNetML.RigidBody bone = (RigidBody)mHtSkelRBs[key];   //Fetching saved skeleton bone descriptions

                            //Outputting only the hip segment data for the purpose of this sample.
                            if (k == 0)
                            {
                                Console.WriteLine("\t\t{0:N3}: pos({1:N3}, {2:N3}, {3:N3})", bone.Name, boneData.x, boneData.y, boneData.z);
                            }
                        }
                    }
                }
            }

            /*  Parsing Force Plate Frame Data  */
            for (int i = 0; i < mForcePlates.Count; i++)
            {
                int fpID = mForcePlates[i].ID;                  // Fetching force plate IDs from the saved descriptions

                for (int j = 0; j < data.nForcePlates; j++)
                {
                    if (fpID == data.ForcePlates[j].ID)                       // When force plate ID of the descriptions matches force plate ID of the frame data.
                    {
                        NatNetML.ForcePlate     fp     = mForcePlates[i];     // Saved force plate descriptions
                        NatNetML.ForcePlateData fpData = data.ForcePlates[j]; // Received force plate frame data

                        Console.WriteLine("\tForce Plate ({0}):", fp.Serial);

                        // Here we will be printing out only the first force plate "subsample" (index 0) that was collected with the mocap frame.
                        for (int k = 0; k < fpData.nChannels; k++)
                        {
                            Console.WriteLine("\t\tChannel {0}: {1}", fp.ChannelNames[k], fpData.ChannelData[k].Values[0]);
                        }
                    }
                }
            }
            Console.WriteLine("\n");
        }
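Code Example #22 calls RadiansToDegrees, HighWord, and LowWord, which belong to the surrounding sample program and are not listed here. Minimal sketches consistent with how they are used (the bodies are assumptions, though they follow the conventional NatNet sample helpers):

using System;

static class SampleHelpers
{
    // Convert a radian angle (as returned by NatNetClientML.QuatToEuler) to degrees.
    public static double RadiansToDegrees(double radians)
    {
        return radians * (180.0 / Math.PI);
    }

    // Skeleton bone IDs pack the skeleton ID into the high 16 bits
    // and the bone's rigid body ID into the low 16 bits.
    public static int LowWord(int number)
    {
        return number & 0xFFFF;
    }

    public static int HighWord(int number)
    {
        return (number >> 16) & 0xFFFF;
    }
}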
Code Example #23
        static void processFrameData(NatNetML.FrameOfMocapData data)
        {
            //mPoints.Clear();
            //// Standard marker streaming.
            //for (int i = 0; i < data.nOtherMarkers; i++)
            //{
            //    // Recreate point data.
            //    Point3d markerPoint = new Point3d(Math.Round(data.LabeledMarkers[i].x, 4), Math.Round(data.LabeledMarkers[i].y, 4), Math.Round(data.LabeledMarkers[i].z, 4));
            //    mPoints.Add(markerPoint);
            //    /*
            //    for (int j = 0; j < data.MarkerSets[i].nMarkers; j++)
            //    {
            //        // Recreate point data.
            //        Point3d markerPoint = new Point3d(Math.Round(data.LabeledMarkers[i].x, 4), Math.Round(data.LabeledMarkers[i].y, 4), Math.Round(data.LabeledMarkers[i].z, 4));
            //        mPoints.Add(markerPoint);
            //    }
            //    */

            //    /*
            //    // Create marker labels.
            //    string markerLabel = data.OtherMarkers[i].ID.ToString();
            //    mLabels.Add(markerLabel);
            //    */
            //}

            if (RigidBody)
            {
                rigidBodyNames.Clear();
                rigidBodyPos.Clear();
                rigidBodyQuat.Clear();
                rigidBodyPlanes.Clear();

                /*  Parsing Rigid Body Frame Data   */
                for (int i = 0; i < mRigidBodies.Count; i++)
                {
                    // int rbID = mRigidBodies[i].ID;              // Fetching rigid body IDs from the saved descriptions

                    for (int j = 0; j < data.nRigidBodies; j++)
                    {
                        RigidBody     rb     = mRigidBodies[i];        // Saved rigid body descriptions
                        RigidBodyData rbData = data.RigidBodies[j];    // Received rigid body descriptions

                        if (rbData.Tracked == true)
                        {
                            // Rigid Body ID
                            rigidBodyNames.Add(rb.Name);

                            // Rigid Body Position
                            rigidBodyPos.Add(Math.Round(rbData.x, 4));
                            rigidBodyPos.Add(Math.Round(rbData.y, 4));
                            rigidBodyPos.Add(Math.Round(rbData.z, 4));

                            // Rigid Body Euler Orientation
                            //float[] quat = new float[4] { rbData.qx, rbData.qy, rbData.qz, rbData.qw };

                            // Attempt to normalise the rotation notation to fit with robot programming (WXYZ).
                            rigidBodyQuat.Add(Math.Round(rbData.qw, 6));
                            rigidBodyQuat.Add(Math.Round(rbData.qx, 6));
                            rigidBodyQuat.Add(Math.Round(rbData.qy, 6));
                            rigidBodyQuat.Add(Math.Round(rbData.qz, 6));

                            /*
                             * float[] eulers = new float[3];
                             *
                             * eulers = NatNetClientML.QuatToEuler(quat, NATEulerOrder.NAT_XYZr); //Converting quat orientation into XYZ Euler representation.
                             * double xrot = RadiansToDegrees(eulers[0]);
                             * double yrot = RadiansToDegrees(eulers[1]);
                             * double zrot = RadiansToDegrees(eulers[2]);
                             *
                             * Console.WriteLine("\t\tori ({0:N3}, {1:N3}, {2:N3})", xrot, yrot, zrot);
                             */


                            // Create Plane
                            Plane      rbPlane = new Plane();
                            Quaternion rbQuat  = new Quaternion(Math.Round(rbData.qw, 6), Math.Round(rbData.qx, 6), Math.Round(rbData.qy, 6), Math.Round(rbData.qz, 6));
                            rbQuat.GetRotation(out rbPlane);
                            rbPlane.Origin = new Point3d(Math.Round(rbData.x, 4), Math.Round(rbData.y, 4), Math.Round(rbData.z, 4));

                            // Scale from meters to millimeters
                            rbPlane.Transform(Transform.Scale(new Point3d(0, 0, 0), 1000));

                            // Y-up to Z-up basis change: (x, y, z) -> (x, -z, y)
                            Transform xformYup = new Transform();
                            xformYup.M01 = xformYup.M02 = xformYup.M03 = xformYup.M10 = xformYup.M11 = xformYup.M13 = xformYup.M20 = xformYup.M22 = xformYup.M30 = xformYup.M31 = xformYup.M32 = 0;
                            xformYup.M00 = xformYup.M21 = xformYup.M33 = 1;
                            xformYup.M12 = -1;

                            // 90 degree rotation about Z: (x, y, z) -> (-y, x, z)
                            Transform xRotate = new Transform();
                            xRotate.M00 = xRotate.M02 = xRotate.M03 = xRotate.M11 = xRotate.M12 = xRotate.M13 = xRotate.M20 = xRotate.M21 = xRotate.M30 = xRotate.M31 = xRotate.M32 = 0;
                            xRotate.M10 = xRotate.M22 = xRotate.M33 = 1;
                            xRotate.M01 = -1;

                            if (yUp) //Detect if Y is pointing up
                            {
                                rbPlane.Transform(xformYup);
                            }

                            rbPlane.Transform(xRotate);
                            rigidBodyPlanes.Add(rbPlane);
                        }
                        else
                        {
                            log.Add(rb.Name.ToString() + " is not tracked in current frame.");
                        }
                    }
                }
            }

            //if (Skeleton)
            //{
            //    /* Parsing Skeleton Frame Data  */
            //    for (int i = 0; i < mSkeletons.Count; i++)      // Fetching skeleton IDs from the saved descriptions
            //    {
            //        int sklID = mSkeletons[i].ID;

            //        for (int j = 0; j < data.nSkeletons; j++)
            //        {
            //            if (sklID == data.Skeletons[j].ID)      // When skeleton ID of the description matches skeleton ID of the frame data.
            //            {
            //                NatNetML.Skeleton skl = mSkeletons[i];              // Saved skeleton descriptions
            //                NatNetML.SkeletonData sklData = data.Skeletons[j];  // Received skeleton frame data

            //                Console.WriteLine("\tSkeleton ({0}):", skl.Name);
            //                Console.WriteLine("\t\tSegment count: {0}", sklData.nRigidBodies);

            //                /*  Now, for each of the skeleton segments  */
            //                for (int k = 0; k < sklData.nRigidBodies; k++)
            //                {
            //                    NatNetML.RigidBodyData boneData = sklData.RigidBodies[k];

            //                    /*  Decoding skeleton bone ID   */
            //                    int skeletonID = HighWord(boneData.ID);
            //                    int rigidBodyID = LowWord(boneData.ID);
            //                    int uniqueID = skeletonID * 1000 + rigidBodyID;
            //                    int key = uniqueID.GetHashCode();

            //                    NatNetML.RigidBody bone = (RigidBody)mHtSkelRBs[key];   //Fetching saved skeleton bone descriptions

            //                    //Outputting only the hip segment data for the purpose of this sample.
            //                    if (k == 0)
            //                        Console.WriteLine("\t\t{0:N3}: pos({1:N3}, {2:N3}, {3:N3})", bone.Name, boneData.x, boneData.y, boneData.z);
            //                }
            //            }
            //        }
            //    }
            //}

            //if (ForcePlate)
            //{
            //    /*  Parsing Force Plate Frame Data  */
            //    for (int i = 0; i < mForcePlates.Count; i++)
            //    {
            //        int fpID = mForcePlates[i].ID;                  // Fetching force plate IDs from the saved descriptions

            //        for (int j = 0; j < data.nForcePlates; j++)
            //        {
            //            if (fpID == data.ForcePlates[j].ID)         // When force plate ID of the descriptions matches force plate ID of the frame data.
            //            {
            //                NatNetML.ForcePlate fp = mForcePlates[i];                // Saved force plate descriptions
            //                NatNetML.ForcePlateData fpData = data.ForcePlates[i];    // Received forceplate frame data

            //                Console.WriteLine("\tForce Plate ({0}):", fp.Serial);

            //                // Here we will be printing out only the first force plate "subsample" (index 0) that was collected with the mocap frame.
            //                for (int k = 0; k < fpData.nChannels; k++)
            //                {
            //                    Console.WriteLine("\t\tChannel {0}: {1}", fp.ChannelNames[k], fpData.ChannelData[k].Values[0]);
            //                }
            //            }
            //        }
            //    }
            //}
        }
Code Example #24
        /// <summary>
        /// Refresh the UI at a fixed period specified by the timer
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void UpdateUITimer_Tick(object sender, EventArgs e)
        {
            m_UIUpdateTimer.Stop();
            double interframeDuration = m_UIUpdateTimer.Duration();

            QueryPerfCounter uiIntraFrameTimer = new QueryPerfCounter();
            uiIntraFrameTimer.Start();

            // the frame queue is a shared resource with the FrameOfMocap delivery thread, so lock it while reading
            // note this can block the frame delivery thread.  In a production application frame queue management would be optimized.
            lock (syncLock)
            {
                while (m_FrameQueue.Count > 0)
                {
                    m_FrameOfData = m_FrameQueue.Dequeue();

                    if (m_FrameQueue.Count > 0)
                        continue;

                    if (m_FrameOfData != null)
                    {
                        // for servers that only use timestamps, not frame numbers, calculate a
                        // frame number from the time delta between frames
                        if (desc.HostApp.Contains("TrackingTools"))
                        {
                            m_fCurrentMocapFrameTimestamp = m_FrameOfData.fLatency;
                            if (m_fCurrentMocapFrameTimestamp == m_fLastFrameTimestamp)
                            {
                                continue;
                            }
                            if (m_fFirstMocapFrameTimestamp == 0.0f)
                            {
                                m_fFirstMocapFrameTimestamp = m_fCurrentMocapFrameTimestamp;
                            }
                            m_FrameOfData.iFrame = (int)((m_fCurrentMocapFrameTimestamp - m_fFirstMocapFrameTimestamp) * m_ServerFramerate);

                        }

                        // update the data grid
                        UpdateDataGrid();

                        // update the chart
                        UpdateChart(m_FrameOfData.iFrame);

                        // only redraw chart when necessary, not for every frame
                        if (m_FrameQueue.Count == 0)
                        {
                            chart1.ChartAreas[0].RecalculateAxesScale();
                            chart1.ChartAreas[0].AxisX.Minimum = 0;
                            chart1.ChartAreas[0].AxisX.Maximum = GraphFrames;
                            chart1.Invalidate();
                        }

                        // Mocap server timestamp (in seconds)
                        m_fLastFrameTimestamp = m_FrameOfData.fTimestamp;
                        TimestampValue.Text = m_FrameOfData.fTimestamp.ToString("F3");

                        // SMPTE timecode (if timecode generator present)
                        int hour, minute, second, frame, subframe;
                        bool bSuccess = m_NatNet.DecodeTimecode(m_FrameOfData.Timecode, m_FrameOfData.TimecodeSubframe, out hour, out minute, out second, out frame, out subframe);
                        if (bSuccess)
                            TimecodeValue.Text = string.Format("{0:D2}:{1:D2}:{2:D2}:{3:D2}.{4:D2}", hour, minute, second, frame, subframe);

                        if (m_FrameOfData.bRecording)
                            chart1.BackColor = Color.Red;
                        else
                            chart1.BackColor = Color.White;
                    }
                }
            }

            uiIntraFrameTimer.Stop();
            double uiIntraFrameDuration = uiIntraFrameTimer.Duration();
            m_UIUpdateTimer.Start();
        }
Code Example #25
        //called when data for any output pin is requested
        public void Evaluate(int SpreadMax)
        {
            int returnCode = 0;

            if (FEnabled[0])
            {
                //If not connected already, try to connect
                if (!FConnected[0])
                {
                    // [NatNet] connect to a NatNet server
                    returnCode = mNatNet.Initialize(FLocal[0], FServer[0], FPort[0]);
                    FLogger.Log(LogType.Debug, "Trying to connect...");

                    if (returnCode == 0)
                    {
                        FLogger.Log(LogType.Debug, "Connected!");

                        // [NatNet] validate the connection
                        returnCode = mNatNet.GetServerDescription(mDesc);

                        if (returnCode == 0)
                        {
                            FLogger.Log(LogType.Debug, "Server App Name: " + mDesc.HostApp);


                            // [NatNet] [optional] send a test/response message
                            FLogger.Log(LogType.Debug, "Sending TestRequest");
                            int    nBytes   = 0;
                            byte[] response = new byte[10000];
                            int    rc       = mNatNet.SendMessageAndWait("TestRequest", out response, out nBytes);
                            if (rc == 0)
                            {
                                string str = "   Server: " + System.Text.Encoding.ASCII.GetString(response, 0, nBytes);
                                FLogger.Log(LogType.Debug, str);
                            }
                            else
                            {
                                FLogger.Log(LogType.Debug, "   Server: No Response.");
                            }

                            rc = mNatNet.SendMessageAndWait("FrameRate", out response, out nBytes);
                            if (rc == 0)
                            {
                                try
                                {
                                    double serverFramerate = BitConverter.ToSingle(response, 0);
                                    FLogger.Log(LogType.Debug, String.Format("   Server Framerate: {0}", serverFramerate));
                                }
                                catch (System.Exception ex)
                                {
                                    FLogger.Log(LogType.Debug, ex.Message);
                                }
                            }


                            //Connected
                            FConnected[0] = true;
                            FEnabled[0]   = FConnected[0];
                        }
                    }

                    //Get the rigid body names
                    this.RequestDataDescription();
                }
                else
                {
                    // Already connected, update output

                    // Upon update, query server for data description
                    if (FUpdate.IsChanged)
                    {
                        this.RequestDataDescription();
                    }
                    lock (syncLock) {
                        //FLogger.Log(LogType.Debug, "" + m_FrameQueue.Count);
                        while (m_FrameQueue.Count > 0)
                        {
                            mFrameOfData = m_FrameQueue.Dequeue();

                            if (m_FrameQueue.Count > 0)
                            {
                                continue;
                            }
                            FFrame.SliceCount = 0;
                            FFrame.Add(mFrameOfData);
                        }
                    }


                    // Update the rigid bodies names
                    FRigidBody.SliceCount = mRBNames.Count;
                    for (int i = 0; i < mRBNames.Count; ++i)
                    {
                        FRigidBody[i] = mRBNames[i];
                    }
                }
            }
            else
            {
                //If connected already, disconnect, else, do nothing
                if (FConnected[0])
                {
                    mRBNames.Clear();
                    returnCode    = mNatNet.Uninitialize();
                    FConnected[0] = (0 == returnCode);
                }
            }
        }
Code Example #27
File: Form1.cs  Project: deeni/vive
        // UI refresher
        private void UpdateUITimer_Tick(object sender, EventArgs e)
        {

            lock (syncLock)
            {
                while (m_FrameQueue.Count > 0)
                {
                    m_FrameOfData = m_FrameQueue.Dequeue();
                    
                    if (m_FrameQueue.Count > 0)
                        continue;
                    
                    if(m_FrameOfData != null)
                    {
                        // for servers that only use timestamps, not frame numbers, calculate a 
                        // frame number from the time delta between frames
                        if (desc.HostApp.Contains("TrackingTools") || desc.HostApp.Contains("Motive"))
                        {                      
                            m_fCurrentMocapFrameTimestamp = m_FrameOfData.fLatency;
                            if (m_fCurrentMocapFrameTimestamp == m_fLastFrameTimestamp)
                            {
                                continue;
                            }
                            if (m_fFirstMocapFrameTimestamp == 0.0f)
                            {
                                m_fFirstMocapFrameTimestamp = m_fCurrentMocapFrameTimestamp;
                            }
                            m_FrameOfData.iFrame = (int)((m_fCurrentMocapFrameTimestamp - m_fFirstMocapFrameTimestamp) * m_ServerFramerate);

                        }

                        // update the data grid
                        UpdateDataGrid();

                        // update the chart
                        UpdateChart(m_FrameCounter++);
                        
                        // only redraw chart when necessary, not for every frame
                        if (m_FrameQueue.Count == 0)
                        {
                            chart1.ChartAreas[0].RecalculateAxesScale();
                            chart1.ChartAreas[0].AxisX.Minimum = 0;
                            chart1.ChartAreas[0].AxisX.Maximum = GraphFrames;
                            chart1.Invalidate();
                        }
                        
                        m_fLastFrameTimestamp = m_FrameOfData.fLatency;        

                    }

                }
            }
        }
Code Example #28
File: Form1.cs Project: michaelcoe/PyNatNet
 // [NatNet] m_NatNet_OnFrameReady will be called when a frame of mocap data has been received
 // from the server application.
 void m_NatNet_OnFrameReady(NatNetML.FrameOfMocapData data, NatNetML.NatNetClientML client)
 {
     m_FrameQueue.Enqueue(data);
 }
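The handler above only enqueues frames; a consumer (for example a UI timer tick) would normally drain the queue and keep the most recent frame. A minimal sketch of that consumer side, assuming the same m_FrameQueue field and that the queue is guarded by a syncLock object on both threads, as in the other examples:

        // Minimal consumer sketch (assumes m_FrameQueue and a shared syncLock, as in the other examples).
        private void DrainFrameQueue()
        {
            NatNetML.FrameOfMocapData latest = null;
            lock (syncLock)
            {
                while (m_FrameQueue.Count > 0)
                {
                    latest = m_FrameQueue.Dequeue();   // keep only the most recent frame
                }
            }
            if (latest != null)
            {
                // process 'latest' here (update UI, forward the data, etc.)
            }
        }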
Code Example #29
		//called when data for any output pin is requested
		public void Evaluate(int SpreadMax)
		{
            int returnCode = 0;
            if(FEnabled[0]) {

                //If not connected already, try to connect
                if(!FConnected[0]) {

                    // [NatNet] connect to a NatNet server
                    returnCode = mNatNet.Initialize(FLocal[0], FServer[0], FPort[0]);
                    FLogger.Log(LogType.Debug, "Trying to connect...");
                	
                	if(returnCode == 0) {
                		FLogger.Log(LogType.Debug, "Connected!");

                		// [NatNet] validate the connection
            			returnCode = mNatNet.GetServerDescription(mDesc);
                		
                		if(returnCode == 0) {
                			FLogger.Log(LogType.Debug, "Server App Name: " + mDesc.HostApp);
                			
                			
                			// [NatNet] [optional] send a test/response message
			                FLogger.Log(LogType.Debug, "Sending TestRequest");
			                int nBytes = 0;
			                byte[] response = new byte[10000];
			                int rc = mNatNet.SendMessageAndWait("TestRequest", out response, out nBytes);
			                if (rc == 0)
			                {
			                    string str = "   Server: " + System.Text.Encoding.ASCII.GetString(response, 0, nBytes);
			                    FLogger.Log(LogType.Debug, str);
			                }
			                else
			                {
			                    FLogger.Log(LogType.Debug, "   Server: No Response.");
			                }
			
			                rc = mNatNet.SendMessageAndWait("FrameRate", out response, out nBytes);
			                if (rc == 0)
			                {
			                    try
			                    {
			                        double serverFramerate = BitConverter.ToSingle(response, 0);
			                        FLogger.Log(LogType.Debug, String.Format("   Server Framerate: {0}", serverFramerate));
			                    }
			                    catch (System.Exception ex)
			                    {
			                        FLogger.Log(LogType.Debug, ex.Message);
			                    }
			                }
                			
   
                			//Connected
                			FConnected[0] = true;
                    		FEnabled[0] = FConnected[0];
                		}
                	}

                    //Get the rigid body names
                    this.RequestDataDescription();
                } else {
                    // Already connected, update output

                    // Upon update, query server for data description
                    if (FUpdate.IsChanged)
                        this.RequestDataDescription();
                	lock(syncLock) {
                		//FLogger.Log(LogType.Debug, "" + m_FrameQueue.Count);
                		while(m_FrameQueue.Count > 0) {
                			mFrameOfData = m_FrameQueue.Dequeue();
                			
                			if(m_FrameQueue.Count > 0)
                				continue;
                			FFrame.SliceCount = 0;
                			FFrame.Add(mFrameOfData);
            			
            			}
            		}
                	
                	
                	// Update the rigid bodies names
                	FRigidBody.SliceCount = mRBNames.Count;
                	for(int i = 0; i < mRBNames.Count; ++i)
                		FRigidBody[i] = mRBNames[i];
                }
            } else {
                //If connected already, disconnect, else, do nothing
                if(FConnected[0]) {
                    mRBNames.Clear();
                    returnCode = mNatNet.Uninitialize();
                    FConnected[0] = (0 == returnCode);
                }
            }
		}
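The Evaluate method above calls RequestDataDescription() to populate mRBNames, but that method is not part of the snippet. A minimal sketch of what it might look like, assuming the NatNetML GetDataDescriptions call used in the other samples and the same mNatNet and mRBNames fields:

        // Minimal sketch (assumption): query the server's data descriptions and
        // collect the rigid body names into mRBNames.
        private void RequestDataDescription()
        {
            List<NatNetML.DataDescriptor> descriptors = new List<NatNetML.DataDescriptor>();
            if (mNatNet.GetDataDescriptions(out descriptors))
            {
                mRBNames.Clear();
                foreach (NatNetML.DataDescriptor d in descriptors)
                {
                    if (d.type == (int)NatNetML.DataDescriptorType.eRigidbodyData)
                    {
                        NatNetML.RigidBody rb = (NatNetML.RigidBody)d;
                        mRBNames.Add(rb.Name);
                    }
                }
            }
        }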
Code Example #30
        //static uint GPS_LEAPSECONDS_MILLIS = 18000;

        static void processFrameData(NatNetML.FrameOfMocapData data)
        {
            bool data_gogo = true;      // allow at most one drone to be sent fresh mocap data per processed frame

            /*  Parsing Rigid Body Frame Data   */
            for (int i = 0; i < mRigidBodies.Count; i++)
            {
                int rbID = mRigidBodies[i].ID;              // Fetching rigid body IDs from the saved descriptions

                for (int j = 0; j < data.nRigidBodies; j++)
                {
                    if (rbID == data.RigidBodies[j].ID)                      // When rigid body ID of the descriptions matches rigid body ID of the frame data.
                    {
                        NatNetML.RigidBody     rb     = mRigidBodies[i];     // Saved rigid body descriptions
                        NatNetML.RigidBodyData rbData = data.RigidBodies[j]; // Received rigid body frame data

                        if (rbData.Tracked == true)
                        {
#if DEBUG_MSG
                            Console.WriteLine("\tRigidBody ({0}):", rb.Name);
                            Console.WriteLine("\t\tpos ({0:N3}, {1:N3}, {2:N3})", rbData.x, rbData.y, rbData.z);

                            // Rigid Body Euler Orientation
                            float[] quat = new float[4] {
                                rbData.qx, rbData.qy, rbData.qz, rbData.qw
                            };
                            float[] eulers = new float[3];

                            eulers = NatNetClientML.QuatToEuler(quat, NATEulerOrder.NAT_XYZr); //Converting quat orientation into XYZ Euler representation.
                            double xrot = RadiansToDegrees(eulers[0]);
                            double yrot = RadiansToDegrees(eulers[1]);
                            double zrot = RadiansToDegrees(eulers[2]);
                            Console.WriteLine("\t\tori ({0:N3}, {1:N3}, {2:N3})", xrot, yrot, zrot);
#endif
                            if (drones.ContainsKey(rb.Name))
                            {
                                DroneData drone = drones[rb.Name];
                                drone.lost_count = 0;
                                long cur_ms = stopwatch.ElapsedMilliseconds;
                                if (drone.send_count > 0)
                                {
                                    drone.send_count--;
                                }
                                else if (data_gogo)
                                {
                                    drone.send_count = 10;
                                    data_gogo        = false;
                                    MAVLink.mavlink_att_pos_mocap_t att_pos = new MAVLink.mavlink_att_pos_mocap_t();
                                    att_pos.time_usec = (ulong)(cur_ms * 1000);
                                    att_pos.x         = rbData.x;  //north
                                    att_pos.y         = rbData.z;  //east
                                    att_pos.z         = -rbData.y; //down
                                    att_pos.q         = new float[4] {
                                        rbData.qw, rbData.qx, rbData.qz, -rbData.qy
                                    };
                                    byte[] pkt = mavlinkParse.GenerateMAVLinkPacket10(MAVLink.MAVLINK_MSG_ID.ATT_POS_MOCAP, att_pos);

                                    if (drone.lastTime >= 0)
                                    {
                                        MAVLink.mavlink_vision_speed_estimate_t vis_speed = new MAVLink.mavlink_vision_speed_estimate_t();
                                        float total_s = (cur_ms - drone.lastTime) * 0.001f;
                                        vis_speed.x    = (rbData.x - drone.lastPosN) / total_s;
                                        vis_speed.y    = (rbData.z - drone.lastPosE) / total_s;
                                        vis_speed.z    = (-rbData.y - drone.lastPosD) / total_s;
                                        vis_speed.usec = (ulong)(cur_ms * 1000);
                                        byte[] pkt2      = mavlinkParse.GenerateMAVLinkPacket10(MAVLink.MAVLINK_MSG_ID.VISION_SPEED_ESTIMATE, vis_speed);
                                        int    total_len = pkt.Length + pkt2.Length;
                                        byte[] uwb_data  = new byte[10 + total_len + 1];
                                        uwb_data[0] = 0x54;
                                        uwb_data[1] = 0xf1;
                                        uwb_data[2] = 0xff;
                                        uwb_data[3] = 0xff;
                                        uwb_data[4] = 0xff;
                                        uwb_data[5] = 0xff;
                                        uwb_data[6] = 2;
                                        uwb_data[7] = drone.uwb_tag_id;
                                        uwb_data[8] = (byte)(total_len & 0xff);        // payload length, low byte
                                        uwb_data[9] = (byte)((total_len >> 8) & 0xff); // payload length, high byte
                                        Array.Copy(pkt, 0, uwb_data, 10, pkt.Length);
                                        Array.Copy(pkt2, 0, uwb_data, 10 + pkt.Length, pkt2.Length);
                                        byte chk_sum = 0;
                                        foreach (byte uwb_data_byte in uwb_data)
                                        {
                                            chk_sum += uwb_data_byte;
                                        }
                                        uwb_data[uwb_data.Length - 1] = chk_sum;
                                        mavSock.SendTo(uwb_data, drone.ep);
                                    }
                                }
                                drone.lastTime = cur_ms;
                                drone.lastPosN = rbData.x;
                                drone.lastPosE = rbData.z;
                                drone.lastPosD = -rbData.y;
                            }
                        }
                        else
                        {
                            Console.WriteLine("\t{0} is not tracked in current frame", rb.Name);

                            /*if (drones.ContainsKey(rb.Name))
                             * {
                             *  DroneData drone = drones[rb.Name];
                             *  drone.lost_count++;
                             *  if (drone.lost_count > 3)
                             *  {
                             *      drone.lost_count = 0;
                             *      MAVLink.mavlink_gps_input_t gps_input = new MAVLink.mavlink_gps_input_t();
                             *      gps_input.gps_id = 0;
                             *      gps_input.fix_type = (byte)MAVLink.GPS_FIX_TYPE.NO_FIX;
                             *      byte[] pkt = mavlinkParse.GenerateMAVLinkPacket10(MAVLink.MAVLINK_MSG_ID.GPS_INPUT, gps_input);
                             *      mavSock.SendTo(pkt, drone.ep);
                             *  }
                             * }*/
                        }
                    }
                }
            }

            /* Parsing Skeleton Frame Data  */
            for (int i = 0; i < mSkeletons.Count; i++)      // Fetching skeleton IDs from the saved descriptions
            {
                int sklID = mSkeletons[i].ID;

                for (int j = 0; j < data.nSkeletons; j++)
                {
                    if (sklID == data.Skeletons[j].ID)                     // When skeleton ID of the description matches skeleton ID of the frame data.
                    {
                        NatNetML.Skeleton     skl     = mSkeletons[i];     // Saved skeleton descriptions
                        NatNetML.SkeletonData sklData = data.Skeletons[j]; // Received skeleton frame data

                        Console.WriteLine("\tSkeleton ({0}):", skl.Name);
                        Console.WriteLine("\t\tSegment count: {0}", sklData.nRigidBodies);

                        /*  Now, for each of the skeleton segments  */
                        for (int k = 0; k < sklData.nRigidBodies; k++)
                        {
                            NatNetML.RigidBodyData boneData = sklData.RigidBodies[k];

                            /*  Decoding skeleton bone ID   */
                            int skeletonID  = HighWord(boneData.ID);
                            int rigidBodyID = LowWord(boneData.ID);
                            int uniqueID    = skeletonID * 1000 + rigidBodyID;
                            int key         = uniqueID.GetHashCode();

                            NatNetML.RigidBody bone = (RigidBody)mHtSkelRBs[key];   //Fetching saved skeleton bone descriptions

                            //Outputting only the hip segment data for the purpose of this sample.
                            if (k == 0)
                            {
                                Console.WriteLine("\t\t{0:N3}: pos({1:N3}, {2:N3}, {3:N3})", bone.Name, boneData.x, boneData.y, boneData.z);
                            }
                        }
                    }
                }
            }

            /*  Parsing Force Plate Frame Data  */
            for (int i = 0; i < mForcePlates.Count; i++)
            {
                int fpID = mForcePlates[i].ID;                  // Fetching force plate IDs from the saved descriptions

                for (int j = 0; j < data.nForcePlates; j++)
                {
                    if (fpID == data.ForcePlates[j].ID)                       // When force plate ID of the descriptions matches force plate ID of the frame data.
                    {
                        NatNetML.ForcePlate     fp     = mForcePlates[i];     // Saved force plate descriptions
                        NatNetML.ForcePlateData fpData = data.ForcePlates[j]; // Received force plate frame data

                        Console.WriteLine("\tForce Plate ({0}):", fp.Serial);

                        // Here we will be printing out only the first force plate "subsample" (index 0) that was collected with the mocap frame.
                        for (int k = 0; k < fpData.nChannels; k++)
                        {
                            Console.WriteLine("\t\tChannel {0}: {1}", fp.ChannelNames[k], fpData.ChannelData[k].Values[0]);
                        }
                    }
                }
            }
#if DEBUG_MSG
            Console.WriteLine("\n");
#endif
        }
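Several of these snippets call RadiansToDegrees, HighWord, and LowWord without showing them; they are plain helpers, not part of NatNetML. Minimal sketches of typical implementations follow (an assumption, matching how the call sites use them: the skeleton bone ID packs the skeleton ID into the upper 16 bits and the bone's rigid body ID into the lower 16 bits).

        // Minimal sketches of the helpers assumed by the snippets above.
        static double RadiansToDegrees(double radians)
        {
            return radians * (180.0 / Math.PI);
        }

        static int LowWord(int number)
        {
            return number & 0x0000FFFF;          // lower 16 bits: rigid body (bone) ID
        }

        static int HighWord(int number)
        {
            return (number >> 16) & 0x0000FFFF;  // upper 16 bits: skeleton ID
        }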
Code Example #31
        void ProcessFrameOfData(ref NatNetML.FrameOfMocapData data)
        {
            TelemetryData telemetry = new TelemetryData();
            bool          bMotiveHardwareLatenciesAvailable = data.CameraMidExposureTimestamp != 0;

            if (bMotiveHardwareLatenciesAvailable)
            {
                telemetry.TotalLatency       = mNatNet.SecondsSinceHostTimestamp(data.CameraMidExposureTimestamp) * 1000.0;
                telemetry.MotiveTotalLatency = (data.TransmitTimestamp - data.CameraMidExposureTimestamp) / (double)desc.HighResClockFrequency * 1000.0;
            }
            bool bMotiveLatenciesAvailable = data.CameraDataReceivedTimestamp != 0;

            if (bMotiveLatenciesAvailable)
            {
            }
            telemetry.TransmitLatency = mNatNet.SecondsSinceHostTimestamp(data.TransmitTimestamp) * 1000.0;


            // detect and report any frame drop, as reported by the server (timestamp based)
            if (m_fLastFrameTimestamp != 0.0f)
            {
                double framePeriod = 1.0f / m_ServerFramerate;
                double thisPeriod  = data.fTimestamp - m_fLastFrameTimestamp;
                double delta       = thisPeriod - framePeriod;
                double fudgeFactor = 0.002f; // 2 ms
                if (delta > fudgeFactor)
                {
                    //OutputMessage("Frame Drop: ( ThisTS: " + data.fTimestamp.ToString("F3") + "  LastTS: " + m_fLastFrameTimestamp.ToString("F3") + " )");
                    double missingPeriod = delta / framePeriod;
                    int    nMissing      = (int)(missingPeriod + 0.5);
                    mDroppedFrames         += nMissing;
                    telemetry.DroppedFrames = nMissing;
                    droppedFrameIndicator   = 10; // for graphing only
                }
                else
                {
                    droppedFrameIndicator = 0;
                }
            }

            // check and report frame drop (frame id based)
            if (mLastFrame != 0)
            {
                if ((data.iFrame - mLastFrame) != 1)
                {
                    //OutputMessage("Frame Drop: ( ThisFrame: " + data.iFrame.ToString() + "  LastFrame: " + mLastFrame.ToString() + " )");
                    //mDroppedFrames++;
                }
            }

            /*
             * if (data.bTrackingModelsChanged)
             *  mNeedTrackingListUpdate = true;
             */
            // NatNet manages the incoming frame of mocap data, so if we want to keep it, we must make a copy of it
            FrameOfMocapData deepCopy = new FrameOfMocapData(data);

            // Add frame to a background queue for access by other threads
            // Note: this lock should always succeed immediately, unless connecting/disconnecting, when the queue gets reset
            lock (BackQueueLock)
            {
                m_BackQueue.Enqueue(deepCopy);

                // limit background queue size to 3 frames
                while (m_BackQueue.Count > 3)
                {
                    m_BackQueue.Dequeue();
                }
            }

            // Update the shared UI queue, only if the UI thread is not updating (we don't want to wait here as we're in the data update thread)
            bool lockAcquired = false;

            try
            {
                Monitor.TryEnter(FrontQueueLock, ref lockAcquired);
                if (lockAcquired)
                {
                    // [optional] clear the frame queue before adding a new frame (UI only wants most recent frame)
                    m_FrontQueue.Clear();
                    m_FrontQueue.Enqueue(deepCopy);

                    m_FrameTransitLatencies.Clear();
                    m_FrameTransitLatencies.Enqueue(telemetry.TransmitLatency);

                    m_TotalLatencies.Clear();
                    m_TotalLatencies.Enqueue(telemetry.TotalLatency);
                }
                else
                {
                    mUIBusyCount++;
                }
            }
            finally
            {
                if (lockAcquired)
                {
                    Monitor.Exit(FrontQueueLock);
                }
            }

            // recording : write packet to data file
            if (mRecording)
            {
                WriteFrame(deepCopy, telemetry);
            }

            mLastFrame            = data.iFrame;
            m_fLastFrameTimestamp = data.fTimestamp;
        }
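ProcessFrameOfData only shows the producer side of the front queue; the UI thread would read from m_FrontQueue under FrontQueueLock and do its drawing outside the lock. A minimal sketch of that consumer side, assuming the same m_FrontQueue and FrontQueueLock fields:

        // Minimal sketch of the UI-side consumer (assumes the same m_FrontQueue / FrontQueueLock fields).
        private void UpdateUIFromFrontQueue()
        {
            FrameOfMocapData frame = null;
            lock (FrontQueueLock)
            {
                if (m_FrontQueue.Count > 0)
                {
                    frame = m_FrontQueue.Dequeue();   // the producer keeps only the most recent frame here
                }
            }
            if (frame != null)
            {
                // update grids, charts, etc. outside the lock so the data thread is not blocked
            }
        }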
Code Example #32
        static int processFrameData(NatNetML.FrameOfMocapData data, int index)
        {
            int index2 = 1;

            string startupPath = Environment.CurrentDirectory;
            //Console.WriteLine(startupPath);
            string pathCmd = string.Format("cd {0}\\..\\..", startupPath);

            //Console.WriteLine(pathCmd);

            // Create the MATLAB instance
            MLApp.MLApp matlab = new MLApp.MLApp();

            // Change to the directory where the function is located
            matlab.Execute(pathCmd);


            Console.WriteLine("labeled markers: " + data.nMarkers);

            /*  Parsing Rigid Body Frame Data   */
            for (int i = 0; i < mRigidBodies.Count; i++)
            {
                int rbID = mRigidBodies[i].ID;              // Fetching rigid body IDs from the saved descriptions

                for (int j = 0; j < data.nRigidBodies; j++)
                {
                    if (rbID == data.RigidBodies[j].ID)                      // When rigid body ID of the descriptions matches rigid body ID of the frame data.
                    {
                        NatNetML.RigidBody     rb     = mRigidBodies[i];     // Saved rigid body descriptions
                        NatNetML.RigidBodyData rbData = data.RigidBodies[j]; // Received rigid body frame data

                        if (rbData.Tracked == true && HLdata.Count > 0 && index > 0)
                        {
                            NatNetML.Marker marker = data.LabeledMarkers[i];

                            int mID = marker.ID;
                            //Console.WriteLine("\tMarker ({0}):", mID);
                            //Console.WriteLine("\t\tpos ({0:N3}, {1:N3}, {2:N3})", marker.x, marker.y, marker.z);


                            //Console.WriteLine("\tRigidBody ({0}):", rb.Name);
                            //Console.WriteLine("\t\tpos ({0:N3}, {1:N3}, {2:N3})", rbData.x, rbData.y, rbData.z);

                            // Rigid Body Euler Orientation
                            double[] rbquat = new double[4] {
                                rbData.qx, rbData.qy, rbData.qz, rbData.qw
                            };
                            double[] rbpos = new double[3] {
                                rbData.x, rbData.y, rbData.z
                            };
                            double[] mkpos = new double[3] {
                                marker.x, marker.y, marker.z
                            };

                            int      idxHl = HLdata.Count - 1;
                            double[] kbP   = new double[3] {
                                (double)HLdata[idxHl][0], (double)HLdata[idxHl][1], (double)HLdata[idxHl][2]
                            };
                            double[] kbQ = new double[4] {
                                (double)HLdata[idxHl][3], (double)HLdata[idxHl][4], (double)HLdata[idxHl][5], (double)HLdata[idxHl][6]
                            };
                            double[] hlPs = new double[3] {
                                (double)HLdata[idxHl][7], (double)HLdata[idxHl][8], (double)HLdata[idxHl][9]
                            };
                            double[] hlQs = new double[4] {
                                (double)HLdata[idxHl][10], (double)HLdata[idxHl][11], (double)HLdata[idxHl][12], (double)HLdata[idxHl][13]
                            };

                            bool mlEval = true;

                            if (mlEval)
                            {
                                object newmarkerpos = null;

                                // object result = null;
                                //float[] A = { 1, 2 };
                                //float[] B = { 2, 2 };
                                // matlab.Feval("add", 1, out result, A, B);
                                //object[] res2 = result as object[];
                                //Console.WriteLine(Convert.ToSingle(res2[0]));
                                //Console.WriteLine(res2[1]);

                                matlab.Feval("trans", 3, out newmarkerpos, mkpos, rbpos, rbquat, kbP, kbQ, hlPs, hlQs);

                                object[] res = newmarkerpos as object[];


                                double xpos = Convert.ToDouble(res[0]);
                                double ypos = Convert.ToDouble(res[1]);
                                double zpos = Convert.ToDouble(res[2]);


                                double[] newmarkerpos1 = new double[3] {
                                    xpos, ypos, zpos
                                };



                                object result1 = null;
                                //Console.WriteLine(index);
                                matlab.Feval("recognition", 2, out result1, newmarkerpos1, index);

                                object[] res2 = result1 as object[];

                                Console.WriteLine(res2[0]);
                                Console.WriteLine(res2[1]);


                                index2 = Convert.ToInt32(res2[0]);
                            }
                        }
                        else
                        {
                            if (rbData.Tracked)
                            {
                                Console.WriteLine("\t HLData {0}, index {1}", HLdata.Count, index);
                            }
                            else
                            {
                                Console.WriteLine("\t{0} is not tracked in current frame", rb.Name);
                            }
                        }
                    }
                }
            }
            //Console.WriteLine(index2);
            return(index2);
        }
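Note that the snippet above creates the MLApp.MLApp COM automation instance and changes its working directory on every call to processFrameData, which is expensive at mocap frame rates. A common alternative is to create the instance once and reuse it; a minimal sketch of that variant (an assumption, not part of the original project):

        // Minimal sketch (assumption): create the MATLAB COM automation instance once and reuse it.
        static MLApp.MLApp sMatlab;

        static MLApp.MLApp GetMatlab(string workingDirectory)
        {
            if (sMatlab == null)
            {
                sMatlab = new MLApp.MLApp();
                sMatlab.Execute(string.Format("cd {0}", workingDirectory));
            }
            return sMatlab;
        }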
Code Example #33
File: Program.cs Project: madelinegannon/UR5-Tests
        private static void Update()
        {
            m_UIUpdateTimer.Stop();
            double interframeDuration = m_UIUpdateTimer.Duration();

            QueryPerfCounter uiIntraFrameTimer = new QueryPerfCounter();
            uiIntraFrameTimer.Start();

            // the frame queue is a shared resource with the FrameOfMocap delivery thread, so lock it while reading
            // note this can block the frame delivery thread.  In a production application frame queue management would be optimized.
            lock (syncLock)
            {
                while (m_FrameQueue.Count > 0)
                {
                    m_FrameOfData = m_FrameQueue.Dequeue();

                    if (m_FrameQueue.Count > 0)
                        continue;

                    if (m_FrameOfData != null)
                    {
                        // for servers that only use timestamps, not frame numbers, calculate a
                        // frame number from the time delta between frames
                        if (desc.HostApp.Contains("TrackingTools"))
                        {
                            m_fCurrentMocapFrameTimestamp = m_FrameOfData.fLatency;
                            if (m_fCurrentMocapFrameTimestamp == m_fLastFrameTimestamp)
                            {
                                continue;
                            }
                            if (m_fFirstMocapFrameTimestamp == 0.0f)
                            {
                                m_fFirstMocapFrameTimestamp = m_fCurrentMocapFrameTimestamp;
                            }
                            m_FrameOfData.iFrame = (int)((m_fCurrentMocapFrameTimestamp - m_fFirstMocapFrameTimestamp) * m_ServerFramerate);

                        }

                        // update the mocap data
                        UpdateData();
                    }
                }

                uiIntraFrameTimer.Stop();
                double uiIntraFrameDuration = uiIntraFrameTimer.Duration();
                m_UIUpdateTimer.Start();

            }
        }
Code Example #34
        /// <summary>
        /// Refresh the UI at a fixed period specified by the timer
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void UpdateUITimer_Tick(object sender, EventArgs e)
        {
            m_UIUpdateTimer.Stop();
            double interframeDuration = m_UIUpdateTimer.Duration();

            QueryPerfCounter uiIntraFrameTimer = new QueryPerfCounter();

            uiIntraFrameTimer.Start();

            // the frame queue is a shared resource with the FrameOfMocap delivery thread, so lock it while reading
            // note this can block the frame delivery thread.  In a production application frame queue management would be optimized.
            lock (syncLock)
            {
                while (m_FrameQueue.Count > 0)
                {
                    m_FrameOfData = m_FrameQueue.Dequeue();

                    if (m_FrameQueue.Count > 0)
                    {
                        continue;
                    }

                    if (m_FrameOfData != null)
                    {
                        // for servers that only use timestamps, not frame numbers, calculate a
                        // frame number from the time delta between frames
                        if (desc.HostApp.Contains("TrackingTools"))
                        {
                            m_fCurrentMocapFrameTimestamp = m_FrameOfData.fLatency;
                            if (m_fCurrentMocapFrameTimestamp == m_fLastFrameTimestamp)
                            {
                                continue;
                            }
                            if (m_fFirstMocapFrameTimestamp == 0.0f)
                            {
                                m_fFirstMocapFrameTimestamp = m_fCurrentMocapFrameTimestamp;
                            }
                            m_FrameOfData.iFrame = (int)((m_fCurrentMocapFrameTimestamp - m_fFirstMocapFrameTimestamp) * m_ServerFramerate);
                        }

                        // update the data grid
                        UpdateDataGrid();

                        // update the chart
                        UpdateChart(m_FrameOfData.iFrame);

                        // only redraw chart when necessary, not for every frame
                        if (m_FrameQueue.Count == 0)
                        {
                            chart1.ChartAreas[0].RecalculateAxesScale();
                            chart1.ChartAreas[0].AxisX.Minimum = 0;
                            chart1.ChartAreas[0].AxisX.Maximum = GraphFrames;
                            chart1.Invalidate();
                        }

                        // Mocap server timestamp (in seconds)
                        m_fLastFrameTimestamp = m_FrameOfData.fTimestamp;
                        TimestampValue.Text   = m_FrameOfData.fTimestamp.ToString("F3");

                        // SMPTE timecode (if timecode generator present)
                        int  hour, minute, second, frame, subframe;
                        bool bSuccess = m_NatNet.DecodeTimecode(m_FrameOfData.Timecode, m_FrameOfData.TimecodeSubframe, out hour, out minute, out second, out frame, out subframe);
                        if (bSuccess)
                        {
                            TimecodeValue.Text = string.Format("{0:D2}:{1:D2}:{2:D2}:{3:D2}.{4:D2}", hour, minute, second, frame, subframe);
                        }

                        if (m_FrameOfData.bRecording)
                        {
                            chart1.BackColor = Color.Red;
                        }
                        else
                        {
                            chart1.BackColor = Color.White;
                        }
                    }
                }
            }

            uiIntraFrameTimer.Stop();
            double uiIntraFrameDuration = uiIntraFrameTimer.Duration();

            m_UIUpdateTimer.Start();
        }
Code Example #35
        private NatNetPoseData ProcessPoseData(NatNetML.FrameOfMocapData data)
        {
            NatNetPoseData poseData = new NatNetPoseData();

            /*  Parsing Simple Marker Data   */
            for (int i = 0; i < data.nMarkers; i++)
            {
                Marker marker = data.LabeledMarkers[i];
                int    mID    = marker.ID;
                // Console.WriteLine("\tMarker ({0}): \t\tpos ({0:N3}, {1:N3}, {2:N3})", mID, marker.x, marker.y, marker.z);
                poseData.mPos = new Vector3(marker.x, marker.y, marker.z); // overwritten each iteration; only the last labeled marker is kept
            }

            /*  Parsing Rigid Body Frame Data   */
            for (int i = 0; i < mRigidBodies.Count; i++)
            {
                int rbID = mRigidBodies[i].ID;              // Fetching rigid body IDs from the saved descriptions

                for (int j = 0; j < data.nRigidBodies; j++)
                {
                    if (rbID == data.RigidBodies[j].ID)                      // When rigid body ID of the descriptions matches rigid body ID of the frame data.
                    {
                        NatNetML.RigidBody     rb     = mRigidBodies[i];     // Saved rigid body descriptions
                        NatNetML.RigidBodyData rbData = data.RigidBodies[j]; // Received rigid body frame data

                        if (rbData.Tracked == true)
                        {
                            poseData.rbPos = new Vector3(rbData.x, rbData.y, rbData.z);
                            poseData.rbRot = new Quaternion(rbData.qx, rbData.qy, rbData.qz, rbData.qw);

                            //Console.WriteLine("\tRigidBody ({0}):", rb.Name);
                            //Console.WriteLine("\t\tpos ({0:N3}, {1:N3}, {2:N3})", rbData.x, rbData.y, rbData.z);

                            // Rigid Body Euler Orientation
                            float[] quat = new float[4] {
                                rbData.qx, rbData.qy, rbData.qz, rbData.qw
                            };
                            float[] eulers = new float[3];

                            eulers = NatNetClientML.QuatToEuler(quat, NATEulerOrder.NAT_XYZr); //Converting quat orientation into XYZ Euler representation.
                            double xrot = RadiansToDegrees(eulers[0]);
                            double yrot = RadiansToDegrees(eulers[1]);
                            double zrot = RadiansToDegrees(eulers[2]);

                            //Console.WriteLine("\t\tori ({0:N3}, {1:N3}, {2:N3})", xrot, yrot, zrot);

                            string display = string.Format("\tRigidBody ({0}):", rb.Name);
                            display += string.Format("\t\tpos ({0:N3}, {1:N3}, {2:N3})", rbData.x, rbData.y, rbData.z);
                            display += string.Format("\t\tori ({0:N3}, {1:N3}, {2:N3})", xrot, yrot, zrot);
                            //Console.WriteLine(display);
                        }
                        else
                        {
                            Console.WriteLine("\t{0} is not tracked in current frame", rb.Name);
                        }
                    }
                }
            }

            return(poseData);
        }
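The NatNetPoseData type returned above is not defined in the snippet; from the way it is used, it carries one labeled marker position plus a single rigid body pose. A minimal sketch of such a container follows; the field names are taken from the usage, everything else (including Vector3/Quaternion coming from System.Numerics or an equivalent math library) is an assumption.

    // Minimal sketch inferred from usage: a labeled marker position plus one rigid body pose.
    public class NatNetPoseData
    {
        public Vector3    mPos;   // last labeled marker position seen in the frame
        public Vector3    rbPos;  // rigid body position
        public Quaternion rbRot;  // rigid body orientation (x, y, z, w)
    }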