Example no. 1
0
        // Converts JSON string to PoseData pose
        public static PoseData JSONstring2PoseData(string frame_json)
        {
            JointData[] joint_data_recorded_array = new JointData[(int)JointId.Count];
            PoseData    recorded_data             = new PoseData {
                data = { }
            };

            //Debug.Log(frame_json);
            PoseDataJSON saved_joint_data = JsonUtility.FromJson <PoseDataJSON>(frame_json);

            for (JointId jt = 0; jt < JointId.Count; jt++)
            {
                // play recording
                JointDataJSON jd              = saved_joint_data.data[(int)jt];
                Vector3       v_saved         = JsonUtility.FromJson <Vector3>(jd.position);
                Quaternion    r_saved         = JsonUtility.FromJson <Quaternion>(jd.rotation);
                var           joint_data_live = new JointData
                {
                    Position    = v_saved,
                    Orientation = r_saved
                };
                joint_data_recorded_array[(int)jt] = joint_data_live;
            }
            recorded_data = new PoseData {
                data = joint_data_recorded_array
            };
            return(recorded_data);
        }
Example no. 2
0
        // Converts Azure Kinect SDK BT Body to PoseDataJSON (serializable for JSON) pose
        // Converts Azure Kinect SDK BT Body to PoseDataJSON (serializable for JSON) pose
        public static PoseDataJSON Body2PoseDataJSON(Body body)
        {
            var serialized_joints = new JointDataJSON[(int)JointId.Count];

            for (JointId joint_id = 0; joint_id < JointId.Count; joint_id++)
            {
                // write recorded poses to file
                Microsoft.Azure.Kinect.BodyTracking.Joint raw_joint = body.Skeleton.GetJoint(joint_id);

                // save raw data: repackage the SDK's position/orientation into Unity
                // math types, then serialize each one to its own JSON string
                var unity_position = new Vector3(raw_joint.Position.X,
                                                 raw_joint.Position.Y,
                                                 raw_joint.Position.Z);
                var unity_rotation = new Quaternion(raw_joint.Quaternion.X,
                                                    raw_joint.Quaternion.Y,
                                                    raw_joint.Quaternion.Z,
                                                    raw_joint.Quaternion.W);

                serialized_joints[(int)joint_id] = new JointDataJSON {
                    position = JsonUtility.ToJson(unity_position),
                    rotation = JsonUtility.ToJson(unity_rotation)
                };
            }

            return(new PoseDataJSON {
                data = serialized_joints
            });
        }
Example no. 3
0
        // Fetches the next pose from the currently selected input source (websocket,
        // recorded file, or live Kinect) and caches it in CurrentPose. If the active
        // source yields nothing this call, the previously cached CurrentPose is
        // returned unchanged. Also appends the frame to WriteDataPath when `recording`
        // is set (FILE and KINECT sources only).
        public PoseData GetNextPose()
        {
            switch (CurrentPoseInputSource)
            {
            case PoseInputSource.WEBSOCKET:
#if !UNITY_WEBGL || UNITY_EDITOR
                // Pump queued websocket messages so poseLiveWS reflects the latest frame.
                websocket.DispatchMessageQueue();
#endif
                // poseLiveWS is non-null if alternative is sending pose data over websocket
                if (poseLiveWS != null)
                {
                    // Assign last pose from websocket
                    CurrentPose = poseLiveWS;
                }
                else
                {
                    Debug.Log("No pose recieved from WebSocket!");
                }
                break;

            case PoseInputSource.FILE:

                // Advance the frame enumerator; on exhaustion either stop (loop == false)
                // or reload the file to start over.
                if (SequenceEnum != null && SequenceEnum.MoveNext())
                {
                    _CurrentFilePoseNumber++;
                }
                else
                {
                    // Quick and dirty way to loop (by reloading file)
                    if (SequenceEnum != null && !loop)
                    {
                        break;
                    }
                    // NOTE(review): assumes LoadData() (re)initializes SequenceEnum with at
                    // least one frame — an empty file would leave Current invalid below.
                    LoadData();
                    SequenceEnum.MoveNext();
                    _CurrentFilePoseNumber = 1;
                }


                // Deserialize the current recorded frame and present it as if live.
                string   frame_json     = SequenceEnum.Current;
                PoseData fake_live_data = PoseDataUtils.JSONstring2PoseData(frame_json);
                CurrentPose = fake_live_data;

                if (recording)     // recording: re-record the played-back frame as-is
                {
                    File.AppendAllText(WriteDataPath, frame_json + Environment.NewLine);
                }
                break;

            case PoseInputSource.KINECT:
                if (device != null)
                {
                    using (Capture capture = device.GetCapture())
                    {
                        // Make tracker estimate body
                        tracker.EnqueueCapture(capture);

                        // Code for getting RGB image from camera

                        Microsoft.Azure.Kinect.Sensor.Image color = capture.Color;
                        if (color != null && color.WidthPixels > 0 && (streamCanvas != null || videoRenderer != null))
                        {
                            UnityEngine.Object.Destroy(tex);    // required to not keep old images in memory
                            tex = new Texture2D(color.WidthPixels, color.HeightPixels, TextureFormat.BGRA32, false);
                            tex.LoadRawTextureData(color.Memory.ToArray());
                            tex.Apply();

                            //Fetch the RawImage component from the GameObject
                            if (tex != null)
                            {
                                if (streamCanvas != null)
                                {
                                    m_RawImage         = streamCanvas.GetComponent <RawImage>();
                                    m_RawImage.texture = tex;
                                }
                                if (videoRenderer != null)
                                {
                                    videoRenderer.material.mainTexture = tex;
                                }
                            }
                        }
                    }

                    // Get pose estimate from tracker
                    // NOTE(review): PopResult() presumably blocks until a result is
                    // available for the enqueued capture — confirm against SDK docs.
                    using (Frame frame = tracker.PopResult())
                    {
                        //Debug.LogFormat("{0} bodies found.", frame.NumberOfBodies);

                        //  At least one body found by Body Tracking
                        if (frame.NumberOfBodies > 0)
                        {
                            // Use first estimated person, if multiple are in the image
                            // !!! There are (probably) no guarantees on consistent ordering between estimates
                            //var bodies = frame.Bodies;
                            var body = frame.GetBody(0);

                            // Apply pose to user avatar(s)
                            PoseData live_data = PoseDataUtils.Body2PoseData(body);

                            if (recording)     // recording
                            {
                                PoseDataJSON jdl = PoseDataUtils.Body2PoseDataJSON(body);
                                AppendRecordedFrame(jdl);
                            }
                            CurrentPose = live_data;
                        }
                    }
                }
                else
                {
                    Debug.Log("device is null!");
                }
                break;
            }
            return(CurrentPose);
        }
Example no. 4
0
        // Appends the passed pose (PoseDataJSON format) to the file as JSON
        void AppendRecordedFrame(PoseDataJSON jdl)
        {
            string json = JsonUtility.ToJson(jdl) + Environment.NewLine;

            File.AppendAllText(WriteDataPath, json);
        }