Example #1
        private async void StartWebsocket()
        {
            // Contains code to instantiate a websocket to obtain pose data
            // Web socket is currently used only for Kinect Alternative

            websocket = new WebSocket(WS_ip);

            websocket.OnOpen += () =>
            {
                Debug.Log("WS connection open!");
            };

            websocket.OnError += (e) =>
            {
                Debug.Log("Error! " + e);
            };

            websocket.OnClose += (e) =>
            {
                Debug.Log("WS connection closed!");
            };

            websocket.OnMessage += (bytes) =>
            {
                // If joint information is received, update poseLiveWS
                var message = System.Text.Encoding.UTF8.GetString(bytes);
                Debug.Log("WS message received: " + message);
                var remote_joints = PoseDataUtils.DeserializeRJL(message);
                Debug.Log(remote_joints);
                poseLiveWS = PoseDataUtils.Remote2PoseData(remote_joints);
            };

            // TODO: if using websocket, make this work
            // Keep sending messages at every 0.3s
            //InvokeRepeating("SendWebSocketMessage", 0.0f, 0.3f);

            await websocket.Connect();
        }
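
The commented-out InvokeRepeating call above refers to a SendWebSocketMessage helper that is not shown. A minimal sketch of what it might look like, assuming the NativeWebSocket-style API (WebSocketState, SendText) used elsewhere in these examples; the method name comes from the TODO, but the body and payload are illustrative:

        // Sketch only: assumes a NativeWebSocket-style State property and SendText method
        private async void SendWebSocketMessage()
        {
            if (websocket.State == WebSocketState.Open)
            {
                // Placeholder payload; the real message format depends on the pose server
                await websocket.SendText("request_pose");
            }
        }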
Example #2
        public PoseData GetNextPose()
        {
            switch (CurrentPoseInputSource)
            {
            case PoseInputSource.WEBSOCKET:
#if !UNITY_WEBGL || UNITY_EDITOR
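                // Incoming messages are queued by the socket implementation; pump the
                // queue here so the OnMessage callback fires on Unity's main thread
                // (the WebGL build dispatches messages itself, hence the #if guard)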
                websocket.DispatchMessageQueue();
#endif
                // poseLiveWS is non-null if alternative is sending pose data over websocket
                if (poseLiveWS != null)
                {
                    // Assign last pose from websocket
                    CurrentPose = poseLiveWS;
                }
                else
                {
                    Debug.Log("No pose recieved from WebSocket!");
                }
                break;

            case PoseInputSource.FILE:
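                // Replay a recorded pose sequence from disk, one frame per call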

                if (SequenceEnum != null && SequenceEnum.MoveNext())
                {
                    _CurrentFilePoseNumber++;
                }
                else
                {
                    // Quick and dirty way to loop (by reloading file)
                    if (SequenceEnum != null && !loop)
                    {
                        break;
                    }
                    LoadData();
                    SequenceEnum.MoveNext();
                    _CurrentFilePoseNumber = 1;
                }

                string   frame_json     = SequenceEnum.Current;
                PoseData fake_live_data = PoseDataUtils.JSONstring2PoseData(frame_json);
                CurrentPose = fake_live_data;

                if (recording)     // also append the replayed frame to the output file
                {
                    File.AppendAllText(WriteDataPath, frame_json + Environment.NewLine);
                }
                break;

            case PoseInputSource.KINECT:
                if (device != null)
                {
                    using (Capture capture = device.GetCapture())
                    {
                        // Make tracker estimate body
                        tracker.EnqueueCapture(capture);

                        // Code for getting RGB image from camera

                        Microsoft.Azure.Kinect.Sensor.Image color = capture.Color;
                        if (color != null && color.WidthPixels > 0 && (streamCanvas != null || videoRenderer != null))
                        {
                            UnityEngine.Object.Destroy(tex);    // destroy the previous texture so old frames are not kept in memory
                            tex = new Texture2D(color.WidthPixels, color.HeightPixels, TextureFormat.BGRA32, false);
                            tex.LoadRawTextureData(color.Memory.ToArray());
                            tex.Apply();

                            // Fetch the RawImage component and/or renderer material and
                            // assign the freshly built texture (tex is never null here)
                            if (streamCanvas != null)
                            {
                                m_RawImage         = streamCanvas.GetComponent<RawImage>();
                                m_RawImage.texture = tex;
                            }
                            if (videoRenderer != null)
                            {
                                videoRenderer.material.mainTexture = tex;
                            }
                        }
                    }

                    // Get pose estimate from tracker
                    using (Frame frame = tracker.PopResult())
                    {
                        //Debug.LogFormat("{0} bodies found.", frame.NumberOfBodies);

                        //  At least one body found by Body Tracking
                        if (frame.NumberOfBodies > 0)
                        {
                            // Use the first estimated person if multiple are in the image
                            // !!! There are (probably) no guarantees on consistent ordering between estimates
                            //var bodies = frame.Bodies;
                            var body = frame.GetBody(0);

                            // Apply pose to user avatar(s)
                            PoseData live_data = PoseDataUtils.Body2PoseData(body);

                            if (recording)     // save this frame's joints to the recording file
                            {
                                PoseDataJSON jdl = PoseDataUtils.Body2PoseDataJSON(body);
                                AppendRecordedFrame(jdl);
                            }
                            CurrentPose = live_data;
                        }
                    }
                }
                else
                {
                    Debug.Log("device is null!");
                }
                break;
            }
            return CurrentPose;
        }
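
For reference, GetNextPose switches on a PoseInputSource enum and touches several fields of the enclosing class that neither example shows. The sketch below is inferred from usage only: the member names match the code above, but the exact types and access modifiers are assumptions (websocket from a NativeWebSocket-style package, device and tracker from the Azure Kinect Sensor and Body Tracking SDKs):

        public enum PoseInputSource
        {
            WEBSOCKET,   // live pose data pushed over the websocket (Example #1)
            FILE,        // pre-recorded pose sequence replayed from disk
            KINECT       // live Azure Kinect body tracking
        }

        public PoseInputSource CurrentPoseInputSource;
        public PoseData CurrentPose;

        private WebSocket websocket;                  // NativeWebSocket-style connection
        private PoseData poseLiveWS;                  // last pose received over the websocket
        private IEnumerator<string> SequenceEnum;     // frames of the loaded pose file
        private Device device;                        // Azure Kinect sensor
        private Tracker tracker;                      // Azure Kinect body tracker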