Example #1
        // Update is called once per frame
        void Update()
        {
            _recordedTransform.position = dce.position;
            _recordedTransform.rotation = dce.rotation;

            RGBDStreamEventArgs evArgs = PollEventQueue();

            if (evArgs != null && RGBDStreamEvent != null)
            {
                RGBDStreamEvent(this, evArgs);
            }

            // While idle, re-request the stream configuration at most once every two seconds.
            if (_state == RGBD_STATE.IDLE && last_idle + 2.0f < Time.time)
            {
                RequestConfig();
                last_idle = Time.time;
            }

            // Drain all queued body frames, creating a tracked body object for each new tracking ID.
            RGBDBodyFrame f = DequeueBodyFrame();

            while (f != null)
            {
                if (!_trackedBodies.ContainsKey(f.trackingID) || _trackedBodies[f.trackingID] == null)
                {
                    GameObject newBody = Instantiate(bodyPrefab, sfs.cameraTransform);
                    newBody.transform.localPosition = Vector3.zero;
                    newBody.transform.localRotation = Quaternion.identity;
                    _trackedBodies.Add(f.trackingID, newBody.GetComponent<RGBDBody>());
                }

                _trackedBodies[f.trackingID].ApplyFrame(f);
                f = DequeueBodyFrame();
            }
        }
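Example #1 polls the stream's event queue each frame, forwards pending events to RGBDStreamEvent subscribers, and drains queued body frames, instantiating one bodyPrefab per newly seen tracking ID. A minimal subscriber sketch, assuming the event uses an (object sender, RGBDStreamEventArgs args) signature and that the component above is reachable through a hypothetical streamer field of a hypothetical RGBDStreamSource type:

using UnityEngine;

// Hypothetical subscriber; RGBDStreamSource and the `streamer` field are assumptions.
public class RGBDStreamLogger : MonoBehaviour
{
    public RGBDStreamSource streamer;   // Assumed reference to the component shown above.

    void OnEnable()  { if (streamer != null) { streamer.RGBDStreamEvent += OnStreamEvent; } }
    void OnDisable() { if (streamer != null) { streamer.RGBDStreamEvent -= OnStreamEvent; } }

    private void OnStreamEvent(object sender, RGBDStreamEventArgs args)
    {
        Debug.Log("RGBD stream event received at " + Time.time);
    }
}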
Example #2
        private RGBDBodyFrame DequeueBodyFrame()
        {
            RGBDBodyFrame res = null;

            lock (_bodyFramesLock) {
                if (_bodyFrames.Count > 0)
                {
                    res = _bodyFrames.Dequeue();
                }
            }
            return res;
        }
Example #3
 public void ApplyFrame(RGBDBodyFrame frame)
 {
     lastFrameUpdate = Time.time;
     for (int i = 0; i < (int)JointType.Count; i++)
     {
         if (frame.jointTrackingState[i] != TrackingState.NotTracked)
         {
             Rig[i].position = transform.TransformPoint(frame.jointPosition[i]);
         }
     }
     currentFrame = frame;
 }
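ApplyFrame writes the streamed joint positions into a Rig array of Transforms indexed by JointType, skipping joints whose tracking state is NotTracked. A minimal sketch of the fields this method assumes on the RGBDBody component (the declarations themselves are assumptions; the names come from the snippet):

 // Assumed field declarations for the RGBDBody component used in Example #3.
 public Transform[] Rig = new Transform[(int)JointType.Count]; // One Transform per joint, indexed by JointType.
 public RGBDBodyFrame currentFrame;                            // Most recently applied body frame.
 public float lastFrameUpdate;                                 // Time.time of the last ApplyFrame call.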
Example #4
 public void QueueBodyFrame(RGBDBodyFrame frame)
 {
     lock (_bodyFramesLock) {
         _bodyFrames.Enqueue(frame);
     }
 }
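Examples #2 and #4 form a simple producer/consumer pair: the network thread enqueues decoded body frames via QueueBodyFrame, and the Unity main thread drains them in Update via DequeueBodyFrame, with both sides synchronizing on the same lock object. A minimal sketch of the shared state this assumes (requires using System.Collections.Generic;):

 // Assumed shared fields behind QueueBodyFrame/DequeueBodyFrame.
 private readonly object _bodyFramesLock = new object();
 private readonly Queue<RGBDBodyFrame> _bodyFrames = new Queue<RGBDBodyFrame>();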
Example #5
        private void Listen()
        {
            _listening = true;

            while (_listening)
            {
                OIMSG msg_in = udpClient.GetNewData();
                if (msg_in == null || msg_in.data == null)
                {
                    continue;
                }
                if (msg_in.data.Length < 2)
                {
                    continue;
                }

                byte[] receiveBytes = msg_in.data;

                byte frameType = msg_in.msgType;
                byte deviceID  = receiveBytes[0];

                switch (frameType)
                {
                case (byte)FrameType.Config:
                    ConfigMessage cm = new ConfigMessage();
                    // Currently only one processor per port.
                    // We could support here:
                    //   - Changing configuration (based on throttling, etc.)
                    //   - Multiple devices on one port (routing DepthPackets to processor based on id)
                    //   - (...which would require some changes to how streaming source & render works)

                    // TODO: Parse config data
                    cm.deviceType = (DepthDeviceType)receiveBytes[1];
                    byte dataFlags = receiveBytes[2];

                    // Offsets below follow the config packet layout: frame width/height/maxLines at
                    // bytes 4/6/8, camera intrinsics at 12..28, extrinsics (position + rotation
                    // quaternion) at 32..56, and a zero-terminated GUID string starting at byte 63.
                    ushort frameWidth  = BitConverter.ToUInt16(receiveBytes, 4);
                    ushort frameHeight = BitConverter.ToUInt16(receiveBytes, 6);
                    ushort maxLines    = BitConverter.ToUInt16(receiveBytes, 8);

                    float cx         = BitConverter.ToSingle(receiveBytes, 12);
                    float cy         = BitConverter.ToSingle(receiveBytes, 16);
                    float fx         = BitConverter.ToSingle(receiveBytes, 20);
                    float fy         = BitConverter.ToSingle(receiveBytes, 24);
                    float depthScale = BitConverter.ToSingle(receiveBytes, 28);
                    cm.intrinsics = new DepthCameraIntrinsics(
                        cx, cy, fx, fy, depthScale, frameWidth, frameHeight);

                    float Px = BitConverter.ToSingle(receiveBytes, 32);
                    float Py = BitConverter.ToSingle(receiveBytes, 36);
                    float Pz = BitConverter.ToSingle(receiveBytes, 40);
                    float Qx = BitConverter.ToSingle(receiveBytes, 44);
                    float Qy = BitConverter.ToSingle(receiveBytes, 48);
                    float Qz = BitConverter.ToSingle(receiveBytes, 52);
                    float Qw = BitConverter.ToSingle(receiveBytes, 56);
                    cm.extrinsics = new DepthCameraExtrinsics(
                        Px, Py, Pz, Qx, Qy, Qz, Qw
                        );


                    int guid_offset = 63;
                    cm.GUID = "";
                    for (int sOffset = 0; sOffset < 32; sOffset++)
                    {
                        byte c = receiveBytes[guid_offset + sOffset];
                        if (c == 0x00)
                        {
                            break;
                        }
                        cm.GUID += (char)c;
                    }

                    //int filename_offset = 99;
                    cm.filename = "";
                    cm.live     = (dataFlags & LIVE_DATA) != 0;
                    cm.hasAudio = (dataFlags & AUDIO_DATA) != 0;
                    cm.hasBody  = (dataFlags & BODY_DATA) != 0;
                    cm.hasRGBD  = (dataFlags & RGBD_DATA) != 0;

                    /*
                     * if (!cm.live) {
                     *  for (int sOffset = 0; sOffset < 32; sOffset++) {
                     *      byte c = receiveBytes[filename_offset + sOffset];
                     *      if (c == 0x00) break;
                     *      cm.filename += (char)c;
                     *  }
                     *  Debug.Log("Replaying file: "+ cm.filename);
                     * } */


                    Debug.Log("Config:\n\tFrame: " + frameWidth + " " + frameHeight + " " + maxLines +
                              "\n\tIntrinsics: " + cx + " " + cy + " " + fx + " " + fy + " " + depthScale +
                              "\n\tExtrinsics: " + cm.extrinsics.position.x + " " + cm.extrinsics.position.y + " " + cm.extrinsics.position.z +
                              " " + cm.extrinsics.rotation.x + " " + cm.extrinsics.rotation.y + " " + cm.extrinsics.rotation.z + " " + cm.extrinsics.rotation.w +
                              "\n\tGUID: " + cm.GUID);

                    // We could also implement & choose a specific Processor
                    // (i.e. with custom Process() function) based on DepthDeviceType...
                    if (processor == null)
                    {
                        //processor = new DefaultDepthStreamingProcessor(
                        //processor = new VSyncProcessor(
                        processor = new FastProcessor(_frameSource, cm);
                    }

                    if (_control != null)
                    {
                        _control.UpdateState(cm);
                    }

                    break;

                case (byte)FrameType.DepthBlock:
                    if (processor == null)
                    {
                        break;
                    }
                    ushort unused1   = BitConverter.ToUInt16(receiveBytes, 0);
                    ushort delta_t   = BitConverter.ToUInt16(receiveBytes, 2);
                    ushort startRowD = BitConverter.ToUInt16(receiveBytes, 4);
                    ushort endRowD   = BitConverter.ToUInt16(receiveBytes, 6);

                    //Debug.Log("Seq: "+sequence+" start: "+startRow+" end: "+endRow);
                    processor.HandleDepthData(startRowD, endRowD, msg_in.timestamp, ref receiveBytes, _rgbd_header_size);
                    break;

                case (byte)FrameType.Color:
                    if (processor == null)
                    {
                        break;
                    }
                    //ushort delta_t = BitConverter.ToUInt16(receiveBytes, 2);
                    //ushort startRowD = BitConverter.ToUInt16(receiveBytes, 4);
                    //ushort endRowD = BitConverter.ToUInt16(receiveBytes, 6);
                    //ulong timestampC = BitConverter.ToUInt32(receiveBytes, 8);
                    processor.HandleColorData(msg_in.timestamp, ref receiveBytes, _rgbd_header_size);
                    break;

                case (byte)FrameType.BodyIndexBlock:
                    if (processor == null)
                    {
                        break;
                    }
                    //ushort delta_t = BitConverter.ToUInt16(receiveBytes, 2);
                    //ushort startRowD = BitConverter.ToUInt16(receiveBytes, 4);
                    //ushort endRowD = BitConverter.ToUInt16(receiveBytes, 6);
                    ulong timestampBI = BitConverter.ToUInt32(receiveBytes, 8);
                    processor.HandleBodyIndexData(timestampBI, ref receiveBytes, _rgbd_header_size);
                    break;

                case (byte)FrameType.AudioSamples:
                    RGBDAudioFrame aframe = new RGBDAudioFrame();
                    int            delta  = (int)msg_in.sequenceID - (int)lastAudioFrameSequence;
                    if (delta > 1)
                    {
                        Debug.LogWarning("Missing " + delta + " audio frames.");
                    }
                    else if (delta < 0)
                    {
                        Debug.LogWarning("Out of order audio: " + delta);
                        lastAudioFrameSequence = msg_in.sequenceID;
                        break;
                    }
                    aframe.frequency = BitConverter.ToUInt16(receiveBytes, 2);
                    aframe.channels  = BitConverter.ToUInt16(receiveBytes, 4);
                    ushort n_samples = BitConverter.ToUInt16(receiveBytes, 6);
                    aframe.timestamp = msg_in.timestamp;
                    aframe.samples   = new float[n_samples];
                    //Debug.Log(receiveBytes.Length)
                    for (int i = 0; i < n_samples; i++)
                    {
                        aframe.samples[i] = BitConverter.ToSingle(receiveBytes, 8 + i * 4);
                        //BitConverter.ToUInt16(receiveBytes, 12+i*2) / 32767.0f;
                    }
                    if (_audio != null)
                    {
                        _audio.QueueBuffer(aframe);
                    }
                    lastAudioFrameSequence = msg_in.sequenceID;
                    break;

                case (byte)FrameType.BodyData:
                    ushort nBodies    = BitConverter.ToUInt16(receiveBytes, 2);
                    ulong  timestampB = msg_in.timestamp;
                    for (ushort i = 0; i < nBodies; i++)
                    {
                        RGBDBodyFrame bodyFrame = new RGBDBodyFrame();
                        bodyFrame.timestamp = timestampB;

                        int dataOffset = _body_header_size + i * _body_data_size;

                        // Per-body layout relative to dataOffset: trackingID at 0, left/right hand
                        // states at bytes 4/5, lean tracking state at 7, lean vector at 8/12, joint
                        // positions from byte 16, joint tracking states after the position block.
                        bodyFrame.trackingID        = BitConverter.ToUInt32(receiveBytes, dataOffset + 0);
                        bodyFrame.handStateLeft     = (HandState)receiveBytes[dataOffset + 4];
                        bodyFrame.handStateRight    = (HandState)receiveBytes[dataOffset + 5];
                        bodyFrame.leanTrackingState = (TrackingState)receiveBytes[dataOffset + 7];
                        bodyFrame.lean = new Vector2(
                            BitConverter.ToSingle(receiveBytes, dataOffset + 8),
                            BitConverter.ToSingle(receiveBytes, dataOffset + 12));

                        int positionOffset      = dataOffset + 16;
                        int positionDataSize    = 3 * 4 * (int)JointType.Count;
                        int trackingStateOffset = 3 + positionOffset + positionDataSize;
                        // X is negated below, presumably to convert the sensor's right-handed
                        // coordinates into Unity's left-handed coordinate system.
                        for (int j = 0; j < (int)JointType.Count; j++)
                        {
                            bodyFrame.jointPosition[j] = new Vector3(
                                -BitConverter.ToSingle(receiveBytes, positionOffset + j * 3 * 4 + 0),
                                BitConverter.ToSingle(receiveBytes, positionOffset + j * 3 * 4 + 4),
                                BitConverter.ToSingle(receiveBytes, positionOffset + j * 3 * 4 + 8));
                            bodyFrame.jointTrackingState[j] = (TrackingState)receiveBytes[trackingStateOffset + j];
                        }

                        if (_control != null)
                        {
                            _control.QueueBodyFrame(bodyFrame);
                        }
                    }
                    break;

                default:
                    Debug.Log("Unknown DepthStreaming frame type: " + msg_in.msgType);
                    break;
                }
            }

            _listening = false;

            Debug.Log("Listen Thread Closed");
        }
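Listen loops until _listening is cleared, so it is meant to run on a background thread rather than on the Unity main thread. A minimal start/stop sketch under that assumption (the _listenThread field and both methods are hypothetical, and it assumes udpClient.GetNewData() returns periodically so the loop can observe the flag):

        // Hypothetical thread management for the Listen loop (assumes `using System.Threading;`).
        private Thread _listenThread;

        private void StartListening()
        {
            _listenThread = new Thread(Listen) { IsBackground = true };
            _listenThread.Start();
        }

        private void StopListening()
        {
            _listening = false;        // Lets the while loop in Listen() exit on its next iteration.
            if (_listenThread != null)
            {
                _listenThread.Join();  // May block if GetNewData() never returns; a join timeout could be used instead.
            }
        }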