コード例 #1
0
 /// <summary>
 /// Handles an incoming spatial-mesh message. SPATIAL_MESH_ADD replaces any
 /// existing mesh object with the same ID and adds the deserialized mesh;
 /// SPATIAL_MESH_REMOVE destroys every mesh object whose ID is listed in the
 /// payload. Messages from other families are ignored.
 /// </summary>
 /// <param name="msg">Network message; msg.data holds the serialized payload.</param>
 public void HandleMeshMessage(OIMSG msg)
 {
     if (msg.msgFamily != (byte)OI_MSGFAMILY.XR)
     {
         return;
     }
     if (msg.msgType == (byte)OI_MSGTYPE_XR.SPATIAL_MESH_ADD)
     {
         MeshStruct ms       = OIMeshSerializer.OIDeserialize(msg.data);
         Transform  existing = GetMeshObject(ms.ID);
         if (existing != null)   // update: replace the old object entirely
         {
             // Consistency fix: use the inherited UnityEngine.Object.Destroy
             // (same method the REMOVE branch already calls) instead of the
             // GameObject-qualified spelling.
             Destroy(existing.gameObject);
         }
         AddMeshObject(ms.mesh, ms.ID);
     }
     else if (msg.msgType == (byte)OI_MSGTYPE_XR.SPATIAL_MESH_REMOVE)
     {
         List <int> remove_ids = new List <int>(IdsSerializer.Deserialize(msg.data));
         foreach (int ID in remove_ids)
         {
             Transform existing = GetMeshObject(ID);
             if (existing != null)
             {
                 Destroy(existing.gameObject);
             }
         }
     }
 }
コード例 #2
0
        /// <summary>
        /// Drains every queued message from the given UDP source. Transform
        /// packets (leading int32 packetID == 2) are applied to the matching
        /// registered transform copy; all other packets are ignored.
        /// </summary>
        /// <param name="udpSource">Connector whose receive queue is drained.</param>
        private void ParseData(UDPConnector udpSource)
        {
            for (OIMSG msg = udpSource.GetNewData();
                 msg != null && msg.data != null && msg.data.Length > 0;
                 msg = udpSource.GetNewData())
            {
                // The first four bytes of every payload identify the packet type.
                int packetID;
                using (MemoryStream buffer = new MemoryStream(msg.data))
                using (BinaryReader reader = new BinaryReader(buffer))
                {
                    packetID = reader.ReadInt32();
                }

                if (packetID != 2)   // transform packet
                {
                    continue;
                }

                int        transformID;
                Vector3    pos;
                Quaternion rot;
                TransformSerializer.Deserialize(msg.data, out transformID, out pos, out rot);

                // Packets for unregistered transform IDs are silently dropped.
                if (transformCopies.ContainsKey(transformID))
                {
                    transformCopies[transformID].NewTransform(pos, rot);
                }
            }
        }
コード例 #3
0
        // Update is called once per frame.
        // Logs the size of any message received on the spatial-mesh connector
        // and triggers SendMeshes() every intervalSec seconds.
        void Update()
        {
            timer += Time.deltaTime;

            OIMSG incoming = connector.GetNewData();
            if (incoming != null && incoming.data != null)
            {
                Debug.Log("Got msg on spatialmesh connector: " + incoming.data.Length);
            }

            if (timer < intervalSec)
            {
                return;
            }
            timer = 0;
            SendMeshes();
        }
コード例 #4
0
        /// <summary>
        /// Polls the UDP connector once per frame and forwards any non-empty
        /// message to HandleMeshMessage. Null or empty messages are ignored.
        /// </summary>
        void Update()
        {
            OIMSG msg = oiudp.GetNewData();

            if (msg == null || msg.data == null || msg.data.Length == 0)
            {
                return;
            }
            HandleMeshMessage(msg);
            // Removed a commented-out periodic keepalive send (dead code);
            // restore from history if a heartbeat is ever needed.
        }
コード例 #5
0
        /// <summary>
        /// Drains the UDP receive queue and dispatches each line-drawing packet
        /// by its leading 4-byte packet ID: 3 = line point, 4 = line settings,
        /// 5 = remove line, 6 = reset all lines. Unknown IDs are ignored.
        /// </summary>
        void UpdateReceiveData()
        {
            OIMSG msg = UDPClient.GetNewData();

            while (msg != null && msg.data != null && msg.data.Length != 0)
            {
                int packetID;
                using (MemoryStream buffer = new MemoryStream(msg.data))
                using (BinaryReader reader = new BinaryReader(buffer))
                {
                    packetID = reader.ReadInt32();
                }

                switch (packetID)
                {
                case 3:    // line point packet
                {
                    string  lineID;
                    int     pointID;
                    Vector3 pos;
                    LinePointSerializer.Deserialize(msg.data, out lineID, out pointID, out pos);
                    NewPoint(lineID, pointID, pos);
                    break;
                }

                case 4:    // line settings packet
                {
                    string lineID;
                    Color  col;
                    float  width;
                    LineSettingsSerializer.Deserialize(msg.data, out lineID, out col, out width);
                    LineSettings(lineID, col, width);
                    break;
                }

                case 5:    // line remove packet
                {
                    string lineID;
                    LineRemoveSerializer.Deserialize(msg.data, out lineID);
                    RemoveLine(lineID);
                    break;
                }

                case 6:    // lines reset packet
                    ResetLines();
                    break;
                }

                msg = UDPClient.GetNewData();
            }
        }
コード例 #6
0
        /// <summary>
        /// Sends every newly observed surface mesh as a SPATIAL_MESH_ADD message
        /// (vertices baked into world space), then sends one SPATIAL_MESH_REMOVE
        /// message covering all queued removal IDs. Compiled only for UWP player
        /// builds (excluded from the editor).
        /// </summary>
        private void SendMeshes()
        {
#if !UNITY_EDITOR && UNITY_METRO
            // Consistency fix: the list was declared as
            // ObjectSurfaceObserver.SurfaceObject but constructed as
            // SpatialMappingSource.SurfaceObject (the same nested type inherited
            // from SpatialMappingSource) — use one spelling on both sides.
            // Snapshot and clear the pending queue so surfaces observed while we
            // serialize are kept for the next send interval.
            List <SpatialMappingSource.SurfaceObject> surfaces = new List <SpatialMappingSource.SurfaceObject>();
            surfaces.AddRange(send_new);
            send_new.Clear();

            for (int index = 0; index < surfaces.Count; index++)
            {
                MeshFilter     filter = surfaces[index].Filter;
                Mesh           source = filter.sharedMesh;
                // Clone so the world transform can be baked into the vertices
                // without mutating the shared spatial-mapping mesh.
                // NOTE(review): the clone is never destroyed after serialization —
                // possible Mesh leak; consider Destroy(clone) once sent.
                Mesh           clone  = new Mesh();
                List <Vector3> verts  = new List <Vector3>();
                verts.AddRange(source.vertices);

                for (int vertIndex = 0; vertIndex < verts.Count; vertIndex++)
                {
                    verts[vertIndex] = filter.transform.TransformPoint(verts[vertIndex]);
                }

                clone.SetVertices(verts);
                clone.SetTriangles(source.triangles, 0);

                OIMSG msg = new OIMSG((byte)OI_MSGFAMILY.XR, (byte)OI_MSGTYPE_XR.SPATIAL_MESH_ADD, OIMeshSerializer.OISerialize(surfaces[index].ID, clone));

                connector.SendData(msg);
            }

            if (remove_ids.Count > 0)
            {
                OIMSG msg = new OIMSG((byte)OI_MSGFAMILY.XR, (byte)OI_MSGTYPE_XR.SPATIAL_MESH_REMOVE, IdsSerializer.Serialize(remove_ids));

                remove_ids.Clear();
                connector.SendData(msg);
            }
#endif
        }
コード例 #7
0
        /// <summary>
        /// Blocking receive loop for the depth-streaming client. Pulls OIMSG
        /// packets from udpClient and dispatches them on msg_in.msgType
        /// (FrameType): Config, DepthBlock, Color, BodyIndexBlock, AudioSamples,
        /// BodyData. Runs until _listening is cleared externally; intended to
        /// run on its own thread (logs "Listen Thread Closed" on exit).
        /// </summary>
        private void Listen()
        {
            _listening = true;

            while (_listening)
            {
                // NOTE(review): if GetNewData is non-blocking this loop busy-spins
                // whenever the queue is empty — confirm it blocks or throttles.
                OIMSG msg_in = udpClient.GetNewData();
                if (msg_in == null || msg_in.data == null)
                {
                    continue;
                }
                // Packets shorter than 2 bytes cannot carry a header — drop them.
                if (msg_in.data.Length < 2)
                {
                    continue;
                }

                byte[] receiveBytes = msg_in.data;

                byte frameType = msg_in.msgType;
                byte deviceID  = receiveBytes[0];

                switch (frameType)
                {
                case (byte)FrameType.Config:
                    ConfigMessage cm = new ConfigMessage();
                    // Currently only one processor per port.
                    // We could support here:
                    //   - Changing configuration (based on throtteling, etc)
                    //   - Multiple devices on one port (routing DepthPackets to processor based on id)
                    //   - (...which would require some changes to how streaming source & render works)

                    // TODO: Parse config data
                    // Byte 1: device type; byte 2: capability bit flags
                    // (LIVE_DATA / AUDIO_DATA / BODY_DATA / RGBD_DATA, tested below).
                    cm.deviceType = (DepthDeviceType)receiveBytes[1];
                    byte dataFlags = receiveBytes[2];

                    // Bytes 4-9: frame geometry.
                    ushort frameWidth  = BitConverter.ToUInt16(receiveBytes, 4);
                    ushort frameHeight = BitConverter.ToUInt16(receiveBytes, 6);
                    ushort maxLines    = BitConverter.ToUInt16(receiveBytes, 8);

                    // Bytes 12-31: camera intrinsics (principal point, focal
                    // lengths, depth scale), all little-endian float32.
                    float cx         = BitConverter.ToSingle(receiveBytes, 12);
                    float cy         = BitConverter.ToSingle(receiveBytes, 16);
                    float fx         = BitConverter.ToSingle(receiveBytes, 20);
                    float fy         = BitConverter.ToSingle(receiveBytes, 24);
                    float depthScale = BitConverter.ToSingle(receiveBytes, 28);
                    cm.intrinsics = new DepthCameraIntrinsics(
                        cx, cy, fx, fy, depthScale, frameWidth, frameHeight);

                    // Bytes 32-59: extrinsics as position (Px,Py,Pz) plus
                    // quaternion (Qx,Qy,Qz,Qw).
                    float Px = BitConverter.ToSingle(receiveBytes, 32);
                    float Py = BitConverter.ToSingle(receiveBytes, 36);
                    float Pz = BitConverter.ToSingle(receiveBytes, 40);
                    float Qx = BitConverter.ToSingle(receiveBytes, 44);
                    float Qy = BitConverter.ToSingle(receiveBytes, 48);
                    float Qz = BitConverter.ToSingle(receiveBytes, 52);
                    float Qw = BitConverter.ToSingle(receiveBytes, 56);
                    cm.extrinsics = new DepthCameraExtrinsics(
                        Px, Py, Pz, Qx, Qy, Qz, Qw
                        );


                    // GUID: up to 32 bytes starting at offset 63, NUL-terminated,
                    // read as one char per byte (effectively ASCII/Latin-1).
                    int guid_offset = 63;
                    cm.GUID = "";
                    for (int sOffset = 0; sOffset < 32; sOffset++)
                    {
                        byte c = receiveBytes[guid_offset + sOffset];
                        if (c == 0x00)
                        {
                            break;
                        }
                        cm.GUID += (char)c;
                    }

                    //int filename_offset = 99;
                    cm.filename = "";
                    cm.live     = (dataFlags & LIVE_DATA) != 0;
                    cm.hasAudio = (dataFlags & AUDIO_DATA) != 0;
                    cm.hasBody  = (dataFlags & BODY_DATA) != 0;
                    cm.hasRGBD  = (dataFlags & RGBD_DATA) != 0;

                    /*
                     * if (!cm.live) {
                     *  for (int sOffset = 0; sOffset < 32; sOffset++) {
                     *      byte c = receiveBytes[filename_offset + sOffset];
                     *      if (c == 0x00) break;
                     *      cm.filename += (char)c;
                     *  }
                     *  Debug.Log("Replaying file: "+ cm.filename);
                     * } */


                    Debug.Log("Config:\n\tFrame: " + frameWidth + " " + frameHeight + " " + maxLines +
                              "\n\tIntrinsics: " + cx + " " + cy + " " + fx + " " + fy + " " + depthScale +
                              "\n\tExtrinsics: " + cm.extrinsics.position.x + " " + cm.extrinsics.position.y + " " + cm.extrinsics.position.z +
                              " " + cm.extrinsics.rotation.x + " " + cm.extrinsics.rotation.y + " " + cm.extrinsics.rotation.z + " " + cm.extrinsics.rotation.w +
                              "\n\tGUID: " + cm.GUID);

                    // We could also implement & choose a specific Processor
                    // (i.e. with custom Proccess() function) based on DepthDeviceType...
                    // The processor is created once on the first Config packet and
                    // reused afterwards; later Config packets do not replace it.
                    if (processor == null)
                    {
                        //processor = new DefaultDepthStreamingProcessor(
                        //processor = new VSyncProcessor(
                        processor = new FastProcessor(_frameSource, cm);
                    }

                    if (_control != null)
                    {
                        _control.UpdateState(cm);
                    }

                    break;

                case (byte)FrameType.DepthBlock:
                    // Depth rows cannot be handled before a Config packet has
                    // created the processor — drop the packet until then.
                    if (processor == null)
                    {
                        break;
                    }
                    // Header: bytes 0-1 unused, 2-3 delta time, 4-7 row range.
                    ushort unused1   = BitConverter.ToUInt16(receiveBytes, 0);
                    ushort delta_t   = BitConverter.ToUInt16(receiveBytes, 2);
                    ushort startRowD = BitConverter.ToUInt16(receiveBytes, 4);
                    ushort endRowD   = BitConverter.ToUInt16(receiveBytes, 6);

                    //Debug.Log("Seq: "+sequence+" start: "+startRow+" end: "+endRow);
                    processor.HandleDepthData(startRowD, endRowD, msg_in.timestamp, ref receiveBytes, _rgbd_header_size);
                    break;

                case (byte)FrameType.Color:
                    if (processor == null)
                    {
                        break;
                    }
                    //ushort delta_t = BitConverter.ToUInt16(receiveBytes, 2);
                    //ushort startRowD = BitConverter.ToUInt16(receiveBytes, 4);
                    //ushort endRowD = BitConverter.ToUInt16(receiveBytes, 6);
                    //ulong timestampC = BitConverter.ToUInt32(receiveBytes, 8);
                    processor.HandleColorData(msg_in.timestamp, ref receiveBytes, _rgbd_header_size);
                    break;

                case (byte)FrameType.BodyIndexBlock:
                    if (processor == null)
                    {
                        break;
                    }
                    //ushort delta_t = BitConverter.ToUInt16(receiveBytes, 2);
                    //ushort startRowD = BitConverter.ToUInt16(receiveBytes, 4);
                    //ushort endRowD = BitConverter.ToUInt16(receiveBytes, 6);
                    // NOTE(review): 32-bit read widened into a 64-bit timestamp —
                    // confirm the sender only transmits 4 timestamp bytes here.
                    ulong timestampBI = BitConverter.ToUInt32(receiveBytes, 8);
                    processor.HandleBodyIndexData(timestampBI, ref receiveBytes, _rgbd_header_size);
                    break;

                case (byte)FrameType.AudioSamples:
                    RGBDAudioFrame aframe = new RGBDAudioFrame();
                    // Sequence-gap detection: warn on dropped frames; on
                    // out-of-order frames, resync the counter and skip the packet.
                    int            delta  = (int)msg_in.sequenceID - (int)lastAudioFrameSequence;
                    if (delta > 1)
                    {
                        Debug.LogWarning("Missing " + delta + " audio frames.");
                    }
                    else if (delta < 0)
                    {
                        Debug.LogWarning("Out of order audio: " + delta);
                        lastAudioFrameSequence = msg_in.sequenceID;
                        break;
                    }
                    // Header: bytes 2-3 frequency, 4-5 channel count,
                    // 6-7 sample count; float32 samples follow from byte 8.
                    aframe.frequency = BitConverter.ToUInt16(receiveBytes, 2);
                    aframe.channels  = BitConverter.ToUInt16(receiveBytes, 4);
                    ushort n_samples = BitConverter.ToUInt16(receiveBytes, 6);
                    aframe.timestamp = msg_in.timestamp;
                    aframe.samples   = new float[n_samples];
                    //Debug.Log(receiveBytes.Length)
                    for (int i = 0; i < n_samples; i++)
                    {
                        aframe.samples[i] = BitConverter.ToSingle(receiveBytes, 8 + i * 4);
                        //BitConverter.ToUInt16(receiveBytes, 12+i*2) / 32767.0f;
                    }
                    if (_audio != null)
                    {
                        _audio.QueueBuffer(aframe);
                    }
                    lastAudioFrameSequence = msg_in.sequenceID;
                    break;

                case (byte)FrameType.BodyData:
                    // Bytes 2-3: number of body records; each record is
                    // _body_data_size bytes starting after _body_header_size.
                    ushort nBodies    = BitConverter.ToUInt16(receiveBytes, 2);
                    ulong  timestampB = msg_in.timestamp;
                    for (ushort i = 0; i < nBodies; i++)
                    {
                        RGBDBodyFrame bodyFrame = new RGBDBodyFrame();
                        bodyFrame.timestamp = timestampB;

                        int dataOffset = _body_header_size + i * _body_data_size;

                        // Record layout: +0 tracking ID (u32), +4/+5 hand states,
                        // +7 lean tracking state, +8 lean vector (2 floats).
                        bodyFrame.trackingID        = BitConverter.ToUInt32(receiveBytes, dataOffset + 0);
                        bodyFrame.handStateLeft     = (HandState)receiveBytes[dataOffset + 4];
                        bodyFrame.handStateRight    = (HandState)receiveBytes[dataOffset + 5];
                        bodyFrame.leanTrackingState = (TrackingState)receiveBytes[dataOffset + 7];
                        bodyFrame.lean = new Vector2(
                            BitConverter.ToSingle(receiveBytes, dataOffset + 8),
                            BitConverter.ToSingle(receiveBytes, dataOffset + 12));

                        // Joint positions: 3 floats per joint starting at +16;
                        // one tracking-state byte per joint follows.
                        // NOTE(review): the extra "3 +" in trackingStateOffset looks
                        // like deliberate padding — verify against the sender layout.
                        int positionOffset      = dataOffset + 16;
                        int positionDatSize     = 3 * 4 * (int)JointType.Count;
                        int trackingStateOffset = 3 + positionOffset + positionDatSize;
                        for (int j = 0; j < (int)JointType.Count; j++)
                        {
                            // X is negated — presumably a handedness conversion
                            // into Unity's coordinate system; confirm with sender.
                            bodyFrame.jointPosition[j] = new Vector3(
                                -BitConverter.ToSingle(receiveBytes, positionOffset + j * 3 * 4 + 0),
                                BitConverter.ToSingle(receiveBytes, positionOffset + j * 3 * 4 + 4),
                                BitConverter.ToSingle(receiveBytes, positionOffset + j * 3 * 4 + 8));
                            bodyFrame.jointTrackingState[j] = (TrackingState)receiveBytes[trackingStateOffset + j];
                        }

                        if (_control != null)
                        {
                            _control.QueueBodyFrame(bodyFrame);
                        }
                    }
                    break;

                default:
                    Debug.Log("Unknown DepthStreaming frame type: " + msg_in.msgType);
                    break;
                }
            }

            _listening = false;

            Debug.Log("Listen Thread Closed");
        }
コード例 #8
0
        /// <summary>
        /// Blocking receive loop for the JPEG depth-streaming client. Pulls
        /// OIMSG packets from udpClient and dispatches on FrameType until
        /// _listening is cleared externally: Config packets carry camera
        /// parameters, JPEG packets carry one compressed color frame each.
        /// Intended to run on its own thread.
        /// </summary>
        private void Listen()
        {
            _listening = true;

            while (_listening)
            {
                OIMSG msg_in = udpClient.GetNewData();

                if (msg_in == null || msg_in.data == null)
                {
                    continue;
                }
                // Packets shorter than 2 bytes cannot carry a header — drop them.
                if (msg_in.data.Length < 2)
                {
                    continue;
                }

                byte[] receiveBytes = msg_in.data;
                byte   frameType    = msg_in.msgType;

                switch (frameType)
                {
                case (byte)FrameType.Config:
                    ConfigMessage cm = new ConfigMessage();
                    // Byte 1: device type; byte 2: capability bit flags.
                    cm.deviceType = (DepthDeviceType)receiveBytes[1];
                    byte dataFlags = receiveBytes[2];

                    // Bytes 4-9: frame geometry.
                    ushort frameWidth  = System.BitConverter.ToUInt16(receiveBytes, 4);
                    ushort frameHeight = System.BitConverter.ToUInt16(receiveBytes, 6);
                    ushort maxLines    = System.BitConverter.ToUInt16(receiveBytes, 8);

                    // Bytes 12-31: camera intrinsics.
                    float cx         = System.BitConverter.ToSingle(receiveBytes, 12);
                    float cy         = System.BitConverter.ToSingle(receiveBytes, 16);
                    float fx         = System.BitConverter.ToSingle(receiveBytes, 20);
                    float fy         = System.BitConverter.ToSingle(receiveBytes, 24);
                    float depthScale = System.BitConverter.ToSingle(receiveBytes, 28);
                    cm.intrinsics = new DepthCameraIntrinsics(
                        cx, cy, fx, fy, depthScale, frameWidth, frameHeight);

                    // Bytes 32-59: extrinsics (position + rotation quaternion).
                    float Px = System.BitConverter.ToSingle(receiveBytes, 32);
                    float Py = System.BitConverter.ToSingle(receiveBytes, 36);
                    float Pz = System.BitConverter.ToSingle(receiveBytes, 40);
                    float Qx = System.BitConverter.ToSingle(receiveBytes, 44);
                    float Qy = System.BitConverter.ToSingle(receiveBytes, 48);
                    float Qz = System.BitConverter.ToSingle(receiveBytes, 52);
                    float Qw = System.BitConverter.ToSingle(receiveBytes, 56);
                    cm.extrinsics = new DepthCameraExtrinsics(
                        Px, Py, Pz, Qx, Qy, Qz, Qw
                        );

                    // GUID: up to 32 NUL-terminated bytes starting at offset 63.
                    int guid_offset = 63;
                    cm.GUID = "";
                    for (int sOffset = 0; sOffset < 32; sOffset++)
                    {
                        byte c = receiveBytes[guid_offset + sOffset];
                        if (c == 0x00)
                        {
                            break;
                        }
                        cm.GUID += (char)c;
                    }

                    cm.filename = "";
                    cm.live     = (dataFlags & LIVE_DATA) != 0;
                    cm.hasAudio = (dataFlags & AUDIO_DATA) != 0;
                    cm.hasBody  = (dataFlags & BODY_DATA) != 0;
                    cm.hasRGBD  = (dataFlags & RGBD_DATA) != 0;

                    _frameSource.SetTextureSize(frameWidth, frameHeight);
                    Debug.Log("Config:\n\tFrame: " + frameWidth + " " + frameHeight + " " + maxLines +
                              "\n\tIntrinsics: " + cx + " " + cy + " " + fx + " " + fy + " " + depthScale +
                              "\n\tExtrinsics: " + cm.extrinsics.position.x + " " + cm.extrinsics.position.y + " " + cm.extrinsics.position.z +
                              " " + cm.extrinsics.rotation.x + " " + cm.extrinsics.rotation.y + " " + cm.extrinsics.rotation.z + " " + cm.extrinsics.rotation.w +
                              "\n\tGUID: " + cm.GUID);
                    break;

                case (byte)FrameType.JPEG:
                    // Payload: 4-byte declared JPEG length, 4 reserved bytes,
                    // then the JPEG bytes themselves.
                    int header_size = 8;
                    int jpegLength  = (int)System.BitConverter.ToUInt32(receiveBytes, 0);
                    int dataLength  = receiveBytes.Length - header_size;
                    if (jpegLength != dataLength)
                    {
                        // Bug fix: was `return`, which terminated the whole listen
                        // loop (and thread) on a single malformed packet. Skip the
                        // packet and keep listening instead.
                        Debug.LogWarning("Unexpected amount of data.");
                        break;
                    }
                    byte[] JPEG_colors = new byte[dataLength];
                    System.Buffer.BlockCopy(receiveBytes, header_size, JPEG_colors, 0, JPEG_colors.Length);
                    _frameSource.frameQueue.Enqueue(JPEG_colors);
                    break;

                default:
                    Debug.Log("Unknown DepthStreaming frame type: " + msg_in.msgType);
                    break;
                }
            }
        }