Example No. 1
 public FastProcessor(StreamFrameSource fs, ConfigMessage cm)
     : base(fs, cm.deviceType, cm.intrinsics, cm.intrinsics.width, cm.intrinsics.height, cm.maxLines, cm.GUID)
 {
     _frameBuffer = new Queue<FastFrame>();
     for (int i = 0; i < _frameBufferSize; i++)
     {
         _frameBuffer.Enqueue(new FastFrame(this));
     }
 }
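
The constructor pre-allocates _frameBufferSize FastFrame objects into a queue so that frames can be recycled instead of allocated per incoming packet, i.e. a simple object pool. Below is a minimal sketch of that recycle pattern in isolation; the names FramePool, PooledFrame, Rent, Return and Reset are illustrative assumptions, not part of the original API:

using System.Collections.Generic;

// Hypothetical stand-in for FastFrame: any reusable per-frame buffer.
public class PooledFrame
{
    public byte[] data;
    public void Reset() { data = null; } // clear per-frame state before reuse
}

public class FramePool
{
    private readonly Queue<PooledFrame> _pool = new Queue<PooledFrame>();

    public FramePool(int size)
    {
        // Allocate everything up front so the steady state runs allocation-free.
        for (int i = 0; i < size; i++)
        {
            _pool.Enqueue(new PooledFrame());
        }
    }

    // Take a frame out of the pool; the caller must hand it back when done.
    public PooledFrame Rent()
    {
        return _pool.Dequeue();
    }

    // Return a frame for reuse.
    public void Return(PooledFrame frame)
    {
        frame.Reset();
        _pool.Enqueue(frame);
    }
}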
Example No. 2
        // NOTE: may be called from a thread other than the main thread.
        public void UpdateState(ConfigMessage cm)
        {
            ClearBodies();
            RGBD_STATE new_state = cm.live ? RGBD_STATE.LIVE : RGBD_STATE.REPLAYING;

            if (_state != RGBD_STATE.REPLAYING && new_state == RGBD_STATE.REPLAYING)
            {
                QueueEvent(new RGBDStreamEventArgs("REPLAY_STARTED"));
            }

            if (_state != RGBD_STATE.LIVE && new_state == RGBD_STATE.LIVE)
            {
                QueueEvent(new RGBDStreamEventArgs("REPLAY_STOPPED"));
                if (_audio != null)
                {
                    _audio.Clear();
                }
            }

            if (new_state == RGBD_STATE.LIVE)
            {
                Debug.Log("Live");
                sfs.cameraTransform = _defaultTransform;
                _replay_t0          = 0;
            }

            if (new_state == RGBD_STATE.REPLAYING)
            {
                Debug.Log("Replaying");
                _replay_t0 = NOW();
                if (_audio != null)
                {
                    _audio.Clear();
                }
                sfs.cameraTransform = _recordedTransform;
                dce = cm.extrinsics;
            }

            _state = new_state;
        }
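
Since UpdateState may run off the main thread (see the note above), QueueEvent presumably buffers events for later dispatch rather than invoking handlers directly. One common way to implement that in Unity is a ConcurrentQueue drained from the main thread; the sketch below is an assumption about how such a pump could look, not the original implementation:

using System.Collections.Concurrent;
using UnityEngine;

public class EventPump
{
    // Thread-safe buffer: network threads enqueue, the main thread dequeues.
    private readonly ConcurrentQueue<string> _events = new ConcurrentQueue<string>();

    // Safe to call from any thread.
    public void QueueEvent(string name)
    {
        _events.Enqueue(name);
    }

    // Call from the Unity main thread, e.g. in MonoBehaviour.Update().
    public void Drain()
    {
        string name;
        while (_events.TryDequeue(out name))
        {
            Debug.Log("RGBD event: " + name); // dispatch to actual listeners here
        }
    }
}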
Example No. 3
        private void Listen()
        {
            _listening = true;

            while (_listening)
            {
                OIMSG msg_in = udpClient.GetNewData();
                if (msg_in == null || msg_in.data == null)
                {
                    continue;
                }
                if (msg_in.data.Length < 2)
                {
                    continue;
                }

                byte[] receiveBytes = msg_in.data;

                byte frameType = msg_in.msgType;
                byte deviceID  = receiveBytes[0];

                switch (frameType)
                {
                case (byte)FrameType.Config:
                    ConfigMessage cm = new ConfigMessage();
                    // Currently only one processor per port.
                    // We could support here:
                    //   - Changing configuration (based on throttling, etc.)
                    //   - Multiple devices on one port (routing DepthPackets to processor based on id)
                    //   - (...which would require some changes to how streaming source & render works)

                    // TODO: Parse config data
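                    // Wire layout of the config message, as parsed below:
                    //   [0]  device id      [1]  device type      [2]  data flags
                    //   [4], [6], [8]       frame width, height, max lines (ushort each)
                    //   [12..28]            intrinsics cx, cy, fx, fy, depthScale (float each)
                    //   [32..56]            extrinsics position Px, Py, Pz, rotation Qx, Qy, Qz, Qw (float each)
                    //   [63..94]            GUID, zero-terminated ASCII, at most 32 bytes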
                    cm.deviceType = (DepthDeviceType)receiveBytes[1];
                    byte dataFlags = receiveBytes[2];

                    ushort frameWidth  = BitConverter.ToUInt16(receiveBytes, 4);
                    ushort frameHeight = BitConverter.ToUInt16(receiveBytes, 6);
                    ushort maxLines    = BitConverter.ToUInt16(receiveBytes, 8);

                    float cx         = BitConverter.ToSingle(receiveBytes, 12);
                    float cy         = BitConverter.ToSingle(receiveBytes, 16);
                    float fx         = BitConverter.ToSingle(receiveBytes, 20);
                    float fy         = BitConverter.ToSingle(receiveBytes, 24);
                    float depthScale = BitConverter.ToSingle(receiveBytes, 28);
                    cm.intrinsics = new DepthCameraIntrinsics(
                        cx, cy, fx, fy, depthScale, frameWidth, frameHeight);

                    float Px = BitConverter.ToSingle(receiveBytes, 32);
                    float Py = BitConverter.ToSingle(receiveBytes, 36);
                    float Pz = BitConverter.ToSingle(receiveBytes, 40);
                    float Qx = BitConverter.ToSingle(receiveBytes, 44);
                    float Qy = BitConverter.ToSingle(receiveBytes, 48);
                    float Qz = BitConverter.ToSingle(receiveBytes, 52);
                    float Qw = BitConverter.ToSingle(receiveBytes, 56);
                    cm.extrinsics = new DepthCameraExtrinsics(
                        Px, Py, Pz, Qx, Qy, Qz, Qw
                        );


                    int guid_offset = 63;
                    cm.GUID = "";
                    for (int sOffset = 0; sOffset < 32; sOffset++)
                    {
                        byte c = receiveBytes[guid_offset + sOffset];
                        if (c == 0x00)
                        {
                            break;
                        }
                        cm.GUID += (char)c;
                    }

                    //int filename_offset = 99;
                    cm.filename = "";
                    cm.live     = (dataFlags & LIVE_DATA) != 0;
                    cm.hasAudio = (dataFlags & AUDIO_DATA) != 0;
                    cm.hasBody  = (dataFlags & BODY_DATA) != 0;
                    cm.hasRGBD  = (dataFlags & RGBD_DATA) != 0;

                    /*
                     * if (!cm.live) {
                     *  for (int sOffset = 0; sOffset < 32; sOffset++) {
                     *      byte c = receiveBytes[filename_offset + sOffset];
                     *      if (c == 0x00) break;
                     *      cm.filename += (char)c;
                     *  }
                     *  Debug.Log("Replaying file: "+ cm.filename);
                     * } */


                    Debug.Log("Config:\n\tFrame: " + frameWidth + " " + frameHeight + " " + maxLines +
                              "\n\tIntrinsics: " + cx + " " + cy + " " + fx + " " + fy + " " + depthScale +
                              "\n\tExtrinsics: " + cm.extrinsics.position.x + " " + cm.extrinsics.position.y + " " + cm.extrinsics.position.z +
                              " " + cm.extrinsics.rotation.x + " " + cm.extrinsics.rotation.y + " " + cm.extrinsics.rotation.z + " " + cm.extrinsics.rotation.w +
                              "\n\tGUID: " + cm.GUID);

                    // We could also implement & choose a specific Processor
                    // (i.e. with a custom Process() function) based on DepthDeviceType...
                    if (processor == null)
                    {
                        //processor = new DefaultDepthStreamingProcessor(
                        //processor = new VSyncProcessor(
                        processor = new FastProcessor(_frameSource, cm);
                    }

                    if (_control != null)
                    {
                        _control.UpdateState(cm);
                    }

                    break;

                case (byte)FrameType.DepthBlock:
                    if (processor == null)
                    {
                        break;
                    }
                    ushort unused1   = BitConverter.ToUInt16(receiveBytes, 0);
                    ushort delta_t   = BitConverter.ToUInt16(receiveBytes, 2);
                    ushort startRowD = BitConverter.ToUInt16(receiveBytes, 4);
                    ushort endRowD   = BitConverter.ToUInt16(receiveBytes, 6);

                    //Debug.Log("Seq: "+sequence+" start: "+startRow+" end: "+endRow);
                    processor.HandleDepthData(startRowD, endRowD, msg_in.timestamp, ref receiveBytes, _rgbd_header_size);
                    break;

                case (byte)FrameType.Color:
                    if (processor == null)
                    {
                        break;
                    }
                    //ushort delta_t = BitConverter.ToUInt16(receiveBytes, 2);
                    //ushort startRowD = BitConverter.ToUInt16(receiveBytes, 4);
                    //ushort endRowD = BitConverter.ToUInt16(receiveBytes, 6);
                    //ulong timestampC = BitConverter.ToUInt32(receiveBytes, 8);
                    processor.HandleColorData(msg_in.timestamp, ref receiveBytes, _rgbd_header_size);
                    break;

                case (byte)FrameType.BodyIndexBlock:
                    if (processor == null)
                    {
                        break;
                    }
                    //ushort delta_t = BitConverter.ToUInt16(receiveBytes, 2);
                    //ushort startRowD = BitConverter.ToUInt16(receiveBytes, 4);
                    //ushort endRowD = BitConverter.ToUInt16(receiveBytes, 6);
                    ulong timestampBI = BitConverter.ToUInt32(receiveBytes, 8);
                    processor.HandleBodyIndexData(timestampBI, ref receiveBytes, _rgbd_header_size);
                    break;

                case (byte)FrameType.AudioSamples:
                    RGBDAudioFrame aframe = new RGBDAudioFrame();
                    int            delta  = (int)msg_in.sequenceID - (int)lastAudioFrameSequence;
                    if (delta > 1)
                    {
                        Debug.LogWarning("Missing " + delta + " audio frames.");
                    }
                    else if (delta < 0)
                    {
                        Debug.LogWarning("Out of order audio: " + delta);
                        lastAudioFrameSequence = msg_in.sequenceID;
                        break;
                    }
                    aframe.frequency = BitConverter.ToUInt16(receiveBytes, 2);
                    aframe.channels  = BitConverter.ToUInt16(receiveBytes, 4);
                    ushort n_samples = BitConverter.ToUInt16(receiveBytes, 6);
                    aframe.timestamp = msg_in.timestamp;
                    aframe.samples   = new float[n_samples];
                    //Debug.Log(receiveBytes.Length)
                    for (int i = 0; i < n_samples; i++)
                    {
                        aframe.samples[i] = BitConverter.ToSingle(receiveBytes, 8 + i * 4);
                        //BitConverter.ToUInt16(receiveBytes, 12+i*2) / 32767.0f;
                    }
                    if (_audio != null)
                    {
                        _audio.QueueBuffer(aframe);
                    }
                    lastAudioFrameSequence = msg_in.sequenceID;
                    break;

                case (byte)FrameType.BodyData:
                    ushort nBodies    = BitConverter.ToUInt16(receiveBytes, 2);
                    ulong  timestampB = msg_in.timestamp;
                    for (ushort i = 0; i < nBodies; i++)
                    {
                        RGBDBodyFrame bodyFrame = new RGBDBodyFrame();
                        bodyFrame.timestamp = timestampB;

                        int dataOffset = _body_header_size + i * _body_data_size;
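                        // Per-body layout relative to dataOffset, as parsed below:
                        //   +0  trackingID (uint32)          +4, +5  left/right hand state
                        //   +7  lean tracking state (byte)   +8      lean X, Y (two floats)
                        //   +16 joint positions (3 floats per joint), then per-joint tracking states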

                        bodyFrame.trackingID        = BitConverter.ToUInt32(receiveBytes, dataOffset + 0);
                        bodyFrame.handStateLeft     = (HandState)receiveBytes[dataOffset + 4];
                        bodyFrame.handStateRight    = (HandState)receiveBytes[dataOffset + 5];
                        bodyFrame.leanTrackingState = (TrackingState)receiveBytes[dataOffset + 7];
                        bodyFrame.lean = new Vector2(
                            BitConverter.ToSingle(receiveBytes, dataOffset + 8),
                            BitConverter.ToSingle(receiveBytes, dataOffset + 12));

                        int positionOffset      = dataOffset + 16;
                        int positionDataSize    = 3 * 4 * (int)JointType.Count;
                        int trackingStateOffset = 3 + positionOffset + positionDataSize;
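                        // X is negated below, presumably to mirror between the sensor's
                        // right-handed coordinates and Unity's left-handed convention.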
                        for (int j = 0; j < (int)JointType.Count; j++)
                        {
                            bodyFrame.jointPosition[j] = new Vector3(
                                -BitConverter.ToSingle(receiveBytes, positionOffset + j * 3 * 4 + 0),
                                BitConverter.ToSingle(receiveBytes, positionOffset + j * 3 * 4 + 4),
                                BitConverter.ToSingle(receiveBytes, positionOffset + j * 3 * 4 + 8));
                            bodyFrame.jointTrackingState[j] = (TrackingState)receiveBytes[trackingStateOffset + j];
                        }

                        if (_control != null)
                        {
                            _control.QueueBodyFrame(bodyFrame);
                        }
                    }
                    break;

                default:
                    Debug.Log("Unknown DepthStreaming frame type: " + msg_in.msgType);
                    break;
                }
            }

            _listening = false;

            Debug.Log("Listen Thread Closed");
        }
Example No. 4
        private void Listen()
        {
            _listening = true;

            while (_listening)
            {
                OIMSG msg_in = udpClient.GetNewData();

                if (msg_in == null || msg_in.data == null)
                {
                    continue;
                }
                if (msg_in.data.Length < 2)
                {
                    continue;
                }

                byte[] receiveBytes = msg_in.data;
                byte   frameType    = msg_in.msgType;

                switch (frameType)
                {
                case (byte)FrameType.Config:
                    ConfigMessage cm = new ConfigMessage();
                    cm.deviceType = (DepthDeviceType)receiveBytes[1];
                    byte dataFlags = receiveBytes[2];

                    ushort frameWidth  = System.BitConverter.ToUInt16(receiveBytes, 4);
                    ushort frameHeight = System.BitConverter.ToUInt16(receiveBytes, 6);
                    ushort maxLines    = System.BitConverter.ToUInt16(receiveBytes, 8);

                    float cx         = System.BitConverter.ToSingle(receiveBytes, 12);
                    float cy         = System.BitConverter.ToSingle(receiveBytes, 16);
                    float fx         = System.BitConverter.ToSingle(receiveBytes, 20);
                    float fy         = System.BitConverter.ToSingle(receiveBytes, 24);
                    float depthScale = System.BitConverter.ToSingle(receiveBytes, 28);
                    cm.intrinsics = new DepthCameraIntrinsics(
                        cx, cy, fx, fy, depthScale, frameWidth, frameHeight);

                    float Px = System.BitConverter.ToSingle(receiveBytes, 32);
                    float Py = System.BitConverter.ToSingle(receiveBytes, 36);
                    float Pz = System.BitConverter.ToSingle(receiveBytes, 40);
                    float Qx = System.BitConverter.ToSingle(receiveBytes, 44);
                    float Qy = System.BitConverter.ToSingle(receiveBytes, 48);
                    float Qz = System.BitConverter.ToSingle(receiveBytes, 52);
                    float Qw = System.BitConverter.ToSingle(receiveBytes, 56);
                    cm.extrinsics = new DepthCameraExtrinsics(
                        Px, Py, Pz, Qx, Qy, Qz, Qw
                        );

                    int guid_offset = 63;
                    cm.GUID = "";
                    for (int sOffset = 0; sOffset < 32; sOffset++)
                    {
                        byte c = receiveBytes[guid_offset + sOffset];
                        if (c == 0x00)
                        {
                            break;
                        }
                        cm.GUID += (char)c;
                    }

                    cm.filename = "";
                    cm.live     = (dataFlags & LIVE_DATA) != 0;
                    cm.hasAudio = (dataFlags & AUDIO_DATA) != 0;
                    cm.hasBody  = (dataFlags & BODY_DATA) != 0;
                    cm.hasRGBD  = (dataFlags & RGBD_DATA) != 0;

                    _frameSource.SetTextureSize(frameWidth, frameHeight);
                    Debug.Log("Config:\n\tFrame: " + frameWidth + " " + frameHeight + " " + maxLines +
                              "\n\tIntrinsics: " + cx + " " + cy + " " + fx + " " + fy + " " + depthScale +
                              "\n\tExtrinsics: " + cm.extrinsics.position.x + " " + cm.extrinsics.position.y + " " + cm.extrinsics.position.z +
                              " " + cm.extrinsics.rotation.x + " " + cm.extrinsics.rotation.y + " " + cm.extrinsics.rotation.z + " " + cm.extrinsics.rotation.w +
                              "\n\tGUID: " + cm.GUID);
                    break;

                case (byte)FrameType.JPEG:
                    int header_size = 8;
                    int jpegLength  = (int)System.BitConverter.ToUInt32(receiveBytes, 0);
                    int dataLength  = receiveBytes.Length - header_size;
                    if (jpegLength != dataLength)
                    {
                        Debug.LogWarning("Unexpected JPEG payload size: expected " + jpegLength + ", got " + dataLength + ".");
                        break; // skip this malformed frame; returning here would kill the listen loop
                    }
                    byte[] JPEG_colors = new byte[dataLength];
                    System.Buffer.BlockCopy(receiveBytes, header_size, JPEG_colors, 0, JPEG_colors.Length);
                    _frameSource.frameQueue.Enqueue(JPEG_colors);
                    break;

                default:
                    Debug.Log("Unknown DepthStreaming frame type: " + msg_in.msgType);
                    break;
                }
            }
        }
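
The JPEG case above only copies the compressed payload into frameQueue; the decode has to happen on the Unity main thread, since textures cannot be touched from worker threads. Below is a hedged sketch of the consuming side; the ConcurrentQueue type and the JpegFrameConsumer MonoBehaviour are assumptions, only Texture2D.LoadImage is standard Unity API:

using System.Collections.Concurrent;
using UnityEngine;

public class JpegFrameConsumer : MonoBehaviour
{
    // Assumed queue type; the original only shows frameQueue.Enqueue(byte[]).
    public ConcurrentQueue<byte[]> frameQueue = new ConcurrentQueue<byte[]>();

    private Texture2D _texture;

    void Start()
    {
        // Initial size is irrelevant: LoadImage resizes to the decoded image.
        _texture = new Texture2D(2, 2);
    }

    void Update()
    {
        byte[] jpeg;
        // Drain whatever the listener thread produced since the last frame.
        while (frameQueue.TryDequeue(out jpeg))
        {
            _texture.LoadImage(jpeg); // decodes JPEG bytes on the main thread
        }
    }
}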