Example #1
    public static InitSenderPacketData Parse(byte[] packetBytes)
    {
        var reader = new BinaryReader(new MemoryStream(packetBytes));

        // Skip the packet header: a 4-byte session id followed by a 1-byte packet type.
        reader.BaseStream.Position = 5;

        var initSenderPacketData = new InitSenderPacketData();

        initSenderPacketData.depthWidth  = reader.ReadInt32();
        initSenderPacketData.depthHeight = reader.ReadInt32();

        var depthIntrinsics = new AzureKinectCalibration.Intrinsics();

        depthIntrinsics.cx                   = reader.ReadSingle();
        depthIntrinsics.cy                   = reader.ReadSingle();
        depthIntrinsics.fx                   = reader.ReadSingle();
        depthIntrinsics.fy                   = reader.ReadSingle();
        depthIntrinsics.k1                   = reader.ReadSingle();
        depthIntrinsics.k2                   = reader.ReadSingle();
        depthIntrinsics.k3                   = reader.ReadSingle();
        depthIntrinsics.k4                   = reader.ReadSingle();
        depthIntrinsics.k5                   = reader.ReadSingle();
        depthIntrinsics.k6                   = reader.ReadSingle();
        depthIntrinsics.codx                 = reader.ReadSingle();
        depthIntrinsics.cody                 = reader.ReadSingle();
        depthIntrinsics.p2                   = reader.ReadSingle(); // note: p2 precedes p1 in the wire format
        depthIntrinsics.p1                   = reader.ReadSingle();
        depthIntrinsics.metricRadius         = reader.ReadSingle();
        initSenderPacketData.depthIntrinsics = depthIntrinsics;

        initSenderPacketData.depthMetricRadius = reader.ReadSingle();

        return initSenderPacketData;
    }
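
The jump to Position = 5 skips the packet header; Example #7 below parses the same header by hand: bytes 0-3 hold a little-endian session id and byte 4 holds the packet type. A minimal sketch of header accessors under that layout (the names here are illustrative; the source routes these reads through PacketHelper.getSessionIdFromSenderPacketBytes and PacketHelper.getPacketTypeFromSenderPacketBytes):

    // Illustrative header accessors; the layout is inferred from the manual
    // parse in Example #7 (4-byte session id, then a 1-byte packet type).
    public static class PacketHeader
    {
        public static int GetSessionId(byte[] packet)
        {
            return BitConverter.ToInt32(packet, 0); // bytes 0..3
        }

        public static SenderPacketType GetPacketType(byte[] packet)
        {
            return (SenderPacketType)packet[4]; // byte 4; payload starts at 5
        }
    }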
    public static SenderPacketSet Receive(UdpSocket udpSocket)
    {
        var senderPacketSet = new SenderPacketSet();

        while (true)
        {
            var packet = udpSocket.Receive();
            if (packet == null)
            {
                break;
            }

            //int sessionId = PacketHelper.getSessionIdFromSenderPacketBytes(packet);
            senderPacketSet.ReceivedAny = true;
            switch (PacketHelper.getPacketTypeFromSenderPacketBytes(packet))
            {
            case SenderPacketType.Init:
                senderPacketSet.InitPacketDataList.Add(InitSenderPacketData.Parse(packet));
                break;

            case SenderPacketType.Frame:
                senderPacketSet.VideoPacketDataList.Add(VideoSenderPacketData.Parse(packet));
                break;

            case SenderPacketType.Parity:
                senderPacketSet.FecPacketDataList.Add(ParitySenderPacketData.Parse(packet));
                break;

            case SenderPacketType.Audio:
                senderPacketSet.AudioPacketDataList.Add(AudioSenderPacketData.Parse(packet));
                break;

            case SenderPacketType.Floor:
                senderPacketSet.FloorPacketDataList.Add(FloorSenderPacketData.Parse(packet));
                break;
            }
        }

        return senderPacketSet;
    }
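
Receive drains the socket until no packet is pending, so a caller would typically poll it once per frame and dispatch the collected lists. A minimal usage sketch (the containing class name SenderPacketReceiver and the per-frame dispatch are assumptions, not shown in the source):

    // Hypothetical per-frame poll of the non-blocking socket.
    void Update()
    {
        var senderPacketSet = SenderPacketReceiver.Receive(udpSocket);
        if (!senderPacketSet.ReceivedAny)
            return;

        foreach (var initPacketData in senderPacketSet.InitPacketDataList)
        {
            // React to a (re)connecting sender, e.g. rebuild textures and mesh.
        }
        // ...then hand VideoPacketDataList, FecPacketDataList, and
        // AudioPacketDataList to their respective consumers.
    }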
Example #3
    public TextureGroupUpdater(Material azureKinectScreenMaterial, InitSenderPacketData initPacketData, int sessionId, IPEndPoint endPoint)
    {
        this.azureKinectScreenMaterial = azureKinectScreenMaterial;

        textureGroup = new TextureGroup(Plugin.texture_group_create());
        UnityEngine.Debug.Log($"textureGroup id: {textureGroup.GetId()}");

        lastVideoFrameId = -1;

        prepared = false;

        videoMessages  = new Dictionary<int, VideoSenderMessageData>();
        frameStopWatch = Stopwatch.StartNew();

        textureGroup.SetWidth(initPacketData.depthWidth);
        textureGroup.SetHeight(initPacketData.depthHeight);
        PluginHelper.InitTextureGroup(textureGroup.GetId());

        colorDecoder = new Vp8Decoder();
        depthDecoder = new TrvlDecoder(initPacketData.depthWidth * initPacketData.depthHeight);

        this.sessionId = sessionId;
        this.endPoint  = endPoint;
    }
Example #4
    //private static Mesh CreateMesh(AzureKinectCalibration calibration)
    private static Mesh CreateMesh(InitSenderPacketData initSenderPacketData)
    {
        int width  = initSenderPacketData.depthWidth;
        int height = initSenderPacketData.depthHeight;

        //var depthCamera = calibration.DepthCamera;

        var vertices = new Vector3[width * height];
        var uv       = new Vector2[width * height];

        for (int i = 0; i < width; ++i)
        {
            for (int j = 0; j < height; ++j)
            {
                float[] xy    = new float[2];
                int     valid = 0;
                if (AzureKinectIntrinsicTransformation.Unproject(initSenderPacketData.depthIntrinsics,
                                                                 initSenderPacketData.depthMetricRadius,
                                                                 new float[2] { i, j }, ref xy, ref valid))
                {
                    vertices[i + j * width] = new Vector3(xy[0], xy[1], 1.0f);
                }
                else
                {
                    vertices[i + j * width] = new Vector3(0.0f, 0.0f, 0.0f);
                }
                uv[i + j * width] = new Vector2(i / (float)(width - 1), j / (float)(height - 1));
            }
        }

        int quadWidth    = width - 2;
        int quadHeight   = height - 2;
        var quadVertices = new Vector3[quadWidth * quadHeight * 4];
        var quadUv       = new Vector2[quadWidth * quadHeight * 4];

        for (int ii = 0; ii < quadWidth; ++ii)
        {
            for (int jj = 0; jj < quadHeight; ++jj)
            {
                int quadIndex = ii + jj * quadWidth;
                int i         = ii + 1;
                int j         = jj + 1;
                quadVertices[quadIndex * 4 + 0] = (vertices[i + j * width] + vertices[(i - 1) + (j - 1) * width]) * 0.5f;
                quadVertices[quadIndex * 4 + 1] = (vertices[i + j * width] + vertices[(i + 1) + (j - 1) * width]) * 0.5f;
                quadVertices[quadIndex * 4 + 2] = (vertices[i + j * width] + vertices[(i - 1) + (j + 1) * width]) * 0.5f;
                quadVertices[quadIndex * 4 + 3] = (vertices[i + j * width] + vertices[(i + 1) + (j + 1) * width]) * 0.5f;

                quadUv[quadIndex * 4 + 0] = uv[i + j * width];
                quadUv[quadIndex * 4 + 1] = uv[i + j * width];
                quadUv[quadIndex * 4 + 2] = uv[i + j * width];
                quadUv[quadIndex * 4 + 3] = uv[i + j * width];
            }
        }

        var triangles = new int[quadWidth * quadHeight * 6];

        for (int i = 0; i < quadWidth * quadHeight; ++i)
        {
            triangles[i * 6 + 0] = i * 4 + 0;
            triangles[i * 6 + 1] = i * 4 + 1;
            triangles[i * 6 + 2] = i * 4 + 2;
            triangles[i * 6 + 3] = i * 4 + 1;
            triangles[i * 6 + 4] = i * 4 + 3;
            triangles[i * 6 + 5] = i * 4 + 2;
        }

        // Without these bounds, Unity decides whether to render this mesh based on the vertices
        // calculated here, so it would skip rendering the mesh transformed by the depth texture
        // even when the transformed mesh lies inside the camera's viewport.
        var bounds = new Bounds(Vector3.zero, Vector3.one * 1000.0f);

        var mesh = new Mesh()
        {
            indexFormat = IndexFormat.UInt32,
            vertices    = quadVertices,
            uv          = quadUv,
            bounds      = bounds,
        };

        mesh.SetIndices(triangles, MeshTopology.Triangles, 0);

        return mesh;
    }
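
Each quad expands into four vertices and six indices, which is why the mesh opts into IndexFormat.UInt32. A back-of-envelope count (the 640x576 NFOV-unbinned resolution is an assumption; the actual depth mode depends on the sender):

    // Hypothetical counts for a 640x576 depth map:
    int width = 640, height = 576;
    int quadCount   = (width - 2) * (height - 2);  // 366,212
    int vertexCount = quadCount * 4;               // 1,464,848
    int indexCount  = quadCount * 6;               // 2,197,272
    // Far beyond the 65,535 limit of 16-bit indices, hence IndexFormat.UInt32.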
Example #5
    private static Mesh CreateGeometryMesh(InitSenderPacketData initSenderPacketData)
    {
        int width  = initSenderPacketData.depthWidth;
        int height = initSenderPacketData.depthHeight;

        var vertices = new Vector3[width * height];
        var uv       = new Vector2[width * height];

        for (int i = 0; i < width; ++i)
        {
            for (int j = 0; j < height; ++j)
            {
                float[] xy    = new float[2];
                int     valid = 0;
                if (AzureKinectIntrinsicTransformation.Unproject(initSenderPacketData.depthIntrinsics,
                                                                 initSenderPacketData.depthMetricRadius,
                                                                 new float[2] { i, j }, ref xy, ref valid))
                {
                    vertices[i + j * width] = new Vector3(xy[0], xy[1], 1.0f);
                }
                else
                {
                    vertices[i + j * width] = new Vector3(0.0f, 0.0f, 0.0f);
                }
                uv[i + j * width] = new Vector2(i / (float)(width - 1), j / (float)(height - 1));
            }
        }

        // Convert the point-cloud vertices and uv into their quad-based counterparts.
        int quadWidth         = width - 2;
        int quadHeight        = height - 2;
        var quadPositions     = new Vector3[quadWidth * quadHeight];
        var quadUv            = new Vector2[quadWidth * quadHeight];
        var quadPositionSizes = new Vector2[quadWidth * quadHeight];

        for (int ii = 0; ii < quadWidth; ++ii)
        {
            for (int jj = 0; jj < quadHeight; ++jj)
            {
                int i = ii + 1;
                int j = jj + 1;
                quadPositions[ii + jj * quadWidth]     = vertices[i + j * width];
                quadUv[ii + jj * quadWidth]            = uv[i + j * width];
                quadPositionSizes[ii + jj * quadWidth] = (vertices[(i + 1) + (j + 1) * width] - vertices[(i - 1) + (j - 1) * width]) * 0.5f;
            }
        }

        var triangles = new int[quadPositions.Length];

        for (int i = 0; i < triangles.Length; ++i)
        {
            triangles[i] = i;
        }

        // Without these bounds, Unity decides whether to render this mesh based on the vertices
        // calculated here, so it would skip rendering the mesh transformed by the depth texture
        // even when the transformed mesh lies inside the camera's viewport.
        var bounds = new Bounds(Vector3.zero, Vector3.one * 1000.0f);

        var mesh = new Mesh()
        {
            indexFormat = IndexFormat.UInt32,
            vertices    = quadPositions,
            uv          = quadUv,
            uv2         = quadPositionSizes,
            bounds      = bounds,
        };

        mesh.SetIndices(triangles, MeshTopology.Points, 0);

        return mesh;
    }
Example #6
 public void Setup(InitSenderPacketData initSenderPacketData)
 {
     meshFilter.mesh = CreateMesh(initSenderPacketData);
     //meshFilter.mesh = CreateGeometryMesh(calibration);
 }
Example #7
 public KinectReceiver(int receiverSessionId, IPEndPoint senderEndPoint, KinectOrigin kinectOrigin, InitSenderPacketData initPacketData)
 {
     this.receiverSessionId = receiverSessionId;
     this.senderEndPoint    = senderEndPoint;
     this.kinectOrigin      = kinectOrigin;
     videoMessageAssembler  = new VideoMessageAssembler(receiverSessionId, senderEndPoint);
     audioPacketReceiver    = new AudioPacketReceiver();
     textureGroupUpdater    = new TextureGroupUpdater(kinectOrigin.Screen.Material, initPacketData, receiverSessionId, senderEndPoint);
     heartbeatStopWatch     = Stopwatch.StartNew();
     receivedAnyStopWatch   = Stopwatch.StartNew();
 }
    public void Ping(UdpSocket udpSocket)
    {
        int senderSessionId = -1;
        int pingCount       = 0;

        while (true)
        {
            bool initialized = false;
            udpSocket.Send(PacketHelper.createPingReceiverPacketBytes());
            ++pingCount;
            UnityEngine.Debug.Log("Sent ping");

            //Thread.Sleep(100);
            Thread.Sleep(300);

            SocketError error = SocketError.WouldBlock;
            while (true)
            {
                var packet = udpSocket.Receive(out error);
                if (packet == null)
                {
                    break;
                }

                int cursor    = 0;
                int sessionId = BitConverter.ToInt32(packet, cursor);
                cursor += 4;

                var packetType = (SenderPacketType)packet[cursor];
                cursor += 1;
                if (packetType != SenderPacketType.Init)
                {
                    UnityEngine.Debug.Log($"A different kind of a packet received before an init packet: {packetType}");
                    continue;
                }

                senderSessionId = sessionId;

                var initSenderPacketData = InitSenderPacketData.Parse(packet);

                textureGroup.SetWidth(initSenderPacketData.depthWidth);
                textureGroup.SetHeight(initSenderPacketData.depthHeight);
                PluginHelper.InitTextureGroup();

                colorDecoder = new Vp8Decoder();
                depthDecoder = new TrvlDecoder(initSenderPacketData.depthWidth * initSenderPacketData.depthHeight);

                azureKinectScreen.Setup(initSenderPacketData);

                initialized = true;
                break;
            }
            if (initialized)
            {
                break;
            }

            if (pingCount == 10)
            {
                UnityEngine.Debug.Log("Tried pinging 10 times and failed to received an init packet...\n");
                return;
            }
        }

        this.udpSocket = udpSocket;
        var videoPacketDataQueue = new ConcurrentQueue<VideoSenderPacketData>();
        var fecPacketDataQueue   = new ConcurrentQueue<FecSenderPacketData>();
        var audioPacketDataQueue = new ConcurrentQueue<AudioSenderPacketData>();

        var taskThread = new Thread(() =>
        {
            var receiveSenderPacketTask    = new ReceiveSenderPacketTask();
            var reassembleVideoMessageTask = new ReassembleVideoMessageTask();
            var consumeAudioPacketTask     = new ConsumeAudioPacketTask();

            while (!receiverStopped)
            {
                receiveSenderPacketTask.Run(this,
                                            senderSessionId,
                                            videoPacketDataQueue,
                                            fecPacketDataQueue,
                                            audioPacketDataQueue);
                reassembleVideoMessageTask.Run(this, videoPacketDataQueue, fecPacketDataQueue);
                consumeAudioPacketTask.Run(this, audioPacketDataQueue);
            }

            receiverStopped = true;
        });

        taskThread.Start();
    }
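
The task thread loops until receiverStopped becomes true, but the source does not show who sets the flag from outside. A minimal shutdown sketch (hypothetical method; the flag would need to be volatile, or otherwise synchronized, to be reliably visible across threads):

    // Hypothetical: ask the task thread started in Ping() to exit its loop.
    public void Stop()
    {
        receiverStopped = true;
    }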
    // Since the calculation involving Unproject() takes too long for a single
    // frame, this function runs as a coroutine that yields after every
    // 100 ms of work.
    public IEnumerator SetupMesh(InitSenderPacketData initSenderPacketData)
    {
        int width  = initSenderPacketData.depthWidth;
        int height = initSenderPacketData.depthHeight;

        var vertices = new Vector3[width * height];
        var uv       = new Vector2[width * height];

        var stopWatch = Stopwatch.StartNew();

        for (int i = 0; i < width; ++i)
        {
            for (int j = 0; j < height; ++j)
            {
                float[] xy    = new float[2];
                int     valid = 0;
                if (KinectIntrinsicTransformation.Unproject(initSenderPacketData.depthIntrinsics,
                                                            initSenderPacketData.depthMetricRadius,
                                                            new float[2] { i, j }, ref xy, ref valid))
                {
                    // Flip y since Azure Kinect's y axis is downwards.
                    // https://docs.microsoft.com/en-us/azure/kinect-dk/coordinate-systems
                    vertices[i + j * width] = new Vector3(xy[0], -xy[1], 1.0f);
                }
                else
                {
                    vertices[i + j * width] = new Vector3(0.0f, 0.0f, 0.0f);
                }
                uv[i + j * width] = new Vector2(i / (float)(width - 1), j / (float)(height - 1));
            }

            if (stopWatch.ElapsedMilliseconds > 100)
            {
                yield return null;

                stopWatch = Stopwatch.StartNew();
            }
        }

        //print($"vertices[0]: {vertices[0]}"); // (-1.0, 1.0, 1.0): left-top
        //print($"vertices[last]: {vertices[vertices.Length - 1]}"); // (0.8, -0.6, 1.0): right-bottom

        const float SIZE_AMPLIFIER = 1.2f;
        int         quadWidth      = width - 2;
        int         quadHeight     = height - 2;
        var         quadVertices   = new Vector3[quadWidth * quadHeight];
        var         quadUv         = new Vector2[quadWidth * quadHeight];
        var         quadHalfSizes  = new Vector2[quadWidth * quadHeight];

        for (int ii = 0; ii < quadWidth; ++ii)
        {
            for (int jj = 0; jj < quadHeight; ++jj)
            {
                int i = ii + 1;
                int j = jj + 1;
                quadVertices[ii + jj * quadWidth] = vertices[i + j * width];
                quadUv[ii + jj * quadWidth]       = uv[i + j * width];
                // The diagonal neighbors are chosen so that both x and y come out positive.
                // The first 0.5f averages the two-pixel span around the vertex at (i, j);
                // the second 0.5f takes half of that size.
                quadHalfSizes[ii + jj * quadWidth] = (vertices[(i + 1) + (j - 1) * width] - vertices[(i - 1) + (j + 1) * width]) * 0.5f * 0.5f * SIZE_AMPLIFIER;
            }
        }

        //print($"quadSizes[0]: {quadSizes[0].x}, {quadSizes[0].y}"); // 0.002900749, 0.003067017

        var triangles = new int[quadWidth * quadHeight];

        for (int i = 0; i < quadWidth * quadHeight; ++i)
        {
            triangles[i] = i;
        }

        // 65.535 equals (2^16 - 1) / 1000: (2^16 - 1) undoes the texture-level
        // normalization from 0..(2^16 - 1) to 0..1, and 1000 converts mm (the
        // unit of Azure Kinect) to m (the unit of Unity3D).
        for (int i = 0; i < quadVertices.Length; ++i)
        {
            quadVertices[i] *= 65.535f;
        }

        for (int i = 0; i < quadHalfSizes.Length; ++i)
        {
            quadHalfSizes[i] *= 65.535f;
        }

        // Without these bounds, Unity decides whether to render this mesh based on the vertices
        // calculated here, so it would skip rendering the mesh transformed by the depth texture
        // even when the transformed mesh lies inside the camera's viewport.
        var bounds = new Bounds(Vector3.zero, Vector3.one * 1000.0f);

        var mesh = new Mesh()
        {
            indexFormat = IndexFormat.UInt32,
            vertices    = quadVertices,
            uv          = quadUv,
            uv2         = quadHalfSizes,
            bounds      = bounds,
        };

        mesh.SetIndices(triangles, MeshTopology.Points, 0);

        meshFilter.mesh = mesh;
    }
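
Because SetupMesh is an IEnumerator, it only makes progress when driven as a Unity coroutine; calling it like a regular method would return an iterator without executing the body. A minimal call-site sketch, assuming the surrounding class is a MonoBehaviour:

    // Hypothetical call site: spread the mesh build across frames.
    StartCoroutine(SetupMesh(initSenderPacketData));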