    private async void Connect()
    {
        UiVisibility = false;

        // The default IP address is 127.0.0.1.
        string ipAddress = ipAddressInputField.text;

        if (ipAddress.Length == 0)
        {
            ipAddress = "127.0.0.1";
        }

        // The default port is 7777.
        string portString = portInputField.text;
        int    port       = portString.Length != 0 ? int.Parse(portString) : 7777;

        string logString = string.Format("Try connecting to {0}:{1}...", ipAddress, port);

        Debug.Log(logString);
        statusText.text = logString;
        var receiver = new Receiver();

        if (await receiver.ConnectAsync(new IPEndPoint(IPAddress.Parse(ipAddress), port)))
        {
            this.receiver   = receiver;
            decoder         = new Vp8Decoder();
            statusText.text = string.Format("Connected to {0}:{1}!", ipAddress, port);
        }
        else
        {
            UiVisibility    = true;
            statusText.text = string.Format("Failed to connect to {0}:{1}.", ipAddress, port);
        }
    }
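Connect() above feeds the raw text fields straight into int.Parse and IPAddress.Parse, which throw FormatException on malformed input. A minimal defensive sketch, assuming the same ipAddressInputField and portInputField fields; the helper name ReadEndPointFromUi is hypothetical:

    // Hypothetical helper: falls back to 127.0.0.1:7777 instead of throwing on bad input.
    private IPEndPoint ReadEndPointFromUi()
    {
        if (!IPAddress.TryParse(ipAddressInputField.text, out IPAddress address))
        {
            address = IPAddress.Loopback;
        }

        if (!int.TryParse(portInputField.text, out int port))
        {
            port = 7777;
        }

        return new IPEndPoint(address, port);
    }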
    public IEnumerator Prepare(Material azureKinectScreenMaterial, VideoSenderMessage videoMessage)
    {
        if (State != PrepareState.Unprepared)
        {
            throw new Exception("State has to be Unprepared to prepare TextureGroupUpdater.");
        }

        State = PrepareState.Preparing;

        textureSet.SetWidth(videoMessage.width);
        textureSet.SetHeight(videoMessage.height);
        TelepresenceToolkitPlugin.InitTextureGroup(textureSet.GetId());

        colorDecoder = new Vp8Decoder();
        depthDecoder = new TrvlDecoder(videoMessage.width * videoMessage.height);

        while (!textureSet.IsInitialized())
        {
            yield return null;
        }

        // TextureGroup includes Y, UV, and depth textures.
        azureKinectScreenMaterial.SetTexture("_YTex", textureSet.GetYTexture());
        azureKinectScreenMaterial.SetTexture("_UvTex", textureSet.GetUvTexture());
        azureKinectScreenMaterial.SetTexture("_DepthTex", textureSet.GetDepthTexture());

        State = PrepareState.Prepared;
    }
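Prepare() only binds the textures after textureSet.IsInitialized() returns true, so it has to be driven as a Unity coroutine. A minimal usage sketch, assuming the caller is a MonoBehaviour and that updater, screenMaterial, and videoMessage are fields the caller already holds (those names are hypothetical):

    private IEnumerator PrepareTextureGroupUpdater()
    {
        // Runs Prepare() to completion before checking the resulting state.
        yield return StartCoroutine(updater.Prepare(screenMaterial, videoMessage));

        if (updater.State == PrepareState.Prepared)
        {
            Debug.Log("TextureGroupUpdater is ready to receive frames.");
        }
    }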
Example #3
    public async void OnConnectButtonClicked()
    {
        UiVisibility = false;

        // The default IP address is 127.0.0.1.
        string ipAddress = ipAddressInputField.text;

        if (ipAddress.Length == 0)
        {
            ipAddress = "127.0.0.1";
        }

        // The default port is 7777.
        string portString = portInputField.text;
        int    port       = portString.Length != 0 ? int.Parse(portString) : 7777;

        Debug.LogFormat("Try connecting to {0}:{1}.", ipAddress, port);
        var receiver = new Receiver();

        if (await receiver.ConnectAsync(new IPEndPoint(IPAddress.Parse(ipAddress), port)))
        {
            QuadVisibility = true;
            this.receiver  = receiver;
            decoder        = new Vp8Decoder();
        }
        else
        {
            UiVisibility = true;
        }
    }
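OnConnectButtonClicked() is declared async void so it can be registered directly as a UI callback. A minimal wiring sketch, assuming a UnityEngine.UI.Button field; the connectButton name is hypothetical:

    [SerializeField] private UnityEngine.UI.Button connectButton;

    void Start()
    {
        // A parameterless async void method matches the listener signature expected here.
        connectButton.onClick.AddListener(OnConnectButtonClicked);
    }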
    public KinectReceiver(Material azureKinectScreenMaterial, AzureKinectScreen azureKinectScreen)
    {
        this.azureKinectScreenMaterial = azureKinectScreenMaterial;
        this.azureKinectScreen         = azureKinectScreen;

        textureGroup = new TextureGroup(Plugin.texture_group_reset());

        udpSocket         = null;
        receiverStopped   = false;
        videoMessageQueue = new ConcurrentQueue <Tuple <int, VideoSenderMessageData> >();
        lastVideoFrameId  = -1;

        colorDecoder = null;
        depthDecoder = null;
        prepared     = false;

        ringBuffer       = new RingBuffer((int)(KH_LATENCY_SECONDS * 2 * KH_BYTES_PER_SECOND / sizeof(float)));
        audioDecoder     = new AudioDecoder(KH_SAMPLE_RATE, KH_CHANNEL_COUNT);
        lastAudioFrameId = -1;

        videoMessages  = new Dictionary <int, VideoSenderMessageData>();
        frameStopWatch = Stopwatch.StartNew();
    }
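The ring buffer above is sized in bytes per second and then converted to float samples. A worked sizing example under assumed values; KH_SAMPLE_RATE = 48000, KH_CHANNEL_COUNT = 2, and KH_LATENCY_SECONDS = 0.2f are illustrative assumptions, not values taken from the example:

    // Assumed constants for illustration only; the real KH_* values may differ.
    const int   KH_SAMPLE_RATE      = 48000;
    const int   KH_CHANNEL_COUNT    = 2;
    const float KH_LATENCY_SECONDS  = 0.2f;
    const int   KH_BYTES_PER_SECOND = KH_SAMPLE_RATE * KH_CHANNEL_COUNT * sizeof(float); // 384,000 B/s

    // 0.2 s * 2 * 384,000 B/s / 4 B per float = 38,400 float samples of buffering.
    int ringBufferSize = (int)(KH_LATENCY_SECONDS * 2 * KH_BYTES_PER_SECOND / sizeof(float));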
Example #5
    public TextureGroupUpdater(Material azureKinectScreenMaterial, InitSenderPacketData initPacketData, int sessionId, IPEndPoint endPoint)
    {
        this.azureKinectScreenMaterial = azureKinectScreenMaterial;

        textureGroup = new TextureGroup(Plugin.texture_group_create());
        UnityEngine.Debug.Log($"textureGroup id: {textureGroup.GetId()}");

        lastVideoFrameId = -1;

        prepared = false;

        videoMessages  = new Dictionary <int, VideoSenderMessageData>();
        frameStopWatch = Stopwatch.StartNew();

        textureGroup.SetWidth(initPacketData.depthWidth);
        textureGroup.SetHeight(initPacketData.depthHeight);
        PluginHelper.InitTextureGroup(textureGroup.GetId());

        colorDecoder = new Vp8Decoder();
        depthDecoder = new TrvlDecoder(initPacketData.depthWidth * initPacketData.depthHeight);

        this.sessionId = sessionId;
        this.endPoint  = endPoint;
    }
    void Update()
    {
        // Space key resets the scene to be placed in front of the camera.
        if (Input.GetKeyDown(KeyCode.Space))
        {
            ResetView();
        }

        // Sends virtual keyboard strokes to the TextMeshes for the IP address and the port.
        AbsorbInput();

        // If the textures are not created yet, create them and assign them to the quads.
        if (textureGroup == null)
        {
            // Check whether the native plugin has Direct3D textures that
            // can be connected to Unity textures.
            if (Plugin.texture_group_get_y_texture_view().ToInt64() != 0)
            {
                // TextureGroup includes Y, U, V, and a depth texture.
                textureGroup = new TextureGroup(Plugin.texture_group_get_width(),
                                                Plugin.texture_group_get_height());

                azureKinectScreenMaterial.SetTexture("_YTex", textureGroup.YTexture);
                azureKinectScreenMaterial.SetTexture("_UTex", textureGroup.UTexture);
                azureKinectScreenMaterial.SetTexture("_VTex", textureGroup.VTexture);
                azureKinectScreenMaterial.SetTexture("_DepthTex", textureGroup.DepthTexture);

                print("textureGroup intialized");
            }
        }

        if (receiver == null)
        {
            return;
        }

        while (initPacketQueue.TryDequeue(out byte[] packet))
        {
            int cursor = 0;
            //int sessionId = BitConverter.ToInt32(packet, cursor);
            cursor += 4;

            //var packetType = packet[cursor];
            cursor += 1;

            var calibration = ManagerHelper.ReadAzureKinectCalibrationFromMessage(packet, cursor);

            Plugin.texture_group_set_width(calibration.DepthCamera.Width);
            Plugin.texture_group_set_height(calibration.DepthCamera.Height);
            PluginHelper.InitTextureGroup();

            colorDecoder = new Vp8Decoder();
            depthDecoder = new TrvlDecoder(calibration.DepthCamera.Width * calibration.DepthCamera.Height);

            azureKinectScreen.Setup(calibration);
        }

        while (frameMessageQueue.TryDequeue(out FrameMessage frameMessage))
        {
            frameMessages.Add(frameMessage);
        }

        frameMessages.Sort((x, y) => x.FrameId.CompareTo(y.FrameId));

        if (frameMessages.Count == 0)
        {
            return;
        }

        int? beginIndex = null;

        // If there is a key frame, use the most recent one.
        for (int i = frameMessages.Count - 1; i >= 0; --i)
        {
            if (frameMessages[i].Keyframe)
            {
                beginIndex = i;
                break;
            }
        }

        // When there is no key frame, go through all the frames if the first
        // FrameMessage is the one right after the previously rendered one.
        if (!beginIndex.HasValue)
        {
            if (frameMessages[0].FrameId == lastFrameId + 1)
            {
                beginIndex = 0;
            }
            else
            {
                // Wait for more frames if there is no way to render without glitches.
                return;
            }
        }

        // ffmpegFrame and trvlFrame are guaranteed to be non-null after the loop
        // since beginIndex has a value, so the loop body runs at least once.
        FFmpegFrame ffmpegFrame = null;
        TrvlFrame   trvlFrame   = null;
        TimeSpan    packetCollectionTime = TimeSpan.Zero;

        var decoderStopWatch = Stopwatch.StartNew();

        for (int i = beginIndex.Value; i < frameMessages.Count; ++i)
        {
            var frameMessage = frameMessages[i];
            lastFrameId = frameMessage.FrameId;

            packetCollectionTime = frameMessage.PacketCollectionTime;

            var colorEncoderFrame = frameMessage.GetColorEncoderFrame();
            var depthEncoderFrame = frameMessage.GetDepthEncoderFrame();

            IntPtr colorEncoderFrameBytes = Marshal.AllocHGlobal(colorEncoderFrame.Length);
            Marshal.Copy(colorEncoderFrame, 0, colorEncoderFrameBytes, colorEncoderFrame.Length);
            ffmpegFrame = colorDecoder.Decode(colorEncoderFrameBytes, colorEncoderFrame.Length);
            Marshal.FreeHGlobal(colorEncoderFrameBytes);


            IntPtr depthEncoderFrameBytes = Marshal.AllocHGlobal(depthEncoderFrame.Length);
            Marshal.Copy(depthEncoderFrame, 0, depthEncoderFrameBytes, depthEncoderFrame.Length);
            trvlFrame = depthDecoder.Decode(depthEncoderFrameBytes, frameMessage.Keyframe);
            Marshal.FreeHGlobal(depthEncoderFrameBytes);
        }
        decoderStopWatch.Stop();
        var decoderTime = decoderStopWatch.Elapsed;

        frameStopWatch.Stop();
        var frameTime = frameStopWatch.Elapsed;

        frameStopWatch = Stopwatch.StartNew();

        print($"id: {lastFrameId}, packet collection time: {packetCollectionTime.TotalMilliseconds}," +
              $"decoder time: {decoderTime.TotalMilliseconds}, frame time: {frameTime.TotalMilliseconds}");

        receiver.Send(lastFrameId, (float)packetCollectionTime.TotalMilliseconds, (float)decoderTime.TotalMilliseconds,
                      (float)frameTime.TotalMilliseconds, summaryPacketCount);
        summaryPacketCount = 0;

        // Invokes a function to be called in a render thread.
        if (textureGroup != null)
        {
            Plugin.texture_group_set_ffmpeg_frame(ffmpegFrame.Ptr);
            Plugin.texture_group_set_depth_pixels(trvlFrame.Ptr);
            PluginHelper.UpdateTextureGroup();
        }

        frameMessages = new List <FrameMessage>();
    }
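The decode loop above copies each encoded frame into unmanaged memory with Marshal.AllocHGlobal and releases it with Marshal.FreeHGlobal; if Decode throws, the buffer would leak. A sketch of the same pattern with a try/finally guard; DecodeColorFrame is a hypothetical helper that reuses the Decode(IntPtr, int) call shown above:

    private FFmpegFrame DecodeColorFrame(Vp8Decoder decoder, byte[] encodedFrame)
    {
        IntPtr buffer = Marshal.AllocHGlobal(encodedFrame.Length);
        try
        {
            Marshal.Copy(encodedFrame, 0, buffer, encodedFrame.Length);
            return decoder.Decode(buffer, encodedFrame.Length);
        }
        finally
        {
            // Runs even when Decode throws, so the unmanaged buffer never leaks.
            Marshal.FreeHGlobal(buffer);
        }
    }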
    public void Ping(UdpSocket udpSocket)
    {
        int senderSessionId = -1;
        int pingCount       = 0;

        while (true)
        {
            bool initialized = false;
            udpSocket.Send(PacketHelper.createPingReceiverPacketBytes());
            ++pingCount;
            UnityEngine.Debug.Log("Sent ping");

            //Thread.Sleep(100);
            Thread.Sleep(300);

            SocketError error = SocketError.WouldBlock;
            while (true)
            {
                var packet = udpSocket.Receive(out error);
                if (packet == null)
                {
                    break;
                }

                int cursor    = 0;
                int sessionId = BitConverter.ToInt32(packet, cursor);
                cursor += 4;

                var packetType = (SenderPacketType)packet[cursor];
                cursor += 1;
                if (packetType != SenderPacketType.Init)
                {
                    UnityEngine.Debug.Log($"A different kind of a packet received before an init packet: {packetType}");
                    continue;
                }

                senderSessionId = sessionId;

                var initSenderPacketData = InitSenderPacketData.Parse(packet);

                textureGroup.SetWidth(initSenderPacketData.depthWidth);
                textureGroup.SetHeight(initSenderPacketData.depthHeight);
                PluginHelper.InitTextureGroup();

                colorDecoder = new Vp8Decoder();
                depthDecoder = new TrvlDecoder(initSenderPacketData.depthWidth * initSenderPacketData.depthHeight);

                azureKinectScreen.Setup(initSenderPacketData);

                initialized = true;
                break;
            }
            if (initialized)
            {
                break;
            }

            if (pingCount == 10)
            {
                UnityEngine.Debug.Log("Tried pinging 10 times and failed to received an init packet...\n");
                return;
            }
        }

        this.udpSocket = udpSocket;
        var videoPacketDataQueue = new ConcurrentQueue <VideoSenderPacketData>();
        var fecPacketDataQueue   = new ConcurrentQueue <FecSenderPacketData>();
        var audioPacketDataQueue = new ConcurrentQueue <AudioSenderPacketData>();

        var taskThread = new Thread(() =>
        {
            var receiveSenderPacketTask    = new ReceiveSenderPacketTask();
            var reassembleVideoMessageTask = new ReassembleVideoMessageTask();
            var consumeAudioPacketTask     = new ConsumeAudioPacketTask();

            while (!receiverStopped)
            {
                receiveSenderPacketTask.Run(this,
                                            senderSessionId,
                                            videoPacketDataQueue,
                                            fecPacketDataQueue,
                                            audioPacketDataQueue);
                reassembleVideoMessageTask.Run(this, videoPacketDataQueue, fecPacketDataQueue);
                consumeAudioPacketTask.Run(this, audioPacketDataQueue);
            }

            receiverStopped = true;
        });

        taskThread.Start();
    }
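Ping() parses every incoming packet the same way: bytes 0-3 hold the session id (read with BitConverter.ToInt32) and byte 4 holds the packet type. A small hypothetical helper that mirrors that inline parsing; only SenderPacketType and BitConverter come from the example:

    private static (int sessionId, SenderPacketType packetType) ReadPacketHeader(byte[] packet)
    {
        // Matches the cursor arithmetic above: a 4-byte session id, then a 1-byte type.
        int sessionId  = BitConverter.ToInt32(packet, 0);
        var packetType = (SenderPacketType)packet[4];
        return (sessionId, packetType);
    }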