// Unity per-frame callback.
// 1) Handles the Space-key view reset and virtual-keyboard input.
// 2) Lazily creates the TextureGroup once the native plugin exposes its
//    Direct3D texture views, wiring Y/U/V/depth textures into the screen material.
// 3) Drains pending init packets (calibration) to size the texture group and
//    (re)build the color/depth decoders.
// 4) Collects queued FrameMessages, picks a render start point (latest keyframe,
//    or the frame right after the last rendered one), decodes forward from there,
//    and pushes the newest decoded frame to the render thread.
void Update()
{
    // Space key resets the scene to be placed in front of the camera.
    if (Input.GetKeyDown(KeyCode.Space))
    {
        ResetView();
    }

    // Sends virtual keyboard strokes to the TextMeshes for the IP address and the port.
    AbsorbInput();

    // If texture is not created, create and assign them to quads.
    if (textureGroup == null)
    {
        // Check whether the native plugin has Direct3D textures that
        // can be connected to Unity textures.
        if (Plugin.texture_group_get_y_texture_view().ToInt64() != 0)
        {
            // TextureGroup includes Y, U, V, and a depth texture.
            textureGroup = new TextureGroup(Plugin.texture_group_get_width(),
                                            Plugin.texture_group_get_height());

            azureKinectScreenMaterial.SetTexture("_YTex", textureGroup.YTexture);
            azureKinectScreenMaterial.SetTexture("_UTex", textureGroup.UTexture);
            azureKinectScreenMaterial.SetTexture("_VTex", textureGroup.VTexture);
            azureKinectScreenMaterial.SetTexture("_DepthTex", textureGroup.DepthTexture);
            print("textureGroup initialized");
        }
    }

    if (receiver == null)
    {
        return;
    }

    // Apply every pending init packet: each carries the calibration needed to
    // size the native texture group and construct fresh decoders.
    while (initPacketQueue.TryDequeue(out byte[] packet))
    {
        // Packet layout: [0..3] int session id, [4] packet type byte, [5..] payload.
        int cursor = 0;
        //int sessionId = BitConverter.ToInt32(packet, cursor);
        cursor += 4;

        //var packetType = packet[cursor];
        cursor += 1;

        var calibration = ManagerHelper.ReadAzureKinectCalibrationFromMessage(packet, cursor);

        Plugin.texture_group_set_width(calibration.DepthCamera.Width);
        Plugin.texture_group_set_height(calibration.DepthCamera.Height);
        PluginHelper.InitTextureGroup();

        colorDecoder = new Vp8Decoder();
        depthDecoder = new TrvlDecoder(calibration.DepthCamera.Width * calibration.DepthCamera.Height);

        azureKinectScreen.Setup(calibration);
    }

    // Move newly arrived frames from the thread-safe queue into the local
    // list, then keep the list ordered by frame id.
    while (frameMessageQueue.TryDequeue(out FrameMessage frameMessage))
    {
        frameMessages.Add(frameMessage);
    }
    frameMessages.Sort((x, y) => x.FrameId.CompareTo(y.FrameId));

    if (frameMessages.Count == 0)
    {
        return;
    }

    int? beginIndex = null;
    // If there is a key frame, use the most recent one.
    for (int i = frameMessages.Count - 1; i >= 0; --i)
    {
        if (frameMessages[i].Keyframe)
        {
            beginIndex = i;
            break;
        }
    }

    // When there is no key frame, go through all the frames if the first
    // FrameMessage is the one right after the previously rendered one.
    if (!beginIndex.HasValue)
    {
        if (frameMessages[0].FrameId == lastFrameId + 1)
        {
            beginIndex = 0;
        }
        else
        {
            // Wait for more frames if there is no way to render without glitches.
            return;
        }
    }

    // ffmpegFrame and trvlFrame are guaranteed to be non-null after the loop
    // since beginIndex.Value < frameMessages.Count, so the loop body runs at
    // least once.
    FFmpegFrame ffmpegFrame = null;
    TrvlFrame trvlFrame = null;
    // Initialized explicitly: the compiler cannot prove the loop below runs,
    // so leaving this unassigned risks a definite-assignment error (CS0165).
    TimeSpan packetCollectionTime = TimeSpan.Zero;

    var decoderStopWatch = Stopwatch.StartNew();
    for (int i = beginIndex.Value; i < frameMessages.Count; ++i)
    {
        var frameMessage = frameMessages[i];
        lastFrameId = frameMessage.FrameId;
        packetCollectionTime = frameMessage.PacketCollectionTime;

        var colorEncoderFrame = frameMessage.GetColorEncoderFrame();
        var depthEncoderFrame = frameMessage.GetDepthEncoderFrame();

        // Copy the managed bytes into unmanaged memory for the native decoder.
        // try/finally guarantees the buffer is freed even if Decode throws
        // (the original leaked it in that case).
        IntPtr colorEncoderFrameBytes = Marshal.AllocHGlobal(colorEncoderFrame.Length);
        try
        {
            Marshal.Copy(colorEncoderFrame, 0, colorEncoderFrameBytes, colorEncoderFrame.Length);
            ffmpegFrame = colorDecoder.Decode(colorEncoderFrameBytes, colorEncoderFrame.Length);
        }
        finally
        {
            Marshal.FreeHGlobal(colorEncoderFrameBytes);
        }

        IntPtr depthEncoderFrameBytes = Marshal.AllocHGlobal(depthEncoderFrame.Length);
        try
        {
            Marshal.Copy(depthEncoderFrame, 0, depthEncoderFrameBytes, depthEncoderFrame.Length);
            trvlFrame = depthDecoder.Decode(depthEncoderFrameBytes, frameMessage.Keyframe);
        }
        finally
        {
            Marshal.FreeHGlobal(depthEncoderFrameBytes);
        }

        // NOTE(review): only the last iteration's ffmpegFrame/trvlFrame are
        // used below; earlier decoded frames from this loop are dropped
        // without an explicit release — confirm whether FFmpegFrame/TrvlFrame
        // own native memory that needs disposal.
    }
    decoderStopWatch.Stop();
    var decoderTime = decoderStopWatch.Elapsed;

    // frameStopWatch measures wall time between consecutive rendered frames.
    frameStopWatch.Stop();
    var frameTime = frameStopWatch.Elapsed;
    frameStopWatch = Stopwatch.StartNew();

    print($"id: {lastFrameId}, packet collection time: {packetCollectionTime.TotalMilliseconds}," +
        $"decoder time: {decoderTime.TotalMilliseconds}, frame time: {frameTime.TotalMilliseconds}");

    // Report per-frame timing stats back to the sender, then reset the
    // per-interval packet counter.
    receiver.Send(lastFrameId,
                  (float)packetCollectionTime.TotalMilliseconds,
                  (float)decoderTime.TotalMilliseconds,
                  (float)frameTime.TotalMilliseconds,
                  summaryPacketCount);
    summaryPacketCount = 0;

    // Invokes a function to be called in a render thread.
    if (textureGroup != null)
    {
        Plugin.texture_group_set_ffmpeg_frame(ffmpegFrame.Ptr);
        Plugin.texture_group_set_depth_pixels(trvlFrame.Ptr);
        PluginHelper.UpdateTextureGroup();
    }

    // Discard all consumed frames; a fresh list avoids mutating anything
    // another holder of the old list reference might still be reading.
    frameMessages = new List<FrameMessage>();
}
// Repeatedly pings the sender over udpSocket until an init packet arrives
// (up to 10 attempts), uses the init data to size the texture group and
// (re)build the color/depth decoders, then starts a background thread that
// receives sender packets, reassembles video messages, and consumes audio
// packets until receiverStopped becomes true.
// NOTE(review): this method blocks the calling thread (Thread.Sleep below) —
// presumably it is not called from the Unity main thread; confirm at callers.
public void Ping(UdpSocket udpSocket)
{
    int senderSessionId = -1;
    int pingCount = 0;
    while (true)
    {
        bool initialized = false;
        udpSocket.Send(PacketHelper.createPingReceiverPacketBytes());
        ++pingCount;
        UnityEngine.Debug.Log("Sent ping");

        // Give the sender time to respond before draining the socket.
        //Thread.Sleep(100);
        Thread.Sleep(300);

        SocketError error = SocketError.WouldBlock;
        while (true)
        {
            // Drain every packet currently available; Receive yields null
            // when nothing is left to read.
            var packet = udpSocket.Receive(out error);
            if (packet == null)
            {
                break;
            }

            // Packet layout: [0..3] int session id, [4] packet type byte,
            // [5..] payload.
            int cursor = 0;
            int sessionId = BitConverter.ToInt32(packet, cursor);
            cursor += 4;

            var packetType = (SenderPacketType)packet[cursor];
            cursor += 1;
            if (packetType != SenderPacketType.Init)
            {
                UnityEngine.Debug.Log($"A different kind of a packet received before an init packet: {packetType}");
                continue;
            }

            senderSessionId = sessionId;

            var initSenderPacketData = InitSenderPacketData.Parse(packet);

            // Size the texture group from the sender's depth resolution and
            // create decoders to match.
            textureGroup.SetWidth(initSenderPacketData.depthWidth);
            textureGroup.SetHeight(initSenderPacketData.depthHeight);
            PluginHelper.InitTextureGroup();

            colorDecoder = new Vp8Decoder();
            depthDecoder = new TrvlDecoder(initSenderPacketData.depthWidth * initSenderPacketData.depthHeight);

            azureKinectScreen.Setup(initSenderPacketData);

            initialized = true;
            break;
        }

        if (initialized)
        {
            break;
        }

        // Give up after 10 unanswered pings.
        if (pingCount == 10)
        {
            UnityEngine.Debug.Log("Tried pinging 10 times and failed to received an init packet...\n");
            return;
        }
    }

    // Handshake succeeded: keep the socket and spin up the receive loop.
    this.udpSocket = udpSocket;
    var videoPacketDataQueue = new ConcurrentQueue<VideoSenderPacketData>();
    var fecPacketDataQueue = new ConcurrentQueue<FecSenderPacketData>();
    var audioPacketDataQueue = new ConcurrentQueue<AudioSenderPacketData>();

    // Background loop: pumps the three tasks until receiverStopped is set
    // (by another part of the class — not visible in this block).
    var taskThread = new Thread(() =>
    {
        var receiveSenderPacketTask = new ReceiveSenderPacketTask();
        var reassembleVideoMessageTask = new ReassembleVideoMessageTask();
        var consumeAudioPacketTask = new ConsumeAudioPacketTask();

        while (!receiverStopped)
        {
            receiveSenderPacketTask.Run(this,
                                        senderSessionId,
                                        videoPacketDataQueue,
                                        fecPacketDataQueue,
                                        audioPacketDataQueue);
            reassembleVideoMessageTask.Run(this,
                                           videoPacketDataQueue,
                                           fecPacketDataQueue);
            consumeAudioPacketTask.Run(this, audioPacketDataQueue);
        }

        receiverStopped = true;
    });
    taskThread.Start();
}