/// <summary>
/// Creates a material from the texture group and saves it as an asset at the given path.
/// Uses the emissive material template when the group contains an emission map,
/// otherwise a plain Standard-shader material.
/// </summary>
/// <param name="group">Texture group supplying the material name and its textures keyed by shader property.</param>
/// <param name="path">Desired asset path; made unique via GenerateUniqueAssetPath before creation.</param>
/// <param name="setSelected">When true, the new material becomes the active editor selection.</param>
static void CreateMaterial(TextureGroup group, string path, bool setSelected)
{
    // An emission map needs the emissive template material; otherwise Standard is enough.
    // Dictionary lookup replaces the original full scan of group.textures.
    bool needsEmission = group.textures.ContainsKey("_EmissionMap");

    Material newMat = needsEmission
        ? new Material(GetSettingsFile().emissiveMatTemplate)
        : new Material(Shader.Find("Standard"));
    newMat.name = group.Name;

    // Assign every texture of the group to its shader property, then persist the asset.
    foreach (KeyValuePair<string, Texture2D> texture in group.textures)
    {
        newMat.SetTexture(texture.Key, texture.Value);
    }
    AssetDatabase.CreateAsset(newMat, AssetDatabase.GenerateUniqueAssetPath(path));

    if (setSelected)
    {
        Selection.activeObject = newMat;
    }
}
/// <summary>Construct an instance.</summary>
/// <param name="info">The asset info file to be read in or created. Holds path information.</param>
/// <param name="relativeDirPath">The relative path to the assetinfo file.</param>
/// <param name="direction">The direction to set the animation.</param>
public AssetSheet(AssetInfo info, string relativeDirPath, Direction direction = Direction.down)
{
    // Plain state capture first, then build the texture group from the same inputs.
    assetInfo = info;
    path = relativeDirPath;
    index = 0;
    textures = new TextureGroup(info, relativeDirPath, direction);
}
public void TestTextureAdd()
{
    // Arrange: an empty group and a texture to insert.
    var group = new TextureGroup();
    var addedTexture = new Texture(0, 0, TextureRotation.Any);

    // Act
    group.Add(addedTexture);

    // Assert: the group's item list now holds the texture.
    Assert.IsTrue(group.Items.Contains(addedTexture));
}
public void TestTextureAddedEvent()
{
    // Arrange: subscribe to ItemAdded before inserting the texture.
    var group = new TextureGroup();
    var expected = new Texture(0, 0, TextureRotation.Any);
    var eventFiredWithTexture = false;
    group.ItemAdded += added => eventFiredWithTexture = added == expected;

    // Act
    group.Add(expected);

    // Assert: the event fired and carried the texture that was added.
    Assert.IsTrue(eventFiredWithTexture);
}
/// <summary>
/// Groups the given textures by base name and containing folder, using the suffix
/// database from the settings file to map each texture to a shader property keyword.
/// </summary>
/// <param name="texturesInFolder">Textures to classify (kept as ref to match existing callers; never reassigned).</param>
/// <returns>One TextureGroup per (base name, folder) pair, textures keyed by material keyword.</returns>
private static List<TextureGroup> GetTextureGroups(ref List<Texture2D> texturesInFolder)
{
    List<TextureGroup> textureGroups = new List<TextureGroup>();
    AutoMaterialSettings suffixesFile = GetSettingsFile();

    foreach (Texture2D textureInFolder in texturesInFolder)
    {
        foreach (SuffixGroup knownSuffix in suffixesFile.suffixGroups)
        {
            // Find a known suffix on the texture name. Intentionally scans the whole
            // list (no break) so a later matching suffix wins, as in the original.
            string resultSuffix = "";
            foreach (string databaseSuffix in knownSuffix.textureSuffixes)
            {
                if (textureInFolder.name.HasSuffix(databaseSuffix))
                {
                    resultSuffix = databaseSuffix;
                }
            }

            // No recognized suffix for this suffix group: nothing to store.
            if (resultSuffix == "")
            {
                continue;
            }

            string textureName = textureInFolder.name.NameWithoutSuffix(resultSuffix);
            string textureFolderPath = Path.GetDirectoryName(AssetDatabase.GetAssetPath(textureInFolder));

            // Try to add the texture to an existing group with the same base name and folder.
            bool wasStored = false;
            foreach (TextureGroup texGroup in textureGroups)
            {
                if (texGroup.Name == textureName && texGroup.Path == textureFolderPath)
                {
                    // (quick fix) In case there are two textures with the same name:
                    // the first texture encountered for a keyword wins.
                    if (!texGroup.textures.ContainsKey(knownSuffix.materialKeyword))
                    {
                        texGroup.textures.Add(knownSuffix.materialKeyword, textureInFolder);
                    }
                    wasStored = true;
                    break; // Groups are unique per (name, path); no need to keep scanning.
                }
            }

            if (!wasStored)
            {
                TextureGroup newGroup = new TextureGroup(
                    new Dictionary<string, Texture2D>() { { knownSuffix.materialKeyword, textureInFolder } },
                    textureName,
                    textureFolderPath);
                textureGroups.Add(newGroup);
            }
        }
    }

    return textureGroups;
}
/// <summary>
/// Sets up a receiver bound to the given screen and its material, with all decoding
/// and playback state reset to not-yet-connected defaults.
/// </summary>
public KinectReceiver(Material azureKinectScreenMaterial, AzureKinectScreen azureKinectScreen)
{
    this.azureKinectScreenMaterial = azureKinectScreenMaterial;
    this.azureKinectScreen = azureKinectScreen;

    // Connection state: no socket yet, not stopped.
    udpSocket = null;
    receiverStopped = false;

    // Video state: native texture group is reset; decoders are created later.
    textureGroup = new TextureGroup(Plugin.texture_group_reset());
    videoMessageQueue = new ConcurrentQueue<Tuple<int, VideoSenderMessageData>>();
    videoMessages = new Dictionary<int, VideoSenderMessageData>();
    lastVideoFrameId = -1;
    colorDecoder = null;
    depthDecoder = null;
    preapared = false; // NOTE(review): field name looks like a typo of "prepared" — declared elsewhere, kept as-is.

    // Audio state: ring buffer sized for twice the target latency, in floats.
    ringBuffer = new RingBuffer((int)(KH_LATENCY_SECONDS * 2 * KH_BYTES_PER_SECOND / sizeof(float)));
    audioDecoder = new AudioDecoder(KH_SAMPLE_RATE, KH_CHANNEL_COUNT);
    lastAudioFrameId = -1;

    frameStopWatch = Stopwatch.StartNew();
}
/// <summary>
/// Creates the native texture group for this session, sizes it from the init packet's
/// depth dimensions, and constructs the color and depth decoders.
/// </summary>
public TextureGroupUpdater(Material azureKinectScreenMaterial, InitSenderPacketData initPacketData, int sessionId, IPEndPoint endPoint)
{
    this.azureKinectScreenMaterial = azureKinectScreenMaterial;

    // Frame-tracking state starts out empty / not yet prepared.
    lastVideoFrameId = -1;
    prepared = false;
    videoMessages = new Dictionary<int, VideoSenderMessageData>();
    frameStopWatch = Stopwatch.StartNew();

    // The native plugin owns the textures; size them to the depth camera and
    // register the group before any frames are decoded.
    textureGroup = new TextureGroup(Plugin.texture_group_create());
    UnityEngine.Debug.Log($"textureGroup id: {textureGroup.GetId()}");
    textureGroup.SetWidth(initPacketData.depthWidth);
    textureGroup.SetHeight(initPacketData.depthHeight);
    PluginHelper.InitTextureGroup(textureGroup.GetId());

    colorDecoder = new Vp8Decoder();
    depthDecoder = new TrvlDecoder(initPacketData.depthWidth * initPacketData.depthHeight);

    this.sessionId = sessionId;
    this.endPoint = endPoint;
}
/// <summary>
/// Fills in missing textures on an existing material from the group's textures.
/// If the group has an emission map that the material lacks, the material asset is
/// deleted and recreated via CreateMaterial so the emissive template is used.
/// </summary>
/// <param name="mat">The existing material to complete.</param>
/// <param name="group">Texture group with textures keyed by shader property name.</param>
/// <param name="setSelected">Forwarded to CreateMaterial when the material is recreated.</param>
static void CompleteMaterial(Material mat, TextureGroup group, bool setSelected)
{
    // If we have an emission texture, create a new material.
    foreach (KeyValuePair <string, Texture2D> texture in group.textures)
    {
        if (texture.Key == "_EmissionMap" && mat.GetTexture(texture.Key) == null)
        {
            Log(mat.name + " was overriden to set an emission texture.");
            // NOTE(review): backslash separator — Unity asset paths normally use
            // forward slashes; confirm DeleteAsset accepts this on all platforms.
            string pathToOverride = group.Path + "\\" + mat.name + ".mat";
            AssetDatabase.DeleteAsset(pathToOverride);
            CreateMaterial(group, pathToOverride, setSelected);
            return; // Recreated material already has every texture assigned.
        }
    }
    // Assign textures where the material has none.
    foreach (KeyValuePair <string, Texture2D> texture in group.textures)
    {
        if (mat.GetTexture(texture.Key) == null)
        {
            mat.SetTexture(texture.Key, texture.Value);
        }
    }
}
/// <summary>
/// Per-frame driver: connects native plugin textures to the screen material once
/// available, consumes init packets to size textures and build decoders, then decodes
/// the newest renderable run of frame messages and hands the result to the render thread.
/// </summary>
void Update()
{
    // Space key resets the scene to be placed in front of the camera.
    if (Input.GetKeyDown(KeyCode.Space))
    {
        ResetView();
    }

    // Sends virtual keyboards strokes to the TextMeshes for the IP address and the port.
    AbsorbInput();

    // If texture is not created, create and assign them to quads.
    if (textureGroup == null)
    {
        // Check whether the native plugin has Direct3D textures that
        // can be connected to Unity textures.
        if (Plugin.texture_group_get_y_texture_view().ToInt64() != 0)
        {
            // TextureGroup includes Y, U, V, and a depth texture.
            textureGroup = new TextureGroup(Plugin.texture_group_get_width(), Plugin.texture_group_get_height());
            azureKinectScreenMaterial.SetTexture("_YTex", textureGroup.YTexture);
            azureKinectScreenMaterial.SetTexture("_UTex", textureGroup.UTexture);
            azureKinectScreenMaterial.SetTexture("_VTex", textureGroup.VTexture);
            azureKinectScreenMaterial.SetTexture("_DepthTex", textureGroup.DepthTexture);
            print("textureGroup intialized");
        }
    }

    if (receiver == null)
    {
        return;
    }

    // Drain init packets: each carries the sender's depth-camera calibration, which
    // sizes the native textures and constructs fresh color/depth decoders.
    while (initPacketQueue.TryDequeue(out byte[] packet))
    {
        int cursor = 0;
        //int sessionId = BitConverter.ToInt32(packet, cursor);
        cursor += 4;
        //var packetType = packet[cursor];
        cursor += 1;

        var calibration = ManagerHelper.ReadAzureKinectCalibrationFromMessage(packet, cursor);

        Plugin.texture_group_set_width(calibration.DepthCamera.Width);
        Plugin.texture_group_set_height(calibration.DepthCamera.Height);
        PluginHelper.InitTextureGroup();

        colorDecoder = new Vp8Decoder();
        depthDecoder = new TrvlDecoder(calibration.DepthCamera.Width * calibration.DepthCamera.Height);

        azureKinectScreen.Setup(calibration);
    }

    // Collect newly arrived frame messages and keep them ordered by frame id.
    while (frameMessageQueue.TryDequeue(out FrameMessage frameMessage))
    {
        frameMessages.Add(frameMessage);
    }
    frameMessages.Sort((x, y) => x.FrameId.CompareTo(y.FrameId));

    if (frameMessages.Count == 0)
    {
        return;
    }

    int? beginIndex = null;
    // If there is a key frame, use the most recent one.
    for (int i = frameMessages.Count - 1; i >= 0; --i)
    {
        if (frameMessages[i].Keyframe)
        {
            beginIndex = i;
            break;
        }
    }

    // When there is no key frame, go through all the frames if the first
    // FrameMessage is the one right after the previously rendered one.
    if (!beginIndex.HasValue)
    {
        if (frameMessages[0].FrameId == lastFrameId + 1)
        {
            beginIndex = 0;
        }
        else
        {
            // Wait for more frames if there is way to render without glitches.
            return;
        }
    }

    // ffmpegFrame and trvlFrame are guaranteed to be non-null
    // since the existence of beginIndex's value.
    FFmpegFrame ffmpegFrame = null;
    TrvlFrame trvlFrame = null;
    TimeSpan packetCollectionTime;

    var decoderStopWatch = Stopwatch.StartNew();
    // Decode every frame from the chosen start; only the last decoded frame's
    // pointers are kept for rendering below.
    for (int i = beginIndex.Value; i < frameMessages.Count; ++i)
    {
        var frameMessage = frameMessages[i];
        lastFrameId = frameMessage.FrameId;
        packetCollectionTime = frameMessage.PacketCollectionTime;

        var colorEncoderFrame = frameMessage.GetColorEncoderFrame();
        var depthEncoderFrame = frameMessage.GetDepthEncoderFrame();

        // Copy into unmanaged memory since the bytes are handed to native decoders.
        IntPtr colorEncoderFrameBytes = Marshal.AllocHGlobal(colorEncoderFrame.Length);
        Marshal.Copy(colorEncoderFrame, 0, colorEncoderFrameBytes, colorEncoderFrame.Length);
        ffmpegFrame = colorDecoder.Decode(colorEncoderFrameBytes, colorEncoderFrame.Length);
        Marshal.FreeHGlobal(colorEncoderFrameBytes);

        IntPtr depthEncoderFrameBytes = Marshal.AllocHGlobal(depthEncoderFrame.Length);
        Marshal.Copy(depthEncoderFrame, 0, depthEncoderFrameBytes, depthEncoderFrame.Length);
        trvlFrame = depthDecoder.Decode(depthEncoderFrameBytes, frameMessage.Keyframe);
        Marshal.FreeHGlobal(depthEncoderFrameBytes);
    }

    decoderStopWatch.Stop();
    var decoderTime = decoderStopWatch.Elapsed;
    frameStopWatch.Stop();
    var frameTime = frameStopWatch.Elapsed;
    frameStopWatch = Stopwatch.StartNew();

    print($"id: {lastFrameId}, packet collection time: {packetCollectionTime.TotalMilliseconds}," +
        $"decoder time: {decoderTime.TotalMilliseconds}, frame time: {frameTime.TotalMilliseconds}");

    // Report per-frame timing back to the sender, then reset the packet counter.
    receiver.Send(lastFrameId, (float)packetCollectionTime.TotalMilliseconds, (float)decoderTime.TotalMilliseconds,
        (float)frameTime.TotalMilliseconds, summaryPacketCount);
    summaryPacketCount = 0;

    // Invokes a function to be called in a render thread.
    if (textureGroup != null)
    {
        Plugin.texture_group_set_ffmpeg_frame(ffmpegFrame.Ptr);
        Plugin.texture_group_set_depth_pixels(trvlFrame.Ptr);
        PluginHelper.UpdateTextureGroup();
    }

    frameMessages = new List <FrameMessage>();
}
/// <summary>
/// Per-frame driver: connects native plugin textures to the screen material once
/// available, then receives one message from the sender and handles it — either an
/// intrinsics message (sets up the screen) or a frame message (decodes VP8 color and
/// RVL depth in the native plugin and triggers a render-thread texture update).
/// </summary>
void Update()
{
    // Space key resets the scene to be placed in front of the camera.
    if (Input.GetKeyDown(KeyCode.Space))
    {
        ResetView();
    }

    // Sends virtual keyboards strokes to the TextMeshes for the IP address and the port.
    AbsorbInput();

    // If texture is not created, create and assign them to quads.
    if (!textureCreated)
    {
        // Check whether the native plugin has Direct3D textures that
        // can be connected to Unity textures.
        if (Plugin.texture_group_get_y_texture_view().ToInt64() == 0)
        {
            return;
        }

        // TextureGroup includes Y, U, V, and a depth texture.
        var textureGroup = new TextureGroup();
        screenMaterial.SetTexture("_YTex", textureGroup.YTexture);
        screenMaterial.SetTexture("_UTex", textureGroup.UTexture);
        screenMaterial.SetTexture("_VTex", textureGroup.VTexture);
        screenMaterial.SetTexture("_DepthTex", textureGroup.DepthTexture);

        textureCreated = true;
    }

    // Do not continue if there is no Receiever connected to a Sender.
    if (receiver == null)
    {
        return;
    }

    // Try receiving a message. A failed receive drops the connection.
    byte[] message;
    try
    {
        message = receiver.Receive();
    }
    catch (Exception e)
    {
        Debug.Log(e.Message);
        receiver = null;
        return;
    }

    // Continue only if there is a message.
    if (message == null)
    {
        return;
    }

    // Prepare the ScreenRenderer with the received KinectIntrinsics.
    if (message[0] == 0)
    {
        var kinectScreen = CreateKinectScreenFromIntrinsicsMessage(message);
        screenRenderer.SetKinectScreen(kinectScreen);
    }
    // When a Kinect frame got received.
    else if (message[0] == 1)
    {
        int cursor = 1;
        int frameId = BitConverter.ToInt32(message, cursor);
        cursor += 4;

        // Notice the Sender that the frame was received through the Receiver.
        receiver.Send(frameId);

        int vp8FrameSize = BitConverter.ToInt32(message, cursor);
        cursor += 4;

        // Marshal.AllocHGlobal, Marshal.Copy, and Marshal.FreeHGlobal are like
        // malloc, memcpy, and free of C.
        // This is required since vp8FrameBytes gets sent to a Vp8Decoder
        // inside the native plugin.
        IntPtr vp8FrameBytes = Marshal.AllocHGlobal(vp8FrameSize);
        Marshal.Copy(message, cursor, vp8FrameBytes, vp8FrameSize);
        var ffmpegFrame = decoder.Decode(vp8FrameBytes, vp8FrameSize);
        Plugin.texture_group_set_ffmpeg_frame(ffmpegFrame.Ptr);
        Marshal.FreeHGlobal(vp8FrameBytes);
        cursor += vp8FrameSize;

        int rvlFrameSize = BitConverter.ToInt32(message, cursor);
        cursor += 4;

        // Marshal.AllocHGlobal, Marshal.Copy, and Marshal.FreeHGlobal are like
        // malloc, memcpy, and free of C.
        // This is required since rvlFrameBytes gets sent to the native plugin.
        IntPtr rvlFrameBytes = Marshal.AllocHGlobal(rvlFrameSize);
        Marshal.Copy(message, cursor, rvlFrameBytes, rvlFrameSize);
        Plugin.texture_group_set_rvl_frame(rvlFrameBytes, rvlFrameSize);
        Marshal.FreeHGlobal(rvlFrameBytes);

        // Periodic progress log (every 100th frame) shown on screen as well.
        if (frameId % 100 == 0)
        {
            string logString = string.Format("Received frame {0} (vp8FrameSize: {1}, rvlFrameSize: {2}).", frameId, vp8FrameSize, rvlFrameSize);
            Debug.Log(logString);
            statusText.text = logString;
        }

        // Invokes a function to be called in a render thread.
        PluginHelper.UpdateTextureGroup();
    }
}
/// <summary>
/// Removes the given texture group (matched by reference/equality, as before)
/// from the list and renumbers the remaining groups.
/// </summary>
/// <param name="textureGroup">The group to remove; if absent, only renumbering runs.</param>
public void Delete(TextureGroup textureGroup)
{
    // Index-based search avoids mutating the list while enumerating it —
    // the original foreach+Remove only worked because of the immediate break.
    for (int i = 0; i < Textures.Count; i++)
    {
        if (Textures[i] == textureGroup)
        {
            Textures.RemoveAt(i);
            break;
        }
    }
    AssignNumbers();
}
/// <summary>
/// Validates adjacent texture groups in Textures: consecutive groups must not
/// share the same texture set and their intervals must not overlap.
/// </summary>
/// <returns>True when valid; false with ErrorMessage set on the first violation.</returns>
public bool Validate()
{
    TextureGroup current = new TextureGroup();
    foreach (TextureGroup tg in Textures)
    {
        // The first group (Index 0) only seeds the comparison baseline.
        if (tg.Index == 0)
        {
            current = tg;
            continue;
        }
        if (!tg.TwoTextures && !current.TwoTextures && tg.Texture1 == current.Texture1)
        {
            ErrorMessage = current.Index + " " + tg.Index + " have same textures";
            return false;
        }
        if (tg.TwoTextures && current.TwoTextures && tg.Texture1 == current.Texture1 && tg.Texture2 == current.Texture2)
        {
            ErrorMessage = current.Index + " " + tg.Index + " have same textures";
            return false;
        }
        if (current.EndPoint > tg.StartPoint)
        {
            // NOTE(review): message kept byte-identical (including the "incorret" typo)
            // in case anything matches on it.
            ErrorMessage = current.Index + " " + tg.Index + " have incorret intervals";
            // Bug fix: previously this fell through and Validate() returned true
            // despite the overlapping interval; fail like the other checks do.
            return false;
        }
        current = tg;
    }
    return true;
}
/// <summary>
/// Per-frame driver for the 2D receiver demo: connects native plugin textures to the
/// debug quads once available, then receives one message — intrinsics are only logged
/// (no 3D screen here), while frame messages are decoded (VP8 color, RVL depth) in the
/// native plugin and pushed to the textures on the render thread.
/// </summary>
void Update()
{
    // If texture is not created, create and assign them to quads.
    if (!textureCreated)
    {
        // Check whether the native plugin has Direct3D textures that
        // can be connected to Unity textures.
        if (Plugin.texture_group_get_y_texture_view().ToInt64() == 0)
        {
            return;
        }

        // TextureGroup includes Y, U, V, and a depth texture.
        var textureGroup = new TextureGroup();
        yQuad.material.mainTexture = textureGroup.YTexture;
        uQuad.material.mainTexture = textureGroup.UTexture;
        vQuad.material.mainTexture = textureGroup.VTexture;

        colorQuad.material.SetTexture("_YTex", textureGroup.YTexture);
        colorQuad.material.SetTexture("_UTex", textureGroup.UTexture);
        colorQuad.material.SetTexture("_VTex", textureGroup.VTexture);

        depthQuad.material.mainTexture = textureGroup.DepthTexture;

        textureCreated = true;
    }

    // Do not continue if there is no Receiever connected to a Sender.
    if (receiver == null)
    {
        return;
    }

    // Try receiving a message. A failed receive drops the connection.
    byte[] message;
    try
    {
        message = receiver.Receive();
    }
    catch (Exception e)
    {
        Debug.Log(e.Message);
        receiver = null;
        return;
    }

    // Continue only if there is a message.
    if (message == null)
    {
        return;
    }

    // ReceiverDemo renders in 2D, therefore, no need to use intrinsics.
    if (message[0] == 0)
    {
        Debug.Log("Received intrinsics.");
    }
    // When a Kinect frame got received.
    else if (message[0] == 1)
    {
        int cursor = 1;
        int frameId = BitConverter.ToInt32(message, cursor);
        cursor += 4;

        // Notice the Sender that the frame was received through the Receiver.
        receiver.Send(frameId);

        int vp8FrameSize = BitConverter.ToInt32(message, cursor);
        cursor += 4;

        // Marshal.AllocHGlobal, Marshal.Copy, and Marshal.FreeHGlobal are like
        // malloc, memcpy, and free of C.
        // This is required since vp8FrameBytes gets sent to a Vp8Decoder
        // inside the native plugin.
        IntPtr vp8FrameBytes = Marshal.AllocHGlobal(vp8FrameSize);
        Marshal.Copy(message, cursor, vp8FrameBytes, vp8FrameSize);
        var ffmpegFrame = decoder.Decode(vp8FrameBytes, vp8FrameSize);
        Plugin.texture_group_set_ffmpeg_frame(ffmpegFrame.Ptr);
        Marshal.FreeHGlobal(vp8FrameBytes);
        cursor += vp8FrameSize;

        int rvlFrameSize = BitConverter.ToInt32(message, cursor);
        cursor += 4;

        // Marshal.AllocHGlobal, Marshal.Copy, and Marshal.FreeHGlobal are like
        // malloc, memcpy, and free of C.
        // This is required since rvlFrameBytes gets sent to the native plugin.
        IntPtr rvlFrameBytes = Marshal.AllocHGlobal(rvlFrameSize);
        Marshal.Copy(message, cursor, rvlFrameBytes, rvlFrameSize);
        Plugin.texture_group_set_rvl_frame(rvlFrameBytes, rvlFrameSize);
        Marshal.FreeHGlobal(rvlFrameBytes);

        // Periodic progress log (every 100th frame).
        if (frameId % 100 == 0)
        {
            Debug.LogFormat("Received frame {0} (vp8FrameSize: {1}, rvlFrameSize: {2}).", frameId, vp8FrameSize, rvlFrameSize);
        }

        // Invokes a function to be called in a render thread.
        PluginHelper.UpdateTextureGroup();
    }
}