/// <summary>
/// Parameterized scenario test: loads the glTF model at <paramref name="modelPath"/>, positions the
/// main camera from the model's manifest, renders one frame offscreen, saves the capture as
/// "*_ACTUAL.png", and compares it pixel-by-pixel against the checked-in "*_EXPECTED.png" using the
/// configured color tolerance.
/// </summary>
/// <param name="modelPath">Model file path relative to GLTF_ASSETS_PATH (supplied by NUnit's ValueSource).</param>
public IEnumerator GLTFScenarios([ValueSource("ModelFilePaths")] string modelPath)
{
    // Update the camera position from the model's manifest.
    Manifest.Model modelManifest = modelManifests[Path.GetFileNameWithoutExtension(modelPath)];
    Manifest.Camera cam = modelManifest.Camera;
    Camera.main.transform.position = new Vector3(cam.Translation[0], cam.Translation[1], cam.Translation[2]);

    // Load the corresponding model.
    gltfComponent.GLTFUri = GLTF_ASSETS_PATH + modelPath;
    yield return (gltfComponent.Load().AsCoroutine());

    // Wait one frame for rendering to complete.
    yield return (null);

    // Capture a render of the model into an offscreen render texture.
    Camera mainCamera = Camera.main;
    RenderTexture previousActive = RenderTexture.active;
    RenderTexture rt = new RenderTexture(IMAGE_SIZE, IMAGE_SIZE, 24);
    Texture2D actualContents = new Texture2D(IMAGE_SIZE, IMAGE_SIZE, TextureFormat.RGB24, false);
    Color[] actualPixels;
    try
    {
        mainCamera.targetTexture = rt;
        mainCamera.Render();
        RenderTexture.active = rt;
        actualContents.ReadPixels(new Rect(0, 0, IMAGE_SIZE, IMAGE_SIZE), 0, 0);
        actualPixels = actualContents.GetPixels();
    }
    finally
    {
        // FIX: restore the global render state and release the render texture's GPU memory.
        // Previously the active RenderTexture / camera target were left dangling and the
        // RenderTexture leaked on every parameterized test case.
        RenderTexture.active = previousActive;
        mainCamera.targetTexture = null;
        rt.Release();
    }

    // Save the captured contents to a file.
    byte[] pngActualfile = actualContents.EncodeToPNG();
    string outputpath = Path.GetDirectoryName(modelPath);
    string outputfullpath = GLTF_SCENARIO_OUTPUT_PATH + outputpath;
    Directory.CreateDirectory(outputfullpath);
    string filename = Path.GetFileNameWithoutExtension(modelPath);
    // FIX: use Path.Combine, consistent with expectedFilePath below (was outputfullpath + "/" + ...).
    string actualFilePath = Path.Combine(outputfullpath, filename + "_ACTUAL.png");
    File.WriteAllBytes(actualFilePath, pngActualfile);

    // Read the expected image
    // NOTE: Ideally this would use the expected image from Path.Combine(GLTF_ASSETS_PATH, modelManifest.SampleImageName), but the
    // current rendered image is not close enough to use this as a source of truth, so until they can be closer aligned we instead
    // generate an 'expected' image ourselves.
    string expectedFilePath = Path.Combine(outputfullpath, filename + "_EXPECTED.png");
#if ENABLE_THIS_BLOCK_TO_CREATE_EXPECTED_FILES
    File.WriteAllBytes(expectedFilePath, pngActualfile);
#endif
    if (!File.Exists(expectedFilePath))
    {
        Assert.Fail("Could not find expected image to compare against: '" + expectedFilePath + "'");
    }
    byte[] expectedFileContents = File.ReadAllBytes(expectedFilePath);
    Texture2D expectedContents = new Texture2D(IMAGE_SIZE, IMAGE_SIZE, TextureFormat.RGB24, false);
    expectedContents.LoadImage(expectedFileContents);
    Color[] expectedPixels = expectedContents.GetPixels();

    // Compare the capture against the expected image.
    Assert.AreEqual(expectedPixels.Length, actualPixels.Length);
    string errormessage = "\r\nImage does not match expected within configured tolerance.\r\nExpectedPath: " + expectedFilePath + "\r\n ActualPath: " + actualFilePath;
    for (int i = 0; i < expectedPixels.Length; i++)
    {
        // FIX: errormessage was previously built but never passed to the assertion, so a
        // failing comparison gave no pointer to the expected/actual image files.
        Assert.That(actualPixels[i], Is.EqualTo(expectedPixels[i]).Using(ColorEqualityComparer), errormessage);
    }
}
/// <summary>
/// Model group exercising the component types used to serialize a skin's JOINTS_0 and
/// WEIGHTS_0 accessors. Each generated model is the same animated folding-plane skin
/// ("skinA"); only the joint/weight component types differ between models.
/// </summary>
/// <param name="imageList">Image list shared across model groups (unused here beyond the base-class contract — TODO confirm against the base class, which is outside this view).</param>
public Animation_SkinType(List <string> imageList)
{
    // There are no common properties in this model group that are reported in the readme.

    // Builds one model variant: creates the skinA folding plane, animates it, then lets the
    // caller mutate the mesh primitive's joint/weight component types and record the
    // readme properties describing that variant.
    Model CreateModel(Action <List <Property>, Runtime.MeshPrimitive> setProperties)
    {
        var properties = new List <Property>();
        List <Node> nodes = Nodes.CreateFoldingPlaneSkin("skinA", 2, 3);
        var animations = new List <Animation>();
        Runtime.MeshPrimitive meshPrimitive = nodes[0].Mesh.MeshPrimitives.First();
        var closeCameraTranslation = new Manifest.Camera(new Vector3(0.5f, 0.0f, 0.6f));

        // Apply the common properties to the gltf.
        AnimateWithRotation(animations, nodes);

        // Apply the properties that are specific to this gltf.
        setProperties(properties, meshPrimitive);

        // Create the gltf object.
        return(new Model
        {
            Properties = properties,
            GLTF = CreateGLTF(() => new Scene { Nodes = nodes }, animations: animations),
            Animated = true,
            Camera = closeCameraTranslation,
        });
    }

    // Adds a single rotation channel targeting the first child of the joint root
    // (nodes[1]), pitching it 90 degrees at t=1s and back to identity at t=0s/2s
    // with linear interpolation.
    void AnimateWithRotation(List <Animation> animations, List <Node> nodes)
    {
        animations.Add(
            new Animation
            {
                Channels = new List <AnimationChannel>
                {
                    new AnimationChannel
                    {
                        Target = new AnimationChannelTarget
                        {
                            Node = nodes[1].Children.First(),
                            Path = AnimationChannelTargetPath.Rotation,
                        }
                    }
                }
            }
        );
        // The sampler is attached after construction; keyframes at 0s, 1s, 2s.
        animations[0].Channels.First().Sampler = new AnimationSampler
        {
            Interpolation = AnimationSamplerInterpolation.Linear,
            Input = Data.Create(new[]
            {
                0.0f,
                1.0f,
                2.0f,
            }),
            Output = Data.Create(new[]
            {
                Quaternion.Identity,
                Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(90.0f), 0.0f),
                Quaternion.Identity,
            }),
        };
    }

    // Helpers that switch the serialized component type of the JOINTS_0 accessor.
    void JointsAreByte(Runtime.MeshPrimitive meshPrimitive)
    {
        meshPrimitive.Joints.OutputType = DataType.UnsignedByte;
    }

    void JointsAreShort(Runtime.MeshPrimitive meshPrimitive)
    {
        meshPrimitive.Joints.OutputType = DataType.UnsignedShort;
    }

    // Helpers that switch the serialized component type of the WEIGHTS_0 accessor.
    void WeightsAreFloat(Runtime.MeshPrimitive meshPrimitive)
    {
        meshPrimitive.Weights.OutputType = DataType.Float;
    }

    void WeightsAreByte(Runtime.MeshPrimitive meshPrimitive)
    {
        meshPrimitive.Weights.OutputType = DataType.NormalizedUnsignedByte;
    }

    void WeightsAreShort(Runtime.MeshPrimitive meshPrimitive)
    {
        meshPrimitive.Weights.OutputType = DataType.NormalizedUnsignedShort;
    }

    // One model per joint/weight component-type combination covered by this group:
    // Byte/Float, Byte/Byte, Byte/Short, Short/Float.
    Models = new List <Model>
    {
        CreateModel((properties, meshPrimitive) =>
        {
            JointsAreByte(meshPrimitive);
            WeightsAreFloat(meshPrimitive);
            properties.Add(new Property(PropertyName.JointsComponentType, "Byte"));
            properties.Add(new Property(PropertyName.WeightComponentType, "Float"));
        }),
        CreateModel((properties, meshPrimitive) =>
        {
            JointsAreByte(meshPrimitive);
            WeightsAreByte(meshPrimitive);
            properties.Add(new Property(PropertyName.JointsComponentType, "Byte"));
            properties.Add(new Property(PropertyName.WeightComponentType, "Byte"));
        }),
        CreateModel((properties, meshPrimitive) =>
        {
            JointsAreByte(meshPrimitive);
            WeightsAreShort(meshPrimitive);
            properties.Add(new Property(PropertyName.JointsComponentType, "Byte"));
            properties.Add(new Property(PropertyName.WeightComponentType, "Short"));
        }),
        CreateModel((properties, meshPrimitive) =>
        {
            JointsAreShort(meshPrimitive);
            WeightsAreFloat(meshPrimitive);
            properties.Add(new Property(PropertyName.JointsComponentType, "Short"));
            properties.Add(new Property(PropertyName.WeightComponentType, "Float"));
        }),
    };
    GenerateUsedPropertiesList();
}
/// <summary>
/// Model group exercising glTF object reuse ("instancing"): each model shares one kind of
/// object between two consumers — images, samplers, accessors (POSITION / indices /
/// animation input / animation output), materials, meshes, skins, joints, and
/// inverseBindMatrices — while varying one property so the sharing is visible.
/// </summary>
/// <param name="imageList">Image list from which the three base-color textures are registered.</param>
public Instancing(List <string> imageList)
{
    var baseColorImageA = UseTexture(imageList, "BaseColor_A");
    var baseColorImageB = UseTexture(imageList, "BaseColor_B");
    var baseColorImageCube = UseTexture(imageList, "BaseColor_Cube");
    var distantCamera = new Manifest.Camera(new Vector3(0.0f, 0.0f, 2.7f));

    // There are no common properties in this model group that are reported in the readme.

    // Wraps an image in a fresh Texture/TextureInfo pair.
    TextureInfo CreateTextureInfo(Image source)
    {
        return(new TextureInfo
        {
            Texture = new Texture
            {
                Source = source,
            }
        });
    }

    // Builds one model: the caller populates properties/nodes/animations, then the camera
    // is set per-model. If the callback added no animations, the model is marked static.
    Model CreateModel(Action <List <Property>, List <Node>, List <Animation> > setProperties, Action <Model> setCamera)
    {
        var properties = new List <Property>();
        var animations = new List <Animation>();
        var animated = true;
        var nodes = new List <Node>();

        // Apply the properties that are specific to this gltf.
        setProperties(properties, nodes, animations);

        // If no animations are used, null out that property.
        if (!animations.Any())
        {
            animations = null;
            animated = false;
        }

        // Create the gltf object.
        var model = new Model
        {
            Properties = properties,
            GLTF = CreateGLTF(() => new Scene { Nodes = nodes }, animations: animations),
            Animated = animated,
        };
        setCamera(model);
        return(model);
    }

    // Shared keyframe data for the animated (cube) models. Both input sets span 0s-4s;
    // the "curve" variant spaces the same keyframes non-uniformly.
    var samplerInputLinear = Data.Create(new[] { 0.0f, 1.0f, 2.0f, 3.0f, 4.0f, });
    var samplerInputCurve = Data.Create(new[] { 0.0f, 0.5f, 1.0f, 2.0f, 4.0f, });
    var samplerOutput = Data.Create(new[]
    {
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(90.0f), 0.0f),
        Quaternion.Identity,
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(-90.0f), 0.0f),
        Quaternion.Identity,
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(90.0f), 0.0f),
    });
    // Same rocking motion as samplerOutput with the pitch signs flipped.
    var samplerOutputReverse = Data.Create(new[]
    {
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(-90.0f), 0.0f),
        Quaternion.Identity,
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(90.0f), 0.0f),
        Quaternion.Identity,
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(-90.0f), 0.0f),
    });

    // Builds a metallic-roughness material around the given base color texture.
    Runtime.Material CreateMaterial(TextureInfo textureInfo)
    {
        return(new Runtime.Material
        {
            PbrMetallicRoughness = new PbrMetallicRoughness
            {
                BaseColorTexture = textureInfo
            }
        });
    }

    // Places all primitives on one node, nudging two primitives apart on X first.
    void AddMeshPrimitivesToSingleNode(List <Node> nodes, List <Runtime.MeshPrimitive> meshPrimitives)
    {
        // If there are multiple mesh primitives, offset their position so they don't overlap.
        if (meshPrimitives.Count > 1)
        {
            meshPrimitives[0].Positions.Values = meshPrimitives[0].Positions.Values.Select(position => { return(new Vector3(position.X - 0.6f, position.Y, position.Z)); });
            meshPrimitives[1].Positions.Values = meshPrimitives[1].Positions.Values.Select(position => { return(new Vector3(position.X + 0.6f, position.Y, position.Z)); });
        }
        nodes.Add(
            new Node
            {
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = meshPrimitives
                }
            }
        );
    }

    // Places the two primitives on two separate nodes, translated apart on X.
    void AddMeshPrimitivesToMultipleNodes(List <Node> nodes, Runtime.MeshPrimitive meshPrimitives0, Runtime.MeshPrimitive meshPrimitives1)
    {
        nodes.AddRange(new[]
        {
            new Node
            {
                Translation = new Vector3(-0.6f, 0.0f, 0.0f),
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = new List <Runtime.MeshPrimitive> { meshPrimitives0 }
                }
            },
            new Node
            {
                Translation = new Vector3(0.6f, 0.0f, 0.0f),
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = new List <Runtime.MeshPrimitive> { meshPrimitives1 }
                }
            }
        }
        );
    }

    // Adds one animation with two rotation channels, one per node, using the given samplers.
    // NOTE(review): the samplerInstanced parameter is never read in this body — presumably a
    // leftover from an earlier design; confirm before removing.
    void AddAnimation(List <Animation> animations, List <Node> nodes, AnimationSampler sampler0, AnimationSampler sampler1, bool samplerInstanced)
    {
        animations.Add(new Animation
        {
            Channels = new List <AnimationChannel>
            {
                new AnimationChannel
                {
                    Target = new AnimationChannelTarget
                    {
                        Node = nodes[0],
                        Path = AnimationChannelTargetPath.Rotation,
                    },
                    Sampler = sampler0
                },
                new AnimationChannel
                {
                    Target = new AnimationChannelTarget
                    {
                        Node = nodes[1],
                        Path = AnimationChannelTargetPath.Rotation,
                    },
                    Sampler = sampler1
                },
            }
        });
    }

    Models = new List <Model>
    {
        // Two textures sharing one source image, with different sampler wrap modes.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitives = new List <Runtime.MeshPrimitive>
            {
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false),
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false)
            };
            foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
            {
                // This non-standard set of texture coordinates is larger than the texture but not an exact multiple, so it allows texture sampler settings to be visible.
                meshPrimitive.TexCoords0 = Data.Create <Vector2> (
                    new[]
                    {
                        new Vector2(1.3f, 1.3f),
                        new Vector2(-0.3f, 1.3f),
                        new Vector2(-0.3f, -0.3f),
                        new Vector2(1.3f, -0.3f),
                    }
                );
            }
            meshPrimitives[0].Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitives[1].Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitives[0].Material.PbrMetallicRoughness.BaseColorTexture.Texture.Sampler = new Sampler { WrapT = SamplerWrap.ClampToEdge, WrapS = SamplerWrap.ClampToEdge };
            meshPrimitives[1].Material.PbrMetallicRoughness.BaseColorTexture.Texture.Sampler = new Sampler { WrapT = SamplerWrap.MirroredRepeat, WrapS = SamplerWrap.MirroredRepeat };
            AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
            properties.Add(new Property(PropertyName.Description, "Two textures using the same image as their source."));
            properties.Add(new Property(PropertyName.Difference, "The texture sampler `WrapT` and `WrapS` are set to `CLAMP_TO_EDGE` for one and `MIRRORED_REPEAT` for the other."));
        }, (model) => { model.Camera = distantCamera; }),
        // Two textures sharing one sampler instance, with different source images.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitives = new List <Runtime.MeshPrimitive>
            {
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false),
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false)
            };
            meshPrimitives[0].Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitives[1].Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            var sampler = new Sampler { WrapT = SamplerWrap.ClampToEdge, WrapS = SamplerWrap.ClampToEdge };
            foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
            {
                meshPrimitive.Material.PbrMetallicRoughness.BaseColorTexture.Texture.Sampler = sampler;
                // This non-standard set of texture coordinates is larger than the texture but not an exact multiple, so it allows texture sampler settings to be visible.
                meshPrimitive.TexCoords0 = Data.Create <Vector2> (
                    new[]
                    {
                        new Vector2(1.3f, 1.3f),
                        new Vector2(-0.3f, 1.3f),
                        new Vector2(-0.3f, -0.3f),
                        new Vector2(1.3f, -0.3f),
                    }
                );
            }
            AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
            properties.Add(new Property(PropertyName.Description, "Two textures using the same sampler."));
            properties.Add(new Property(PropertyName.Difference, "One texture uses image A while the other uses image B."));
        }, (model) => { model.Camera = distantCamera; }),
        // Two materials sharing one texture, with different baseColorFactor values.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitives = new List <Runtime.MeshPrimitive>
            {
                MeshPrimitive.CreateSinglePlane(),
                MeshPrimitive.CreateSinglePlane()
            };
            var texture = CreateTextureInfo(baseColorImageA);
            foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
            {
                meshPrimitive.Material = CreateMaterial(texture);
            }
            meshPrimitives[0].Material.PbrMetallicRoughness.BaseColorTexture = meshPrimitives[1].Material.PbrMetallicRoughness.BaseColorTexture;
            meshPrimitives[1].Material.PbrMetallicRoughness.BaseColorFactor = new Vector4(0.5f, 0.5f, 1.0f, 1.0f);
            AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
            properties.Add(new Property(PropertyName.Description, "Two textures using the same source image."));
            properties.Add(new Property(PropertyName.Difference, "One material does not have a baseColorFactor and the other has a blue baseColorFactor."));
        }, (model) => { model.Camera = distantCamera; }),
        // Two primitives sharing one material, with different texture coordinates.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitives = new List <Runtime.MeshPrimitive>
            {
                MeshPrimitive.CreateSinglePlane(),
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false)
            };
            var material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
            {
                meshPrimitive.Material = material;
            }
            // One of the primitives has a 'zoomed in' texture coordinate set.
            meshPrimitives[1].TexCoords0 = Data.Create <Vector2> (
                new[]
                {
                    new Vector2(0.9f, 0.9f),
                    new Vector2(0.1f, 0.9f),
                    new Vector2(0.1f, 0.1f),
                    new Vector2(0.9f, 0.1f),
                }
            );
            AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
            properties.Add(new Property(PropertyName.Description, "Two primitives using the same material."));
            properties.Add(new Property(PropertyName.Difference, "One primitive has texture coordinates that displays all of texture A, while the other primitive has textures coordinates that don't display the border."));
        }, (model) => { model.Camera = distantCamera; }),
        // Two primitives sharing one POSITION accessor.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateSinglePlane();
            var meshPrimitive1 = MeshPrimitive.CreateSinglePlane();
            meshPrimitive0.Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitive1.Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            meshPrimitive0.Positions = meshPrimitive1.Positions;
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            properties.Add(new Property(PropertyName.Description, "Two primitives using the same accessors for the `POSITION` attribute."));
            properties.Add(new Property(PropertyName.Difference, "One primitive uses texture A while the other primitive uses texture B."));
        }, (model) => { model.Camera = distantCamera; }),
        // Two primitives sharing one indices accessor.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateSinglePlane(includeIndices: false);
            var meshPrimitive1 = MeshPrimitive.CreateSinglePlane();
            meshPrimitive0.Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitive1.Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            meshPrimitive0.Indices = meshPrimitive1.Indices;
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            properties.Add(new Property(PropertyName.Description, "Two primitives using the same accessors for indices."));
            properties.Add(new Property(PropertyName.Difference, "One primitive uses texture A while the other primitive uses texture B."));
        }, (model) => { model.Camera = distantCamera; }),
        // Two nodes sharing one mesh.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive = MeshPrimitive.CreateSinglePlane();
            meshPrimitive.Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive, meshPrimitive);
            nodes[1].Mesh = nodes[0].Mesh;
            properties.Add(new Property(PropertyName.Description, "Two nodes using the same mesh."));
            properties.Add(new Property(PropertyName.Difference, "The two nodes have different translations."));
        }, (model) => { model.Camera = distantCamera; }),
        // Two nodes sharing one skin.
        CreateModel((properties, nodes, animations) =>
        {
            nodes.AddRange(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3));
            nodes[0].Name = "plane0";
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            // Adds just the node containing the mesh, dropping the data for a second set of joints.
            nodes.Add(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3)[0]);
            nodes[2].Name = "plane1";
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            nodes[2].Skin = nodes[0].Skin;
            // Offsets the position of both meshes so they don't overlap.
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Positions.Values = nodes[0].Mesh.MeshPrimitives.ElementAt(0).Positions.Values.Select(position => { return(new Vector3(position.X - 0.3f, position.Y, position.Z)); });
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Positions.Values = nodes[2].Mesh.MeshPrimitives.ElementAt(0).Positions.Values.Select(position => { return(new Vector3(position.X + 0.3f, position.Y, position.Z)); });
            properties.Add(new Property(PropertyName.Description, "Two nodes using the same skin."));
            properties.Add(new Property(PropertyName.Difference, "The two mesh primitives have different `POSITION` values."));
        }, (model) => { model.Camera = null; }),
        // Two skins sharing one joints list, with different inverseBindMatrices.
        CreateModel((properties, nodes, animations) =>
        {
            nodes.AddRange(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3));
            nodes[0].Name = "plane0";
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            // Adds just the node containing the mesh, dropping the data for a second set of joints.
            nodes.Add(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3)[0]);
            nodes[2].Name = "plane1";
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            nodes[2].Skin.Joints = nodes[0].Skin.Joints;
            // Creates new inverseBindMatrices for the second skin, rotating the flap further than the default value would.
            nodes[2].Skin.InverseBindMatrices = Data.Create(new[]
            {
                nodes[2].Skin.InverseBindMatrices.Values.First(),
                Matrix4x4.Multiply(nodes[2].Skin.InverseBindMatrices.Values.ElementAt(1), Matrix4x4.CreateRotationX(FloatMath.ToRadians(-30))),
            });
            // Offsets the position of both meshes so they don't overlap.
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Positions.Values = nodes[0].Mesh.MeshPrimitives.ElementAt(0).Positions.Values.Select(position => new Vector3(position.X - 0.3f, position.Y, position.Z));
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Positions.Values = nodes[2].Mesh.MeshPrimitives.ElementAt(0).Positions.Values.Select(position => new Vector3(position.X + 0.3f, position.Y, position.Z));
            properties.Add(new Property(PropertyName.Description, "Two skins using the same joints."));
            properties.Add(new Property(PropertyName.Difference, "The skin with texture B has inverseBindMatrices that fold twice as far as the skin with texture A."));
        }, (model) => { model.Camera = null; }),
        // Two skins sharing one inverseBindMatrices accessor, with different joint translations.
        CreateModel((properties, nodes, animations) =>
        {
            nodes.AddRange(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3));
            nodes[0].Name = "plane0";
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            nodes[1].Translation = Vector3.Add((Vector3)nodes[1].Translation, new Vector3(-0.3f, 0.0f, 0.0f));
            nodes.AddRange(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3));
            nodes[2].Name = "plane1";
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            nodes[3].Translation = Vector3.Add((Vector3)nodes[3].Translation, new Vector3(0.3f, 0.0f, 0.0f));
            nodes[2].Skin.InverseBindMatrices = nodes[0].Skin.InverseBindMatrices;
            properties.Add(new Property(PropertyName.Description, "Two skins using the same inverseBindMatrices."));
            properties.Add(new Property(PropertyName.Difference, "The base joint for the two skins have different translations."));
        }, (model) => { model.Camera = null; }),
        // Two animation channels sharing one sampler.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateCube();
            var meshPrimitive1 = MeshPrimitive.CreateCube();
            var textureInfo = CreateTextureInfo(baseColorImageCube);
            meshPrimitive0.Material = CreateMaterial(textureInfo);
            meshPrimitive1.Material = CreateMaterial(textureInfo);
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            var sampler = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputLinear,
                Output = samplerOutput,
            };
            AddAnimation(animations, nodes, sampler, sampler, true);
            properties.Add(new Property(PropertyName.Description, "Two animation channels using the same sampler."));
            properties.Add(new Property(PropertyName.Difference, "The two animation channels target different nodes."));
        }, (model) => { model.Camera = distantCamera; }),
        // Two animation samplers sharing one input accessor.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateCube();
            var meshPrimitive1 = MeshPrimitive.CreateCube();
            var textureInfo = CreateTextureInfo(baseColorImageCube);
            meshPrimitive0.Material = CreateMaterial(textureInfo);
            meshPrimitive1.Material = CreateMaterial(textureInfo);
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputLinear,
                Output = samplerOutput,
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputLinear,
                Output = samplerOutputReverse,
            };
            AddAnimation(animations, nodes, sampler0, sampler1, false);
            properties.Add(new Property(PropertyName.Description, "Two animation samplers using the same input accessors."));
            properties.Add(new Property(PropertyName.Difference, "The two animation samplers have different output values."));
        }, (model) => { model.Camera = distantCamera; }),
        // Two animation samplers sharing one output accessor.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateCube();
            var meshPrimitive1 = MeshPrimitive.CreateCube();
            var textureInfo = CreateTextureInfo(baseColorImageCube);
            meshPrimitive0.Material = CreateMaterial(textureInfo);
            meshPrimitive1.Material = CreateMaterial(textureInfo);
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputLinear,
                Output = samplerOutput,
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputCurve,
                Output = samplerOutput,
            };
            AddAnimation(animations, nodes, sampler0, sampler1, false);
            properties.Add(new Property(PropertyName.Description, "Two animation samplers using the same output accessors."));
            properties.Add(new Property(PropertyName.Difference, "The two animation samplers have different input values."));
        }, (model) => { model.Camera = distantCamera; }),
        // To be implemented later. Needs to work as a type of interleaving.
        //CreateModel((properties, nodes, animations) =>
        //{
        //    var meshPrimitives = new List<Runtime.MeshPrimitive>
        //    {
        //        MeshPrimitive.CreateSinglePlane(includeTextureCoords: false),
        //        MeshPrimitive.CreateSinglePlane(includeTextureCoords: false)
        //    };
        //    meshPrimitives[0].TexCoords0 = meshPrimitives[1].TexCoords0 = MeshPrimitive.GetSinglePlaneTextureCoordSets();
        //    meshPrimitives[0].Normals = meshPrimitives[1].Normals = MeshPrimitive.GetSinglePlaneNormals();
        //    foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
        //    {
        //        meshPrimitive.BufferViewsInstanced = true;
        //        meshPrimitive.Material = CreateMaterial();
        //    }
        //    AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
        //    properties.Add(new Property(PropertyName.Description, "Two accessors using the same buffer view."));
        //}, (model) => { model.Camera = null; }),
    };
    GenerateUsedPropertiesList();
}
public Animation_Skin(List <string> imageList) { UseFigure(imageList, "skinA"); UseFigure(imageList, "skinB"); UseFigure(imageList, "skinC"); UseFigure(imageList, "skinD"); UseFigure(imageList, "skinE"); UseFigure(imageList, "skinF"); var closeCamera = new Manifest.Camera(new Vector3(0.5f, 0.0f, 0.6f)); var distantCamera = new Manifest.Camera(new Vector3(1.5f, 0.0f, 1.0f)); var skinBCamera = new Manifest.Camera(new Vector3(0.5f, 0.6f, 1.1f)); // There are no common properties in this model group that are reported in the readme. Model CreateModel(Action <List <Property>, List <Animation>, List <Node> > setProperties, Action <Model> setCamera, Action <glTFLoader.Schema.Gltf> postRuntimeChanges = null) { var properties = new List <Property>(); var nodes = new List <Node>(); var animations = new List <Animation>(); var animated = true; // There are no common properties in this model group. // Apply the properties that are specific to this gltf. setProperties(properties, animations, nodes); // If no animations are used, null out that property. if (!animations.Any()) { animations = null; animated = false; } // Create the gltf object. 
var model = new Model { Properties = properties, GLTF = CreateGLTF(() => new Scene { Nodes = nodes }, animations: animations), Animated = animated, }; if (postRuntimeChanges != null) { model.PostRuntimeChanges = postRuntimeChanges; } setCamera(model); return(model); } void AddRotationAnimationChannel(List <AnimationChannel> channelList, Node targetNode, Quaternion pitchValue, Quaternion restValue) { channelList.Add( new AnimationChannel { Target = new AnimationChannelTarget { Node = targetNode, Path = AnimationChannelTargetPath.Rotation, }, Sampler = new AnimationSampler { Interpolation = AnimationSamplerInterpolation.Linear, Input = Data.Create(new[] { 0.0f, 1.0f, 2.0f, }), Output = Data.Create(new[] { restValue, pitchValue, restValue, }), }, }); } Animation CreateFoldingAnimation(Node jointRootNode, List <AnimationChannel> channelList = null) { if (channelList == null) { channelList = new List <AnimationChannel>(); } Node nodeCheck = jointRootNode; float pitchValue = FloatMath.ToRadians(-90.0f); var nodeList = new List <Node> { jointRootNode, }; while (nodeCheck.Children != null) { foreach (var node in nodeCheck.Children) { nodeList.Add(node); } nodeCheck = nodeCheck.Children.First(); } for (var nodeIndex = 1; nodeIndex < nodeList.Count(); nodeIndex++) { float rotateValueModifier = 1.0f; if (nodeIndex == 1) { rotateValueModifier = 0.5f; } else if (nodeIndex % 2 == 0) { rotateValueModifier = -1.0f; } AddRotationAnimationChannel(channelList, nodeList[nodeIndex], Quaternion.CreateFromYawPitchRoll(0.0f, pitchValue * rotateValueModifier, 0.0f), Quaternion.Identity); } return(new Animation { Channels = channelList }); } Models = new List <Model> { CreateModel((properties, animations, nodes) => { foreach (Node node in Nodes.CreateFoldingPlaneSkin("skinA", 2, 3)) { nodes.Add(node); } properties.Add(new Property(PropertyName.Description, "`skinA`.")); }, (model) => { model.Camera = closeCamera; }), CreateModel((properties, animations, nodes) => { foreach (Node node in 
Nodes.CreateFoldingPlaneSkin("skinA", 2, 3)) { nodes.Add(node); } animations.Add(CreateFoldingAnimation(nodes[1])); properties.Add(new Property(PropertyName.Description, "`skinA` where `joint1` is animating with a rotation.")); }, (model) => { model.Camera = closeCamera; }), CreateModel((properties, animations, nodes) => { var tempNodeList = Nodes.CreateFoldingPlaneSkin("skinA", 2, 3); // Give the skin node a rotation tempNodeList[0].Rotation = Quaternion.CreateFromYawPitchRoll((FloatMath.Pi / 4.0f), 0.0f, 0.0f); // Create a new parent node and give it a rotation tempNodeList[0] = new Node { Name = "jointParent", Rotation = Quaternion.CreateFromYawPitchRoll((FloatMath.Pi / 4.0f), 0.0f, 0.0f), Children = new List <Node> { tempNodeList[0] } }; foreach (Node node in tempNodeList) { nodes.Add(node); } properties.Add(new Property(PropertyName.Description, "`skinA` where the skinned node has a transform and a parent node with a transform. Both transforms should be ignored.")); }, (model) => { model.Camera = closeCamera; }), // Removed Animation_Skin_03 due to a change in the spec that disallows this situation. // Left commented out because this will likely be re-added as a negative test in the future. // CreateModel((properties, animations, nodes) => //{ // foreach (Node node in Nodes.CreateFoldingPlaneSkin("skinA", 2, 3)) // { // nodes.Add(node); // } // properties.Add(new Property(PropertyName.Description, "`skinA`. 
The skin joints are not referenced by the scene nodes.")); // }, (model) => { model.Camera = closeCamera; }, (gltf) => {gltf.Scenes.First().Nodes = new []{0,};}),

    // `skinA` with its inverse bind matrices removed; viewers must fall back to identity matrices.
    CreateModel((properties, animations, nodes) =>
    {
        foreach (Node node in Nodes.CreateFoldingPlaneSkin("skinA", 2, 3))
        {
            nodes.Add(node);
        }
        nodes[0].Skin.InverseBindMatrices = null;
        properties.Add(new Property(PropertyName.Description, "`skinA` without inverse bind matrices."));
    }, (model) => { model.Camera = closeCamera; }),

    // `skinA` with an animated joint that also carries a renderable mesh.
    CreateModel((properties, animations, nodes) =>
    {
        foreach (Node node in Nodes.CreateFoldingPlaneSkin("skinA", 2, 3))
        {
            nodes.Add(node);
        }
        animations.Add(CreateFoldingAnimation(nodes[1]));

        // Attach a node with a mesh to the end of the joint hierarchy
        Node nodeCheck = nodes[1];
        while (nodeCheck.Children != null)
        {
            nodeCheck = nodeCheck.Children.First();
        }
        nodeCheck.Children = new List<Node>
        {
            new Node
            {
                Mesh = Mesh.CreateTriangle()
            }
        };
        properties.Add(new Property(PropertyName.Description, "`skinA` where `joint1` is animated with a rotation and `joint1` has a triangle mesh attached to it."));
    }, (model) => { model.Camera = closeCamera; }),

    // Two meshes driven by one shared skin; the second mesh reuses the first's joints/weights/indices.
    CreateModel((properties, animations, nodes) =>
    {
        foreach (Node node in Nodes.CreateFoldingPlaneSkin("skinA", 2, 3))
        {
            nodes.Add(node);
        }

        // Create a set of positions for the second mesh that are offset from the first mesh.
        Runtime.MeshPrimitive originalMeshPrimitive = nodes[0].Mesh.MeshPrimitives.First();
        var offsetPositions = new List<Vector3>();
        foreach (Vector3 position in originalMeshPrimitive.Positions.Values)
        {
            var offsetPosition = position;
            offsetPosition.X += 0.6f;
            offsetPositions.Add(offsetPosition);
        }

        // Create a second mesh
        nodes.Add(new Node
        {
            Name = "plane2",
            Skin = nodes[0].Skin,
            Mesh = new Runtime.Mesh
            {
                MeshPrimitives = new[]
                {
                    new Runtime.MeshPrimitive
                    {
                        Joints = originalMeshPrimitive.Joints,
                        Weights = originalMeshPrimitive.Weights,
                        Positions = Data.Create(offsetPositions),
                        Indices = originalMeshPrimitive.Indices,
                        Material = new Runtime.Material
                        {
                            DoubleSided = true,
                            PbrMetallicRoughness = new PbrMetallicRoughness
                            {
                                BaseColorFactor = new Vector4(0.0f, 0.0f, 1.0f, 1.0f)
                            }
                        }
                    }
                }
            }
        });
        properties.Add(new Property(PropertyName.Description, "`skinA` where there are two meshes sharing a single skin."));
    }, (model) => { model.Camera = distantCamera; }),

    // `skinA` restructured so `joint1` is a root node rather than a child of `joint0`.
    CreateModel((properties, animations, nodes) =>
    {
        foreach (Node node in Nodes.CreateFoldingPlaneSkin("skinA", 2, 3))
        {
            nodes.Add(node);
        }

        // Make joint1 a root joint
        nodes.Add(nodes[1].Children.First());
        nodes[1].Children = null;

        // Compensate for no longer inheriting from joint0
        nodes[2].Rotation = Quaternion.Multiply((Quaternion)nodes[2].Rotation, (Quaternion)nodes[1].Rotation);
        nodes[2].Translation = null;
        nodes[0].Skin.InverseBindMatrices = Data.Create(new[]
        {
            nodes[0].Skin.InverseBindMatrices.Values.First(),
            Matrix4x4.Identity
        });
        properties.Add(new Property(PropertyName.Description, "`skinA` where `joint1` is a root node and not a child of `joint0`."));
    }, (model) => { model.Camera = closeCamera; }),

    // `skinB`: two skins sharing `joint1`, with `joint1` animated.
    CreateModel((properties, animations, nodes) =>
    {
        foreach (Node node in Nodes.CreatePlaneWithSkinB())
        {
            nodes.Add(node);
        }

        // Animate the joints
        Node nodeJoint0 = nodes[1];
        Node nodeJoint1 = nodeJoint0.Children.First();
        var channelList = new List<AnimationChannel>();
        float rotationValue = FloatMath.ToRadians(-15.0f);
        AddRotationAnimationChannel(channelList, nodeJoint1, Quaternion.CreateFromYawPitchRoll(0.0f, 0.0f, rotationValue), Quaternion.CreateFromYawPitchRoll(0.0f, 0.0f, 0.0f));
        animations.Add(new Animation
        {
            Channels = channelList
        });
        properties.Add(new Property(PropertyName.Description, "`skinB` which is made up of two skins. `joint1` is referenced by both skins and is animating with a rotation."));
    }, (model) => { model.Camera = skinBCamera; }),

    // `skinC`: every non-root joint gets a -10 degree local rotation, with matching rebuilt inverse bind matrices.
    CreateModel((properties, animations, nodes) =>
    {
        foreach (Node node in Nodes.CreateFoldingPlaneSkin("skinC", 5, 5))
        {
            nodes.Add(node);
        }

        // Rotate each joint node, except the root which already has the desired rotation
        Node nodeCheck = nodes[1].Children.First();
        float rotationRadian = FloatMath.ToRadians(-10.0f);
        Quaternion rotation = Quaternion.CreateFromYawPitchRoll(0.0f, rotationRadian, 0.0f);
        nodeCheck.Rotation = rotation;
        while (nodeCheck.Children != null)
        {
            foreach (var node in nodeCheck.Children)
            {
                node.Rotation = rotation;
            }
            nodeCheck = nodeCheck.Children.First();
        }

        // Rebuild the inverseBindMatrix for each joint (except the root) to work with the new rotation
        var inverseBindMatrixList = nodes[0].Skin.InverseBindMatrices.Values.Select((value, index) =>
        {
            Matrix4x4.Invert(Matrix4x4.CreateRotationX(rotationRadian * (index + 1)), out Matrix4x4 invertedRotation);
            return Matrix4x4.Multiply(value, invertedRotation);
        });
        // BUGFIX: the rebuilt matrices were computed but never applied to the skin, leaving stale
        // inverse bind matrices that do not match the rotated joints. Assign them back.
        nodes[0].Skin.InverseBindMatrices = Data.Create(inverseBindMatrixList.ToArray());
        properties.Add(new Property(PropertyName.Description, "`skinC` where all of the joints have a local rotation of -10 degrees, except the root which is rotated -90 degrees."));
    }, (model) => { model.Camera = distantCamera; }),

    // `skinD`: animated joints with a non-joint transform node in the hierarchy, marked by a triangle mesh.
    CreateModel((properties, animations, nodes) =>
    {
        foreach (Node node in Nodes.CreateFoldingPlaneSkin("skinD", 5, 6, 3, false))
        {
            nodes.Add(node);
        }
        animations.Add(CreateFoldingAnimation(nodes[1]));

        // Remove animation for the transform node
        animations[0].Channels = new List<AnimationChannel>
        {
            animations[0].Channels.First(),
            animations[0].Channels.ElementAt(1),
            animations[0].Channels.ElementAt(3),
        };

        // Add the mesh to the transform node
        nodes[1].Children.First().Children.First().Children.First().Mesh = Mesh.CreateTriangle();
        properties.Add(new Property(PropertyName.Description, "`skinD` where each joint is animating with a rotation. There is a transform node in the joint hierarchy that is not a joint. That node has a mesh attached to it in order to show its location."));
    }, (model) => { model.Camera = distantCamera; }),

    // `skinE`: used as-is from the node factory.
    CreateModel((properties, animations, nodes) =>
    {
        foreach (Node node in Nodes.CreatePlaneWithSkinE())
        {
            nodes.Add(node);
        }
        properties.Add(new Property(PropertyName.Description, "`skinE`."));
    }, (model) => { model.Camera = distantCamera; }),

    // Removing this model for now, since no viewer currently supports models that have >4 jointweights per vertex.
    //CreateModel((properties, animations, nodes) =>
    //{
    //    foreach (Runtime.Node node in Nodes.CreateFoldingPlaneSkin("skinF", 8, 9, vertexVerticalSpacingMultiplier: 0.5f))
    //    {
    //        nodes.Add(node);
    //    }
    //    // Rotate each joint node, except the root which already has the desired rotation
    //    Runtime.Node nodeCheck = nodes[1].Children.First();
    //    float rotationRadian = FloatMath.ConvertDegreesToRadians(-10.0f);
    //    Quaternion rotationQuaternion = Quaternion.CreateFromYawPitchRoll(0.0f, rotationRadian, 0.0f);
    //    nodeCheck.Rotation = rotationQuaternion;
    //    while (nodeCheck.Children != null)
    //    {
    //        foreach (Runtime.Node node in nodeCheck.Children)
    //        {
    //            node.Rotation = rotationQuaternion;
    //        }
    //        nodeCheck = nodeCheck.Children.First();
    //    }
    //    // Rebuild the inverseBindMatrix for each joint (except the root) to work with the new rotation
    //    var skinJointList = (List<Runtime.SkinJoint>)nodes[0].Skin.SkinJoints;
    //    for (var skinJointIndex = 1; skinJointIndex < skinJointList.Count(); skinJointIndex++)
    //    {
    //        Matrix4x4 translationInverseBindMatrix = skinJointList.ElementAt(skinJointIndex).InverseBindMatrix;
    //        Matrix4x4.Invert(Matrix4x4.CreateRotationX(rotationRadian * (skinJointIndex + 1)), out Matrix4x4 invertedRotation);
    //        skinJointList.ElementAt(skinJointIndex).InverseBindMatrix = Matrix4x4.Multiply(translationInverseBindMatrix, invertedRotation);
    //    }
    //    // Rebuild weights to include every joint instead of just the ones with a weight > 0
    //    var weightList = (List<List<Runtime.JointWeight>>)nodes[0].Mesh.MeshPrimitives.First().VertexJointWeights;
    //    for (var weightIndex = 0; weightIndex < weightList.Count(); weightIndex++)
    //    {
    //        var jointWeight = new List<Runtime.JointWeight>();
    //        for (var skinJointIndex = 0; skinJointIndex < skinJointList.Count; skinJointIndex++)
    //        {
    //            int weightToUse = 0;
    //            // Set the weight to 1 if the skinJoint is at the same level as the vertex.
    //            // Or Set the weight to 1 if the vertex is further out than the last skinjoint and the last skinjoint is being set.
    //            if (skinJointIndex == (weightIndex / 2) || (((weightIndex / 2) > skinJointList.Count - 1) && (skinJointIndex == skinJointList.Count - 1)))
    //            {
    //                weightToUse = 1;
    //            }
    //            jointWeight.Add(new Runtime.JointWeight
    //            {
    //                Joint = skinJointList[skinJointIndex],
    //                Weight = weightToUse,
    //            });
    //        }
    //        weightList[weightIndex] = jointWeight;
    //    }
    //    properties.Add(new Property(PropertyName.Description, "`skinF`. Each vertex has weights for more than four joints."));
    //}, (model) => { model.Camera = distantCamera; }),

    // Two independent skins that share a single mesh, offset so both are visible.
    CreateModel((properties, animations, nodes) =>
    {
        var skinA1 = Nodes.CreateFoldingPlaneSkin("skinA", 2, 3);
        var skinA2 = Nodes.CreateFoldingPlaneSkin("skinA", 2, 3);

        // Set the same mesh on both nodes.
        skinA2[0].Mesh = skinA1[0].Mesh;

        // Offset one of the models so they aren't overlapping.
        Vector3 translation = skinA2[1].Translation.Value;
        skinA2[1].Translation = new Vector3(translation.X + 0.6f, translation.Y, translation.Z);

        foreach (Node node in skinA1)
        {
            nodes.Add(node);
        }
        foreach (Node node in skinA2)
        {
            nodes.Add(node);
        }
        properties.Add(new Property(PropertyName.Description, "Two instances of `skinA` sharing a mesh but with separate skins."));
    }, (model) => { model.Camera = distantCamera; }),
};

GenerateUsedPropertiesList();
}