/// <summary>
/// Builds a glTF animation that drives the morph-target weights of node 0.
/// Writes the keyframe times and weight values into <paramref name="bufferStream"/>,
/// then wires the two most recently created accessors into a single LINEAR sampler.
/// </summary>
/// <param name="animation">The source animation to convert.</param>
/// <param name="weightCount">Number of morph-target weights per keyframe.</param>
/// <param name="bufferStream">Destination stream for the animation buffer data.</param>
/// <returns>A glTF animation with one sampler and one weights channel.</returns>
private glTFLoader.Schema.Animation CreateAnimation(AoMEngineLibrary.Graphics.Model.Animation animation, int weightCount, Stream bufferStream)
{
    // Write the keyframe times first; the accessor appended last is the sampler input.
    CreateKeysBuffer(animation, bufferStream);
    int inputAccessorIndex = accessors.Count - 1;

    // Then write the weight values; the accessor appended last is the sampler output.
    CreateWeightsBuffer(animation, weightCount, bufferStream);
    int outputAccessorIndex = accessors.Count - 1;

    var sampler = new AnimationSampler
    {
        Interpolation = AnimationSampler.InterpolationEnum.LINEAR,
        Input = inputAccessorIndex,
        Output = outputAccessorIndex,
    };

    // Single channel: sampler 0 of this animation targets the weights of node 0.
    var channel = new AnimationChannel
    {
        Sampler = 0,
        Target = new AnimationChannelTarget
        {
            Node = 0,
            Path = AnimationChannelTarget.PathEnum.weights,
        },
    };

    return new glTFLoader.Schema.Animation
    {
        Samplers = new[] { sampler },
        Channels = new[] { channel },
    };
}
/// <summary>
/// Re-poses the owned mesh so it fits the current skeleton: builds a single-frame
/// clip from the current skeleton's bone transforms, overrides each mapped bone with
/// the target skeleton's transform, then bakes the sampled frame into every vertex.
/// </summary>
void FitMeshToSkeleton()
{
    // Build a one-frame clip posing every bone at the current skeleton's own transform.
    AnimationClip animationClip = new AnimationClip();
    animationClip.DynamicFrames.Add(new AnimationClip.KeyFrame());
    for (int i = 0; i < _currentSkeleton.BoneCount; i++)
    {
        animationClip.DynamicFrames[0].Rotation.Add(_currentSkeleton.Rotation[i]);
        animationClip.DynamicFrames[0].Position.Add(_currentSkeleton.Translation[i]);
        animationClip.RotationMappings.Add(new Filetypes.RigidModel.AnimationFile.AnimationBoneMapping(i));
        animationClip.TranslationMappings.Add(new Filetypes.RigidModel.AnimationFile.AnimationBoneMapping(i));
    }

    // Any bone with a mapping entry takes the target skeleton's transform instead.
    // (The original also looked up the parent bone's mapping here but never used it —
    // that dead code has been removed.)
    for (int i = 0; i < _currentSkeleton.BoneCount; i++)
    {
        var mappedIndex = _mapping.FirstOrDefault(x => x.OriginalValue == i);
        if (mappedIndex != null)
        {
            animationClip.DynamicFrames[0].Position[i] = _targetSkeleton.Translation[mappedIndex.NewValue];
            animationClip.DynamicFrames[0].Rotation[i] = _targetSkeleton.Rotation[mappedIndex.NewValue];
        }
    }

    _currentSkeleton.RebuildSkeletonMatrix();

    // Sample the clip at frame 0 and bake the per-vertex transforms into the geometry.
    MeshAnimationHelper meshAnimationHelper = new MeshAnimationHelper(_meshOwner, Matrix.Identity);
    var animationFrame = AnimationSampler.Sample(0, 0, _currentSkeleton, animationClip);
    _currentSkeleton.SetAnimationFrame(animationFrame);

    int vertexCount = _meshOwner.Geometry.VertexCount();
    for (int i = 0; i < vertexCount; i++)
    {
        var vertTransform = meshAnimationHelper.GetVertexTransform(animationFrame, i);
        _meshOwner.Geometry.TransformVertex(i, vertTransform);
    }
    _meshOwner.Geometry.RebuildVertexBuffer();
}
// Builds the "Instancing" model group: each model demonstrates two glTF objects
// sharing a single resource (image, sampler, accessor, mesh, skin, joints,
// inverseBindMatrices, or animation sampler), with a readme property describing
// what is shared and what differs.
public Instancing(List<string> imageList)
{
    var baseColorImageA = UseTexture(imageList, "BaseColor_A");
    var baseColorImageB = UseTexture(imageList, "BaseColor_B");
    var baseColorImageCube = UseTexture(imageList, "BaseColor_Cube");
    var distantCamera = new Manifest.Camera(new Vector3(0.0f, 0.0f, 2.7f));

    // There are no common properties in this model group that are reported in the readme.

    // Wraps an image in a Texture/TextureInfo pair.
    TextureInfo CreateTextureInfo(Image source)
    {
        return (new TextureInfo
        {
            Texture = new Texture
            {
                Source = source,
            }
        });
    }

    // Creates one model; per-model specifics are supplied via the two callbacks.
    Model CreateModel(Action<List<Property>, List<Node>, List<Animation>> setProperties, Action<Model> setCamera)
    {
        var properties = new List<Property>();
        var animations = new List<Animation>();
        var animated = true;
        var nodes = new List<Node>();

        // Apply the properties that are specific to this gltf.
        setProperties(properties, nodes, animations);

        // If no animations are used, null out that property.
        if (!animations.Any())
        {
            animations = null;
            animated = false;
        }

        // Create the gltf object.
        var model = new Model
        {
            Properties = properties,
            GLTF = CreateGLTF(() => new Scene { Nodes = nodes }, animations: animations),
            Animated = animated,
        };
        setCamera(model);

        return (model);
    }

    // Shared animation inputs/outputs used by the animation-sampler models below.
    var samplerInputLinear = Data.Create(new[]
    {
        0.0f,
        1.0f,
        2.0f,
        3.0f,
        4.0f,
    });
    var samplerInputCurve = Data.Create(new[]
    {
        0.0f,
        0.5f,
        1.0f,
        2.0f,
        4.0f,
    });
    var samplerOutput = Data.Create(new[]
    {
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(90.0f), 0.0f),
        Quaternion.Identity,
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(-90.0f), 0.0f),
        Quaternion.Identity,
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(90.0f), 0.0f),
    });
    // Same rotations as samplerOutput but with the pitch sign flipped.
    var samplerOutputReverse = Data.Create(new[]
    {
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(-90.0f), 0.0f),
        Quaternion.Identity,
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(90.0f), 0.0f),
        Quaternion.Identity,
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(-90.0f), 0.0f),
    });

    // Wraps a TextureInfo in a metallic-roughness material.
    Runtime.Material CreateMaterial(TextureInfo textureInfo)
    {
        return (new Runtime.Material
        {
            PbrMetallicRoughness = new PbrMetallicRoughness
            {
                BaseColorTexture = textureInfo
            }
        });
    }

    // Puts all given primitives into a single node's mesh.
    void AddMeshPrimitivesToSingleNode(List<Node> nodes, List<Runtime.MeshPrimitive> meshPrimitives)
    {
        // If there are multiple mesh primitives, offset their position so they don't overlap.
        if (meshPrimitives.Count > 1)
        {
            meshPrimitives[0].Positions.Values = meshPrimitives[0].Positions.Values.Select(position => { return (new Vector3(position.X - 0.6f, position.Y, position.Z)); });
            meshPrimitives[1].Positions.Values = meshPrimitives[1].Positions.Values.Select(position => { return (new Vector3(position.X + 0.6f, position.Y, position.Z)); });
        }
        nodes.Add(
            new Node
            {
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = meshPrimitives
                }
            }
        );
    }

    // Puts each primitive into its own node, offset left/right so they don't overlap.
    void AddMeshPrimitivesToMultipleNodes(List<Node> nodes, Runtime.MeshPrimitive meshPrimitives0, Runtime.MeshPrimitive meshPrimitives1)
    {
        nodes.AddRange(new[]
        {
            new Node
            {
                Translation = new Vector3(-0.6f, 0.0f, 0.0f),
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = new List<Runtime.MeshPrimitive> { meshPrimitives0 }
                }
            },
            new Node
            {
                Translation = new Vector3(0.6f, 0.0f, 0.0f),
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = new List<Runtime.MeshPrimitive> { meshPrimitives1 }
                }
            }
        }
        );
    }

    // Adds one rotation animation with two channels, one per node.
    // NOTE(review): samplerInstanced is not read in this body — presumably kept
    // for call-site documentation of whether sampler0 == sampler1; confirm.
    void AddAnimation(List<Animation> animations, List<Node> nodes, AnimationSampler sampler0, AnimationSampler sampler1, bool samplerInstanced)
    {
        animations.Add(new Animation
        {
            Channels = new List<AnimationChannel>
            {
                new AnimationChannel
                {
                    Target = new AnimationChannelTarget
                    {
                        Node = nodes[0],
                        Path = AnimationChannelTargetPath.Rotation,
                    },
                    Sampler = sampler0
                },
                new AnimationChannel
                {
                    Target = new AnimationChannelTarget
                    {
                        Node = nodes[1],
                        Path = AnimationChannelTargetPath.Rotation,
                    },
                    Sampler = sampler1
                },
            }
        });
    }

    Models = new List<Model>
    {
        // Model: two textures sharing one source image, different samplers.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitives = new List<Runtime.MeshPrimitive>
            {
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false),
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false)
            };
            foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
            {
                // This non-standard set of texture coordinates is larger than the texture but not an exact multiple, so it allows texture sampler settings to be visible.
                meshPrimitive.TexCoords0 = Data.Create<Vector2>
                (
                    new[]
                    {
                        new Vector2(1.3f, 1.3f),
                        new Vector2(-0.3f, 1.3f),
                        new Vector2(-0.3f, -0.3f),
                        new Vector2(1.3f, -0.3f),
                    }
                );
            }
            meshPrimitives[0].Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitives[1].Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitives[0].Material.PbrMetallicRoughness.BaseColorTexture.Texture.Sampler = new Sampler { WrapT = SamplerWrap.ClampToEdge, WrapS = SamplerWrap.ClampToEdge };
            meshPrimitives[1].Material.PbrMetallicRoughness.BaseColorTexture.Texture.Sampler = new Sampler { WrapT = SamplerWrap.MirroredRepeat, WrapS = SamplerWrap.MirroredRepeat };
            AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
            properties.Add(new Property(PropertyName.Description, "Two textures using the same image as their source."));
            properties.Add(new Property(PropertyName.Difference, "The texture sampler `WrapT` and `WrapS` are set to `CLAMP_TO_EDGE` for one and `MIRRORED_REPEAT` for the other."));
        }, (model) => { model.Camera = distantCamera; }),

        // Model: two textures sharing one sampler, different images.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitives = new List<Runtime.MeshPrimitive>
            {
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false),
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false)
            };
            meshPrimitives[0].Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitives[1].Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            var sampler = new Sampler
            {
                WrapT = SamplerWrap.ClampToEdge,
                WrapS = SamplerWrap.ClampToEdge
            };
            foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
            {
                meshPrimitive.Material.PbrMetallicRoughness.BaseColorTexture.Texture.Sampler = sampler;
                // This non-standard set of texture coordinates is larger than the texture but not an exact multiple, so it allows texture sampler settings to be visible.
                meshPrimitive.TexCoords0 = Data.Create<Vector2>
                (
                    new[]
                    {
                        new Vector2(1.3f, 1.3f),
                        new Vector2(-0.3f, 1.3f),
                        new Vector2(-0.3f, -0.3f),
                        new Vector2(1.3f, -0.3f),
                    }
                );
            }
            AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
            properties.Add(new Property(PropertyName.Description, "Two textures using the same sampler."));
            properties.Add(new Property(PropertyName.Difference, "One texture uses image A while the other uses image B."));
        }, (model) => { model.Camera = distantCamera; }),

        // Model: two materials sharing one source image, different baseColorFactor.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitives = new List<Runtime.MeshPrimitive>
            {
                MeshPrimitive.CreateSinglePlane(),
                MeshPrimitive.CreateSinglePlane()
            };
            var texture = CreateTextureInfo(baseColorImageA);
            foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
            {
                meshPrimitive.Material = CreateMaterial(texture);
            }
            meshPrimitives[0].Material.PbrMetallicRoughness.BaseColorTexture = meshPrimitives[1].Material.PbrMetallicRoughness.BaseColorTexture;
            meshPrimitives[1].Material.PbrMetallicRoughness.BaseColorFactor = new Vector4(0.5f, 0.5f, 1.0f, 1.0f);
            AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
            properties.Add(new Property(PropertyName.Description, "Two textures using the same source image."));
            properties.Add(new Property(PropertyName.Difference, "One material does not have a baseColorFactor and the other has a blue baseColorFactor."));
        }, (model) => { model.Camera = distantCamera; }),

        // Model: two primitives sharing one material, different texture coordinates.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitives = new List<Runtime.MeshPrimitive>
            {
                MeshPrimitive.CreateSinglePlane(),
                MeshPrimitive.CreateSinglePlane(includeTextureCoords: false)
            };
            var material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
            {
                meshPrimitive.Material = material;
            }
            // One of the primitives has a 'zoomed in' texture coordinate set.
            meshPrimitives[1].TexCoords0 = Data.Create<Vector2>
            (
                new[]
                {
                    new Vector2(0.9f, 0.9f),
                    new Vector2(0.1f, 0.9f),
                    new Vector2(0.1f, 0.1f),
                    new Vector2(0.9f, 0.1f),
                }
            );
            AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
            properties.Add(new Property(PropertyName.Description, "Two primitives using the same material."));
            properties.Add(new Property(PropertyName.Difference, "One primitive has texture coordinates that displays all of texture A, while the other primitive has textures coordinates that don't display the border."));
        }, (model) => { model.Camera = distantCamera; }),

        // Model: two primitives sharing the POSITION accessor.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateSinglePlane();
            var meshPrimitive1 = MeshPrimitive.CreateSinglePlane();
            meshPrimitive0.Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitive1.Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            meshPrimitive0.Positions = meshPrimitive1.Positions;
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            properties.Add(new Property(PropertyName.Description, "Two primitives using the same accessors for the `POSITION` attribute."));
            properties.Add(new Property(PropertyName.Difference, "One primitive uses texture A while the other primitive uses texture B."));
        }, (model) => { model.Camera = distantCamera; }),

        // Model: two primitives sharing the indices accessor.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateSinglePlane(includeIndices: false);
            var meshPrimitive1 = MeshPrimitive.CreateSinglePlane();
            meshPrimitive0.Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            meshPrimitive1.Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            meshPrimitive0.Indices = meshPrimitive1.Indices;
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            properties.Add(new Property(PropertyName.Description, "Two primitives using the same accessors for indices."));
            properties.Add(new Property(PropertyName.Difference, "One primitive uses texture A while the other primitive uses texture B."));
        }, (model) => { model.Camera = distantCamera; }),

        // Model: two nodes sharing one mesh.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive = MeshPrimitive.CreateSinglePlane();
            meshPrimitive.Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive, meshPrimitive);
            nodes[1].Mesh = nodes[0].Mesh;
            properties.Add(new Property(PropertyName.Description, "Two nodes using the same mesh."));
            properties.Add(new Property(PropertyName.Difference, "The two nodes have different translations."));
        }, (model) => { model.Camera = distantCamera; }),

        // Model: two nodes sharing one skin.
        CreateModel((properties, nodes, animations) =>
        {
            nodes.AddRange(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3));
            nodes[0].Name = "plane0";
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            // Adds just the node containing the mesh, dropping the data for a second set of joints.
            nodes.Add(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3)[0]);
            nodes[2].Name = "plane1";
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            nodes[2].Skin = nodes[0].Skin;
            // Offsets the position of both meshes so they don't overlap.
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Positions.Values = nodes[0].Mesh.MeshPrimitives.ElementAt(0).Positions.Values.Select(position => { return (new Vector3(position.X - 0.3f, position.Y, position.Z)); });
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Positions.Values = nodes[2].Mesh.MeshPrimitives.ElementAt(0).Positions.Values.Select(position => { return (new Vector3(position.X + 0.3f, position.Y, position.Z)); });
            properties.Add(new Property(PropertyName.Description, "Two nodes using the same skin."));
            properties.Add(new Property(PropertyName.Difference, "The two mesh primitives have different `POSITION` values."));
        }, (model) => { model.Camera = null; }),

        // Model: two skins sharing one set of joints.
        CreateModel((properties, nodes, animations) =>
        {
            nodes.AddRange(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3));
            nodes[0].Name = "plane0";
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            // Adds just the node containing the mesh, dropping the data for a second set of joints.
            nodes.Add(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3)[0]);
            nodes[2].Name = "plane1";
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            nodes[2].Skin.Joints = nodes[0].Skin.Joints;
            // Creates new inverseBindMatrices for the second skin, rotating the flap further than the default value would.
            nodes[2].Skin.InverseBindMatrices = Data.Create(new[]
            {
                nodes[2].Skin.InverseBindMatrices.Values.First(),
                Matrix4x4.Multiply(nodes[2].Skin.InverseBindMatrices.Values.ElementAt(1), Matrix4x4.CreateRotationX(FloatMath.ToRadians(-30))),
            });
            // Offsets the position of both meshes so they don't overlap.
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Positions.Values = nodes[0].Mesh.MeshPrimitives.ElementAt(0).Positions.Values.Select(position => new Vector3(position.X - 0.3f, position.Y, position.Z));
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Positions.Values = nodes[2].Mesh.MeshPrimitives.ElementAt(0).Positions.Values.Select(position => new Vector3(position.X + 0.3f, position.Y, position.Z));
            properties.Add(new Property(PropertyName.Description, "Two skins using the same joints."));
            properties.Add(new Property(PropertyName.Difference, "The skin with texture B has inverseBindMatrices that fold twice as far as the skin with texture A."));
        }, (model) => { model.Camera = null; }),

        // Model: two skins sharing one inverseBindMatrices accessor.
        CreateModel((properties, nodes, animations) =>
        {
            nodes.AddRange(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3));
            nodes[0].Name = "plane0";
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageA));
            nodes[0].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            nodes[1].Translation = Vector3.Add((Vector3)nodes[1].Translation, new Vector3(-0.3f, 0.0f, 0.0f));
            nodes.AddRange(Nodes.CreateFoldingPlaneSkin("skinA", 2, 3));
            nodes[2].Name = "plane1";
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).Material = CreateMaterial(CreateTextureInfo(baseColorImageB));
            nodes[2].Mesh.MeshPrimitives.ElementAt(0).TexCoords0 = Data.Create(Nodes.GetSkinATexCoords());
            nodes[3].Translation = Vector3.Add((Vector3)nodes[3].Translation, new Vector3(0.3f, 0.0f, 0.0f));
            nodes[2].Skin.InverseBindMatrices = nodes[0].Skin.InverseBindMatrices;
            properties.Add(new Property(PropertyName.Description, "Two skins using the same inverseBindMatrices."));
            properties.Add(new Property(PropertyName.Difference, "The base joint for the two skins have different translations."));
        }, (model) => { model.Camera = null; }),

        // Model: two animation channels sharing one sampler.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateCube();
            var meshPrimitive1 = MeshPrimitive.CreateCube();
            var textureInfo = CreateTextureInfo(baseColorImageCube);
            meshPrimitive0.Material = CreateMaterial(textureInfo);
            meshPrimitive1.Material = CreateMaterial(textureInfo);
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            var sampler = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputLinear,
                Output = samplerOutput,
            };
            AddAnimation(animations, nodes, sampler, sampler, true);
            properties.Add(new Property(PropertyName.Description, "Two animation channels using the same sampler."));
            properties.Add(new Property(PropertyName.Difference, "The two animation channels target different nodes."));
        }, (model) => { model.Camera = distantCamera; }),

        // Model: two animation samplers sharing one input accessor.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateCube();
            var meshPrimitive1 = MeshPrimitive.CreateCube();
            var textureInfo = CreateTextureInfo(baseColorImageCube);
            meshPrimitive0.Material = CreateMaterial(textureInfo);
            meshPrimitive1.Material = CreateMaterial(textureInfo);
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputLinear,
                Output = samplerOutput,
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputLinear,
                Output = samplerOutputReverse,
            };
            AddAnimation(animations, nodes, sampler0, sampler1, false);
            properties.Add(new Property(PropertyName.Description, "Two animation samplers using the same input accessors."));
            properties.Add(new Property(PropertyName.Difference, "The two animation samplers have different output values."));
        }, (model) => { model.Camera = distantCamera; }),

        // Model: two animation samplers sharing one output accessor.
        CreateModel((properties, nodes, animations) =>
        {
            var meshPrimitive0 = MeshPrimitive.CreateCube();
            var meshPrimitive1 = MeshPrimitive.CreateCube();
            var textureInfo = CreateTextureInfo(baseColorImageCube);
            meshPrimitive0.Material = CreateMaterial(textureInfo);
            meshPrimitive1.Material = CreateMaterial(textureInfo);
            AddMeshPrimitivesToMultipleNodes(nodes, meshPrimitive0, meshPrimitive1);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputLinear,
                Output = samplerOutput,
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = samplerInputCurve,
                Output = samplerOutput,
            };
            AddAnimation(animations, nodes, sampler0, sampler1, false);
            properties.Add(new Property(PropertyName.Description, "Two animation samplers using the same output accessors."));
            properties.Add(new Property(PropertyName.Difference, "The two animation samplers have different input values."));
        }, (model) => { model.Camera = distantCamera; }),

        // To be implemented later. Needs to work as a type of interleaving.
        //CreateModel((properties, nodes, animations) =>
        //{
        //    var meshPrimitives = new List<Runtime.MeshPrimitive>
        //    {
        //        MeshPrimitive.CreateSinglePlane(includeTextureCoords: false),
        //        MeshPrimitive.CreateSinglePlane(includeTextureCoords: false)
        //    };
        //    meshPrimitives[0].TexCoords0 = meshPrimitives[1].TexCoords0 = MeshPrimitive.GetSinglePlaneTextureCoordSets();
        //    meshPrimitives[0].Normals = meshPrimitives[1].Normals = MeshPrimitive.GetSinglePlaneNormals();
        //    foreach (Runtime.MeshPrimitive meshPrimitive in meshPrimitives)
        //    {
        //        meshPrimitive.BufferViewsInstanced = true;
        //        meshPrimitive.Material = CreateMaterial();
        //    }
        //    AddMeshPrimitivesToSingleNode(nodes, meshPrimitives);
        //    properties.Add(new Property(PropertyName.Description, "Two accessors using the same buffer view."));
        //}, (model) => { model.Camera = null; }),
    };
    GenerateUsedPropertiesList();
}
// Exports a Unity AnimationClip into the glTF document (_root), sampling every
// curve at the clip's frame rate and writing the raw keyframe data into
// _bufferWriter. Transform curves become standard glTF translation/rotation/scale
// channels; everything else becomes a "custom" channel described via the
// animation extension. The write order matters: each output accessor's ByteOffset
// is taken from the writer's position immediately before its values are written.
private void _exportAnimation(UnityEngine.AnimationClip animationClip)
{
    // NOTE(review): the local frameCount declaration below is commented out, yet
    // frameCount is read throughout this method — presumably it is a field set
    // elsewhere on this class; confirm it is assigned before this method runs.
    // var frameCount = (int)Math.Floor(animationClip.length * animationClip.frameRate) + 1;
    var curveBinds = UnityEditor.AnimationUtility.GetCurveBindings(animationClip);
    // Curves already consumed as part of a grouped property (e.g. position x/y/z).
    var ignoreCurves = new List<UnityEditor.EditorCurveBinding>();
    var glTFAnimation = new GLTF.Schema.Animation()
    {
        Name = animationClip.name,
        Channels = new List<AnimationChannel>(),
        Samplers = new List<AnimationSampler>(),
        Extensions = new Dictionary<string, IExtension>()
        {
            {
                AnimationExtensionFactory.EXTENSION_NAME,
                new AnimationExtension()
                {
                    frameRate = animationClip.frameRate,
                    clips = new List<AnimationClip>()
                    {
                        new AnimationClip()
                        {
                            name = animationClip.name,
                            playTimes = _getPlayTimes(animationClip),
                            position = 0.0f,
                            duration = (float)Math.Round(animationClip.length, 6),
                        }
                    }
                }
            },
        },
    };
    var ext = glTFAnimation.Extensions[AnimationExtensionFactory.EXTENSION_NAME] as AnimationExtension;
    this._root.Animations.Add(glTFAnimation);

    // Input: one SCALAR accessor of keyframe times, shared by every sampler below.
    var inputAccessor = new Accessor();
    inputAccessor.Count = frameCount;
    inputAccessor.Type = GLTFAccessorAttributeType.SCALAR;
    inputAccessor.ComponentType = GLTFComponentType.Float;
    inputAccessor.BufferView = new BufferViewId { Id = 0, Root = _root };
    this._root.Accessors.Add(inputAccessor);

    // Write input.
    for (var i = 0; i < frameCount; ++i)
    {
        //_bufferWriter.Write(Math.Round(Math.Min(animationClip.length * i / (frameCount - 1), animationClip.length), 6)); // TODO
        _bufferWriter.Write(i / animationClip.frameRate);
    }

    // Sampled _MainTex_ST.y values, needed later to derive the .w channel.
    var MainTex_STy = new List<float>();
    foreach (var curveBind in curveBinds)
    {
        // Curve has been parsed.
        if (ignoreCurves.Contains(curveBind))
        {
            continue;
        }

        // No target.
        var animationTarget = _target.Find(curveBind.path);
        if (animationTarget == null)
        {
            continue;
        }

        // Create node.
        var nodeIndex = _animationTargets.IndexOf(animationTarget);
        if (nodeIndex < 0)
        {
            _animationTargets.Add(animationTarget);
            nodeIndex = _root.Nodes.Count;
            _root.Nodes.Add(new Node()
            {
                Name = _target == animationTarget ? "__root__" : animationTarget.name,
            });
            // Direct children of the export root are also registered in the scene.
            if (animationTarget.transform.parent == _target)
            {
                _root.Scenes[0].Nodes.Add(
                    new NodeId()
                    {
                        Id = nodeIndex,
                        Root = _root,
                    }
                );
            }
        }

        // Output accessor: shares the input's buffer view; its data starts at the
        // writer's current position, so values must be written before moving on.
        var outputAccessor = new Accessor();
        outputAccessor.Count = frameCount;
        outputAccessor.ComponentType = GLTFComponentType.Float;
        outputAccessor.BufferView = inputAccessor.BufferView;
        outputAccessor.ByteOffset = (int)_bufferWriter.BaseStream.Position;
        this._root.Accessors.Add(outputAccessor);
        //
        var animationSampler = new AnimationSampler()
        {
            Input = new AccessorId()
            {
                Id = this._root.Accessors.IndexOf(inputAccessor),
                Root = _root,
            },
            Interpolation = InterpolationType.LINEAR,
            Output = new AccessorId()
            {
                Id = this._root.Accessors.IndexOf(outputAccessor),
                Root = _root,
            },
        };
        glTFAnimation.Samplers.Add(animationSampler);
        //
        var animationChannel = new AnimationChannel()
        {
            Sampler = new SamplerId()
            {
                Id = glTFAnimation.Samplers.IndexOf(animationSampler),
                Root = _root,
            },
            Target = new AnimationChannelTarget()
            {
                Node = new NodeId()
                {
                    Id = nodeIndex,
                    Root = _root,
                }
            }
        };
        glTFAnimation.Channels.Add(animationChannel);

        if (curveBind.type == typeof(Transform))
        {
            // Transform curves come in x/y/z(/w) groups; mark the whole group as
            // consumed so later iterations skip the sibling components.
            var curveGroup = _getCurveGroup(curveBinds, curveBind);
            ignoreCurves.AddRange(curveGroup);
            switch (curveBind.propertyName)
            {
                case "m_LocalPosition.x":
                case "m_LocalPosition.y":
                case "m_LocalPosition.z":
                    animationChannel.Target.Path = GLTFAnimationChannelPath.translation;
                    outputAccessor.Type = GLTFAccessorAttributeType.VEC3;
                    for (var i = 0; i < frameCount; ++i)
                    {
                        var time = i / animationClip.frameRate;
                        var curveX = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[0]);
                        var curveY = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[1]);
                        var curveZ = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[2]);
                        // A missing component curve falls back to the target's current value.
                        var value = curveX != null ? curveX.Evaluate(time) : animationTarget.transform.localPosition.x;
                        _bufferWriter.Write(value);
                        value = curveY != null ? curveY.Evaluate(time) : animationTarget.transform.localPosition.y;
                        _bufferWriter.Write(value);
                        value = curveZ != null ? curveZ.Evaluate(time) : animationTarget.transform.localPosition.z;
                        _bufferWriter.Write(value);
                    }
                    break;
                case "m_LocalRotation.x":
                case "m_LocalRotation.y":
                case "m_LocalRotation.z":
                case "m_LocalRotation.w":
                    animationChannel.Target.Path = GLTFAnimationChannelPath.rotation;
                    outputAccessor.Type = GLTFAccessorAttributeType.VEC4;
                    for (var i = 0; i < frameCount; ++i)
                    {
                        var time = i / animationClip.frameRate;
                        var curveX = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[0]);
                        var curveY = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[1]);
                        var curveZ = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[2]);
                        var curveW = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[3]);
                        var valueX = curveX != null ? curveX.Evaluate(time) : animationTarget.transform.localRotation.x;
                        var valueY = curveY != null ? curveY.Evaluate(time) : animationTarget.transform.localRotation.y;
                        var valueZ = curveZ != null ? curveZ.Evaluate(time) : animationTarget.transform.localRotation.z;
                        var valueW = curveW != null ? curveW.Evaluate(time) : animationTarget.transform.localRotation.w;
                        _bufferWriter.Write(valueX);
                        _bufferWriter.Write(valueY);
                        _bufferWriter.Write(valueZ);
                        _bufferWriter.Write(valueW);
                    }
                    break;
                case "localEulerAnglesRaw.x":
                case "localEulerAnglesRaw.y":
                case "localEulerAnglesRaw.z":
                    // Euler-angle curves are converted to quaternions per frame.
                    animationChannel.Target.Path = GLTFAnimationChannelPath.rotation;
                    outputAccessor.Type = GLTFAccessorAttributeType.VEC4;
                    for (var i = 0; i < frameCount; ++i)
                    {
                        var time = i / animationClip.frameRate;
                        var curveX = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[0]);
                        var curveY = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[1]);
                        var curveZ = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[2]);
                        var valueX = curveX != null ? curveX.Evaluate(time) : animationTarget.transform.localEulerAngles.x;
                        var valueY = curveY != null ? curveY.Evaluate(time) : animationTarget.transform.localEulerAngles.y;
                        var valueZ = curveZ != null ? curveZ.Evaluate(time) : animationTarget.transform.localEulerAngles.z;
                        var quaternion = Quaternion.Euler(valueX, valueY, valueZ);
                        _bufferWriter.Write(quaternion.x);
                        _bufferWriter.Write(quaternion.y);
                        _bufferWriter.Write(quaternion.z);
                        _bufferWriter.Write(quaternion.w);
                    }
                    break;
                case "m_LocalScale.x":
                case "m_LocalScale.y":
                case "m_LocalScale.z":
                    animationChannel.Target.Path = GLTFAnimationChannelPath.scale;
                    outputAccessor.Type = GLTFAccessorAttributeType.VEC3;
                    for (var i = 0; i < frameCount; ++i)
                    {
                        var time = i / animationClip.frameRate;
                        var curveX = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[0]);
                        var curveY = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[1]);
                        var curveZ = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveGroup[2]);
                        var value = curveX != null ? curveX.Evaluate(time) : animationTarget.transform.localScale.x;
                        _bufferWriter.Write(value);
                        value = curveY != null ? curveY.Evaluate(time) : animationTarget.transform.localScale.y;
                        _bufferWriter.Write(value);
                        value = curveZ != null ? curveZ.Evaluate(time) : animationTarget.transform.localScale.z;
                        _bufferWriter.Write(value);
                    }
                    break;
            }
        }
        else
        {
            // Non-transform curves are exported as scalar "custom" channels; the
            // target type/property/uri is carried in the animation extension.
            animationChannel.Target.Path = GLTFAnimationChannelPath.custom;
            outputAccessor.Type = GLTFAccessorAttributeType.SCALAR;
            var type = "";
            var property = "";
            var uri = "";
            var needUpdate = -1;
            if (curveBind.type == typeof(GameObject))
            {
                type = "paper.GameObject";
                switch (curveBind.propertyName)
                {
                    case "m_IsActive":
                        property = "activeSelf";
                        // Active state is a boolean; STEP avoids interpolating it.
                        animationSampler.Interpolation = InterpolationType.STEP;
                        break;
                }
                // for (var i = 0; i < frameCount; ++i) // TODO
                // {
                //     var time = animationClip.length * i / frameCountSO; // TODO
                //     var curve = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveBind);
                //     var value = curve.Evaluate(time);
                //     _bufferWriter.Write(value);
                // }
            }
            else if (curveBind.type == typeof(UnityEngine.MeshRenderer))
            {
                type = "egret3d.MeshRenderer";
                uri = "materials/0/$/_uvTransform";
                needUpdate = 1;
                // animationSampler.Interpolation = InterpolationType.STEP;
                // _MainTex_ST components are remapped to indices of _uvTransform.
                switch (curveBind.propertyName)
                {
                    case "material._MainTex_ST.z":
                        property = "0";
                        break;
                    case "material._MainTex_ST.w":
                        property = "1";
                        break;
                    case "material._MainTex_ST.x":
                        property = "2";
                        break;
                    case "material._MainTex_ST.y":
                        property = "3";
                        break;
                }
            }
            else
            {
                Debug.Log("Unknown type and property." + curveBind.type.ToString() + curveBind.propertyName);
            }

            // Extensions.
            animationChannel.Extensions = new Dictionary<string, IExtension>()
            {
                {
                    AnimationExtensionFactory.EXTENSION_NAME,
                    new AnimationChannelExtension()
                    {
                        type = type,
                        property = property,
                        uri = uri,
                        needUpdate = needUpdate,
                    }
                },
            };
            for (var i = 0; i < frameCount; ++i)
            {
                var curve = UnityEditor.AnimationUtility.GetEditorCurve(animationClip, curveBind);
                if (curve != null)
                {
                    var value = curve.Evaluate(i / animationClip.frameRate);
                    if (curveBind.propertyName == "material._MainTex_ST.w")
                    {
                        // .w is derived from the previously sampled .y values when
                        // available (UV origin conversion); otherwise written as-is.
                        // NOTE(review): this assumes the .y curve is processed
                        // before the .w curve — confirm the curve binding order.
                        if (i < MainTex_STy.Count)
                        {
                            _bufferWriter.Write(1.0f - value - MainTex_STy[i]);
                        }
                        else
                        {
                            _bufferWriter.Write(value);
                        }
                    }
                    else
                    {
                        _bufferWriter.Write(value);
                        if (curveBind.propertyName == "material._MainTex_ST.y")
                        {
                            MainTex_STy.Add(value);
                        }
                    }
                }
            }
        }
    }

    // Copy the clip's Unity animation events into the extension, sorted by time.
    foreach (var evt in animationClip.events)
    {
        var glTFFrameEvent = new AnimationFrameEvent();
        glTFFrameEvent.name = evt.functionName;
        glTFFrameEvent.position = evt.time;
        glTFFrameEvent.intVariable = evt.intParameter;
        glTFFrameEvent.floatVariable = evt.floatParameter;
        glTFFrameEvent.stringVariable = evt.stringParameter;
        ext.events.Add(glTFFrameEvent);
    }
    ext.events.Sort();
}
// Builds the Accessor_SparseType model group: test models exercising sparse
// accessors with each valid sparse-indices component type (unsigned byte /
// short / int), normalized output component types, and one sparse accessor
// whose base accessor has no buffer view.
// NOTE(review): the enclosing class header is outside this chunk — presumably
// a ModelGroup subclass; UseTexture/UseFigure/CreateGLTF/Models/
// GenerateUsedPropertiesList are inherited members. Confirm against the class.
public Accessor_SparseType(List <string> imageList)
{
    // Two textures so mesh A (dense baseline) and mesh B (sparse variant) are
    // visually distinguishable in the rendered output.
    var baseColorTextureA = new Texture { Source = UseTexture(imageList, "BaseColor_A") };
    var baseColorTextureB = new Texture { Source = UseTexture(imageList, "BaseColor_B") };
    // Figures referenced by the generated readme descriptions below.
    UseFigure(imageList, "SparseAccessor_Input");
    UseFigure(imageList, "SparseAccessor_Output-Rotation");
    UseFigure(imageList, "SparseAccessor_NoBufferView");

    // There are no common properties in this model group that are reported in the readme.

    // Local factory: builds one Model with two single-plane nodes (texture
    // coordinates added later by SetTexture) and an optional animation.
    // setProperties applies the per-model customization.
    Model CreateModel(Action <List <Property>, Animation, List <Node> > setProperties)
    {
        var properties = new List <Property>();
        var animated = true;
        var meshPrimitive0 = MeshPrimitive.CreateSinglePlane(includeTextureCoords: false);
        var meshPrimitive1 = MeshPrimitive.CreateSinglePlane(includeTextureCoords: false);
        var nodes = new List <Node>
        {
            new Node
            {
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = new List <Runtime.MeshPrimitive>
                    {
                        meshPrimitive0
                    }
                }
            },
            new Node
            {
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = new List <Runtime.MeshPrimitive>
                    {
                        meshPrimitive1
                    }
                }
            }
        };
        var animation = new Animation();
        var animations = new List <Animation> { animation };

        // Apply the properties that are specific to this gltf.
        setProperties(properties, animation, nodes);

        // If no animations are used, null out that property.
        if (animation.Channels == null)
        {
            animations = null;
            animated = false;
        }

        // Create the gltf object.
        return(new Model
        {
            Properties = properties,
            GLTF = CreateGLTF
            (
                () => new Scene { Nodes = nodes },
                animations: animations
            ),
            Animated = animated,
            Camera = new Manifest.Camera(new Vector3(0.0f, 0.0f, 2.75f))
        });
    }

    // Shared sampler data: a three-keyframe linear timeline, plus the sparse
    // override values the individual models below substitute into it.
    var samplerInputLinear = new[]
    {
        0.0f,
        1.0f,
        2.0f,
    };
    var samplerInputSparse = 1.5f;
    var samplerOutputTranslationSparse = new Vector3(0.0f, 0.2f, 0.0f);
    var samplerOutputRotation = new[]
    {
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(-45.0f), 0.0f),
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(45.0f), 0.0f),
        Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(-45.0f), 0.0f),
    };
    var samplerOutputRotationSparse = Quaternion.CreateFromYawPitchRoll(0.0f, FloatMath.ToRadians(-90.0f), 0.0f);

    // One channel per node: node 0 driven by sampler0 (the dense baseline),
    // node 1 by sampler1 (the sparse variant under test).
    List <AnimationChannel> CreateChannels(List <Node> nodes, AnimationSampler sampler0, AnimationSampler sampler1)
    {
        return(new List <AnimationChannel>
        {
            new AnimationChannel
            {
                Target = new AnimationChannelTarget
                {
                    Node = nodes[0],
                },
                Sampler = sampler0
            },
            new AnimationChannel
            {
                Target = new AnimationChannelTarget
                {
                    Node = nodes[1],
                },
                Sampler = sampler1
            },
        });
    }

    // Applies the same target path to every channel.
    // NOTE(review): the 'properties' parameter is accepted but never used here.
    void SetAnimationPaths(List <AnimationChannel> channels, AnimationChannelTargetPath path, List <Property> properties)
    {
        foreach (var channel in channels)
        {
            channel.Target.Path = path;
        }
    }

    void OffsetPositions(List <Node> nodes)
    {
        // Offsets the positions of the mesh primitives so they don't overlap. This is done because animation translations override node translations.
        nodes[0].Mesh.MeshPrimitives.First().Positions.Values = nodes[0].Mesh.MeshPrimitives.First().Positions.Values.Select(position => { return(new Vector3(position.X - 0.6f, position.Y, position.Z)); });
        nodes[1].Mesh.MeshPrimitives.First().Positions.Values = nodes[1].Mesh.MeshPrimitives.First().Positions.Values.Select(position => { return(new Vector3(position.X + 0.6f, position.Y, position.Z)); });
    }

    // Assigns texture A to node 0 and texture B to node 1, and supplies the
    // texture coordinates omitted by CreateSinglePlane(includeTextureCoords: false).
    void SetTexture(List <Node> nodes)
    {
        nodes[0].Mesh.MeshPrimitives.First().Material = new Runtime.Material
        {
            PbrMetallicRoughness = new PbrMetallicRoughness
            {
                BaseColorTexture = new TextureInfo { Texture = baseColorTextureA }
            }
        };
        nodes[1].Mesh.MeshPrimitives.First().Material = new Runtime.Material
        {
            PbrMetallicRoughness = new PbrMetallicRoughness
            {
                BaseColorTexture = new TextureInfo { Texture = baseColorTextureB }
            }
        };
        nodes[0].Mesh.MeshPrimitives.First().TexCoords0 = Data.Create(MeshPrimitive.GetSinglePlaneTexCoords());
        nodes[1].Mesh.MeshPrimitives.First().TexCoords0 = Data.Create(MeshPrimitive.GetSinglePlaneTexCoords());
    }

    Models = new List <Model>
    {
        // Model 1: sparse animation-input indices stored as unsigned bytes.
        CreateModel((properties, animation, nodes) =>
        {
            SetTexture(nodes);
            OffsetPositions(nodes);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputRotation),
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear, DataSparse.Create
                (
                    DataType.UnsignedByte,
                    new Dictionary <int, float>
                    {
                        { 1, samplerInputSparse }
                    }
                )),
                Output = Data.Create(samplerOutputRotation),
            };
            var channels = CreateChannels(nodes, sampler0, sampler1);
            animation.Channels = channels;
            SetAnimationPaths(channels, AnimationChannelTargetPath.Rotation, properties);
            properties.Add(new Property(PropertyName.SparseAccessor, "Input"));
            properties.Add(new Property(PropertyName.IndicesType, sampler1.Input.Sparse.IndicesOutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.ValueType, sampler1.Input.OutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.BufferView, ":white_check_mark:"));
            properties.Add(new Property(PropertyName.Description, "See Figure 1"));
        }),
        // Model 2: sparse animation-input indices stored as unsigned shorts.
        CreateModel((properties, animation, nodes) =>
        {
            SetTexture(nodes);
            OffsetPositions(nodes);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputRotation),
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear, DataSparse.Create
                (
                    DataType.UnsignedShort,
                    new Dictionary <int, float>
                    {
                        { 1, samplerInputSparse }
                    }
                )),
                Output = Data.Create(samplerOutputRotation),
            };
            var channels = CreateChannels(nodes, sampler0, sampler1);
            animation.Channels = channels;
            SetAnimationPaths(channels, AnimationChannelTargetPath.Rotation, properties);
            properties.Add(new Property(PropertyName.SparseAccessor, "Input"));
            properties.Add(new Property(PropertyName.IndicesType, sampler1.Input.Sparse.IndicesOutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.ValueType, sampler1.Input.OutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.BufferView, ":white_check_mark:"));
            properties.Add(new Property(PropertyName.Description, "See Figure 1"));
        }),
        // Model 3: sparse animation-input indices stored as unsigned ints.
        CreateModel((properties, animation, nodes) =>
        {
            SetTexture(nodes);
            OffsetPositions(nodes);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputRotation),
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear, DataSparse.Create
                (
                    DataType.UnsignedInt,
                    new Dictionary <int, float>
                    {
                        { 1, samplerInputSparse }
                    }
                )),
                Output = Data.Create(samplerOutputRotation),
            };
            var channels = CreateChannels(nodes, sampler0, sampler1);
            animation.Channels = channels;
            SetAnimationPaths(channels, AnimationChannelTargetPath.Rotation, properties);
            properties.Add(new Property(PropertyName.SparseAccessor, "Input"));
            properties.Add(new Property(PropertyName.IndicesType, sampler1.Input.Sparse.IndicesOutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.ValueType, sampler1.Input.OutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.BufferView, ":white_check_mark:"));
            properties.Add(new Property(PropertyName.Description, "See Figure 1"));
        }),
        // Model 4: sparse rotation output with normalized-byte components.
        CreateModel((properties, animation, nodes) =>
        {
            SetTexture(nodes);
            OffsetPositions(nodes);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputRotation, DataType.NormalizedByte),
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputRotation, DataType.NormalizedByte, DataSparse.Create
                (
                    DataType.UnsignedByte,
                    new Dictionary <int, Quaternion>
                    {
                        { 1, samplerOutputRotationSparse }
                    }
                )),
            };
            var channels = CreateChannels(nodes, sampler0, sampler1);
            animation.Channels = channels;
            SetAnimationPaths(channels, AnimationChannelTargetPath.Rotation, properties);
            properties.Add(new Property(PropertyName.SparseAccessor, "Output"));
            properties.Add(new Property(PropertyName.IndicesType, ((Data <Quaternion>)sampler1.Output).Sparse.IndicesOutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.ValueType, sampler1.Output.OutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.BufferView, ":white_check_mark:"));
            properties.Add(new Property(PropertyName.Description, "See Figure 2"));
        }),
        // Model 5: sparse rotation output with normalized-short components.
        CreateModel((properties, animation, nodes) =>
        {
            SetTexture(nodes);
            OffsetPositions(nodes);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputRotation, DataType.NormalizedShort),
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputRotation, DataType.NormalizedShort, DataSparse.Create
                (
                    DataType.UnsignedByte,
                    new Dictionary <int, Quaternion>
                    {
                        { 1, samplerOutputRotationSparse }
                    }
                )),
            };
            var channels = CreateChannels(nodes, sampler0, sampler1);
            animation.Channels = channels;
            SetAnimationPaths(channels, AnimationChannelTargetPath.Rotation, properties);
            properties.Add(new Property(PropertyName.SparseAccessor, "Output"));
            properties.Add(new Property(PropertyName.IndicesType, ((Data <Quaternion>)sampler1.Output).Sparse.IndicesOutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.ValueType, sampler1.Output.OutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.BufferView, ":white_check_mark:"));
            properties.Add(new Property(PropertyName.Description, "See Figure 2"));
        }),
        // Model 6: sparse accessor on mesh-primitive indices (mesh B re-points
        // two indices at otherwise-unused vertexes).
        CreateModel((properties, animation, nodes) =>
        {
            // Add extra vertexes that will be used by the sparse accessor.
            SetTexture(nodes);
            var positions = MeshPrimitive.GetSinglePlanePositions().Concat(new[]
            {
                new Vector3(0.25f, -0.5f, 0.0f),
                new Vector3(-0.25f, 0.5f, 0.0f),
            });
            var texCoords = MeshPrimitive.GetSinglePlaneTexCoords().Concat(new[]
            {
                new Vector2(1.0f, 1.0f),
                new Vector2(0.0f, 0.0f),
            });
            foreach (var node in nodes)
            {
                node.Mesh.MeshPrimitives.First().Positions.Values = positions;
                node.Mesh.MeshPrimitives.First().TexCoords0.Values = texCoords;
            }
            OffsetPositions(nodes);
            var meshPrimitiveIndices = nodes[1].Mesh.MeshPrimitives.First().Indices;
            meshPrimitiveIndices.Values = nodes[0].Mesh.MeshPrimitives.First().Indices.Values;
            meshPrimitiveIndices.Sparse = DataSparse.Create
            (
                DataType.UnsignedByte,
                new Dictionary <int, int>
                {
                    { 1, 4 },
                    { 5, 5 },
                }
            );
            properties.Add(new Property(PropertyName.SparseAccessor, "Mesh Primitive Indices"));
            properties.Add(new Property(PropertyName.IndicesType, meshPrimitiveIndices.Sparse.IndicesOutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.ValueType, meshPrimitiveIndices.OutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.BufferView, ":white_check_mark:"));
            properties.Add(new Property(PropertyName.Description, "See the description for the Mesh Primitive Indices model in [Accessor_Sparse](../Accessor_Sparse/README.md)."));
        }),
        // Model 7: sparse translation output whose base accessor has no buffer
        // view (base values default to zero). Only one node is kept.
        CreateModel((properties, animation, nodes) =>
        {
            SetTexture(nodes);
            nodes.RemoveAt(0);
            var sampler = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(Enumerable.Repeat(default(Vector3), 3), DataSparse.Create
                (
                    DataType.UnsignedByte,
                    new Dictionary <int, Vector3>
                    {
                        { 1, samplerOutputTranslationSparse }
                    }
                )),
            };
            var channels = new List <AnimationChannel>
            {
                new AnimationChannel
                {
                    Target = new AnimationChannelTarget
                    {
                        Node = nodes[0],
                        Path = AnimationChannelTargetPath.Translation
                    },
                    Sampler = sampler
                },
            };
            animation.Channels = channels;
            properties.Add(new Property(PropertyName.SparseAccessor, "Output"));
            properties.Add(new Property(PropertyName.IndicesType, ((Data <Vector3>)sampler.Output).Sparse.IndicesOutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.ValueType, ((Data <Vector3>)sampler.Output).OutputType.ToReadmeString()));
            properties.Add(new Property(PropertyName.BufferView, ""));
            properties.Add(new Property(PropertyName.Description, "See Figure 3"));
        }),
    };
    GenerateUsedPropertiesList();
}
// Builds the Accessor_Sparse model group: baseline sparse-accessor test
// models (sampler input, sampler output, positions, and mesh-primitive
// indices), each pairing a dense mesh A against a sparse mesh B.
// NOTE(review): the enclosing class header is outside this chunk — presumably
// a ModelGroup subclass; UseTexture/UseFigure/CreateGLTF/Models/
// GenerateUsedPropertiesList are inherited members. Confirm against the class.
public Accessor_Sparse(List <string> imageList)
{
    // Two textures so mesh A (dense) and mesh B (sparse) are distinguishable.
    var baseColorTextureA = new Texture { Source = UseTexture(imageList, "BaseColor_A") };
    var baseColorTextureB = new Texture { Source = UseTexture(imageList, "BaseColor_B") };
    // Figures referenced by the generated readme descriptions below.
    UseFigure(imageList, "SparseAccessor_Input");
    UseFigure(imageList, "SparseAccessor_Output-Translation");

    // There are no common properties in this model group that are reported in the readme.

    // Local factory: builds one Model with two textured single-plane nodes and
    // an optional animation. setProperties applies per-model customization.
    Model CreateModel(Action <List <Property>, Animation, List <Node> > setProperties)
    {
        var properties = new List <Property>();
        var animated = true;
        var meshPrimitive0 = MeshPrimitive.CreateSinglePlane();
        var meshPrimitive1 = MeshPrimitive.CreateSinglePlane();
        var nodes = new List <Node>
        {
            new Node
            {
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = new List <Runtime.MeshPrimitive>
                    {
                        meshPrimitive0
                    }
                }
            },
            new Node
            {
                Mesh = new Runtime.Mesh
                {
                    MeshPrimitives = new List <Runtime.MeshPrimitive>
                    {
                        meshPrimitive1
                    }
                }
            }
        };
        // Textures are applied to every model in this group, so do it here
        // rather than in each setProperties callback.
        SetTexture(nodes);
        var animation = new Animation();
        var animations = new List <Animation> { animation };

        // Apply the properties that are specific to this gltf.
        setProperties(properties, animation, nodes);

        // If no animations are used, null out that property.
        if (animation.Channels == null)
        {
            animations = null;
            animated = false;
        }

        // Create the gltf object.
        return(new Model
        {
            Properties = properties,
            GLTF = CreateGLTF
            (
                () => new Scene { Nodes = nodes },
                animations: animations
            ),
            Animated = animated,
            Camera = new Manifest.Camera(new Vector3(0.0f, 0.0f, 2.75f))
        });
    }

    // Shared sampler data: a three-keyframe linear timeline and an up/down
    // translation curve used by both samplers in the animated models.
    var samplerInputLinear = new[]
    {
        0.0f,
        1.0f,
        2.0f,
    };
    var samplerOutputTranslation = new[]
    {
        new Vector3(0.0f, 0.3f, 0.0f),
        new Vector3(0.0f, -0.3f, 0.0f),
        new Vector3(0.0f, 0.3f, 0.0f),
    };

    // Assigns texture A to node 0 and texture B to node 1.
    void SetTexture(List <Node> nodes)
    {
        nodes[0].Mesh.MeshPrimitives.First().Material = new Runtime.Material
        {
            PbrMetallicRoughness = new PbrMetallicRoughness
            {
                BaseColorTexture = new TextureInfo { Texture = baseColorTextureA },
            }
        };
        nodes[1].Mesh.MeshPrimitives.First().Material = new Runtime.Material
        {
            PbrMetallicRoughness = new PbrMetallicRoughness
            {
                BaseColorTexture = new TextureInfo { Texture = baseColorTextureB }
            }
        };
    }

    void OffsetPositions(List <Node> nodes)
    {
        // Offsets the positions of the mesh primitives so they don't overlap. This is done because animation translations override node translations.
        nodes[0].Mesh.MeshPrimitives.First().Positions.Values = ((Vector3[])nodes[0].Mesh.MeshPrimitives.First().Positions.Values).Select(position => { return(new Vector3(position.X - 0.6f, position.Y, position.Z)); });
        nodes[1].Mesh.MeshPrimitives.First().Positions.Values = ((Vector3[])nodes[1].Mesh.MeshPrimitives.First().Positions.Values).Select(position => { return(new Vector3(position.X + 0.6f, position.Y, position.Z)); });
    }

    void OffsetNodeTranslations(List <Node> nodes)
    {
        // Gives each node a translation so they don't overlap, but can have the same values for position.
        nodes[0].Translation = new Vector3(-0.6f, 0.0f, 0.0f);
        nodes[1].Translation = new Vector3(0.6f, 0.0f, 0.0f);
    }

    // One channel per node: node 0 driven by sampler0 (the dense baseline),
    // node 1 by sampler1 (the sparse variant under test).
    List <AnimationChannel> CreateChannels(List <Node> nodes, AnimationSampler sampler0, AnimationSampler sampler1)
    {
        return(new List <AnimationChannel>
        {
            new AnimationChannel
            {
                Target = new AnimationChannelTarget
                {
                    Node = nodes[0],
                },
                Sampler = sampler0
            },
            new AnimationChannel
            {
                Target = new AnimationChannelTarget
                {
                    Node = nodes[1],
                },
                Sampler = sampler1
            },
        });
    }

    // Applies the same target path to every channel.
    // NOTE(review): the 'properties' parameter is accepted but never used here.
    void SetAnimationPaths(List <AnimationChannel> channels, AnimationChannelTargetPath path, List <Property> properties)
    {
        foreach (var channel in channels)
        {
            channel.Target.Path = path;
        }
    }

    Models = new List <Model>
    {
        // Model 1: sparse accessor replacing one animation-sampler input keyframe time.
        CreateModel((properties, animation, nodes) =>
        {
            OffsetPositions(nodes);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputTranslation),
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear, DataSparse.Create
                (
                    new Dictionary <int, float>
                    {
                        { 1, 1.5f }
                    }
                )),
                Output = Data.Create(samplerOutputTranslation),
            };
            var channels = CreateChannels(nodes, sampler0, sampler1);
            animation.Channels = channels;
            SetAnimationPaths(channels, AnimationChannelTargetPath.Translation, properties);
            properties.Add(new Property(PropertyName.SparseAccessor, "Animation Sampler Input"));
            properties.Add(new Property(PropertyName.Description, "See Figure 1"));
        }),
        // Model 2: sparse accessor replacing one animation-sampler output translation.
        CreateModel((properties, animation, nodes) =>
        {
            OffsetPositions(nodes);
            var sampler0 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputTranslation),
            };
            var sampler1 = new AnimationSampler
            {
                Interpolation = AnimationSamplerInterpolation.Linear,
                Input = Data.Create(samplerInputLinear),
                Output = Data.Create(samplerOutputTranslation, DataSparse.Create
                (
                    new Dictionary <int, Vector3>
                    {
                        { 1, new Vector3(0.0f, 0.2f, 0.0f) },
                    }
                )),
            };
            var channels = CreateChannels(nodes, sampler0, sampler1);
            animation.Channels = channels;
            SetAnimationPaths(channels, AnimationChannelTargetPath.Translation, properties);
            properties.Add(new Property(PropertyName.SparseAccessor, "Animation Sampler Output"));
            properties.Add(new Property(PropertyName.Description, "See Figure 2"));
        }),
        // Model 3: sparse accessor on vertex positions (mesh B shares mesh A's
        // position values, with two vertexes overridden).
        CreateModel((properties, animation, nodes) =>
        {
            OffsetNodeTranslations(nodes);
            nodes[1].Mesh.MeshPrimitives.First().Positions.Values = nodes[0].Mesh.MeshPrimitives.First().Positions.Values;
            nodes[1].Mesh.MeshPrimitives.First().Positions.Sparse = DataSparse.Create
            (
                new Dictionary <int, Vector3>
                {
                    { 0, new Vector3(0.25f, -0.5f, 0.0f) },
                    { 2, new Vector3(-0.25f, 0.5f, 0.0f) },
                }
            );
            properties.Add(new Property(PropertyName.SparseAccessor, "Positions"));
            properties.Add(new Property(PropertyName.Description, "Mesh B's sparse accessor overwrites the values of the top left and bottom right vertexes."));
        }),
        // Model 4: sparse accessor on mesh-primitive indices (mesh B re-points
        // two indices at otherwise-unused vertexes).
        CreateModel((properties, animation, nodes) =>
        {
            // Add extra vertexes that will be used by the sparse accessor.
            var positions = MeshPrimitive.GetSinglePlanePositions().Concat(new[]
            {
                new Vector3(0.25f, -0.5f, 0.0f),
                new Vector3(-0.25f, 0.5f, 0.0f),
            });
            var textureCoords = MeshPrimitive.GetSinglePlaneTexCoords().Concat(new[]
            {
                new Vector2(1.0f, 1.0f),
                new Vector2(0.0f, 0.0f),
            });
            foreach (var node in nodes)
            {
                node.Mesh.MeshPrimitives.First().Positions.Values = positions;
                node.Mesh.MeshPrimitives.First().TexCoords0.Values = textureCoords;
            }
            OffsetNodeTranslations(nodes);
            nodes[1].Mesh.MeshPrimitives.First().Indices.Values = nodes[0].Mesh.MeshPrimitives.First().Indices.Values;
            nodes[1].Mesh.MeshPrimitives.First().Indices.Sparse = DataSparse.Create
            (
                new Dictionary <int, int>
                {
                    { 1, 4 },
                    { 5, 5 },
                }
            );
            properties.Add(new Property(PropertyName.SparseAccessor, "Mesh Primitive Indices"));
            properties.Add(new Property(PropertyName.Description, "Both meshes have six vertexes, but only four are used to make the visible mesh. " +
                "Mesh B's sparse accessor replaces the indices pointing at the top left and bottom right vertexes with ones pointing at the unused vertexes."));
        }),
    };
    GenerateUsedPropertiesList();
}
// Produces a rider animation that follows a mount: for every frame, the rider
// root is moved with the mount's root bone and the rider attachment bone
// (_riderBoneIndex) is snapped to the mount vertex (_mountVertexId), with the
// user-configured translation/rotation offsets applied on top.
//
// mountAnimation: the mount's clip, sampled per frame to find the attach point.
// riderAnimation: the rider's clip; cloned and modified, the input is untouched.
// Returns the new rider clip (frame count = min of the two clips' frame counts).
// NOTE(review): assumes _animationSettings/_mountSkeleton/_riderSkeleton/
// _mountVertexPositionResolver fields are initialized by the enclosing class.
public AnimationClip GenerateMountAnimation(AnimationClip mountAnimation, AnimationClip riderAnimation)
{
    // User-configured offsets applied to the rider relative to the mount vertex.
    Vector3 translationOffset = new Vector3((float)_animationSettings.Translation.X.Value, (float)_animationSettings.Translation.Y.Value, (float)_animationSettings.Translation.Z.Value);
    Vector3 rotationOffset = new Vector3((float)_animationSettings.Rotation.X.Value, (float)_animationSettings.Rotation.Y.Value, (float)_animationSettings.Rotation.Z.Value);
    // Despite the name, this is a quaternion built from the offset Euler angles (degrees -> radians).
    var rotationOffsetMatrix = Quaternion.CreateFromYawPitchRoll(MathHelper.ToRadians(rotationOffset.X), MathHelper.ToRadians(rotationOffset.Y), MathHelper.ToRadians(rotationOffset.Z));

    // Work on a copy so the caller's rider clip is not mutated.
    var newRiderAnim = riderAnimation.Clone();
    newRiderAnim.MergeStaticAndDynamicFrames();
    View3D.Animation.AnimationEditor.LoopAnimation(newRiderAnim, (int)_animationSettings.LoopCounter.Value);

    // Resample
    if (_animationSettings.FitAnimation)
    {
        // Stretch/squash the rider clip to the mount clip's frame count and duration.
        newRiderAnim = View3D.Animation.AnimationEditor.ReSample(_riderSkeleton, newRiderAnim, mountAnimation.DynamicFrames.Count, mountAnimation.PlayTimeInSec);
    }

    // Everything was merged into dynamic frames above; drop the static frame.
    newRiderAnim.StaticFrame = null;

    var maxFrameCount = Math.Min(mountAnimation.DynamicFrames.Count, newRiderAnim.DynamicFrames.Count);
    for (int i = 0; i < maxFrameCount; i++)
    {
        // World transform of the mount vertex the rider attaches to, at frame i.
        var mountFrame = AnimationSampler.Sample(i, 0, _mountSkeleton, new List <AnimationClip> { mountAnimation });
        var mountBoneWorldMatrix = _mountVertexPositionResolver.GetVertexTransformWorld(mountFrame, _mountVertexId);
        mountBoneWorldMatrix.Decompose(out var _, out var mountVertexRot, out var mountVertexPos);

        // Make sure the rider moves along in the world with the same speed as the mount
        var mountMovement = mountFrame.BoneTransforms[0].Translation;
        newRiderAnim.DynamicFrames[i].Position[0] = mountMovement;// mountAnimation.DynamicFrames[i].Position[0];
        newRiderAnim.DynamicFrames[i].Rotation[0] = Quaternion.Identity;

        // Optionally preserve the rider bone's own animated world rotation so it
        // is composed with the mount vertex rotation instead of being replaced.
        // NOTE(review): 'origianlRotation' is a typo'd name and is a DIFFERENT
        // variable from 'originalRotation' below — do not merge them blindly.
        var origianlRotation = Quaternion.Identity;
        if (_animationSettings.KeepRiderRotation)
        {
            var riderFrame = AnimationSampler.Sample(i, 0, _riderSkeleton, new List <AnimationClip> { newRiderAnim });
            var riderBoneWorldmatrix = riderFrame.GetSkeletonAnimatedWorld(_riderSkeleton, _riderBoneIndex);
            riderBoneWorldmatrix.Decompose(out var _, out origianlRotation, out var _);
        }

        // Frame-local values before overwriting, used to compute the deltas
        // that are propagated to sibling bones below.
        var originalPosition = newRiderAnim.DynamicFrames[i].Position[_riderBoneIndex];
        var originalRotation = newRiderAnim.DynamicFrames[i].Rotation[_riderBoneIndex];

        // Root movement was already applied to bone 0, so subtract it here to
        // avoid double-applying the mount's root translation.
        var newRiderPosition = mountVertexPos + translationOffset - mountFrame.BoneTransforms[0].Translation;
        var newRiderRotation = Quaternion.Multiply(Quaternion.Multiply(mountVertexRot, origianlRotation), rotationOffsetMatrix);

        var riderPositionDiff = newRiderPosition - originalPosition;
        var riderRotationDiff = newRiderRotation * Quaternion.Inverse(originalRotation);

        newRiderAnim.DynamicFrames[i].Position[_riderBoneIndex] = newRiderPosition;
        newRiderAnim.DynamicFrames[i].Rotation[_riderBoneIndex] = newRiderRotation;

        // Find all the bones at the same level (normally attachmentpoints) and move them as well
        var parentBoneIndex = _riderSkeleton.GetParentBone(_riderBoneIndex);
        if (parentBoneIndex != -1)
        {
            var childNodes = _riderSkeleton.GetChildBones(parentBoneIndex);
            for (int boneId = 0; boneId < childNodes.Count; boneId++)
            {
                var id = childNodes[boneId];
                // Skip the rider bone itself; it was set explicitly above.
                if (id == _riderBoneIndex)
                {
                    continue;
                }
                newRiderAnim.DynamicFrames[i].Position[id] += riderPositionDiff;
                newRiderAnim.DynamicFrames[i].Rotation[id] = riderRotationDiff * newRiderAnim.DynamicFrames[i].Rotation[id];
            }
        }
    }
    return(newRiderAnim);
}