/// <summary>
/// Parses a variable length list of parms on one line.
/// </summary>
/// <param name="lexer">Lexer positioned at the first parm on the line.</param>
/// <param name="maxParms">Maximum number of parms allowed on the line.</param>
/// <returns>The parsed values.</returns>
private float[] ParseParams(idLexer lexer, int maxParms)
{
	idToken token;
	List<float> parms = new List<float>();
	int count = 0;
	float tmp;

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}
		else if(count == maxParms)
		{
			lexer.Error("too many parms on line");
			break;
		}
		else
		{
			token.StripQuotes();
			float.TryParse(token.ToString(), out tmp);

			parms.Add(tmp);
			count++;
		}
	}

	return parms.ToArray();
}
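// Note: the plain float.TryParse calls used throughout these parsers honor the current culture,
// so decl files written with "." as the decimal separator can silently parse as 0 on systems
// whose culture expects ",". A minimal sketch of a culture-invariant helper the parsers could
// use instead (ParseInvariantFloat is a hypothetical name, not part of the original source):
private static float ParseInvariantFloat(idToken token)
{
	float value;

	// NumberStyles.Float + InvariantCulture keeps decl parsing locale-independent.
	float.TryParse(token.ToString(), System.Globalization.NumberStyles.Float,
		System.Globalization.CultureInfo.InvariantCulture, out value);

	return value;
}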
private void ParseJoint(idLexer lexer, idMD5Joint joint, ref idJointQuaternion defaultPose)
{
	//
	// parse name
	//
	joint.Name = lexer.ReadToken().ToString();

	//
	// parse parent
	//
	int parentIndex = lexer.ParseInt();

	if(parentIndex >= 0)
	{
		if(parentIndex >= (_joints.Length - 1))
		{
			lexer.Error("Invalid parent for joint '{0}'", joint.Name);
		}

		joint.Parent = _joints[parentIndex];
	}

	//
	// parse default pose
	//
	float[] tmp = lexer.Parse1DMatrix(3);
	defaultPose.Translation = new Vector3(tmp[0], tmp[1], tmp[2]);

	tmp = lexer.Parse1DMatrix(3);
	defaultPose.Quaternion = new Quaternion(tmp[0], tmp[1], tmp[2], 0);
	defaultPose.Quaternion.W = idHelper.CalculateW(defaultPose.Quaternion);
}
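// idHelper.CalculateW is called above but not defined in this excerpt. MD5 data stores only the
// X/Y/Z components of each unit quaternion, so W is recovered from the unit-length constraint.
// A minimal sketch of such a helper (the clamp and the sign convention are assumptions; some MD5
// loaders negate the result, so this is not necessarily the port's exact implementation):
public static float CalculateW(Quaternion q)
{
	// |q| = 1  =>  w^2 = 1 - (x^2 + y^2 + z^2); guard against rounding pushing the sum past 1.
	float t = 1.0f - (q.X * q.X) - (q.Y * q.Y) - (q.Z * q.Z);

	return (t <= 0.0f) ? 0.0f : (float) System.Math.Sqrt(t);
}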
private void ParseParametric(idLexer lexer, idParticleParameter parm)
{
	idToken token;

	if((token = lexer.ReadToken()) == null)
	{
		lexer.Error("not enough parameters");
		return;
	}

	if(token.IsNumeric == true)
	{
		// can have a to + 2nd parm.
		float tmp;
		float.TryParse(token.ToString(), out tmp);

		parm.From = tmp;
		parm.To = tmp;

		if((token = lexer.ReadToken()) != null)
		{
			if(token.ToString().ToLower() == "to")
			{
				if((token = lexer.ReadToken()) == null)
				{
					lexer.Error("missing second parameter");
					return;
				}

				float.TryParse(token.ToString(), out tmp);
				parm.To = tmp;
			}
			else
			{
				lexer.UnreadToken = token;
			}
		}
	}
	else
	{
		parm.Table = (idDeclTable) idE.DeclManager.FindType(DeclType.Table, token.ToString(), false);
	}
}
/// <summary> /// Used for initial loads, reloadModel, and reloading the data of purged models. /// </summary> /// <remarks> /// Upon exit, the model will absolutely be valid, but possibly as a default model. /// </remarks> public override void Load() { if (this.Disposed == true) { throw new ObjectDisposedException(this.GetType().Name); } if (_purged == false) { Purge(); } _purged = false; idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters); if (lexer.LoadFile(Name) == false) { MakeDefault(); return; } lexer.ExpectTokenString(VersionString); int version = lexer.ParseInt(); int count = 0; idToken token; if (version != Version) { lexer.Error("Invalid version {0}. Should be version {1}", version, Version); } // // skip commandline // lexer.ExpectTokenString("commandline"); lexer.ReadToken(); // parse num joints lexer.ExpectTokenString("numJoints"); count = lexer.ParseInt(); _joints = new idMD5Joint[count]; _defaultPose = new idJointQuaternion[count]; idJointMatrix[] poseMat3 = new idJointMatrix[count]; // parse num meshes lexer.ExpectTokenString("numMeshes"); count = lexer.ParseInt(); if (count < 0) { lexer.Error("Invalid size: {0}", count); } _meshes = new idMD5Mesh[count]; // // parse joints // lexer.ExpectTokenString("joints"); lexer.ExpectTokenString("{"); int jointCount = _joints.Length; for (int i = 0; i < jointCount; i++) { idMD5Joint joint = _joints[i] = new idMD5Joint(); idJointQuaternion pose = new idJointQuaternion(); ParseJoint(lexer, joint, ref pose); poseMat3[i] = idJointMatrix.Zero; poseMat3[i].Rotation = Matrix.CreateFromQuaternion(pose.Quaternion); poseMat3[i].Translation = pose.Translation; if (joint.Parent != null) { int parentIndex = GetJointIndex(joint.Parent); pose.Quaternion = Quaternion.CreateFromRotationMatrix(poseMat3[i].ToMatrix() * Matrix.Transpose(poseMat3[parentIndex].ToMatrix())); pose.Translation = Vector3.Transform(poseMat3[i].ToVector3() - poseMat3[parentIndex].ToVector3(), Matrix.Transpose(poseMat3[parentIndex].ToMatrix())); } _defaultPose[i] = pose; } lexer.ExpectTokenString("}"); int meshCount = _meshes.Length; for (int i = 0; i < meshCount; i++) { lexer.ExpectTokenString("mesh"); _meshes[i] = new idMD5Mesh(); _meshes[i].Parse(lexer, poseMat3); } // // calculate the bounds of the model // CalculateBounds(poseMat3); // set the timestamp for reloadmodels idConsole.Warning("TODO: fileSystem->ReadFile( name, NULL, &timeStamp );"); }
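// CalculateBounds is called at the end of Load above but not shown in this excerpt. In the original
// engine it simply folds each mesh's bind-pose bounds into the model bounds. A minimal sketch under
// that assumption (the _bounds field and the idBounds.Clear/AddBounds and idMD5Mesh.CalculateBounds
// helpers are assumed names, not confirmed by this file):
private void CalculateBounds(idJointMatrix[] joints)
{
	_bounds.Clear();

	// grow the model bounds by every mesh's bounds in the given pose.
	foreach(idMD5Mesh mesh in _meshes)
	{
		_bounds.AddBounds(mesh.CalculateBounds(joints));
	}
}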
public bool LoadAnimation(string fileName) { idToken token; idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters | LexerOptions.NoStringConcatination); if (lexer.LoadFile(fileName) == false) { return(false); } Clear(); _name = fileName; lexer.ExpectTokenString(idRenderModel_MD5.VersionString); int version = lexer.ParseInt(); if (version != idRenderModel_MD5.Version) { lexer.Error("Invalid version {0}. Should be version {1}", version, idRenderModel_MD5.Version); } // skip the commandline lexer.ExpectTokenString("commandline"); lexer.ReadToken(); // parse num frames lexer.ExpectTokenString("numFrames"); int frameCount = lexer.ParseInt(); if (frameCount <= 0) { lexer.Error("Invalid number of frames: {0}", frameCount); } // parse num joints lexer.ExpectTokenString("numJoints"); int jointCount = lexer.ParseInt(); if (jointCount <= 0) { lexer.Error("Invalid number of joints: {0}", jointCount); } // parse frame rate lexer.ExpectTokenString("frameRate"); _frameRate = lexer.ParseInt(); if (_frameRate < 0) { lexer.Error("Invalid frame rate: {0}", _frameRate); } // parse number of animated components lexer.ExpectTokenString("numAnimatedComponents"); _animatedComponentCount = lexer.ParseInt(); if ((_animatedComponentCount < 0) || (_animatedComponentCount > (jointCount * 6))) { lexer.Error("Invalid number of animated components: {0}", _animatedComponentCount); } // parse the hierarchy _jointInfo = new JointAnimationInfo[jointCount]; lexer.ExpectTokenString("hierarchy"); lexer.ExpectTokenString("{"); for (int i = 0; i < jointCount; i++) { token = lexer.ReadToken(); _jointInfo[i] = new JointAnimationInfo(); _jointInfo[i].NameIndex = idR.AnimManager.GetJointIndex(token.ToString()); // parse parent num _jointInfo[i].ParentIndex = lexer.ParseInt(); if (_jointInfo[i].ParentIndex >= i) { lexer.Error("Invalid parent num: {0}", _jointInfo[i].ParentIndex); } if ((i != 0) && (_jointInfo[i].ParentIndex < 0)) { lexer.Error("Animations may have only one root joint"); } // parse anim bits _jointInfo[i].AnimationBits = (AnimationBits)lexer.ParseInt(); if (((int)_jointInfo[i].AnimationBits & ~63) != 0) { lexer.Error("Invalid anim bits: {0}", _jointInfo[i].AnimationBits); } // parse first component _jointInfo[i].FirstComponent = lexer.ParseInt(); if ((_animatedComponentCount > 0) && ((_jointInfo[i].FirstComponent < 0) || (_jointInfo[i].FirstComponent >= _animatedComponentCount))) { lexer.Error("Invalid first component: {0}", _jointInfo[i].FirstComponent); } } lexer.ExpectTokenString("}"); // parse bounds lexer.ExpectTokenString("bounds"); lexer.ExpectTokenString("{"); _bounds = new idBounds[frameCount]; for (int i = 0; i < frameCount; i++) { float[] tmp = lexer.Parse1DMatrix(3); float[] tmp2 = lexer.Parse1DMatrix(3); _bounds[i] = new idBounds( new Vector3(tmp[0], tmp[1], tmp[2]), new Vector3(tmp2[0], tmp2[1], tmp2[2]) ); } lexer.ExpectTokenString("}"); // parse base frame _baseFrame = new idJointQuaternion[jointCount]; lexer.ExpectTokenString("baseframe"); lexer.ExpectTokenString("{"); for (int i = 0; i < jointCount; i++) { float[] tmp = lexer.Parse1DMatrix(3); float[] tmp2 = lexer.Parse1DMatrix(3); idCompressedQuaternion q = new idCompressedQuaternion(tmp2[0], tmp2[1], tmp2[2]); _baseFrame[i] = new idJointQuaternion(); _baseFrame[i].Translation = new Vector3(tmp[0], tmp[1], tmp[2]); _baseFrame[i].Quaternion = q.ToQuaternion(); } lexer.ExpectTokenString("}"); // parse frames _componentFrames = new float[_animatedComponentCount * frameCount]; int frameOffset = 0; for (int i = 0; 
i < frameCount; i++) { lexer.ExpectTokenString("frame"); int count = lexer.ParseInt(); if (count != i) { lexer.Error("Expected frame number {0}", i); } lexer.ExpectTokenString("{"); for (int j = 0; j < _animatedComponentCount; j++, frameOffset++) { _componentFrames[frameOffset] = lexer.ParseFloat(); } lexer.ExpectTokenString("}"); } // get total move delta if (_animatedComponentCount == 0) { _totalDelta = Vector3.Zero; } else { int componentOffset = _jointInfo[0].FirstComponent; if ((_jointInfo[0].AnimationBits & AnimationBits.TranslationX) == AnimationBits.TranslationX) { for (int i = 0; i < frameCount; i++) { _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.X; } _totalDelta.X = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))]; componentOffset++; } else { _totalDelta.X = 0; } if ((_jointInfo[0].AnimationBits & AnimationBits.TranslationY) == AnimationBits.TranslationY) { for (int i = 0; i < frameCount; i++) { _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Y; } _totalDelta.Y = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))]; componentOffset++; } else { _totalDelta.Y = 0; } if ((_jointInfo[0].AnimationBits & AnimationBits.TranslationZ) == AnimationBits.TranslationZ) { for (int i = 0; i < frameCount; i++) { _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Z; } _totalDelta.Z = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))]; } else { _totalDelta.Z = 0; } } _baseFrame[0].Translation = Vector3.Zero; // we don't count last frame because it would cause a 1 frame pause at the end _animLength = ((frameCount - 1) * 1000 + _frameRate - 1) / _frameRate; // done return(true); }
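// The frame data parsed above is one contiguous block of _animatedComponentCount floats per frame.
// Each joint reads its animated values starting at JointAnimationInfo.FirstComponent, in the order
// Tx, Ty, Tz, Qx, Qy, Qz for whichever AnimationBits are set - the same indexing the total-delta
// code uses. A minimal sketch of decoding one joint's translation for a given frame, built on the
// fields above (GetFrameTranslation itself is a hypothetical helper, not a method of the original
// source):
private Vector3 GetFrameTranslation(int frame, int jointIndex)
{
	JointAnimationInfo info = _jointInfo[jointIndex];
	Vector3 t = _baseFrame[jointIndex].Translation;
	int offset = (frame * _animatedComponentCount) + info.FirstComponent;

	// animated components override the base frame value, one float per set bit.
	if((info.AnimationBits & AnimationBits.TranslationX) == AnimationBits.TranslationX)
	{
		t.X = _componentFrames[offset++];
	}

	if((info.AnimationBits & AnimationBits.TranslationY) == AnimationBits.TranslationY)
	{
		t.Y = _componentFrames[offset++];
	}

	if((info.AnimationBits & AnimationBits.TranslationZ) == AnimationBits.TranslationZ)
	{
		t.Z = _componentFrames[offset++];
	}

	return t;
}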
private bool ParseCollisionModel(idLexer lexer)
{
	CollisionModel model = new CollisionModel();
	_models[_modelCount++] = model;

	// parse the file
	idToken token = lexer.ExpectTokenType(TokenType.String, 0);
	string tokenLower;

	model.Name = token.ToString();
	lexer.ExpectTokenString("{");

	while(lexer.CheckTokenString("}") == false)
	{
		token = lexer.ReadToken();
		tokenLower = token.ToString().ToLower();

		if(tokenLower == "vertices")
		{
			ParseVertices(lexer, model);
		}
		else if(tokenLower == "edges")
		{
			ParseEdges(lexer, model);
		}
		else if(tokenLower == "nodes")
		{
			lexer.ExpectTokenString("{");
			model.Node = ParseNodes(lexer, model, null);
			lexer.ExpectTokenString("}");
		}
		else if(tokenLower == "polygons")
		{
			ParsePolygons(lexer, model);
		}
		else if(tokenLower == "brushes")
		{
			ParseBrushes(lexer, model);
		}
		else
		{
			lexer.Error("ParseCollisionModel: bad token \"{0}\"", token);
		}
	}

	// calculate edge normals
	_checkCount++;

	idConsole.Warning("TODO: CalculateEdgeNormals(model, model.Node);");

	// get model bounds from brush and polygon bounds
	model.Bounds = GetNodeBounds(model.Node);

	// get model contents
	model.Contents = GetNodeContents(model.Node);

	// total memory used by this model
	idConsole.Warning("TODO: used memory");
	/*model->usedMemory = model->numVertices * sizeof(cm_vertex_t) +
	 *	model->numEdges * sizeof(cm_edge_t) +
	 *	model->polygonMemory +
	 *	model->brushMemory +
	 *	model->numNodes * sizeof(cm_node_t) +
	 *	model->numPolygonRefs * sizeof(cm_polygonRef_t) +
	 *	model->numBrushRefs * sizeof(cm_brushRef_t);*/

	return true;
}
private bool LoadCollisionModelFile(string name, ulong mapFileCRC)
{
	// load it
	string fileName = Path.Combine(Path.GetDirectoryName(name), Path.GetFileNameWithoutExtension(name) + Extension);

	idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoDollarPrecompilation);

	if(lexer.LoadFile(fileName) == false)
	{
		return false;
	}

	idToken token;

	if(lexer.ExpectTokenString(TokenFileID) == false)
	{
		idConsole.Warning("{0} is not a CM file.", fileName);
	}
	else if(((token = lexer.ReadToken()) == null) || (token.ToString() != FileVersion))
	{
		idConsole.Warning("{0} has version {1} instead of {2}", fileName, token, FileVersion);
	}
	else if((token = lexer.ExpectTokenType(TokenType.Number, TokenSubType.Integer)) == null)
	{
		idConsole.Warning("{0} has no map file CRC", fileName);
	}
	else
	{
		ulong crc = token.ToUInt64();

		if((mapFileCRC != 0) && (crc != mapFileCRC))
		{
			idConsole.WriteLine("{0} is out of date", fileName);
		}
		else
		{
			// parse the file
			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}

				if(token.ToString().ToLower() == "collisionmodel")
				{
					if(ParseCollisionModel(lexer) == false)
					{
						return false;
					}
				}
				else
				{
					lexer.Error("idCollisionModelManagerLocal::LoadCollisionModelFile: bad token \"{0}\"", token);
				}
			}

			return true;
		}
	}

	return false;
}
public static idMapBrush ParseQ3(idLexer lexer, Vector3 origin)
{
	int rotate;
	int[] shift = new int[2];
	float[] scale = new float[2];

	Vector3[] planePoints = new Vector3[3];
	List<idMapBrushSide> sides = new List<idMapBrushSide>();
	idMapBrushSide side;
	idToken token;

	do
	{
		if(lexer.CheckTokenString("}") == true)
		{
			break;
		}

		side = new idMapBrushSide();
		sides.Add(side);

		// read the three point plane definition
		float[] tmp = lexer.Parse1DMatrix(3);
		float[] tmp2 = lexer.Parse1DMatrix(3);
		float[] tmp3 = lexer.Parse1DMatrix(3);

		if((tmp == null) || (tmp2 == null) || (tmp3 == null))
		{
			lexer.Error("idMapBrush::ParseQ3: unable to read brush side plane definition");
			return null;
		}

		planePoints[0] = new Vector3(tmp[0], tmp[1], tmp[2]) - origin;
		planePoints[1] = new Vector3(tmp2[0], tmp2[1], tmp2[2]) - origin;
		planePoints[2] = new Vector3(tmp3[0], tmp3[1], tmp3[2]) - origin;

		side.Plane.FromPoints(planePoints[0], planePoints[1], planePoints[2]);

		// read the material
		token = lexer.ReadTokenOnLine();

		if(token == null)
		{
			lexer.Error("idMapBrush::ParseQ3: unable to read brush side material");
			return null;
		}

		// we have an implicit 'textures/' in the old format
		side.Material = "textures/" + token.ToString();

		// read the texture shift, rotate and scale
		shift[0] = lexer.ParseInt();
		shift[1] = lexer.ParseInt();

		rotate = lexer.ParseInt();

		scale[0] = lexer.ParseFloat();
		scale[1] = lexer.ParseFloat();

		side.TextureMatrix[0] = new Vector3(0.03125f, 0.0f, 0.0f);
		side.TextureMatrix[1] = new Vector3(0.0f, 0.03125f, 0.0f);
		side.Origin = origin;

		// Q2 allowed override of default flags and values, but we don't any more
		if(lexer.ReadTokenOnLine() != null)
		{
			if(lexer.ReadTokenOnLine() != null)
			{
				if(lexer.ReadTokenOnLine() != null)
				{
				}
			}
		}
	}
	while(true);

	idMapBrush brush = new idMapBrush();

	for(int i = 0; i < sides.Count; i++)
	{
		brush.AddSide(sides[i]);
	}

	brush.Dict = new idDict();

	return brush;
}
public static idMapBrush Parse(idLexer lexer, Vector3 origin, bool newFormat = true, float version = idMapFile.CurrentMapVersion) { idToken token; idMapBrushSide side; List <idMapBrushSide> sides = new List <idMapBrushSide>(); idDict dict = new idDict(); Vector3[] planePoints = new Vector3[3]; if (lexer.ExpectTokenString("{") == false) { return(null); } do { if ((token = lexer.ReadToken()) == null) { lexer.Error("idMapBrush::Parse: unexpected EOF"); return(null); } if (token.ToString() == "}") { break; } // here we may have to jump over brush epairs ( only used in editor ) do { // if token is a brace if (token.ToString() == "(") { break; } // the token should be a key string for a key/value pair if (token.Type != TokenType.String) { lexer.Error("idMapBrush::Parse: unexpected {0}, expected ( or epair key string", token.ToString()); return(null); } string key = token.ToString(); if (((token = lexer.ReadTokenOnLine()) == null) || (token.Type != TokenType.String)) { lexer.Error("idMapBrush::Parse: expected epair value string not found"); return(null); } dict.Set(key, token.ToString()); // try to read the next key if ((token = lexer.ReadToken()) == null) { lexer.Error("idMapBrush::Parse: unexpected EOF"); return(null); } }while(true); lexer.UnreadToken = token; side = new idMapBrushSide(); sides.Add(side); if (newFormat == true) { float[] tmp = lexer.Parse1DMatrix(4); if (tmp == null) { lexer.Error("idMapBrush::Parse: unable to read brush side plane definition"); return(null); } else { side.Plane = new Plane(tmp[0], tmp[1], tmp[2], tmp[3]); } } else { // read the three point plane definition float[] tmp, tmp2, tmp3; if (((tmp = lexer.Parse1DMatrix(3)) == null) || ((tmp2 = lexer.Parse1DMatrix(3)) == null) || ((tmp3 = lexer.Parse1DMatrix(3)) == null)) { lexer.Error("idMapBrush::Parse: unable to read brush side plane definition"); return(null); } planePoints[0] = new Vector3(tmp[0], tmp[1], tmp[2]) - origin; planePoints[1] = new Vector3(tmp2[0], tmp2[1], tmp2[2]) - origin; planePoints[2] = new Vector3(tmp3[0], tmp3[1], tmp3[2]) - origin; side.Plane.FromPoints(planePoints[0], planePoints[1], planePoints[2]); } // read the texture matrix // this is odd, because the texmat is 2D relative to default planar texture axis float[,] tmp5 = lexer.Parse2DMatrix(2, 3); if (tmp5 == null) { lexer.Error("idMapBrush::Parse: unable to read brush side texture matrix"); return(null); } side.TextureMatrix[0] = new Vector3(tmp5[0, 0], tmp5[0, 1], tmp5[0, 2]); side.TextureMatrix[1] = new Vector3(tmp5[1, 0], tmp5[1, 1], tmp5[1, 2]); side.Origin = origin; // read the material if ((token = lexer.ReadTokenOnLine()) == null) { lexer.Error("idMapBrush::Parse: unable to read brush side material"); return(null); } // we had an implicit 'textures/' in the old format... if (version < 2.0f) { side.Material = "textures/" + token.ToString(); } else { side.Material = token.ToString(); } // Q2 allowed override of default flags and values, but we don't any more if (lexer.ReadTokenOnLine() != null) { if (lexer.ReadTokenOnLine() != null) { if (lexer.ReadTokenOnLine() != null) { } } } }while(true); if (lexer.ExpectTokenString("}") == false) { return(null); } idMapBrush brush = new idMapBrush(); foreach (idMapBrushSide s in sides) { brush.AddSide(s); } brush.Dict = dict; return(brush); }
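// Both brush parsers above derive a plane from a three-point definition via Plane.FromPoints,
// which is not shown in this excerpt. A minimal sketch of that construction (the winding/sign
// convention and the Plane(normal, d) constructor are assumptions; the engine's own helper may
// orient the normal differently):
public static Plane PlaneFromPoints(Vector3 p0, Vector3 p1, Vector3 p2)
{
	// the plane normal is the normalized cross product of two edge vectors.
	Vector3 normal = Vector3.Normalize(Vector3.Cross(p1 - p0, p2 - p0));

	// with the plane equation normal . x + d = 0, d = -(normal . point on plane).
	float d = -Vector3.Dot(normal, p0);

	return new Plane(normal, d);
}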
public static idMapPatch Parse(idLexer lexer, Vector3 origin, bool patchDef3 = true, float version = idMapFile.CurrentMapVersion) { if (lexer.ExpectTokenString("{") == false) { return(null); } // read the material (we had an implicit 'textures/' in the old format...) idToken token = lexer.ReadToken(); if (token == null) { lexer.Error("idMapPatch::Parse: unexpected EOF"); return(null); } // Parse it float[] info; if (patchDef3 == true) { info = lexer.Parse1DMatrix(7); if (info == null) { lexer.Error("idMapPatch::Parse: unable to Parse patchDef3 info"); return(null); } } else { info = lexer.Parse1DMatrix(5); if (info == null) { lexer.Error("idMapPatch::Parse: unable to parse patchDef2 info"); return(null); } } idMapPatch patch = new idMapPatch((int)info[0], (int)info[1]); if (version < 2.0f) { patch.Material = "textures/" + token.ToString(); } else { patch.Material = token.ToString(); } if (patchDef3 == true) { patch.HorizontalSubdivisions = (int)info[2]; patch.VerticalSubdivisions = (int)info[3]; patch.ExplicitlySubdivided = true; } if ((patch.Width < 0) || (patch.Height < 0)) { lexer.Error("idMapPatch::Parse: bad size"); return(null); } // these were written out in the wrong order, IMHO if (lexer.ExpectTokenString("(") == false) { lexer.Error("idMapPatch::Parse: bad patch vertex data"); return(null); } for (int j = 0; j < patch.Width; j++) { if (lexer.ExpectTokenString("(") == false) { lexer.Error("idMapPatch::Parse: bad vertex row data"); return(null); } for (int i = 0; i < patch.Height; i++) { float[] v = lexer.Parse1DMatrix(5); if (v == null) { lexer.Error("idMapPatch::Parse: bad vertex column data"); return(null); } Vertex vert = new Vertex(); vert.Position.X = v[0] - origin.X; vert.Position.Y = v[1] - origin.Y; vert.Position.Z = v[2] - origin.Z; vert.TextureCoordinates = new Vector2(v[3], v[4]); patch.SetVertex(i * patch.Width + j, vert); } if (lexer.ExpectTokenString(")") == false) { lexer.Error("idMapPatch::Parse: unable to parse patch control points"); return(null); } } if (lexer.ExpectTokenString(")") == false) { lexer.Error("idMapPatch::Parse: unable to parse patch control points, no closure"); return(null); } // read any key/value pairs while ((token = lexer.ReadToken()) != null) { if (token.ToString() == "}") { lexer.ExpectTokenString("}"); break; } if (token.Type == TokenType.String) { string key = token.ToString(); token = lexer.ExpectTokenType(TokenType.String, 0); patch.Dict.Set(key, token.ToString()); } } return(patch); }
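// The vertex loops above read the patch column by column (outer loop over Width, inner over
// Height) but store row-major at (i * patch.Width + j), which is presumably what the "written
// out in the wrong order" comment refers to. A small hypothetical helper (not part of the
// original source) that makes the addressing explicit:
private static int GetPatchVertexIndex(idMapPatch patch, int row, int column)
{
	// row-major storage: one row of patch.Width vertices after another.
	return (row * patch.Width) + column;
}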
private bool ParseAnimation(idLexer lexer, int defaultAnimCount) { List <idMD5Anim> md5anims = new List <idMD5Anim>(); idMD5Anim md5anim; idAnim anim; AnimationFlags flags = new AnimationFlags(); idToken token; idToken realName = lexer.ReadToken(); if (realName == null) { lexer.Warning("Unexpected end of file"); MakeDefault(); return(false); } string alias = realName.ToString(); int i; int count = _anims.Count; for (i = 0; i < count; i++) { if (_anims[i].FullName.Equals(alias, StringComparison.OrdinalIgnoreCase) == true) { break; } } if ((i < count) && (i >= defaultAnimCount)) { lexer.Warning("Duplicate anim '{0}'", realName); MakeDefault(); return(false); } if (i < defaultAnimCount) { anim = _anims[i]; } else { // create the alias associated with this animation anim = new idAnim(); _anims.Add(anim); } // random anims end with a number. find the numeric suffix of the animation. int len = alias.Length; for (i = len - 1; i > 0; i--) { if (Char.IsNumber(alias[i]) == false) { break; } } // check for zero length name, or a purely numeric name if (i <= 0) { lexer.Warning("Invalid animation name '{0}'", alias); MakeDefault(); return(false); } // remove the numeric suffix alias = alias.Substring(0, i + 1); // parse the anims from the string do { if ((token = lexer.ReadToken()) == null) { lexer.Warning("Unexpected end of file"); MakeDefault(); return(false); } // lookup the animation md5anim = idR.AnimManager.GetAnimation(token.ToString()); if (md5anim == null) { lexer.Warning("Couldn't load anim '{0}'", token); return(false); } md5anim.CheckModelHierarchy(_model); if (md5anims.Count > 0) { // make sure it's the same length as the other anims if (md5anim.Length != md5anims[0].Length) { lexer.Warning("Anim '{0}' does not match length of anim '{1}'", md5anim.Name, md5anims[0].Name); MakeDefault(); return(false); } } // add it to our list md5anims.Add(md5anim); }while(lexer.CheckTokenString(",") == true); if (md5anims.Count == 0) { lexer.Warning("No animation specified"); MakeDefault(); return(false); } anim.SetAnimation(this, realName.ToString(), alias, md5anims.ToArray()); // parse any frame commands or animflags if (lexer.CheckTokenString("{") == true) { while (true) { if ((token = lexer.ReadToken()) == null) { lexer.Warning("Unexpected end of file"); MakeDefault(); return(false); } string tokenValue = token.ToString(); if (tokenValue == "}") { break; } else if (tokenValue == "prevent_idle_override") { flags.PreventIdleOverride = true; } else if (tokenValue == "random_cycle_start") { flags.RandomCycleStart = true; } else if (tokenValue == "ai_no_turn") { flags.AINoTurn = true; } else if (tokenValue == "anim_turn") { flags.AnimationTurn = true; } else if (tokenValue == "frame") { // create a frame command int frameIndex; string err; // make sure we don't have any line breaks while reading the frame command so the error line # will be correct if ((token = lexer.ReadTokenOnLine()) == null) { lexer.Warning("Missing frame # after 'frame'"); MakeDefault(); return(false); } else if ((token.Type == TokenType.Punctuation) && (token.ToString() == "-")) { lexer.Warning("Invalid frame # after 'frame'"); MakeDefault(); return(false); } else if ((token.Type != TokenType.Number) || (token.SubType == TokenSubType.Float)) { lexer.Error("expected integer value, found '{0}'", token); } // get the frame number frameIndex = token.ToInt32(); // put the command on the specified frame of the animation if ((err = anim.AddFrameCommand(this, frameIndex, lexer, null)) != null) { lexer.Warning(err.ToString()); MakeDefault(); 
return(false); } } else { lexer.Warning("Unknown command '{0}'", token); MakeDefault(); return(false); } } } // set the flags anim.Flags = flags; return(true); }
private idParticleStage ParseParticleStage(idLexer lexer)
{
	idToken token;
	string tokenLower;

	idParticleStage stage = new idParticleStage();
	stage.Default();

	while(true)
	{
		if(lexer.HadError == true)
		{
			break;
		}
		else if((token = lexer.ReadToken()) == null)
		{
			break;
		}
		else
		{
			tokenLower = token.ToString().ToLower();

			if(tokenLower == "}")
			{
				break;
			}
			else if(tokenLower == "material")
			{
				token = lexer.ReadToken();
				stage.Material = idE.DeclManager.FindMaterial(token.ToString());
			}
			else if(tokenLower == "count") { stage.TotalParticles = lexer.ParseInt(); }
			else if(tokenLower == "time") { stage.ParticleLife = lexer.ParseFloat(); }
			else if(tokenLower == "cycles") { stage.Cycles = lexer.ParseFloat(); }
			else if(tokenLower == "timeoffset") { stage.TimeOffset = lexer.ParseFloat(); }
			else if(tokenLower == "deadtime") { stage.DeadTime = lexer.ParseFloat(); }
			else if(tokenLower == "randomdistribution") { stage.RandomDistribution = lexer.ParseBool(); }
			else if(tokenLower == "bunching") { stage.SpawnBunching = lexer.ParseFloat(); }
			else if(tokenLower == "distribution")
			{
				token = lexer.ReadToken();
				tokenLower = token.ToString().ToLower();

				if(tokenLower == "rect") { stage.Distribution = ParticleDistribution.Rectangle; }
				else if(tokenLower == "cylinder") { stage.Distribution = ParticleDistribution.Cyclinder; }
				else if(tokenLower == "sphere") { stage.Distribution = ParticleDistribution.Sphere; }
				else { lexer.Error("bad distribution type: {0}", token.ToString()); }

				stage.DistributionParameters = ParseParams(lexer, stage.DistributionParameters.Length);
			}
			else if(tokenLower == "direction")
			{
				token = lexer.ReadToken();
				tokenLower = token.ToString().ToLower();

				if(tokenLower == "cone") { stage.Direction = ParticleDirection.Cone; }
				else if(tokenLower == "outward") { stage.Direction = ParticleDirection.Outward; }
				else { lexer.Error("bad direction type: {0}", token.ToString()); }

				stage.DirectionParameters = ParseParams(lexer, stage.DirectionParameters.Length);
			}
			else if(tokenLower == "orientation")
			{
				token = lexer.ReadToken();
				tokenLower = token.ToString().ToLower();

				if(tokenLower == "view") { stage.Orientation = ParticleOrientation.View; }
				else if(tokenLower == "aimed") { stage.Orientation = ParticleOrientation.Aimed; }
				else if(tokenLower == "x") { stage.Orientation = ParticleOrientation.X; }
				else if(tokenLower == "y") { stage.Orientation = ParticleOrientation.Y; }
				else if(tokenLower == "z") { stage.Orientation = ParticleOrientation.Z; }
				else { lexer.Error("bad orientation type: {0}", token.ToString()); }

				stage.OrientationParameters = ParseParams(lexer, stage.OrientationParameters.Length);
			}
			else if(tokenLower == "custompath")
			{
				token = lexer.ReadToken();
				tokenLower = token.ToString().ToLower();

				if(tokenLower == "standard") { stage.CustomPath = ParticleCustomPath.Standard; }
				else if(tokenLower == "helix") { stage.CustomPath = ParticleCustomPath.Helix; }
				else if(tokenLower == "flies") { stage.CustomPath = ParticleCustomPath.Flies; }
				else if(tokenLower == "spherical") { stage.CustomPath = ParticleCustomPath.Orbit; }
				else { lexer.Error("bad path type: {0}", token.ToString()); }

				stage.CustomPathParameters = ParseParams(lexer, stage.CustomPathParameters.Length);
			}
			else if(tokenLower == "speed") { ParseParametric(lexer, stage.Speed); }
			else if(tokenLower == "rotation") { ParseParametric(lexer, stage.RotationSpeed); }
			else if(tokenLower == "angle") { stage.InitialAngle = lexer.ParseFloat(); }
			else if(tokenLower == "entitycolor") { stage.EntityColor = lexer.ParseBool(); }
			else if(tokenLower == "size") { ParseParametric(lexer, stage.Size); }
			else if(tokenLower == "aspect") { ParseParametric(lexer, stage.Aspect); }
			else if(tokenLower == "fadein") { stage.FadeInFraction = lexer.ParseFloat(); }
			else if(tokenLower == "fadeout") { stage.FadeOutFraction = lexer.ParseFloat(); }
			else if(tokenLower == "fadeindex") { stage.FadeIndexFraction = lexer.ParseFloat(); }
			else if(tokenLower == "color") { stage.Color = new Vector4(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat()); }
			else if(tokenLower == "fadecolor") { stage.FadeColor = new Vector4(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat()); }
			else if(tokenLower == "offset") { stage.Offset = new Vector3(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat()); }
			else if(tokenLower == "animationframes") { stage.AnimationFrames = lexer.ParseInt(); }
			else if(tokenLower == "animationrate") { stage.AnimationRate = lexer.ParseFloat(); }
			else if(tokenLower == "boundsexpansion") { stage.BoundsExpansion = lexer.ParseFloat(); }
			else if(tokenLower == "gravity")
			{
				token = lexer.ReadToken();
				tokenLower = token.ToString().ToLower();

				if(tokenLower == "world") { stage.WorldGravity = true; }
				else { lexer.UnreadToken = token; }

				stage.Gravity = lexer.ParseFloat();
			}
			else { lexer.Error("unknown token {0}", token.ToString()); }
		}
	}

	// derive values; cycle time is (particle life + dead time) converted to milliseconds.
	stage.CycleTime = (int) ((stage.ParticleLife + stage.DeadTime) * 1000);

	return stage;
}
public void Parse(idLexer lexer, idJointMatrix[] joints) { lexer.ExpectTokenString("{"); // // parse name // if (lexer.CheckTokenString("name") == true) { lexer.ReadToken(); } // // parse shader // lexer.ExpectTokenString("shader"); idToken token = lexer.ReadToken(); string materialName = token.ToString(); _material = idE.DeclManager.FindMaterial(materialName); // // parse texture coordinates // lexer.ExpectTokenString("numverts"); int count = lexer.ParseInt(); if (count < 0) { lexer.Error("Invalid size: {0}", token.ToString()); } _texCoords = new Vector2[count]; int[] firstWeightForVertex = new int[count]; int[] weightCountForVertex = new int[count]; int maxWeight = 0; int coordCount = _texCoords.Length; _weightCount = 0; for (int i = 0; i < coordCount; i++) { lexer.ExpectTokenString("vert"); lexer.ParseInt(); float[] tmp = lexer.Parse1DMatrix(2); _texCoords[i] = new Vector2(tmp[0], tmp[1]); firstWeightForVertex[i] = lexer.ParseInt(); weightCountForVertex[i] = lexer.ParseInt(); if (weightCountForVertex[i] == 0) { lexer.Error("Vertex without any joint weights."); } _weightCount += weightCountForVertex[i]; if ((weightCountForVertex[i] + firstWeightForVertex[i]) > maxWeight) { maxWeight = weightCountForVertex[i] + firstWeightForVertex[i]; } } // // parse tris // lexer.ExpectTokenString("numtris"); _triangleCount = lexer.ParseInt(); if (_triangleCount < 0) { lexer.Error("Invalid size: {0}", _triangleCount); } int[] tris = new int[_triangleCount * 3]; for (int i = 0; i < _triangleCount; i++) { lexer.ExpectTokenString("tri"); lexer.ParseInt(); tris[i * 3 + 0] = lexer.ParseInt(); tris[i * 3 + 1] = lexer.ParseInt(); tris[i * 3 + 2] = lexer.ParseInt(); } // // parse weights // lexer.ExpectTokenString("numweights"); count = lexer.ParseInt(); if (count < 0) { lexer.Error("Invalid size: {0}", count); } if (maxWeight > count) { lexer.Warning("Vertices reference out of range weights in model ({0} of {1} weights).", maxWeight, count); } VertexWeight[] tempWeights = new VertexWeight[count]; for (int i = 0; i < count; i++) { lexer.ExpectTokenString("weight"); lexer.ParseInt(); int jointIndex = lexer.ParseInt(); if ((jointIndex < 0) || (jointIndex >= joints.Length)) { lexer.Error("Joint index out of range({0}): {1}", joints.Length, jointIndex); } tempWeights[i].JointIndex = jointIndex; tempWeights[i].JointWeight = lexer.ParseFloat(); float[] tmp = lexer.Parse1DMatrix(3); tempWeights[i].Offset = new Vector3(tmp[0], tmp[1], tmp[2]); } // create pre-scaled weights and an index for the vertex/joint lookup _scaledWeights = new Vector4[_weightCount]; _weightIndex = new int[_weightCount * 2]; count = 0; coordCount = _texCoords.Length; for (int i = 0; i < coordCount; i++) { int num = firstWeightForVertex[i]; int weightCount = weightCountForVertex[i]; for (int j = 0; j < weightCount; j++, num++, count++) { Vector3 tmp = tempWeights[num].Offset * tempWeights[num].JointWeight; _scaledWeights[count].X = tmp.X; _scaledWeights[count].Y = tmp.Y; _scaledWeights[count].Z = tmp.Z; _scaledWeights[count].W = tempWeights[num].JointWeight; _weightIndex[count * 2 + 0] = tempWeights[num].JointIndex; } _weightIndex[count * 2 - 1] = 1; } lexer.ExpectTokenString("}"); // update counters idConsole.Warning("TODO: idRenderModel_MD5 update counters"); /*c_numVerts += texCoords.Num(); * c_numWeights += numWeights; * c_numWeightJoints++; * for ( i = 0; i < numWeights; i++ ) { * c_numWeightJoints += weightIndex[i*2+1]; * }*/ // // build the information that will be common to all animations of this mesh: // silhouette edge connectivity and 
normal / tangent generation information // Vertex[] verts = new Vertex[_texCoords.Length]; int vertCount = verts.Length; for (int i = 0; i < vertCount; i++) { verts[i].TextureCoordinates = _texCoords[i]; } TransformVertices(verts, joints); idConsole.Warning("TODO: idMD5Mesh Deform"); //_deformInfo = idE.RenderSystem.BuildDeformInformation(verts, tris, _material.UseUnsmoothedTangents); }
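// TransformVertices is called above but not shown in this excerpt. Given the layout built in
// Parse - _scaledWeights holding (offset * weight) in XYZ and the weight itself in W, and
// _weightIndex holding [jointIndex, lastWeightFlag] pairs - the skinning step accumulates each
// vertex from its weights. A minimal sketch under those assumptions (idJointMatrix.Rotation and
// .Translation are used as in Load above; Vertex.Position is an assumed field name):
private void TransformVertices(Vertex[] verts, idJointMatrix[] joints)
{
	int weight = 0;
	int vertCount = verts.Length;

	for(int i = 0; i < vertCount; i++)
	{
		Vector3 position = Vector3.Zero;
		bool lastWeight;

		// accumulate joint contributions until the "last weight for this vertex" flag is set.
		do
		{
			int jointIndex = _weightIndex[(weight * 2) + 0];
			Vector4 w = _scaledWeights[weight];

			// rotate the pre-scaled offset by the joint and add the joint origin scaled by the weight.
			position += Vector3.Transform(new Vector3(w.X, w.Y, w.Z), joints[jointIndex].Rotation)
				+ (joints[jointIndex].Translation * w.W);

			lastWeight = (_weightIndex[(weight * 2) + 1] != 0);
			weight++;
		}
		while(lastWeight == false);

		verts[i].Position = position;
	}
}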
public static idMapEntity Parse(idLexer lexer, bool isWordSpawn = false, float version = idMapFile.CurrentMapVersion) { idToken token; if ((token = lexer.ReadToken()) == null) { return(null); } if (token.ToString() != "{") { lexer.Error("idMapEntity.Parse: {{ not found, found {0}", token.ToString()); return(null); } idMapEntity mapEnt = new idMapEntity(); idMapBrush mapBrush = null; idMapPatch mapPatch = null; Vector3 origin = Vector3.Zero; bool worldEnt = false; string tokenValue; do { if ((token = lexer.ReadToken()) == null) { lexer.Error("idMapEntity.Parse: EOF without closing brace"); return(null); } if (token.ToString() == "}") { break; } if (token.ToString() == "{") { // parse a brush or patch if ((token = lexer.ReadToken()) == null) { lexer.Error("idMapEntity.Parse: unexpected EOF"); return(null); } if (worldEnt == true) { origin = Vector3.Zero; } tokenValue = token.ToString(); // if is it a brush: brush, brushDef, brushDef2, brushDef3 if (tokenValue.StartsWith("brush", StringComparison.OrdinalIgnoreCase) == true) { mapBrush = idMapBrush.Parse(lexer, origin, (tokenValue.Equals("brushDef2", StringComparison.OrdinalIgnoreCase) || tokenValue.Equals("brushDef3", StringComparison.OrdinalIgnoreCase)), version); if (mapBrush == null) { return(null); } mapEnt.AddPrimitive(mapBrush); } // if is it a patch: patchDef2, patchDef3 else if (tokenValue.StartsWith("patch", StringComparison.OrdinalIgnoreCase) == true) { mapPatch = idMapPatch.Parse(lexer, origin, tokenValue.Equals("patchDef3", StringComparison.OrdinalIgnoreCase), version); if (mapPatch == null) { return(null); } mapEnt.AddPrimitive(mapPatch); } // assume it's a brush in Q3 or older style else { lexer.UnreadToken = token; mapBrush = idMapBrush.ParseQ3(lexer, origin); if (mapBrush == null) { return(null); } mapEnt.AddPrimitive(mapBrush); } } else { // parse a key / value pair string key = token.ToString(); token = lexer.ReadTokenOnLine(); string value = token.ToString(); // strip trailing spaces that sometimes get accidentally added in the editor value = value.Trim(); key = key.Trim(); mapEnt.Dict.Set(key, value); if (key.Equals("origin", StringComparison.OrdinalIgnoreCase) == true) { // scanf into doubles, then assign, so it is idVec size independent string[] parts = value.Split(' '); float.TryParse(parts[0], out origin.X); float.TryParse(parts[1], out origin.Y); float.TryParse(parts[2], out origin.Z); } else if ((key.Equals("classname", StringComparison.OrdinalIgnoreCase) == true) && (value.Equals("worldspawn", StringComparison.OrdinalIgnoreCase) == true)) { worldEnt = true; } } }while(true); return(mapEnt); }