/// <summary>
/// Reads the "brushes" section of a collision model file and filters each
/// parsed brush into the model's BSP tree.
/// </summary>
/// <param name="lexer">Lexer positioned at the brushes block.</param>
/// <param name="model">Collision model receiving the brushes.</param>
private void ParseBrushes(idLexer lexer, CollisionModel model)
{
	// Optional leading numeric token (brush count in newer .cm files) — consume if present.
	idToken token = lexer.CheckTokenType(TokenType.Number, 0);

	lexer.ExpectTokenString("{");

	while (lexer.CheckTokenString("}") == false)
	{
		// parse brush
		int numPlanes = lexer.ParseInt();

		CollisionModelBrush brush = new CollisionModelBrush();
		brush.Contents = ContentFlags.All;
		brush.Material = _traceModelMaterial;
		brush.Planes = new Plane[numPlanes];

		lexer.ExpectTokenString("{");

		for (int planeIndex = 0; planeIndex < numPlanes; planeIndex++)
		{
			float[] normal = lexer.Parse1DMatrix(3);

			brush.Planes[planeIndex].Normal = new Vector3(normal[0], normal[1], normal[2]);
			brush.Planes[planeIndex].D = lexer.ParseFloat();
		}

		lexer.ExpectTokenString("}");

		// axis-aligned bounds follow the plane list
		float[] boundsMin = lexer.Parse1DMatrix(3);
		brush.Bounds.Min = new Vector3(boundsMin[0], boundsMin[1], boundsMin[2]);

		float[] boundsMax = lexer.Parse1DMatrix(3);
		brush.Bounds.Max = new Vector3(boundsMax[0], boundsMax[1], boundsMax[2]);

		token = lexer.ReadToken();

		if (token.Type == TokenType.Number)
		{
			// old .cm files use a single integer
			brush.Contents = (ContentFlags) token.ToInt32();
		}
		else
		{
			brush.Contents = ContentsFromString(token.ToString());
		}

		brush.CheckCount = 0;
		brush.PrimitiveCount = 0;

		// filter brush into tree
		FilterBrushIntoTree(model, model.Node, brush);
	}
}
/// <summary>
/// Reads the "polygons" section of a collision model file and filters each
/// parsed polygon into the model's BSP tree.
/// </summary>
/// <param name="lexer">Lexer positioned at the polygons block.</param>
/// <param name="model">Collision model receiving the polygons.</param>
private void ParsePolygons(idLexer lexer, CollisionModel model)
{
	// Optional leading numeric token (polygon count in newer .cm files) — consume if present.
	idToken token = lexer.CheckTokenType(TokenType.Number, 0);

	lexer.ExpectTokenString("{");

	while (lexer.CheckTokenString("}") == false)
	{
		// parse polygon
		int edgeCount = lexer.ParseInt();

		CollisionModelPolygon polygon = new CollisionModelPolygon();
		polygon.Material = _traceModelMaterial;
		polygon.Contents = ContentFlags.All;
		polygon.Edges = new int[edgeCount];

		lexer.ExpectTokenString("(");

		for (int edgeIndex = 0; edgeIndex < edgeCount; edgeIndex++)
		{
			polygon.Edges[edgeIndex] = lexer.ParseInt();
		}

		lexer.ExpectTokenString(")");

		// polygon plane
		float[] planeNormal = lexer.Parse1DMatrix(3);
		polygon.Plane.Normal = new Vector3(planeNormal[0], planeNormal[1], planeNormal[2]);
		polygon.Plane.D = lexer.ParseFloat();

		// axis-aligned bounds
		float[] boundsMin = lexer.Parse1DMatrix(3);
		polygon.Bounds.Min = new Vector3(boundsMin[0], boundsMin[1], boundsMin[2]);

		float[] boundsMax = lexer.Parse1DMatrix(3);
		polygon.Bounds.Max = new Vector3(boundsMax[0], boundsMax[1], boundsMax[2]);

		// get material; contents come from the material's declaration
		token = lexer.ExpectTokenType(TokenType.String, 0);

		polygon.Material = idE.DeclManager.FindMaterial(token.ToString());
		polygon.Contents = polygon.Material.ContentFlags;
		polygon.CheckCount = 0;

		// filter polygon into tree
		FilterPolygonIntoTree(model, model.Node, polygon);
	}
}
/// <summary>
/// Parses one mesh block of an MD5 model file: shader, vertices (texture
/// coordinates plus weight references), triangles and joint weights, then
/// builds the pre-scaled weight/index tables and transforms the vertices.
/// </summary>
/// <param name="lexer">Lexer positioned at the opening "{" of the mesh block.</param>
/// <param name="joints">Joint matrices used to transform the parsed vertices.</param>
public void Parse(idLexer lexer, idJointMatrix[] joints)
{
	lexer.ExpectTokenString("{");

	//
	// parse name (optional)
	//
	if (lexer.CheckTokenString("name") == true)
	{
		lexer.ReadToken();
	}

	//
	// parse shader
	//
	lexer.ExpectTokenString("shader");

	idToken token = lexer.ReadToken();
	string materialName = token.ToString();

	_material = idE.DeclManager.FindMaterial(materialName);

	//
	// parse texture coordinates
	//
	lexer.ExpectTokenString("numverts");

	int count = lexer.ParseInt();

	if (count < 0)
	{
		// BUGFIX: report the offending vertex count; previously this printed the
		// shader token read above, matching neither numtris nor numweights below.
		lexer.Error("Invalid size: {0}", count);
	}

	_texCoords = new Vector2[count];

	int[] firstWeightForVertex = new int[count];
	int[] weightCountForVertex = new int[count];
	int maxWeight = 0;
	int coordCount = _texCoords.Length;

	_weightCount = 0;

	for (int i = 0; i < coordCount; i++)
	{
		lexer.ExpectTokenString("vert");
		lexer.ParseInt(); // vertex index (sequential; value is ignored)

		float[] tmp = lexer.Parse1DMatrix(2);
		_texCoords[i] = new Vector2(tmp[0], tmp[1]);

		firstWeightForVertex[i] = lexer.ParseInt();
		weightCountForVertex[i] = lexer.ParseInt();

		if (weightCountForVertex[i] == 0)
		{
			lexer.Error("Vertex without any joint weights.");
		}

		_weightCount += weightCountForVertex[i];

		// track the highest weight slot referenced so we can validate against numweights
		if ((weightCountForVertex[i] + firstWeightForVertex[i]) > maxWeight)
		{
			maxWeight = weightCountForVertex[i] + firstWeightForVertex[i];
		}
	}

	//
	// parse tris
	//
	lexer.ExpectTokenString("numtris");

	_triangleCount = lexer.ParseInt();

	if (_triangleCount < 0)
	{
		lexer.Error("Invalid size: {0}", _triangleCount);
	}

	int[] tris = new int[_triangleCount * 3];

	for (int i = 0; i < _triangleCount; i++)
	{
		lexer.ExpectTokenString("tri");
		lexer.ParseInt(); // triangle index (sequential; value is ignored)

		tris[i * 3 + 0] = lexer.ParseInt();
		tris[i * 3 + 1] = lexer.ParseInt();
		tris[i * 3 + 2] = lexer.ParseInt();
	}

	//
	// parse weights
	//
	lexer.ExpectTokenString("numweights");

	count = lexer.ParseInt();

	if (count < 0)
	{
		lexer.Error("Invalid size: {0}", count);
	}

	if (maxWeight > count)
	{
		lexer.Warning("Vertices reference out of range weights in model ({0} of {1} weights).", maxWeight, count);
	}

	VertexWeight[] tempWeights = new VertexWeight[count];

	for (int i = 0; i < count; i++)
	{
		lexer.ExpectTokenString("weight");
		lexer.ParseInt(); // weight index (sequential; value is ignored)

		int jointIndex = lexer.ParseInt();

		if ((jointIndex < 0) || (jointIndex >= joints.Length))
		{
			lexer.Error("Joint index out of range({0}): {1}", joints.Length, jointIndex);
		}

		tempWeights[i].JointIndex = jointIndex;
		tempWeights[i].JointWeight = lexer.ParseFloat();

		float[] tmp = lexer.Parse1DMatrix(3);
		tempWeights[i].Offset = new Vector3(tmp[0], tmp[1], tmp[2]);
	}

	// create pre-scaled weights and an index for the vertex/joint lookup
	_scaledWeights = new Vector4[_weightCount];
	_weightIndex = new int[_weightCount * 2];

	count = 0;
	coordCount = _texCoords.Length;

	for (int i = 0; i < coordCount; i++)
	{
		int num = firstWeightForVertex[i];
		int weightCount = weightCountForVertex[i];

		for (int j = 0; j < weightCount; j++, num++, count++)
		{
			// offset is pre-multiplied by the weight; W carries the weight itself
			Vector3 tmp = tempWeights[num].Offset * tempWeights[num].JointWeight;

			_scaledWeights[count].X = tmp.X;
			_scaledWeights[count].Y = tmp.Y;
			_scaledWeights[count].Z = tmp.Z;
			_scaledWeights[count].W = tempWeights[num].JointWeight;

			_weightIndex[count * 2 + 0] = tempWeights[num].JointIndex;
		}

		// mark the last weight of this vertex (odd slots default to 0 for all others)
		_weightIndex[count * 2 - 1] = 1;
	}

	lexer.ExpectTokenString("}");

	// update counters
	idConsole.Warning("TODO: idRenderModel_MD5 update counters");

	/*c_numVerts += texCoords.Num();
	* c_numWeights += numWeights;
	* c_numWeightJoints++;
	* for ( i = 0; i < numWeights; i++ ) {
	*	c_numWeightJoints += weightIndex[i*2+1];
	* }*/

	//
	// build the information that will be common to all animations of this mesh:
	// silhouette edge connectivity and normal / tangent generation information
	//
	Vertex[] verts = new Vertex[_texCoords.Length];
	int vertCount = verts.Length;

	for (int i = 0; i < vertCount; i++)
	{
		verts[i].TextureCoordinates = _texCoords[i];
	}

	TransformVertices(verts, joints);

	idConsole.Warning("TODO: idMD5Mesh Deform");
	//_deformInfo = idE.RenderSystem.BuildDeformInformation(verts, tris, _material.UseUnsmoothedTangents);
}
/// <summary>
/// Parses a single collision model block (vertices, edges, nodes, polygons,
/// brushes) from a .cm file and registers it in the model table.
/// </summary>
/// <param name="lexer">Lexer positioned at the model's name token.</param>
/// <returns>Always true.</returns>
private bool ParseCollisionModel(idLexer lexer)
{
	CollisionModel model = new CollisionModel();
	_models[_modelCount++] = model;

	// parse the file
	idToken token = lexer.ExpectTokenType(TokenType.String, 0);
	model.Name = token.ToString();

	lexer.ExpectTokenString("{");

	while (lexer.CheckTokenString("}") == false)
	{
		token = lexer.ReadToken();

		switch (token.ToString().ToLower())
		{
			case "vertices":
				ParseVertices(lexer, model);
				break;

			case "edges":
				ParseEdges(lexer, model);
				break;

			case "nodes":
				lexer.ExpectTokenString("{");
				model.Node = ParseNodes(lexer, model, null);
				lexer.ExpectTokenString("}");
				break;

			case "polygons":
				ParsePolygons(lexer, model);
				break;

			case "brushes":
				ParseBrushes(lexer, model);
				break;

			default:
				lexer.Error("ParseCollisionModel: bad token \"{0}\"", token);
				break;
		}
	}

	// calculate edge normals
	_checkCount++;

	idConsole.Warning("TODO: CalculateEdgeNormals(model, model.Node);");

	// get model bounds from brush and polygon bounds
	model.Bounds = GetNodeBounds(model.Node);

	// get model contents
	model.Contents = GetNodeContents(model.Node);

	idConsole.Warning("TODO: used memory");

	// total memory used by this model
	/*model->usedMemory = model->numVertices * sizeof(cm_vertex_t) +
	* model->numEdges * sizeof(cm_edge_t) +
	* model->polygonMemory +
	* model->brushMemory +
	* model->numNodes * sizeof(cm_node_t) +
	* model->numPolygonRefs * sizeof(cm_polygonRef_t) +
	* model->numBrushRefs * sizeof(cm_brushRef_t);*/

	return(true);
}
/// <summary>
/// Parses an old Quake 3 style brush (three-point plane definitions) into an
/// idMapBrush. Positions are made relative to <paramref name="origin"/>.
/// </summary>
/// <param name="lexer">Lexer positioned just inside the brush block.</param>
/// <param name="origin">Entity origin subtracted from every plane point.</param>
/// <returns>The parsed brush, or null on a malformed side definition.</returns>
public static idMapBrush ParseQ3(idLexer lexer, Vector3 origin)
{
	List<idMapBrushSide> sides = new List<idMapBrushSide>();
	idToken token;

	while (lexer.CheckTokenString("}") == false)
	{
		idMapBrushSide side = new idMapBrushSide();
		sides.Add(side);

		// read the three point plane definition
		float[] pointA = lexer.Parse1DMatrix(3);
		float[] pointB = lexer.Parse1DMatrix(3);
		float[] pointC = lexer.Parse1DMatrix(3);

		if ((pointA == null) || (pointB == null) || (pointC == null))
		{
			lexer.Error("idMapBrush::ParseQ3: unable to read brush side plane definition");
			return(null);
		}

		Vector3 planePointA = new Vector3(pointA[0], pointA[1], pointA[2]) - origin;
		Vector3 planePointB = new Vector3(pointB[0], pointB[1], pointB[2]) - origin;
		Vector3 planePointC = new Vector3(pointC[0], pointC[1], pointC[2]) - origin;

		side.Plane.FromPoints(planePointA, planePointB, planePointC);

		// read the material
		token = lexer.ReadTokenOnLine();

		if (token == null)
		{
			lexer.Error("idMapBrush::ParseQ3: unable to read brush side material");
			return(null);
		}

		// we have an implicit 'textures/' in the old format
		side.Material = "textures/" + token.ToString();

		// read the texture shift, rotate and scale; the values are consumed but
		// ignored — Q3 brushes always get the fixed 1/32 texture matrix below
		lexer.ParseInt();
		lexer.ParseInt();
		lexer.ParseInt();
		lexer.ParseFloat();
		lexer.ParseFloat();

		side.TextureMatrix[0] = new Vector3(0.03125f, 0.0f, 0.0f);
		side.TextureMatrix[1] = new Vector3(0.0f, 0.03125f, 0.0f);
		side.Origin = origin;

		// Q2 allowed override of default flags and values, but we don't any more
		if (lexer.ReadTokenOnLine() != null)
		{
			if (lexer.ReadTokenOnLine() != null)
			{
				if (lexer.ReadTokenOnLine() != null)
				{
				}
			}
		}
	}

	idMapBrush brush = new idMapBrush();

	foreach (idMapBrushSide parsedSide in sides)
	{
		brush.AddSide(parsedSide);
	}

	brush.Dict = new idDict();

	return(brush);
}
/// <summary>
/// Loads and parses a map file into this object's entity list.
/// </summary>
/// <remarks>
/// Normally this will use a .reg file instead of a .map file if it exists,
/// which is what the game and dmap want, but the editor will want to always
/// load a .map file.
/// </remarks>
/// <param name="fileName">Does not require an extension.</param>
/// <param name="ignoreRegion">When true, skips the .reg file and goes straight to the .map file.</param>
/// <param name="osPath">Forwarded to the lexer's file loading; presumably selects an OS path over the virtual file system — TODO confirm against idLexer.LoadFile.</param>
/// <returns>True if a file was loaded and parsed; false if neither a .reg nor a .map file could be loaded.</returns>
public bool Parse(string fileName, bool ignoreRegion = false, bool osPath = false)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	_hasPrimitiveData = false;

	// strip any extension; both candidate extensions are appended below
	_name = Path.Combine(Path.GetDirectoryName(fileName), Path.GetFileNameWithoutExtension(fileName));

	string fullName = _name;

	// no string concatenation for epairs and allow path names for materials
	idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
	idMapEntity mapEnt;

	if (ignoreRegion == false)
	{
		// try loading a .reg file first
		lexer.LoadFile(fullName + ".reg", osPath);
	}

	if (lexer.IsLoaded == false)
	{
		// now try a .map file
		lexer.LoadFile(fullName + ".map", osPath);

		if (lexer.IsLoaded == false)
		{
			// didn't get anything at all
			return(false);
		}
	}

	_version = idMapFile.OldMapVersion;
	_fileTime = lexer.FileTime;
	_entities.Clear();

	// newer map files carry an explicit version; old ones start straight with entities
	if (lexer.CheckTokenString("Version") == true)
	{
		_version = lexer.ReadTokenOnLine().ToFloat();
	}

	// parse entities until idMapEntity.Parse signals end-of-file with null
	while (true)
	{
		if ((mapEnt = idMapEntity.Parse(lexer, (_entities.Count == 0), _version)) == null)
		{
			break;
		}

		_entities.Add(mapEnt);
	}

	idConsole.Warning("TODO: SetGeometryCRC();");

	// if the map has a worldspawn
	if (_entities.Count > 0)
	{
		// "removeEntities" "classname" can be set in the worldspawn to remove all entities with the given classname
		foreach (KeyValuePair<string, string> removeEntities in _entities[0].Dict.MatchPrefix("removeEntities"))
		{
			RemoveEntities(removeEntities.Value);
		}

		// "overrideMaterial" "material" can be set in the worldspawn to reset all materials
		string material;
		int entityCount = _entities.Count;
		int primitiveCount = 0;
		int sideCount = 0;

		if ((material = (_entities[0].Dict.GetString("overrideMaterial", ""))) != string.Empty)
		{
			for (int i = 0; i < entityCount; i++)
			{
				mapEnt = _entities[i];
				primitiveCount = mapEnt.Primitives.Count;

				for (int j = 0; j < primitiveCount; j++)
				{
					idMapPrimitive mapPrimitive = mapEnt.GetPrimitive(j);

					switch (mapPrimitive.Type)
					{
						case MapPrimitiveType.Brush:
							// replace the material on every side of the brush
							idMapBrush mapBrush = (idMapBrush) mapPrimitive;
							sideCount = mapBrush.SideCount;

							for (int k = 0; k < sideCount; k++)
							{
								mapBrush.GetSide(k).Material = material;
							}
							break;

						case MapPrimitiveType.Patch:
							idConsole.Warning("TODO: PATCH");
							// TODO: ((idMapPatch) mapPrimitive).Material = material;
							break;
					}
				}
			}
		}

		// force all entities to have a name key/value pair
		if (_entities[0].Dict.GetBool("forceEntityNames") == true)
		{
			// start at 1: the worldspawn itself is skipped
			for (int i = 1; i < entityCount; i++)
			{
				mapEnt = _entities[i];

				if (mapEnt.Dict.ContainsKey("name") == false)
				{
					mapEnt.Dict.Set("name", string.Format("{0}{1}", mapEnt.Dict.GetString("classname", "forcedName"), i));
				}
			}
		}

		// move the primitives of any func_group entities to the worldspawn
		if (_entities[0].Dict.GetBool("moveFuncGroups") == true)
		{
			for (int i = 1; i < entityCount; i++)
			{
				mapEnt = _entities[i];

				if (mapEnt.Dict.GetString("classname").ToLower() == "func_group")
				{
					// the group entity is left in place but emptied of primitives
					_entities[0].Primitives.AddRange(mapEnt.Primitives);
					mapEnt.Primitives.Clear();
				}
			}
		}
	}

	_hasPrimitiveData = true;

	return(true);
}
/// <summary>
/// Parses a model declaration: inherit/skin/mesh/remove/anim/offset/channel
/// keywords, building the joint hierarchy and channel-to-joint mapping.
/// </summary>
/// <param name="text">Raw declaration text to parse.</param>
/// <returns>True on success; false (after MakeDefault) on any parse error.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	int defaultAnimationCount = 0;
	idToken token;
	idToken token2;
	string tokenValue;
	string fileName;
	string extension;
	int count;
	idMD5Joint[] md5Joints;

	while (true)
	{
		if ((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		if (tokenValue == "}")
		{
			break;
		}

		if (tokenValue == "inherit")
		{
			idConsole.WriteLine("TODO: inherit");

			/*if( !src.ReadToken( &token2 ) ) {
			*	src.Warning( "Unexpected end of file" );
			*	MakeDefault();
			*	return false;
			* }
			*
			* const idDeclModelDef *copy = static_cast<const idDeclModelDef *>( declManager->FindType( DECL_MODELDEF, token2, false ) );
			* if ( !copy ) {
			*	common->Warning( "Unknown model definition '%s'", token2.c_str() );
			* } else if ( copy->GetState() == DS_DEFAULTED ) {
			*	common->Warning( "inherited model definition '%s' defaulted", token2.c_str() );
			*	MakeDefault();
			*	return false;
			* } else {
			*	CopyDecl( copy );
			*	numDefaultAnims = anims.Num();
			* }*/
		}
		else if (tokenValue == "skin")
		{
			if ((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();
				return(false);
			}

			_skin = idE.DeclManager.FindSkin(token2.ToString());

			if (_skin == null)
			{
				lexer.Warning("Skin '{0}' not found", token2.ToString());
				MakeDefault();
				return(false);
			}
		}
		else if (tokenValue == "mesh")
		{
			if ((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();
				return(false);
			}

			fileName = token2.ToString();
			extension = Path.GetExtension(fileName);

			if (extension != idRenderModel_MD5.MeshExtension)
			{
				lexer.Warning("Invalid model for MD5 mesh");
				MakeDefault();
				return(false);
			}

			_model = idE.RenderModelManager.FindModel(fileName);

			if (_model == null)
			{
				lexer.Warning("Model '{0}' not found", fileName);
				MakeDefault();
				return(false);
			}
			else if (_model.IsDefault == true)
			{
				lexer.Warning("Model '{0}' defaulted", fileName);
				MakeDefault();
				return(false);
			}

			// get the number of joints
			count = _model.JointCount;

			if (count == 0)
			{
				lexer.Warning("Model '{0}' has no joints", fileName);
			}

			// set up the joint hierarchy; every joint starts on the "all" channel
			// and channel 0 initially contains every joint
			md5Joints = _model.Joints;

			_joints = new JointInfo[count];
			_jointParents = new int[count];
			_channelJoints = new int[(int) AnimationChannel.Count][];
			_channelJoints[0] = new int[count];

			for (int i = 0; i < count; i++)
			{
				_joints[i] = new JointInfo();
				_joints[i].Channel = AnimationChannel.All;
				_joints[i].Index = i;

				if (md5Joints[i].Parent != null)
				{
					_joints[i].ParentIndex = _model.GetJointIndex(md5Joints[i].Parent);
				}
				else
				{
					// root joint
					_joints[i].ParentIndex = -1;
				}

				_jointParents[i] = _joints[i].ParentIndex;
				_channelJoints[0][i] = i;
			}
		}
		else if (tokenValue == "remove")
		{
			idConsole.Warning("TODO: remove");

			// removes any anims whos name matches
			/*if( !src.ReadToken( &token2 ) ) {
			*	src.Warning( "Unexpected end of file" );
			*	MakeDefault();
			*	return false;
			* }
			* num = 0;
			* for( i = 0; i < anims.Num(); i++ ) {
			*	if ( ( token2 == anims[ i ]->Name() ) || ( token2 == anims[ i ]->FullName() ) ) {
			*		delete anims[ i ];
			*		anims.RemoveIndex( i );
			*		if ( i >= numDefaultAnims ) {
			*			src.Warning( "Anim '%s' was not inherited.  Anim should be removed from the model def.", token2.c_str() );
			*			MakeDefault();
			*			return false;
			*		}
			*		i--;
			*		numDefaultAnims--;
			*		num++;
			*		continue;
			*	}
			* }
			* if ( !num ) {
			*	src.Warning( "Couldn't find anim '%s' to remove", token2.c_str() );
			*	MakeDefault();
			*	return false;
			* }*/
		}
		else if (tokenValue == "anim")
		{
			if (_model == null)
			{
				lexer.Warning("Must specify mesh before defining anims");
				MakeDefault();
				return(false);
			}
			else if (ParseAnimation(lexer, defaultAnimationCount) == false)
			{
				MakeDefault();
				return(false);
			}
		}
		else if (tokenValue == "offset")
		{
			float[] tmp = lexer.Parse1DMatrix(3);

			if (tmp == null)
			{
				lexer.Warning("Expected vector following 'offset'");
				MakeDefault();
				return(false);
			}

			_offset = new Vector3(tmp[0], tmp[1], tmp[2]);
		}
		else if (tokenValue == "channel")
		{
			if (_model == null)
			{
				lexer.Warning("Must specify mesh before defining channels");
				MakeDefault();
				return(false);
			}

			// set the channel for a group of joints
			if ((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();
				return(false);
			}

			if (lexer.CheckTokenString("(") == false)
			{
				// BUGFIX: the message previously said "Expected {" although "(" is checked
				lexer.Warning("Expected ( after '{0}'", token2.ToString());
				MakeDefault();
				return(false);
			}

			// resolve the channel name (channel 0 / "all" is not assignable)
			int i;
			int channelCount = (int) AnimationChannel.Count;

			for (i = (int) AnimationChannel.All + 1; i < channelCount; i++)
			{
				if (ChannelNames[i].Equals(token2.ToString(), StringComparison.OrdinalIgnoreCase) == true)
				{
					break;
				}
			}

			if (i >= channelCount)
			{
				lexer.Warning("Unknown channel '{0}'", token2.ToString());
				MakeDefault();
				return(false);
			}

			int channel = i;

			// gather the joint name expression up to the closing ")"; the
			// "*" and "-" operators are kept glued to the following name
			StringBuilder jointNames = new StringBuilder();
			string token2Value;

			while (lexer.CheckTokenString(")") == false)
			{
				if ((token2 = lexer.ReadToken()) == null)
				{
					lexer.Warning("Unexpected end of file");
					MakeDefault();
					return(false);
				}

				token2Value = token2.ToString();
				jointNames.Append(token2Value);

				if ((token2Value != "*") && (token2Value != "-"))
				{
					jointNames.Append(" ");
				}
			}

			int[] jointList = GetJointList(jointNames.ToString());
			int jointLength = jointList.Length;

			List<int> channelJoints = new List<int>();

			for (count = i = 0; i < jointLength; i++)
			{
				int jointIndex = jointList[i];

				if (_joints[jointIndex].Channel != AnimationChannel.All)
				{
					// BUGFIX: message typo — "Join" -> "Joint"
					lexer.Warning("Joint '{0}' assigned to multiple channels", _model.GetJointName(jointIndex));
					continue;
				}

				_joints[jointIndex].Channel = (AnimationChannel) channel;
				channelJoints.Add(jointIndex);
			}

			_channelJoints[channel] = channelJoints.ToArray();
		}
		else
		{
			lexer.Warning("unknown token '{0}'", token.ToString());
			MakeDefault();
			return(false);
		}
	}

	return(true);
}
/// <summary>
/// Parses one "anim" entry of a model declaration: the animation alias, the
/// comma-separated list of MD5 animations it maps to, and an optional block
/// of frame commands and animation flags.
/// </summary>
/// <param name="lexer">Lexer positioned just after the "anim" keyword.</param>
/// <param name="defaultAnimCount">Number of anims inherited from a parent declaration; aliases below this index may be redefined without being duplicates.</param>
/// <returns>True on success; false (after MakeDefault, except for a failed anim load) on any parse error.</returns>
private bool ParseAnimation(idLexer lexer, int defaultAnimCount)
{
	List <idMD5Anim> md5anims = new List <idMD5Anim>();
	idMD5Anim md5anim;
	idAnim anim;
	AnimationFlags flags = new AnimationFlags();
	idToken token;

	idToken realName = lexer.ReadToken();

	if (realName == null)
	{
		lexer.Warning("Unexpected end of file");
		MakeDefault();
		return(false);
	}

	string alias = realName.ToString();

	// look for an existing anim with the same full name
	int i;
	int count = _anims.Count;

	for (i = 0; i < count; i++)
	{
		if (_anims[i].FullName.Equals(alias, StringComparison.OrdinalIgnoreCase) == true)
		{
			break;
		}
	}

	// a match past the inherited range is a duplicate definition
	if ((i < count) && (i >= defaultAnimCount))
	{
		lexer.Warning("Duplicate anim '{0}'", realName);
		MakeDefault();
		return(false);
	}

	if (i < defaultAnimCount)
	{
		// override an inherited anim in place
		anim = _anims[i];
	}
	else
	{
		// create the alias associated with this animation
		anim = new idAnim();
		_anims.Add(anim);
	}

	// random anims end with a number. find the numeric suffix of the animation.
	int len = alias.Length;

	for (i = len - 1; i > 0; i--)
	{
		if (Char.IsNumber(alias[i]) == false)
		{
			break;
		}
	}

	// check for zero length name, or a purely numeric name
	if (i <= 0)
	{
		lexer.Warning("Invalid animation name '{0}'", alias);
		MakeDefault();
		return(false);
	}

	// remove the numeric suffix
	alias = alias.Substring(0, i + 1);

	// parse the anims from the string
	do
	{
		if ((token = lexer.ReadToken()) == null)
		{
			lexer.Warning("Unexpected end of file");
			MakeDefault();
			return(false);
		}

		// lookup the animation
		md5anim = idR.AnimManager.GetAnimation(token.ToString());

		if (md5anim == null)
		{
			// NOTE(review): unlike the other error paths, this one returns
			// without calling MakeDefault() — presumably intentional (matches
			// the original engine), but worth confirming.
			lexer.Warning("Couldn't load anim '{0}'", token);
			return(false);
		}

		md5anim.CheckModelHierarchy(_model);

		if (md5anims.Count > 0)
		{
			// make sure it's the same length as the other anims
			if (md5anim.Length != md5anims[0].Length)
			{
				lexer.Warning("Anim '{0}' does not match length of anim '{1}'", md5anim.Name, md5anims[0].Name);
				MakeDefault();
				return(false);
			}
		}

		// add it to our list
		md5anims.Add(md5anim);
	}
	while(lexer.CheckTokenString(",") == true);

	if (md5anims.Count == 0)
	{
		lexer.Warning("No animation specified");
		MakeDefault();
		return(false);
	}

	anim.SetAnimation(this, realName.ToString(), alias, md5anims.ToArray());

	// parse any frame commands or animflags
	if (lexer.CheckTokenString("{") == true)
	{
		while (true)
		{
			if ((token = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();
				return(false);
			}

			string tokenValue = token.ToString();

			if (tokenValue == "}")
			{
				break;
			}
			else if (tokenValue == "prevent_idle_override")
			{
				flags.PreventIdleOverride = true;
			}
			else if (tokenValue == "random_cycle_start")
			{
				flags.RandomCycleStart = true;
			}
			else if (tokenValue == "ai_no_turn")
			{
				flags.AINoTurn = true;
			}
			else if (tokenValue == "anim_turn")
			{
				flags.AnimationTurn = true;
			}
			else if (tokenValue == "frame")
			{
				// create a frame command
				int frameIndex;
				string err;

				// make sure we don't have any line breaks while reading the frame command so the error line # will be correct
				if ((token = lexer.ReadTokenOnLine()) == null)
				{
					lexer.Warning("Missing frame # after 'frame'");
					MakeDefault();
					return(false);
				}
				else if ((token.Type == TokenType.Punctuation) && (token.ToString() == "-"))
				{
					lexer.Warning("Invalid frame # after 'frame'");
					MakeDefault();
					return(false);
				}
				else if ((token.Type != TokenType.Number) || (token.SubType == TokenSubType.Float))
				{
					lexer.Error("expected integer value, found '{0}'", token);
				}

				// get the frame number
				frameIndex = token.ToInt32();

				// put the command on the specified frame of the animation
				if ((err = anim.AddFrameCommand(this, frameIndex, lexer, null)) != null)
				{
					lexer.Warning(err.ToString());
					MakeDefault();
					return(false);
				}
			}
			else
			{
				lexer.Warning("Unknown command '{0}'", token);
				MakeDefault();
				return(false);
			}
		}
	}

	// set the flags
	anim.Flags = flags;

	return(true);
}