ExpectTokenType() public method

public ExpectTokenType ( TokenType type, TokenSubType subType ) : idToken
type TokenType
subType TokenSubType
return idToken
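
ExpectTokenType reads the next token from the source and checks it against the given type and subtype; it returns the matched idToken, and the examples below treat a null return as failure. A minimal usage sketch, assuming the lexer options shown in Example #4 and a hypothetical file path:

		idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoDollarPrecompilation);

		// "maps/example.cm" is a hypothetical path used only for illustration
		if(lexer.LoadFile("maps/example.cm") == true)
		{
			// expect a string token; a subType of 0 imposes no extra subtype requirement
			idToken token = lexer.ExpectTokenType(TokenType.String, 0);

			if(token != null)
			{
				string value = token.ToString();
			}
		}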
Example #1
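This example parses an idMapPatch from map source; at the end of the method, ExpectTokenType(TokenType.String, 0) reads the value of each string key/value pair.
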
		public static idMapPatch Parse(idLexer lexer, Vector3 origin, bool patchDef3 = true, float version = idMapFile.CurrentMapVersion)
		{
			if(lexer.ExpectTokenString("{") == false)
			{
				return null;
			}

			// read the material (we had an implicit 'textures/' in the old format...)
			idToken token = lexer.ReadToken();

			if(token == null)
			{
				lexer.Error("idMapPatch::Parse: unexpected EOF");
				return null;
			}

			// Parse it
			float[] info;

			if(patchDef3 == true)
			{
				info = lexer.Parse1DMatrix(7);

				if(info == null)
				{
					lexer.Error("idMapPatch::Parse: unable to Parse patchDef3 info");
					return null;
				}
			} 
			else 
			{
				info = lexer.Parse1DMatrix(5);

				if(info == null)
				{
					lexer.Error("idMapPatch::Parse: unable to parse patchDef2 info");
					return null;
				}
			}

			idMapPatch patch = new idMapPatch((int) info[0], (int) info[1]);

			if(version < 2.0f)
			{
				patch.Material = "textures/" + token.ToString();
			}
			else
			{
				patch.Material = token.ToString();
			}

			if(patchDef3 == true)
			{
				patch.HorizontalSubdivisions = (int) info[2];
				patch.VerticalSubdivisions = (int) info[3];
				patch.ExplicitlySubdivided = true;
			}

			if((patch.Width < 0) || (patch.Height < 0))
			{
				lexer.Error("idMapPatch::Parse: bad size");
				return null;
			}

			// these were written out in the wrong order, IMHO
			if(lexer.ExpectTokenString("(") == false)
			{
				lexer.Error("idMapPatch::Parse: bad patch vertex data");
				return null;
			}

			for(int j = 0; j < patch.Width; j++)
			{
				if(lexer.ExpectTokenString("(") == false)
				{
					lexer.Error("idMapPatch::Parse: bad vertex row data");
					return null;
				}

				for(int i = 0; i < patch.Height; i++)
				{
					float[] v = lexer.Parse1DMatrix(5);

					if(v == null)
					{
						lexer.Error("idMapPatch::Parse: bad vertex column data");
						return null;
					}

					Vertex vert = new Vertex();
					vert.Position.X = v[0] - origin.X;
					vert.Position.Y = v[1] - origin.Y;
					vert.Position.Z = v[2] - origin.Z;
					vert.TextureCoordinates = new Vector2(v[3], v[4]);

					patch.SetVertex(i * patch.Width + j, vert);
				}

				if(lexer.ExpectTokenString(")") == false)
				{
					lexer.Error("idMapPatch::Parse: unable to parse patch control points");
					return null;
				}
			}

			if(lexer.ExpectTokenString(")") == false)
			{
				lexer.Error("idMapPatch::Parse: unable to parse patch control points, no closure" );
				return null;
			}

			// read any key/value pairs
			while((token = lexer.ReadToken()) != null)
			{
				if(token.ToString() == "}")
				{
					// consume the primitive's outer closing brace as well
					lexer.ExpectTokenString("}");
					break;
				}

				if(token.Type == TokenType.String)
				{
					string key = token.ToString();
					// a subType of 0 imposes no subtype requirement on the expected string
					token = lexer.ExpectTokenType(TokenType.String, 0);

					patch.Dict.Set(key, token.ToString());
				}
			}
			
			return patch;
		}
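
Example #2

ExpectTokenType(TokenType.String, 0) reads the collision model's name as a string token before the model body is parsed.
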
		private bool ParseCollisionModel(idLexer lexer) 
		{
			CollisionModel model = new CollisionModel();

			_models[_modelCount++] = model;

			// parse the file
			idToken token = lexer.ExpectTokenType(TokenType.String, 0);
			string tokenLower;

			model.Name = token.ToString();
			lexer.ExpectTokenString("{");

			while(lexer.CheckTokenString("}") == false)
			{
				token = lexer.ReadToken();
				tokenLower = token.ToString().ToLower();

				if(tokenLower == "vertices")
				{
					ParseVertices(lexer, model);
				}
				else if(tokenLower == "edges")
				{
					ParseEdges(lexer, model);
				}
				else if(tokenLower == "nodes")
				{
					lexer.ExpectTokenString("{");
					model.Node = ParseNodes(lexer, model, null);
					lexer.ExpectTokenString("}");
				}
				else if(tokenLower == "polygons")
				{
					ParsePolygons(lexer, model);
				}
				else if(tokenLower == "brushes")
				{
					ParseBrushes(lexer, model);
				}
				else
				{
					lexer.Error("ParseCollisionModel: bad token \"{0}\"", token);
				}
			}

			// calculate edge normals
			_checkCount++;

			idConsole.Warning("TODO: CalculateEdgeNormals(model, model.Node);");

			// get model bounds from brush and polygon bounds
			model.Bounds = GetNodeBounds(model.Node);

			// get model contents
			model.Contents = GetNodeContents(model.Node);

			idConsole.Warning("TODO: used memory");

			// total memory used by this model
			/*model->usedMemory = model->numVertices * sizeof(cm_vertex_t) +
								model->numEdges * sizeof(cm_edge_t) +
								model->polygonMemory +
								model->brushMemory +
									model->numNodes * sizeof(cm_node_t) +
								model->numPolygonRefs * sizeof(cm_polygonRef_t) +
								model->numBrushRefs * sizeof(cm_brushRef_t);*/

			return true;
		}
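
Example #3

After each polygon's edges, plane, and bounds are read, ExpectTokenType(TokenType.String, 0) reads its material name.
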
		private void ParsePolygons(idLexer lexer, CollisionModel model)
		{
			// consume the optional leading number (used to size a polygon memory block in the original C++); it is not used here
			idToken token = lexer.CheckTokenType(TokenType.Number, 0);
			float[] tmp;
			Vector3 normal;

			lexer.ExpectTokenString("{");

			while(lexer.CheckTokenString("}") == false)
			{
				// parse polygon
				int edgeCount = lexer.ParseInt();

				CollisionModelPolygon p = new CollisionModelPolygon();
				p.Material = _traceModelMaterial;
				p.Contents = ContentFlags.All;
				p.Edges = new int[edgeCount];

				lexer.ExpectTokenString("(");

				for(int i = 0; i < edgeCount; i++)
				{
					p.Edges[i] = lexer.ParseInt();
				}

				lexer.ExpectTokenString(")");

				tmp = lexer.Parse1DMatrix(3);
				normal = new Vector3(tmp[0], tmp[1], tmp[2]);

				p.Plane.Normal = normal;
				p.Plane.D = lexer.ParseFloat();

				tmp = lexer.Parse1DMatrix(3);
				p.Bounds.Min = new Vector3(tmp[0], tmp[1], tmp[2]);

				tmp = lexer.Parse1DMatrix(3);
				p.Bounds.Max = new Vector3(tmp[0], tmp[1], tmp[2]);

				token = lexer.ExpectTokenType(TokenType.String, 0);

				// get material
				p.Material = idE.DeclManager.FindMaterial(token.ToString());
				p.Contents = p.Material.ContentFlags;
				p.CheckCount = 0;

				// filter polygon into tree
				FilterPolygonIntoTree(model, model.Node, p);
			}
		}
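
Example #4

ExpectTokenType(TokenType.Number, TokenSubType.Integer) reads the map file CRC from the collision file header; a null return is treated as a missing CRC.
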
		private bool LoadCollisionModelFile(string name, ulong mapFileCRC)
		{
			// load it
			string fileName = Path.Combine(Path.GetDirectoryName(name), Path.GetFileNameWithoutExtension(name) + Extension);

			idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoDollarPrecompilation);
			
			if(lexer.LoadFile(fileName) == false)
			{
				return false;
			}

			idToken token;

			if(lexer.ExpectTokenString(TokenFileID) == false)
			{
				idConsole.Warning("{0} is not a CM file.", fileName);
			}
			else if(((token = lexer.ReadToken()) == null) || (token.ToString() != FileVersion))
			{
				idConsole.Warning("{0} has version {1} instead of {2}", fileName, token, FileVersion);
			}
			else if((token = lexer.ExpectTokenType(TokenType.Number, TokenSubType.Integer)) == null)
			{
				idConsole.Warning("{0} has no map file CRC", fileName);
			}
			else
			{
				ulong crc = token.ToUInt64();

				if((mapFileCRC != 0) && (crc != mapFileCRC))
				{
					idConsole.WriteLine("{0} is out of date", fileName);
				}
				else
				{
					// parse the file
					while(true)
					{
						if((token = lexer.ReadToken()) == null)
						{
							break;
						}

						if(token.ToString().ToLower() == "collisionmodel")
						{
							if(ParseCollisionModel(lexer) == false)
							{
								return false;
							}
						}
						else
						{
							lexer.Error("idCollisionModelManagerLocal::LoadCollisionModelFile: bad token \"{0}\"", token);
						}
					}

					return true;
				}
			}
			
			return false;
		}