/// CheckTokenType(TokenType type, TokenSubType subType)
///
/// Parameters:
///   type    (TokenType)    — the token type to check the next token against.
///   subType (TokenSubType) — the token subtype to check the next token against.
///
/// Returns: the matching token, if the next token matches the given type/subtype.
/// <summary>
/// Parses the brush section of a collision model file and filters every
/// brush it reads into the model's node tree.
/// </summary>
/// <param name="lexer">Lexer positioned at the start of the brush section.</param>
/// <param name="model">Collision model receiving the parsed brushes.</param>
private void ParseBrushes(idLexer lexer, CollisionModel model)
{
	// Consume the optional leading numeric token.
	// NOTE(review): result is unused — presumably a brush count present in some
	// file versions; the call matters only for its consuming side effect. Verify.
	idToken token = lexer.CheckTokenType(TokenType.Number, 0);

	lexer.ExpectTokenString("{");

	while(lexer.CheckTokenString("}") == false)
	{
		// parse brush
		int planeCount = lexer.ParseInt();

		CollisionModelBrush brush = new CollisionModelBrush();
		brush.Contents = ContentFlags.All;
		brush.Material = _traceModelMaterial;
		brush.Planes = new Plane[planeCount];

		lexer.ExpectTokenString("{");

		for(int i = 0; i < planeCount; i++)
		{
			float[] v = lexer.Parse1DMatrix(3);

			brush.Planes[i].Normal = new Vector3(v[0], v[1], v[2]);
			brush.Planes[i].D = lexer.ParseFloat();
		}

		lexer.ExpectTokenString("}");

		float[] min = lexer.Parse1DMatrix(3);
		brush.Bounds.Min = new Vector3(min[0], min[1], min[2]);

		float[] max = lexer.Parse1DMatrix(3);
		brush.Bounds.Max = new Vector3(max[0], max[1], max[2]);

		token = lexer.ReadToken();

		if(token.Type == TokenType.Number)
		{
			// old .cm files use a single integer
			brush.Contents = (ContentFlags) token.ToInt32();
		}
		else
		{
			brush.Contents = ContentsFromString(token.ToString());
		}

		brush.CheckCount = 0;
		brush.PrimitiveCount = 0;

		// filter brush into tree
		FilterBrushIntoTree(model, model.Node, brush);
	}
}
/// <summary>
/// Parses the polygon section of a collision model file and filters every
/// polygon it reads into the model's node tree.
/// </summary>
/// <param name="lexer">Lexer positioned at the start of the polygon section.</param>
/// <param name="model">Collision model receiving the parsed polygons.</param>
private void ParsePolygons(idLexer lexer, CollisionModel model)
{
	// Consume the optional leading numeric token.
	// NOTE(review): result is unused — presumably a polygon count present in some
	// file versions; the call matters only for its consuming side effect. Verify.
	idToken token = lexer.CheckTokenType(TokenType.Number, 0);

	lexer.ExpectTokenString("{");

	while(lexer.CheckTokenString("}") == false)
	{
		// parse polygon
		int edgeCount = lexer.ParseInt();

		CollisionModelPolygon polygon = new CollisionModelPolygon();
		polygon.Material = _traceModelMaterial;
		polygon.Contents = ContentFlags.All;
		polygon.Edges = new int[edgeCount];

		lexer.ExpectTokenString("(");

		for(int i = 0; i < edgeCount; i++)
		{
			polygon.Edges[i] = lexer.ParseInt();
		}

		lexer.ExpectTokenString(")");

		float[] v = lexer.Parse1DMatrix(3);
		Vector3 normal = new Vector3(v[0], v[1], v[2]);

		polygon.Plane.Normal = normal;
		polygon.Plane.D = lexer.ParseFloat();

		v = lexer.Parse1DMatrix(3);
		polygon.Bounds.Min = new Vector3(v[0], v[1], v[2]);

		v = lexer.Parse1DMatrix(3);
		polygon.Bounds.Max = new Vector3(v[0], v[1], v[2]);

		// get material
		token = lexer.ExpectTokenType(TokenType.String, 0);

		polygon.Material = idE.DeclManager.FindMaterial(token.ToString());
		polygon.Contents = polygon.Material.ContentFlags;
		polygon.CheckCount = 0;

		// filter polygon into tree
		FilterPolygonIntoTree(model, model.Node, polygon);
	}
}
/// <summary>
/// Takes a string and breaks it up into arg tokens.
/// </summary>
/// <param name="text">The command text to tokenize; null or empty clears the args.</param>
/// <param name="keepAsStrings">true to only separate tokens from whitespace and comments, ignoring punctuation.</param>
public void TokenizeString(string text, bool keepAsStrings)
{
	// clear previous args.
	_args = new string[] { };

	// guard against null as well as empty — the original indexed .Length directly
	// and would throw NullReferenceException on a null string.
	if(string.IsNullOrEmpty(text) == true)
	{
		return;
	}

	idLexer lexer = new idLexer();
	lexer.LoadMemory(text, "idCmdSystem.TokenizeString");
	lexer.Options = LexerOptions.NoErrors | LexerOptions.NoWarnings | LexerOptions.NoStringConcatination
		| LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowIPAddresses
		| ((keepAsStrings == true) ? LexerOptions.OnlyStrings : 0);

	idToken token = null, number = null;
	List<string> newArgs = new List<string>();
	string tokenValue;

	while(true)
	{
		if(newArgs.Count == idE.MaxCommandArgs)
		{
			break; // this is usually something malicious.
		}

		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		// check for negative numbers: fold a bare "-" together with a
		// following number token into a single token.
		if((keepAsStrings == false) && (tokenValue == "-"))
		{
			if((number = lexer.CheckTokenType(TokenType.Number, 0)) != null)
			{
				token.Set("-" + number);
			}
		}

		// check for cvar expansion
		if(tokenValue == "$")
		{
			if((token = lexer.ReadToken()) == null)
			{
				break;
			}

			if(idE.CvarSystem.IsInitialized == true)
			{
				token.Set(idE.CvarSystem.GetString(token.ToString()));
			}
			else
			{
				token.Set("<unknown>");
			}
		}

		// regular token (len/totalLength bookkeeping from the C++ original was
		// dead code here — removed).
		newArgs.Add(token.ToString());
	}

	_args = newArgs.ToArray();
}