private bool DollarDirective_EvalInt()
{
	long value;
	double tmp;

	if(DollarEvaluate(out value, out tmp, true) == false)
	{
		return false;
	}

	idToken token = new idToken();
	token.Line = _scriptStack.Peek().LineNumber;
	// the token carries the magnitude only; for negative results a separate
	// '-' token is unread below (using the signed value here would have
	// produced a doubled minus sign)
	token.Set(idMath.Abs(value).ToString());
	token.Type = TokenType.Number;
	token.SubType = TokenSubType.Integer | TokenSubType.Long | TokenSubType.Decimal | TokenSubType.ValuesValid;
	token.SetInteger((ulong) idMath.Abs(value));
	token.SetFloat((ulong) idMath.Abs(value));

	UnreadSourceToken(token);

	if(value < 0)
	{
		UnreadSignToken();
	}

	return true;
}
private bool Directive_Eval()
{
	long value;
	double tmp;

	if(Evaluate(out value, out tmp, true) == false)
	{
		return false;
	}

	idLexer script = _scriptStack.Peek();

	idToken token = new idToken();
	token.Line = script.LineNumber;
	// the token carries the magnitude only; for negative results a separate
	// '-' token is unread below (using the signed value here would have
	// produced a doubled minus sign)
	token.Append(idMath.Abs(value).ToString());
	token.Type = TokenType.Number;
	token.SubType = TokenSubType.Integer | TokenSubType.Long | TokenSubType.Decimal;

	UnreadSourceToken(token);

	if(value < 0)
	{
		UnreadSignToken();
	}

	return true;
}
private bool Directive_EvalFloat()
{
	double value;
	long tmp;

	if(Evaluate(out tmp, out value, false) == false)
	{
		return false;
	}

	idLexer script = _scriptStack.Peek();

	idToken token = new idToken();
	token.Line = script.LineNumber;
	// format with two decimal places ("0.00"); the previous format string
	// "00" padded the value to two integer digits and dropped the fraction
	token.Append(idMath.Abs((float) value).ToString("0.00"));
	token.Type = TokenType.Number;
	token.SubType = TokenSubType.Float | TokenSubType.Long | TokenSubType.Decimal;

	UnreadSourceToken(token);

	if(value < 0)
	{
		UnreadSignToken();
	}

	return true;
}
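// Example (illustrative, assuming the directive keywords match the method
// names, i.e. #eval / #evalfloat): "#eval( 2 - 7 )" evaluates to -5, so
// Directive_Eval unreads the magnitude token "5" and then a separate "-"
// punctuation token in front of it; "#evalfloat( -2.5 * 3 )" likewise
// yields the tokens "-" and "7.50".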
private void AppendToken(idToken token)
{
	if(_builder.Length > 0)
	{
		_builder.AppendFormat(" {0}", token.ToString());
	}
	else
	{
		_builder.Append(token.ToString());
	}
}
private void UnreadSignToken()
{
	idToken token = new idToken();
	token.Line = _scriptStack.Peek().LineNumber;
	token.WhiteSpaceStartPosition = 0;
	token.WhiteSpaceEndPosition = 0;
	token.LinesCrossed = 0;
	token.Flags = 0;
	token.Set("-");
	token.Type = TokenType.Punctuation;

	idConsole.Warning("TODO: UnreadSignToken");
	// TODO: token.SubType = LexerPunctuationID.Subtract;

	UnreadSourceToken(token);
}
private void AppendToken(StringBuilder b, idToken token)
{
	if(b.Length > 0)
	{
		b.Append(' ');
	}

	b.Append(token.ToFloat());
}
public idToken(idToken token)
{
	// copy constructor (sketch): the original placeholder only threw an
	// exception, yet Directive_Define relies on copying tokens. This copies
	// the members used elsewhere in this file; any internally cached values
	// are assumed to be rebuilt lazily by the token itself.
	Set(token.ToString());

	Type = token.Type;
	SubType = token.SubType;
	Line = token.Line;
	LinesCrossed = token.LinesCrossed;
	Flags = token.Flags;
	Options = token.Options;
	WhiteSpaceStartPosition = token.WhiteSpaceStartPosition;
	WhiteSpaceEndPosition = token.WhiteSpaceEndPosition;
}
private bool ReadString(idToken token, char quote)
{
	char ch;
	int tmpScriptPosition;
	int tmpLine;

	if(quote == '"')
	{
		token.Type = TokenType.String;
	}
	else
	{
		token.Type = TokenType.Literal;
	}

	// leading quote
	_scriptPosition++;

	while(true)
	{
		// if there is an escape character and escape characters are allowed
		if((GetBufferCharacter(_scriptPosition) == '\\') && ((_options & LexerOptions.NoStringEscapeCharacters) == 0))
		{
			if(ReadEscapeCharacter(out ch) == false)
			{
				return false;
			}

			token.Append(ch);
		}
		// if a trailing quote
		else if(GetBufferCharacter(_scriptPosition) == quote)
		{
			// step over the quote
			_scriptPosition++;

			// if consecutive strings should not be concatenated
			if(((_options & LexerOptions.NoStringConcatination) == LexerOptions.NoStringConcatination)
				&& (((_options & LexerOptions.AllowBackslashStringConcatination) == 0) || (quote != '"')))
			{
				break;
			}

			tmpScriptPosition = _scriptPosition;
			tmpLine = _line;

			// read white space between possible two consecutive strings
			if(ReadWhiteSpace() == false)
			{
				_scriptPosition = tmpScriptPosition;
				_line = tmpLine;

				break;
			}

			if((_options & LexerOptions.NoStringConcatination) == LexerOptions.NoStringConcatination)
			{
				if(GetBufferCharacter(_scriptPosition) != '\\')
				{
					_scriptPosition = tmpScriptPosition;
					_line = tmpLine;

					break;
				}

				// step over the '\\'
				_scriptPosition++;

				if((ReadWhiteSpace() == false) || (GetBufferCharacter(_scriptPosition) != quote))
				{
					Error("expecting string after '\\' terminated line");
					return false;
				}
			}

			// if there's no leading quote
			if(GetBufferCharacter(_scriptPosition) != quote)
			{
				_scriptPosition = tmpScriptPosition;
				_line = tmpLine;

				break;
			}

			// step over the new leading quote
			_scriptPosition++;
		}
		else
		{
			if(GetBufferCharacter(_scriptPosition) == '\0')
			{
				Error("missing trailing quote");
				return false;
			}

			if(GetBufferCharacter(_scriptPosition) == '\n')
			{
				Error("newline inside string");
				return false;
			}

			token.Append(GetBufferCharacter(_scriptPosition++));
		}
	}

	if(token.Type == TokenType.Literal)
	{
		if((_options & LexerOptions.AllowMultiCharacterLiterals) == 0)
		{
			if(token.Length != 1)
			{
				Warning("literal is not one character long");
			}
		}

		token.SubType = (TokenSubType) token.ToString()[0];
	}
	else
	{
		// the sub type is the length of the string
		token.SubType = (TokenSubType) token.ToString().Length;
	}

	return true;
}
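// Example (illustrative, derived from the branches above): with string
// concatenation enabled (NoStringConcatination not set), the input
//   "foo" "bar"
// produces a single String token "foobar". A character literal such as 'a'
// becomes a Literal token whose SubType is the character code (97), while a
// String token's SubType is the string length.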
private bool ReadNumber(idToken token)
{
	token.Type = TokenType.Number;
	token.SubType = 0;
	token.SetInteger(0);
	token.SetFloat(0);

	char c = GetBufferCharacter(_scriptPosition);
	char c2 = GetBufferCharacter(_scriptPosition + 1);

	if((c == '0') && (c2 != '.'))
	{
		// check for a hexadecimal number
		if((c2 == 'x') || (c2 == 'X'))
		{
			token.Append(GetBufferCharacter(_scriptPosition++));
			token.Append(GetBufferCharacter(_scriptPosition++));

			c = GetBufferCharacter(_scriptPosition);

			// the lower bound was previously compared against 0 instead of
			// '0', which accepted far more than the hex digits
			while(((c >= '0') && (c <= '9')) || ((c >= 'a') && (c <= 'f')) || ((c >= 'A') && (c <= 'F')))
			{
				token.Append(c);
				c = GetBufferCharacter(++_scriptPosition);
			}

			token.SubType = TokenSubType.Hex | TokenSubType.Integer;
		}
		// check for a binary number
		else if((c2 == 'b') || (c2 == 'B'))
		{
			token.Append(GetBufferCharacter(_scriptPosition++));
			token.Append(GetBufferCharacter(_scriptPosition++));

			c = GetBufferCharacter(_scriptPosition);

			while((c == '0') || (c == '1'))
			{
				token.Append(c);
				c = GetBufferCharacter(++_scriptPosition);
			}

			token.SubType = TokenSubType.Binary | TokenSubType.Integer;
		}
		// it's an octal number
		else
		{
			token.Append(GetBufferCharacter(_scriptPosition++));

			c = GetBufferCharacter(_scriptPosition);

			while((c >= '0') && (c <= '7'))
			{
				token.Append(c);
				c = GetBufferCharacter(++_scriptPosition);
			}

			token.SubType = TokenSubType.Octal | TokenSubType.Integer;
		}
	}
	else
	{
		// decimal integer or floating point number or ip address
		int dot = 0;

		while(true)
		{
			if((c >= '0') && (c <= '9'))
			{
			}
			else if(c == '.')
			{
				dot++;
			}
			else
			{
				break;
			}

			token.Append(c);
			c = GetBufferCharacter(++_scriptPosition);
		}

		if((c == 'e') && (dot == 0))
		{
			// we have scientific notation without a decimal point
			dot++;
		}

		// if a floating point number
		if(dot == 1)
		{
			token.SubType = TokenSubType.Decimal | TokenSubType.Float;

			// check for floating point exponent
			if(c == 'e')
			{
				// append the e so that GetFloatValue code works
				token.Append(c);
				c = GetBufferCharacter(++_scriptPosition);

				if((c == '-') || (c == '+'))
				{
					token.Append(c);
					c = GetBufferCharacter(++_scriptPosition);
				}

				// the exponent digits were previously gathered with '||',
				// which made this loop condition always true
				while((c >= '0') && (c <= '9'))
				{
					token.Append(c);
					c = GetBufferCharacter(++_scriptPosition);
				}
			}
			// check for floating point exception infinite 1.#INF or indefinite 1.#IND or NaN
			else if(c == '#')
			{
				c2 = (char) 4;

				if(CheckString("INF") == true)
				{
					token.SubType |= TokenSubType.Infinite;
				}
				else if(CheckString("IND") == true)
				{
					token.SubType |= TokenSubType.Indefinite;
				}
				else if(CheckString("NAN") == true)
				{
					token.SubType |= TokenSubType.NaN;
				}
				else if(CheckString("QNAN") == true)
				{
					token.SubType |= TokenSubType.NaN;
					c2++;
				}

				for(int i = 0; i < c2; i++)
				{
					token.Append(c);
					c = GetBufferCharacter(++_scriptPosition);
				}

				while((c >= '0') && (c <= '9'))
				{
					token.Append(c);
					c = GetBufferCharacter(++_scriptPosition);
				}

				if((_options & LexerOptions.AllowFloatExceptions) == 0)
				{
					Error("parsed {0}", token);
				}
			}
		}
		else if(dot > 1)
		{
			if((_options & LexerOptions.AllowIPAddresses) == 0)
			{
				Error("more than one dot in number");
				return false;
			}

			if(dot != 3)
			{
				Error("ip address should have three dots");
				return false;
			}

			token.SubType = TokenSubType.IPAddress;
		}
		else
		{
			token.SubType = TokenSubType.Decimal | TokenSubType.Integer;
		}
	}

	if((token.SubType & TokenSubType.Float) == TokenSubType.Float)
	{
		if(c > ' ')
		{
			// single-precision: float
			if((c == 'f') || (c == 'F'))
			{
				token.SubType |= TokenSubType.SinglePrecision;
				_scriptPosition++;
			}
			// extended-precision: long double
			else if((c == 'l') || (c == 'L'))
			{
				token.SubType |= TokenSubType.ExtendedPrecision;
				_scriptPosition++;
			}
			// default is double-precision: double
			else
			{
				token.SubType |= TokenSubType.DoublePrecision;
			}
		}
		else
		{
			token.SubType |= TokenSubType.DoublePrecision;
		}
	}
	else if((token.SubType & TokenSubType.Integer) == TokenSubType.Integer)
	{
		if(c > ' ')
		{
			// default: signed long
			for(int i = 0; i < 2; i++)
			{
				// long integer
				if((c == 'l') || (c == 'L'))
				{
					token.SubType |= TokenSubType.Long;
				}
				// unsigned integer
				else if((c == 'u') || (c == 'U'))
				{
					token.SubType |= TokenSubType.Unsigned;
				}
				else
				{
					break;
				}

				c = GetBufferCharacter(++_scriptPosition);
			}
		}
	}
	else if((token.SubType & TokenSubType.IPAddress) == TokenSubType.IPAddress)
	{
		if(c == ':')
		{
			token.Append(c);
			c = GetBufferCharacter(++_scriptPosition);

			while((c >= '0') && (c <= '9'))
			{
				token.Append(c);
				c = GetBufferCharacter(++_scriptPosition);
			}

			token.SubType |= TokenSubType.IPPort;
		}
	}

	return true;
}
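// Examples of the subtypes ReadNumber produces (derived from the code above):
//   "0x1aF"        -> Hex | Integer
//   "0b1011"       -> Binary | Integer
//   "0755"         -> Octal | Integer
//   "1.5e-3f"      -> Decimal | Float | SinglePrecision
//   "127.0.0.1:80" -> IPAddress | IPPort (requires LexerOptions.AllowIPAddresses)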
private bool Directive_Define()
{
	idToken token, t;
	ScriptDefinition define;

	if((token = ReadLine()) == null)
	{
		Error("#define without name");
		return false;
	}
	else if(token.Type != TokenType.Name)
	{
		UnreadSourceToken(token);
		Error("expected name after #define, found '{0}'", token.ToString());
		return false;
	}

	// check if the define already exists
	if(_defineDict.TryGetValue(token.ToString(), out define) == true)
	{
		if((define.Flags & DefineFlags.Fixed) == DefineFlags.Fixed)
		{
			Error("can't redefine '{0}'", token.ToString());
			return false;
		}

		Warning("redefinition of '{0}'", token.ToString());

		// unread the define name before executing the #undef directive
		UnreadSourceToken(token);

		if(Directive_UnDefine() == false)
		{
			return false;
		}

		// if the define was not removed (define->flags & DEFINE_FIXED);
		// TryGetValue rather than the indexer so that a successfully removed
		// define doesn't throw KeyNotFoundException here
		_defineDict.TryGetValue(token.ToString(), out define);
	}

	// allocate define
	define = new ScriptDefinition();
	define.Name = token.ToString();
	define.Parameters = new idToken[] { };
	define.Tokens = new idToken[] { };

	// add the define to the source
	AddDefineToHash(define, _defineDict);

	// if nothing is defined, just return
	if((token = ReadLine()) == null)
	{
		return true;
	}

	// if it is a define with parameters
	if((token.WhiteSpaceBeforeToken == 0) && (token.ToString() == "("))
	{
		List<idToken> parameters = new List<idToken>();

		// read the define parameters
		if(CheckTokenString(")") == false)
		{
			while(true)
			{
				if((token = ReadLine()) == null)
				{
					Error("expected define parameter");
					return false;
				}
				// if it isn't a name
				else if(token.Type != TokenType.Name)
				{
					Error("invalid define parameter");
					return false;
				}
				else if(FindDefineParameter(define, token.ToString()) >= 0)
				{
					Error("two of the same define parameters");
					return false;
				}

				// add the define parm
				t = new idToken(token);
				t.ClearTokenWhiteSpace();

				parameters.Add(t);

				// read next token
				if((token = ReadLine()) == null)
				{
					Error("define parameters not terminated");
					return false;
				}

				if(token.ToString() == ")")
				{
					break;
				}

				// then it must be a comma
				if(token.ToString() != ",")
				{
					Error("define not terminated");
					return false;
				}
			}
		}

		define.Parameters = parameters.ToArray();

		if((token = ReadLine()) == null)
		{
			return true;
		}
	}

	List<idToken> tokens = new List<idToken>();

	do
	{
		t = new idToken(token);

		if((t.Type == TokenType.Name) && (t.ToString() == define.Name))
		{
			t.Flags |= TokenFlags.RecursiveDefine;
			Warning("recursive define (removed recursion)");
		}

		t.ClearTokenWhiteSpace();
		tokens.Add(t);
	}
	while((token = ReadLine()) != null);

	define.Tokens = tokens.ToArray();

	if(define.Tokens.Length > 0)
	{
		// check for merge operators at the beginning or end
		if((define.Tokens[0].ToString() == "##") || (define.Tokens[define.Tokens.Length - 1].ToString() == "##"))
		{
			Error("define with misplaced ##");
			return false;
		}
	}

	return true;
}
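// Example directives this method accepts or rejects (illustrative):
//   #define VERSION 2                          // simple define, no parameters
//   #define MAX(a, b) ((a) > (b) ? (a) : (b))  // define with a parameter list
//   #define BAD ## x                           // rejected: merge operator at the start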
public idToken ReadToken()
{
	idToken token;
	ScriptDefinition define;

	while(true)
	{
		if((token = ReadSourceToken()) == null)
		{
			return null;
		}

		// check for precompiler directives
		if((token.Type == TokenType.Punctuation) && (token.ToString() == "#"))
		{
			// read the precompiler directive
			if(ReadDirective() == false)
			{
				return null;
			}

			continue;
		}

		// if skipping source because of conditional compilation
		if(_skip > 0)
		{
			continue;
		}

		// recursively concatenate strings that are behind each other, still resolving defines
		if((token.Type == TokenType.String) && ((_scriptStack.Peek().Options & LexerOptions.NoStringConcatination) == 0))
		{
			idToken newToken = ReadToken();

			if(newToken != null)
			{
				if(newToken.Type == TokenType.String)
				{
					token.Append(newToken.ToString());
				}
				else
				{
					UnreadSourceToken(newToken);
				}
			}
		}

		if((_scriptStack.Peek().Options & LexerOptions.NoDollarPrecompilation) == 0)
		{
			// check for special precompiler directives
			if((token.Type == TokenType.Punctuation) && (token.ToString() == "$"))
			{
				// read the precompiler directive
				if(ReadDollarDirective() == true)
				{
					continue;
				}
			}
		}

		// if the token is a name that was not produced by removing recursion
		// from a define (this check was previously inverted, so only
		// recursion-flagged names were ever considered for expansion)
		if((token.Type == TokenType.Name) && ((token.Flags & TokenFlags.RecursiveDefine) == 0))
		{
			// check if the name is a define macro
			if(_defineDict.ContainsKey(token.ToString()) == true)
			{
				idConsole.Warning("TODO: expand defined macro");

				// expand the defined macro
				// TODO
				/*if(ExpandDefineIntoSource(token, define) == false)
				{
					return null;
				}*/

				continue;
			}
		}

		// found a token
		return token;
	}
}
/// <summary>
/// See if the current token matches one of the surface parameter bit flags.
/// </summary>
/// <param name="token"></param>
/// <returns></returns>
private bool CheckSurfaceParameter(idToken token)
{
	string tokenLower = token.ToString().ToLower();

	foreach(MaterialInfoParameter infoParameter in InfoParameters)
	{
		if(tokenLower == infoParameter.Name)
		{
			// if any surface-type bit is set (TypeMask is a multi-bit mask,
			// so compare against zero rather than the full mask)
			if((infoParameter.SurfaceFlags & Renderer.SurfaceFlags.TypeMask) != 0)
			{
				// ensure we only have one surface type set
				_surfaceFlags &= ~SurfaceFlags.TypeMask;
			}

			_surfaceFlags |= infoParameter.SurfaceFlags;
			_contentFlags |= infoParameter.ContentFlags;

			if(infoParameter.ClearSolid == true)
			{
				_contentFlags &= ~ContentFlags.Solid;
			}

			return true;
		}
	}

	return false;
}
private Texture2D ParseImageProgram(ref DateTime timeStamp, ref TextureDepth depth, bool parseOnly)
{
	idToken token = _lexer.ReadToken();
	AppendToken(token);

	string tokenLower = token.ToString().ToLower();

	if(tokenLower == "heightmap")
	{
		MatchAndAppendToken(_lexer, "(");

		Texture2D tex = ParseImageProgram(_lexer, ref timeStamp, ref depth);

		if(tex == null)
		{
			return null;
		}

		MatchAndAppendToken(_lexer, ",");

		token = _lexer.ReadToken();
		AppendToken(token);

		float scale = token.ToFloat();

		// process it
		if(tex != null)
		{
			idConsole.Warning("TODO: R_HeightmapToNormalMap( *pic, *width, *height, scale );");
			depth = TextureDepth.Bump;
		}

		MatchAndAppendToken(_lexer, ")");

		return tex;
	}
	else if(tokenLower == "addnormals")
	{
		MatchAndAppendToken(_lexer, "(");

		/*byte *pic2;
		int width2, height2;*/

		Texture2D tex, tex2;

		if((tex = ParseImageProgram(_lexer, ref timeStamp, ref depth)) == null)
		{
			return null;
		}

		MatchAndAppendToken(_lexer, ",");

		if((tex2 = ParseImageProgram(_lexer, ref timeStamp, ref depth)) == null)
		{
			tex.Dispose();
			idConsole.Warning("TODO: content doesn't get unloaded, this texture will remain disposed forever!");

			return null;
		}

		// process it
		if(tex != null)
		{
			// TODO: tex2.Dispose();
			idConsole.Warning("TODO: content doesn't get unloaded, this texture will remain disposed forever!");

			depth = TextureDepth.Bump;
			idConsole.Warning("TODO: R_AddNormalMaps( *pic, *width, *height, pic2, width2, height2 );");
		}

		MatchAndAppendToken(_lexer, ")");

		return tex;
	}
	else if(tokenLower == "smoothnormals")
	{
		idConsole.WriteLine("image program smoothnormals");

		/*MatchAndAppendToken( src, "(" );

		if ( !R_ParseImageProgram_r( src, pic, width, height, timestamps, depth ) ) {
			return false;
		}

		if ( pic ) {
			R_SmoothNormalMap( *pic, *width, *height );
			if ( depth ) {
				depth = TD_BUMP;
			}
		}

		MatchAndAppendToken( src, ")" );
		return true;*/

		return null;
	}
	else if(tokenLower == "add")
	{
		idConsole.WriteLine("image program add");

		/*byte *pic2;
		int width2, height2;

		MatchAndAppendToken( src, "(" );

		if ( !R_ParseImageProgram_r( src, pic, width, height, timestamps, depth ) ) {
			return false;
		}

		MatchAndAppendToken( src, "," );

		if ( !R_ParseImageProgram_r( src, pic ? &pic2 : NULL, &width2, &height2, timestamps, depth ) ) {
			if ( pic ) {
				R_StaticFree( *pic );
				pic = NULL;
			}
			return false;
		}

		// process it
		if ( pic ) {
			R_ImageAdd( *pic, *width, *height, pic2, width2, height2 );
			R_StaticFree( pic2 );
		}

		MatchAndAppendToken( src, ")" );
		return true;*/

		return null;
	}
	else if(tokenLower == "scale")
	{
		idConsole.WriteLine("image program scale");

		/*float scale[4];
		int i;

		MatchAndAppendToken( src, "(" );

		R_ParseImageProgram_r( src, pic, width, height, timestamps, depth );

		for ( i = 0 ; i < 4 ; i++ ) {
			MatchAndAppendToken( src, "," );
			src.ReadToken( &token );
			AppendToken( token );
			scale[i] = token.GetFloatValue();
		}

		// process it
		if ( pic ) {
			R_ImageScale( *pic, *width, *height, scale );
		}

		MatchAndAppendToken( src, ")" );
		return true;*/

		return null;
	}
	else if(tokenLower == "invertalpha")
	{
		idConsole.WriteLine("image program invertalpha");

		/*MatchAndAppendToken( src, "(" );

		R_ParseImageProgram_r( src, pic, width, height, timestamps, depth );

		// process it
		if ( pic ) {
			R_InvertAlpha( *pic, *width, *height );
		}

		MatchAndAppendToken( src, ")" );
		return true;*/

		return null;
	}
	else if(tokenLower == "invertcolor")
	{
		idConsole.WriteLine("image program invertcolor");

		/*MatchAndAppendToken( src, "(" );

		R_ParseImageProgram_r( src, pic, width, height, timestamps, depth );

		// process it
		if ( pic ) {
			R_InvertColor( *pic, *width, *height );
		}

		MatchAndAppendToken( src, ")" );
		return true;*/

		return null;
	}
	else if(tokenLower == "makeintensity")
	{
		MatchAndAppendToken(_lexer, "(");

		Texture2D t = ParseImageProgram(ref timeStamp, ref depth, parseOnly);

		idConsole.Warning("TODO: makeintensity");

		/*if(parseOnly == false)
		{
			// copy red to green, blue, and alpha
			int c = width * height * 4;

			for(int i = 0; i < c; i += 4)
			{
				data[i + 1] = data[i + 2] = data[i + 3] = data[i];
			}
		}*/

		MatchAndAppendToken(_lexer, ")");

		return t;
	}
	else if(tokenLower == "makealpha")
	{
		MatchAndAppendToken(_lexer, "(");

		Texture2D tex = ParseImageProgram(_lexer, ref timeStamp, ref depth);

		// average RGB into alpha, then set RGB to white
		if(tex != null)
		{
			idConsole.Warning("TODO: average alpha image");

			/*int c;
			c = *width * *height * 4;
			for ( i = 0 ; i < c ; i+=4 ) {
				(*pic)[i+3] = ( (*pic)[i+0] + (*pic)[i+1] + (*pic)[i+2] ) / 3;
				(*pic)[i+0] =
				(*pic)[i+1] =
				(*pic)[i+2] = 255;
			}*/
		}

		MatchAndAppendToken(_lexer, ")");

		return tex;
	}

	// if we are just parsing instead of loading or checking, don't do the R_LoadImage
	if(parseOnly == true)
	{
		return null;
	}

	// load it as an image
	return idE.ImageManager.LoadImage(token.ToString(), ref timeStamp, true);
}
public static idMapPatch Parse(idLexer lexer, Vector3 origin, bool patchDef3 = true, float version = idMapFile.CurrentMapVersion)
{
	if(lexer.ExpectTokenString("{") == false)
	{
		return null;
	}

	// read the material (we had an implicit 'textures/' in the old format...)
	idToken token = lexer.ReadToken();

	if(token == null)
	{
		lexer.Error("idMapPatch::Parse: unexpected EOF");
		return null;
	}

	// parse it
	float[] info;

	if(patchDef3 == true)
	{
		info = lexer.Parse1DMatrix(7);

		if(info == null)
		{
			lexer.Error("idMapPatch::Parse: unable to parse patchDef3 info");
			return null;
		}
	}
	else
	{
		info = lexer.Parse1DMatrix(5);

		if(info == null)
		{
			lexer.Error("idMapPatch::Parse: unable to parse patchDef2 info");
			return null;
		}
	}

	idMapPatch patch = new idMapPatch((int) info[0], (int) info[1]);

	if(version < 2.0f)
	{
		patch.Material = "textures/" + token.ToString();
	}
	else
	{
		patch.Material = token.ToString();
	}

	if(patchDef3 == true)
	{
		patch.HorizontalSubdivisions = (int) info[2];
		patch.VerticalSubdivisions = (int) info[3];
		patch.ExplicitlySubdivided = true;
	}

	if((patch.Width < 0) || (patch.Height < 0))
	{
		lexer.Error("idMapPatch::Parse: bad size");
		return null;
	}

	// these were written out in the wrong order, IMHO
	if(lexer.ExpectTokenString("(") == false)
	{
		lexer.Error("idMapPatch::Parse: bad patch vertex data");
		return null;
	}

	for(int j = 0; j < patch.Width; j++)
	{
		if(lexer.ExpectTokenString("(") == false)
		{
			lexer.Error("idMapPatch::Parse: bad vertex row data");
			return null;
		}

		for(int i = 0; i < patch.Height; i++)
		{
			float[] v = lexer.Parse1DMatrix(5);

			if(v == null)
			{
				lexer.Error("idMapPatch::Parse: bad vertex column data");
				return null;
			}

			Vertex vert = new Vertex();
			vert.Position.X = v[0] - origin.X;
			vert.Position.Y = v[1] - origin.Y;
			vert.Position.Z = v[2] - origin.Z;
			vert.TextureCoordinates = new Vector2(v[3], v[4]);

			patch.SetVertex(i * patch.Width + j, vert);
		}

		if(lexer.ExpectTokenString(")") == false)
		{
			lexer.Error("idMapPatch::Parse: unable to parse patch control points");
			return null;
		}
	}

	if(lexer.ExpectTokenString(")") == false)
	{
		lexer.Error("idMapPatch::Parse: unable to parse patch control points, no closure");
		return null;
	}

	// read any key/value pairs
	while((token = lexer.ReadToken()) != null)
	{
		if(token.ToString() == "}")
		{
			lexer.ExpectTokenString("}");
			break;
		}

		if(token.Type == TokenType.String)
		{
			string key = token.ToString();
			token = lexer.ExpectTokenType(TokenType.String, 0);

			patch.Dict.Set(key, token.ToString());
		}
	}

	return patch;
}
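// Shape of the input this method consumes (reconstructed from the parse
// calls above; the field names are descriptive, not authoritative):
//   {
//     material_name
//     ( width height ... )       // 7 values for patchDef3, 5 for patchDef2
//     (
//       ( ( x y z s t ) ... )    // Width groups of Height 5-float vertices
//       ...
//     )
//     "key" "value"              // optional key/value pairs
//   } }                          // patch closer plus the enclosing brace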
private void UnreadSourceToken(idToken token)
{
	_tokens.Push(token);
}
public void UnreadToken(idToken token)
{
	UnreadSourceToken(token);
}
private bool ReadName(idToken token)
{
	char c;

	token.Type = TokenType.Name;

	do
	{
		token.Append(GetBufferCharacter(_scriptPosition++));
		c = GetBufferCharacter(_scriptPosition);
	}
	while(((c >= 'a') && (c <= 'z'))
		|| ((c >= 'A') && (c <= 'Z'))
		|| ((c >= '0') && (c <= '9'))
		|| (c == '_')
		// if treating all tokens as strings, don't parse '-' as a separate token
		|| (((_options & LexerOptions.OnlyStrings) == LexerOptions.OnlyStrings) && (c == '-'))
		// if special path name characters are allowed
		|| (((_options & LexerOptions.AllowPathNames) == LexerOptions.AllowPathNames) && ((c == '/') || (c == '\\') || (c == ':') || (c == '.'))));

	// the sub type is the length of the name
	token.SubType = (TokenSubType) token.ToString().Length;

	return true;
}
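// Example (derived from the conditions above): with AllowPathNames set, the
// input "textures/base_wall/lfwall27b" lexes as a single Name token; without
// it, the '/' characters terminate the name and become punctuation tokens.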
private bool ReadPunctuation(idToken token)
{
	int i, l;
	string p;
	LexerPunctuation punc;

	int puncCount = _punctuation.Length;
	int bufferLength = _buffer.Length;
	int puncMarkLength;

	// TODO
	/*#ifdef PUNCTABLE
	for (n = idLexer::punctuationtable[(unsigned int)*(idLexer::script_p)]; n >= 0; n = idLexer::nextpunctuation[n])
	{
		punc = &(idLexer::punctuations[n]);
	#else*/
	for(i = 0; i < puncCount; i++)
	{
		punc = _punctuation[i];
	/*#endif*/

		p = punc.P;
		puncMarkLength = p.Length;

		// check for this punctuation in the script
		for(l = 0; ((l < puncMarkLength) && ((_scriptPosition + l) < bufferLength)); l++)
		{
			if(GetBufferCharacter(_scriptPosition + l) != p[l])
			{
				break;
			}
		}

		if(l >= puncMarkLength)
		{
			for(i = 0; i < l; i++)
			{
				token.Append(p[i]);
			}

			_scriptPosition += l;

			token.Type = TokenType.Punctuation;
			token.SubType = (TokenSubType) punc.N;

			return true;
		}
	}

	return false;
}
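// Note (follows from the first-match loop above): the punctuation table is
// scanned in order, so multi-character marks such as ">>=" must appear
// before their prefixes (">>", ">") or the longer mark will never match.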
/// <summary>
/// Reads the next token.
/// </summary>
/// <returns></returns>
public idToken ReadToken()
{
	idToken token = new idToken();

	if(this.IsLoaded == false)
	{
		idConsole.Error("idLexer.ReadToken: no file loaded");
		return null;
	}

	// if there is a token available (from UnreadToken)
	if(_tokenAvailable == true)
	{
		_tokenAvailable = false;
		return _token;
	}

	// save script position
	_lastScriptPosition = _scriptPosition;

	// save line counter
	_lastLine = _line;

	// start of the white space
	_whiteSpaceStartPosition = _scriptPosition;
	token.WhiteSpaceStartPosition = _scriptPosition;

	// read white space before token
	if(ReadWhiteSpace() == false)
	{
		return null;
	}

	// end of the white space
	_whiteSpaceEndPosition = _scriptPosition;
	token.WhiteSpaceEndPosition = _scriptPosition;

	// line the token is on
	token.Line = _line;

	// number of lines crossed before token
	token.LinesCrossed = _line - _lastLine;
	token.Options = 0;

	char c = GetBufferCharacter(_scriptPosition);

	// if we're keeping everything as whitespace-delimited strings
	if((_options & LexerOptions.OnlyStrings) == LexerOptions.OnlyStrings)
	{
		// if there is a leading quote
		if((c == '"') || (c == '\''))
		{
			if(ReadString(token, c) == false)
			{
				return null;
			}
		}
		else if(ReadName(token) == false)
		{
			return null;
		}
	}
	// if there is a number
	else if(((c >= '0') && (c <= '9'))
		|| ((c == '.') && ((GetBufferCharacter(_scriptPosition + 1) >= '0') && (GetBufferCharacter(_scriptPosition + 1) <= '9'))))
	{
		if(ReadNumber(token) == false)
		{
			return null;
		}

		// if names are allowed to start with a number
		if((_options & LexerOptions.AllowNumberNames) == LexerOptions.AllowNumberNames)
		{
			c = GetBufferCharacter(_scriptPosition);

			if(((c >= 'a') && (c <= 'z')) || ((c >= 'A') && (c <= 'Z')) || (c == '_'))
			{
				if(ReadName(token) == false)
				{
					return null;
				}
			}
		}
	}
	// if there is a leading quote
	else if((c == '"') || (c == '\''))
	{
		if(ReadString(token, c) == false)
		{
			return null;
		}
	}
	// if there is a name
	else if(((c >= 'a') && (c <= 'z')) || ((c >= 'A') && (c <= 'Z')) || (c == '_'))
	{
		if(ReadName(token) == false)
		{
			return null;
		}
	}
	// names may also start with a slash when pathnames are allowed
	else if(((_options & LexerOptions.AllowPathNames) == LexerOptions.AllowPathNames)
		&& ((c == '/') || (c == '\\') || (c == '.')))
	{
		if(ReadName(token) == false)
		{
			return null;
		}
	}
	// check for punctuations
	else if(ReadPunctuation(token) == false)
	{
		Error("unknown punctuation {0}", c);
		return null;
	}

	// successfully read a token
	return token;
}
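// Minimal usage sketch (illustrative; DumpTokens is a hypothetical helper,
// and it assumes a lexer that has already been loaded with a script, since
// the load mechanism is not shown in this file):
private static void DumpTokens(idLexer lexer)
{
	idToken token;

	// ReadToken returns null at end of input (or on error), so this loop
	// drains the script one token at a time
	while((token = lexer.ReadToken()) != null)
	{
		idConsole.WriteLine(string.Format("{0} (line {1})", token.ToString(), token.Line));
	}
}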
private bool ParseScriptEntry(idToken token, idScriptParser parser)
{
	int count = (int) ScriptName.Count;
	string tokenLower = token.ToString().ToLower();

	for(int i = 0; i < count; i++)
	{
		if(tokenLower == ScriptNames[i].ToLower())
		{
			_scripts[i] = new idGuiScriptList();

			return ParseScript(parser, _scripts[i]);
		}
	}

	return false;
}