ToString() public method

Signature: public string ToString()
Returns: string
 /// <summary>
 /// Appends a token's text to the builder, separating it from any
 /// previously appended token with a single space.
 /// </summary>
 /// <param name="token">Token whose string form is appended.</param>
 private void AppendToken(idToken token)
 {
     // nothing buffered yet: no separator needed
     if (_builder.Length == 0)
     {
         _builder.Append(token.ToString());
         return;
     }

     // prefix a space so consecutive tokens stay separated
     _builder.AppendFormat(" {0}", token.ToString());
 }
		/// <summary>
		/// Appends the token's text to the builder; tokens after the first are
		/// preceded by a single space so they remain separated.
		/// </summary>
		/// <param name="token">Token whose string form is appended.</param>
		private void AppendToken(idToken token)
		{
			// a space is only needed once something has already been written
			string separator = (_builder.Length > 0) ? " " : string.Empty;

			_builder.Append(separator).Append(token.ToString());
		}
Example #3
0
		/// <summary>
		/// Reads a quoted string or character literal at the current script position
		/// into the given token, handling escape sequences and (optionally)
		/// concatenation of consecutive quoted strings.
		/// </summary>
		/// <param name="token">Token that receives the characters; its Type and SubType are set here.</param>
		/// <param name="quote">Opening quote character: '"' yields TokenType.String, anything else TokenType.Literal.</param>
		/// <returns>false on a malformed escape, missing trailing quote or embedded newline; otherwise true.</returns>
		private bool ReadString(idToken token, char quote)
		{
			char ch;
			int tmpScriptPosition;
			int tmpLine;

			// the quote character decides the token type
			if(quote == '"')
			{
				token.Type = TokenType.String;
			}
			else
			{
				token.Type = TokenType.Literal;
			}

			// leading quote
			_scriptPosition++;

			while(true)
			{
				// if there is an escape character and escape characters are allowed.
				if((GetBufferCharacter(_scriptPosition) == '\\') && ((_options & LexerOptions.NoStringEscapeCharacters) == 0))
				{
					if(ReadEscapeCharacter(out ch) == false)
					{
						return false;
					}

					token.Append(ch);
				}
				// if a trailing quote
				else if(GetBufferCharacter(_scriptPosition) == quote)
				{
					// step over the quote
					_scriptPosition++;

					// if consecutive strings should not be concatenated
					if(((_options & LexerOptions.NoStringConcatination) == LexerOptions.NoStringConcatination) 
						&& (((_options & LexerOptions.AllowBackslashStringConcatination) == 0) || (quote != '"')))
					{
						break;
					}

					// remember where this string ended so the position can be
					// restored if no follow-up string turns out to be present
					tmpScriptPosition = _scriptPosition;
					tmpLine = _line;

					// read white space between possible two consecutive strings
					if(ReadWhiteSpace() == false)
					{
						_scriptPosition = tmpScriptPosition;
						_line = tmpLine;

						break;
					}

					if((_options & LexerOptions.NoStringConcatination) == LexerOptions.NoStringConcatination)
					{
						// backslash line-continuation is the only concatenation allowed here
						if(GetBufferCharacter(_scriptPosition) != '\\')
						{
							_scriptPosition = tmpScriptPosition;
							_line = tmpLine;

							break;
						}

						// step over the '\\'
						_scriptPosition++;

						if((ReadWhiteSpace() == false) || (GetBufferCharacter(_scriptPosition) != quote))
						{
							Error("expecting string after '\\' terminated line");
							return false;
						}
					}

					// if there's no leading quote
					if(GetBufferCharacter(_scriptPosition) != quote)
					{
						_scriptPosition = tmpScriptPosition;
						_line = tmpLine;

						break;
					}

					// step over the new leading quote
					_scriptPosition++;
				}
				else
				{
					if(GetBufferCharacter(_scriptPosition) == '\0')
					{
						Error("missing trailing quote");
						return false;
					}

					if(GetBufferCharacter(_scriptPosition) == '\n')
					{
						Error("newline inside string");
						return false;
					}

					token.Append(GetBufferCharacter(_scriptPosition++));
				}
			}

			if(token.Type == TokenType.Literal)
			{
				if((_options & LexerOptions.AllowMultiCharacterLiterals) == 0)
				{
					if(token.Length != 1)
					{
						Warning("literal is not one character long");
					}
				}

				// for a literal the sub type is the first character's value
				token.SubType = (TokenSubType) token.ToString()[0];
			}
			else
			{
				// the sub type is the length of the string
				token.SubType = (TokenSubType) token.ToString().Length;
			}

			return true;
		}
Example #4
0
		/// <summary>
		/// Reads a name token (identifier) starting at the current script position.
		/// </summary>
		/// <param name="token">Token that receives the characters; its Type and SubType are set here.</param>
		/// <returns>Always true.</returns>
		private bool ReadName(idToken token)
		{
			token.Type = TokenType.Name;

			while(true)
			{
				token.Append(GetBufferCharacter(_scriptPosition++));

				char c = GetBufferCharacter(_scriptPosition);

				bool isIdentifierCharacter = ((c >= 'a') && (c <= 'z'))
					|| ((c >= 'A') && (c <= 'Z'))
					|| ((c >= '0') && (c <= '9'))
					|| (c == '_');

				// if treating all tokens as strings, don't parse '-' as a separate token
				bool isStringDash = ((_options & LexerOptions.OnlyStrings) == LexerOptions.OnlyStrings) && (c == '-');

				// special path name characters, when allowed
				bool isPathCharacter = ((_options & LexerOptions.AllowPathNames) == LexerOptions.AllowPathNames)
					&& ((c == '/') || (c == '\\') || (c == ':') || (c == '.'));

				if((isIdentifierCharacter == false) && (isStringDash == false) && (isPathCharacter == false))
				{
					break;
				}
			}

			// the sub type is the length of the name
			token.SubType = (TokenSubType) token.ToString().Length;

			return true;
		}
Example #5
0
        /// <summary>
        /// Handles the #define directive: reads the define name, an optional
        /// parameter list and the replacement token list, then registers the
        /// definition in the define dictionary.
        /// </summary>
        /// <returns>false when the directive is malformed or redefines a fixed define, otherwise true.</returns>
        private bool Directive_Define()
        {
            idToken          token, t;
            ScriptDefinition define;

            if ((token = ReadLine()) == null)
            {
                Error("#define without name");
                return(false);
            }
            else if (token.Type != TokenType.Name)
            {
                UnreadSourceToken(token);
                Error("expected name after #define, found '{0}'", token.ToString());

                return(false);
            }

            // check if the define already exists
            if (_defineDict.TryGetValue(token.ToString(), out define) == true)
            {
                if ((define.Flags & DefineFlags.Fixed) == DefineFlags.Fixed)
                {
                    Error("can't redefine '{0}'", token.ToString());
                    return(false);
                }

                Warning("redefinition of '{0}'", token.ToString());

                // unread the define name before executing the #undef directive
                UnreadSourceToken(token);

                if (Directive_UnDefine() == false)
                {
                    return(false);
                }

                // re-fetch in case the define was not removed (DefineFlags.Fixed).
                // BUGFIX: the dictionary indexer throws KeyNotFoundException after a
                // successful #undef has removed the entry; TryGetValue tolerates that.
                _defineDict.TryGetValue(token.ToString(), out define);
            }

            // allocate define
            define            = new ScriptDefinition();
            define.Name       = token.ToString();
            define.Parameters = new idToken[] { };
            define.Tokens     = new idToken[] { };

            // add the define to the source
            AddDefineToHash(define, _defineDict);

            // if nothing is defined, just return
            if ((token = ReadLine()) == null)
            {
                return(true);
            }

            // if it is a define with parameters
            if ((token.WhiteSpaceBeforeToken == 0) && (token.ToString() == "("))
            {
                List <idToken> parameters = new List <idToken>();

                // read the define parameters
                if (CheckTokenString(")") == false)
                {
                    while (true)
                    {
                        if ((token = ReadLine()) == null)
                        {
                            Error("expected define parameter");
                            return(false);
                        }
                        // if it isn't a name
                        else if (token.Type != TokenType.Name)
                        {
                            Error("invalid define parameter");
                            return(false);
                        }
                        else if (FindDefineParameter(define, token.ToString()) >= 0)
                        {
                            Error("two of the same define parameters");
                            return(false);
                        }

                        // add the define parm
                        t = new idToken(token);
                        t.ClearTokenWhiteSpace();

                        parameters.Add(t);

                        // read next token
                        if ((token = ReadLine()) == null)
                        {
                            Error("define parameters not terminated");
                            return(false);
                        }

                        if (token.ToString() == ")")
                        {
                            break;
                        }

                        // then it must be a comma
                        if (token.ToString() != ",")
                        {
                            Error("define not terminated");
                            return(false);
                        }
                    }
                }

                define.Parameters = parameters.ToArray();

                if ((token = ReadLine()) == null)
                {
                    return(true);
                }
            }

            // read the replacement tokens up to the end of the line
            List <idToken> tokens = new List <idToken>();

            do
            {
                t = new idToken(token);

                if ((t.Type == TokenType.Name) && (t.ToString() == define.Name))
                {
                    // the define refers to itself; flag the token so expansion stops
                    t.Flags |= TokenFlags.RecursiveDefine;
                    Warning("recursive define (removed recursion)");
                }

                t.ClearTokenWhiteSpace();

                tokens.Add(t);
            }while((token = ReadLine()) != null);

            define.Tokens = tokens.ToArray();

            if (define.Tokens.Length > 0)
            {
                // check for merge operators at the beginning or end
                if ((define.Tokens[0].ToString() == "##") || (define.Tokens[define.Tokens.Length - 1].ToString() == "##"))
                {
                    Error("define with misplaced ##");
                    return(false);
                }
            }

            return(true);
        }
Example #6
0
        /// <summary>
        /// Reads the next token from the current source, executing precompiler
        /// directives and skipping tokens inside inactive conditional blocks.
        /// </summary>
        /// <returns>The next significant token, or null at end of input or on a directive error.</returns>
        public idToken ReadToken()
        {
            idToken          token;
            // NOTE(review): 'define' is never assigned or read in this method; it
            // appears to belong to the commented-out macro expansion further down.
            ScriptDefinition define;

            while (true)
            {
                if ((token = ReadSourceToken()) == null)
                {
                    return(null);
                }

                // check for precompiler directives
                if ((token.Type == TokenType.Punctuation) && (token.ToString() == "#"))
                {
                    // read the precompiler directive
                    if (ReadDirective() == false)
                    {
                        return(null);
                    }

                    continue;
                }

                // if skipping source because of conditional compilation
                if (_skip > 0)
                {
                    continue;
                }

                // recursively concatenate strings that are behind each other still resolving defines
                if ((token.Type == TokenType.String) && ((_scriptStack.Peek().Options & LexerOptions.NoStringConcatination) == 0))
                {
                    idToken newToken = ReadToken();

                    if (newToken != null)
                    {
                        if (newToken.Type == TokenType.String)
                        {
                            token.Append(newToken.ToString());
                        }
                        else
                        {
                            // not a string: push it back for the next read
                            UnreadSourceToken(newToken);
                        }
                    }
                }

                if ((_scriptStack.Peek().Options & LexerOptions.NoDollarPrecompilation) == 0)
                {
                    // check for special precompiler directives
                    if ((token.Type == TokenType.Punctuation) && (token.ToString() == "$"))
                    {
                        // read the precompiler directive
                        if (ReadDollarDirective() == true)
                        {
                            continue;
                        }
                    }
                }

                // if the token is a name
                // NOTE(review): this branch only fires for names ALREADY carrying the
                // RecursiveDefine flag; the original idTech4 parser expands a macro when
                // the flag is NOT set (!(flags & RECURSIVE_DEFINE)). Confirm whether
                // this condition is inverted.
                if ((token.Type == TokenType.Name) && ((token.Flags & TokenFlags.RecursiveDefine) == TokenFlags.RecursiveDefine))
                {
                    // check if the name is a define macro
                    if (_defineDict.ContainsKey(token.ToString()) == true)
                    {
                        idConsole.Warning("TODO: expand defined macro");
                        // expand the defined macro
                        // TODO

                        /*if(ExpandDefineIntoSource(token, define) == false)
                         * {
                         *      return null;
                         * }*/

                        continue;
                    }
                }

                // found a token
                return(token);
            }
        }
Example #7
0
		/// <summary>
		/// Handles the #define directive: reads the define name, an optional
		/// parameter list and the replacement token list, then registers the
		/// definition in the define dictionary.
		/// </summary>
		/// <returns>false when the directive is malformed or redefines a fixed define, otherwise true.</returns>
		private bool Directive_Define()
		{
			idToken token, t;
			ScriptDefinition define;

			if((token = ReadLine()) == null)
			{
				Error("#define without name");
				return false;
			}
			else if(token.Type != TokenType.Name)
			{
				UnreadSourceToken(token);
				Error("expected name after #define, found '{0}'", token.ToString());

				return false;
			}

			// check if the define already exists
			if(_defineDict.TryGetValue(token.ToString(), out define) == true)
			{
				if((define.Flags & DefineFlags.Fixed) == DefineFlags.Fixed)
				{
					Error("can't redefine '{0}'", token.ToString());
					return false;
				}

				Warning("redefinition of '{0}'", token.ToString());

				// unread the define name before executing the #undef directive
				UnreadSourceToken(token);

				if(Directive_UnDefine() == false)
				{
					return false;
				}

				// re-fetch in case the define was not removed (DefineFlags.Fixed).
				// BUGFIX: the dictionary indexer throws KeyNotFoundException after a
				// successful #undef has removed the entry; TryGetValue tolerates that.
				_defineDict.TryGetValue(token.ToString(), out define);
			}

			// allocate define
			define = new ScriptDefinition();
			define.Name = token.ToString();
			define.Parameters = new idToken[] { };
			define.Tokens = new idToken[] { };

			// add the define to the source
			AddDefineToHash(define, _defineDict);

			// if nothing is defined, just return
			if((token = ReadLine()) == null)
			{
				return true;
			}

			// if it is a define with parameters
			if((token.WhiteSpaceBeforeToken == 0) && (token.ToString() == "("))
			{
				List<idToken> parameters = new List<idToken>();

				// read the define parameters
				if(CheckTokenString(")") == false)
				{
					while(true)
					{
						if((token = ReadLine()) == null)
						{
							Error("expected define parameter");
							return false;
						}
						// if it isn't a name
						else if(token.Type != TokenType.Name)
						{
							Error("invalid define parameter");
							return false;
						}
						else if(FindDefineParameter(define, token.ToString()) >= 0)
						{
							Error("two of the same define parameters");
							return false;
						}

						// add the define parm
						t = new idToken(token);
						t.ClearTokenWhiteSpace();

						parameters.Add(t);

						// read next token
						if((token = ReadLine()) == null)
						{
							Error("define parameters not terminated");
							return false;
						}

						if(token.ToString() == ")")
						{
							break;
						}

						// then it must be a comma
						if(token.ToString() != ",")
						{
							Error("define not terminated");
							return false;
						}
					}
				}

				define.Parameters = parameters.ToArray();

				if((token = ReadLine()) == null)
				{
					return true;
				}
			}

			// read the replacement tokens up to the end of the line
			List<idToken> tokens = new List<idToken>();

			do
			{
				t = new idToken(token);

				if((t.Type == TokenType.Name) && (t.ToString() == define.Name))
				{
					// the define refers to itself; flag the token so expansion stops
					t.Flags |= TokenFlags.RecursiveDefine;
					Warning("recursive define (removed recursion)");
				}

				t.ClearTokenWhiteSpace();

				tokens.Add(t);
			}
			while((token = ReadLine()) != null);

			define.Tokens = tokens.ToArray();

			if(define.Tokens.Length > 0)
			{
				// check for merge operators at the beginning or end
				if((define.Tokens[0].ToString() == "##") || (define.Tokens[define.Tokens.Length - 1].ToString() == "##"))
				{
					Error("define with misplaced ##");
					return false;
				}
			}

			return true;
		}
Example #8
0
		/// <summary>
		/// See if the current token matches one of the surface parameter bit flags.
		/// </summary>
		/// <param name="token">Token to compare (case-insensitively) against the known parameter names.</param>
		/// <returns>true when a parameter matched and its flags were applied, otherwise false.</returns>
		private bool CheckSurfaceParameter(idToken token)
		{
			string name = token.ToString().ToLower();

			foreach(MaterialInfoParameter infoParameter in InfoParameters)
			{
				if(name != infoParameter.Name)
				{
					continue;
				}

				// surface types are mutually exclusive: clear any previously set type
				if((infoParameter.SurfaceFlags & Renderer.SurfaceFlags.TypeMask) == Renderer.SurfaceFlags.TypeMask)
				{
					// ensure we only have one surface type set
					_surfaceFlags &= ~SurfaceFlags.TypeMask;
				}

				_surfaceFlags |= infoParameter.SurfaceFlags;
				_contentFlags |= infoParameter.ContentFlags;

				if(infoParameter.ClearSolid == true)
				{
					_contentFlags &= ~ContentFlags.Solid;
				}

				return true;
			}

			return false;
		}
        /// <summary>
        /// Recursively parses an image program expression (heightmap, addnormals,
        /// smoothnormals, add, scale, invertalpha, invertcolor, makeintensity,
        /// makealpha) or a plain image name. Each consumed token is appended to the
        /// running program string via AppendToken.
        /// </summary>
        /// <param name="timeStamp">Receives timestamp information for the images involved.</param>
        /// <param name="depth">Receives the texture depth (set to Bump by normal-map programs).</param>
        /// <param name="parseOnly">When true, only parses the program without loading the final image.</param>
        /// <returns>The resulting texture, or null when parsing only, on failure, or for unimplemented programs.</returns>
        private Texture2D ParseImageProgram(ref DateTime timeStamp, ref TextureDepth depth, bool parseOnly)
        {
            // NOTE(review): ReadToken may return null at end of input; token is
            // dereferenced below without a check — confirm callers guarantee a token.
            idToken token = _lexer.ReadToken();

            AppendToken(token);

            string tokenLower = token.ToString().ToLower();

            if (tokenLower == "heightmap")
            {
                MatchAndAppendToken(_lexer, "(");

                Texture2D tex = ParseImageProgram(_lexer, ref timeStamp, ref depth);

                if (tex == null)
                {
                    return(null);
                }

                // the second argument is the bump scale factor
                MatchAndAppendToken(_lexer, ",");
                token = _lexer.ReadToken();
                AppendToken(token);

                float scale = token.ToFloat();

                // process it
                if (tex != null)
                {
                    idConsole.Warning("TODO: R_HeightmapToNormalMap( *pic, *width, *height, scale );");
                    depth = TextureDepth.Bump;
                }

                MatchAndAppendToken(_lexer, ")");

                return(tex);
            }
            else if (tokenLower == "addnormals")
            {
                MatchAndAppendToken(_lexer, "(");

                /*byte	*pic2;
                 * int		width2, height2;*/

                Texture2D tex, tex2;

                if ((tex = ParseImageProgram(_lexer, ref timeStamp, ref depth)) == null)
                {
                    return(null);
                }

                MatchAndAppendToken(_lexer, ",");

                if ((tex2 = ParseImageProgram(_lexer, ref timeStamp, ref depth)) == null)
                {
                    // second operand failed: release the first
                    tex.Dispose();

                    idConsole.Warning("TODO: content doesn't get unloaded, this texture will remain disposed for ever!");

                    return(null);
                }

                // process it
                if (tex != null)
                {
                    // TODO: tex2.Dispose();
                    idConsole.Warning("TODO: content doesn't get unloaded, this texture will remain disposed for ever!");

                    depth = TextureDepth.Bump;
                    idConsole.Warning("TODO: R_AddNormalMaps( *pic, *width, *height, pic2, width2, height2 );");
                }

                MatchAndAppendToken(_lexer, ")");

                return(tex);
            }
            else if (tokenLower == "smoothnormals")
            {
                // not implemented yet; original C++ retained below for reference
                idConsole.WriteLine("image program smoothnormals");

                /*MatchAndAppendToken( src, "(" );
                 *
                 * if ( !R_ParseImageProgram_r( src, pic, width, height, timestamps, depth ) ) {
                 *      return false;
                 * }
                 *
                 * if ( pic ) {
                 *      R_SmoothNormalMap( *pic, *width, *height );
                 *      if ( depth ) {
                 * depth = TD_BUMP;
                 *      }
                 * }
                 *
                 * MatchAndAppendToken( src, ")" );
                 * return true;*/
                return(null);
            }
            else if (tokenLower == "add")
            {
                // not implemented yet; original C++ retained below for reference
                idConsole.WriteLine("image program add");

                /*byte	*pic2;
                 * int		width2, height2;
                 *
                 * MatchAndAppendToken( src, "(" );
                 *
                 * if ( !R_ParseImageProgram_r( src, pic, width, height, timestamps, depth ) ) {
                 *      return false;
                 * }
                 *
                 * MatchAndAppendToken( src, "," );
                 *
                 * if ( !R_ParseImageProgram_r( src, pic ? &pic2 : NULL, &width2, &height2, timestamps, depth ) ) {
                 *      if ( pic ) {
                 *              R_StaticFree( *pic );
                 * pic = NULL;
                 *      }
                 *      return false;
                 * }
                 *
                 * // process it
                 * if ( pic ) {
                 *      R_ImageAdd( *pic, *width, *height, pic2, width2, height2 );
                 *      R_StaticFree( pic2 );
                 * }
                 *
                 * MatchAndAppendToken( src, ")" );
                 * return true;*/

                return(null);
            }
            else if (tokenLower == "scale")
            {
                // not implemented yet; original C++ retained below for reference
                idConsole.WriteLine("image program scale");

                /*float	scale[4];
                 * int		i;
                 *
                 * MatchAndAppendToken( src, "(" );
                 *
                 * R_ParseImageProgram_r( src, pic, width, height, timestamps, depth );
                 *
                 * for ( i = 0 ; i < 4 ; i++ ) {
                 *      MatchAndAppendToken( src, "," );
                 *      src.ReadToken( &token );
                 *      AppendToken( token );
                 *      scale[i] = token.GetFloatValue();
                 * }
                 *
                 * // process it
                 * if ( pic ) {
                 *      R_ImageScale( *pic, *width, *height, scale );
                 * }
                 *
                 * MatchAndAppendToken( src, ")" );
                 * return true;*/

                return(null);
            }
            else if (tokenLower == "invertalpha")
            {
                // not implemented yet; original C++ retained below for reference
                idConsole.WriteLine("image program invertalpha");

                /*MatchAndAppendToken( src, "(" );
                 *
                 * R_ParseImageProgram_r( src, pic, width, height, timestamps, depth );
                 *
                 * // process it
                 * if ( pic ) {
                 *      R_InvertAlpha( *pic, *width, *height );
                 * }
                 *
                 * MatchAndAppendToken( src, ")" );
                 * return true;*/

                return(null);
            }
            else if (tokenLower == "invertcolor")
            {
                // not implemented yet; original C++ retained below for reference
                idConsole.WriteLine("image program invertcolor");

                /*MatchAndAppendToken( src, "(" );
                 *
                 * R_ParseImageProgram_r( src, pic, width, height, timestamps, depth );
                 *
                 * // process it
                 * if ( pic ) {
                 *      R_InvertColor( *pic, *width, *height );
                 * }
                 *
                 * MatchAndAppendToken( src, ")" );
                 * return true;*/

                return(null);
            }
            else if (tokenLower == "makeintensity")
            {
                MatchAndAppendToken(_lexer, "(");
                Texture2D t = ParseImageProgram(ref timeStamp, ref depth, parseOnly);

                idConsole.Warning("TODO: makeintensity");

                /*if(parseOnly == false)
                 * {
                 *      // copy red to green, blue, and alpha
                 *      int c = width * height * 4;
                 *
                 *      for(int i = 0; i < c; i += 4)
                 *      {
                 *              data[i + 1] = data[i + 2] = data[i + 3] = data[i];
                 *      }
                 * }*/

                MatchAndAppendToken(_lexer, ")");

                return(t);
            }
            else if (tokenLower == "makealpha")
            {
                MatchAndAppendToken(_lexer, "(");

                Texture2D tex = ParseImageProgram(_lexer, ref timeStamp, ref depth);

                // average RGB into alpha, then set RGB to white
                if (tex != null)
                {
                    idConsole.Warning("TODO: average alpha image");

                    /*int		c;
                     * c = *width * *height * 4;
                     * for ( i = 0 ; i < c ; i+=4 ) {
                     *      (*pic)[i+3] = ( (*pic)[i+0] + (*pic)[i+1] + (*pic)[i+2] ) / 3;
                     *      (*pic)[i+0] =
                     *      (*pic)[i+1] =
                     *      (*pic)[i+2] = 255;
                     * }*/
                }

                MatchAndAppendToken(_lexer, ")");

                return(tex);
            }

            // if we are just parsing instead of loading or checking, don't do the R_LoadImage
            if (parseOnly == true)
            {
                return(null);
            }

            // load it as an image
            return(idE.ImageManager.LoadImage(token.ToString(), ref timeStamp, true));
        }
Example #10
0
        /// <summary>
        /// Parses a patchDef2/patchDef3 block from a map file into an idMapPatch.
        /// </summary>
        /// <param name="lexer">Lexer positioned just before the opening brace of the patch block.</param>
        /// <param name="origin">Origin subtracted from every control point position.</param>
        /// <param name="patchDef3">true to read the 7-element patchDef3 info row, false for the
        /// 5-element patchDef2 row. NOTE(review): the original idTech4 declaration defaults this
        /// to false — confirm the default here is intentional.</param>
        /// <param name="version">Map format version; versions before 2.0 get an implicit "textures/" material prefix.</param>
        /// <returns>The parsed patch, or null on any parse error.</returns>
        public static idMapPatch Parse(idLexer lexer, Vector3 origin, bool patchDef3 = true, float version = idMapFile.CurrentMapVersion)
        {
            if (lexer.ExpectTokenString("{") == false)
            {
                return(null);
            }

            // read the material (we had an implicit 'textures/' in the old format...)
            idToken token = lexer.ReadToken();

            if (token == null)
            {
                lexer.Error("idMapPatch::Parse: unexpected EOF");
                return(null);
            }

            // Parse it
            float[] info;

            if (patchDef3 == true)
            {
                info = lexer.Parse1DMatrix(7);

                if (info == null)
                {
                    lexer.Error("idMapPatch::Parse: unable to Parse patchDef3 info");
                    return(null);
                }
            }
            else
            {
                info = lexer.Parse1DMatrix(5);

                if (info == null)
                {
                    lexer.Error("idMapPatch::Parse: unable to parse patchDef2 info");
                    return(null);
                }
            }

            // info[0] = width, info[1] = height
            idMapPatch patch = new idMapPatch((int)info[0], (int)info[1]);

            if (version < 2.0f)
            {
                patch.Material = "textures/" + token.ToString();
            }
            else
            {
                patch.Material = token.ToString();
            }

            if (patchDef3 == true)
            {
                patch.HorizontalSubdivisions = (int)info[2];
                patch.VerticalSubdivisions   = (int)info[3];
                patch.ExplicitlySubdivided   = true;
            }

            if ((patch.Width < 0) || (patch.Height < 0))
            {
                lexer.Error("idMapPatch::Parse: bad size");
                return(null);
            }

            // these were written out in the wrong order, IMHO
            if (lexer.ExpectTokenString("(") == false)
            {
                lexer.Error("idMapPatch::Parse: bad patch vertex data");
                return(null);
            }

            for (int j = 0; j < patch.Width; j++)
            {
                if (lexer.ExpectTokenString("(") == false)
                {
                    lexer.Error("idMapPatch::Parse: bad vertex row data");
                    return(null);
                }

                for (int i = 0; i < patch.Height; i++)
                {
                    // each control point is (x, y, z, s, t)
                    float[] v = lexer.Parse1DMatrix(5);

                    if (v == null)
                    {
                        lexer.Error("idMapPatch::Parse: bad vertex column data");
                        return(null);
                    }

                    Vertex vert = new Vertex();
                    vert.Position.X         = v[0] - origin.X;
                    vert.Position.Y         = v[1] - origin.Y;
                    vert.Position.Z         = v[2] - origin.Z;
                    vert.TextureCoordinates = new Vector2(v[3], v[4]);

                    // control points are stored row-major: index = heightIndex * Width + widthIndex
                    patch.SetVertex(i * patch.Width + j, vert);
                }

                if (lexer.ExpectTokenString(")") == false)
                {
                    lexer.Error("idMapPatch::Parse: unable to parse patch control points");
                    return(null);
                }
            }

            if (lexer.ExpectTokenString(")") == false)
            {
                lexer.Error("idMapPatch::Parse: unable to parse patch control points, no closure");
                return(null);
            }

            // read any key/value pairs
            while ((token = lexer.ReadToken()) != null)
            {
                if (token.ToString() == "}")
                {
                    // NOTE(review): a second "}" is expected immediately after the
                    // first — presumably the primitive's outer closing brace; confirm
                    // against the map writer's output format.
                    lexer.ExpectTokenString("}");
                    break;
                }

                if (token.Type == TokenType.String)
                {
                    string key = token.ToString();
                    token = lexer.ExpectTokenType(TokenType.String, 0);

                    patch.Dict.Set(key, token.ToString());
                }
            }

            return(patch);
        }
Example #11
0
		/// <summary>
		/// If the token names one of the known window scripts, creates a fresh
		/// script list for that slot and parses the script body into it.
		/// </summary>
		/// <param name="token">Token holding the candidate script name (compared case-insensitively).</param>
		/// <param name="parser">Parser used to read the script body.</param>
		/// <returns>true when the token matched a script entry and its body parsed successfully, otherwise false.</returns>
		private bool ParseScriptEntry(idToken token, idScriptParser parser)
		{
			string name = token.ToString().ToLower();
			int scriptCount = (int) ScriptName.Count;

			for(int index = 0; index < scriptCount; index++)
			{
				if(name != ScriptNames[index].ToLower())
				{
					continue;
				}

				_scripts[index] = new idGuiScriptList();

				return ParseScript(parser, _scripts[index]);
			}

			return false;
		}