Example 1
        private bool Directive_Eval()
        {
            long   value;
            double tmp;

            if (Evaluate(out value, out tmp, true) == false)
            {
                return(false);
            }

            idLexer script = _scriptStack.Peek();
            idToken token  = new idToken();

            token.Line = script.LineNumber;
            token.Append(value.ToString());
            token.Type    = TokenType.Number;
            token.SubType = TokenSubType.Integer | TokenSubType.Long | TokenSubType.Decimal;

            UnreadSourceToken(token);

            if (value < 0)
            {
                UnreadSignToken();
            }

            return(true);
        }
Example 2
        private void ParseEdges(idLexer lexer, CollisionModel model)
        {
            lexer.ExpectTokenString("{");

            int edgeCount = lexer.ParseInt();

            model.Edges = new CollisionModelEdge[edgeCount];

            for (int i = 0; i < edgeCount; i++)
            {
                lexer.ExpectTokenString("(");
                model.Edges[i].VertexCount = new int[] { lexer.ParseInt(), lexer.ParseInt() };
                lexer.ExpectTokenString(")");

                model.Edges[i].Side       = 0;
                model.Edges[i].SideSet    = 0;
                model.Edges[i].Internal   = (ushort)lexer.ParseInt();
                model.Edges[i].UserCount  = (ushort)lexer.ParseInt();
                model.Edges[i].Normal     = Vector3.Zero;
                model.Edges[i].CheckCount = 0;
                model.InternalEdgeCount  += model.Edges[i].Internal;
            }

            lexer.ExpectTokenString("}");
        }
Example 3
        private bool Directive_EvalFloat()
        {
            double value;
            long   tmp;

            if (Evaluate(out tmp, out value, false) == false)
            {
                return(false);
            }

            idLexer script = _scriptStack.Peek();
            idToken token  = new idToken();

            token.Line = script.LineNumber;
            token.Append(idMath.Abs((float)value).ToString("0.00"));
            token.Type    = TokenType.Number;
            token.SubType = TokenSubType.Float | TokenSubType.Long | TokenSubType.Decimal;

            UnreadSourceToken(token);

            if (value < 0)
            {
                UnreadSignToken();
            }

            return(true);
        }
Example 4
        public Texture2D ParseImageProgram(string source, ref DateTime timeStamp, ref TextureDepth depth)
        {
            _lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
            _lexer.LoadMemory(source, source);

            return(ParseImageProgram(ref timeStamp, ref depth, false));
        }
Example 5
        private void ParseJoint(idLexer lexer, idMD5Joint joint, ref idJointQuaternion defaultPose)
        {
            //
            // parse name
            //
            joint.Name = lexer.ReadToken().ToString();

            //
            // parse parent
            //
            int parentIndex = lexer.ParseInt();

            if (parentIndex >= 0)
            {
                if (parentIndex >= (_joints.Length - 1))
                {
                    lexer.Error("Invalid parent for joint '{0}'", joint.Name);
                }

                joint.Parent = _joints[parentIndex];
            }

            //
            // parse default pose
            //
            float[] tmp = lexer.Parse1DMatrix(3);
            defaultPose.Translation = new Vector3(tmp[0], tmp[1], tmp[2]);

            tmp = lexer.Parse1DMatrix(3);
            defaultPose.Quaternion   = new Quaternion(tmp[0], tmp[1], tmp[2], 0);
            defaultPose.Quaternion.W = idHelper.CalculateW(defaultPose.Quaternion);
        }
Example 6
        /// <summary>
        /// Parses a variable length list of parms on one line.
        /// </summary>
        /// <param name="lexer"></param>
        /// <param name="parms"></param>
        /// <param name="maxParms"></param>
        private float[] ParseParams(idLexer lexer, int maxParms)
        {
            idToken      token;
            List <float> parms = new List <float>();
            int          count = 0;
            float        tmp;

            while (true)
            {
                if ((token = lexer.ReadToken()) == null)
                {
                    break;
                }
                else if (count == maxParms)
                {
                    lexer.Error("too many parms on line");
                    break;
                }
                else
                {
                    token.StripQuotes();
                    float.TryParse(token.ToString(), out tmp);

                    parms.Add(tmp);
                    count++;
                }
            }

            return(parms.ToArray());
        }
Example 7
        public Texture2D ParseImageProgram(idLexer lexer)
        {
            _lexer = lexer;

            DateTime     timeStamp = DateTime.Now;
            TextureDepth depth     = TextureDepth.Default;

            return(ParseImageProgram(ref timeStamp, ref depth, true));
        }
Example 8
        public override bool Parse(string text)
        {
            idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination | LexerOptions.NoFatalErrors);

            lexer.LoadMemory(text, this.FileName, this.LineNumber);
            lexer.SkipUntilString("{");

            idToken token;
            string  tokenValue;

            while (true)
            {
                if ((token = lexer.ReadToken()) == null)
                {
                    break;
                }

                tokenValue = token.ToString().ToLower();

                if (tokenValue == "}")
                {
                    break;
                }

                if (tokenValue == "audio")
                {
                    _audio = lexer.ReadToken().ToString();
                    idE.DeclManager.FindSound(_audio);
                }
                else if (tokenValue == "info")
                {
                    _info = lexer.ReadToken().ToString();
                }
                else if (tokenValue == "name")
                {
                    _videoName = lexer.ReadToken().ToString();
                }
                else if (tokenValue == "preview")
                {
                    _preview = lexer.ReadToken().ToString();
                }
                else if (tokenValue == "video")
                {
                    _video = lexer.ReadToken().ToString();
                    idE.DeclManager.FindMaterial(_video);
                }
            }

            if (lexer.HadError == true)
            {
                lexer.Warning("Video decl '{0}' had a parse error", this.Name);
                return(false);
            }

            return(true);
        }
Example 9
        private void ParseBrushes(idLexer lexer, CollisionModel model)
        {
            idToken             token = lexer.CheckTokenType(TokenType.Number, 0);
            int                 planeCount;
            CollisionModelBrush b;

            float[] tmp;

            lexer.ExpectTokenString("{");

            while (lexer.CheckTokenString("}") == false)
            {
                // parse brush
                planeCount = lexer.ParseInt();

                b          = new CollisionModelBrush();
                b.Contents = ContentFlags.All;
                b.Material = _traceModelMaterial;
                b.Planes   = new Plane[planeCount];

                lexer.ExpectTokenString("{");

                for (int i = 0; i < planeCount; i++)
                {
                    tmp = lexer.Parse1DMatrix(3);

                    b.Planes[i].Normal = new Vector3(tmp[0], tmp[1], tmp[2]);
                    b.Planes[i].D      = lexer.ParseFloat();
                }

                lexer.ExpectTokenString("}");

                tmp          = lexer.Parse1DMatrix(3);
                b.Bounds.Min = new Vector3(tmp[0], tmp[1], tmp[2]);

                tmp          = lexer.Parse1DMatrix(3);
                b.Bounds.Max = new Vector3(tmp[0], tmp[1], tmp[2]);

                token = lexer.ReadToken();

                if (token.Type == TokenType.Number)
                {
                    b.Contents = (ContentFlags)token.ToInt32();                      // old .cm files use a single integer
                }
                else
                {
                    b.Contents = ContentsFromString(token.ToString());
                }

                b.CheckCount     = 0;
                b.PrimitiveCount = 0;

                // filter brush into tree
                FilterBrushIntoTree(model, model.Node, b);
            }
        }
Example 10
        private void MatchAndAppendToken(idLexer lexer, string match)
        {
            if (_lexer.ExpectTokenString(match) == false)
            {
                return;
            }

            // a matched token won't need a leading space
            _builder.Append(match);
        }
Example 11
        public bool Load(string fileName, bool clear)
        {
            if (clear == true)
            {
                Clear();
            }

            byte[] data = idE.FileSystem.ReadFile(fileName);

            if (data == null)
            {
                // let whoever called us deal with the failure (so sys_lang can be reset)
                return(false);
            }

            idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);

            lexer.LoadMemory(Encoding.UTF8.GetString(data), fileName);

            if (lexer.IsLoaded == false)
            {
                return(false);
            }

            idToken token, token2;

            lexer.ExpectTokenString("{");

            while ((token = lexer.ReadToken()) != null)
            {
                if (token.ToString() == "}")
                {
                    break;
                }
                else if ((token2 = lexer.ReadToken()) != null)
                {
                    if (token2.ToString() == "}")
                    {
                        break;
                    }

                    _regexReplaceIndex = 0;

                    // stock d3 language files contain sprintf formatters, we need to replace them
                    string val = token2.ToString();
                    val = Regex.Replace(val, "%s|%d|%x", new MatchEvaluator(ReplaceHandler));

                    _elements.Add(token.ToString(), val);
                }
            }

            idConsole.WriteLine("{0} strings read from {1}", _elements.Count, fileName);

            return(true);
        }
Example 12
        public override bool Parse(string text)
        {
            if (this.Disposed == true)
            {
                throw new ObjectDisposedException(this.GetType().Name);
            }

            idLexer lexer = new idLexer(idDeclFile.LexerOptions);

            lexer.LoadMemory(text, this.FileName, this.LineNumber);
            lexer.SkipUntilString("{");

            idToken token;
            string  tokenValue;

            idConsole.Warning("TODO: actual fx parsing, we only step over the block");

            while (true)
            {
                if ((token = lexer.ReadToken()) == null)
                {
                    break;
                }

                tokenValue = token.ToString().ToLower();

                if (tokenValue == "}")
                {
                    break;
                }

                if (tokenValue == "bindto")
                {
                    token = lexer.ReadToken();

                    idConsole.Warning("TODO: FX: joint = token;");
                }
                else if (tokenValue == "{")
                {
                    idConsole.Warning("TODO: FX: idFXSingleAction action;");
                    ParseSingleAction(lexer /*, action*/);
                    // events.Append(action);
                    continue;
                }
            }

            if (lexer.HadError == true)
            {
                lexer.Warning("FX decl '{0}' had a parse error", this.Name);
                return(false);
            }
            return(true);
        }
Example 13
        private void ParsePolygons(idLexer lexer, CollisionModel model)
        {
            idToken token = lexer.CheckTokenType(TokenType.Number, 0);

            float[] tmp;
            Vector3 normal;

            lexer.ExpectTokenString("{");

            while (lexer.CheckTokenString("}") == false)
            {
                // parse polygon
                int edgeCount = lexer.ParseInt();

                CollisionModelPolygon p = new CollisionModelPolygon();
                p.Material = _traceModelMaterial;
                p.Contents = ContentFlags.All;
                p.Edges    = new int[edgeCount];

                lexer.ExpectTokenString("(");

                for (int i = 0; i < edgeCount; i++)
                {
                    p.Edges[i] = lexer.ParseInt();
                }

                lexer.ExpectTokenString(")");

                tmp    = lexer.Parse1DMatrix(3);
                normal = new Vector3(tmp[0], tmp[1], tmp[2]);

                p.Plane.Normal = normal;
                p.Plane.D      = lexer.ParseFloat();

                tmp          = lexer.Parse1DMatrix(3);
                p.Bounds.Min = new Vector3(tmp[0], tmp[1], tmp[2]);

                tmp          = lexer.Parse1DMatrix(3);
                p.Bounds.Max = new Vector3(tmp[0], tmp[1], tmp[2]);

                token = lexer.ExpectTokenType(TokenType.String, 0);

                // get material
                p.Material   = idE.DeclManager.FindMaterial(token.ToString());
                p.Contents   = p.Material.ContentFlags;
                p.CheckCount = 0;

                // filter polygon into tree
                FilterPolygonIntoTree(model, model.Node, p);
            }
        }
Example 14
        private void PushScript(idLexer script)
        {
            foreach (idLexer s in _scriptStack)
            {
                if (s.FileName.Equals(script.FileName, StringComparison.OrdinalIgnoreCase) == true)
                {
                    Warning("'{0}' recursively included", script.FileName);
                    return;
                }
            }

            // push the script on the script stack
            _scriptStack.Push(script);
        }
Example 15
        public virtual bool Parse(string text)
        {
            if (this.Disposed == true)
            {
                throw new ObjectDisposedException(this.GetType().Name);
            }

            idLexer lexer = new idLexer(idDeclFile.LexerOptions);

            lexer.LoadMemory(text, this.FileName, this.LineNumber);
            lexer.SkipUntilString("{");
            lexer.SkipBracedSection(false);

            return(true);
        }
Example 16
        private void ParseParametric(idLexer lexer, idParticleParameter parm)
        {
            idToken token;

            if ((token = lexer.ReadToken()) == null)
            {
                lexer.Error("not enough parameters");
                return;
            }

            if (token.IsNumeric == true)
            {
                // can have a to + 2nd parm.
                float tmp;
                float.TryParse(token.ToString(), out tmp);

                parm.From = tmp;
                parm.To   = tmp;

                if ((token = lexer.ReadToken()) != null)
                {
                    if (token.ToString().ToLower() == "to")
                    {
                        if ((token = lexer.ReadToken()) == null)
                        {
                            lexer.Error("missing second parameter");
                            return;
                        }

                        float.TryParse(token.ToString(), out tmp);
                        parm.To = tmp;
                    }
                    else
                    {
                        lexer.UnreadToken = token;
                    }
                }
            }
            else
            {
                parm.Table = (idDeclTable)idE.DeclManager.FindType(DeclType.Table, token.ToString(), false);
            }
        }
Example 17
        private void ParseVertices(idLexer lexer, CollisionModel model)
        {
            lexer.ExpectTokenString("{");

            int vertexCount = lexer.ParseInt();

            model.Vertices = new CollisionModelVertex[vertexCount];

            for (int i = 0; i < vertexCount; i++)
            {
                float[] tmp = lexer.Parse1DMatrix(3);

                model.Vertices[i].Point      = new Vector3(tmp[0], tmp[1], tmp[2]);
                model.Vertices[i].Side       = 0;
                model.Vertices[i].SideSet    = 0;
                model.Vertices[i].CheckCount = 0;
            }

            lexer.ExpectTokenString("}");
        }
Example 18
        private ContentFlags ContentsFromString(string str)
        {
            idLexer lexer = new idLexer();

            lexer.LoadMemory(str, "ContentsFromString");

            idToken      token;
            ContentFlags contents = ContentFlags.None;
            string       tmp;

            while ((token = lexer.ReadToken()) != null)
            {
                if (token.ToString() == ",")
                {
                    continue;
                }

                tmp = token.ToString();

                switch (tmp)
                {
                case "aas_solid":
                    tmp = "AasSolid";
                    break;

                case "aas_obstacle":
                    tmp = "AasObstacle";
                    break;

                case "flashlight_trigger":
                    tmp = "FlashlightTrigger";
                    break;
                }

                contents |= (ContentFlags)Enum.Parse(typeof(ContentFlags), tmp, true);
            }

            return(contents);
        }
Example 19
        /// <summary>
        /// Load the given source.
        /// </summary>
        /// <returns></returns>
        public bool LoadMemory(string content, string name)
        {
            if (_loaded == true)
            {
                idConsole.FatalError("idScriptParser::LoadMemory: another source already loaded");
                return(false);
            }

            idLexer script = new idLexer(_options);

            script.Punctuation = _punctuation;
            script.LoadMemory(content, name);

            if (script.IsLoaded == false)
            {
                return(false);
            }

            _fileName = name;

            _scriptStack.Clear();
            _indentStack.Clear();
            _tokens.Clear();

            _skip   = 0;
            _loaded = true;

            _scriptStack.Push(script);

            if (_defineDict == null)
            {
                _defines.Clear();
                _defineDict = new Dictionary <string, ScriptDefinition>(StringComparer.OrdinalIgnoreCase);

                AddGlobalDefinesToSource();
            }

            return(true);
        }
Example 20
        private CollisionModelNode ParseNodes(idLexer lexer, CollisionModel model, CollisionModelNode parent)
        {
            model.NodeCount++;

            lexer.ExpectTokenString("(");

            CollisionModelNode node = new CollisionModelNode();

            node.Parent        = parent;
            node.PlaneType     = lexer.ParseInt();
            node.PlaneDistance = lexer.ParseFloat();

            lexer.ExpectTokenString(")");

            if (node.PlaneType != -1)
            {
                node.Children[0] = ParseNodes(lexer, model, node);
                node.Children[1] = ParseNodes(lexer, model, node);
            }

            return(node);
        }
Example 21
        public override bool Parse(string text)
        {
            if (this.Disposed == true)
            {
                throw new ObjectDisposedException(this.GetType().Name);
            }

            idLexer lexer = new idLexer(idDeclFile.LexerOptions);

            lexer.LoadMemory(text, this.FileName, this.LineNumber);
            lexer.SkipUntilString("{");

            // deeper functions can set this, which will cause MakeDefault() to be called at the end
            _errorDuringParse = false;

            if ((ParseMaterial(lexer) == false) || (_errorDuringParse == true))
            {
                MakeDefault();
                return(false);
            }

            return(true);
        }
Example 22
        private bool Directive_Include()
        {
            idLexer script;
            idToken token;
            string  path;

            if ((token = ReadSourceToken()) == null)
            {
                Error("#include without file name");
                return(false);
            }
            else if (token.LinesCrossed > 0)
            {
                Error("#include without file name");
                return(false);
            }
            else if (token.Type == TokenType.String)
            {
                script = new idLexer();

                // try relative to the current file

                path = Path.Combine(Path.GetDirectoryName(_scriptStack.Peek().FileName), token.ToString());

                if (script.LoadFile(path, _osPath) == false)
                {
                    // try absolute path
                    path = token.ToString();

                    if (script.LoadFile(path, _osPath) == false)
                    {
                        // try from the include path
                        path = _includePath + token.ToString();

                        if (script.LoadFile(path, _osPath) == false)
                        {
                            script = null;
                        }
                    }
                }
            }
            else if ((token.Type == TokenType.Punctuation) && (token.ToString() == "<"))
            {
                path = _includePath;

                while ((token = ReadSourceToken()) != null)
                {
                    if (token.LinesCrossed > 0)
                    {
                        UnreadSourceToken(token);
                        break;
                    }
                    else if ((token.Type == TokenType.Punctuation) && (token.ToString() == ">"))
                    {
                        break;
                    }

                    path += token.ToString();
                }

                if (token.ToString() != ">")
                {
                    Warning("#include missing trailing >");
                }
                else if (path == string.Empty)
                {
                    Error("#include without file name between < >");
                    return(false);
                }
                else if ((_options & LexerOptions.NoBaseIncludes) == LexerOptions.NoBaseIncludes)
                {
                    return(true);
                }

                script = new idLexer();

                throw new Exception("ZZ");

                /*if(script.LoadFile(_includePath + path, _osPath) == false)
                 * {
                 *      script = null;
                 * }*/
            }
            else
            {
                Error("#include without file name");
                return(false);
            }

            if (script == null)
            {
                Error("file '{0}' not found", path);
                return(false);
            }

            script.Options     = _options;
            script.Punctuation = _punctuation;

            PushScript(script);

            return(true);
        }
Example 23
		/// <summary>
		/// Parses a variable length list of parms on one line.
		/// </summary>
		/// <param name="lexer"></param>
		/// <param name="parms"></param>
		/// <param name="maxParms"></param>
		private float[] ParseParams(idLexer lexer, int maxParms)
		{
			idToken token;
			List<float> parms = new List<float>();
			int count = 0;
			float tmp;

			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}
				else if(count == maxParms)
				{
					lexer.Error("too many parms on line");
					break;
				}
				else
				{
					token.StripQuotes();
					float.TryParse(token.ToString(), out tmp);
					
					parms.Add(tmp);
					count++;
				}
			}

			return parms.ToArray();
		}
Example 24
		public override bool Parse(string text)
		{
			if(this.Disposed == true)
			{
				throw new ObjectDisposedException(this.GetType().Name);
			}

			idToken token;
			string tokenLower;

			idLexer lexer = new idLexer(idDeclFile.LexerOptions);
			lexer.LoadMemory(text, this.FileName, this.LineNumber);
			lexer.SkipUntilString("{");

			List<idParticleStage> stages = new List<idParticleStage>();

			_depthHack = 0.0f;

			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}

				tokenLower = token.ToString().ToLower();

				if(tokenLower == "}")
				{
					break;
				}
				else if(tokenLower == "{")
				{
					idParticleStage stage = ParseParticleStage(lexer);

					if(stage == null)
					{
						lexer.Warning("Particle stage parse failed");
						MakeDefault();

						return false;
					}

					stages.Add(stage);
				}
				else if(tokenLower == "depthhack")
				{
					_depthHack = lexer.ParseFloat();
				}
				else
				{
					lexer.Warning("bad token {0}", token.ToString());
					MakeDefault();

					return false;
				}
			}

			_stages = stages.ToArray();

			//
			// calculate the bounds
			//
			_bounds.Clear();

			int count = _stages.Length;

			for(int i = 0; i < count; i++)
			{
				idConsole.Warning("TODO: GetStageBounds");
				// TODO: GetStageBounds(stages[i]);
				_bounds += _stages[i].Bounds;
			}

			if(_bounds.Volume <= 0.1f)
			{
				_bounds = idBounds.Expand(idBounds.Zero, 8.0f);
			}

			return true;
		}
Example 25
		private idParticleStage ParseParticleStage(idLexer lexer)
		{
			idToken token;
			string tokenLower;
			
			idParticleStage stage = new idParticleStage();
			stage.Default();

			while(true)
			{
				if(lexer.HadError == true)
				{
					break;
				}
				else if((token = lexer.ReadToken()) == null)
				{
					break;
				}
				else
				{
					tokenLower = token.ToString().ToLower();

					if(tokenLower == "}")
					{
						break;
					}
					else if(tokenLower == "material")
					{
						token = lexer.ReadToken();
						stage.Material = idE.DeclManager.FindMaterial(token.ToString());
					}
					else if(tokenLower == "count")
					{
						stage.TotalParticles = lexer.ParseInt();
					}
					else if(tokenLower == "time")
					{
						stage.ParticleLife = lexer.ParseFloat();
					}
					else if(tokenLower == "cycles")
					{
						stage.Cycles = lexer.ParseFloat();
					}
					else if(tokenLower == "timeoffset")
					{
						stage.TimeOffset = lexer.ParseFloat();
					}
					else if(tokenLower == "deadtime")
					{
						stage.DeadTime = lexer.ParseFloat();
					}
					else if(tokenLower == "randomdistribution")
					{
						stage.RandomDistribution = lexer.ParseBool();
					}
					else if(tokenLower == "bunching")
					{
						stage.SpawnBunching = lexer.ParseFloat();
					}
					else if(tokenLower == "distribution")
					{
						token = lexer.ReadToken();
						tokenLower = token.ToString().ToLower();

						if(tokenLower == "rect")
						{
							stage.Distribution = ParticleDistribution.Rectangle;
						}
						else if(tokenLower == "cylinder")
						{
							stage.Distribution = ParticleDistribution.Cyclinder;
						}
						else if(tokenLower == "sphere")
						{
							stage.Distribution = ParticleDistribution.Sphere;
						}
						else
						{
							lexer.Error("bad distribution type: {0}", token.ToString());
						}

						stage.DistributionParameters = ParseParams(lexer, stage.DistributionParameters.Length);
					}
					else if(tokenLower == "direction")
					{
						token = lexer.ReadToken();
						tokenLower = token.ToString().ToLower();

						if(tokenLower == "cone")
						{
							stage.Direction = ParticleDirection.Cone;
						}
						else if(tokenLower == "outward")
						{
							stage.Direction = ParticleDirection.Outward;
						}
						else
						{
							lexer.Error("bad direction type: {0}", token.ToString());
						}

						stage.DirectionParameters = ParseParams(lexer, stage.DirectionParameters.Length);
					}
					else if(tokenLower == "orientation")
					{
						token = lexer.ReadToken();
						tokenLower = token.ToString().ToLower();

						if(tokenLower == "view")
						{
							stage.Orientation = ParticleOrientation.View;
						}
						else if(tokenLower == "aimed")
						{
							stage.Orientation = ParticleOrientation.Aimed;
						}
						else if(tokenLower == "x")
						{
							stage.Orientation = ParticleOrientation.X;
						}
						else if(tokenLower == "y")
						{
							stage.Orientation = ParticleOrientation.Y;
						}
						else if(tokenLower == "z")
						{
							stage.Orientation = ParticleOrientation.Z;
						}
						else 
						{
							lexer.Error("bad orientation type: {0}", token.ToString());
						}

						stage.OrientationParameters = ParseParams(lexer, stage.OrientationParameters.Length);
					}
					else if(tokenLower == "custompath")
					{
						token = lexer.ReadToken();
						tokenLower = token.ToString().ToLower();

						if(tokenLower == "standard")
						{
							stage.CustomPath = ParticleCustomPath.Standard;
						}
						else if(tokenLower == "helix")
						{
							stage.CustomPath = ParticleCustomPath.Helix;
						}
						else if(tokenLower == "flies")
						{
							stage.CustomPath = ParticleCustomPath.Flies;
						}
						else if(tokenLower == "spherical")
						{
							stage.CustomPath = ParticleCustomPath.Orbit;
						}
						else
						{
							lexer.Error("bad path type: {0}", token.ToString());
						}

						stage.CustomPathParameters = ParseParams(lexer, stage.CustomPathParameters.Length);
					}
					else if(tokenLower == "speed")
					{					
						ParseParametric(lexer, stage.Speed);
					}
					else if(tokenLower == "rotation")
					{
						ParseParametric(lexer, stage.RotationSpeed);
					}
					else if(tokenLower == "angle")
					{
						stage.InitialAngle = lexer.ParseFloat();
					}
					else if(tokenLower == "entitycolor")
					{
						stage.EntityColor = lexer.ParseBool();
					}
					else if(tokenLower == "size")
					{
						ParseParametric(lexer, stage.Size);
					}
					else if(tokenLower == "aspect")
					{
						ParseParametric(lexer, stage.Aspect);
					}
					else if(tokenLower == "fadein")
					{
						stage.FadeInFraction = lexer.ParseFloat();
					}
					else if(tokenLower == "fadeout")
					{
						stage.FadeOutFraction = lexer.ParseFloat();
					}
					else if(tokenLower == "fadeindex")
					{
						stage.FadeIndexFraction = lexer.ParseFloat();
					}
					else if(tokenLower == "color")
					{
						stage.Color = new Vector4(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat());
					}
					else if(tokenLower == "fadecolor")
					{
						stage.FadeColor = new Vector4(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat());
					}
					else if(tokenLower == "offset")
					{
						stage.Offset = new Vector3(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat());
					}
					else if(tokenLower == "animationframes")
					{
						stage.AnimationFrames = lexer.ParseInt();
					}
					else if(tokenLower == "animationrate")
					{
						stage.AnimationRate = lexer.ParseFloat();
					}
					else if(tokenLower == "boundsexpansion")
					{
						stage.BoundsExpansion = lexer.ParseFloat();
					}
					else if(tokenLower == "gravity")
					{
						token = lexer.ReadToken();
						tokenLower = token.ToString().ToLower();

						if(tokenLower == "world")
						{
							stage.WorldGravity = true;
						}
						else
						{
							lexer.UnreadToken = token;
						}

						stage.Gravity = lexer.ParseFloat();
					}
					else
					{
						lexer.Error("unknown token {0}", token.ToString());
					}
				}
			}

			// derive values.
			stage.CycleTime = (int) ((stage.ParticleLife + stage.DeadTime) * 1000);

			return stage;
		}
Example 26
        /// <summary>
        /// This is used during both the initial load, and any reloads.
        /// </summary>
        /// <returns></returns>
        public int LoadAndParse()
        {
            // load the text
            idConsole.DeveloperWriteLine("...loading '{0}'", this.FileName);

            byte[] data = idE.FileSystem.ReadFile(this.FileName);

            if (data == null)
            {
                idConsole.FatalError("couldn't load {0}", this.FileName);
                return(0);
            }

            string  content = UTF8Encoding.UTF8.GetString(data);
            idLexer lexer   = new idLexer();

            lexer.Options = LexerOptions;

            if (lexer.LoadMemory(content, this.FileName) == false)
            {
                idConsole.Error("Couldn't parse {0}", this.FileName);
                return(0);
            }

            // mark all the defs that were from the last reload of this file
            foreach (idDecl decl in _decls)
            {
                decl.RedefinedInReload = false;
            }

            // TODO: checksum = MD5_BlockChecksum( buffer, length );

            _fileSize = content.Length;

            int      startMarker, sourceLine;
            int      size;
            string   name;
            bool     reparse;
            idToken  token;
            idDecl   newDecl;
            DeclType identifiedType;

            string tokenValue;

            // scan through, identifying each individual declaration
            while (true)
            {
                startMarker = lexer.FileOffset;
                sourceLine  = lexer.LineNumber;

                // parse the decl type name
                if ((token = lexer.ReadToken()) == null)
                {
                    break;
                }

                tokenValue = token.ToString();

                // get the decl type from the type name
                identifiedType = idE.DeclManager.GetDeclTypeFromName(tokenValue);

                if (identifiedType == DeclType.Unknown)
                {
                    if (tokenValue == "{")
                    {
                        // if we ever see an open brace, we somehow missed the [type] <name> prefix
                        lexer.Warning("Missing decl name");
                        lexer.SkipBracedSection(false);

                        continue;
                    }
                    else
                    {
                        if (this.DefaultType == DeclType.Unknown)
                        {
                            lexer.Warning("No type");
                            continue;
                        }

                        lexer.UnreadToken = token;

                        // use the default type
                        identifiedType = this.DefaultType;
                    }
                }

                // now parse the name
                if ((token = lexer.ReadToken()) == null)
                {
                    lexer.Warning("Type without definition at the end of file");
                    break;
                }

                tokenValue = token.ToString();

                if (tokenValue == "{")
                {
                    // if we ever see an open brace, we somehow missed the [type] <name> prefix
                    lexer.Warning("Missing decl name");
                    lexer.SkipBracedSection(false);

                    continue;
                }

                // FIXME: export decls are only used by the model exporter, they are skipped here for now
                if (identifiedType == DeclType.ModelExport)
                {
                    lexer.SkipBracedSection();
                    continue;
                }

                name = tokenValue;

                // make sure there's a '{'
                if ((token = lexer.ReadToken()) == null)
                {
                    lexer.Warning("Type without definition at end of file");
                    break;
                }

                tokenValue = token.ToString();

                if (tokenValue != "{")
                {
                    lexer.Warning("Expecting '{{' but found '{0}'", tokenValue);
                    continue;
                }

                lexer.UnreadToken = token;

                // now take everything until a matched closing brace
                lexer.SkipBracedSection();
                size = lexer.FileOffset - startMarker;

                // look it up, possibly getting a newly created default decl
                reparse = false;
                newDecl = idE.DeclManager.FindTypeWithoutParsing(identifiedType, name, false);

                if (newDecl != null)
                {
                    // update the existing copy
                    if ((newDecl.SourceFile != this) || (newDecl.RedefinedInReload == true))
                    {
                        lexer.Warning("{0} '{1}' previously defined at {2}:{3}", identifiedType.ToString().ToLower(), name, newDecl.FileName, newDecl.LineNumber);
                        continue;
                    }

                    if (newDecl.State != DeclState.Unparsed)
                    {
                        reparse = true;
                    }
                }
                else
                {
                    // allow it to be created as a default, then add it to the per-file list
                    newDecl = idE.DeclManager.FindTypeWithoutParsing(identifiedType, name, true);

                    if (newDecl == null)
                    {
                        lexer.Warning("could not instanciate decl '{0}' with name '{1}'", identifiedType.ToString().ToLower(), name);
                        continue;
                    }

                    _decls.Add(newDecl);
                }

                newDecl.RedefinedInReload = true;
                newDecl.SourceText        = content.Substring(startMarker, size);
                newDecl.SourceFile        = this;
                newDecl.SourceTextOffset  = startMarker;
                newDecl.SourceTextLength  = size;
                newDecl.SourceLine        = sourceLine;
                newDecl.State             = DeclState.Unparsed;

                // if it is currently in use, reparse it immediately
                if (reparse)
                {
                    newDecl.ParseLocal();
                }
            }

            _lineCount = lexer.LineNumber;

            // any defs that weren't redefinedInReload should now be defaulted
            foreach (idDecl decl in _decls)
            {
                if (decl.RedefinedInReload == false)
                {
                    decl.MakeDefault();
                    decl.SourceTextOffset = decl.SourceFile.FileSize;
                    decl.SourceTextLength = 0;
                    decl.SourceLine       = decl.SourceFile.LineCount;
                }
            }

            return(_checksum);
        }
Example 27
        /// <summary>
        /// Used for initial loads, reloadModel, and reloading the data of purged models.
        /// </summary>
        /// <remarks>
        /// Upon exit, the model will absolutely be valid, but possibly as a default model.
        /// </remarks>
        public override void Load()
        {
            if (this.Disposed == true)
            {
                throw new ObjectDisposedException(this.GetType().Name);
            }

            if (_purged == false)
            {
                Purge();
            }

            _purged = false;

            idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters);

            if (lexer.LoadFile(Name) == false)
            {
                MakeDefault();
                return;
            }

            lexer.ExpectTokenString(VersionString);

            int     version = lexer.ParseInt();
            int     count   = 0;
            idToken token;

            if (version != Version)
            {
                lexer.Error("Invalid version {0}. Should be version {1}", version, Version);
            }

            //
            // skip commandline
            //
            lexer.ExpectTokenString("commandline");
            lexer.ReadToken();

            // parse num joints
            lexer.ExpectTokenString("numJoints");

            count = lexer.ParseInt();

            _joints      = new idMD5Joint[count];
            _defaultPose = new idJointQuaternion[count];

            idJointMatrix[] poseMat3 = new idJointMatrix[count];

            // parse num meshes
            lexer.ExpectTokenString("numMeshes");
            count = lexer.ParseInt();

            if (count < 0)
            {
                lexer.Error("Invalid size: {0}", count);
            }

            _meshes = new idMD5Mesh[count];

            //
            // parse joints
            //
            lexer.ExpectTokenString("joints");
            lexer.ExpectTokenString("{");

            int jointCount = _joints.Length;

            for (int i = 0; i < jointCount; i++)
            {
                idMD5Joint        joint = _joints[i] = new idMD5Joint();
                idJointQuaternion pose  = new idJointQuaternion();

                ParseJoint(lexer, joint, ref pose);

                poseMat3[i]             = idJointMatrix.Zero;
                poseMat3[i].Rotation    = Matrix.CreateFromQuaternion(pose.Quaternion);
                poseMat3[i].Translation = pose.Translation;

                if (joint.Parent != null)
                {
                    int parentIndex = GetJointIndex(joint.Parent);

                    pose.Quaternion = Quaternion.CreateFromRotationMatrix(poseMat3[i].ToMatrix()
                                                                          * Matrix.Transpose(poseMat3[parentIndex].ToMatrix()));
                    pose.Translation = Vector3.Transform(poseMat3[i].ToVector3() - poseMat3[parentIndex].ToVector3(),
                                                         Matrix.Transpose(poseMat3[parentIndex].ToMatrix()));
                }

                _defaultPose[i] = pose;
            }

            lexer.ExpectTokenString("}");

            int meshCount = _meshes.Length;

            for (int i = 0; i < meshCount; i++)
            {
                lexer.ExpectTokenString("mesh");

                _meshes[i] = new idMD5Mesh();
                _meshes[i].Parse(lexer, poseMat3);
            }

            //
            // calculate the bounds of the model
            //
            CalculateBounds(poseMat3);

            // set the timestamp for reloadmodels
            idConsole.Warning("TODO: fileSystem->ReadFile( name, NULL, &timeStamp );");
        }
Example 28
		public override bool Parse(string text)
		{
			if(this.Disposed == true)
			{
				throw new ObjectDisposedException(this.GetType().Name);
			}

			idToken token;
			string tokenLower;

			idLexer lexer = new idLexer(idDeclFile.LexerOptions);
			lexer.LoadMemory(text, this.FileName, this.LineNumber);
			lexer.SkipUntilString("{");

			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}

				tokenLower = token.ToString().ToLower();

				if(tokenLower == "}")
				{
					break;
				}
				else if(tokenLower == "name")
				{
					token = lexer.ReadToken();
					_pdaName = (token != null) ? token.ToString() : string.Empty;
				}
				else if(tokenLower == "fullname")
				{
					token = lexer.ReadToken();
					_fullName = (token != null) ? token.ToString() : string.Empty;
				}
				else if(tokenLower == "icon")
				{
					token = lexer.ReadToken();
					_icon = (token != null) ? token.ToString() : string.Empty;
				}
				else if(tokenLower == "id")
				{
					token = lexer.ReadToken();
					_id = (token != null) ? token.ToString() : string.Empty;
				}
				else if(tokenLower == "post")
				{
					token = lexer.ReadToken();
					_post = (token != null) ? token.ToString() : string.Empty;
				}
				else if(tokenLower == "title")
				{
					token = lexer.ReadToken();
					_title = (token != null) ? token.ToString() : string.Empty;
				}
				else if(tokenLower == "security")
				{
					token = lexer.ReadToken();
					_security = (token != null) ? token.ToString() : string.Empty;
				}
				else if(tokenLower == "pda_email")
				{
					token = lexer.ReadToken();
					_emailList.Add(token.ToString());

					idE.DeclManager.FindType(DeclType.Email, token.ToString());
				}
				else if(tokenLower == "pda_audio")
				{
					token = lexer.ReadToken();
					_audioList.Add(token.ToString());

					idE.DeclManager.FindType(DeclType.Audio, token.ToString());
				}
				else if(tokenLower == "pda_video")
				{
					token = lexer.ReadToken();
					_videoList.Add(token.ToString());

					idE.DeclManager.FindType(DeclType.Video, token.ToString());
				}
			}

			if(lexer.HadError == true)
			{
				lexer.Warning("PDA decl '{0}' had a parse error", this.Name);
				return false;
			}

			_originalVideoCount = _videoList.Count;
			_originalEmailCount = _emailList.Count;

			return true;
		}
Example 29
		private void ParseSingleAction(idLexer lexer /*idFXSingleAction& FXAction*/) 
		{
			idToken token;
			string tokenValue;
			
			/*FXAction.type = -1;
			FXAction.sibling = -1;

			FXAction.data = "<none>";
			FXAction.name = "<none>";
			FXAction.fire = "<none>";

			FXAction.delay = 0.0f;
			FXAction.duration = 0.0f;
			FXAction.restart = 0.0f;
			FXAction.size = 0.0f;
			FXAction.fadeInTime = 0.0f;
			FXAction.fadeOutTime = 0.0f;
			FXAction.shakeTime = 0.0f;
			FXAction.shakeAmplitude = 0.0f;
			FXAction.shakeDistance = 0.0f;
			FXAction.shakeFalloff = false;
			FXAction.shakeImpulse = 0.0f;
			FXAction.shakeIgnoreMaster = false;
			FXAction.lightRadius = 0.0f;
			FXAction.rotate = 0.0f;
			FXAction.random1 = 0.0f;
			FXAction.random2 = 0.0f;

			FXAction.lightColor = vec3_origin;
			FXAction.offset = vec3_origin;
			FXAction.axis = mat3_identity;

			FXAction.bindParticles = false;
			FXAction.explicitAxis = false;
			FXAction.noshadows = false;
			FXAction.particleTrackVelocity = false;
			FXAction.trackOrigin = false;
			FXAction.soundStarted = false;*/

			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}

				tokenValue = token.ToString().ToLower();

				if(tokenValue == "}")
				{
					break;
				}
				else if(tokenValue == "shake")
				{
					/*FXAction.type = FX_SHAKE;*/
					/*FXAction.shakeTime = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*FXAction.shakeAmplitude = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*FXAction.shakeDistance = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*FXAction.shakeFalloff = */lexer.ParseBool();
					lexer.ExpectTokenString(",");
					/*FXAction.shakeImpulse = */lexer.ParseFloat();
				}
				else if(tokenValue == "noshadows")
				{
					// TODO: FXAction.noshadows = true;
				}
				else if(tokenValue == "name")
				{
					token = lexer.ReadToken();
					// TODO: FXAction.name = token;
				}
				else if(tokenValue == "fire")
				{
					token = lexer.ReadToken();
					// TODO: FXAction.fire = token;
				}
				else if(tokenValue == "random")
				{
					/*FXAction.random1 = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*FXAction.random2 = */lexer.ParseFloat();
					// FXAction.delay = 0.0f;		// check random
				}
				else if(tokenValue == "delay")
				{
					/*FXAction.delay = */lexer.ParseFloat();
				}
				else if(tokenValue == "rotate")
				{
					/*FXAction.rotate = */lexer.ParseFloat();
				}
				else if(tokenValue == "duration")
				{
					/*FXAction.duration = */lexer.ParseFloat();
				}
				else if(tokenValue == "trackorigin")
				{
					/*FXAction.trackOrigin = */lexer.ParseBool();
				}
				else if(tokenValue == "restart")
				{
					/*FXAction.restart = */lexer.ParseFloat();
				}
				else if(tokenValue == "fadein")
				{
					/*FXAction.fadeInTime = */lexer.ParseFloat();
				}
				else if(tokenValue == "fadeout")
				{
					/*FXAction.fadeOutTime = */lexer.ParseFloat();
				}
				else if(tokenValue == "size")
				{
					/*FXAction.size = */lexer.ParseFloat();
				}
				else if(tokenValue == "offset")
				{
					/*FXAction.offset.x = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*FXAction.offset.y = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*FXAction.offset.z = */lexer.ParseFloat();
				}
				else if(tokenValue == "axis")
				{
					/*idVec3 v;*/
					/*v.x = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*v.y = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*v.z = */lexer.ParseFloat();
					/*v.Normalize();
					FXAction.axis = v.ToMat3();
					FXAction.explicitAxis = true;*/
				}
				else if(tokenValue == "angle")
				{
					/*idAngles a;*/
					/*a[0] = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*a[1] = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*a[2] = */lexer.ParseFloat();
					/*FXAction.axis = a.ToMat3();
					FXAction.explicitAxis = true;*/
				}
				else if(tokenValue == "uselight")
				{
					token = lexer.ReadToken();
			
					/*FXAction.data = token;
					for( int i = 0; i < events.Num(); i++ ) {
						if ( events[i].name.Icmp( FXAction.data ) == 0 ) {
							FXAction.sibling = i;
							FXAction.lightColor = events[i].lightColor;
							FXAction.lightRadius = events[i].lightRadius;
						}
					}
					FXAction.type = FX_LIGHT;

					// precache the light material
					declManager->FindMaterial( FXAction.data );*/	
				}
				else if(tokenValue == "attachlight")
				{
					token = lexer.ReadToken();

					/*FXAction.data = token;
					FXAction.type = FX_ATTACHLIGHT;

					// precache it
					declManager->FindMaterial( FXAction.data );*/
				}
				else if(tokenValue == "attachentity")
				{
					token = lexer.ReadToken();

					/*FXAction.data = token;
					FXAction.type = FX_ATTACHENTITY;

					// precache the model
					renderModelManager->FindModel( FXAction.data );*/
				}
				else if(tokenValue == "launch")
				{
					token = lexer.ReadToken();

					/*FXAction.data = token;
					FXAction.type = FX_LAUNCH;

					// precache the entity def
					declManager->FindType( DECL_ENTITYDEF, FXAction.data );*/
				}
				else if(tokenValue == "usemodel")
				{
					token = lexer.ReadToken();

					/*FXAction.data = token;
					for( int i = 0; i < events.Num(); i++ ) {
						if ( events[i].name.Icmp( FXAction.data ) == 0 ) {
							FXAction.sibling = i;
						}
					}
					FXAction.type = FX_MODEL;

					// precache the model
					renderModelManager->FindModel( FXAction.data );*/
				}
				else if(tokenValue == "light")
				{
					token = lexer.ReadToken();
					
					/*FXAction.data = token;*/
					lexer.ExpectTokenString(",");
					/*FXAction.lightColor[0] = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*FXAction.lightColor[1] = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*FXAction.lightColor[2] = */lexer.ParseFloat();
					lexer.ExpectTokenString(",");
					/*FXAction.lightRadius = */lexer.ParseFloat();
					/*FXAction.type = FX_LIGHT;

					// precache the light material
					declManager->FindMaterial( FXAction.data );*/
				}
				else if(tokenValue == "model")
				{
					token = lexer.ReadToken();

					/*FXAction.data = token;
					FXAction.type = FX_MODEL;

					// precache it
					renderModelManager->FindModel( FXAction.data );*/
				}
				else if(tokenValue == "particle") // FIXME: now the same as model
				{
					token = lexer.ReadToken();

					/*FXAction.data = token;
					FXAction.type = FX_PARTICLE;

					// precache it
					renderModelManager->FindModel( FXAction.data );*/
				}
				else if(tokenValue == "decal")
				{
					token = lexer.ReadToken();

					/*FXAction.data = token;
					FXAction.type = FX_DECAL;

					// precache it
					declManager->FindMaterial( FXAction.data );*/
				}
				else if(tokenValue == "particletrackvelocity")
				{
					// TODO: FXAction.particleTrackVelocity = true;
				}
				else if(tokenValue == "sound")
				{
					token = lexer.ReadToken();

					/*FXAction.data = token;
					FXAction.type = FX_SOUND;

					// precache it
					declManager->FindSound( FXAction.data );*/
				}
				else if(tokenValue == "ignoremaster")
				{
					/*FXAction.shakeIgnoreMaster = true;*/
				}
				else if(tokenValue == "shockwave")
				{
					token = lexer.ReadToken();

					/*FXAction.data = token;
					FXAction.type = FX_SHOCKWAVE;

					// precache the entity def
					declManager->FindType( DECL_ENTITYDEF, FXAction.data );*/
				}
				else
				{
					lexer.Warning("FX File: bad token");
				}
			}
		}
Example 30
		public override bool Parse(string text)
		{
			if(this.Disposed == true)
			{
				throw new ObjectDisposedException(this.GetType().Name);
			}

			idLexer lexer = new idLexer(idDeclFile.LexerOptions);
			lexer.LoadMemory(text, this.FileName, this.LineNumber);
			lexer.SkipUntilString("{");

			idToken token;
			string tokenValue;

			idConsole.Warning("TODO: actual fx parsing, we only step over the block");

			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}

				tokenValue = token.ToString().ToLower();

				if(tokenValue == "}")
				{
					break;
				}

				if(tokenValue == "bindto")
				{
					token = lexer.ReadToken();

					idConsole.Warning("TODO: FX: joint = token;");
				}
				else if(tokenValue == "{")
				{
					idConsole.Warning("TODO: FX: idFXSingleAction action;");
					ParseSingleAction(lexer/*, action*/);
					// events.Append(action);
					continue;
				}
			}

			if(lexer.HadError == true)
			{
				lexer.Warning("FX decl '{0}' had a parse error", this.Name);
				return false;
			}
			return true;
		}
Example 31
        private bool ParseCollisionModel(idLexer lexer)
        {
            CollisionModel model = new CollisionModel();

            _models[_modelCount++] = model;

            // parse the file
            idToken token = lexer.ExpectTokenType(TokenType.String, 0);
            string  tokenLower;

            model.Name = token.ToString();
            lexer.ExpectTokenString("{");

            while (lexer.CheckTokenString("}") == false)
            {
                token      = lexer.ReadToken();
                tokenLower = token.ToString().ToLower();

                if (tokenLower == "vertices")
                {
                    ParseVertices(lexer, model);
                }
                else if (tokenLower == "edges")
                {
                    ParseEdges(lexer, model);
                }
                else if (tokenLower == "nodes")
                {
                    lexer.ExpectTokenString("{");
                    model.Node = ParseNodes(lexer, model, null);
                    lexer.ExpectTokenString("}");
                }
                else if (tokenLower == "polygons")
                {
                    ParsePolygons(lexer, model);
                }
                else if (tokenLower == "brushes")
                {
                    ParseBrushes(lexer, model);
                }
                else
                {
                    lexer.Error("ParseCollisionModel: bad token \"{0}\"", token);
                }
            }

            // calculate edge normals
            _checkCount++;

            idConsole.Warning("TODO: CalculateEdgeNormals(model, model.Node);");

            // get model bounds from brush and polygon bounds
            model.Bounds = GetNodeBounds(model.Node);

            // get model contents
            model.Contents = GetNodeContents(model.Node);

            idConsole.Warning("TODO: used memory");

            // total memory used by this model

            /*model->usedMemory = model->numVertices * sizeof(cm_vertex_t) +
                                  model->numEdges * sizeof(cm_edge_t) +
                                  model->polygonMemory +
                                  model->brushMemory +
                                  model->numNodes * sizeof(cm_node_t) +
                                  model->numPolygonRefs * sizeof(cm_polygonRef_t) +
                                  model->numBrushRefs * sizeof(cm_brushRef_t);*/

            return(true);
        }
Esempio n. 32
0
		public override bool Parse(string text)
		{
			if(this.Disposed == true)
			{
				throw new ObjectDisposedException("idDeclTable");
			}

			idLexer lexer = new idLexer(idDeclFile.LexerOptions);
			lexer.LoadMemory(text, this.FileName, this.LineNumber);
			lexer.SkipUntilString("{");

			idToken token;
			List<float> values = new List<float>();

			string tokenLower;
			string tokenValue;

			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}

				tokenValue = token.ToString();
				tokenLower = tokenValue.ToLower();

				if(tokenLower == "}")
				{
					break;
				}
				else if(tokenLower == "snap")
				{
					_snap = true;
				}
				else if(tokenLower == "clamp")
				{
					_clamp = true;
				}
				else if(tokenLower == "{")
				{
					while(true)
					{
						bool errorFlag;
						float v = lexer.ParseFloat(out errorFlag);

						if(errorFlag == true)
						{
							// we got something non-numeric
							MakeDefault();
							return false;
						}

						values.Add(v);

						token = lexer.ReadToken();
						tokenValue = token.ToString();

						if(tokenValue == "}")
						{
							break;
						}
						else if(tokenValue == ",")
						{
							continue;
						}

						lexer.Warning("expected comma or brace");
						MakeDefault();

						return false;
					}
				}
				else
				{
					lexer.Warning("unknown token '{0}'", tokenValue);
					MakeDefault();

					return false;
				}
			}

			// copy the 0 element to the end, so lerping doesn't
			// need to worry about the wrap case
			float val = values[0];
			values.Add(val);

			_values = values.ToArray();

			return true;
		}
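
The duplicated first element appended at the end of Parse is what lets a lookup read _values[i + 1] without a wrap check. Below is a minimal lookup sketch under that assumption; it ignores the _clamp flag for brevity, and the method name is illustrative rather than the port's actual API.

		// Illustrative sketch only: map a normalized index onto the parsed table.
		// _values already has its first element copied to the end (see Parse above),
		// so the i + 1 access in the lerp below never wraps.
		public float TableLookupSketch(float index)
		{
			int domain = _values.Length - 1;

			if(domain <= 1)
			{
				return 1.0f;
			}

			index *= domain;

			if(index < 0)
			{
				// shift negative indices into the positive range before wrapping
				index += domain * (float) System.Math.Ceiling(-index / domain);
			}

			int i = (int) System.Math.Floor(index);
			float frac = index - i;
			i %= domain;

			if(_snap == true)
			{
				// snapped tables return the sample directly, without interpolation
				return _values[i];
			}

			// linear interpolation; the duplicated element covers i == domain - 1
			return _values[i] + frac * (_values[i + 1] - _values[i]);
		}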
Esempio n. 33
0
        public bool LoadAnimation(string fileName)
        {
            idToken token;
            idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters | LexerOptions.NoStringConcatination);

            if (lexer.LoadFile(fileName) == false)
            {
                return(false);
            }

            Clear();

            _name = fileName;

            lexer.ExpectTokenString(idRenderModel_MD5.VersionString);
            int version = lexer.ParseInt();

            if (version != idRenderModel_MD5.Version)
            {
                lexer.Error("Invalid version {0}.  Should be version {1}", version, idRenderModel_MD5.Version);
            }

            // skip the commandline
            lexer.ExpectTokenString("commandline");
            lexer.ReadToken();

            // parse num frames
            lexer.ExpectTokenString("numFrames");
            int frameCount = lexer.ParseInt();

            if (frameCount <= 0)
            {
                lexer.Error("Invalid number of frames: {0}", frameCount);
            }

            // parse num joints
            lexer.ExpectTokenString("numJoints");
            int jointCount = lexer.ParseInt();

            if (jointCount <= 0)
            {
                lexer.Error("Invalid number of joints: {0}", jointCount);
            }

            // parse frame rate
            lexer.ExpectTokenString("frameRate");
            _frameRate = lexer.ParseInt();

            if (_frameRate < 0)
            {
                lexer.Error("Invalid frame rate: {0}", _frameRate);
            }

            // parse number of animated components
            lexer.ExpectTokenString("numAnimatedComponents");
            _animatedComponentCount = lexer.ParseInt();

            if ((_animatedComponentCount < 0) || (_animatedComponentCount > (jointCount * 6)))
            {
                lexer.Error("Invalid number of animated components: {0}", _animatedComponentCount);
            }

            // parse the hierarchy
            _jointInfo = new JointAnimationInfo[jointCount];

            lexer.ExpectTokenString("hierarchy");
            lexer.ExpectTokenString("{");

            for (int i = 0; i < jointCount; i++)
            {
                token = lexer.ReadToken();

                _jointInfo[i]           = new JointAnimationInfo();
                _jointInfo[i].NameIndex = idR.AnimManager.GetJointIndex(token.ToString());

                // parse parent num
                _jointInfo[i].ParentIndex = lexer.ParseInt();

                if (_jointInfo[i].ParentIndex >= i)
                {
                    lexer.Error("Invalid parent num: {0}", _jointInfo[i].ParentIndex);
                }

                if ((i != 0) && (_jointInfo[i].ParentIndex < 0))
                {
                    lexer.Error("Animations may have only one root joint");
                }

                // parse anim bits
                _jointInfo[i].AnimationBits = (AnimationBits)lexer.ParseInt();

                if (((int)_jointInfo[i].AnimationBits & ~63) != 0)
                {
                    lexer.Error("Invalid anim bits: {0}", _jointInfo[i].AnimationBits);
                }

                // parse first component
                _jointInfo[i].FirstComponent = lexer.ParseInt();

                if ((_animatedComponentCount > 0) && ((_jointInfo[i].FirstComponent < 0) || (_jointInfo[i].FirstComponent >= _animatedComponentCount)))
                {
                    lexer.Error("Invalid first component: {0}", _jointInfo[i].FirstComponent);
                }
            }

            lexer.ExpectTokenString("}");

            // parse bounds
            lexer.ExpectTokenString("bounds");
            lexer.ExpectTokenString("{");

            _bounds = new idBounds[frameCount];

            for (int i = 0; i < frameCount; i++)
            {
                float[] tmp  = lexer.Parse1DMatrix(3);
                float[] tmp2 = lexer.Parse1DMatrix(3);

                _bounds[i] = new idBounds(
                    new Vector3(tmp[0], tmp[1], tmp[2]),
                    new Vector3(tmp2[0], tmp2[1], tmp2[2])
                    );
            }

            lexer.ExpectTokenString("}");

            // parse base frame
            _baseFrame = new idJointQuaternion[jointCount];

            lexer.ExpectTokenString("baseframe");
            lexer.ExpectTokenString("{");

            for (int i = 0; i < jointCount; i++)
            {
                float[] tmp  = lexer.Parse1DMatrix(3);
                float[] tmp2 = lexer.Parse1DMatrix(3);

                idCompressedQuaternion q = new idCompressedQuaternion(tmp2[0], tmp2[1], tmp2[2]);


                _baseFrame[i]             = new idJointQuaternion();
                _baseFrame[i].Translation = new Vector3(tmp[0], tmp[1], tmp[2]);
                _baseFrame[i].Quaternion  = q.ToQuaternion();
            }

            lexer.ExpectTokenString("}");

            // parse frames
            _componentFrames = new float[_animatedComponentCount * frameCount];
            int frameOffset = 0;

            for (int i = 0; i < frameCount; i++)
            {
                lexer.ExpectTokenString("frame");
                int count = lexer.ParseInt();

                if (count != i)
                {
                    lexer.Error("Expected frame number {0}", i);
                }

                lexer.ExpectTokenString("{");

                for (int j = 0; j < _animatedComponentCount; j++, frameOffset++)
                {
                    _componentFrames[frameOffset] = lexer.ParseFloat();
                }

                lexer.ExpectTokenString("}");
            }

            // get total move delta
            if (_animatedComponentCount == 0)
            {
                _totalDelta = Vector3.Zero;
            }
            else
            {
                int componentOffset = _jointInfo[0].FirstComponent;

                if ((_jointInfo[0].AnimationBits & AnimationBits.TranslationX) == AnimationBits.TranslationX)
                {
                    for (int i = 0; i < frameCount; i++)
                    {
                        _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.X;
                    }

                    _totalDelta.X = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))];
                    componentOffset++;
                }
                else
                {
                    _totalDelta.X = 0;
                }

                if ((_jointInfo[0].AnimationBits & AnimationBits.TranslationY) == AnimationBits.TranslationY)
                {
                    for (int i = 0; i < frameCount; i++)
                    {
                        _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Y;
                    }

                    _totalDelta.Y = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))];
                    componentOffset++;
                }
                else
                {
                    _totalDelta.Y = 0;
                }

                if ((_jointInfo[0].AnimationBits & AnimationBits.TranslationZ) == AnimationBits.TranslationZ)
                {
                    for (int i = 0; i < frameCount; i++)
                    {
                        _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Z;
                    }

                    _totalDelta.Z = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))];
                }
                else
                {
                    _totalDelta.Z = 0;
                }
            }

            _baseFrame[0].Translation = Vector3.Zero;

            // we don't count last frame because it would cause a 1 frame pause at the end
            _animLength = ((frameCount - 1) * 1000 + _frameRate - 1) / _frameRate;

            // done
            return(true);
        }
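
The _animLength expression near the end is an integer ceiling division: (frameCount - 1) frame intervals played at _frameRate frames per second, rounded up to whole milliseconds. A small standalone restatement of that arithmetic (the method name is mine, not the port's):

            // Illustrative only: same ceiling division as the _animLength line above.
            // Example: 24 frames at 24 fps -> ((24 - 1) * 1000 + 24 - 1) / 24 = 959,
            // i.e. 23 intervals of ~41.67 ms each, rounded up to whole milliseconds.
            private static int AnimationLengthMilliseconds(int frameCount, int frameRate)
            {
                return ((frameCount - 1) * 1000 + frameRate - 1) / frameRate;
            }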
Esempio n. 34
0
		private void ParseParametric(idLexer lexer, idParticleParameter parm)
		{
			idToken token;

			if((token = lexer.ReadToken()) == null)
			{
				lexer.Error("not enough parameters");
				return;
			}

			if(token.IsNumeric == true)
			{
				// can have a to + 2nd parm.
				float tmp;
				float.TryParse(token.ToString(), out tmp);

				parm.From = tmp;
				parm.To = tmp;

				if((token = lexer.ReadToken()) != null)
				{
					if(token.ToString().ToLower() == "to")
					{
						if((token = lexer.ReadToken()) == null)
						{
							lexer.Error("missing second parameter");
							return;
						}

						float.TryParse(token.ToString(), out tmp);
						parm.To = tmp;
					}
					else
					{
						lexer.UnreadToken = token;
					}
				}
			}
			else
			{
				parm.Table = (idDeclTable) idE.DeclManager.FindType(DeclType.Table, token.ToString(), false);
			}
		}
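
A particle parameter parsed here can take three shapes: a single number (From and To end up equal), a numeric range written with the keyword "to", or the name of a table decl. A hedged illustration of each shape (the values are made up):

			// Hedged examples of the token streams ParseParametric accepts:
			//
			//   8.5              -> parm.From = 8.5, parm.To = 8.5
			//   10 to 20         -> parm.From = 10,  parm.To = 20
			//   someTableName    -> parm.Table = FindType(DeclType.Table, "someTableName")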
Esempio n. 35
0
        public override bool Parse(string text)
        {
            if (this.Disposed == true)
            {
                throw new ObjectDisposedException(this.GetType().Name);
            }

            idLexer lexer = new idLexer(idDeclFile.LexerOptions);

            lexer.LoadMemory(text, this.FileName, this.LineNumber);
            lexer.SkipUntilString("{");

            idToken token;
            idToken token2;
            string  value;

            while (true)
            {
                if ((token = lexer.ReadToken()) == null)
                {
                    break;
                }

                value = token.ToString();

                if (value == "}")
                {
                    break;
                }

                if (token.Type != TokenType.String)
                {
                    lexer.Warning("Expected quoted string, but found '{0}'", value);
                    MakeDefault();

                    return(false);
                }

                if ((token2 = lexer.ReadToken()) == null)
                {
                    lexer.Warning("Unexpected end of file");
                    MakeDefault();

                    return(false);
                }

                if (_dict.ContainsKey(value) == true)
                {
                    lexer.Warning("'{0}' already defined", value);
                }

                _dict.Set(value, token2.ToString());
            }

            // we always automatically set a "classname" key to our name
            _dict.Set("classname", this.Name);

            // "inherit" keys will cause all values from another entityDef to be copied into this one
            // if they don't conflict.  We can't have circular recursions, because each entityDef will
            // never be parsed more than once

            // find all of the dicts first, because copying inherited values will modify the dict
            List <idDeclEntity> defList      = new List <idDeclEntity>();
            List <string>       keysToRemove = new List <string>();

            foreach (KeyValuePair <string, string> kvp in _dict.MatchPrefix("inherit"))
            {
                idDeclEntity copy = idE.DeclManager.FindType <idDeclEntity>(DeclType.EntityDef, kvp.Value, false);

                if (copy == null)
                {
                    lexer.Warning("Unknown entityDef '{0}' inherited by '{1}'", kvp.Value, this.Name);
                }
                else
                {
                    defList.Add(copy);
                }

                // delete this key/value pair
                keysToRemove.Add(kvp.Key);
            }

            _dict.Remove(keysToRemove.ToArray());

            // now copy over the inherited key / value pairs
            foreach (idDeclEntity def in defList)
            {
                _dict.SetDefaults(def._dict);
            }

            // precache all referenced media
            // do this as long as we aren't in modview
            idE.Game.CacheDictionaryMedia(_dict);

            return(true);
        }
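
For context, the loop above reads quoted key/value pairs until the closing brace, and keys matched by the "inherit" prefix pull in another entityDef's defaults before being removed. A hedged illustration of the kind of decl body this consumes (the non-"inherit" keys and every value below are made up):

            // Hedged sample input, not taken from a real def file:
            //
            //   entityDef monster_example {
            //       "inherit"      "monster_base"
            //       "health"       "25"
            //       "model"        "models/monsters/example.md5mesh"
            //   }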
Esempio n. 36
0
        public void Parse(idLexer lexer, idJointMatrix[] joints)
        {
            lexer.ExpectTokenString("{");

            //
            // parse name
            //
            if (lexer.CheckTokenString("name") == true)
            {
                lexer.ReadToken();
            }

            //
            // parse shader
            //
            lexer.ExpectTokenString("shader");

            idToken token        = lexer.ReadToken();
            string  materialName = token.ToString();

            _material = idE.DeclManager.FindMaterial(materialName);

            //
            // parse texture coordinates
            //
            lexer.ExpectTokenString("numverts");
            int count = lexer.ParseInt();

            if (count < 0)
            {
                lexer.Error("Invalid size: {0}", token.ToString());
            }

            _texCoords = new Vector2[count];

            int[] firstWeightForVertex = new int[count];
            int[] weightCountForVertex = new int[count];
            int   maxWeight            = 0;
            int   coordCount           = _texCoords.Length;

            _weightCount = 0;

            for (int i = 0; i < coordCount; i++)
            {
                lexer.ExpectTokenString("vert");
                lexer.ParseInt();

                float[] tmp = lexer.Parse1DMatrix(2);

                _texCoords[i] = new Vector2(tmp[0], tmp[1]);

                firstWeightForVertex[i] = lexer.ParseInt();
                weightCountForVertex[i] = lexer.ParseInt();

                if (weightCountForVertex[i] == 0)
                {
                    lexer.Error("Vertex without any joint weights.");
                }

                _weightCount += weightCountForVertex[i];

                if ((weightCountForVertex[i] + firstWeightForVertex[i]) > maxWeight)
                {
                    maxWeight = weightCountForVertex[i] + firstWeightForVertex[i];
                }
            }

            //
            // parse tris
            //
            lexer.ExpectTokenString("numtris");
            _triangleCount = lexer.ParseInt();

            if (_triangleCount < 0)
            {
                lexer.Error("Invalid size: {0}", _triangleCount);
            }

            int[] tris = new int[_triangleCount * 3];

            for (int i = 0; i < _triangleCount; i++)
            {
                lexer.ExpectTokenString("tri");
                lexer.ParseInt();

                tris[i * 3 + 0] = lexer.ParseInt();
                tris[i * 3 + 1] = lexer.ParseInt();
                tris[i * 3 + 2] = lexer.ParseInt();
            }

            //
            // parse weights
            //
            lexer.ExpectTokenString("numweights");
            count = lexer.ParseInt();

            if (count < 0)
            {
                lexer.Error("Invalid size: {0}", count);
            }

            if (maxWeight > count)
            {
                lexer.Warning("Vertices reference out of range weights in model ({0} of {1} weights).", maxWeight, count);
            }

            VertexWeight[] tempWeights = new VertexWeight[count];

            for (int i = 0; i < count; i++)
            {
                lexer.ExpectTokenString("weight");
                lexer.ParseInt();

                int jointIndex = lexer.ParseInt();

                if ((jointIndex < 0) || (jointIndex >= joints.Length))
                {
                    lexer.Error("Joint index out of range({0}): {1}", joints.Length, jointIndex);
                }

                tempWeights[i].JointIndex  = jointIndex;
                tempWeights[i].JointWeight = lexer.ParseFloat();

                float[] tmp = lexer.Parse1DMatrix(3);

                tempWeights[i].Offset = new Vector3(tmp[0], tmp[1], tmp[2]);
            }

            // create pre-scaled weights and an index for the vertex/joint lookup
            _scaledWeights = new Vector4[_weightCount];
            _weightIndex   = new int[_weightCount * 2];

            count      = 0;
            coordCount = _texCoords.Length;

            for (int i = 0; i < coordCount; i++)
            {
                int num         = firstWeightForVertex[i];
                int weightCount = weightCountForVertex[i];

                for (int j = 0; j < weightCount; j++, num++, count++)
                {
                    Vector3 tmp = tempWeights[num].Offset * tempWeights[num].JointWeight;

                    _scaledWeights[count].X = tmp.X;
                    _scaledWeights[count].Y = tmp.Y;
                    _scaledWeights[count].Z = tmp.Z;
                    _scaledWeights[count].W = tempWeights[num].JointWeight;

                    _weightIndex[count * 2 + 0] = tempWeights[num].JointIndex;
                }

                _weightIndex[count * 2 - 1] = 1;
            }

            lexer.ExpectTokenString("}");

            // update counters
            idConsole.Warning("TODO: idRenderModel_MD5 update counters");

            /*c_numVerts += texCoords.Num();
             * c_numWeights += numWeights;
             * c_numWeightJoints++;
             * for ( i = 0; i < numWeights; i++ ) {
             *      c_numWeightJoints += weightIndex[i*2+1];
             * }*/

            //
            // build the information that will be common to all animations of this mesh:
            // silhouette edge connectivity and normal / tangent generation information
            //
            Vertex[] verts     = new Vertex[_texCoords.Length];
            int      vertCount = verts.Length;

            for (int i = 0; i < vertCount; i++)
            {
                verts[i].TextureCoordinates = _texCoords[i];
            }

            TransformVertices(verts, joints);

            idConsole.Warning("TODO: idMD5Mesh Deform");
            //_deformInfo = idE.RenderSystem.BuildDeformInformation(verts, tris, _material.UseUnsmoothedTangents);
        }
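
The weight layout built above is what drives per-frame skinning: each pre-scaled weight contributes one joint transform, and the flag written into the odd _weightIndex slot marks where one vertex's weights end. A minimal sketch of a transform over that layout, assuming a TransformHomogeneous helper that applies the 3x4 joint matrix to a Vector4 (that helper, like the method itself, is an assumption and not the port's actual API):

            // Illustrative sketch only: rebuild skinned vertex positions from the layout
            // created above. Each _scaledWeights entry holds offset * jointWeight in XYZ
            // and the jointWeight itself in W, so applying the 3x4 joint matrix to the
            // whole Vector4 also scales the translation part by the weight; summing the
            // contributions of all weights of a vertex yields its skinned position.
            private Vector3[] SkinPositionsSketch(idJointMatrix[] joints)
            {
                Vector3[] positions = new Vector3[_texCoords.Length];
                int weightOffset = 0;

                for (int i = 0; i < positions.Length; i++)
                {
                    Vector3 accumulated = Vector3.Zero;

                    while (true)
                    {
                        // even slot: joint index; assumed helper applies the joint matrix
                        accumulated += TransformHomogeneous(joints[_weightIndex[weightOffset * 2 + 0]], _scaledWeights[weightOffset]);

                        // odd slot: 1 on the last weight belonging to this vertex
                        bool lastWeightForVertex = (_weightIndex[weightOffset * 2 + 1] == 1);
                        weightOffset++;

                        if (lastWeightForVertex == true)
                        {
                            break;
                        }
                    }

                    positions[i] = accumulated;
                }

                return positions;
            }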
Esempio n. 37
0
		public override bool Parse(string text)
		{
			if(this.Disposed == true)
			{
				throw new ObjectDisposedException(this.GetType().Name);
			}

			idLexer lexer = new idLexer(idDeclFile.LexerOptions);
			lexer.LoadMemory(text, this.FileName, this.LineNumber);
			lexer.SkipUntilString("{");

			List<SkinMapping> mappings = new List<SkinMapping>();
			List<string> associatedModels = new List<string>();

			idToken token, token2;
			string tokenLower;

			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}

				tokenLower = token.ToString().ToLower();

				if(tokenLower == "}")
				{
					break;
				}
				else if((token2 = lexer.ReadToken()) == null)
				{
					lexer.Warning("Unexpected end of file");
					MakeDefault();

					break;
				}
				else if(tokenLower == "model")
				{
					associatedModels.Add(token2.ToString());
					continue;
				}

				SkinMapping map = new SkinMapping();
				map.To = idE.DeclManager.FindMaterial(token2.ToString());

				if(tokenLower == "*")
				{
					// wildcard.
					map.From = null;
				}
				else
				{
					map.From = idE.DeclManager.FindMaterial(token.ToString());
				}
				
				mappings.Add(map);
			}

			_mappings = mappings.ToArray();
			_associatedModels = associatedModels.ToArray();

			return false;
		}
Esempio n. 38
0
		public override bool Parse(string text)
		{
			idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination | LexerOptions.NoFatalErrors);
			lexer.LoadMemory(text, this.FileName, this.LineNumber);
			lexer.SkipUntilString("{");

			idToken token;
			string tokenValue;

			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}

				tokenValue = token.ToString().ToLower();

				if(tokenValue == "}")
				{
					break;
				}

				if(tokenValue == "audio")
				{
					_audio = lexer.ReadToken().ToString();
					idE.DeclManager.FindSound(_audio);
				}
				else if(tokenValue == "info")
				{
					_info = lexer.ReadToken().ToString();
				}
				else if(tokenValue == "name")
				{
					_audioName = lexer.ReadToken().ToString();
				}
				else if(tokenValue == "preview")
				{
					_preview = lexer.ReadToken().ToString();
				}
			}

			if(lexer.HadError == true)
			{
				lexer.Warning("Video decl '{0}' had a parse error", this.Name);
				return false;
			}

			return true;
		}
Esempio n. 39
0
        private bool LoadCollisionModelFile(string name, ulong mapFileCRC)
        {
            // load it
            string fileName = Path.Combine(Path.GetDirectoryName(name), Path.GetFileNameWithoutExtension(name) + Extension);

            idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoDollarPrecompilation);

            if (lexer.LoadFile(fileName) == false)
            {
                return(false);
            }

            idToken token;

            if (lexer.ExpectTokenString(TokenFileID) == false)
            {
                idConsole.Warning("{0} is not a CM file.", fileName);
            }
            else if (((token = lexer.ReadToken()) == null) || (token.ToString() != FileVersion))
            {
                idConsole.Warning("{0} has version {1} instead of {2}", fileName, token, FileVersion);
            }
            else if ((token = lexer.ExpectTokenType(TokenType.Number, TokenSubType.Integer)) == null)
            {
                idConsole.Warning("{0} has no map file CRC", fileName);
            }
            else
            {
                ulong crc = token.ToUInt64();

                if ((mapFileCRC != 0) && (crc != mapFileCRC))
                {
                    idConsole.WriteLine("{0} is out of date", fileName);
                }
                else
                {
                    // parse the file
                    while (true)
                    {
                        if ((token = lexer.ReadToken()) == null)
                        {
                            break;
                        }

                        if (token.ToString().ToLower() == "collisionmodel")
                        {
                            if (ParseCollisionModel(lexer) == false)
                            {
                                return(false);
                            }
                        }
                        else
                        {
                            lexer.Error("idCollisionModelManagerLocal::LoadCollisionModelFile: bad token \"{0}\"", token);
                        }
                    }

                    return(true);
                }
            }

            return(false);
        }
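
For context, the loader above expects the TokenFileID token, a quoted version string and the source map's CRC before the collisionModel blocks that ParseCollisionModel handles. A hedged sketch of that on-disk layout (every literal below is made up):

            // Hedged sample of a .cm file as this loader walks it:
            //
            //   CM "1.00"
            //   1634252722
            //   collisionModel "example" {
            //       vertices { ... }
            //       edges { ... }
            //       nodes { ... }
            //       polygons { ... }
            //       brushes { ... }
            //   }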
Esempio n. 40
0
        public override bool Parse(string text)
        {
            if (this.Disposed == true)
            {
                throw new ObjectDisposedException(this.GetType().Name);
            }

            idLexer lexer = new idLexer(idDeclFile.LexerOptions);

            lexer.LoadMemory(text, this.FileName, this.LineNumber);
            lexer.SkipUntilString("{");

            List <SkinMapping> mappings         = new List <SkinMapping>();
            List <string>      associatedModels = new List <string>();

            idToken token, token2;
            string  tokenLower;

            while (true)
            {
                if ((token = lexer.ReadToken()) == null)
                {
                    break;
                }

                tokenLower = token.ToString().ToLower();

                if (tokenLower == "}")
                {
                    break;
                }
                else if ((token2 = lexer.ReadToken()) == null)
                {
                    lexer.Warning("Unexpected end of file");
                    MakeDefault();

                    break;
                }
                else if (tokenLower == "model")
                {
                    associatedModels.Add(token2.ToString());
                    continue;
                }

                SkinMapping map = new SkinMapping();
                map.To = idE.DeclManager.FindMaterial(token2.ToString());

                if (tokenLower == "*")
                {
                    // wildcard.
                    map.From = null;
                }
                else
                {
                    map.From = idE.DeclManager.FindMaterial(token.ToString());
                }

                mappings.Add(map);
            }

            _mappings         = mappings.ToArray();
            _associatedModels = associatedModels.ToArray();

            return(false);
        }
Esempio n. 41
0
		public override bool Parse(string text)
		{
			if(this.Disposed == true)
			{
				throw new ObjectDisposedException(this.GetType().Name);
			}

			idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination | LexerOptions.NoFatalErrors);
			lexer.LoadMemory(text, this.FileName, this.LineNumber);
			lexer.SkipUntilString("{");

			idToken token;

			_text = string.Empty;

			string tokenLower;
			string tokenValue;

			// scan through, identifying each individual parameter
			while(true)
			{
				if((token = lexer.ReadToken()) == null)
				{
					break;
				}

				tokenValue = token.ToString();
				tokenLower = tokenValue.ToLower();

				if(tokenValue == "}")
				{
					break;
				}
				else if(tokenLower == "subject")
				{
					_subject = lexer.ReadToken().ToString();
				}
				else if(tokenLower == "to")
				{
					_to = lexer.ReadToken().ToString();
				}
				else if(tokenLower == "from")
				{
					_from = lexer.ReadToken().ToString();
				}
				else if(tokenLower == "date")
				{
					 _date = lexer.ReadToken().ToString();
				}
				else if(tokenLower == "text")
				{
					token = lexer.ReadToken();
					tokenValue = token.ToString();

					if(tokenValue != "{")
					{
						lexer.Warning("Email dec '{0}' had a parse error", this.Name);
						return false;
					}

					while(((token = lexer.ReadToken()) != null) && (token.ToString() != "}"))
					{
						_text += token.ToString();
					}
				}
				else if(tokenLower == "image")
				{
					_image = lexer.ReadToken().ToString();
				}
			}

			if(lexer.HadError == true)
			{
				lexer.Warning("Email decl '{0}' had a parse error", this.Name);
				return false;
			}

			return true;
		}
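
The keywords recognized above are subject, to, from, date, image and a braced text block whose tokens are concatenated into _text. A hedged illustration of an email decl body (every value below is made up):

			// Hedged sample input, not taken from a real decl file:
			//
			//   email example_email {
			//       subject  "Re: shipment"
			//       to       "j.doe"
			//       from     "someone@example"
			//       date     "11-14-2145"
			//       text {
			//           "Lorem ipsum dolor sit amet."
			//       }
			//   }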