/// <summary>
/// Parses an image program from the given source string.
/// </summary>
/// <param name="source">Image program text; also used as the lexer's buffer name.</param>
/// <param name="timeStamp">Receives the timestamp tracked by the parse overload.</param>
/// <param name="depth">Receives the texture depth determined by the parse overload.</param>
/// <returns>The texture produced by the shared parse overload.</returns>
public Texture2D ParseImageProgram(string source, ref DateTime timeStamp, ref TextureDepth depth)
{
	// forgiving lexer setup suitable for image program fragments
	LexerOptions options = LexerOptions.NoFatalErrors
		| LexerOptions.NoStringConcatination
		| LexerOptions.NoStringEscapeCharacters
		| LexerOptions.AllowPathNames;

	_lexer = new idLexer(options);
	_lexer.LoadMemory(source, source);

	// delegate to the shared overload
	return ParseImageProgram(ref timeStamp, ref depth, false);
}
/// <summary>
/// Parses the entityDef body: a brace-delimited list of quoted key/value pairs.
/// "inherit" keys pull default values in from the referenced entityDef.
/// </summary>
/// <param name="text">Declaration text, starting at the decl name.</param>
/// <returns>True on success; false after MakeDefault() on a parse error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken keyToken;

	while((keyToken = lexer.ReadToken()) != null)
	{
		string key = keyToken.ToString();

		if(key == "}")
		{
			break;
		}

		if(keyToken.Type != TokenType.String)
		{
			lexer.Warning("Expected quoted string, but found '{0}'", key);
			MakeDefault();

			return false;
		}

		idToken valueToken = lexer.ReadToken();

		if(valueToken == null)
		{
			lexer.Warning("Unexpected end of file");
			MakeDefault();

			return false;
		}

		if(_dict.ContainsKey(key) == true)
		{
			lexer.Warning("'{0}' already defined", key);
		}

		_dict.Set(key, valueToken.ToString());
	}

	// we always automatically set a "classname" key to our name
	_dict.Set("classname", this.Name);

	// "inherit" keys will cause all values from another entityDef to be copied into this one
	// if they don't conflict.  We can't have circular recursions, because each entityDef will
	// never be parsed more than once

	// find all of the dicts first, because copying inherited values will modify the dict
	List<idDeclEntity> inherited = new List<idDeclEntity>();
	List<string> removedKeys = new List<string>();

	foreach(KeyValuePair<string, string> kvp in _dict.MatchPrefix("inherit"))
	{
		idDeclEntity copy = idE.DeclManager.FindType<idDeclEntity>(DeclType.EntityDef, kvp.Value, false);

		if(copy == null)
		{
			lexer.Warning("Unknown entityDef '{0}' inherited by '{1}'", kvp.Value, this.Name);
		}
		else
		{
			inherited.Add(copy);
		}

		// delete this key/value pair
		removedKeys.Add(kvp.Key);
	}

	_dict.Remove(removedKeys.ToArray());

	// now copy over the inherited key / value pairs
	foreach(idDeclEntity def in inherited)
	{
		_dict.SetDefaults(def._dict);
	}

	// precache all referenced media
	idE.Game.CacheDictionaryMedia(_dict);

	return true;
}
/// <summary>
/// Load the given source text into the parser.
/// </summary>
/// <param name="content">Script text to preprocess.</param>
/// <param name="name">Name used for diagnostics; stored as the current file name.</param>
/// <returns>True if the underlying lexer accepted the source; otherwise false.</returns>
public bool LoadMemory(string content, string name)
{
	if(_loaded == true)
	{
		idConsole.FatalError("idScriptParser::LoadMemory: another source already loaded");
		return false;
	}

	idLexer sourceLexer = new idLexer(_options);
	sourceLexer.Punctuation = _punctuation;
	sourceLexer.LoadMemory(content, name);

	if(sourceLexer.IsLoaded == false)
	{
		return false;
	}

	_fileName = name;

	// reset all parser state before pushing the new script
	_scriptStack.Clear();
	_indentStack.Clear();
	_tokens.Clear();

	_skip = 0;
	_loaded = true;

	_scriptStack.Push(sourceLexer);

	// lazily build the global define dictionary on first load
	if(_defineDict == null)
	{
		_defines.Clear();
		_defineDict = new Dictionary<string, ScriptDefinition>(StringComparer.OrdinalIgnoreCase);

		AddGlobalDefinesToSource();
	}

	return true;
}
/// <summary>
/// Converts a comma separated list of content names into a ContentFlags mask.
/// </summary>
/// <param name="str">Comma separated content flag names.</param>
/// <returns>The combined flags; ContentFlags.None for an empty string.</returns>
private ContentFlags ContentsFromString(string str)
{
	idLexer lexer = new idLexer();
	lexer.LoadMemory(str, "ContentsFromString");

	ContentFlags contents = ContentFlags.None;
	idToken token;

	while((token = lexer.ReadToken()) != null)
	{
		string name = token.ToString();

		if(name == ",")
		{
			continue;
		}

		// a few legacy names don't match the enum member names directly
		if(name == "aas_solid")
		{
			name = "AasSolid";
		}
		else if(name == "aas_obstacle")
		{
			name = "AasObstacle";
		}
		else if(name == "flashlight_trigger")
		{
			name = "FlashlightTrigger";
		}

		contents |= (ContentFlags) Enum.Parse(typeof(ContentFlags), name, true);
	}

	return contents;
}
/// <summary>
/// Loads a language dictionary from the given file.
/// </summary>
/// <param name="fileName">Language file to read.</param>
/// <param name="clear">True to drop any previously loaded strings first.</param>
/// <returns>False if the file could not be read or parsed; otherwise true.</returns>
public bool Load(string fileName, bool clear)
{
	if(clear == true)
	{
		Clear();
	}

	byte[] data = idE.FileSystem.ReadFile(fileName);

	if(data == null)
	{
		// let whoever called us deal with the failure (so sys_lang can be reset)
		return false;
	}

	idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);
	lexer.LoadMemory(Encoding.UTF8.GetString(data), fileName);

	if(lexer.IsLoaded == false)
	{
		return false;
	}

	lexer.ExpectTokenString("{");

	idToken keyToken;

	while((keyToken = lexer.ReadToken()) != null)
	{
		if(keyToken.ToString() == "}")
		{
			break;
		}

		idToken valueToken = lexer.ReadToken();

		if(valueToken == null)
		{
			// no value token; let the outer read decide whether we're at EOF
			continue;
		}

		if(valueToken.ToString() == "}")
		{
			break;
		}

		_regexReplaceIndex = 0;

		// stock d3 language files contain sprintf formatters, we need to replace them
		string val = Regex.Replace(valueToken.ToString(), "%s|%d|%x", new MatchEvaluator(ReplaceHandler));

		_elements.Add(keyToken.ToString(), val);
	}

	idConsole.WriteLine("{0} strings read from {1}", _elements.Count, fileName);

	return true;
}
/// <summary>
/// Rebuilds the display choices and the backing values lists from the
/// "choices" and "values" definition strings when either has changed.
/// </summary>
/// <remarks>
/// Entries in both strings are ';' separated.  Choice text is run through
/// the language dictionary; values support negative numbers via a leading '-'.
/// </remarks>
private void UpdateChoicesAndValues()
{
	idToken token;
	string accum = string.Empty;

	// only re-tokenize when the choices string differs from what we last latched.
	// fixed: the comparison was inverted (== true), which meant the choices
	// list was never populated on first use (latched starts out different).
	if(_latchedChoices.Equals(_choicesStr.ToString(), StringComparison.OrdinalIgnoreCase) == false)
	{
		_choices.Clear();

		idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);

		if(lexer.LoadMemory(_choicesStr.ToString(), "<ChoiceList>") == true)
		{
			while((token = lexer.ReadToken()) != null)
			{
				if(token.ToString() == ";")
				{
					if(accum.Length > 0)
					{
						// translate the completed choice through the language dict
						accum = idE.Language.Get(accum.TrimEnd());
						_choices.Add(accum);

						accum = string.Empty;
					}

					continue;
				}

				accum += token.ToString();
				accum += " ";
			}

			if(accum.Length > 0)
			{
				_choices.Add(accum.TrimEnd());
			}
		}

		_latchedChoices = _choicesStr.ToString();
	}

	// NOTE(review): this block compares and latches _latchedChoices against the
	// *values* string; a separate latch field for values may be intended - confirm.
	if((_choiceValues.ToString() != string.Empty) && (_latchedChoices.Equals(_choiceValues.ToString(), StringComparison.OrdinalIgnoreCase) == false))
	{
		_values.Clear();

		accum = string.Empty;
		bool negNum = false;

		idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);

		if(lexer.LoadMemory(_choiceValues.ToString(), "<ChoiceVals>") == true)
		{
			while((token = lexer.ReadToken()) != null)
			{
				if(token.ToString() == "-")
				{
					// remember the sign; it is emitted with the next token
					negNum = true;
				}
				else if(token.ToString() == ";")
				{
					if(accum.Length > 0)
					{
						_values.Add(accum.TrimEnd());
						accum = string.Empty;
					}
				}
				else
				{
					// fixed: the original consumed the token following a "-" in a
					// separate else-if branch, appending only the sign and silently
					// dropping the digits of every negative value.
					if(negNum == true)
					{
						accum += "-";
						negNum = false;
					}

					accum += token.ToString();
					accum += " ";
				}
			}

			if(accum.Length > 0)
			{
				_values.Add(accum.TrimEnd());
			}
		}

		if(_choices.Count != _values.Count)
		{
			idConsole.Warning("idChoiceWindow:: gui '{0}' window '{1}' has value count unequal to choices count", this.UserInterface.SourceFile, this.Name);
		}

		_latchedChoices = _choiceValues.ToString();
	}
}
/// <summary>
/// Adds implicit stages to the material.
/// </summary>
/// <remarks>
/// If a material has diffuse or specular stages without any
/// bump stage, add an implicit _flat bumpmap stage.
/// <p/>
/// It is valid to have either a diffuse or specular without the other.
/// <p/>
/// It is valid to have a reflection map and a bump map for bumpy reflection.
/// </remarks>
/// <param name="textureRepeatDefault">Texture repeat mode used for the generated stages.</param>
private void AddImplicitStages(TextureRepeat textureRepeatDefault = TextureRepeat.Repeat)
{
	bool hasDiffuse = false;
	bool hasSpecular = false;
	bool hasBump = false;
	bool hasReflection = false;

	int count = _parsingData.Stages.Count;

	for(int i = 0; i < count; i++)
	{
		switch(_parsingData.Stages[i].Lighting)
		{
			case StageLighting.Bump:
				hasBump = true;
				break;

			case StageLighting.Diffuse:
				hasDiffuse = true;
				break;

			case StageLighting.Specular:
				hasSpecular = true;
				break;
		}

		if(_parsingData.Stages[i].Texture.TextureCoordinates == TextureCoordinateGeneration.ReflectCube)
		{
			hasReflection = true;
		}
	}

	// if it doesn't have an interaction at all, don't add anything
	if((hasBump == false) && (hasDiffuse == false) && (hasSpecular == false))
	{
		return;
	}

	idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);

	if(hasBump == false)
	{
		// supply a flat normal map so the interaction code always has a bump stage
		string bump = "blend bumpmap\nmap _flat\n}\n";

		lexer.LoadMemory(bump, "bumpmap");
		ParseStage(lexer, textureRepeatDefault);
	}

	if((hasDiffuse == false) && (hasSpecular == false) && (hasReflection == false))
	{
		// supply a white diffuse map.  fixed: the original text wrongly declared
		// this stage as "blend bumpmap", producing a second bump stage instead
		// of the implicit diffuse stage (it was even loaded under "diffusemap").
		string diffuse = "blend diffusemap\nmap _white\n}\n";

		lexer.LoadMemory(diffuse, "diffusemap");
		ParseStage(lexer, textureRepeatDefault);
	}
}
/// <summary>
/// Parses the body of a material definition: global keywords, stage
/// shortcuts (diffusemap/specularmap/bumpmap) and explicit '{' stage blocks,
/// until the closing '}'.
/// </summary>
/// <remarks>
/// Sets MaterialFlags.Defaulted on a parse error.  After the keyword loop it
/// adds implicit stages, sorts interaction stages, resolves two sided culling
/// and warns if stages use more than one texgen.
/// </remarks>
/// <param name="lexer">Lexer positioned just inside the opening brace.</param>
private void ParseMaterial(idLexer lexer)
{
	_registerCount = PredefinedRegisterCount; // leave space for the parms to be copied in.

	for(int i = 0; i < _registerCount; i++)
	{
		_parsingData.RegisterIsTemporary[i] = true; // they aren't constants that can be folded.
	}

	TextureRepeat textureRepeatDefault = TextureRepeat.Repeat; // allow a global setting for repeat.

	idToken token = null;

	string tokenValue;
	string tokenLower;
	int count;

	while(true)
	{
		if(TestMaterialFlag(Renderer.MaterialFlags.Defaulted) == true)
		{
			// we have a parse error.
			return;
		}

		if((token = lexer.ExpectAnyToken()) == null)
		{
			this.MaterialFlag = MaterialFlags.Defaulted;
			return;
		}

		tokenValue = token.ToString();
		tokenLower = tokenValue.ToLower();

		// end of material definition
		if(tokenLower == "}")
		{
			break;
		}
		else if(tokenLower == "qer_editorimage")
		{
			token = lexer.ReadTokenOnLine();
			_editorImageName = (token != null) ? token.ToString() : string.Empty;

			lexer.SkipRestOfLine();
		}
		else if(tokenLower == "description")
		{
			token = lexer.ReadTokenOnLine();
			_description = (token != null) ? token.ToString() : string.Empty;
		}
		// check for the surface / content bit flags.
		else if(CheckSurfaceParameter(token) == true)
		{
			// handled entirely by CheckSurfaceParameter
		}
		else if(tokenLower == "polygonoffset")
		{
			this.MaterialFlag = Renderer.MaterialFlags.PolygonOffset;

			// offset defaults to 1 when no value follows on the same line
			if((token = lexer.ReadTokenOnLine()) == null)
			{
				_polygonOffset = 1;
			}
			else
			{
				_polygonOffset = token.ToFloat();
			}
		}
		// noshadow.
		else if(tokenLower == "noshadows")
		{
			this.MaterialFlag = MaterialFlags.NoShadows;
		}
		else if(tokenLower == "suppressinsubview")
		{
			_suppressInSubview = true;
		}
		else if(tokenLower == "portalsky")
		{
			_portalSky = true;
		}
		else if(tokenLower == "noselfshadow")
		{
			this.MaterialFlag = Renderer.MaterialFlags.NoSelfShadow;
		}
		else if(tokenLower == "noportalfog")
		{
			this.MaterialFlag = Renderer.MaterialFlags.NoPortalFog;
		}
		// forceShadows allows nodraw surfaces to cast shadows.
		else if(tokenLower == "forceshadows")
		{
			this.MaterialFlag = Renderer.MaterialFlags.ForceShadows;
		}
		// overlay / decal suppression.
		else if(tokenLower == "nooverlays")
		{
			_allowOverlays = false;
		}
		// monster blood overlay forcing for alpha tested or translucent surfaces.
		else if(tokenLower == "forceoverlays")
		{
			_parsingData.ForceOverlays = true;
		}
		else if(tokenLower == "translucent")
		{
			_coverage = MaterialCoverage.Translucent;
		}
		// global zero clamp.
		else if(tokenLower == "zeroclamp")
		{
			textureRepeatDefault = TextureRepeat.ClampToZero;
		}
		// global clamp.
		else if(tokenLower == "clamp")
		{
			textureRepeatDefault = TextureRepeat.Clamp;
		}
		// global clamp.
		else if(tokenLower == "alphazeroclamp")
		{
			// NOTE(review): maps to ClampToZero; if a zero-alpha repeat mode
			// exists it may be the intended value here - confirm.
			textureRepeatDefault = TextureRepeat.ClampToZero;
		}
		// forceOpaque is used for skies-behind-windows.
		else if(tokenLower == "forceopaque")
		{
			_coverage = MaterialCoverage.Opaque;
		}
		else if(tokenLower == "twosided")
		{
			_cullType = CullType.TwoSided;

			// twoSided implies no-shadows, because the shadow
			// volume would be coplanar with the surface, giving depth fighting
			// we could make this no-self-shadows, but it may be more important
			// to receive shadows from no-self-shadow monsters.
			this.MaterialFlag = Renderer.MaterialFlags.NoShadows;
		}
		else if(tokenLower == "backsided")
		{
			_cullType = CullType.Back;

			// the shadow code doesn't handle this, so just disable shadows.
			// We could fix this in the future if there was a need.
			this.MaterialFlag = Renderer.MaterialFlags.NoShadows;
		}
		else if(tokenLower == "foglight")
		{
			_fogLight = true;
		}
		else if(tokenLower == "blendlight")
		{
			_blendLight = true;
		}
		else if(tokenLower == "ambientlight")
		{
			_ambientLight = true;
		}
		else if(tokenLower == "mirror")
		{
			_sort = (float) MaterialSort.Subview;
			_coverage = MaterialCoverage.Opaque;
		}
		else if(tokenLower == "nofog")
		{
			_noFog = true;
		}
		else if(tokenLower == "unsmoothedtangents")
		{
			_unsmoothedTangents = true;
		}
		// lightFallofImage <imageprogram>
		// specifies the image to use for the third axis of projected
		// light volumes.
		else if(tokenLower == "lightfalloffimage")
		{
			_lightFalloffImage = idE.ImageManager.ImageFromFile(ParsePastImageProgram(lexer), TextureFilter.Default, false, TextureRepeat.Clamp, TextureDepth.Default);
		}
		// guisurf <guifile> | guisurf entity
		// an entity guisurf must have an idUserInterface
		// specified in the renderEntity.
		else if(tokenLower == "guisurf")
		{
			token = lexer.ReadTokenOnLine();
			tokenLower = token.ToString().ToLower();

			if(tokenLower == "entity")
			{
				_entityGui = 1;
			}
			else if(tokenLower == "entity2")
			{
				_entityGui = 2;
			}
			else if(tokenLower == "entity3")
			{
				_entityGui = 3;
			}
			else
			{
				_userInterface = idE.UIManager.FindInterface(token.ToString(), true);
			}
		}
		// sort.
		else if(tokenLower == "sort")
		{
			ParseSort(lexer);
		}
		// spectrum <integer>.
		else if(tokenLower == "spectrum")
		{
			int.TryParse(lexer.ReadTokenOnLine().ToString(), out _spectrum);
		}
		// deform < sprite | tube | flare >.
		else if(tokenLower == "deform")
		{
			ParseDeform(lexer);
		}
		// decalInfo <staySeconds> <fadeSeconds> (<start rgb>) (<end rgb>).
		else if(tokenLower == "decalinfo")
		{
			ParseDecalInfo(lexer);
		}
		// renderbump <args...>.
		else if(tokenLower == "renderbump")
		{
			_renderBump = lexer.ParseRestOfLine();
		}
		// diffusemap for stage shortcut.
		else if(tokenLower == "diffusemap")
		{
			// expand the shortcut into a full stage block and parse that
			idLexer newLexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
			newLexer.LoadMemory(string.Format("blend diffusemap\nmap {0}\n}}\n", ParsePastImageProgram(lexer)), "diffusemap");

			ParseStage(newLexer, textureRepeatDefault);
		}
		// specularmap for stage shortcut.
		else if(tokenLower == "specularmap")
		{
			idLexer newLexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
			newLexer.LoadMemory(string.Format("blend specularmap\nmap {0}\n}}\n", ParsePastImageProgram(lexer)), "specularmap");

			ParseStage(newLexer, textureRepeatDefault);
		}
		// normalmap for stage shortcut.
		else if(tokenLower == "bumpmap")
		{
			idLexer newLexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
			newLexer.LoadMemory(string.Format("blend bumpmap\nmap {0}\n}}\n", ParsePastImageProgram(lexer)), "bumpmap");

			ParseStage(newLexer, textureRepeatDefault);
		}
		// DECAL_MACRO for backwards compatibility with the preprocessor macros.
		else if(tokenLower == "decal_macro")
		{
			// polygonOffset
			this.MaterialFlag = Renderer.MaterialFlags.PolygonOffset;
			_polygonOffset = -1;

			// discrete
			_surfaceFlags |= SurfaceFlags.Discrete;
			_contentFlags &= ~ContentFlags.Solid;

			// sort decal.
			_sort = (float) MaterialSort.Decal;

			// noShadows
			this.MaterialFlag = Renderer.MaterialFlags.NoShadows;
		}
		else if(tokenValue == "{")
		{
			// create the new stage.
			ParseStage(lexer, textureRepeatDefault);
		}
		else
		{
			idConsole.WriteLine("unknown general material parameter '{0}' in '{1}'", tokenValue, this.Name);
			return;
		}
	}

	// add _flat or _white stages if needed.
	AddImplicitStages();

	// order the diffuse / bump / specular stages properly.
	SortInteractionStages();

	// if we need to do anything with normals (lighting or environment mapping)
	// and two sided lighting was asked for, flag
	// shouldCreateBackSides() and change culling back to single sided,
	// so we get proper tangent vectors on both sides.

	// we can't just call ReceivesLighting(), because the stages are still
	// in temporary form.
	if(_cullType == CullType.TwoSided)
	{
		count = _parsingData.Stages.Count;

		for(int i = 0; i < count; i++)
		{
			if((_parsingData.Stages[i].Lighting != StageLighting.Ambient) || (_parsingData.Stages[i].Texture.TextureCoordinates != TextureCoordinateGeneration.Explicit))
			{
				if(_cullType == CullType.TwoSided)
				{
					_cullType = CullType.Front;
					_shouldCreateBackSides = true;
				}

				break;
			}
		}
	}

	// currently a surface can only have one unique texgen for all the stages on old hardware.
	TextureCoordinateGeneration firstGen = TextureCoordinateGeneration.Explicit;

	count = _parsingData.Stages.Count;

	for(int i = 0; i < count; i++)
	{
		if(_parsingData.Stages[i].Texture.TextureCoordinates != TextureCoordinateGeneration.Explicit)
		{
			if(firstGen == TextureCoordinateGeneration.Explicit)
			{
				firstGen = _parsingData.Stages[i].Texture.TextureCoordinates;
			}
			else if(firstGen != _parsingData.Stages[i].Texture.TextureCoordinates)
			{
				idConsole.Warning("material '{0}' has multiple stages with a texgen", this.Name);
				break;
			}
		}
	}
}
/// <summary>
/// Parses the current material definition and finds all necessary images.
/// </summary>
/// <remarks>
/// After parsing, derives stage counts, subview detection, coverage, sort
/// order, per-stage depth state and overlay acceptance, then freezes the
/// parsing data into the material's final arrays.
/// </remarks>
/// <param name="text">Declaration text, starting at the decl name.</param>
/// <returns>True on success; false if the material was defaulted.</returns>
public override bool Parse(string text)
{
	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	// reset to the unparsed state.
	Clear();

	_parsingData = new MaterialParsingData(); // this is only valid during parsing.

	// parse it
	ParseMaterial(lexer);

	// TODO: fs_copyFiles
	// if we are doing an fs_copyfiles, also reference the editorImage
	/*if ( cvarSystem->GetCVarInteger( "fs_copyFiles" ) ) {
		GetEditorImage();
	}*/

	// count non-lit stages.
	_ambientStageCount = 0;
	_stageCount = _parsingData.Stages.Count;

	for(int i = 0; i < _stageCount; i++)
	{
		if(_parsingData.Stages[i].Lighting == StageLighting.Ambient)
		{
			_ambientStageCount++;
		}
	}

	// see if there is a subview stage
	if(_sort == (float) MaterialSort.Subview)
	{
		_hasSubview = true;
	}
	else
	{
		_hasSubview = false;

		int count = _parsingData.Stages.Count;

		for(int i = 0; i < count; i++)
		{
			if(_parsingData.Stages[i].Texture.Dynamic != null)
			{
				_hasSubview = true;
			}
		}
	}

	// automatically determine coverage if not explicitly set.
	if(_coverage == MaterialCoverage.Bad)
	{
		// automatically set MC_TRANSLUCENT if we don't have any interaction stages and
		// the first stage is blended and not an alpha test mask or a subview.
		if(_stageCount == 0)
		{
			// non-visible.
			_coverage = MaterialCoverage.Translucent;
		}
		else if(_stageCount != _ambientStageCount)
		{
			// we have an interaction draw.
			_coverage = MaterialCoverage.Opaque;
		}
		else
		{
			// fixed: the destination blend test must look at the unmasked draw
			// state bits; the original pre-masked with SourceBlendBits, which made
			// the destination-bits comparison always test 0 against zero-blend.
			MaterialStates drawStateBits = _parsingData.Stages[0].DrawStateBits;
			MaterialStates sourceBlendBits = drawStateBits & MaterialStates.SourceBlendBits;

			if(((drawStateBits & MaterialStates.DestinationBlendBits) != MaterialStates.DestinationBlendZero)
				|| (sourceBlendBits == MaterialStates.SourceBlendDestinationColor)
				|| (sourceBlendBits == MaterialStates.SourceBlendOneMinusDestinationColor)
				|| (sourceBlendBits == MaterialStates.SourceBlendDestinationAlpha)
				|| (sourceBlendBits == MaterialStates.SourceBlendOneMinusDestinationAlpha))
			{
				// blended with the destination
				_coverage = MaterialCoverage.Translucent;
			}
			else
			{
				_coverage = MaterialCoverage.Opaque;
			}
		}
	}

	// translucent automatically implies noshadows.
	if(_coverage == MaterialCoverage.Translucent)
	{
		this.MaterialFlag = MaterialFlags.NoShadows;
	}
	else
	{
		// mark the contents as opaque.
		_contentFlags |= ContentFlags.Opaque;
	}

	// the sorts can make reasonable defaults.
	if(_sort == (float) MaterialSort.Bad)
	{
		if(TestMaterialFlag(MaterialFlags.PolygonOffset) == true)
		{
			_sort = (float) MaterialSort.Decal;
		}
		else if(_coverage == MaterialCoverage.Translucent)
		{
			_sort = (float) MaterialSort.Medium;
		}
		else
		{
			_sort = (float) MaterialSort.Opaque;
		}
	}

	// anything that references _currentRender will automatically get sort = SS_POST_PROCESS
	// and coverage = MC_TRANSLUCENT.
	for(int i = 0; i < _stageCount; i++)
	{
		MaterialStage stage = _parsingData.Stages[i];

		if(stage.Texture.Image == idE.ImageManager.CurrentRenderImage)
		{
			if(_sort != (float) MaterialSort.PortalSky)
			{
				_sort = (float) MaterialSort.PostProcess;
				_coverage = MaterialCoverage.Translucent;
			}

			break;
		}

		if(stage.NewStage.IsEmpty == false)
		{
			NewMaterialStage newShaderStage = stage.NewStage;
			int imageCount = newShaderStage.FragmentProgramImages.Length;

			for(int j = 0; j < imageCount; j++)
			{
				if(newShaderStage.FragmentProgramImages[j] == idE.ImageManager.CurrentRenderImage)
				{
					if(_sort != (float) MaterialSort.PortalSky)
					{
						_sort = (float) MaterialSort.PostProcess;
						_coverage = MaterialCoverage.Translucent;
					}

					// force the outer loop to terminate as well
					i = _stageCount;
					break;
				}
			}
		}
	}

	// set the drawStateBits depth flags.
	for(int i = 0; i < _stageCount; i++)
	{
		MaterialStage stage = _parsingData.Stages[i];

		if(_sort == (float) MaterialSort.PostProcess)
		{
			// post-process effects fill the depth buffer as they draw, so only the
			// topmost post-process effect is rendered.
			stage.DrawStateBits |= MaterialStates.DepthFunctionLess;
		}
		else if((_coverage == MaterialCoverage.Translucent) || (stage.IgnoreAlphaTest == true))
		{
			// translucent surfaces can extend past the exactly marked depth buffer.
			stage.DrawStateBits |= MaterialStates.DepthFunctionLess | MaterialStates.DepthMask;
		}
		else
		{
			// opaque and perforated surfaces must exactly match the depth buffer,
			// which gets alpha test correct.
			stage.DrawStateBits |= MaterialStates.DepthFunctionEqual | MaterialStates.DepthMask;
		}

		// write the modified stage back (it was copied out above)
		_parsingData.Stages[i] = stage;
	}

	// determine if this surface will accept overlays / decals.
	if(_parsingData.ForceOverlays == true)
	{
		// explicitly flagged in material definition
		_allowOverlays = true;
	}
	else
	{
		if(this.IsDrawn == false)
		{
			_allowOverlays = false;
		}

		if(this.Coverage != MaterialCoverage.Opaque)
		{
			_allowOverlays = false;
		}

		if((this.SurfaceFlags & Renderer.SurfaceFlags.NoImpact) == Renderer.SurfaceFlags.NoImpact)
		{
			_allowOverlays = false;
		}
	}

	// add a tiny offset to the sort orders, so that different materials
	// that have the same sort value will at least sort consistantly, instead
	// of flickering back and forth.

	/* this messed up in-game guis
	if ( sort != SS_SUBVIEW ) {
		int hash, l;

		l = name.Length();
		hash = 0;

		for ( int i = 0 ; i < l ; i++ ) {
			hash ^= name[i];
		}

		sort += hash * 0.01;
	}
	*/

	if(_stageCount > 0)
	{
		_stages = _parsingData.Stages.ToArray();
	}

	if(_parsingData.Operations.Count > 0)
	{
		_ops = _parsingData.Operations.ToArray();
	}

	if(_registerCount > 0)
	{
		_expressionRegisters = new float[_registerCount];

		Array.Copy(_parsingData.ShaderRegisters, _expressionRegisters, _registerCount);
	}

	// see if the registers are completely constant, and don't need to be evaluated per-surface.
	CheckForConstantRegisters();

	_parsingData = null;

	// finish things up
	if(TestMaterialFlag(MaterialFlags.Defaulted) == true)
	{
		MakeDefault();
		return false;
	}

	return true;
}
/// <summary>
/// Takes a string and breaks it up into arg tokens.
/// </summary>
/// <param name="text">Command text to tokenize; an empty string clears the args.</param>
/// <param name="keepAsStrings">true to only seperate tokens from whitespace and comments, ignoring punctuation.</param>
public void TokenizeString(string text, bool keepAsStrings)
{
	// clear previous args.
	_args = new string[] { };

	if(text.Length == 0)
	{
		return;
	}

	idLexer lexer = new idLexer();
	lexer.LoadMemory(text, "idCmdSystem.TokenizeString");
	lexer.Options = LexerOptions.NoErrors
		| LexerOptions.NoWarnings
		| LexerOptions.NoStringConcatination
		| LexerOptions.AllowPathNames
		| LexerOptions.NoStringEscapeCharacters
		| LexerOptions.AllowIPAddresses
		| ((keepAsStrings == true) ? LexerOptions.OnlyStrings : 0);

	idToken token;
	idToken number;
	List<string> newArgs = new List<string>();

	// cap the arg count; hitting it is usually something malicious.
	// (the original also tracked token lengths in dead locals, removed here.)
	while(newArgs.Count < idE.MaxCommandArgs)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		string tokenValue = token.ToString();

		// check for negative numbers.
		if((keepAsStrings == false) && (tokenValue == "-"))
		{
			if((number = lexer.CheckTokenType(TokenType.Number, 0)) != null)
			{
				token.Set("-" + number);
			}
		}

		// check for cvar expansion
		if(tokenValue == "$")
		{
			if((token = lexer.ReadToken()) == null)
			{
				break;
			}

			if(idE.CvarSystem.IsInitialized == true)
			{
				token.Set(idE.CvarSystem.GetString(token.ToString()));
			}
			else
			{
				token.Set("<unknown>");
			}
		}

		// regular token
		newArgs.Add(token.ToString());
	}

	_args = newArgs.ToArray();
}
/// <summary>
/// Default parse: positions at the decl body and skips over it without
/// interpreting anything.  Derived decl types override this.
/// </summary>
/// <param name="text">Declaration text, starting at the decl name.</param>
/// <returns>Always true.</returns>
public virtual bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);

	// find the body and consume it wholesale
	lexer.SkipUntilString("{");
	lexer.SkipBracedSection(false);

	return true;
}
/// <summary>
/// This is used during both the initial load, and any reloads.
/// </summary>
/// <remarks>
/// Scans the decl file, identifying each "[type] name { ... }" declaration,
/// registering it with the decl manager and recording its source span so it
/// can be parsed lazily later.  Decls that were not re-seen during a reload
/// are defaulted.
/// </remarks>
/// <returns>The file checksum (see note at the bottom).</returns>
public int LoadAndParse()
{
	// load the text
	idConsole.DeveloperWriteLine("...loading '{0}'", this.FileName);

	byte[] data = idE.FileSystem.ReadFile(this.FileName);

	if(data == null)
	{
		idConsole.FatalError("couldn't load {0}", this.FileName);
		return 0;
	}

	string content = UTF8Encoding.UTF8.GetString(data);

	idLexer lexer = new idLexer();
	lexer.Options = LexerOptions;

	if(lexer.LoadMemory(content, this.FileName) == false)
	{
		idConsole.Error("Couldn't parse {0}", this.FileName);
		return 0;
	}

	// mark all the defs that were from the last reload of this file
	foreach(idDecl decl in _decls)
	{
		decl.RedefinedInReload = false;
	}

	// TODO: checksum = MD5_BlockChecksum( buffer, length );

	// NOTE(review): this is the decoded character count, not the byte count -
	// confirm that is what FileSize consumers expect.
	_fileSize = content.Length;

	int startMarker, sourceLine;
	int size;
	string name;
	bool reparse;
	idToken token;
	idDecl newDecl;
	DeclType identifiedType;
	string tokenValue;

	// scan through, identifying each individual declaration
	while(true)
	{
		// remember where this decl started so its source span can be recorded
		startMarker = lexer.FileOffset;
		sourceLine = lexer.LineNumber;

		// parse the decl type name
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		// get the decl type from the type name
		identifiedType = idE.DeclManager.GetDeclTypeFromName(tokenValue);

		if(identifiedType == DeclType.Unknown)
		{
			if(tokenValue == "{")
			{
				// if we ever see an open brace, we somehow missed the [type] <name> prefix
				lexer.Warning("Missing decl name");
				lexer.SkipBracedSection(false);

				continue;
			}
			else
			{
				if(this.DefaultType == DeclType.Unknown)
				{
					lexer.Warning("No type");
					continue;
				}

				lexer.UnreadToken = token;

				// use the default type
				identifiedType = this.DefaultType;
			}
		}

		// now parse the name
		if((token = lexer.ReadToken()) == null)
		{
			lexer.Warning("Type without definition at the end of file");
			break;
		}

		tokenValue = token.ToString();

		if(tokenValue == "{")
		{
			// if we ever see an open brace, we somehow missed the [type] <name> prefix
			lexer.Warning("Missing decl name");
			lexer.SkipBracedSection(false);

			continue;
		}

		// FIXME: export decls are only used by the model exporter, they are skipped here for now
		if(identifiedType == DeclType.ModelExport)
		{
			lexer.SkipBracedSection();
			continue;
		}

		name = tokenValue;

		// make sure there's a '{'
		if((token = lexer.ReadToken()) == null)
		{
			lexer.Warning("Type without definition at end of file");
			break;
		}

		tokenValue = token.ToString();

		if(tokenValue != "{")
		{
			lexer.Warning("Expecting '{{' but found '{0}'", tokenValue);
			continue;
		}

		lexer.UnreadToken = token;

		// now take everything until a matched closing brace
		lexer.SkipBracedSection();

		size = lexer.FileOffset - startMarker;

		// look it up, possibly getting a newly created default decl
		reparse = false;
		newDecl = idE.DeclManager.FindTypeWithoutParsing(identifiedType, name, false);

		if(newDecl != null)
		{
			// update the existing copy
			if((newDecl.SourceFile != this) || (newDecl.RedefinedInReload == true))
			{
				lexer.Warning("{0} '{1}' previously defined at {2}:{3}", identifiedType.ToString().ToLower(), name, newDecl.FileName, newDecl.LineNumber);
				continue;
			}

			if(newDecl.State != DeclState.Unparsed)
			{
				reparse = true;
			}
		}
		else
		{
			// allow it to be created as a default, then add it to the per-file list
			newDecl = idE.DeclManager.FindTypeWithoutParsing(identifiedType, name, true);

			if(newDecl == null)
			{
				lexer.Warning("could not instanciate decl '{0}' with name '{1}'", identifiedType.ToString().ToLower(), name);
				continue;
			}

			_decls.Add(newDecl);
		}

		// record the source span; actual parsing happens lazily from SourceText
		newDecl.RedefinedInReload = true;
		newDecl.SourceText = content.Substring(startMarker, size);
		newDecl.SourceFile = this;
		newDecl.SourceTextOffset = startMarker;
		newDecl.SourceTextLength = size;
		newDecl.SourceLine = sourceLine;
		newDecl.State = DeclState.Unparsed;

		// if it is currently in use, reparse it immedaitely
		if(reparse)
		{
			newDecl.ParseLocal();
		}
	}

	_lineCount = lexer.LineNumber;

	// any defs that weren't redefinedInReload should now be defaulted
	foreach(idDecl decl in _decls)
	{
		if(decl.RedefinedInReload == false)
		{
			decl.MakeDefault();
			decl.SourceTextOffset = decl.SourceFile.FileSize;
			decl.SourceTextLength = 0;
			decl.SourceLine = decl.SourceFile.LineCount;
		}
	}

	// NOTE(review): _checksum is never assigned in this method (see the TODO
	// above), so this returns the field's existing value - confirm intended.
	return _checksum;
}
/// <summary>
/// Parses the material declaration, defaulting the material on any error.
/// </summary>
/// <param name="text">Declaration text, starting at the decl name.</param>
/// <returns>True on success; false after MakeDefault() on a parse error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	// deeper functions can set this, which will cause MakeDefault() to be called at the end
	_errorDuringParse = false;

	bool parsedOk = ParseMaterial(lexer);

	if((parsedOk == false) || (_errorDuringParse == true))
	{
		MakeDefault();
		return false;
	}

	return true;
}
/// <summary>
/// Parses a model def: mesh, skin, anim, channel, offset and inherit blocks.
/// </summary>
/// <param name="text">Declaration text, starting at the decl name.</param>
/// <returns>True on success; false after MakeDefault() on any parse error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	int defaultAnimationCount = 0;

	idToken token;
	idToken token2;
	string tokenValue;
	string fileName;
	string extension;
	int count;
	idMD5Joint[] md5Joints;

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		if(tokenValue == "}")
		{
			break;
		}

		if(tokenValue == "inherit")
		{
			// inherit support has not been ported yet; original C++ kept below for reference
			idConsole.WriteLine("TODO: inherit");

			/*if( !src.ReadToken( &token2 ) ) {
				src.Warning( "Unexpected end of file" );
				MakeDefault();
				return false;
			}

			const idDeclModelDef *copy = static_cast<const idDeclModelDef *>( declManager->FindType( DECL_MODELDEF, token2, false ) );
			if ( !copy ) {
				common->Warning( "Unknown model definition '%s'", token2.c_str() );
			} else if ( copy->GetState() == DS_DEFAULTED ) {
				common->Warning( "inherited model definition '%s' defaulted", token2.c_str() );
				MakeDefault();
				return false;
			} else {
				CopyDecl( copy );
				numDefaultAnims = anims.Num();
			}*/
		}
		else if(tokenValue == "skin")
		{
			if((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return false;
			}

			_skin = idE.DeclManager.FindSkin(token2.ToString());

			if(_skin == null)
			{
				lexer.Warning("Skin '{0}' not found", token2.ToString());
				MakeDefault();

				return false;
			}
		}
		else if(tokenValue == "mesh")
		{
			if((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return false;
			}

			fileName = token2.ToString();
			extension = Path.GetExtension(fileName);

			// only MD5 meshes are valid here
			if(extension != idRenderModel_MD5.MeshExtension)
			{
				lexer.Warning("Invalid model for MD5 mesh");
				MakeDefault();

				return false;
			}

			_model = idE.RenderModelManager.FindModel(fileName);

			if(_model == null)
			{
				lexer.Warning("Model '{0}' not found", fileName);
				MakeDefault();

				return false;
			}
			else if(_model.IsDefault == true)
			{
				lexer.Warning("Model '{0}' defaulted", fileName);
				MakeDefault();

				return false;
			}

			// get the number of joints
			count = _model.JointCount;

			if(count == 0)
			{
				lexer.Warning("Model '{0}' has no joints", fileName);
			}

			// set up the joint hierarchy
			md5Joints = _model.Joints;

			_joints = new JointInfo[count];
			_jointParents = new int[count];
			_channelJoints = new int[(int) AnimationChannel.Count][];
			_channelJoints[0] = new int[count];

			for(int i = 0; i < count; i++)
			{
				// every joint starts on the "all" channel until a channel block reassigns it
				_joints[i] = new JointInfo();
				_joints[i].Channel = AnimationChannel.All;
				_joints[i].Index = i;

				if(md5Joints[i].Parent != null)
				{
					_joints[i].ParentIndex = _model.GetJointIndex(md5Joints[i].Parent);
				}
				else
				{
					// root joint
					_joints[i].ParentIndex = -1;
				}

				_jointParents[i] = _joints[i].ParentIndex;
				_channelJoints[0][i] = i;
			}
		}
		else if(tokenValue == "remove")
		{
			// anim removal has not been ported yet; original C++ kept below for reference
			idConsole.Warning("TODO: remove");

			// removes any anims whos name matches
			/*if( !src.ReadToken( &token2 ) ) {
				src.Warning( "Unexpected end of file" );
				MakeDefault();
				return false;
			}

			num = 0;
			for( i = 0; i < anims.Num(); i++ ) {
				if ( ( token2 == anims[ i ]->Name() ) || ( token2 == anims[ i ]->FullName() ) ) {
					delete anims[ i ];
					anims.RemoveIndex( i );
					if ( i >= numDefaultAnims ) {
						src.Warning( "Anim '%s' was not inherited.  Anim should be removed from the model def.", token2.c_str() );
						MakeDefault();
						return false;
					}
					i--;
					numDefaultAnims--;
					num++;
					continue;
				}
			}

			if ( !num ) {
				src.Warning( "Couldn't find anim '%s' to remove", token2.c_str() );
				MakeDefault();
				return false;
			}*/
		}
		else if(tokenValue == "anim")
		{
			if(_model == null)
			{
				lexer.Warning("Must specify mesh before defining anims");
				MakeDefault();

				return false;
			}
			else if(ParseAnimation(lexer, defaultAnimationCount) == false)
			{
				MakeDefault();

				return false;
			}
		}
		else if(tokenValue == "offset")
		{
			float[] tmp = lexer.Parse1DMatrix(3);

			if(tmp == null)
			{
				lexer.Warning("Expected vector following 'offset'");
				MakeDefault();

				return false;
			}

			_offset = new Vector3(tmp[0], tmp[1], tmp[2]);
		}
		else if(tokenValue == "channel")
		{
			if(_model == null)
			{
				lexer.Warning("Must specify mesh before defining channels");
				MakeDefault();

				return false;
			}

			// set the channel for a group of joints
			if((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return false;
			}

			// NOTE(review): the check is for "(" but the warning text says "{" -
			// one of the two is probably wrong; confirm against the decl syntax.
			if(lexer.CheckTokenString("(") == false)
			{
				lexer.Warning("Expected { after '{0}'", token2.ToString());
				MakeDefault();

				return false;
			}

			int i;
			int channelCount = (int) AnimationChannel.Count;

			// match the channel name, skipping the implicit "all" channel
			for(i = (int) AnimationChannel.All + 1; i < channelCount; i++)
			{
				if(ChannelNames[i].Equals(token2.ToString(), StringComparison.OrdinalIgnoreCase) == true)
				{
					break;
				}
			}

			if(i >= channelCount)
			{
				lexer.Warning("Unknown channel '{0}'", token2.ToString());
				MakeDefault();

				return false;
			}

			int channel = i;
			StringBuilder jointNames = new StringBuilder();
			string token2Value;

			// collect the joint name expression up to the closing ")"
			while(lexer.CheckTokenString(")") == false)
			{
				if((token2 = lexer.ReadToken()) == null)
				{
					lexer.Warning("Unexpected end of file");
					MakeDefault();

					return false;
				}

				token2Value = token2.ToString();
				jointNames.Append(token2Value);

				// "*" and "-" operators glue to the following name, so no separator
				if((token2Value != "*") && (token2Value != "-"))
				{
					jointNames.Append(" ");
				}
			}

			int[] jointList = GetJointList(jointNames.ToString());
			int jointLength = jointList.Length;

			List<int> channelJoints = new List<int>();

			for(count = i = 0; i < jointLength; i++)
			{
				int jointIndex = jointList[i];

				if(_joints[jointIndex].Channel != AnimationChannel.All)
				{
					// NOTE(review): "Join" is presumably a typo for "Joint" in this warning.
					lexer.Warning("Join '{0}' assigned to multiple channels", _model.GetJointName(jointIndex));
					continue;
				}

				_joints[jointIndex].Channel = (AnimationChannel) channel;
				channelJoints.Add(jointIndex);
			}

			_channelJoints[channel] = channelJoints.ToArray();
		}
		else
		{
			lexer.Warning("unknown token '{0}'", token.ToString());
			MakeDefault();

			return false;
		}
	}

	return true;
}