/// <summary>
/// Parses an image program from an in-memory source string.
/// </summary>
/// <param name="source">Image program text; also used as the lexer's buffer name.</param>
/// <param name="timeStamp">Receives the newest timestamp of any referenced image.</param>
/// <param name="depth">Receives the texture depth of the result.</param>
/// <returns>The parsed texture.</returns>
public Texture2D ParseImageProgram(string source, ref DateTime timeStamp, ref TextureDepth depth)
{
	_lexer = new idLexer(LexerOptions.NoFatalErrors
		| LexerOptions.NoStringConcatination
		| LexerOptions.NoStringEscapeCharacters
		| LexerOptions.AllowPathNames);
	_lexer.LoadMemory(source, source);

	return ParseImageProgram(ref timeStamp, ref depth, false);
}
/// <summary>
/// Parses a video decl: audio/info/name/preview/video key-value pairs inside a braced block.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>False if the lexer reported an error; otherwise true.</returns>
public override bool Parse(string text)
{
	idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames
		| LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination
		| LexerOptions.NoFatalErrors);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;

	while((token = lexer.ReadToken()) != null)
	{
		string keyword = token.ToString().ToLower();

		if(keyword == "}")
		{
			break;
		}

		switch(keyword)
		{
			case "audio":
				_audio = lexer.ReadToken().ToString();
				// touch the sound decl so it gets registered/precached
				idE.DeclManager.FindSound(_audio);
				break;

			case "info":
				_info = lexer.ReadToken().ToString();
				break;

			case "name":
				_videoName = lexer.ReadToken().ToString();
				break;

			case "preview":
				_preview = lexer.ReadToken().ToString();
				break;

			case "video":
				_video = lexer.ReadToken().ToString();
				// touch the material so it gets registered/precached
				idE.DeclManager.FindMaterial(_video);
				break;
		}
	}

	if(lexer.HadError == true)
	{
		lexer.Warning("Video decl '{0}' had a parse error", this.Name);
		return false;
	}

	return true;
}
/// <summary>
/// Loads a language dictionary file, optionally clearing previously loaded strings first.
/// </summary>
/// <param name="fileName">Path of the language file to read.</param>
/// <param name="clear">True to discard all existing entries before loading.</param>
/// <returns>False if the file could not be read or lexed; otherwise true.</returns>
public bool Load(string fileName, bool clear)
{
	if (clear == true)
	{
		Clear();
	}

	byte[] data = idE.FileSystem.ReadFile(fileName);

	if (data == null)
	{
		// let whoever called us deal with the failure (so sys_lang can be reset)
		return false;
	}

	idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);
	lexer.LoadMemory(Encoding.UTF8.GetString(data), fileName);

	if (lexer.IsLoaded == false)
	{
		return false;
	}

	idToken token, token2;

	lexer.ExpectTokenString("{");

	// key/value pairs until the closing brace
	while ((token = lexer.ReadToken()) != null)
	{
		if (token.ToString() == "}")
		{
			break;
		}
		else if ((token2 = lexer.ReadToken()) != null)
		{
			if (token2.ToString() == "}")
			{
				break;
			}

			_regexReplaceIndex = 0;

			// stock d3 language files contain sprintf formatters, we need to replace them
			string val = token2.ToString();
			val = Regex.Replace(val, "%s|%d|%x", new MatchEvaluator(ReplaceHandler));

			// FIX: Add() throws ArgumentException on a duplicate key, so a language
			// file that redefines an entry would abort the whole load; keep the last
			// definition instead.
			_elements[token.ToString()] = val;
		}
	}

	idConsole.WriteLine("{0} strings read from {1}", _elements.Count, fileName);

	return true;
}
/// <summary>
/// Parses an FX decl. Full parsing is not implemented yet; the body is only stepped over.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>False if the lexer reported an error; otherwise true.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;

	idConsole.Warning("TODO: actual fx parsing, we only step over the block");

	while ((token = lexer.ReadToken()) != null)
	{
		string keyword = token.ToString().ToLower();

		if (keyword == "}")
		{
			break;
		}

		if (keyword == "bindto")
		{
			// consume the joint name; not stored yet
			token = lexer.ReadToken();
			idConsole.Warning("TODO: FX: joint = token;");
		}
		else if (keyword == "{")
		{
			idConsole.Warning("TODO: FX: idFXSingleAction action;");
			ParseSingleAction(lexer /*, action*/);
			// events.Append(action);
			continue;
		}
	}

	if (lexer.HadError == true)
	{
		lexer.Warning("FX decl '{0}' had a parse error", this.Name);
		return false;
	}

	return true;
}
/// <summary>
/// Default decl parse: skips the braced body without interpreting it.
/// Subclasses override this to do real parsing.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>Always true.</returns>
public virtual bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);

	// advance to the opening brace, then step over the whole block
	lexer.SkipUntilString("{");
	lexer.SkipBracedSection(false);

	return true;
}
/// <summary>
/// Converts a comma-separated list of content flag names into a ContentFlags bitmask.
/// </summary>
/// <param name="str">Flag names separated by commas.</param>
/// <returns>The OR of all named flags.</returns>
private ContentFlags ContentsFromString(string str)
{
	idLexer lexer = new idLexer();
	lexer.LoadMemory(str, "ContentsFromString");

	ContentFlags result = ContentFlags.None;
	idToken token;

	while ((token = lexer.ReadToken()) != null)
	{
		string name = token.ToString();

		if (name == ",")
		{
			continue;
		}

		// a few decl-file spellings don't match the enum member names directly
		if (name == "aas_solid")
		{
			name = "AasSolid";
		}
		else if (name == "aas_obstacle")
		{
			name = "AasObstacle";
		}
		else if (name == "flashlight_trigger")
		{
			name = "FlashlightTrigger";
		}

		// case-insensitive lookup handles the remaining names
		result |= (ContentFlags) Enum.Parse(typeof(ContentFlags), name, true);
	}

	return result;
}
/// <summary>
/// Load the given source into the script parser.
/// </summary>
/// <param name="content">Source text to parse.</param>
/// <param name="name">Name used for error reporting and as the file name.</param>
/// <returns>False if a source is already loaded or the lexer failed; otherwise true.</returns>
public bool LoadMemory(string content, string name)
{
	// only one source may be active at a time
	if (_loaded == true)
	{
		idConsole.FatalError("idScriptParser::LoadMemory: another source already loaded");
		return false;
	}

	idLexer script = new idLexer(_options);
	script.Punctuation = _punctuation;
	script.LoadMemory(content, name);

	if (script.IsLoaded == false)
	{
		return false;
	}

	_fileName = name;

	// reset parser state before pushing the new script
	_scriptStack.Clear();
	_indentStack.Clear();
	_tokens.Clear();

	_skip = 0;
	_loaded = true;

	_scriptStack.Push(script);

	// first load ever: build the global define dictionary
	if (_defineDict == null)
	{
		_defines.Clear();
		_defineDict = new Dictionary<string, ScriptDefinition>(StringComparer.OrdinalIgnoreCase);

		AddGlobalDefinesToSource();
	}

	return true;
}
/// <summary>
/// Parses a material decl, defaulting the material if anything goes wrong.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>False (after MakeDefault) on any parse error; otherwise true.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	// deeper functions can set this, which will cause MakeDefault() to be called at the end
	_errorDuringParse = false;

	bool parsedOk = ParseMaterial(lexer);

	if ((parsedOk == false) || (_errorDuringParse == true))
	{
		MakeDefault();
		return false;
	}

	return true;
}
/// <summary>
/// Parses a table decl: optional "snap"/"clamp" flags followed by a braced,
/// comma-separated list of float values.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>True on success; false (after MakeDefault) on any parse error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException("idDeclTable");
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;
	List<float> values = new List<float>();
	string tokenLower;
	string tokenValue;

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();
		tokenLower = tokenValue.ToLower();

		if(tokenLower == "}")
		{
			break;
		}
		else if(tokenLower == "snap")
		{
			_snap = true;
		}
		else if(tokenLower == "clamp")
		{
			_clamp = true;
		}
		else if(tokenLower == "{")
		{
			// the value list: floats separated by commas, terminated by '}'
			while(true)
			{
				bool errorFlag;
				float v = lexer.ParseFloat(out errorFlag);

				if(errorFlag == true)
				{
					// we got something non-numeric
					MakeDefault();
					return false;
				}

				values.Add(v);

				// FIX: ReadToken can return null at end of input; the old code
				// dereferenced it unconditionally (NullReferenceException).
				token = lexer.ReadToken();

				if(token == null)
				{
					lexer.Warning("expected comma or brace");
					MakeDefault();
					return false;
				}

				tokenValue = token.ToString();

				if(tokenValue == "}")
				{
					break;
				}
				else if(tokenValue == ",")
				{
					continue;
				}

				lexer.Warning("expected comma or brace");
				MakeDefault();
				return false;
			}
		}
		else
		{
			lexer.Warning("unknown token '{0}'", tokenValue);
			MakeDefault();
			return false;
		}
	}

	// FIX: an empty table would index values[0] and throw; treat it as a parse error.
	if(values.Count == 0)
	{
		lexer.Warning("unknown token '{0}'", "}");
		MakeDefault();
		return false;
	}

	// copy the 0 element to the end, so lerping doesn't
	// need to worry about the wrap case
	float val = values[0];
	values.Add(val);

	_values = values.ToArray();

	return true;
}
/// <summary>
/// Parses an entityDef decl: a flat dictionary of quoted key/value pairs, then
/// resolves any "inherit" keys by copying defaults from the referenced entityDefs.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>True on success; false (after MakeDefault) on a parse error.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;
	idToken token2;
	string value;

	// read "key" "value" pairs until the closing brace
	while (true)
	{
		if ((token = lexer.ReadToken()) == null)
		{
			break;
		}

		value = token.ToString();

		if (value == "}")
		{
			break;
		}

		// entityDef keys must be quoted strings
		if (token.Type != TokenType.String)
		{
			lexer.Warning("Expected quoted string, but found '{0}'", value);
			MakeDefault();

			return(false);
		}

		if ((token2 = lexer.ReadToken()) == null)
		{
			lexer.Warning("Unexpected end of file");
			MakeDefault();

			return(false);
		}

		// a duplicate key only warns; the new value overwrites the old one below
		if (_dict.ContainsKey(value) == true)
		{
			lexer.Warning("'{0}' already defined", value);
		}

		_dict.Set(value, token2.ToString());
	}

	// we always automatically set a "classname" key to our name
	_dict.Set("classname", this.Name);

	// "inherit" keys will cause all values from another entityDef to be copied into this one
	// if they don't conflict.  We can't have circular recursions, because each entityDef will
	// never be parsed more than once

	// find all of the dicts first, because copying inherited values will modify the dict
	List <idDeclEntity> defList = new List <idDeclEntity>();
	List <string> keysToRemove = new List <string>();

	foreach (KeyValuePair <string, string> kvp in _dict.MatchPrefix("inherit"))
	{
		idDeclEntity copy = idE.DeclManager.FindType <idDeclEntity>(DeclType.EntityDef, kvp.Value, false);

		if (copy == null)
		{
			lexer.Warning("Unknown entityDef '{0}' inherited by '{1}'", kvp.Value, this.Name);
		}
		else
		{
			defList.Add(copy);
		}

		// delete this key/value pair
		keysToRemove.Add(kvp.Key);
	}

	// can't remove while iterating MatchPrefix, so remove all the inherit keys afterwards
	_dict.Remove(keysToRemove.ToArray());

	// now copy over the inherited key / value pairs
	// SetDefaults only fills in keys this decl doesn't already define, so local
	// values win over inherited ones
	foreach (idDeclEntity def in defList)
	{
		_dict.SetDefaults(def._dict);
	}

	// precache all referenced media
	// do this as long as we arent in modview
	idE.Game.CacheDictionaryMedia(_dict);

	return(true);
}
/// <summary>
/// Parses a skin decl: from/to material remappings, optional "model" associations,
/// and a "*" wildcard source that matches any material.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>True on success; false (after MakeDefault) on a parse error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	List<SkinMapping> mappings = new List<SkinMapping>();
	List<string> associatedModels = new List<string>();

	idToken token, token2;
	string tokenLower;

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenLower = token.ToString().ToLower();

		if(tokenLower == "}")
		{
			break;
		}
		else if((token2 = lexer.ReadToken()) == null)
		{
			// FIX: a truncated decl previously fell through to the success path;
			// report it as a parse failure instead.
			lexer.Warning("Unexpected end of file");
			MakeDefault();

			return false;
		}
		else if(tokenLower == "model")
		{
			associatedModels.Add(token2.ToString());
			continue;
		}

		SkinMapping map = new SkinMapping();
		map.To = idE.DeclManager.FindMaterial(token2.ToString());

		if(tokenLower == "*")
		{
			// wildcard: remap every material
			map.From = null;
		}
		else
		{
			map.From = idE.DeclManager.FindMaterial(token.ToString());
		}

		mappings.Add(map);
	}

	_mappings = mappings.ToArray();
	_associatedModels = associatedModels.ToArray();

	// FIX: this method returned false unconditionally, flagging every successfully
	// parsed skin as failed; a completed parse must report success.
	return true;
}
/// <summary>
/// This is used during both the initial load, and any reloads.
/// Scans the decl file, identifies each individual declaration, and registers it
/// with the decl manager without fully parsing its body.
/// </summary>
/// <returns>The file checksum (currently never computed — see TODO below).</returns>
public int LoadAndParse()
{
	// load the text
	idConsole.DeveloperWriteLine("...loading '{0}'", this.FileName);

	byte[] data = idE.FileSystem.ReadFile(this.FileName);

	if (data == null)
	{
		idConsole.FatalError("couldn't load {0}", this.FileName);
		return(0);
	}

	string content = UTF8Encoding.UTF8.GetString(data);

	idLexer lexer = new idLexer();
	lexer.Options = LexerOptions;

	if (lexer.LoadMemory(content, this.FileName) == false)
	{
		idConsole.Error("Couldn't parse {0}", this.FileName);
		return(0);
	}

	// mark all the defs that were from the last reload of this file
	foreach (idDecl decl in _decls)
	{
		decl.RedefinedInReload = false;
	}

	// TODO: checksum = MD5_BlockChecksum( buffer, length );

	_fileSize = content.Length;

	int startMarker, sourceLine;
	int size;
	string name;
	bool reparse;
	idToken token;
	idDecl newDecl;
	DeclType identifiedType;
	string tokenValue;

	// scan through, identifying each individual declaration
	while (true)
	{
		// remember where this decl starts so its raw text can be sliced out later
		startMarker = lexer.FileOffset;
		sourceLine = lexer.LineNumber;

		// parse the decl type name
		if ((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		// get the decl type from the type name
		identifiedType = idE.DeclManager.GetDeclTypeFromName(tokenValue);

		if (identifiedType == DeclType.Unknown)
		{
			if (tokenValue == "{")
			{
				// if we ever see an open brace, we somehow missed the [type] <name> prefix
				lexer.Warning("Missing decl name");
				lexer.SkipBracedSection(false);

				continue;
			}
			else
			{
				if (this.DefaultType == DeclType.Unknown)
				{
					lexer.Warning("No type");
					continue;
				}

				// the token was actually the decl name; push it back for re-reading
				lexer.UnreadToken = token;

				// use the default type
				identifiedType = this.DefaultType;
			}
		}

		// now parse the name
		if ((token = lexer.ReadToken()) == null)
		{
			lexer.Warning("Type without definition at the end of file");
			break;
		}

		tokenValue = token.ToString();

		if (tokenValue == "{")
		{
			// if we ever see an open brace, we somehow missed the [type] <name> prefix
			lexer.Warning("Missing decl name");
			lexer.SkipBracedSection(false);

			continue;
		}

		// FIXME: export decls are only used by the model exporter, they are skipped here for now
		if (identifiedType == DeclType.ModelExport)
		{
			lexer.SkipBracedSection();
			continue;
		}

		name = tokenValue;

		// make sure there's a '{'
		if ((token = lexer.ReadToken()) == null)
		{
			lexer.Warning("Type without definition at end of file");
			break;
		}

		tokenValue = token.ToString();

		if (tokenValue != "{")
		{
			lexer.Warning("Expecting '{{' but found '{0}'", tokenValue);
			continue;
		}

		// push the brace back so SkipBracedSection sees a balanced block
		lexer.UnreadToken = token;

		// now take everything until a matched closing brace
		lexer.SkipBracedSection();
		size = lexer.FileOffset - startMarker;

		// look it up, possibly getting a newly created default decl
		reparse = false;
		newDecl = idE.DeclManager.FindTypeWithoutParsing(identifiedType, name, false);

		if (newDecl != null)
		{
			// update the existing copy
			if ((newDecl.SourceFile != this) || (newDecl.RedefinedInReload == true))
			{
				lexer.Warning("{0} '{1}' previously defined at {2}:{3}", identifiedType.ToString().ToLower(), name, newDecl.FileName, newDecl.LineNumber);
				continue;
			}

			if (newDecl.State != DeclState.Unparsed)
			{
				reparse = true;
			}
		}
		else
		{
			// allow it to be created as a default, then add it to the per-file list
			newDecl = idE.DeclManager.FindTypeWithoutParsing(identifiedType, name, true);

			if (newDecl == null)
			{
				lexer.Warning("could not instanciate decl '{0}' with name '{1}'", identifiedType.ToString().ToLower(), name);
				continue;
			}

			_decls.Add(newDecl);
		}

		// record where the decl's raw text lives so it can be parsed lazily later
		newDecl.RedefinedInReload = true;
		newDecl.SourceText = content.Substring(startMarker, size);
		newDecl.SourceFile = this;
		newDecl.SourceTextOffset = startMarker;
		newDecl.SourceTextLength = size;
		newDecl.SourceLine = sourceLine;
		newDecl.State = DeclState.Unparsed;

		// if it is currently in use, reparse it immedaitely
		if (reparse)
		{
			newDecl.ParseLocal();
		}
	}

	_lineCount = lexer.LineNumber;

	// any defs that weren't redefinedInReload should now be defaulted
	foreach (idDecl decl in _decls)
	{
		if (decl.RedefinedInReload == false)
		{
			decl.MakeDefault();
			decl.SourceTextOffset = decl.SourceFile.FileSize;
			decl.SourceTextLength = 0;
			decl.SourceLine = decl.SourceFile.LineCount;
		}
	}

	// NOTE(review): _checksum is never assigned here (checksum TODO above) — confirm
	// callers don't rely on a meaningful value yet.
	return(_checksum);
}
/// <summary>
/// Takes a string and breaks it up into arg tokens.
/// </summary>
/// <param name="text">The command text to tokenize.</param>
/// <param name="keepAsStrings">true to only seperate tokens from whitespace and comments, ignoring punctuation.</param>
public void TokenizeString(string text, bool keepAsStrings)
{
	// clear previous args.
	_args = new string[] { };

	if (text.Length == 0)
	{
		return;
	}

	idLexer lexer = new idLexer();

	// FIX: configure the lexer before loading the source so the options are
	// guaranteed to apply to every token regardless of when the lexer reads them.
	lexer.Options = LexerOptions.NoErrors | LexerOptions.NoWarnings | LexerOptions.NoStringConcatination
		| LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowIPAddresses
		| ((keepAsStrings == true) ? LexerOptions.OnlyStrings : 0);
	lexer.LoadMemory(text, "idCmdSystem.TokenizeString");

	idToken token = null, number = null;
	List<string> newArgs = new List<string>();
	string tokenValue;

	while (true)
	{
		if (newArgs.Count == idE.MaxCommandArgs)
		{
			break; // this is usually something malicious.
		}

		if ((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		if ((keepAsStrings == false) && (tokenValue == "-"))
		{
			// check for negative numbers.
			if ((number = lexer.CheckTokenType(TokenType.Number, 0)) != null)
			{
				token.Set("-" + number);
			}
		}

		// check for cvar expansion
		if (tokenValue == "$")
		{
			if ((token = lexer.ReadToken()) == null)
			{
				break;
			}

			if (idE.CvarSystem.IsInitialized == true)
			{
				token.Set(idE.CvarSystem.GetString(token.ToString()));
			}
			else
			{
				token.Set("<unknown>");
			}
		}

		// regular token.
		// FIX: removed the dead len/totalLength bookkeeping — the values were
		// computed every iteration but never used.
		newArgs.Add(token.ToString());
	}

	_args = newArgs.ToArray();
}
/// <summary>
/// Parses a PDA decl: identity fields plus lists of referenced email, audio and video decls.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>False if the lexer reported an error; otherwise true.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;
	idToken value;

	while ((token = lexer.ReadToken()) != null)
	{
		string keyword = token.ToString().ToLower();

		if (keyword == "}")
		{
			break;
		}

		switch (keyword)
		{
			case "name":
				value = lexer.ReadToken();
				_pdaName = (value != null) ? value.ToString() : string.Empty;
				break;

			case "fullname":
				value = lexer.ReadToken();
				_fullName = (value != null) ? value.ToString() : string.Empty;
				break;

			case "icon":
				value = lexer.ReadToken();
				_icon = (value != null) ? value.ToString() : string.Empty;
				break;

			case "id":
				value = lexer.ReadToken();
				_id = (value != null) ? value.ToString() : string.Empty;
				break;

			case "post":
				value = lexer.ReadToken();
				_post = (value != null) ? value.ToString() : string.Empty;
				break;

			case "title":
				value = lexer.ReadToken();
				_title = (value != null) ? value.ToString() : string.Empty;
				break;

			case "security":
				value = lexer.ReadToken();
				_security = (value != null) ? value.ToString() : string.Empty;
				break;

			case "pda_email":
				// record the reference and touch the decl so it gets registered
				value = lexer.ReadToken();
				_emailList.Add(value.ToString());
				idE.DeclManager.FindType(DeclType.Email, value.ToString());
				break;

			case "pda_audio":
				value = lexer.ReadToken();
				_audioList.Add(value.ToString());
				idE.DeclManager.FindType(DeclType.Audio, value.ToString());
				break;

			case "pda_video":
				value = lexer.ReadToken();
				_videoList.Add(value.ToString());
				idE.DeclManager.FindType(DeclType.Video, value.ToString());
				break;
		}
	}

	if (lexer.HadError == true)
	{
		lexer.Warning("PDA decl '{0}' had a parse error", this.Name);
		return false;
	}

	// remember the counts before gameplay adds discovered items
	_originalVideoCount = _videoList.Count;
	_originalEmailCount = _emailList.Count;

	return true;
}
/// <summary>
/// Parses a PDA decl: identity fields plus the email/audio/video decls it references.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>False if the lexer reported an error; otherwise true.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	for(idToken keyToken = lexer.ReadToken(); keyToken != null; keyToken = lexer.ReadToken())
	{
		string key = keyToken.ToString().ToLower();

		if(key == "}")
		{
			break;
		}

		idToken valueToken;

		switch(key)
		{
			case "name":
				valueToken = lexer.ReadToken();
				_pdaName = (valueToken != null) ? valueToken.ToString() : string.Empty;
				break;

			case "fullname":
				valueToken = lexer.ReadToken();
				_fullName = (valueToken != null) ? valueToken.ToString() : string.Empty;
				break;

			case "icon":
				valueToken = lexer.ReadToken();
				_icon = (valueToken != null) ? valueToken.ToString() : string.Empty;
				break;

			case "id":
				valueToken = lexer.ReadToken();
				_id = (valueToken != null) ? valueToken.ToString() : string.Empty;
				break;

			case "post":
				valueToken = lexer.ReadToken();
				_post = (valueToken != null) ? valueToken.ToString() : string.Empty;
				break;

			case "title":
				valueToken = lexer.ReadToken();
				_title = (valueToken != null) ? valueToken.ToString() : string.Empty;
				break;

			case "security":
				valueToken = lexer.ReadToken();
				_security = (valueToken != null) ? valueToken.ToString() : string.Empty;
				break;

			case "pda_email":
				// store the reference and touch the decl to register it
				valueToken = lexer.ReadToken();
				_emailList.Add(valueToken.ToString());
				idE.DeclManager.FindType(DeclType.Email, valueToken.ToString());
				break;

			case "pda_audio":
				valueToken = lexer.ReadToken();
				_audioList.Add(valueToken.ToString());
				idE.DeclManager.FindType(DeclType.Audio, valueToken.ToString());
				break;

			case "pda_video":
				valueToken = lexer.ReadToken();
				_videoList.Add(valueToken.ToString());
				idE.DeclManager.FindType(DeclType.Video, valueToken.ToString());
				break;
		}
	}

	if(lexer.HadError == true)
	{
		lexer.Warning("PDA decl '{0}' had a parse error", this.Name);
		return false;
	}

	// snapshot the list sizes before gameplay adds discovered items
	_originalVideoCount = _videoList.Count;
	_originalEmailCount = _emailList.Count;

	return true;
}
/// <summary>
/// Parses a particle decl: one braced sub-block per stage plus an optional depthHack value.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>True on success; false (after MakeDefault) on a parse error.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	List<idParticleStage> stageList = new List<idParticleStage>();
	idToken token;

	_depthHack = 0.0f;

	while ((token = lexer.ReadToken()) != null)
	{
		string keyword = token.ToString().ToLower();

		if (keyword == "}")
		{
			break;
		}

		if (keyword == "{")
		{
			// each braced sub-block is one particle stage
			idParticleStage stage = ParseParticleStage(lexer);

			if (stage == null)
			{
				lexer.Warning("Particle stage parse failed");
				MakeDefault();
				return false;
			}

			stageList.Add(stage);
		}
		else if (keyword == "depthhack")
		{
			_depthHack = lexer.ParseFloat();
		}
		else
		{
			lexer.Warning("bad token {0}", token.ToString());
			MakeDefault();
			return false;
		}
	}

	_stages = stageList.ToArray();

	//
	// calculate the bounds
	//
	_bounds.Clear();

	int stageCount = _stages.Length;

	for (int i = 0; i < stageCount; i++)
	{
		idConsole.Warning("TODO: GetStageBounds");
		// TODO: GetStageBounds(stages[i]);
		_bounds += _stages[i].Bounds;
	}

	// degenerate bounds get a reasonable default box
	if (_bounds.Volume <= 0.1f)
	{
		_bounds = idBounds.Expand(idBounds.Zero, 8.0f);
	}

	return true;
}
/// <summary>
/// Parses a model def decl: mesh, skin, offset, anims, and per-joint animation
/// channel assignments. The mesh must be declared before anims or channels.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>True on success; false (after MakeDefault) on any parse error.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	int defaultAnimationCount = 0;

	idToken token;
	idToken token2;
	string tokenValue;
	string fileName;
	string extension;
	int count;
	idMD5Joint[] md5Joints;

	while (true)
	{
		if ((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		if (tokenValue == "}")
		{
			break;
		}

		if (tokenValue == "inherit")
		{
			// not ported yet — the original engine copies another modelDef's state
			idConsole.WriteLine("TODO: inherit");

			/*if( !src.ReadToken( &token2 ) ) {
			 * src.Warning( "Unexpected end of file" );
			 * MakeDefault();
			 * return false;
			 * }
			 *
			 * const idDeclModelDef *copy = static_cast<const idDeclModelDef *>( declManager->FindType( DECL_MODELDEF, token2, false ) );
			 * if ( !copy ) {
			 * common->Warning( "Unknown model definition '%s'", token2.c_str() );
			 * } else if ( copy->GetState() == DS_DEFAULTED ) {
			 * common->Warning( "inherited model definition '%s' defaulted", token2.c_str() );
			 * MakeDefault();
			 * return false;
			 * } else {
			 * CopyDecl( copy );
			 * numDefaultAnims = anims.Num();
			 * }*/
		}
		else if (tokenValue == "skin")
		{
			if ((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return(false);
			}

			_skin = idE.DeclManager.FindSkin(token2.ToString());

			if (_skin == null)
			{
				lexer.Warning("Skin '{0}' not found", token2.ToString());
				MakeDefault();

				return(false);
			}
		}
		else if (tokenValue == "mesh")
		{
			if ((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return(false);
			}

			fileName = token2.ToString();
			extension = Path.GetExtension(fileName);

			// only MD5 meshes are valid here
			if (extension != idRenderModel_MD5.MeshExtension)
			{
				lexer.Warning("Invalid model for MD5 mesh");
				MakeDefault();

				return(false);
			}

			_model = idE.RenderModelManager.FindModel(fileName);

			if (_model == null)
			{
				lexer.Warning("Model '{0}' not found", fileName);
				MakeDefault();

				return(false);
			}
			else if (_model.IsDefault == true)
			{
				lexer.Warning("Model '{0}' defaulted", fileName);
				MakeDefault();

				return(false);
			}

			// get the number of joints
			count = _model.JointCount;

			if (count == 0)
			{
				lexer.Warning("Model '{0}' has no joints", fileName);
			}

			// set up the joint hierarchy
			md5Joints = _model.Joints;

			_joints = new JointInfo[count];
			_jointParents = new int[count];
			_channelJoints = new int[(int)AnimationChannel.Count][];
			// channel 0 (All) initially owns every joint
			_channelJoints[0] = new int[count];

			for (int i = 0; i < count; i++)
			{
				_joints[i] = new JointInfo();
				_joints[i].Channel = AnimationChannel.All;
				_joints[i].Index = i;

				// root joints have no parent (-1)
				if (md5Joints[i].Parent != null)
				{
					_joints[i].ParentIndex = _model.GetJointIndex(md5Joints[i].Parent);
				}
				else
				{
					_joints[i].ParentIndex = -1;
				}

				_jointParents[i] = _joints[i].ParentIndex;
				_channelJoints[0][i] = i;
			}
		}
		else if (tokenValue == "remove")
		{
			// not ported yet — removes any anims whose name matches
			idConsole.Warning("TODO: remove");

			// removes any anims whos name matches
			/*if( !src.ReadToken( &token2 ) ) {
			 * src.Warning( "Unexpected end of file" );
			 * MakeDefault();
			 * return false;
			 * }
			 * num = 0;
			 * for( i = 0; i < anims.Num(); i++ ) {
			 * if ( ( token2 == anims[ i ]->Name() ) || ( token2 == anims[ i ]->FullName() ) ) {
			 * delete anims[ i ];
			 * anims.RemoveIndex( i );
			 * if ( i >= numDefaultAnims ) {
			 * src.Warning( "Anim '%s' was not inherited.  Anim should be removed from the model def.", token2.c_str() );
			 * MakeDefault();
			 * return false;
			 * }
			 * i--;
			 * numDefaultAnims--;
			 * num++;
			 * continue;
			 * }
			 * }
			 * if ( !num ) {
			 * src.Warning( "Couldn't find anim '%s' to remove", token2.c_str() );
			 * MakeDefault();
			 * return false;
			 * }*/
		}
		else if (tokenValue == "anim")
		{
			// anims need the mesh's joint data to already be in place
			if (_model == null)
			{
				lexer.Warning("Must specify mesh before defining anims");
				MakeDefault();

				return(false);
			}
			else if (ParseAnimation(lexer, defaultAnimationCount) == false)
			{
				MakeDefault();

				return(false);
			}
		}
		else if (tokenValue == "offset")
		{
			float[] tmp = lexer.Parse1DMatrix(3);

			if (tmp == null)
			{
				lexer.Warning("Expected vector following 'offset'");
				MakeDefault();

				return(false);
			}

			_offset = new Vector3(tmp[0], tmp[1], tmp[2]);
		}
		else if (tokenValue == "channel")
		{
			if (_model == null)
			{
				lexer.Warning("Must specify mesh before defining channels");
				MakeDefault();

				return(false);
			}

			// set the channel for a group of joints
			if ((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return(false);
			}

			// NOTE(review): the syntax expected here is '(', but the warning text
			// says '{' — confirm which is correct against the original decl format.
			if (lexer.CheckTokenString("(") == false)
			{
				lexer.Warning("Expected { after '{0}'", token2.ToString());
				MakeDefault();

				return(false);
			}

			// look the channel name up (skipping the implicit "all" channel)
			int i;
			int channelCount = (int)AnimationChannel.Count;

			for (i = (int)AnimationChannel.All + 1; i < channelCount; i++)
			{
				if (ChannelNames[i].Equals(token2.ToString(), StringComparison.OrdinalIgnoreCase) == true)
				{
					break;
				}
			}

			if (i >= channelCount)
			{
				lexer.Warning("Unknown channel '{0}'", token2.ToString());
				MakeDefault();

				return(false);
			}

			int channel = i;

			// gather the joint name expression ('*' and '-' are operators, so no
			// separating space after them)
			StringBuilder jointNames = new StringBuilder();
			string token2Value;

			while (lexer.CheckTokenString(")") == false)
			{
				if ((token2 = lexer.ReadToken()) == null)
				{
					lexer.Warning("Unexpected end of file");
					MakeDefault();

					return(false);
				}

				token2Value = token2.ToString();
				jointNames.Append(token2Value);

				if ((token2Value != "*") && (token2Value != "-"))
				{
					jointNames.Append(" ");
				}
			}

			int[] jointList = GetJointList(jointNames.ToString());
			int jointLength = jointList.Length;

			List <int> channelJoints = new List <int>();

			// claim each listed joint for this channel; a joint may belong to one
			// channel only
			for (count = i = 0; i < jointLength; i++)
			{
				int jointIndex = jointList[i];

				if (_joints[jointIndex].Channel != AnimationChannel.All)
				{
					lexer.Warning("Join '{0}' assigned to multiple channels", _model.GetJointName(jointIndex));
					continue;
				}

				_joints[jointIndex].Channel = (AnimationChannel)channel;
				channelJoints.Add(jointIndex);
			}

			_channelJoints[channel] = channelJoints.ToArray();
		}
		else
		{
			lexer.Warning("unknown token '{0}'", token.ToString());
			MakeDefault();

			return(false);
		}
	}

	return(true);
}
/// <summary>
/// Parses a table decl: optional "snap"/"clamp" flags followed by a braced,
/// comma-separated list of float values.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>True on success; false (after MakeDefault) on any parse error.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException("idDeclTable");
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;
	List <float> values = new List <float>();
	string tokenLower;
	string tokenValue;

	while (true)
	{
		if ((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();
		tokenLower = tokenValue.ToLower();

		if (tokenLower == "}")
		{
			break;
		}
		else if (tokenLower == "snap")
		{
			_snap = true;
		}
		else if (tokenLower == "clamp")
		{
			_clamp = true;
		}
		else if (tokenLower == "{")
		{
			// the value list: floats separated by commas, terminated by '}'
			while (true)
			{
				bool errorFlag;
				float v = lexer.ParseFloat(out errorFlag);

				if (errorFlag == true)
				{
					// we got something non-numeric
					MakeDefault();
					return(false);
				}

				values.Add(v);

				// FIX: ReadToken can return null at end of input; the old code
				// dereferenced it unconditionally (NullReferenceException).
				token = lexer.ReadToken();

				if (token == null)
				{
					lexer.Warning("expected comma or brace");
					MakeDefault();
					return(false);
				}

				tokenValue = token.ToString();

				if (tokenValue == "}")
				{
					break;
				}
				else if (tokenValue == ",")
				{
					continue;
				}

				lexer.Warning("expected comma or brace");
				MakeDefault();
				return(false);
			}
		}
		else
		{
			lexer.Warning("unknown token '{0}'", tokenValue);
			MakeDefault();
			return(false);
		}
	}

	// FIX: an empty table would index values[0] and throw; treat it as a parse error.
	if (values.Count == 0)
	{
		lexer.Warning("unknown token '{0}'", "}");
		MakeDefault();
		return(false);
	}

	// copy the 0 element to the end, so lerping doesn't
	// need to worry about the wrap case
	float val = values[0];
	values.Add(val);

	_values = values.ToArray();

	return(true);
}
/// <summary>
/// Rebuilds the choice and value lists from their source strings whenever those
/// strings have changed since the last rebuild.
/// </summary>
private void UpdateChoicesAndValues()
{
	idToken token;
	string str2 = string.Empty;

	// FIX: the old condition was inverted — it rebuilt the choice list only when
	// the latched string already MATCHED the current one, so the list was never
	// built when the choices actually changed.
	if (_latchedChoices.Equals(_choicesStr.ToString(), StringComparison.OrdinalIgnoreCase) == false)
	{
		_choices.Clear();

		idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);

		if (lexer.LoadMemory(_choicesStr.ToString(), "<ChoiceList>") == true)
		{
			while ((token = lexer.ReadToken()) != null)
			{
				// ';' terminates one choice entry
				if (token.ToString() == ";")
				{
					if (str2.Length > 0)
					{
						// translate the accumulated choice text
						str2 = idE.Language.Get(str2.TrimEnd());
						_choices.Add(str2);
						str2 = string.Empty;
					}

					continue;
				}

				str2 += token.ToString();
				str2 += " ";
			}

			// trailing entry without a final ';'
			if (str2.Length > 0)
			{
				_choices.Add(str2.TrimEnd());
			}
		}

		_latchedChoices = _choicesStr.ToString();
	}

	// NOTE(review): the original engine latches values in a separate latchedVals
	// field; this port reuses _latchedChoices for both — confirm whether a
	// dedicated latched-values field should exist.
	if ((_choiceValues.ToString() != string.Empty) && (_latchedChoices.Equals(_choiceValues.ToString(), StringComparison.OrdinalIgnoreCase) == false))
	{
		_values.Clear();

		str2 = string.Empty;
		bool negNum = false;
		idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);

		if (lexer.LoadMemory(_choiceValues.ToString(), "<ChoiceVals>") == true)
		{
			while ((token = lexer.ReadToken()) != null)
			{
				if (token.ToString() == "-")
				{
					// the lexer splits a leading minus from the number; remember it
					negNum = true;
				}
				else if (token.ToString() == ";")
				{
					if (str2.Length > 0)
					{
						_values.Add(str2.TrimEnd());
						str2 = string.Empty;
					}
				}
				else
				{
					// FIX: the old else-if chain consumed the token that FOLLOWED a
					// '-' without appending it, so "-5" became just "-"; prepend the
					// sign and still append the token itself.
					if (negNum == true)
					{
						str2 += "-";
						negNum = false;
					}

					str2 += token.ToString();
					str2 += " ";
				}
			}

			if (str2.Length > 0)
			{
				_values.Add(str2.TrimEnd());
			}
		}

		if (_choices.Count != _values.Count)
		{
			idConsole.Warning("idChoiceWindow:: gui '{0}' window '{1}' has value count unequal to choices count", this.UserInterface.SourceFile, this.Name);
		}

		_latchedChoices = _choiceValues.ToString();
	}
}
/// <summary>
/// Parses a particle decl: one braced block per stage plus an optional depthHack.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>True on success; false (after MakeDefault) on a parse error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	List<idParticleStage> parsedStages = new List<idParticleStage>();

	_depthHack = 0.0f;

	for(idToken token = lexer.ReadToken(); token != null; token = lexer.ReadToken())
	{
		string word = token.ToString().ToLower();

		if(word == "}")
		{
			break;
		}
		else if(word == "{")
		{
			// each nested braced block describes a single stage
			idParticleStage stage = ParseParticleStage(lexer);

			if(stage == null)
			{
				lexer.Warning("Particle stage parse failed");
				MakeDefault();
				return false;
			}

			parsedStages.Add(stage);
		}
		else if(word == "depthhack")
		{
			_depthHack = lexer.ParseFloat();
		}
		else
		{
			lexer.Warning("bad token {0}", token.ToString());
			MakeDefault();
			return false;
		}
	}

	_stages = parsedStages.ToArray();

	//
	// calculate the bounds
	//
	_bounds.Clear();

	for(int i = 0; i < _stages.Length; i++)
	{
		idConsole.Warning("TODO: GetStageBounds");
		// TODO: GetStageBounds(stages[i]);
		_bounds += _stages[i].Bounds;
	}

	// fall back to a small default box when the computed bounds are degenerate
	if(_bounds.Volume <= 0.1f)
	{
		_bounds = idBounds.Expand(idBounds.Zero, 8.0f);
	}

	return true;
}
/// <summary>
/// Parses an email decl: subject/to/from/date/image fields plus a braced body of text.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>False if the lexer reported an error; otherwise true.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames
		| LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination
		| LexerOptions.NoFatalErrors);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	_text = string.Empty;

	idToken token;

	// scan through, identifying each individual parameter
	while ((token = lexer.ReadToken()) != null)
	{
		string tokenValue = token.ToString();

		if (tokenValue == "}")
		{
			break;
		}

		switch (tokenValue.ToLower())
		{
			case "subject":
				_subject = lexer.ReadToken().ToString();
				break;

			case "to":
				_to = lexer.ReadToken().ToString();
				break;

			case "from":
				_from = lexer.ReadToken().ToString();
				break;

			case "date":
				_date = lexer.ReadToken().ToString();
				break;

			case "text":
				// body must open with its own brace
				token = lexer.ReadToken();

				if (token.ToString() != "{")
				{
					lexer.Warning("Email dec '{0}' had a parse error", this.Name);
					return false;
				}

				// concatenate every token until the closing brace
				while (((token = lexer.ReadToken()) != null) && (token.ToString() != "}"))
				{
					_text += token.ToString();
				}
				break;

			case "image":
				_image = lexer.ReadToken().ToString();
				break;
		}
	}

	if (lexer.HadError == true)
	{
		lexer.Warning("Email decl '{0}' had a parse error", this.Name);
		return false;
	}

	return true;
}
/// <summary>
/// Parses an audio log declaration: a "{ ... }" body of keyword/value pairs
/// (audio, info, name, preview). The referenced sound shader is resolved
/// eagerly via the decl manager so it gets registered. Unknown keywords are
/// silently ignored.
/// </summary>
/// <param name="text">Raw decl text to parse.</param>
/// <returns>True on success; false if the lexer reported an error.</returns>
public override bool Parse(string text)
{
	idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination | LexerOptions.NoFatalErrors);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;
	string tokenValue;

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString().ToLower();

		if(tokenValue == "}")
		{
			break;
		}

		if(tokenValue == "audio")
		{
			_audio = lexer.ReadToken().ToString();

			// touch the sound shader so it is loaded/registered
			idE.DeclManager.FindSound(_audio);
		}
		else if(tokenValue == "info")
		{
			_info = lexer.ReadToken().ToString();
		}
		else if(tokenValue == "name")
		{
			_audioName = lexer.ReadToken().ToString();
		}
		else if(tokenValue == "preview")
		{
			_preview = lexer.ReadToken().ToString();
		}
	}

	if(lexer.HadError == true)
	{
		// BUG FIX: this message previously said "Video decl" — a copy/paste
		// leftover from the video decl parser. This is the audio decl parser.
		lexer.Warning("Audio decl '{0}' had a parse error", this.Name);

		return false;
	}

	return true;
}
/// <summary>
/// Parses an FX declaration. Real parsing is not implemented yet: this only
/// steps over the "{ ... }" body, logging TODO warnings for "bindto" and each
/// nested action block (which is skipped via ParseSingleAction).
/// </summary>
/// <param name="text">Raw decl text to parse.</param>
/// <returns>True on success; false if the lexer reported an error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;

	idConsole.Warning("TODO: actual fx parsing, we only step over the block");

	while((token = lexer.ReadToken()) != null)
	{
		string keyword = token.ToString().ToLower();

		if(keyword == "}")
		{
			break;
		}

		if(keyword == "bindto")
		{
			// consume the joint name; not stored yet
			token = lexer.ReadToken();
			idConsole.Warning("TODO: FX: joint = token;");
		}
		else if(keyword == "{")
		{
			idConsole.Warning("TODO: FX: idFXSingleAction action;");
			ParseSingleAction(lexer/*, action*/);
			// events.Append(action);
			continue;
		}
	}

	if(lexer.HadError == true)
	{
		lexer.Warning("FX decl '{0}' had a parse error", this.Name);

		return false;
	}

	return true;
}
/// <summary>
/// Parses a skin declaration: a "{ ... }" body of token pairs. A "model"
/// pair records an associated model path; any other pair is a material
/// remapping from the first token (or a wildcard "*", meaning any material)
/// to the second.
/// </summary>
/// <param name="text">Raw decl text to parse.</param>
/// <returns>True on success; false (after MakeDefault) on premature end of file.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	List<SkinMapping> mappings = new List<SkinMapping>();
	List<string> associatedModels = new List<string>();

	idToken token, token2;
	string tokenLower;

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenLower = token.ToString().ToLower();

		if(tokenLower == "}")
		{
			break;
		}
		else if((token2 = lexer.ReadToken()) == null)
		{
			// BUG FIX: previously this warned and broke out, then fell through
			// to an unconditional "return false" at the end of the method —
			// which meant even a clean parse reported failure. The error path
			// now returns false directly and a clean parse returns true,
			// consistent with the other decl parsers.
			lexer.Warning("Unexpected end of file");
			MakeDefault();

			return false;
		}

		if(tokenLower == "model")
		{
			associatedModels.Add(token2.ToString());
			continue;
		}

		SkinMapping map = new SkinMapping();
		map.To = idE.DeclManager.FindMaterial(token2.ToString());

		if(tokenLower == "*")
		{
			// wildcard: match any source material.
			map.From = null;
		}
		else
		{
			map.From = idE.DeclManager.FindMaterial(token.ToString());
		}

		mappings.Add(map);
	}

	_mappings = mappings.ToArray();
	_associatedModels = associatedModels.ToArray();

	return true;
}
/// <summary>
/// Parses an email declaration: a "{ ... }" body of keyword/value pairs
/// (subject, to, from, date, image) plus a nested "text { ... }" block whose
/// tokens are concatenated into the message body. Unknown keywords are ignored.
/// </summary>
/// <param name="text">Raw decl text to parse.</param>
/// <returns>True on success; false if the lexer reported an error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination | LexerOptions.NoFatalErrors);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	_text = string.Empty;

	// scan through, identifying each individual parameter
	for(idToken token = lexer.ReadToken(); token != null; token = lexer.ReadToken())
	{
		string raw = token.ToString();

		if(raw == "}")
		{
			break;
		}

		string keyword = raw.ToLower();

		if(keyword == "subject")
		{
			_subject = lexer.ReadToken().ToString();
		}
		else if(keyword == "to")
		{
			_to = lexer.ReadToken().ToString();
		}
		else if(keyword == "from")
		{
			_from = lexer.ReadToken().ToString();
		}
		else if(keyword == "date")
		{
			_date = lexer.ReadToken().ToString();
		}
		else if(keyword == "text")
		{
			// the body itself is a nested brace block
			idToken open = lexer.ReadToken();

			if(open.ToString() != "{")
			{
				lexer.Warning("Email dec '{0}' had a parse error", this.Name);

				return false;
			}

			idToken bodyToken;

			while(((bodyToken = lexer.ReadToken()) != null) && (bodyToken.ToString() != "}"))
			{
				_text += bodyToken.ToString();
			}
		}
		else if(keyword == "image")
		{
			_image = lexer.ReadToken().ToString();
		}
	}

	if(lexer.HadError == true)
	{
		lexer.Warning("Email decl '{0}' had a parse error", this.Name);

		return false;
	}

	return true;
}