/// <summary>
/// Parses one "anim" entry of a model decl: the anim alias, one or more MD5 anim
/// file names (comma separated), and an optional { } block of anim flags and
/// frame commands.
/// </summary>
/// <param name="lexer">Lexer positioned just after the "anim" keyword.</param>
/// <param name="defaultAnimCount">Number of anims inherited from the parent model
/// definition; matching one of these replaces it rather than adding a duplicate.</param>
/// <returns>false (after MakeDefault on most paths) on any parse error, true on success.</returns>
private bool ParseAnimation(idLexer lexer, int defaultAnimCount)
{
    List<idMD5Anim> md5anims = new List<idMD5Anim>();
    idMD5Anim md5anim;
    idAnim anim;
    AnimationFlags flags = new AnimationFlags();
    idToken token;

    // the full name, e.g. "run_forwards1" — kept for SetAnimation even after
    // the numeric suffix is stripped from the alias below
    idToken realName = lexer.ReadToken();

    if (realName == null)
    {
        lexer.Warning("Unexpected end of file");
        MakeDefault();
        return(false);
    }

    string alias = realName.ToString();

    int i;
    int count = _anims.Count;

    // look for an existing anim with the same full name
    for (i = 0; i < count; i++)
    {
        if (_anims[i].FullName.Equals(alias, StringComparison.OrdinalIgnoreCase) == true)
        {
            break;
        }
    }

    // a match beyond the inherited range is a duplicate within this decl
    if ((i < count) && (i >= defaultAnimCount))
    {
        lexer.Warning("Duplicate anim '{0}'", realName);
        MakeDefault();
        return(false);
    }

    if (i < defaultAnimCount)
    {
        // override an anim inherited from the parent model def
        anim = _anims[i];
    }
    else
    {
        // create the alias associated with this animation
        anim = new idAnim();
        _anims.Add(anim);
    }

    // random anims end with a number. find the numeric suffix of the animation.
    int len = alias.Length;

    for (i = len - 1; i > 0; i--)
    {
        if (Char.IsNumber(alias[i]) == false)
        {
            break;
        }
    }

    // check for zero length name, or a purely numeric name
    if (i <= 0)
    {
        lexer.Warning("Invalid animation name '{0}'", alias);
        MakeDefault();
        return(false);
    }

    // remove the numeric suffix
    alias = alias.Substring(0, i + 1);

    // parse the anims from the string
    do
    {
        if ((token = lexer.ReadToken()) == null)
        {
            lexer.Warning("Unexpected end of file");
            MakeDefault();
            return(false);
        }

        // lookup the animation
        md5anim = idR.AnimManager.GetAnimation(token.ToString());

        if (md5anim == null)
        {
            // NOTE(review): this error path returns without MakeDefault(),
            // unlike every other failure path in this method — confirm intentional
            lexer.Warning("Couldn't load anim '{0}'", token);
            return(false);
        }

        md5anim.CheckModelHierarchy(_model);

        if (md5anims.Count > 0)
        {
            // make sure it's the same length as the other anims
            if (md5anim.Length != md5anims[0].Length)
            {
                lexer.Warning("Anim '{0}' does not match length of anim '{1}'", md5anim.Name, md5anims[0].Name);
                MakeDefault();
                return(false);
            }
        }

        // add it to our list
        md5anims.Add(md5anim);
    }
    while (lexer.CheckTokenString(",") == true);

    if (md5anims.Count == 0)
    {
        lexer.Warning("No animation specified");
        MakeDefault();
        return(false);
    }

    anim.SetAnimation(this, realName.ToString(), alias, md5anims.ToArray());

    // parse any frame commands or animflags
    if (lexer.CheckTokenString("{") == true)
    {
        while (true)
        {
            if ((token = lexer.ReadToken()) == null)
            {
                lexer.Warning("Unexpected end of file");
                MakeDefault();
                return(false);
            }

            string tokenValue = token.ToString();

            if (tokenValue == "}")
            {
                break;
            }
            else if (tokenValue == "prevent_idle_override")
            {
                flags.PreventIdleOverride = true;
            }
            else if (tokenValue == "random_cycle_start")
            {
                flags.RandomCycleStart = true;
            }
            else if (tokenValue == "ai_no_turn")
            {
                flags.AINoTurn = true;
            }
            else if (tokenValue == "anim_turn")
            {
                flags.AnimationTurn = true;
            }
            else if (tokenValue == "frame")
            {
                // create a frame command
                int frameIndex;
                string err;

                // make sure we don't have any line breaks while reading the frame command so the error line # will be correct
                if ((token = lexer.ReadTokenOnLine()) == null)
                {
                    lexer.Warning("Missing frame # after 'frame'");
                    MakeDefault();
                    return(false);
                }
                else if ((token.Type == TokenType.Punctuation) && (token.ToString() == "-"))
                {
                    lexer.Warning("Invalid frame # after 'frame'");
                    MakeDefault();
                    return(false);
                }
                else if ((token.Type != TokenType.Number) || (token.SubType == TokenSubType.Float))
                {
                    // NOTE(review): Error does not return here; execution falls
                    // through to ToInt32 — presumably Error aborts the lexer, verify
                    lexer.Error("expected integer value, found '{0}'", token);
                }

                // get the frame number
                frameIndex = token.ToInt32();

                // put the command on the specified frame of the animation
                if ((err = anim.AddFrameCommand(this, frameIndex, lexer, null)) != null)
                {
                    lexer.Warning(err.ToString());
                    MakeDefault();
                    return(false);
                }
            }
            else
            {
                lexer.Warning("Unknown command '{0}'", token);
                MakeDefault();
                return(false);
            }
        }
    }

    // set the flags
    anim.Flags = flags;

    return(true);
}
/// <summary>
/// Re-parses the cached choice display strings (and their associated values)
/// from the gui source strings whenever those strings have changed since the
/// last update. Entries are ';'-separated; values may be negative numbers.
/// </summary>
private void UpdateChoicesAndValues()
{
    idToken token;
    string str2 = string.Empty;

    // BUGFIX: only rebuild when the latched copy DIFFERS from the current
    // choices string. The original compared with == true, so the list was
    // rebuilt only when nothing had changed — and never on first use.
    if (_latchedChoices.Equals(_choicesStr.ToString(), StringComparison.OrdinalIgnoreCase) == false)
    {
        _choices.Clear();

        idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);

        if (lexer.LoadMemory(_choicesStr.ToString(), "<ChoiceList>") == true)
        {
            while ((token = lexer.ReadToken()) != null)
            {
                if (token.ToString() == ";")
                {
                    // end of one choice entry — localize and store it
                    if (str2.Length > 0)
                    {
                        str2 = idE.Language.Get(str2.TrimEnd());
                        _choices.Add(str2);
                        str2 = string.Empty;
                    }

                    continue;
                }

                str2 += token.ToString();
                str2 += " ";
            }

            // trailing entry without a closing ';'
            if (str2.Length > 0)
            {
                _choices.Add(str2.TrimEnd());
            }
        }

        _latchedChoices = _choicesStr.ToString();
    }

    // NOTE(review): both branches latch into _latchedChoices; the original C++
    // used a separate latchedVals field for the value string — confirm the port
    // intends to share one latch.
    if ((_choiceValues.ToString() != string.Empty) && (_latchedChoices.Equals(_choiceValues.ToString(), StringComparison.OrdinalIgnoreCase) == false))
    {
        _values.Clear();

        str2 = string.Empty;
        bool negNum = false;

        idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);

        if (lexer.LoadMemory(_choiceValues.ToString(), "<ChoiceVals>") == true)
        {
            while ((token = lexer.ReadToken()) != null)
            {
                if (token.ToString() == "-")
                {
                    // remember the sign; it is glued onto the next token
                    negNum = true;
                }
                else if (token.ToString() == ";")
                {
                    if (str2.Length > 0)
                    {
                        _values.Add(str2.TrimEnd());
                        str2 = string.Empty;
                    }
                }
                else
                {
                    // BUGFIX: the original dropped the token that followed a
                    // '-' (it appended only "-" and discarded the number);
                    // append the sign AND the token, matching the C++ source.
                    if (negNum == true)
                    {
                        str2 += "-";
                        negNum = false;
                    }

                    str2 += token.ToString();
                    str2 += " ";
                }
            }

            if (str2.Length > 0)
            {
                _values.Add(str2.TrimEnd());
            }
        }

        if (_choices.Count != _values.Count)
        {
            idConsole.Warning("idChoiceWindow:: gui '{0}' window '{1}' has value count unequal to choices count", this.UserInterface.SourceFile, this.Name);
        }

        _latchedChoices = _choiceValues.ToString();
    }
}
/// <summary>
/// Parses a table decl: optional "snap"/"clamp" keywords followed by a
/// brace-enclosed, comma-separated list of float values.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>true on success; false (after MakeDefault) on a parse error.</returns>
public override bool Parse(string text)
{
    if (this.Disposed == true)
    {
        throw new ObjectDisposedException("idDeclTable");
    }

    idLexer lexer = new idLexer(idDeclFile.LexerOptions);
    lexer.LoadMemory(text, this.FileName, this.LineNumber);
    lexer.SkipUntilString("{");

    idToken token;
    List<float> values = new List<float>();
    string tokenLower;
    string tokenValue;

    while (true)
    {
        if ((token = lexer.ReadToken()) == null)
        {
            break;
        }

        tokenValue = token.ToString();
        tokenLower = tokenValue.ToLower();

        if (tokenLower == "}")
        {
            break;
        }
        else if (tokenLower == "snap")
        {
            _snap = true;
        }
        else if (tokenLower == "clamp")
        {
            _clamp = true;
        }
        else if (tokenLower == "{")
        {
            // value list: float (',' float)* '}'
            while (true)
            {
                bool errorFlag;
                float v = lexer.ParseFloat(out errorFlag);

                if (errorFlag == true)
                {
                    // we got something non-numeric
                    MakeDefault();
                    return(false);
                }

                values.Add(v);

                // BUGFIX: guard EOF — the original dereferenced a null token
                // (NullReferenceException) when the file ended inside the list
                if ((token = lexer.ReadToken()) == null)
                {
                    lexer.Warning("expected comma or brace");
                    MakeDefault();
                    return(false);
                }

                tokenValue = token.ToString();

                if (tokenValue == "}")
                {
                    break;
                }
                else if (tokenValue == ",")
                {
                    continue;
                }

                lexer.Warning("expected comma or brace");
                MakeDefault();
                return(false);
            }
        }
        else
        {
            lexer.Warning("unknown token '{0}'", tokenValue);
            MakeDefault();
            return(false);
        }
    }

    // BUGFIX: an empty table previously threw on values[0]
    if (values.Count == 0)
    {
        lexer.Warning("table with no values");
        MakeDefault();
        return(false);
    }

    // copy the 0 element to the end, so lerping doesn't
    // need to worry about the wrap case
    float val = values[0];
    values.Add(val);

    _values = values.ToArray();

    return(true);
}
/// <summary>
/// Parses one FX action block (up to the closing '}') of an FX decl.
/// The managed port currently only consumes the tokens so parsing stays in
/// sync; the commented-out C++ below documents the original idFXSingleAction
/// fields each keyword would populate (porting TODO).
/// </summary>
/// <param name="lexer">Lexer positioned just inside the action's '{'.</param>
private void ParseSingleAction(idLexer lexer /*idFXSingleAction& FXAction*/)
{
    idToken token;
    string tokenValue;

    // original C++ field initialization, kept as a porting reference:
    /*FXAction.type = -1;
     * FXAction.sibling = -1;
     *
     * FXAction.data = "<none>";
     * FXAction.name = "<none>";
     * FXAction.fire = "<none>";
     *
     * FXAction.delay = 0.0f;
     * FXAction.duration = 0.0f;
     * FXAction.restart = 0.0f;
     * FXAction.size = 0.0f;
     * FXAction.fadeInTime = 0.0f;
     * FXAction.fadeOutTime = 0.0f;
     * FXAction.shakeTime = 0.0f;
     * FXAction.shakeAmplitude = 0.0f;
     * FXAction.shakeDistance = 0.0f;
     * FXAction.shakeFalloff = false;
     * FXAction.shakeImpulse = 0.0f;
     * FXAction.shakeIgnoreMaster = false;
     * FXAction.lightRadius = 0.0f;
     * FXAction.rotate = 0.0f;
     * FXAction.random1 = 0.0f;
     * FXAction.random2 = 0.0f;
     *
     * FXAction.lightColor = vec3_origin;
     * FXAction.offset = vec3_origin;
     * FXAction.axis = mat3_identity;
     *
     * FXAction.bindParticles = false;
     * FXAction.explicitAxis = false;
     * FXAction.noshadows = false;
     * FXAction.particleTrackVelocity = false;
     * FXAction.trackOrigin = false;
     * FXAction.soundStarted = false;*/

    while (true)
    {
        if ((token = lexer.ReadToken()) == null)
        {
            break;
        }

        tokenValue = token.ToString().ToLower();

        if (tokenValue == "}")
        {
            break;
        }
        else if (tokenValue == "shake")
        {
            // shake <time>, <amplitude>, <distance>, <falloff>, <impulse>
            /*FXAction.type = FX_SHAKE;*/
            /*FXAction.shakeTime = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*FXAction.shakeAmplitude = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*FXAction.shakeDistance = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*FXAction.shakeFalloff = */ lexer.ParseBool();
            lexer.ExpectTokenString(",");
            /*FXAction.shakeImpulse = */ lexer.ParseFloat();
        }
        else if (tokenValue == "noshadows")
        {
            // TODO: FXAction.noshadows = true;
        }
        else if (tokenValue == "name")
        {
            token = lexer.ReadToken();
            // TODO: FXAction.name = token;
        }
        else if (tokenValue == "fire")
        {
            token = lexer.ReadToken();
            // TODO: FXAction.fire = token;
        }
        else if (tokenValue == "random")
        {
            // random <min>, <max>
            /*FXAction.random1 = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*FXAction.random2 = */ lexer.ParseFloat();
            // FXAction.delay = 0.0f; // check random
        }
        else if (tokenValue == "delay")
        {
            /*FXAction.delay = */ lexer.ParseFloat();
        }
        else if (tokenValue == "rotate")
        {
            /*FXAction.rotate = */ lexer.ParseFloat();
        }
        else if (tokenValue == "duration")
        {
            /*FXAction.duration = */ lexer.ParseFloat();
        }
        else if (tokenValue == "trackorigin")
        {
            /*FXAction.trackOrigin = */ lexer.ParseBool();
        }
        else if (tokenValue == "restart")
        {
            /*FXAction.restart = */ lexer.ParseFloat();
        }
        else if (tokenValue == "fadein")
        {
            /*FXAction.fadeInTime = */ lexer.ParseFloat();
        }
        else if (tokenValue == "fadeout")
        {
            /*FXAction.fadeOutTime = */ lexer.ParseFloat();
        }
        else if (tokenValue == "size")
        {
            /*FXAction.size = */ lexer.ParseFloat();
        }
        else if (tokenValue == "offset")
        {
            // offset <x>, <y>, <z>
            /*FXAction.offset.x = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*FXAction.offset.y = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*FXAction.offset.z = */ lexer.ParseFloat();
        }
        else if (tokenValue == "axis")
        {
            // axis <x>, <y>, <z> — normalized direction vector
            /*idVec3 v;*/
            /*v.x = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*v.y = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*v.z = */ lexer.ParseFloat();
            /*v.Normalize();
             * FXAction.axis = v.ToMat3();
             * FXAction.explicitAxis = true;*/
        }
        else if (tokenValue == "angle")
        {
            // angle <pitch>, <yaw>, <roll>
            /*idAngles a;*/
            /*a[0] = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*a[1] = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*a[2] = */ lexer.ParseFloat();
            /*FXAction.axis = a.ToMat3();
             * FXAction.explicitAxis = true;*/
        }
        else if (tokenValue == "uselight")
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * for( int i = 0; i < events.Num(); i++ ) {
             *     if ( events[i].name.Icmp( FXAction.data ) == 0 ) {
             *         FXAction.sibling = i;
             *         FXAction.lightColor = events[i].lightColor;
             *         FXAction.lightRadius = events[i].lightRadius;
             *     }
             * }
             * FXAction.type = FX_LIGHT;
             *
             * // precache the light material
             * declManager->FindMaterial( FXAction.data );*/
        }
        else if (tokenValue == "attachlight")
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * FXAction.type = FX_ATTACHLIGHT;
             *
             * // precache it
             * declManager->FindMaterial( FXAction.data );*/
        }
        else if (tokenValue == "attachentity")
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * FXAction.type = FX_ATTACHENTITY;
             *
             * // precache the model
             * renderModelManager->FindModel( FXAction.data );*/
        }
        else if (tokenValue == "launch")
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * FXAction.type = FX_LAUNCH;
             *
             * // precache the entity def
             * declManager->FindType( DECL_ENTITYDEF, FXAction.data );*/
        }
        else if (tokenValue == "usemodel")
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * for( int i = 0; i < events.Num(); i++ ) {
             *     if ( events[i].name.Icmp( FXAction.data ) == 0 ) {
             *         FXAction.sibling = i;
             *     }
             * }
             * FXAction.type = FX_MODEL;
             *
             * // precache the model
             * renderModelManager->FindModel( FXAction.data );*/
        }
        else if (tokenValue == "light")
        {
            // light <material>, <r>, <g>, <b>, <radius>
            token = lexer.ReadToken();
            /*FXAction.data = token;*/
            lexer.ExpectTokenString(",");
            /*FXAction.lightColor[0] = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*FXAction.lightColor[1] = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*FXAction.lightColor[2] = */ lexer.ParseFloat();
            lexer.ExpectTokenString(",");
            /*FXAction.lightRadius = */ lexer.ParseFloat();
            /*FXAction.type = FX_LIGHT;
             *
             * // precache the light material
             * declManager->FindMaterial( FXAction.data );*/
        }
        else if (tokenValue == "model")
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * FXAction.type = FX_MODEL;
             *
             * // precache it
             * renderModelManager->FindModel( FXAction.data );*/
        }
        else if (tokenValue == "particle") // FIXME: now the same as model
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * FXAction.type = FX_PARTICLE;
             *
             * // precache it
             * renderModelManager->FindModel( FXAction.data );*/
        }
        else if (tokenValue == "decal")
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * FXAction.type = FX_DECAL;
             *
             * // precache it
             * declManager->FindMaterial( FXAction.data );*/
        }
        else if (tokenValue == "particletrackvelocity")
        {
            // TODO: FXAction.particleTrackVelocity = true;
        }
        else if (tokenValue == "sound")
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * FXAction.type = FX_SOUND;
             *
             * // precache it
             * declManager->FindSound( FXAction.data );*/
        }
        else if (tokenValue == "ignoremaster")
        {
            /*FXAction.shakeIgnoreMaster = true;*/
        }
        else if (tokenValue == "shockwave")
        {
            token = lexer.ReadToken();
            /*FXAction.data = token;
             * FXAction.type = FX_SHOCKWAVE;
             *
             * // precache the entity def
             * declManager->FindType( DECL_ENTITYDEF, FXAction.data );*/
        }
        else
        {
            // unknown keyword: warn but keep parsing the rest of the block
            lexer.Warning("FX File: bad token");
        }
    }
}
/// <summary>
/// Parses the body of a sound shader decl: distance/volume parameters, speaker
/// masks, behavior flags, and the list of wave files (sample loading is still a
/// porting TODO — see the commented-out C++).
/// </summary>
/// <param name="lexer">Lexer positioned just inside the decl's '{'.</param>
/// <returns>false on a parse error, true on success.</returns>
private bool ParseMaterial(idLexer lexer)
{
    // defaults applied before any keyword overrides them
    _parameters.MinDistance = 1;
    _parameters.MaxDistance = 10;
    _parameters.Volume = 1;
    _speakerMask = 0;
    _altSound = null;

    idToken token;
    string tokenValue;
    // NOTE(review): sampleCount is parsed from "minsamples" but never used here —
    // presumably consumed once sample caching is ported; confirm
    int sampleCount = 0;

    while (true)
    {
        if ((token = lexer.ExpectAnyToken()) == null)
        {
            return(false);
        }

        tokenValue = token.ToString().ToLower();

        if (tokenValue == "}")
        {
            break;
        }
        // minimum number of sounds
        else if (tokenValue == "minsamples")
        {
            sampleCount = lexer.ParseInt();
        }
        else if (tokenValue == "description")
        {
            _description = lexer.ReadTokenOnLine().ToString();
        }
        else if (tokenValue == "mindistance")
        {
            _parameters.MinDistance = lexer.ParseFloat();
        }
        else if (tokenValue == "maxdistance")
        {
            _parameters.MaxDistance = lexer.ParseFloat();
        }
        else if (tokenValue == "shakes")
        {
            // "shakes" may be followed by an explicit amount; bare keyword means 1.0
            token = lexer.ExpectAnyToken();

            if (token.Type == TokenType.Number)
            {
                _parameters.Shakes = token.ToFloat();
            }
            else
            {
                lexer.UnreadToken = token;
                _parameters.Shakes = 1.0f;
            }
        }
        else if (tokenValue == "reverb")
        {
            // parsed only to consume the tokens
            float reg0 = lexer.ParseFloat();

            if (lexer.ExpectTokenString(",") == false)
            {
                return(false);
            }

            float reg1 = lexer.ParseFloat();
            // no longer supported
        }
        else if (tokenValue == "volume")
        {
            _parameters.Volume = lexer.ParseFloat();
        }
        // leadinVolume is used to allow light breaking leadin sounds to be much louder than the broken loop
        else if (tokenValue == "leadinvolume")
        {
            _leadInVolume = lexer.ParseFloat();
        }
        else if (tokenValue == "mask_center")
        {
            _speakerMask |= 1 << (int)Speakers.Center;
        }
        else if (tokenValue == "mask_left")
        {
            _speakerMask |= 1 << (int)Speakers.Left;
        }
        else if (tokenValue == "mask_right")
        {
            _speakerMask |= 1 << (int)Speakers.Right;
        }
        else if (tokenValue == "mask_backright")
        {
            _speakerMask |= 1 << (int)Speakers.BackRight;
        }
        else if (tokenValue == "mask_backleft")
        {
            _speakerMask |= 1 << (int)Speakers.BackLeft;
        }
        else if (tokenValue == "mask_lfe")
        {
            _speakerMask |= 1 << (int)Speakers.Lfe;
        }
        else if (tokenValue == "soundclass")
        {
            _parameters.SoundClass = lexer.ParseInt();

            if (_parameters.SoundClass < 0)
            {
                lexer.Warning("SoundClass out of range");
                return(false);
            }
        }
        else if (tokenValue == "altsound")
        {
            if ((token = lexer.ExpectAnyToken()) == null)
            {
                return(false);
            }

            _altSound = idE.DeclManager.FindSound(token.ToString());
        }
        else if (tokenValue == "ordered")
        {
            // no longer supported
        }
        else if (tokenValue == "no_dups")
        {
            _parameters.Flags |= SoundMaterialFlags.NoDuplicates;
        }
        else if (tokenValue == "no_flicker")
        {
            _parameters.Flags |= SoundMaterialFlags.NoFlicker;
        }
        else if (tokenValue == "plain")
        {
            // no longer supported
        }
        else if (tokenValue == "looping")
        {
            _parameters.Flags |= SoundMaterialFlags.Looping;
        }
        else if (tokenValue == "no_occlusion")
        {
            _parameters.Flags |= SoundMaterialFlags.NoOcclusion;
        }
        else if (tokenValue == "private")
        {
            _parameters.Flags |= SoundMaterialFlags.PrivateSound;
        }
        else if (tokenValue == "antiprivate")
        {
            _parameters.Flags |= SoundMaterialFlags.AntiPrivateSound;
        }
        else if (tokenValue == "playonce")
        {
            _parameters.Flags |= SoundMaterialFlags.PlayOnce;
        }
        else if (tokenValue == "global")
        {
            _parameters.Flags |= SoundMaterialFlags.Global;
        }
        else if (tokenValue == "unclamped")
        {
            _parameters.Flags |= SoundMaterialFlags.Unclamped;
        }
        else if (tokenValue == "omnidirectional")
        {
            _parameters.Flags |= SoundMaterialFlags.OmniDirectional;
        }
        // onDemand can't be a parms, because we must track all references and overrides would confuse it
        else if (tokenValue == "ondemand")
        {
            // no longer loading sounds on demand
            // _onDemand = true;
        }
        // the wave files
        else if (tokenValue == "leadin")
        {
            // add to the leadin list
            if ((token = lexer.ReadToken()) == null)
            {
                lexer.Warning("Expected sound after leadin");
                return(false);
            }

            idConsole.Warning("TODO: leadin");

            /*if(soundSystemLocal.soundCache && numLeadins < maxSamples)
             * {
             *     leadins[numLeadins] = soundSystemLocal.soundCache->FindSound(token.c_str(), onDemand);
             *     numLeadins++;
             * }*/
        }
        else if ((tokenValue.EndsWith(".wav") == true) || (tokenValue.EndsWith(".ogg") == true))
        {
            idConsole.Warning("TODO: .wav|.ogg");

            /*// add to the wav list
             * if(soundSystemLocal.soundCache && numEntries < maxSamples)
             * {
             *     token.BackSlashesToSlashes();
             *     idStr lang = cvarSystem->GetCVarString("sys_lang");
             *     if(lang.Icmp("english") != 0 && token.Find("sound/vo/", false) >= 0)
             *     {
             *         idStr work = token;
             *         work.ToLower();
             *         work.StripLeading("sound/vo/");
             *         work = va("sound/vo/%s/%s", lang.c_str(), work.c_str());
             *         if(fileSystem->ReadFile(work, NULL, NULL) > 0)
             *         {
             *             token = work;
             *         }
             *         else
             *         {
             *             // also try to find it with the .ogg extension
             *             work.SetFileExtension(".ogg");
             *             if(fileSystem->ReadFile(work, NULL, NULL) > 0)
             *             {
             *                 token = work;
             *             }
             *         }
             *     }
             *     entries[numEntries] = soundSystemLocal.soundCache->FindSound(token.c_str(), onDemand);
             *     numEntries++;
             * }*/
        }
        else
        {
            lexer.Warning("unknown token '{0}'", token.ToString());
            return(false);
        }
    }

    if (_parameters.Shakes > 0.0f)
    {
        idConsole.Warning("TODO: CheckShakesAndOgg()");
    }

    return(true);
}
/// <summary>
/// Parses one map entity — its key/value pairs and any brush/patch primitives —
/// from a map file.
/// </summary>
/// <param name="lexer">Lexer positioned at (or before) the entity's '{'.</param>
/// <param name="isWordSpawn">Unused here; kept for interface compatibility.</param>
/// <param name="version">Map format version, forwarded to the primitive parsers.</param>
/// <returns>The parsed entity, or null on EOF/parse error.</returns>
public static idMapEntity Parse(idLexer lexer, bool isWordSpawn = false, float version = idMapFile.CurrentMapVersion)
{
    idToken token;

    if ((token = lexer.ReadToken()) == null)
    {
        return(null);
    }

    if (token.ToString() != "{")
    {
        lexer.Error("idMapEntity.Parse: {{ not found, found {0}", token.ToString());
        return(null);
    }

    idMapEntity mapEnt = new idMapEntity();
    idMapBrush mapBrush = null;
    idMapPatch mapPatch = null;
    Vector3 origin = Vector3.Zero;
    bool worldEnt = false;
    string tokenValue;

    do
    {
        if ((token = lexer.ReadToken()) == null)
        {
            lexer.Error("idMapEntity.Parse: EOF without closing brace");
            return(null);
        }

        if (token.ToString() == "}")
        {
            break;
        }

        if (token.ToString() == "{")
        {
            // parse a brush or patch
            if ((token = lexer.ReadToken()) == null)
            {
                lexer.Error("idMapEntity.Parse: unexpected EOF");
                return(null);
            }

            // world geometry is always in world space
            if (worldEnt == true)
            {
                origin = Vector3.Zero;
            }

            tokenValue = token.ToString();

            // if is it a brush: brush, brushDef, brushDef2, brushDef3
            if (tokenValue.StartsWith("brush", StringComparison.OrdinalIgnoreCase) == true)
            {
                mapBrush = idMapBrush.Parse(lexer, origin, (tokenValue.Equals("brushDef2", StringComparison.OrdinalIgnoreCase) || tokenValue.Equals("brushDef3", StringComparison.OrdinalIgnoreCase)), version);

                if (mapBrush == null)
                {
                    return(null);
                }

                mapEnt.AddPrimitive(mapBrush);
            }
            // if is it a patch: patchDef2, patchDef3
            else if (tokenValue.StartsWith("patch", StringComparison.OrdinalIgnoreCase) == true)
            {
                mapPatch = idMapPatch.Parse(lexer, origin, tokenValue.Equals("patchDef3", StringComparison.OrdinalIgnoreCase), version);

                if (mapPatch == null)
                {
                    return(null);
                }

                mapEnt.AddPrimitive(mapPatch);
            }
            // assume it's a brush in Q3 or older style
            else
            {
                lexer.UnreadToken = token;
                mapBrush = idMapBrush.ParseQ3(lexer, origin);

                if (mapBrush == null)
                {
                    return(null);
                }

                mapEnt.AddPrimitive(mapBrush);
            }
        }
        else
        {
            // parse a key / value pair
            string key = token.ToString();

            // BUGFIX: the original dereferenced the token without a null check;
            // EOF mid-line threw NullReferenceException instead of erroring out
            if ((token = lexer.ReadTokenOnLine()) == null)
            {
                lexer.Error("idMapEntity.Parse: EOF without closing brace");
                return(null);
            }

            string value = token.ToString();

            // strip trailing spaces that sometimes get accidentally added in the editor
            value = value.Trim();
            key = key.Trim();

            mapEnt.Dict.Set(key, value);

            if (key.Equals("origin", StringComparison.OrdinalIgnoreCase) == true)
            {
                // BUGFIX: split tolerates repeated spaces and short vectors (the
                // original indexed parts[0..2] unconditionally), and parses with
                // the invariant culture so map data is locale-independent
                string[] parts = value.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);

                origin = Vector3.Zero;

                if (parts.Length > 0)
                {
                    float.TryParse(parts[0], System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out origin.X);
                }

                if (parts.Length > 1)
                {
                    float.TryParse(parts[1], System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out origin.Y);
                }

                if (parts.Length > 2)
                {
                    float.TryParse(parts[2], System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out origin.Z);
                }
            }
            else if ((key.Equals("classname", StringComparison.OrdinalIgnoreCase) == true) && (value.Equals("worldspawn", StringComparison.OrdinalIgnoreCase) == true))
            {
                worldEnt = true;
            }
        }
    }
    while (true);

    return(mapEnt);
}
/// <summary>
/// Parses a table decl: optional "snap"/"clamp" keywords followed by a
/// brace-enclosed, comma-separated list of float values.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>true on success; false (after MakeDefault) on a parse error.</returns>
public override bool Parse(string text)
{
    if(this.Disposed == true)
    {
        throw new ObjectDisposedException("idDeclTable");
    }

    idLexer lexer = new idLexer(idDeclFile.LexerOptions);
    lexer.LoadMemory(text, this.FileName, this.LineNumber);
    lexer.SkipUntilString("{");

    idToken token;
    List<float> values = new List<float>();
    string tokenLower;
    string tokenValue;

    while(true)
    {
        if((token = lexer.ReadToken()) == null)
        {
            break;
        }

        tokenValue = token.ToString();
        tokenLower = tokenValue.ToLower();

        if(tokenLower == "}")
        {
            break;
        }
        else if(tokenLower == "snap")
        {
            _snap = true;
        }
        else if(tokenLower == "clamp")
        {
            _clamp = true;
        }
        else if(tokenLower == "{")
        {
            // value list: float (',' float)* '}'
            while(true)
            {
                bool errorFlag;
                float v = lexer.ParseFloat(out errorFlag);

                if(errorFlag == true)
                {
                    // we got something non-numeric
                    MakeDefault();
                    return false;
                }

                values.Add(v);

                // BUGFIX: guard EOF — the original dereferenced a null token
                // (NullReferenceException) when the file ended inside the list
                if((token = lexer.ReadToken()) == null)
                {
                    lexer.Warning("expected comma or brace");
                    MakeDefault();
                    return false;
                }

                tokenValue = token.ToString();

                if(tokenValue == "}")
                {
                    break;
                }
                else if(tokenValue == ",")
                {
                    continue;
                }

                lexer.Warning("expected comma or brace");
                MakeDefault();
                return false;
            }
        }
        else
        {
            lexer.Warning("unknown token '{0}'", tokenValue);
            MakeDefault();
            return false;
        }
    }

    // BUGFIX: an empty table previously threw on values[0]
    if(values.Count == 0)
    {
        lexer.Warning("table with no values");
        MakeDefault();
        return false;
    }

    // copy the 0 element to the end, so lerping doesn't
    // need to worry about the wrap case
    float val = values[0];
    values.Add(val);

    _values = values.ToArray();

    return true;
}
/// <summary>
/// Parses a particle decl: a sequence of { } stage blocks plus an optional
/// "depthHack" value, then derives the overall bounds.
/// </summary>
/// <param name="text">Raw decl text.</param>
/// <returns>true on success; false (after MakeDefault) on a parse error.</returns>
public override bool Parse(string text)
{
    if (this.Disposed)
    {
        throw new ObjectDisposedException(this.GetType().Name);
    }

    idLexer lexer = new idLexer(idDeclFile.LexerOptions);
    lexer.LoadMemory(text, this.FileName, this.LineNumber);
    lexer.SkipUntilString("{");

    List<idParticleStage> stageList = new List<idParticleStage>();
    _depthHack = 0.0f;

    idToken token;

    while ((token = lexer.ReadToken()) != null)
    {
        string keyword = token.ToString().ToLower();

        if (keyword == "}")
        {
            break;
        }

        if (keyword == "{")
        {
            // nested block: a single particle stage
            idParticleStage stage = ParseParticleStage(lexer);

            if (stage == null)
            {
                lexer.Warning("Particle stage parse failed");
                MakeDefault();
                return(false);
            }

            stageList.Add(stage);
        }
        else if (keyword == "depthhack")
        {
            _depthHack = lexer.ParseFloat();
        }
        else
        {
            lexer.Warning("bad token {0}", token.ToString());
            MakeDefault();
            return(false);
        }
    }

    _stages = stageList.ToArray();

    //
    // calculate the bounds
    //
    _bounds.Clear();

    for (int stageIndex = 0; stageIndex < _stages.Length; stageIndex++)
    {
        idConsole.Warning("TODO: GetStageBounds");
        // TODO: GetStageBounds(stages[i]);
        _bounds += _stages[stageIndex].Bounds;
    }

    // degenerate bounds: fall back to a small fixed box
    if (_bounds.Volume <= 0.1f)
    {
        _bounds = idBounds.Expand(idBounds.Zero, 8.0f);
    }

    return(true);
}
/// <summary>
/// Parses one particle stage block (up to the closing '}') and derives its
/// cycle time.
/// </summary>
/// <param name="lexer">Lexer positioned just inside the stage's '{'.</param>
/// <returns>The parsed stage (never null; errors are reported via the lexer).</returns>
private idParticleStage ParseParticleStage(idLexer lexer)
{
    idToken token;
    string tokenLower;

    idParticleStage stage = new idParticleStage();
    stage.Default();

    while (true)
    {
        if (lexer.HadError == true)
        {
            break;
        }
        else if ((token = lexer.ReadToken()) == null)
        {
            break;
        }
        else
        {
            tokenLower = token.ToString().ToLower();

            if (tokenLower == "}")
            {
                break;
            }
            else if (tokenLower == "material")
            {
                token = lexer.ReadToken();
                stage.Material = idE.DeclManager.FindMaterial(token.ToString());
            }
            else if (tokenLower == "count")
            {
                stage.TotalParticles = lexer.ParseInt();
            }
            else if (tokenLower == "time")
            {
                stage.ParticleLife = lexer.ParseFloat();
            }
            else if (tokenLower == "cycles")
            {
                stage.Cycles = lexer.ParseFloat();
            }
            else if (tokenLower == "timeoffset")
            {
                stage.TimeOffset = lexer.ParseFloat();
            }
            else if (tokenLower == "deadtime")
            {
                stage.DeadTime = lexer.ParseFloat();
            }
            else if (tokenLower == "randomdistribution")
            {
                stage.RandomDistribution = lexer.ParseBool();
            }
            else if (tokenLower == "bunching")
            {
                stage.SpawnBunching = lexer.ParseFloat();
            }
            else if (tokenLower == "distribution")
            {
                token = lexer.ReadToken();
                tokenLower = token.ToString().ToLower();

                if (tokenLower == "rect")
                {
                    stage.Distribution = ParticleDistribution.Rectangle;
                }
                else if (tokenLower == "cyclinder")
                {
                    stage.Distribution = ParticleDistribution.Cyclinder;
                }
                else if (tokenLower == "sphere")
                {
                    stage.Distribution = ParticleDistribution.Sphere;
                }
                else
                {
                    lexer.Error("bad distribution type: {0}", token.ToString());
                }

                stage.DistributionParameters = ParseParams(lexer, stage.DistributionParameters.Length);
            }
            else if (tokenLower == "direction")
            {
                token = lexer.ReadToken();
                tokenLower = token.ToString().ToLower();

                if (tokenLower == "cone")
                {
                    stage.Direction = ParticleDirection.Cone;
                }
                else if (tokenLower == "outward")
                {
                    stage.Direction = ParticleDirection.Outward;
                }
                else
                {
                    lexer.Error("bad direction type: {0}", token.ToString());
                }

                stage.DirectionParameters = ParseParams(lexer, stage.DirectionParameters.Length);
            }
            else if (tokenLower == "orientation")
            {
                token = lexer.ReadToken();
                tokenLower = token.ToString().ToLower();

                if (tokenLower == "view")
                {
                    stage.Orientation = ParticleOrientation.View;
                }
                else if (tokenLower == "aimed")
                {
                    stage.Orientation = ParticleOrientation.Aimed;
                }
                else if (tokenLower == "x")
                {
                    stage.Orientation = ParticleOrientation.X;
                }
                else if (tokenLower == "y")
                {
                    stage.Orientation = ParticleOrientation.Y;
                }
                else if (tokenLower == "z")
                {
                    stage.Orientation = ParticleOrientation.Z;
                }
                else
                {
                    lexer.Error("bad orientation type: {0}", token.ToString());
                }

                stage.OrientationParameters = ParseParams(lexer, stage.OrientationParameters.Length);
            }
            else if (tokenLower == "custompath")
            {
                token = lexer.ReadToken();
                // BUGFIX: the original re-lowercased the previous keyword
                // ("tokenLower.ToLower().ToLower()") instead of reading the new
                // token, so every custom path type failed with "bad path type"
                tokenLower = token.ToString().ToLower();

                if (tokenLower == "standard")
                {
                    stage.CustomPath = ParticleCustomPath.Standard;
                }
                else if (tokenLower == "helix")
                {
                    stage.CustomPath = ParticleCustomPath.Helix;
                }
                else if (tokenLower == "flies")
                {
                    stage.CustomPath = ParticleCustomPath.Flies;
                }
                else if (tokenLower == "spherical")
                {
                    stage.CustomPath = ParticleCustomPath.Orbit;
                }
                else
                {
                    lexer.Error("bad path type: {0}", token.ToString());
                }

                stage.CustomPathParameters = ParseParams(lexer, stage.CustomPathParameters.Length);
            }
            else if (tokenLower == "speed")
            {
                ParseParametric(lexer, stage.Speed);
            }
            else if (tokenLower == "rotation")
            {
                ParseParametric(lexer, stage.RotationSpeed);
            }
            else if (tokenLower == "angle")
            {
                stage.InitialAngle = lexer.ParseFloat();
            }
            else if (tokenLower == "entitycolor")
            {
                stage.EntityColor = lexer.ParseBool();
            }
            else if (tokenLower == "size")
            {
                ParseParametric(lexer, stage.Size);
            }
            else if (tokenLower == "aspect")
            {
                ParseParametric(lexer, stage.Aspect);
            }
            else if (tokenLower == "fadein")
            {
                stage.FadeInFraction = lexer.ParseFloat();
            }
            else if (tokenLower == "fadeout")
            {
                stage.FadeOutFraction = lexer.ParseFloat();
            }
            else if (tokenLower == "fadeindex")
            {
                stage.FadeIndexFraction = lexer.ParseFloat();
            }
            else if (tokenLower == "color")
            {
                stage.Color = new Vector4(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat());
            }
            else if (tokenLower == "fadecolor")
            {
                stage.FadeColor = new Vector4(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat());
            }
            else if (tokenLower == "offset")
            {
                stage.Offset = new Vector3(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat());
            }
            else if (tokenLower == "animationframes")
            {
                stage.AnimationFrames = lexer.ParseInt();
            }
            else if (tokenLower == "animationrate")
            {
                stage.AnimationRate = lexer.ParseFloat();
            }
            else if (tokenLower == "boundsexpansion")
            {
                stage.BoundsExpansion = lexer.ParseFloat();
            }
            else if (tokenLower == "gravity")
            {
                // optional "world" modifier precedes the gravity value
                token = lexer.ReadToken();
                tokenLower = token.ToString().ToLower();

                if (tokenLower == "world")
                {
                    stage.WorldGravity = true;
                }
                else
                {
                    lexer.UnreadToken = token;
                }

                stage.Gravity = lexer.ParseFloat();
            }
            else
            {
                lexer.Error("unknown token {0}", token.ToString());
            }
        }
    }

    // derive values.
    // BUGFIX: the original cast the float sum to int BEFORE multiplying by 1000,
    // truncating the cycle time to whole seconds (1.5s life became 1000ms, not 1500ms)
    stage.CycleTime = (int)((stage.ParticleLife + stage.DeadTime) * 1000);

    return(stage);
}
/// <summary>
/// Parses one particle stage block (up to the closing '}') and derives its
/// cycle time.
/// </summary>
/// <param name="lexer">Lexer positioned just inside the stage's '{'.</param>
/// <returns>The parsed stage (never null; errors are reported via the lexer).</returns>
private idParticleStage ParseParticleStage(idLexer lexer)
{
    idToken token;
    string tokenLower;

    idParticleStage stage = new idParticleStage();
    stage.Default();

    while(true)
    {
        if(lexer.HadError == true)
        {
            break;
        }
        else if((token = lexer.ReadToken()) == null)
        {
            break;
        }
        else
        {
            tokenLower = token.ToString().ToLower();

            if(tokenLower == "}")
            {
                break;
            }
            else if(tokenLower == "material")
            {
                token = lexer.ReadToken();
                stage.Material = idE.DeclManager.FindMaterial(token.ToString());
            }
            else if(tokenLower == "count")
            {
                stage.TotalParticles = lexer.ParseInt();
            }
            else if(tokenLower == "time")
            {
                stage.ParticleLife = lexer.ParseFloat();
            }
            else if(tokenLower == "cycles")
            {
                stage.Cycles = lexer.ParseFloat();
            }
            else if(tokenLower == "timeoffset")
            {
                stage.TimeOffset = lexer.ParseFloat();
            }
            else if(tokenLower == "deadtime")
            {
                stage.DeadTime = lexer.ParseFloat();
            }
            else if(tokenLower == "randomdistribution")
            {
                stage.RandomDistribution = lexer.ParseBool();
            }
            else if(tokenLower == "bunching")
            {
                stage.SpawnBunching = lexer.ParseFloat();
            }
            else if(tokenLower == "distribution")
            {
                token = lexer.ReadToken();
                tokenLower = token.ToString().ToLower();

                if(tokenLower == "rect")
                {
                    stage.Distribution = ParticleDistribution.Rectangle;
                }
                else if(tokenLower == "cyclinder")
                {
                    stage.Distribution = ParticleDistribution.Cyclinder;
                }
                else if(tokenLower == "sphere")
                {
                    stage.Distribution = ParticleDistribution.Sphere;
                }
                else
                {
                    lexer.Error("bad distribution type: {0}", token.ToString());
                }

                stage.DistributionParameters = ParseParams(lexer, stage.DistributionParameters.Length);
            }
            else if(tokenLower == "direction")
            {
                token = lexer.ReadToken();
                tokenLower = token.ToString().ToLower();

                if(tokenLower == "cone")
                {
                    stage.Direction = ParticleDirection.Cone;
                }
                else if(tokenLower == "outward")
                {
                    stage.Direction = ParticleDirection.Outward;
                }
                else
                {
                    lexer.Error("bad direction type: {0}", token.ToString());
                }

                stage.DirectionParameters = ParseParams(lexer, stage.DirectionParameters.Length);
            }
            else if(tokenLower == "orientation")
            {
                token = lexer.ReadToken();
                tokenLower = token.ToString().ToLower();

                if(tokenLower == "view")
                {
                    stage.Orientation = ParticleOrientation.View;
                }
                else if(tokenLower == "aimed")
                {
                    stage.Orientation = ParticleOrientation.Aimed;
                }
                else if(tokenLower == "x")
                {
                    stage.Orientation = ParticleOrientation.X;
                }
                else if(tokenLower == "y")
                {
                    stage.Orientation = ParticleOrientation.Y;
                }
                else if(tokenLower == "z")
                {
                    stage.Orientation = ParticleOrientation.Z;
                }
                else
                {
                    lexer.Error("bad orientation type: {0}", token.ToString());
                }

                stage.OrientationParameters = ParseParams(lexer, stage.OrientationParameters.Length);
            }
            else if(tokenLower == "custompath")
            {
                token = lexer.ReadToken();
                // BUGFIX: the original re-lowercased the previous keyword
                // ("tokenLower.ToLower().ToLower()") instead of reading the new
                // token, so every custom path type failed with "bad path type"
                tokenLower = token.ToString().ToLower();

                if(tokenLower == "standard")
                {
                    stage.CustomPath = ParticleCustomPath.Standard;
                }
                else if(tokenLower == "helix")
                {
                    stage.CustomPath = ParticleCustomPath.Helix;
                }
                else if(tokenLower == "flies")
                {
                    stage.CustomPath = ParticleCustomPath.Flies;
                }
                else if(tokenLower == "spherical")
                {
                    stage.CustomPath = ParticleCustomPath.Orbit;
                }
                else
                {
                    lexer.Error("bad path type: {0}", token.ToString());
                }

                stage.CustomPathParameters = ParseParams(lexer, stage.CustomPathParameters.Length);
            }
            else if(tokenLower == "speed")
            {
                ParseParametric(lexer, stage.Speed);
            }
            else if(tokenLower == "rotation")
            {
                ParseParametric(lexer, stage.RotationSpeed);
            }
            else if(tokenLower == "angle")
            {
                stage.InitialAngle = lexer.ParseFloat();
            }
            else if(tokenLower == "entitycolor")
            {
                stage.EntityColor = lexer.ParseBool();
            }
            else if(tokenLower == "size")
            {
                ParseParametric(lexer, stage.Size);
            }
            else if(tokenLower == "aspect")
            {
                ParseParametric(lexer, stage.Aspect);
            }
            else if(tokenLower == "fadein")
            {
                stage.FadeInFraction = lexer.ParseFloat();
            }
            else if(tokenLower == "fadeout")
            {
                stage.FadeOutFraction = lexer.ParseFloat();
            }
            else if(tokenLower == "fadeindex")
            {
                stage.FadeIndexFraction = lexer.ParseFloat();
            }
            else if(tokenLower == "color")
            {
                stage.Color = new Vector4(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat());
            }
            else if(tokenLower == "fadecolor")
            {
                stage.FadeColor = new Vector4(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat());
            }
            else if(tokenLower == "offset")
            {
                stage.Offset = new Vector3(lexer.ParseFloat(), lexer.ParseFloat(), lexer.ParseFloat());
            }
            else if(tokenLower == "animationframes")
            {
                stage.AnimationFrames = lexer.ParseInt();
            }
            else if(tokenLower == "animationrate")
            {
                stage.AnimationRate = lexer.ParseFloat();
            }
            else if(tokenLower == "boundsexpansion")
            {
                stage.BoundsExpansion = lexer.ParseFloat();
            }
            else if(tokenLower == "gravity")
            {
                // optional "world" modifier precedes the gravity value
                token = lexer.ReadToken();
                tokenLower = token.ToString().ToLower();

                if(tokenLower == "world")
                {
                    stage.WorldGravity = true;
                }
                else
                {
                    lexer.UnreadToken = token;
                }

                stage.Gravity = lexer.ParseFloat();
            }
            else
            {
                lexer.Error("unknown token {0}", token.ToString());
            }
        }
    }

    // derive values.
    // BUGFIX: the original cast the float sum to int BEFORE multiplying by 1000,
    // truncating the cycle time to whole seconds (1.5s life became 1000ms, not 1500ms)
    stage.CycleTime = (int) ((stage.ParticleLife + stage.DeadTime) * 1000);

    return stage;
}
/// <summary>
/// Parses a PDA decl: identity fields plus lists of referenced email, audio
/// and video decls.
/// </summary>
/// <param name="text">Raw decl source, including the enclosing braces.</param>
/// <returns>True if the decl parsed without lexer errors, otherwise false.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idToken token;
	string tokenLower;

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			// end of decl text.
			break;
		}

		tokenLower = token.ToString().ToLower();

		if(tokenLower == "}")
		{
			break;
		}
		else if(tokenLower == "name")
		{
			token = lexer.ReadToken();
			_pdaName = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "fullname")
		{
			token = lexer.ReadToken();
			_fullName = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "icon")
		{
			token = lexer.ReadToken();
			_icon = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "id")
		{
			token = lexer.ReadToken();
			_id = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "post")
		{
			token = lexer.ReadToken();
			_post = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "title")
		{
			token = lexer.ReadToken();
			_title = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "security")
		{
			token = lexer.ReadToken();
			_security = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "pda_email")
		{
			// BUG FIX: ReadToken returns null on a truncated decl and the
			// original code dereferenced it unconditionally.
			if((token = lexer.ReadToken()) != null)
			{
				_emailList.Add(token.ToString());
				idE.DeclManager.FindType(DeclType.Email, token.ToString());
			}
		}
		else if(tokenLower == "pda_audio")
		{
			if((token = lexer.ReadToken()) != null)
			{
				_audioList.Add(token.ToString());
				idE.DeclManager.FindType(DeclType.Audio, token.ToString());
			}
		}
		else if(tokenLower == "pda_video")
		{
			if((token = lexer.ReadToken()) != null)
			{
				_videoList.Add(token.ToString());
				idE.DeclManager.FindType(DeclType.Video, token.ToString());
			}
		}
	}

	if(lexer.HadError == true)
	{
		lexer.Warning("PDA decl '{0}' had a parse error", this.Name);
		return false;
	}

	// NOTE(review): field names suggest these baseline counts let runtime
	// additions be reset later — confirm against callers.
	_originalVideoCount = _videoList.Count;
	_originalEmailCount = _emailList.Count;

	return true;
}
/// <summary>
/// Parses a particle decl: a sequence of nested stage blocks plus an optional
/// depthHack value, then derives the particle bounds.
/// </summary>
/// <param name="text">Raw decl source, including the enclosing braces.</param>
/// <returns>True on success; false (after defaulting the decl) on a parse error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	List<idParticleStage> parsedStages = new List<idParticleStage>();
	_depthHack = 0.0f;

	idToken token;

	while((token = lexer.ReadToken()) != null)
	{
		string keyword = token.ToString().ToLower();

		if(keyword == "}")
		{
			break;
		}

		if(keyword == "{")
		{
			// each nested block describes one particle stage.
			idParticleStage stage = ParseParticleStage(lexer);

			if(stage == null)
			{
				lexer.Warning("Particle stage parse failed");
				MakeDefault();

				return false;
			}

			parsedStages.Add(stage);
		}
		else if(keyword == "depthhack")
		{
			_depthHack = lexer.ParseFloat();
		}
		else
		{
			lexer.Warning("bad token {0}", token.ToString());
			MakeDefault();

			return false;
		}
	}

	_stages = parsedStages.ToArray();

	//
	// calculate the bounds
	//
	_bounds.Clear();

	for(int i = 0; i < _stages.Length; i++)
	{
		idConsole.Warning("TODO: GetStageBounds");
		// TODO: GetStageBounds(stages[i]);

		_bounds += _stages[i].Bounds;
	}

	// a degenerate volume gets a small default box instead.
	if(_bounds.Volume <= 0.1f)
	{
		_bounds = idBounds.Expand(idBounds.Zero, 8.0f);
	}

	return true;
}
/// <summary>
/// Parses a variable length list of parms on one line.
/// </summary>
/// <param name="lexer">Lexer positioned at the first parm.</param>
/// <param name="maxParms">Maximum number of parms permitted on the line.</param>
/// <returns>The parms read so far, as floats.</returns>
private float[] ParseParams(idLexer lexer, int maxParms)
{
	List<float> values = new List<float>();
	idToken token;

	while((token = lexer.ReadToken()) != null)
	{
		if(values.Count == maxParms)
		{
			lexer.Error("too many parms on line");
			break;
		}

		token.StripQuotes();

		// unparsable tokens fall back to 0.0f rather than aborting the decl.
		float value;
		float.TryParse(token.ToString(), out value);

		values.Add(value);
	}

	return values.ToArray();
}
/// <summary>
/// Parses a particle parameter: either a table decl name, or a numeric value
/// optionally followed by "to &lt;value&gt;" describing a range.
/// </summary>
/// <param name="lexer">Lexer positioned at the parameter.</param>
/// <param name="parm">Parameter object receiving the parsed values.</param>
private void ParseParametric(idLexer lexer, idParticleParameter parm)
{
	idToken token = lexer.ReadToken();

	if(token == null)
	{
		lexer.Error("not enough parameters");
		return;
	}

	if(token.IsNumeric == false)
	{
		// not a number: the token names a table decl instead.
		parm.Table = (idDeclTable) idE.DeclManager.FindType(DeclType.Table, token.ToString(), false);
		return;
	}

	// plain value, possibly followed by "to <value>".
	float value;
	float.TryParse(token.ToString(), out value);

	parm.From = value;
	parm.To = value;

	if((token = lexer.ReadToken()) == null)
	{
		return;
	}

	if(token.ToString().ToLower() == "to")
	{
		if((token = lexer.ReadToken()) == null)
		{
			lexer.Error("missing second parameter");
			return;
		}

		float.TryParse(token.ToString(), out value);
		parm.To = value;
	}
	else
	{
		// not part of this parameter; push it back for the caller.
		lexer.UnreadToken = token;
	}
}
/// <summary>
/// Parses a model def decl: mesh, skin, animations, joint channels and offset.
/// </summary>
/// <param name="text">Raw decl source, including the enclosing braces.</param>
/// <returns>True on success; false (after defaulting the decl) on a parse error.</returns>
public override bool Parse(string text)
{
	if (this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	int defaultAnimationCount = 0;
	idToken token;
	idToken token2;
	string tokenValue;
	string fileName;
	string extension;
	int count;
	idMD5Joint[] md5Joints;

	while (true)
	{
		if ((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		if (tokenValue == "}")
		{
			break;
		}

		if (tokenValue == "inherit")
		{
			idConsole.WriteLine("TODO: inherit");

			/*if( !src.ReadToken( &token2 ) ) {
				src.Warning( "Unexpected end of file" );
				MakeDefault();
				return false;
			}

			const idDeclModelDef *copy = static_cast<const idDeclModelDef *>( declManager->FindType( DECL_MODELDEF, token2, false ) );
			if ( !copy ) {
				common->Warning( "Unknown model definition '%s'", token2.c_str() );
			} else if ( copy->GetState() == DS_DEFAULTED ) {
				common->Warning( "inherited model definition '%s' defaulted", token2.c_str() );
				MakeDefault();
				return false;
			} else {
				CopyDecl( copy );
				numDefaultAnims = anims.Num();
			}*/
		}
		else if (tokenValue == "skin")
		{
			if ((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return (false);
			}

			_skin = idE.DeclManager.FindSkin(token2.ToString());

			if (_skin == null)
			{
				lexer.Warning("Skin '{0}' not found", token2.ToString());
				MakeDefault();

				return (false);
			}
		}
		else if (tokenValue == "mesh")
		{
			if ((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return (false);
			}

			fileName = token2.ToString();
			extension = Path.GetExtension(fileName);

			if (extension != idRenderModel_MD5.MeshExtension)
			{
				lexer.Warning("Invalid model for MD5 mesh");
				MakeDefault();

				return (false);
			}

			_model = idE.RenderModelManager.FindModel(fileName);

			if (_model == null)
			{
				lexer.Warning("Model '{0}' not found", fileName);
				MakeDefault();

				return (false);
			}
			else if (_model.IsDefault == true)
			{
				lexer.Warning("Model '{0}' defaulted", fileName);
				MakeDefault();

				return (false);
			}

			// get the number of joints
			count = _model.JointCount;

			if (count == 0)
			{
				lexer.Warning("Model '{0}' has no joints", fileName);
			}

			// set up the joint hierarchy; channel 0 (All) initially owns every joint.
			md5Joints = _model.Joints;

			_joints = new JointInfo[count];
			_jointParents = new int[count];
			_channelJoints = new int[(int) AnimationChannel.Count][];
			_channelJoints[0] = new int[count];

			for (int i = 0; i < count; i++)
			{
				_joints[i] = new JointInfo();
				_joints[i].Channel = AnimationChannel.All;
				_joints[i].Index = i;

				if (md5Joints[i].Parent != null)
				{
					_joints[i].ParentIndex = _model.GetJointIndex(md5Joints[i].Parent);
				}
				else
				{
					// root joint.
					_joints[i].ParentIndex = -1;
				}

				_jointParents[i] = _joints[i].ParentIndex;
				_channelJoints[0][i] = i;
			}
		}
		else if (tokenValue == "remove")
		{
			idConsole.Warning("TODO: remove");

			// removes any anims whos name matches
			/*if( !src.ReadToken( &token2 ) ) {
				src.Warning( "Unexpected end of file" );
				MakeDefault();
				return false;
			}
			num = 0;
			for( i = 0; i < anims.Num(); i++ ) {
				if ( ( token2 == anims[ i ]->Name() ) || ( token2 == anims[ i ]->FullName() ) ) {
					delete anims[ i ];
					anims.RemoveIndex( i );
					if ( i >= numDefaultAnims ) {
						src.Warning( "Anim '%s' was not inherited.  Anim should be removed from the model def.", token2.c_str() );
						MakeDefault();
						return false;
					}
					i--;
					numDefaultAnims--;
					num++;
					continue;
				}
			}
			if ( !num ) {
				src.Warning( "Couldn't find anim '%s' to remove", token2.c_str() );
				MakeDefault();
				return false;
			}*/
		}
		else if (tokenValue == "anim")
		{
			if (_model == null)
			{
				lexer.Warning("Must specify mesh before defining anims");
				MakeDefault();

				return (false);
			}
			else if (ParseAnimation(lexer, defaultAnimationCount) == false)
			{
				MakeDefault();

				return (false);
			}
		}
		else if (tokenValue == "offset")
		{
			float[] tmp = lexer.Parse1DMatrix(3);

			if (tmp == null)
			{
				lexer.Warning("Expected vector following 'offset'");
				MakeDefault();

				return (false);
			}

			_offset = new Vector3(tmp[0], tmp[1], tmp[2]);
		}
		else if (tokenValue == "channel")
		{
			if (_model == null)
			{
				lexer.Warning("Must specify mesh before defining channels");
				MakeDefault();

				return (false);
			}

			// set the channel for a group of joints
			if ((token2 = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return (false);
			}

			if (lexer.CheckTokenString("(") == false)
			{
				// BUG FIX: the warning previously claimed '{' was expected even
				// though the check above looks for '('.
				lexer.Warning("Expected ( after '{0}'", token2.ToString());
				MakeDefault();

				return (false);
			}

			int i;
			int channelCount = (int) AnimationChannel.Count;

			// look the channel name up (skipping the implicit All channel).
			for (i = (int) AnimationChannel.All + 1; i < channelCount; i++)
			{
				if (ChannelNames[i].Equals(token2.ToString(), StringComparison.OrdinalIgnoreCase) == true)
				{
					break;
				}
			}

			if (i >= channelCount)
			{
				lexer.Warning("Unknown channel '{0}'", token2.ToString());
				MakeDefault();

				return (false);
			}

			int channel = i;
			StringBuilder jointNames = new StringBuilder();
			string token2Value;

			// gather the joint name expression up to the closing ')'; '*' and '-'
			// are glued to the following name, everything else is space separated.
			while (lexer.CheckTokenString(")") == false)
			{
				if ((token2 = lexer.ReadToken()) == null)
				{
					lexer.Warning("Unexpected end of file");
					MakeDefault();

					return (false);
				}

				token2Value = token2.ToString();
				jointNames.Append(token2Value);

				if ((token2Value != "*") && (token2Value != "-"))
				{
					jointNames.Append(" ");
				}
			}

			int[] jointList = GetJointList(jointNames.ToString());
			int jointLength = jointList.Length;

			List<int> channelJoints = new List<int>();

			// the count reset is vestigial from the C++ original, which filled a
			// raw array instead of a list.
			for (count = i = 0; i < jointLength; i++)
			{
				int jointIndex = jointList[i];

				if (_joints[jointIndex].Channel != AnimationChannel.All)
				{
					// BUG FIX: message previously read "Join".
					lexer.Warning("Joint '{0}' assigned to multiple channels", _model.GetJointName(jointIndex));
					continue;
				}

				_joints[jointIndex].Channel = (AnimationChannel) channel;

				channelJoints.Add(jointIndex);
			}

			_channelJoints[channel] = channelJoints.ToArray();
		}
		else
		{
			lexer.Warning("unknown token '{0}'", token.ToString());
			MakeDefault();

			return (false);
		}
	}

	return (true);
}
/// <summary>
/// Parses one FX action block from an fx decl.
/// </summary>
/// <remarks>
/// The port does not yet have an idFXSingleAction type, so this method only
/// consumes the action's tokens; the original C++ field assignments are kept
/// as comments to document the parse order and intended semantics.
/// </remarks>
/// <param name="lexer">Lexer positioned just inside the action's opening brace.</param>
private void ParseSingleAction(idLexer lexer /*idFXSingleAction& FXAction*/)
{
	idToken token;
	string tokenValue;

	// default field values from the C++ original (not yet ported).
	/*FXAction.type = -1;
	FXAction.sibling = -1;

	FXAction.data = "<none>";
	FXAction.name = "<none>";
	FXAction.fire = "<none>";

	FXAction.delay = 0.0f;
	FXAction.duration = 0.0f;
	FXAction.restart = 0.0f;
	FXAction.size = 0.0f;
	FXAction.fadeInTime = 0.0f;
	FXAction.fadeOutTime = 0.0f;
	FXAction.shakeTime = 0.0f;
	FXAction.shakeAmplitude = 0.0f;
	FXAction.shakeDistance = 0.0f;
	FXAction.shakeFalloff = false;
	FXAction.shakeImpulse = 0.0f;
	FXAction.shakeIgnoreMaster = false;
	FXAction.lightRadius = 0.0f;
	FXAction.rotate = 0.0f;
	FXAction.random1 = 0.0f;
	FXAction.random2 = 0.0f;

	FXAction.lightColor = vec3_origin;
	FXAction.offset = vec3_origin;
	FXAction.axis = mat3_identity;

	FXAction.bindParticles = false;
	FXAction.explicitAxis = false;
	FXAction.noshadows = false;
	FXAction.particleTrackVelocity = false;
	FXAction.trackOrigin = false;
	FXAction.soundStarted = false;*/

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			// unexpected end of text; the caller's lexer reports the error.
			break;
		}

		tokenValue = token.ToString().ToLower();

		if(tokenValue == "}")
		{
			break;
		}
		else if(tokenValue == "shake")
		{
			// shake <time>, <amplitude>, <distance>, <falloff>, <impulse>
			/*FXAction.type = FX_SHAKE;*/
			/*FXAction.shakeTime = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*FXAction.shakeAmplitude = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*FXAction.shakeDistance = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*FXAction.shakeFalloff = */lexer.ParseBool();
			lexer.ExpectTokenString(",");
			/*FXAction.shakeImpulse = */lexer.ParseFloat();
		}
		else if(tokenValue == "noshadows")
		{
			// TODO: FXAction.noshadows = true;
		}
		else if(tokenValue == "name")
		{
			token = lexer.ReadToken();
			// TODO: FXAction.name = token;
		}
		else if(tokenValue == "fire")
		{
			token = lexer.ReadToken();
			// TODO: FXAction.fire = token;
		}
		else if(tokenValue == "random")
		{
			// random <min>, <max>
			/*FXAction.random1 = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*FXAction.random2 = */lexer.ParseFloat();
			// FXAction.delay = 0.0f;		// check random
		}
		else if(tokenValue == "delay")
		{
			/*FXAction.delay = */lexer.ParseFloat();
		}
		else if(tokenValue == "rotate")
		{
			/*FXAction.rotate = */lexer.ParseFloat();
		}
		else if(tokenValue == "duration")
		{
			/*FXAction.duration = */lexer.ParseFloat();
		}
		else if(tokenValue == "trackorigin")
		{
			/*FXAction.trackOrigin = */lexer.ParseBool();
		}
		else if(tokenValue == "restart")
		{
			/*FXAction.restart = */lexer.ParseFloat();
		}
		else if(tokenValue == "fadein")
		{
			/*FXAction.fadeInTime = */lexer.ParseFloat();
		}
		else if(tokenValue == "fadeout")
		{
			/*FXAction.fadeOutTime = */lexer.ParseFloat();
		}
		else if(tokenValue == "size")
		{
			/*FXAction.size = */lexer.ParseFloat();
		}
		else if(tokenValue == "offset")
		{
			// offset <x>, <y>, <z>
			/*FXAction.offset.x = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*FXAction.offset.y = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*FXAction.offset.z = */lexer.ParseFloat();
		}
		else if(tokenValue == "axis")
		{
			// axis <x>, <y>, <z> — explicit orientation vector.
			/*idVec3 v;*/
			/*v.x = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*v.y = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*v.z = */lexer.ParseFloat();

			/*v.Normalize();
			FXAction.axis = v.ToMat3();
			FXAction.explicitAxis = true;*/
		}
		else if(tokenValue == "angle")
		{
			// angle <pitch>, <yaw>, <roll> — explicit orientation angles.
			/*idAngles a;*/
			/*a[0] = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*a[1] = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*a[2] = */lexer.ParseFloat();

			/*FXAction.axis = a.ToMat3();
			FXAction.explicitAxis = true;*/
		}
		else if(tokenValue == "uselight")
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			for( int i = 0; i < events.Num(); i++ ) {
				if ( events[i].name.Icmp( FXAction.data ) == 0 ) {
					FXAction.sibling = i;
					FXAction.lightColor = events[i].lightColor;
					FXAction.lightRadius = events[i].lightRadius;
				}
			}
			FXAction.type = FX_LIGHT;

			// precache the light material
			declManager->FindMaterial( FXAction.data );*/
		}
		else if(tokenValue == "attachlight")
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			FXAction.type = FX_ATTACHLIGHT;

			// precache it
			declManager->FindMaterial( FXAction.data );*/
		}
		else if(tokenValue == "attachentity")
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			FXAction.type = FX_ATTACHENTITY;

			// precache the model
			renderModelManager->FindModel( FXAction.data );*/
		}
		else if(tokenValue == "launch")
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			FXAction.type = FX_LAUNCH;

			// precache the entity def
			declManager->FindType( DECL_ENTITYDEF, FXAction.data );*/
		}
		else if(tokenValue == "usemodel")
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			for( int i = 0; i < events.Num(); i++ ) {
				if ( events[i].name.Icmp( FXAction.data ) == 0 ) {
					FXAction.sibling = i;
				}
			}
			FXAction.type = FX_MODEL;

			// precache the model
			renderModelManager->FindModel( FXAction.data );*/
		}
		else if(tokenValue == "light")
		{
			// light <material>, <r>, <g>, <b>, <radius>
			token = lexer.ReadToken();
			/*FXAction.data = token;*/
			lexer.ExpectTokenString(",");
			/*FXAction.lightColor[0] = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*FXAction.lightColor[1] = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*FXAction.lightColor[2] = */lexer.ParseFloat();
			lexer.ExpectTokenString(",");
			/*FXAction.lightRadius = */lexer.ParseFloat();

			/*FXAction.type = FX_LIGHT;

			// precache the light material
			declManager->FindMaterial( FXAction.data );*/
		}
		else if(tokenValue == "model")
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			FXAction.type = FX_MODEL;

			// precache it
			renderModelManager->FindModel( FXAction.data );*/
		}
		else if(tokenValue == "particle") // FIXME: now the same as model
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			FXAction.type = FX_PARTICLE;

			// precache it
			renderModelManager->FindModel( FXAction.data );*/
		}
		else if(tokenValue == "decal")
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			FXAction.type = FX_DECAL;

			// precache it
			declManager->FindMaterial( FXAction.data );*/
		}
		else if(tokenValue == "particletrackvelocity")
		{
			// TODO: FXAction.particleTrackVelocity = true;
		}
		else if(tokenValue == "sound")
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			FXAction.type = FX_SOUND;

			// precache it
			declManager->FindSound( FXAction.data );*/
		}
		else if(tokenValue == "ignoremaster")
		{
			/*FXAction.shakeIgnoreMaster = true;*/
		}
		else if(tokenValue == "shockwave")
		{
			token = lexer.ReadToken();

			/*FXAction.data = token;
			FXAction.type = FX_SHOCKWAVE;

			// precache the entity def
			declManager->FindType( DECL_ENTITYDEF, FXAction.data );*/
		}
		else
		{
			lexer.Warning("FX File: bad token");
		}
	}
}
/// <summary>
/// Parses a patchDef2/patchDef3 primitive from a map file.
/// </summary>
/// <param name="lexer">Lexer positioned at the patch block.</param>
/// <param name="origin">Entity origin; subtracted from each control point position.</param>
/// <param name="patchDef3">True when parsing the patchDef3 format (7 info values with explicit subdivisions).</param>
/// <param name="version">Map format version; pre-2.0 materials get an implicit 'textures/' prefix.</param>
/// <returns>The parsed patch, or null on a parse error.</returns>
public static idMapPatch Parse(idLexer lexer, Vector3 origin, bool patchDef3 = true, float version = idMapFile.CurrentMapVersion)
{
	if (lexer.ExpectTokenString("{") == false)
	{
		return (null);
	}

	// read the material (we had an implicit 'textures/' in the old format...)
	idToken token = lexer.ReadToken();

	if (token == null)
	{
		lexer.Error("idMapPatch::Parse: unexpected EOF");
		return (null);
	}

	// Parse it
	float[] info;

	if (patchDef3 == true)
	{
		info = lexer.Parse1DMatrix(7);

		if (info == null)
		{
			lexer.Error("idMapPatch::Parse: unable to Parse patchDef3 info");
			return (null);
		}
	}
	else
	{
		info = lexer.Parse1DMatrix(5);

		if (info == null)
		{
			lexer.Error("idMapPatch::Parse: unable to parse patchDef2 info");
			return (null);
		}
	}

	// info[0] = width, info[1] = height.
	idMapPatch patch = new idMapPatch((int) info[0], (int) info[1]);

	if (version < 2.0f)
	{
		patch.Material = "textures/" + token.ToString();
	}
	else
	{
		patch.Material = token.ToString();
	}

	if (patchDef3 == true)
	{
		patch.HorizontalSubdivisions = (int) info[2];
		patch.VerticalSubdivisions = (int) info[3];
		patch.ExplicitlySubdivided = true;
	}

	if ((patch.Width < 0) || (patch.Height < 0))
	{
		lexer.Error("idMapPatch::Parse: bad size");
		return (null);
	}

	// these were written out in the wrong order, IMHO
	if (lexer.ExpectTokenString("(") == false)
	{
		lexer.Error("idMapPatch::Parse: bad patch vertex data");
		return (null);
	}

	// rows iterate over width, columns over height (see the comment above);
	// the vertex index is therefore column-major: i * width + j.
	for (int j = 0; j < patch.Width; j++)
	{
		if (lexer.ExpectTokenString("(") == false)
		{
			lexer.Error("idMapPatch::Parse: bad vertex row data");
			return (null);
		}

		for (int i = 0; i < patch.Height; i++)
		{
			float[] v = lexer.Parse1DMatrix(5);

			if (v == null)
			{
				lexer.Error("idMapPatch::Parse: bad vertex column data");
				return (null);
			}

			// v = x y z s t; positions are stored relative to the entity origin.
			Vertex vert = new Vertex();
			vert.Position.X = v[0] - origin.X;
			vert.Position.Y = v[1] - origin.Y;
			vert.Position.Z = v[2] - origin.Z;
			vert.TextureCoordinates = new Vector2(v[3], v[4]);

			patch.SetVertex(i * patch.Width + j, vert);
		}

		if (lexer.ExpectTokenString(")") == false)
		{
			lexer.Error("idMapPatch::Parse: unable to parse patch control points");
			return (null);
		}
	}

	if (lexer.ExpectTokenString(")") == false)
	{
		lexer.Error("idMapPatch::Parse: unable to parse patch control points, no closure");
		return (null);
	}

	// read any key/value pairs
	while ((token = lexer.ReadToken()) != null)
	{
		if (token.ToString() == "}")
		{
			// a closing '}' ends the epair list; a second one closes the primitive.
			lexer.ExpectTokenString("}");
			break;
		}

		if (token.Type == TokenType.String)
		{
			string key = token.ToString();
			token = lexer.ExpectTokenType(TokenType.String, 0);

			patch.Dict.Set(key, token.ToString());
		}
	}

	return (patch);
}
/// <summary>
/// Parses an FX decl.  Currently only steps over the decl body; individual
/// actions are consumed by ParseSingleAction but not yet stored.
/// </summary>
/// <param name="text">Raw decl source, including the enclosing braces.</param>
/// <returns>True if the decl parsed without lexer errors, otherwise false.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idConsole.Warning("TODO: actual fx parsing, we only step over the block");

	idToken token;

	while((token = lexer.ReadToken()) != null)
	{
		string keyword = token.ToString().ToLower();

		if(keyword == "}")
		{
			break;
		}

		if(keyword == "bindto")
		{
			token = lexer.ReadToken();
			idConsole.Warning("TODO: FX: joint = token;");
		}
		else if(keyword == "{")
		{
			// nested block: one FX action.
			idConsole.Warning("TODO: FX: idFXSingleAction action;");
			ParseSingleAction(lexer/*, action*/);
			// events.Append(action);
		}
	}

	if(lexer.HadError == true)
	{
		lexer.Warning("FX decl '{0}' had a parse error", this.Name);
		return false;
	}

	return true;
}
/// <summary>
/// Parses a skin decl: a list of material remappings plus the models the skin
/// is associated with.
/// </summary>
/// <param name="text">Raw decl source, including the enclosing braces.</param>
/// <returns>True on success; false (after defaulting the decl) on a parse error.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	List<SkinMapping> mappings = new List<SkinMapping>();
	List<string> associatedModels = new List<string>();

	idToken token, token2;
	string tokenLower;

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenLower = token.ToString().ToLower();

		if(tokenLower == "}")
		{
			break;
		}
		else if((token2 = lexer.ReadToken()) == null)
		{
			// every directive is a pair; a lone token means the decl is truncated.
			// BUG FIX: previously broke out of the loop and then overwrote the
			// defaulted decl's mappings with the partial lists.
			lexer.Warning("Unexpected end of file");
			MakeDefault();

			return false;
		}
		else if(tokenLower == "model")
		{
			associatedModels.Add(token2.ToString());
			continue;
		}

		SkinMapping map = new SkinMapping();
		map.To = idE.DeclManager.FindMaterial(token2.ToString());

		if(tokenLower == "*")
		{
			// wildcard: remap every material.
			map.From = null;
		}
		else
		{
			map.From = idE.DeclManager.FindMaterial(token.ToString());
		}

		mappings.Add(map);
	}

	_mappings = mappings.ToArray();
	_associatedModels = associatedModels.ToArray();

	// BUG FIX: this previously returned false unconditionally, marking every
	// successfully parsed skin decl as failed.
	return true;
}
/// <summary>
/// Takes a string and breaks it up into arg tokens.
/// </summary>
/// <param name="text">The command text to tokenize.</param>
/// <param name="keepAsStrings">true to only seperate tokens from whitespace and comments, ignoring punctuation.</param>
public void TokenizeString(string text, bool keepAsStrings)
{
	// clear previous args.
	_args = new string[] { };

	if (text.Length == 0)
	{
		return;
	}

	idLexer lexer = new idLexer();
	lexer.LoadMemory(text, "idCmdSystem.TokenizeString");
	lexer.Options = LexerOptions.NoErrors | LexerOptions.NoWarnings | LexerOptions.NoStringConcatination
		| LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowIPAddresses
		| ((keepAsStrings == true) ? LexerOptions.OnlyStrings : 0);

	idToken token = null, number = null;
	List<string> newArgs = new List<string>();
	string tokenValue;

	while (true)
	{
		if (newArgs.Count == idE.MaxCommandArgs)
		{
			break; // this is usually something malicious.
		}

		if ((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		if ((keepAsStrings == false) && (tokenValue == "-"))
		{
			// check for negative numbers.
			if ((number = lexer.CheckTokenType(TokenType.Number, 0)) != null)
			{
				token.Set("-" + number);
			}
		}

		// check for cvar expansion
		if (tokenValue == "$")
		{
			if ((token = lexer.ReadToken()) == null)
			{
				break;
			}

			if (idE.CvarSystem.IsInitialized == true)
			{
				token.Set(idE.CvarSystem.GetString(token.ToString()));
			}
			else
			{
				token.Set("<unknown>");
			}
		}

		// regular token.  (the original also tracked per-token and total
		// lengths here, but never read them — dead code removed.)
		newArgs.Add(token.ToString());
	}

	_args = newArgs.ToArray();
}
/// <summary>
/// Parses a video decl: audio track, info text, display name and preview clip.
/// </summary>
/// <param name="text">Raw decl source, including the enclosing braces.</param>
/// <returns>True if the decl parsed without lexer errors, otherwise false.</returns>
public override bool Parse(string text)
{
	idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames
		| LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination
		| LexerOptions.NoFatalErrors);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;
	string tokenValue;

	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString().ToLower();

		if(tokenValue == "}")
		{
			break;
		}

		if(tokenValue == "audio")
		{
			// BUG FIX: ReadToken returns null on a truncated decl and the
			// original code dereferenced it unconditionally.
			if((token = lexer.ReadToken()) != null)
			{
				_audio = token.ToString();
				idE.DeclManager.FindSound(_audio);
			}
		}
		else if(tokenValue == "info")
		{
			if((token = lexer.ReadToken()) != null)
			{
				_info = token.ToString();
			}
		}
		else if(tokenValue == "name")
		{
			if((token = lexer.ReadToken()) != null)
			{
				_audioName = token.ToString();
			}
		}
		else if(tokenValue == "preview")
		{
			if((token = lexer.ReadToken()) != null)
			{
				_preview = token.ToString();
			}
		}
	}

	if(lexer.HadError == true)
	{
		lexer.Warning("Video decl '{0}' had a parse error", this.Name);
		return false;
	}

	return true;
}
/// <summary>
/// Parses an email decl: header fields plus a brace-enclosed body.
/// </summary>
/// <param name="text">Raw decl source, including the enclosing braces.</param>
/// <returns>True if the decl parsed without lexer errors, otherwise false.</returns>
public override bool Parse(string text)
{
	if(this.Disposed == true)
	{
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames
		| LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination
		| LexerOptions.NoFatalErrors);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;

	_text = string.Empty;

	string tokenLower;
	string tokenValue;

	// scan through, identifying each individual parameter
	while(true)
	{
		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();
		tokenLower = tokenValue.ToLower();

		if(tokenValue == "}")
		{
			break;
		}
		else if(tokenLower == "subject")
		{
			// BUG FIX: ReadToken returns null on a truncated decl and the
			// original code dereferenced it unconditionally.
			token = lexer.ReadToken();
			_subject = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "to")
		{
			token = lexer.ReadToken();
			_to = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "from")
		{
			token = lexer.ReadToken();
			_from = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "date")
		{
			token = lexer.ReadToken();
			_date = (token != null) ? token.ToString() : string.Empty;
		}
		else if(tokenLower == "text")
		{
			token = lexer.ReadToken();

			// BUG FIX: also guards the null token, and the warning previously
			// read "Email dec" unlike the message at the end of this method.
			if((token == null) || (token.ToString() != "{"))
			{
				lexer.Warning("Email decl '{0}' had a parse error", this.Name);
				return false;
			}

			// accumulate the body tokens up to the closing brace.
			while(((token = lexer.ReadToken()) != null) && (token.ToString() != "}"))
			{
				_text += token.ToString();
			}
		}
		else if(tokenLower == "image")
		{
			token = lexer.ReadToken();
			_image = (token != null) ? token.ToString() : string.Empty;
		}
	}

	if(lexer.HadError == true)
	{
		lexer.Warning("Email decl '{0}' had a parse error", this.Name);
		return false;
	}

	return true;
}