/// <summary>
/// Parses a brush in the old Q3 three-point-plane format.
/// </summary>
/// <param name="lexer">Source stream, positioned just inside the opening brace of the brush.</param>
/// <param name="origin">Entity origin; subtracted from the plane points so sides are entity-relative.</param>
/// <returns>The parsed brush, or null on a parse error.</returns>
public static idMapBrush ParseQ3(idLexer lexer, Vector3 origin)
{
    int rotate;
    int[] shift = new int[2];
    float[] scale = new float[2];

    Vector3[] planePoints = new Vector3[3];
    List<idMapBrushSide> sides = new List<idMapBrushSide>();
    idMapBrushSide side;
    idToken token;

    do
    {
        // closing brace ends the brush
        if(lexer.CheckTokenString("}") == true)
        {
            break;
        }

        side = new idMapBrushSide();
        sides.Add(side);

        // read the three point plane definition.
        // short-circuit so we stop lexing as soon as one matrix fails,
        // consistent with idMapBrush.Parse
        float[] tmp, tmp2, tmp3;

        if(((tmp = lexer.Parse1DMatrix(3)) == null)
            || ((tmp2 = lexer.Parse1DMatrix(3)) == null)
            || ((tmp3 = lexer.Parse1DMatrix(3)) == null))
        {
            lexer.Error("idMapBrush::ParseQ3: unable to read brush side plane definition");
            return null;
        }

        planePoints[0] = new Vector3(tmp[0], tmp[1], tmp[2]) - origin;
        planePoints[1] = new Vector3(tmp2[0], tmp2[1], tmp2[2]) - origin;
        planePoints[2] = new Vector3(tmp3[0], tmp3[1], tmp3[2]) - origin;

        side.Plane.FromPoints(planePoints[0], planePoints[1], planePoints[2]);

        // read the material
        token = lexer.ReadTokenOnLine();

        if(token == null)
        {
            lexer.Error("idMapBrush::ParseQ3: unable to read brush side material");
            return null;
        }

        // we have an implicit 'textures/' in the old format
        side.Material = "textures/" + token.ToString();

        // read the texture shift, rotate and scale.
        // these are parsed only to advance the stream; the old-format values are
        // discarded in favor of the fixed 1/32 texture matrix below
        shift[0] = lexer.ParseInt();
        shift[1] = lexer.ParseInt();

        rotate = lexer.ParseInt();

        scale[0] = lexer.ParseFloat();
        scale[1] = lexer.ParseFloat();

        side.TextureMatrix[0] = new Vector3(0.03125f, 0.0f, 0.0f);
        side.TextureMatrix[1] = new Vector3(0.0f, 0.03125f, 0.0f);
        side.Origin = origin;

        // Q2 allowed override of default flags and values, but we don't any more.
        // skip up to three trailing tokens on the line if present
        if(lexer.ReadTokenOnLine() != null)
        {
            if(lexer.ReadTokenOnLine() != null)
            {
                if(lexer.ReadTokenOnLine() != null)
                {
                }
            }
        }
    }
    while(true);

    idMapBrush brush = new idMapBrush();

    for(int i = 0; i < sides.Count; i++)
    {
        brush.AddSide(sides[i]);
    }

    brush.Dict = new idDict();

    return brush;
}
/// <summary>
/// Parses a single map entity: key/value pairs plus any number of brush/patch primitives.
/// </summary>
/// <param name="lexer">Source stream, positioned before the entity's opening brace.</param>
/// <param name="isWordSpawn">Unused here; kept for interface compatibility with callers.</param>
/// <param name="version">Map format version, forwarded to the primitive parsers.</param>
/// <returns>The parsed entity, or null on EOF/parse error.</returns>
public static idMapEntity Parse(idLexer lexer, bool isWordSpawn = false, float version = idMapFile.CurrentMapVersion)
{
    idToken token;

    if((token = lexer.ReadToken()) == null)
    {
        return null;
    }

    if(token.ToString() != "{")
    {
        lexer.Error("idMapEntity.Parse: {{ not found, found {0}", token.ToString());
        return null;
    }

    idMapEntity mapEnt = new idMapEntity();
    idMapBrush mapBrush = null;
    idMapPatch mapPatch = null;
    Vector3 origin = Vector3.Zero;
    bool worldEnt = false;
    string tokenValue;

    do
    {
        if((token = lexer.ReadToken()) == null)
        {
            lexer.Error("idMapEntity.Parse: EOF without closing brace");
            return null;
        }

        if(token.ToString() == "}")
        {
            break;
        }

        if(token.ToString() == "{")
        {
            // parse a brush or patch
            if((token = lexer.ReadToken()) == null)
            {
                lexer.Error("idMapEntity.Parse: unexpected EOF");
                return null;
            }

            // worldspawn primitives are always absolute
            if(worldEnt == true)
            {
                origin = Vector3.Zero;
            }

            tokenValue = token.ToString();

            // if is it a brush: brush, brushDef, brushDef2, brushDef3
            if(tokenValue.StartsWith("brush", StringComparison.OrdinalIgnoreCase) == true)
            {
                mapBrush = idMapBrush.Parse(lexer, origin, (tokenValue.Equals("brushDef2", StringComparison.OrdinalIgnoreCase) || tokenValue.Equals("brushDef3", StringComparison.OrdinalIgnoreCase)), version);

                if(mapBrush == null)
                {
                    return null;
                }

                mapEnt.AddPrimitive(mapBrush);
            }
            // if is it a patch: patchDef2, patchDef3
            else if(tokenValue.StartsWith("patch", StringComparison.OrdinalIgnoreCase) == true)
            {
                mapPatch = idMapPatch.Parse(lexer, origin, tokenValue.Equals("patchDef3", StringComparison.OrdinalIgnoreCase), version);

                if(mapPatch == null)
                {
                    return null;
                }

                mapEnt.AddPrimitive(mapPatch);
            }
            // assume it's a brush in Q3 or older style
            else
            {
                lexer.UnreadToken = token;
                mapBrush = idMapBrush.ParseQ3(lexer, origin);

                if(mapBrush == null)
                {
                    return null;
                }

                mapEnt.AddPrimitive(mapBrush);
            }
        }
        else
        {
            // parse a key / value pair
            string key = token.ToString();
            token = lexer.ReadTokenOnLine();

            // FIX: a key with no value on the same line previously caused a
            // NullReferenceException; report it as a parse error instead
            if(token == null)
            {
                lexer.Error("idMapEntity.Parse: expected value for key '{0}'", key);
                return null;
            }

            string value = token.ToString();

            // strip trailing spaces that sometimes get accidentally added in the editor
            value = value.Trim();
            key = key.Trim();

            mapEnt.Dict.Set(key, value);

            if(key.Equals("origin", StringComparison.OrdinalIgnoreCase) == true)
            {
                // parse component-wise; RemoveEmptyEntries tolerates repeated
                // whitespace between components, and the length guard avoids
                // indexing past the end of a malformed origin string
                string[] parts = value.Split(new char[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries);

                if(parts.Length >= 3)
                {
                    float.TryParse(parts[0], out origin.X);
                    float.TryParse(parts[1], out origin.Y);
                    float.TryParse(parts[2], out origin.Z);
                }
            }
            else if((key.Equals("classname", StringComparison.OrdinalIgnoreCase) == true) && (value.Equals("worldspawn", StringComparison.OrdinalIgnoreCase) == true))
            {
                worldEnt = true;
            }
        }
    }
    while(true);

    return mapEnt;
}
/// <summary>
/// Parses a brush in the Doom 3 formats: either the new plane-equation form
/// (brushDef2/brushDef3) or the old three-point-plane form.
/// </summary>
/// <param name="lexer">Source stream, positioned before the brush's opening brace.</param>
/// <param name="origin">Entity origin; subtracted from old-format plane points so sides are entity-relative.</param>
/// <param name="newFormat">True to read a 4-component plane equation per side, false for three points.</param>
/// <param name="version">Map format version; versions below 2.0 get an implicit 'textures/' material prefix.</param>
/// <returns>The parsed brush (with any editor epairs in its Dict), or null on a parse error.</returns>
public static idMapBrush Parse(idLexer lexer, Vector3 origin, bool newFormat = true, float version = idMapFile.CurrentMapVersion)
{
    idToken token;
    idMapBrushSide side;
    List<idMapBrushSide> sides = new List<idMapBrushSide>();
    idDict dict = new idDict();
    Vector3[] planePoints = new Vector3[3];

    if(lexer.ExpectTokenString("{") == false)
    {
        return null;
    }

    do
    {
        if((token = lexer.ReadToken()) == null)
        {
            lexer.Error("idMapBrush::Parse: unexpected EOF");
            return null;
        }

        // closing brace ends the side list
        if(token.ToString() == "}")
        {
            break;
        }

        // here we may have to jump over brush epairs ( only used in editor )
        do
        {
            // if token is a brace, the side definition starts here
            if(token.ToString() == "(")
            {
                break;
            }

            // the token should be a key string for a key/value pair
            if(token.Type != TokenType.String)
            {
                lexer.Error("idMapBrush::Parse: unexpected {0}, expected ( or epair key string", token.ToString());
                return null;
            }

            string key = token.ToString();

            if(((token = lexer.ReadTokenOnLine()) == null) || (token.Type != TokenType.String))
            {
                lexer.Error("idMapBrush::Parse: expected epair value string not found");
                return null;
            }

            dict.Set(key, token.ToString());

            // try to read the next key
            if((token = lexer.ReadToken()) == null)
            {
                lexer.Error("idMapBrush::Parse: unexpected EOF");
                return null;
            }
        }
        while(true);

        // push the "(" back so the matrix parser sees it
        lexer.UnreadToken = token;

        side = new idMapBrushSide();
        sides.Add(side);

        if(newFormat == true)
        {
            // plane equation form: ( a b c d )
            float[] tmp = lexer.Parse1DMatrix(4);

            if(tmp == null)
            {
                lexer.Error("idMapBrush::Parse: unable to read brush side plane definition");
                return null;
            }
            else
            {
                side.Plane = new Plane(tmp[0], tmp[1], tmp[2], tmp[3]);
            }
        }
        else
        {
            // read the three point plane definition
            float[] tmp, tmp2, tmp3;

            if(((tmp = lexer.Parse1DMatrix(3)) == null) || ((tmp2 = lexer.Parse1DMatrix(3)) == null) || ((tmp3 = lexer.Parse1DMatrix(3)) == null))
            {
                lexer.Error("idMapBrush::Parse: unable to read brush side plane definition");
                return null;
            }

            // make the points entity-relative before deriving the plane
            planePoints[0] = new Vector3(tmp[0], tmp[1], tmp[2]) - origin;
            planePoints[1] = new Vector3(tmp2[0], tmp2[1], tmp2[2]) - origin;
            planePoints[2] = new Vector3(tmp3[0], tmp3[1], tmp3[2]) - origin;

            side.Plane.FromPoints(planePoints[0], planePoints[1], planePoints[2]);
        }

        // read the texture matrix
        // this is odd, because the texmat is 2D relative to default planar texture axis
        float[,] tmp5 = lexer.Parse2DMatrix(2, 3);

        if(tmp5 == null)
        {
            lexer.Error("idMapBrush::Parse: unable to read brush side texture matrix");
            return null;
        }

        side.TextureMatrix[0] = new Vector3(tmp5[0, 0], tmp5[0, 1], tmp5[0, 2]);
        side.TextureMatrix[1] = new Vector3(tmp5[1, 0], tmp5[1, 1], tmp5[1, 2]);
        side.Origin = origin;

        // read the material
        if((token = lexer.ReadTokenOnLine()) == null)
        {
            lexer.Error("idMapBrush::Parse: unable to read brush side material");
            return null;
        }

        // we had an implicit 'textures/' in the old format...
        if(version < 2.0f)
        {
            side.Material = "textures/" + token.ToString();
        }
        else
        {
            side.Material = token.ToString();
        }

        // Q2 allowed override of default flags and values, but we don't any more.
        // skip up to three trailing tokens on the line if present
        if(lexer.ReadTokenOnLine() != null)
        {
            if(lexer.ReadTokenOnLine() != null)
            {
                if(lexer.ReadTokenOnLine() != null)
                {
                }
            }
        }
    }
    while(true);

    if(lexer.ExpectTokenString("}") == false)
    {
        return null;
    }

    idMapBrush brush = new idMapBrush();

    foreach(idMapBrushSide s in sides)
    {
        brush.AddSide(s);
    }

    brush.Dict = dict;

    return brush;
}
/// <summary>
/// Parses the material, if there are any errors during parsing the defaultShader will be set.
/// </summary>
/// <remarks>
/// Reads keywords until the material's closing brace, mutating the material's
/// fields as each global keyword is recognized; "{" opens a render stage which
/// is handed off to ParseStage. After the loop, implicit stages are added,
/// interaction stages are sorted, and two-sided/texgen fixups are applied.
/// </remarks>
/// <param name="lexer">Source stream, positioned just inside the material's opening brace.</param>
private void ParseMaterial(idLexer lexer)
{
    _registerCount = PredefinedRegisterCount; // leave space for the parms to be copied in.

    for(int i = 0; i < _registerCount; i++)
    {
        _parsingData.RegisterIsTemporary[i] = true; // they aren't constants that can be folded.
    }

    TextureRepeat textureRepeatDefault = TextureRepeat.Repeat; // allow a global setting for repeat.
    idToken token = null;

    string tokenValue;
    string tokenLower;
    int count;

    while(true)
    {
        if(TestMaterialFlag(Renderer.MaterialFlags.Defaulted) == true)
        {
            // we have a parse error.
            return;
        }

        if((token = lexer.ExpectAnyToken()) == null)
        {
            this.MaterialFlag = MaterialFlags.Defaulted;
            return;
        }

        tokenValue = token.ToString();
        tokenLower = tokenValue.ToLower();

        // end of material definition
        if(tokenLower == "}")
        {
            break;
        }
        else if(tokenLower == "qer_editorimage")
        {
            token = lexer.ReadTokenOnLine();
            _editorImageName = (token != null) ? token.ToString() : string.Empty;

            lexer.SkipRestOfLine();
        }
        else if(tokenLower == "description")
        {
            token = lexer.ReadTokenOnLine();
            _description = (token != null) ? token.ToString() : string.Empty;
        }
        // check for the surface / content bit flags.
        else if(CheckSurfaceParameter(token) == true)
        {
            // handled entirely by CheckSurfaceParameter; nothing more to do here
        }
        else if(tokenLower == "polygonoffset")
        {
            this.MaterialFlag = Renderer.MaterialFlags.PolygonOffset;

            // optional magnitude; defaults to 1 when omitted
            if((token = lexer.ReadTokenOnLine()) == null)
            {
                _polygonOffset = 1;
            }
            else
            {
                _polygonOffset = token.ToFloat();
            }
        }
        // noshadow.
        else if(tokenLower == "noshadows")
        {
            this.MaterialFlag = MaterialFlags.NoShadows;
        }
        else if(tokenLower == "suppressinsubview")
        {
            _suppressInSubview = true;
        }
        else if(tokenLower == "portalsky")
        {
            _portalSky = true;
        }
        else if(tokenLower == "noselfshadow")
        {
            this.MaterialFlag = Renderer.MaterialFlags.NoSelfShadow;
        }
        else if(tokenLower == "noportalfog")
        {
            this.MaterialFlag = Renderer.MaterialFlags.NoPortalFog;
        }
        // forceShadows allows nodraw surfaces to cast shadows.
        else if(tokenLower == "forceshadows")
        {
            this.MaterialFlag = Renderer.MaterialFlags.ForceShadows;
        }
        // overlay / decal suppression.
        else if(tokenLower == "nooverlays")
        {
            _allowOverlays = false;
        }
        // monster blood overlay forcing for alpha tested or translucent surfaces.
        else if(tokenLower == "forceoverlays")
        {
            _parsingData.ForceOverlays = true;
        }
        else if(tokenLower == "translucent")
        {
            _coverage = MaterialCoverage.Translucent;
        }
        // global zero clamp.
        else if(tokenLower == "zeroclamp")
        {
            textureRepeatDefault = TextureRepeat.ClampToZero;
        }
        // global clamp.
        else if(tokenLower == "clamp")
        {
            textureRepeatDefault = TextureRepeat.Clamp;
        }
        // global clamp.
        else if(tokenLower == "alphazeroclamp")
        {
            // NOTE(review): maps to ClampToZero rather than ClampToZeroAlpha, unlike the
            // per-stage 'alphazeroclamp' keyword in ParseStage/ParseFragmentMap — confirm
            // this matches the original engine before changing.
            textureRepeatDefault = TextureRepeat.ClampToZero;
        }
        // forceOpaque is used for skies-behind-windows.
        else if(tokenLower == "forceopaque")
        {
            _coverage = MaterialCoverage.Opaque;
        }
        else if(tokenLower == "twosided")
        {
            _cullType = CullType.TwoSided;

            // twoSided implies no-shadows, because the shadow
            // volume would be coplanar with the surface, giving depth fighting
            // we could make this no-self-shadows, but it may be more important
            // to receive shadows from no-self-shadow monsters.
            this.MaterialFlag = Renderer.MaterialFlags.NoShadows;
        }
        else if(tokenLower == "backsided")
        {
            _cullType = CullType.Back;

            // the shadow code doesn't handle this, so just disable shadows.
            // We could fix this in the future if there was a need.
            this.MaterialFlag = Renderer.MaterialFlags.NoShadows;
        }
        else if(tokenLower == "foglight")
        {
            _fogLight = true;
        }
        else if(tokenLower == "blendlight")
        {
            _blendLight = true;
        }
        else if(tokenLower == "ambientlight")
        {
            _ambientLight = true;
        }
        else if(tokenLower == "mirror")
        {
            _sort = (float) MaterialSort.Subview;
            _coverage = MaterialCoverage.Opaque;
        }
        else if(tokenLower == "nofog")
        {
            _noFog = true;
        }
        else if(tokenLower == "unsmoothedtangents")
        {
            _unsmoothedTangents = true;
        }
        // lightFallofImage <imageprogram>
        // specifies the image to use for the third axis of projected
        // light volumes.
        else if(tokenLower == "lightfalloffimage")
        {
            _lightFalloffImage = idE.ImageManager.ImageFromFile(ParsePastImageProgram(lexer), TextureFilter.Default, false, TextureRepeat.Clamp, TextureDepth.Default);
        }
        // guisurf <guifile> | guisurf entity
        // an entity guisurf must have an idUserInterface
        // specified in the renderEntity.
        else if(tokenLower == "guisurf")
        {
            token = lexer.ReadTokenOnLine();
            tokenLower = token.ToString().ToLower();

            if(tokenLower == "entity")
            {
                _entityGui = 1;
            }
            else if(tokenLower == "entity2")
            {
                _entityGui = 2;
            }
            else if(tokenLower == "entity3")
            {
                _entityGui = 3;
            }
            else
            {
                // a literal gui file rather than an entity-supplied interface
                _userInterface = idE.UIManager.FindInterface(token.ToString(), true);
            }
        }
        // sort.
        else if(tokenLower == "sort")
        {
            ParseSort(lexer);
        }
        // spectrum <integer>.
        else if(tokenLower == "spectrum")
        {
            // NOTE(review): assumes a token follows on the same line; a bare "spectrum"
            // keyword would throw here — confirm against the original parser.
            int.TryParse(lexer.ReadTokenOnLine().ToString(), out _spectrum);
        }
        // deform < sprite | tube | flare >.
        else if(tokenLower == "deform")
        {
            ParseDeform(lexer);
        }
        // decalInfo <staySeconds> <fadeSeconds> (<start rgb>) (<end rgb>).
        else if(tokenLower == "decalinfo")
        {
            ParseDecalInfo(lexer);
        }
        // renderbump <args...>.
        else if(tokenLower == "renderbump")
        {
            _renderBump = lexer.ParseRestOfLine();
        }
        // diffusemap for stage shortcut.
        else if(tokenLower == "diffusemap")
        {
            // expand the shortcut into a synthetic stage block and parse it
            idLexer newLexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
            newLexer.LoadMemory(string.Format("blend diffusemap\nmap {0}\n}}\n", ParsePastImageProgram(lexer)), "diffusemap");

            ParseStage(newLexer, textureRepeatDefault);
        }
        // specularmap for stage shortcut.
        else if(tokenLower == "specularmap")
        {
            idLexer newLexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
            newLexer.LoadMemory(string.Format("blend specularmap\nmap {0}\n}}\n", ParsePastImageProgram(lexer)), "specularmap");

            ParseStage(newLexer, textureRepeatDefault);
        }
        // normalmap for stage shortcut.
        else if(tokenLower == "bumpmap")
        {
            idLexer newLexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
            newLexer.LoadMemory(string.Format("blend bumpmap\nmap {0}\n}}\n", ParsePastImageProgram(lexer)), "bumpmap");

            ParseStage(newLexer, textureRepeatDefault);
        }
        // DECAL_MACRO for backwards compatibility with the preprocessor macros.
        else if(tokenLower == "decal_macro")
        {
            // polygonOffset
            this.MaterialFlag = Renderer.MaterialFlags.PolygonOffset;
            _polygonOffset = -1;

            // discrete
            _surfaceFlags |= SurfaceFlags.Discrete;
            _contentFlags &= ~ContentFlags.Solid;

            // sort decal.
            _sort = (float) MaterialSort.Decal;

            // noShadows
            this.MaterialFlag = Renderer.MaterialFlags.NoShadows;
        }
        else if(tokenValue == "{")
        {
            // create the new stage.
            ParseStage(lexer, textureRepeatDefault);
        }
        else
        {
            idConsole.WriteLine("unknown general material parameter '{0}' in '{1}'", tokenValue, this.Name);
            return;
        }
    }

    // add _flat or _white stages if needed.
    AddImplicitStages();

    // order the diffuse / bump / specular stages properly.
    SortInteractionStages();

    // if we need to do anything with normals (lighting or environment mapping)
    // and two sided lighting was asked for, flag
    // shouldCreateBackSides() and change culling back to single sided,
    // so we get proper tangent vectors on both sides.

    // we can't just call ReceivesLighting(), because the stages are still
    // in temporary form.
    if(_cullType == CullType.TwoSided)
    {
        count = _parsingData.Stages.Count;

        for(int i = 0; i < count; i++)
        {
            if((_parsingData.Stages[i].Lighting != StageLighting.Ambient) || (_parsingData.Stages[i].Texture.TextureCoordinates != TextureCoordinateGeneration.Explicit))
            {
                if(_cullType == CullType.TwoSided)
                {
                    _cullType = CullType.Front;
                    _shouldCreateBackSides = true;
                }

                break;
            }
        }
    }

    // currently a surface can only have one unique texgen for all the stages on old hardware.
    TextureCoordinateGeneration firstGen = TextureCoordinateGeneration.Explicit;

    count = _parsingData.Stages.Count;

    for(int i = 0; i < count; i++)
    {
        if(_parsingData.Stages[i].Texture.TextureCoordinates != TextureCoordinateGeneration.Explicit)
        {
            if(firstGen == TextureCoordinateGeneration.Explicit)
            {
                firstGen = _parsingData.Stages[i].Texture.TextureCoordinates;
            }
            else if(firstGen != _parsingData.Stages[i].Texture.TextureCoordinates)
            {
                idConsole.Warning("material '{0}' has multiple stages with a texgen", this.Name);
                break;
            }
        }
    }
}
/// <summary>
/// Loads and parses a map file into this idMapFile's entity list.
/// </summary>
/// <remarks>
/// Normally this will use a .reg file instead of a .map file if it exists,
/// which is what the game and dmap want, but the editor will want to always
/// load a .map file.
/// After parsing, worldspawn directives are honored: "removeEntities",
/// "overrideMaterial", "forceEntityNames" and "moveFuncGroups".
/// </remarks>
/// <param name="fileName">Does not require an extension.</param>
/// <param name="ignoreRegion">True to skip the .reg file and load the .map directly.</param>
/// <param name="osPath">Passed through to the lexer's file loading.</param>
/// <returns>True if a file was loaded and parsed; false if neither .reg nor .map could be opened.</returns>
public bool Parse(string fileName, bool ignoreRegion = false, bool osPath = false)
{
    if(this.Disposed == true)
    {
        throw new ObjectDisposedException(this.GetType().Name);
    }

    _hasPrimitiveData = false;

    // strip any extension; .reg/.map are appended below
    _name = Path.Combine(Path.GetDirectoryName(fileName), Path.GetFileNameWithoutExtension(fileName));

    string fullName = _name;

    // no string concatenation for epairs and allow path names for materials
    idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
    idMapEntity mapEnt;

    if(ignoreRegion == false)
    {
        // try loading a .reg file first
        lexer.LoadFile(fullName + ".reg", osPath);
    }

    if(lexer.IsLoaded == false)
    {
        // now try a .map file
        lexer.LoadFile(fullName + ".map", osPath);

        if(lexer.IsLoaded == false)
        {
            // didn't get anything at all
            return false;
        }
    }

    _version = idMapFile.OldMapVersion;
    _fileTime = lexer.FileTime;
    _entities.Clear();

    // optional "Version <number>" header; absent in old-format maps
    if(lexer.CheckTokenString("Version") == true)
    {
        _version = lexer.ReadTokenOnLine().ToFloat();
    }

    // entity 0 is treated as the worldspawn by idMapEntity.Parse
    while(true)
    {
        if((mapEnt = idMapEntity.Parse(lexer, (_entities.Count == 0), _version)) == null)
        {
            break;
        }

        _entities.Add(mapEnt);
    }

    idConsole.Warning("TODO: SetGeometryCRC();");

    // if the map has a worldspawn
    if(_entities.Count > 0)
    {
        // "removeEntities" "classname" can be set in the worldspawn to remove all entities with the given classname
        foreach(KeyValuePair<string, string> removeEntities in _entities[0].Dict.MatchPrefix("removeEntities"))
        {
            RemoveEntities(removeEntities.Value);
        }

        // "overrideMaterial" "material" can be set in the worldspawn to reset all materials
        string material;

        // snapshot the count after removals; the loops below don't add or remove entities
        int entityCount = _entities.Count;
        int primitiveCount = 0;
        int sideCount = 0;

        if((material = (_entities[0].Dict.GetString("overrideMaterial", ""))) != string.Empty)
        {
            for(int i = 0; i < entityCount; i++)
            {
                mapEnt = _entities[i];
                primitiveCount = mapEnt.Primitives.Count;

                for(int j = 0; j < primitiveCount; j++)
                {
                    idMapPrimitive mapPrimitive = mapEnt.GetPrimitive(j);

                    switch(mapPrimitive.Type)
                    {
                        case MapPrimitiveType.Brush:
                            // every side of every brush gets the override material
                            idMapBrush mapBrush = (idMapBrush) mapPrimitive;
                            sideCount = mapBrush.SideCount;

                            for(int k = 0; k < sideCount; k++)
                            {
                                mapBrush.GetSide(k).Material = material;
                            }
                            break;

                        case MapPrimitiveType.Patch:
                            idConsole.Warning("TODO: PATCH");
                            // TODO: ((idMapPatch) mapPrimitive).Material = material;
                            break;
                    }
                }
            }
        }

        // force all entities to have a name key/value pair
        if(_entities[0].Dict.GetBool("forceEntityNames") == true)
        {
            // start at 1: the worldspawn itself is never renamed
            for(int i = 1; i < entityCount; i++)
            {
                mapEnt = _entities[i];

                if(mapEnt.Dict.ContainsKey("name") == false)
                {
                    mapEnt.Dict.Set("name", string.Format("{0}{1}", mapEnt.Dict.GetString("classname", "forcedName"), i));
                }
            }
        }

        // move the primitives of any func_group entities to the worldspawn
        if(_entities[0].Dict.GetBool("moveFuncGroups") == true)
        {
            for(int i = 1; i < entityCount; i++)
            {
                mapEnt = _entities[i];

                if(mapEnt.Dict.GetString("classname").ToLower() == "func_group")
                {
                    // the emptied func_group entity itself is kept in the list
                    _entities[0].Primitives.AddRange(mapEnt.Primitives);
                    mapEnt.Primitives.Clear();
                }
            }
        }
    }

    _hasPrimitiveData = true;

    return true;
}
/// <summary>
/// Parses a vertex parameter: vertexParm &lt;index&gt; &lt;expr&gt; [, &lt;expr&gt; [, &lt;expr&gt; [, &lt;expr&gt;]]].
/// </summary>
/// <remarks>
/// If there is a single value, it will be repeated across all elements.
/// If there are two values, 3 = 0.0, 4 = 1.0.
/// if there are three values, 4 = 1.0.
/// </remarks>
/// <param name="lexer">Source stream, positioned after the 'vertexParm' keyword.</param>
/// <param name="newStage">Stage whose VertexParameters row is filled in.</param>
private void ParseVertexParameter(idLexer lexer, ref NewMaterialStage newStage)
{
    idToken token = lexer.ReadTokenOnLine();
    int parm;

    // validate before use: the token must exist, be numeric, and be a legal row
    // index. (previously token.ToInt32() ran before validation — NRE on a bare
    // 'vertexParm' — and the upper bound was never checked, so 'vertexParm 4'
    // threw IndexOutOfRangeException instead of defaulting the material)
    if((token == null) || (int.TryParse(token.ToString(), out parm) == false) || (parm < 0) || (parm >= newStage.VertexParameters.GetLength(0)))
    {
        idConsole.Warning("bad vertexParm number");
        this.MaterialFlag = MaterialFlags.Defaulted;

        return;
    }

    newStage.VertexParameters[parm, 0] = ParseExpression(lexer);
    token = lexer.ReadTokenOnLine();

    if((token == null) || (token.ToString() != ","))
    {
        // single value: replicate across all four components
        newStage.VertexParameters[parm, 1] = newStage.VertexParameters[parm, 2] = newStage.VertexParameters[parm, 3] = newStage.VertexParameters[parm, 0];
    }
    else
    {
        newStage.VertexParameters[parm, 1] = ParseExpression(lexer);
        token = lexer.ReadTokenOnLine();

        if((token == null) || (token.ToString() != ","))
        {
            // two values: z defaults to 0, w defaults to 1
            newStage.VertexParameters[parm, 2] = GetExpressionConstant(0);
            newStage.VertexParameters[parm, 3] = GetExpressionConstant(1);
        }
        else
        {
            newStage.VertexParameters[parm, 2] = ParseExpression(lexer);
            token = lexer.ReadTokenOnLine();

            if((token == null) || (token.ToString() != ","))
            {
                // three values: w defaults to 1
                newStage.VertexParameters[parm, 3] = GetExpressionConstant(1);
            }
            else
            {
                newStage.VertexParameters[parm, 3] = ParseExpression(lexer);
            }
        }
    }
}
/// <summary>
/// Parses a fragmentMap stage directive: fragmentMap &lt;unit&gt; [options...] &lt;imageprogram&gt;.
/// </summary>
/// <param name="lexer">Source stream, positioned after the 'fragmentMap' keyword.</param>
/// <param name="newStage">Stage the fragment program image belongs to (assignment still TODO).</param>
private void ParseFragmentMap(idLexer lexer, ref NewMaterialStage newStage)
{
    TextureFilter textureFilter = TextureFilter.Default;
    TextureRepeat textureRepeat = TextureRepeat.Repeat;
    TextureDepth textureDepth = TextureDepth.Default;
    CubeFiles cubeMap = CubeFiles.TwoD;
    bool allowPicmip = true;

    idToken token = lexer.ReadTokenOnLine();
    int unit;

    // validate before use: the token must exist and be a non-negative integer.
    // (previously token.ToInt32() ran before the TryParse validation)
    if((token == null) || (int.TryParse(token.ToString(), out unit) == false) || (unit < 0))
    {
        idConsole.Warning("bad fragmentMap number");
        this.MaterialFlag = MaterialFlags.Defaulted;

        return;
    }

    // unit 1 is the normal map.. make sure it gets flagged as the proper depth
    if(unit == 1)
    {
        textureDepth = TextureDepth.Bump;
    }

    string tokenValue;
    string tokenLower;

    while(true)
    {
        // FIX: stop at end of line instead of dereferencing a null token;
        // the image program parser will report the missing image name
        if((token = lexer.ReadTokenOnLine()) == null)
        {
            break;
        }

        tokenValue = token.ToString();
        tokenLower = tokenValue.ToLower();

        if(tokenLower == "cubemap")
        {
            cubeMap = CubeFiles.Native;
        }
        else if(tokenLower == "cameracubemap")
        {
            cubeMap = CubeFiles.Camera;
        }
        else if(tokenLower == "nearest")
        {
            textureFilter = TextureFilter.Nearest;
        }
        else if(tokenLower == "linear")
        {
            textureFilter = TextureFilter.Linear;
        }
        else if(tokenLower == "clamp")
        {
            textureRepeat = TextureRepeat.Clamp;
        }
        else if(tokenLower == "noclamp")
        {
            textureRepeat = TextureRepeat.Repeat;
        }
        else if(tokenLower == "zeroclamp")
        {
            textureRepeat = TextureRepeat.ClampToZero;
        }
        else if(tokenLower == "alphazeroclamp")
        {
            textureRepeat = TextureRepeat.ClampToZeroAlpha;
        }
        else if(tokenLower == "forcehighquality")
        {
            textureDepth = TextureDepth.HighQuality;
        }
        else if((tokenLower == "uncompressed") || (tokenLower == "highquality"))
        {
            if(idE.CvarSystem.GetInteger("image_ignoreHighQuality") == 0)
            {
                textureDepth = TextureDepth.HighQuality;
            }
        }
        else if(tokenLower == "nopicmip")
        {
            allowPicmip = false;
        }
        else
        {
            // assume anything else is the image name.
            lexer.UnreadToken = token;
            break;
        }
    }

    // consume the image program from the stream; the result is unused until
    // fragment program images are implemented (see TODO below)
    string image = ParsePastImageProgram(lexer);

    // TODO: fragment program images. remember we use a global
    // sampler state. it'll ignore these texturemin/max filters.
    idConsole.Warning("TODO: fragment program images");

    /* newStage->fragmentProgramImages[unit] =
        globalImages->ImageFromFile( str, tf, allowPicmip, trp, td, cubeMap );
    if ( !newStage->fragmentProgramImages[unit] ) {
        newStage->fragmentProgramImages[unit] = globalImages->defaultImage;
    }*/
}
private void ParseStage(idLexer lexer, TextureRepeat textureRepeatDefault) { TextureFilter textureFilter = TextureFilter.Default; TextureRepeat textureRepeat = textureRepeatDefault; TextureDepth textureDepth = TextureDepth.Default; CubeFiles cubeMap = CubeFiles.TwoD; bool allowPicmip = true; string imageName = string.Empty; NewMaterialStage newStage = new NewMaterialStage(); newStage.VertexParameters = new int[4, 4]; MaterialStage materialStage = new MaterialStage(); materialStage.ConditionRegister = GetExpressionConstant(1); materialStage.Color.Registers = new int[] { GetExpressionConstant(1), GetExpressionConstant(1), GetExpressionConstant(1), GetExpressionConstant(1) }; int[,] matrix = new int[2, 3]; idToken token; int a, b; string tokenValue; string tokenLower; while(true) { if(TestMaterialFlag(MaterialFlags.Defaulted) == true) { // we have a parse error return; } else if((token = lexer.ExpectAnyToken()) == null) { this.MaterialFlag = MaterialFlags.Defaulted; return; } tokenValue = token.ToString(); tokenLower = tokenValue.ToLower(); // the close brace for the entire material ends the draw block if(tokenLower == "}") { break; } // BSM Nerve: Added for stage naming in the material editor else if(tokenLower == "name") { lexer.SkipRestOfLine(); } // image options else if(tokenLower == "blend") { ParseBlend(lexer, ref materialStage); } else if(tokenLower == "map") { imageName = ParsePastImageProgram(lexer); } else if(tokenLower == "remoterendermap") { materialStage.Texture.Dynamic = DynamicImageType.RemoteRender; materialStage.Texture.Width = lexer.ParseInt(); materialStage.Texture.Height = lexer.ParseInt(); } else if(tokenLower == "mirrorrendermap") { materialStage.Texture.Dynamic = DynamicImageType.MirrorRender; materialStage.Texture.Width = lexer.ParseInt(); materialStage.Texture.Height = lexer.ParseInt(); materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.Screen; } else if(tokenLower == "xrayrendermap") { materialStage.Texture.Dynamic = 
DynamicImageType.XRayRender; materialStage.Texture.Width = lexer.ParseInt(); materialStage.Texture.Height = lexer.ParseInt(); materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.Screen; } else if(tokenLower == "screen") { materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.Screen; } else if(tokenLower == "screen2") { materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.Screen; } else if(tokenLower == "glasswarp") { materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.GlassWarp; } else if(tokenLower == "videomap") { // note that videomaps will always be in clamp mode, so texture // coordinates had better be in the 0 to 1 range if((token = lexer.ReadToken()) == null) { idConsole.Warning("missing parameter for 'videoMap' keyword in material '{0}'", this.Name); } else { bool loop = false; if(token.ToString().Equals("loop", StringComparison.OrdinalIgnoreCase) == true) { loop = true; if((token = lexer.ReadToken()) == null) { idConsole.Warning("missing parameter for 'videoMap' keyword in material '{0}'", this.Name); continue; } } idConsole.Warning("TODO: material videoMap keyword"); // TODO: cinematic /*ts->cinematic = idCinematic::Alloc(); ts->cinematic->InitFromFile( token.c_str(), loop );*/ } } else if(tokenLower == "soundmap") { if((token = lexer.ReadToken()) == null) { idConsole.Warning("missing parameter for 'soundMap' keyword in material '{0}'", this.Name); } else { idConsole.Warning("TODO: material soundMap keyword"); // TODO /*ts->cinematic = new idSndWindow(); ts->cinematic->InitFromFile( token.c_str(), true );*/ } } else if(tokenLower == "cubemap") { imageName = ParsePastImageProgram(lexer); cubeMap = CubeFiles.Native; } else if(tokenLower == "cameracubemap") { imageName = ParsePastImageProgram(lexer); cubeMap = CubeFiles.Camera; } else if(tokenLower == "ignorealphatest") { materialStage.IgnoreAlphaTest = true; } else if(tokenLower == "nearest") { textureFilter = TextureFilter.Nearest; } 
else if(tokenLower == "linear") { textureFilter = TextureFilter.Linear; } else if(tokenLower == "clamp") { textureRepeat = TextureRepeat.Clamp; } else if(tokenLower == "noclamp") { textureRepeat = TextureRepeat.Repeat; } else if(tokenLower == "zeroclamp") { textureRepeat = TextureRepeat.ClampToZero; } else if(tokenLower == "alphazeroclamp") { textureRepeat = TextureRepeat.ClampToZeroAlpha; } else if((tokenLower == "uncompressed") || (tokenLower == "highquality")) { if(idE.CvarSystem.GetInteger("image_ignoreHighQuality") == 0) { textureDepth = TextureDepth.HighQuality; } } else if(tokenLower == "forcehighquality") { textureDepth = TextureDepth.HighQuality; } else if(tokenLower == "nopicmip") { allowPicmip = false; } else if(tokenLower == "vertexcolor") { materialStage.VertexColor = StageVertexColor.Modulate; } else if(tokenLower == "inversevertexcolor") { materialStage.VertexColor = StageVertexColor.InverseModulate; } // privatePolygonOffset else if(tokenLower == "privatepolygonoffset") { if((token = lexer.ReadTokenOnLine()) == null) { materialStage.PrivatePolygonOffset = 1; } else { // explict larger (or negative) offset lexer.UnreadToken = token; materialStage.PrivatePolygonOffset = lexer.ParseFloat(); } } // texture coordinate generation else if(tokenLower == "texgen") { token = lexer.ExpectAnyToken(); tokenValue = token.ToString(); tokenLower = tokenValue.ToLower(); if(tokenLower == "normal") { materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.DiffuseCube; } else if(tokenLower == "reflect") { materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.ReflectCube; } else if(tokenLower == "skybox") { materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.SkyboxCube; } else if(tokenLower == "wobblesky") { materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.WobbleSkyCube; _texGenRegisters = new int[4]; _texGenRegisters[0] = ParseExpression(lexer); _texGenRegisters[1] = ParseExpression(lexer); 
_texGenRegisters[2] = ParseExpression(lexer); } else { idConsole.Warning("bad texGen '{0}' in material {1}", tokenValue, this.Name); this.MaterialFlag = MaterialFlags.Defaulted; } } else if((tokenLower == "scroll") || (tokenLower == "translate")) { a = ParseExpression(lexer); MatchToken(lexer, ","); b = ParseExpression(lexer); matrix[0, 0] = GetExpressionConstant(1); matrix[0, 1] = GetExpressionConstant(0); matrix[0, 2] = a; matrix[1, 0] = GetExpressionConstant(0); matrix[1, 1] = GetExpressionConstant(1); matrix[1, 2] = b; MultiplyTextureMatrix(ref materialStage.Texture, matrix); } else if(tokenLower == "scale") { a = ParseExpression(lexer); MatchToken(lexer, ","); b = ParseExpression(lexer); // this just scales without a centering matrix[0, 0] = a; matrix[0, 1] = GetExpressionConstant(0); matrix[0, 2] = GetExpressionConstant(0); matrix[1, 0] = GetExpressionConstant(0); matrix[1, 1] = b; matrix[1, 2] = GetExpressionConstant(0); MultiplyTextureMatrix(ref materialStage.Texture, matrix); } else if(tokenLower == "centerscale") { a = ParseExpression(lexer); MatchToken(lexer, ","); b = ParseExpression(lexer); // this subtracts 0.5, then scales, then adds 0.5 matrix[0, 0] = a; matrix[0, 1] = GetExpressionConstant(0); matrix[0, 2] = EmitOp(GetExpressionConstant(0.5f), EmitOp(GetExpressionConstant(0.5f), a, ExpressionOperationType.Multiply), ExpressionOperationType.Subtract); matrix[1, 0] = GetExpressionConstant(0); matrix[1, 1] = b; matrix[1, 2] = EmitOp(GetExpressionConstant(0.5f), EmitOp(GetExpressionConstant(0.5f), b, ExpressionOperationType.Multiply), ExpressionOperationType.Subtract); MultiplyTextureMatrix(ref materialStage.Texture, matrix); } else if(tokenLower == "shear") { a = ParseExpression(lexer); MatchToken(lexer, ","); b = ParseExpression(lexer); // this subtracts 0.5, then shears, then adds 0.5 matrix[0, 0] = GetExpressionConstant(1); matrix[0, 1] = a; matrix[0, 2] = EmitOp(GetExpressionConstant(-0.5f), a, ExpressionOperationType.Multiply); matrix[1, 0] = b; 
matrix[1, 1] = GetExpressionConstant(1); matrix[1, 2] = EmitOp(GetExpressionConstant(-0.5f), b, ExpressionOperationType.Multiply); MultiplyTextureMatrix(ref materialStage.Texture, matrix); } else if(tokenLower == "rotate") { int sinReg, cosReg; // in cycles a = ParseExpression(lexer); idDeclTable table = idE.DeclManager.FindType<idDeclTable>(DeclType.Table, "sinTable", false); if(table == null) { idConsole.Warning("no sinTable for rotate defined"); this.MaterialFlag = MaterialFlags.Defaulted; return; } sinReg = EmitOp(table.Index, a, ExpressionOperationType.Table); table = idE.DeclManager.FindType<idDeclTable>(DeclType.Table, "cosTable", false); if(table == null) { idConsole.Warning("no cosTable for rotate defined"); this.MaterialFlag = MaterialFlags.Defaulted; return; } cosReg = EmitOp(table.Index, a, ExpressionOperationType.Table); // this subtracts 0.5, then rotates, then adds 0.5 matrix[0, 0] = cosReg; matrix[0, 1] = EmitOp(GetExpressionConstant(0), sinReg, ExpressionOperationType.Subtract); matrix[0, 2] = EmitOp(EmitOp(EmitOp(GetExpressionConstant(-0.5f), cosReg, ExpressionOperationType.Multiply), EmitOp(GetExpressionConstant(0.5f), sinReg, ExpressionOperationType.Multiply), ExpressionOperationType.Add), GetExpressionConstant(0.5f), ExpressionOperationType.Add); matrix[1, 0] = sinReg; matrix[1, 1] = cosReg; matrix[1, 2] = EmitOp(EmitOp(EmitOp(GetExpressionConstant(-0.5f), sinReg, ExpressionOperationType.Multiply), EmitOp(GetExpressionConstant(-0.5f), cosReg, ExpressionOperationType.Multiply), ExpressionOperationType.Add), GetExpressionConstant(0.5f), ExpressionOperationType.Add); MultiplyTextureMatrix(ref materialStage.Texture, matrix); } // color mask options else if(tokenLower == "maskred") { materialStage.DrawStateBits |= MaterialStates.RedMask; } else if(tokenLower == "maskgreen") { materialStage.DrawStateBits |= MaterialStates.GreenMask; } else if(tokenLower == "maskblue") { materialStage.DrawStateBits |= MaterialStates.BlueMask; } else if(tokenLower == 
"maskalpha") { materialStage.DrawStateBits |= MaterialStates.AlphaMask; } else if(tokenLower == "maskcolor") { materialStage.DrawStateBits |= MaterialStates.ColorMask; } else if(tokenLower == "maskdepth") { materialStage.DrawStateBits |= MaterialStates.DepthMask; } else if(tokenLower == "alphatest") { materialStage.HasAlphaTest = true; materialStage.AlphaTestRegister = ParseExpression(lexer); _coverage = MaterialCoverage.Perforated; } // shorthand for 2D modulated else if(tokenLower == "colored") { materialStage.Color.Registers[0] = (int) ExpressionRegister.Parm0; materialStage.Color.Registers[1] = (int) ExpressionRegister.Parm1; materialStage.Color.Registers[2] = (int) ExpressionRegister.Parm2; materialStage.Color.Registers[3] = (int) ExpressionRegister.Parm3; _parsingData.RegistersAreConstant = false; } else if(tokenLower == "color") { materialStage.Color.Registers[0] = ParseExpression(lexer); MatchToken(lexer, ","); materialStage.Color.Registers[1] = ParseExpression(lexer); MatchToken(lexer, ","); materialStage.Color.Registers[2] = ParseExpression(lexer); MatchToken(lexer, ","); materialStage.Color.Registers[3] = ParseExpression(lexer); } else if(tokenLower == "red") { materialStage.Color.Registers[0] = ParseExpression(lexer); } else if(tokenLower == "green") { materialStage.Color.Registers[1] = ParseExpression(lexer); } else if(tokenLower == "blue") { materialStage.Color.Registers[2] = ParseExpression(lexer); } else if(tokenLower == "alpha") { materialStage.Color.Registers[3] = ParseExpression(lexer); } else if(tokenLower == "rgb") { materialStage.Color.Registers[0] = materialStage.Color.Registers[1] = materialStage.Color.Registers[2] = ParseExpression(lexer); } else if(tokenLower == "rgba") { materialStage.Color.Registers[0] = materialStage.Color.Registers[1] = materialStage.Color.Registers[2] = materialStage.Color.Registers[3] = ParseExpression(lexer); } else if(tokenLower == "if") { materialStage.ConditionRegister = ParseExpression(lexer); } else 
if(tokenLower == "program") { if((token = lexer.ReadTokenOnLine()) != null) { idConsole.Warning("TODO: material program keyword"); // TODO /*newStage.vertexProgram = R_FindARBProgram( GL_VERTEX_PROGRAM_ARB, token.c_str() ); newStage.fragmentProgram = R_FindARBProgram( GL_FRAGMENT_PROGRAM_ARB, token.c_str() );*/ } } else if(tokenLower == "fragmentprogram") { if((token = lexer.ReadTokenOnLine()) != null) { idConsole.Warning("TODO: material fragmentProgram keyword"); // TODO //newStage.fragmentProgram = R_FindARBProgram( GL_FRAGMENT_PROGRAM_ARB, token.c_str() ); } } else if(tokenLower == "vertexprogram") { if((token = lexer.ReadTokenOnLine()) != null) { idConsole.Warning("TODO: material vertexProgram keyword"); // TODO //newStage.vertexProgram = R_FindARBProgram( GL_VERTEX_PROGRAM_ARB, token.c_str() ); } } else if(tokenLower == "megatexture") { if((token = lexer.ReadTokenOnLine()) != null) { idConsole.Warning("TODO: material megaTexture keyword"); // TODO /*newStage.megaTexture = new idMegaTexture; if ( !newStage.megaTexture->InitFromMegaFile( token.c_str() ) ) { delete newStage.megaTexture; SetMaterialFlag( MF_DEFAULTED ); continue; } newStage.vertexProgram = R_FindARBProgram( GL_VERTEX_PROGRAM_ARB, "megaTexture.vfp" ); newStage.fragmentProgram = R_FindARBProgram( GL_FRAGMENT_PROGRAM_ARB, "megaTexture.vfp" );*/ } } else if(tokenLower == "vertexparm") { ParseVertexParameter(lexer, ref newStage); } else if(tokenLower == "fragmentmap") { ParseFragmentMap(lexer, ref newStage); } else { idConsole.Warning("unknown token '{0}' in material '{1}'", tokenValue, this.Name); this.MaterialFlag = MaterialFlags.Defaulted; return; } } // if we are using newStage, allocate a copy of it if((newStage.FragmentProgram != 0) || (newStage.VertexProgram != 0)) { materialStage.NewStage = newStage; } // select a compressed depth based on what the stage is if(textureDepth == TextureDepth.Default) { switch(materialStage.Lighting) { case StageLighting.Bump: textureDepth = TextureDepth.Bump; 
break; case StageLighting.Diffuse: textureDepth = TextureDepth.Diffuse; break; case StageLighting.Specular: textureDepth = TextureDepth.Specular; break; } } // now load the image with all the parms we parsed if((imageName != null) && (imageName != string.Empty)) { materialStage.Texture.Image = idE.ImageManager.ImageFromFile(imageName, textureFilter, allowPicmip, textureRepeat, textureDepth, cubeMap); if(materialStage.Texture.Image == null) { materialStage.Texture.Image = idE.ImageManager.DefaultImage; } } else if(/*TODO: !ts->cinematic &&*/ (materialStage.Texture.Dynamic == 0) && (materialStage.NewStage.IsEmpty == true)) { idConsole.Warning("material '{0}' had stage with no image", this.Name); materialStage.Texture.Image = idE.ImageManager.DefaultImage; } // successfully parsed a stage. _parsingData.Stages.Add(materialStage); }
/// <summary>
/// Parses the "sort" material keyword: either a named <c>MaterialSort</c> value
/// (case-insensitive) or an explicit numeric sort priority.
/// </summary>
/// <param name="lexer">Lexer positioned just after the "sort" keyword; the value is read from the current line.</param>
private void ParseSort(idLexer lexer)
{
	idToken token = lexer.ReadTokenOnLine();

	if(token == null)
	{
		lexer.Warning("missing sort parameter");
		this.MaterialFlag = MaterialFlags.Defaulted;

		return;
	}

	// prefer a named sort value, otherwise treat the token as a raw float.
	// Enum.TryParse replaces the original Enum.Parse wrapped in a blanket
	// catch(Exception), avoiding exceptions for control flow while keeping
	// identical semantics (integer strings still parse as enum values,
	// anything else falls through to the float path).
	MaterialSort sortValue;

	if(Enum.TryParse<MaterialSort>(token.ToString(), true, out sortValue) == true)
	{
		_sort = (int) sortValue;
	}
	else
	{
		// on failure TryParse sets _sort to 0, matching the previous behavior
		float.TryParse(token.ToString(), out _sort);
	}
}
/// <summary>
/// Parses the body of a sound shader declaration (everything up to the closing
/// brace), filling in _parameters, _speakerMask, _description, _leadInVolume
/// and _altSound.
/// </summary>
/// <param name="lexer">Lexer positioned just inside the opening brace of the declaration.</param>
/// <returns>true on success; false on a parse error or out-of-range value.</returns>
private bool ParseMaterial(idLexer lexer)
{
	// defaults applied before any keyword overrides them
	_parameters.MinDistance = 1;
	_parameters.MaxDistance = 10;
	_parameters.Volume = 1;

	_speakerMask = 0;
	_altSound = null;

	idToken token;
	string tokenValue;
	// parsed from "minsamples" but not otherwise used in this port
	// NOTE(review): the original engine validated the loaded sample count against this — confirm whether that check still needs porting
	int sampleCount = 0;

	while(true)
	{
		if((token = lexer.ExpectAnyToken()) == null)
		{
			return false;
		}

		// keywords are matched case-insensitively by lower-casing the token
		tokenValue = token.ToString().ToLower();

		if(tokenValue == "}")
		{
			break;
		}
		// minimum number of sounds
		else if(tokenValue == "minsamples")
		{
			sampleCount = lexer.ParseInt();
		}
		else if(tokenValue == "description")
		{
			_description = lexer.ReadTokenOnLine().ToString();
		}
		else if(tokenValue == "mindistance")
		{
			_parameters.MinDistance = lexer.ParseFloat();
		}
		else if(tokenValue == "maxdistance")
		{
			_parameters.MaxDistance = lexer.ParseFloat();
		}
		else if(tokenValue == "shakes")
		{
			token = lexer.ExpectAnyToken();

			// a bare "shakes" with no number means full-strength shakes (1.0)
			if(token.Type == TokenType.Number)
			{
				_parameters.Shakes = token.ToFloat();
			}
			else
			{
				lexer.UnreadToken = token;
				_parameters.Shakes = 1.0f;
			}
		}
		else if(tokenValue == "reverb")
		{
			// both values are parsed only to keep the lexer position correct
			float reg0 = lexer.ParseFloat();

			if(lexer.ExpectTokenString(",") == false)
			{
				return false;
			}

			float reg1 = lexer.ParseFloat();
			// no longer supported
		}
		else if(tokenValue == "volume")
		{
			_parameters.Volume = lexer.ParseFloat();
		}
		// leadinVolume is used to allow light breaking leadin sounds to be much louder than the broken loop
		else if(tokenValue == "leadinvolume")
		{
			_leadInVolume = lexer.ParseFloat();
		}
		else if(tokenValue == "mask_center")
		{
			_speakerMask |= 1 << (int) Speakers.Center;
		}
		else if(tokenValue == "mask_left")
		{
			_speakerMask |= 1 << (int) Speakers.Left;
		}
		else if(tokenValue == "mask_right")
		{
			_speakerMask |= 1 << (int) Speakers.Right;
		}
		else if(tokenValue == "mask_backright")
		{
			_speakerMask |= 1 << (int) Speakers.BackRight;
		}
		else if(tokenValue == "mask_backleft")
		{
			_speakerMask |= 1 << (int) Speakers.BackLeft;
		}
		else if(tokenValue == "mask_lfe")
		{
			_speakerMask |= 1 << (int) Speakers.Lfe;
		}
		else if(tokenValue == "soundclass")
		{
			_parameters.SoundClass = lexer.ParseInt();

			if(_parameters.SoundClass < 0)
			{
				lexer.Warning("SoundClass out of range");
				return false;
			}
		}
		else if(tokenValue == "altsound")
		{
			if((token = lexer.ExpectAnyToken()) == null)
			{
				return false;
			}

			_altSound = idE.DeclManager.FindSound(token.ToString());
		}
		else if(tokenValue == "ordered")
		{
			// no longer supported
		}
		else if(tokenValue == "no_dups")
		{
			_parameters.Flags |= SoundMaterialFlags.NoDuplicates;
		}
		else if(tokenValue == "no_flicker")
		{
			_parameters.Flags |= SoundMaterialFlags.NoFlicker;
		}
		else if(tokenValue == "plain")
		{
			// no longer supported
		}
		else if(tokenValue == "looping")
		{
			_parameters.Flags |= SoundMaterialFlags.Looping;
		}
		else if(tokenValue == "no_occlusion")
		{
			_parameters.Flags |= SoundMaterialFlags.NoOcclusion;
		}
		else if(tokenValue == "private")
		{
			_parameters.Flags |= SoundMaterialFlags.PrivateSound;
		}
		else if(tokenValue == "antiprivate")
		{
			_parameters.Flags |= SoundMaterialFlags.AntiPrivateSound;
		}
		else if(tokenValue == "playonce")
		{
			_parameters.Flags |= SoundMaterialFlags.PlayOnce;
		}
		else if(tokenValue == "global")
		{
			_parameters.Flags |= SoundMaterialFlags.Global;
		}
		else if(tokenValue == "unclamped")
		{
			_parameters.Flags |= SoundMaterialFlags.Unclamped;
		}
		else if(tokenValue == "omnidirectional")
		{
			_parameters.Flags |= SoundMaterialFlags.OmniDirectional;
		}
		// onDemand can't be a parms, because we must track all references and overrides would confuse it
		else if(tokenValue == "ondemand")
		{
			// no longer loading sounds on demand
			// _onDemand = true;
		}
		// the wave files
		else if(tokenValue == "leadin")
		{
			// add to the leadin list
			if((token = lexer.ReadToken()) == null)
			{
				lexer.Warning("Expected sound after leadin");
				return false;
			}

			idConsole.Warning("TODO: leadin");
			/*if(soundSystemLocal.soundCache && numLeadins < maxSamples) { leadins[numLeadins] = soundSystemLocal.soundCache->FindSound(token.c_str(), onDemand); numLeadins++; }*/
		}
		else if((tokenValue.EndsWith(".wav") == true) || (tokenValue.EndsWith(".ogg") == true))
		{
			idConsole.Warning("TODO: .wav|.ogg");
			/*// add to the wav list 
			if(soundSystemLocal.soundCache && numEntries < maxSamples) { token.BackSlashesToSlashes(); idStr lang = cvarSystem->GetCVarString("sys_lang"); if(lang.Icmp("english") != 0 && token.Find("sound/vo/", false) >= 0) { idStr work = token; work.ToLower(); work.StripLeading("sound/vo/"); work = va("sound/vo/%s/%s", lang.c_str(), work.c_str()); if(fileSystem->ReadFile(work, NULL, NULL) > 0) { token = work; } else { // also try to find it with the .ogg extension work.SetFileExtension(".ogg"); if(fileSystem->ReadFile(work, NULL, NULL) > 0) { token = work; } } } entries[numEntries] = soundSystemLocal.soundCache->FindSound(token.c_str(), onDemand); numEntries++; }*/
		}
		else
		{
			lexer.Warning("unknown token '{0}'", token.ToString());
			return false;
		}
	}

	if(_parameters.Shakes > 0.0f)
	{
		idConsole.Warning("TODO: CheckShakesAndOgg()");
	}

	return true;
}
/// <summary>
/// Parses a single "anim" entry from a model declaration: the anim alias, one
/// or more comma-separated MD5 anim files, and an optional { } block of frame
/// commands / animation flags.
/// </summary>
/// <param name="lexer">Lexer positioned after the "anim" keyword.</param>
/// <param name="defaultAnimCount">Number of anims inherited from the default/parent set; those may be overridden, later ones may not be duplicated.</param>
/// <returns>true on success; false (usually after MakeDefault) on a parse error.</returns>
private bool ParseAnimation(idLexer lexer, int defaultAnimCount)
{
	List<idMD5Anim> md5anims = new List<idMD5Anim>();
	idMD5Anim md5anim;
	idAnim anim;
	AnimationFlags flags = new AnimationFlags();

	idToken token;
	idToken realName = lexer.ReadToken();

	if(realName == null)
	{
		lexer.Warning("Unexpected end of file");
		MakeDefault();

		return false;
	}

	string alias = realName.ToString();

	int i;
	int count = _anims.Count;

	// look for an existing anim with the same full name
	for(i = 0; i < count; i++)
	{
		if(_anims[i].FullName.Equals(alias, StringComparison.OrdinalIgnoreCase) == true)
		{
			break;
		}
	}

	// a name may only be redefined if it came from the inherited/default set
	if((i < count) && (i >= defaultAnimCount))
	{
		lexer.Warning("Duplicate anim '{0}'", realName);
		MakeDefault();

		return false;
	}

	if(i < defaultAnimCount)
	{
		// override the inherited default anim in place
		anim = _anims[i];
	}
	else
	{
		// create the alias associated with this animation
		anim = new idAnim();
		_anims.Add(anim);
	}

	// random anims end with a number. find the numeric suffix of the animation.
	int len = alias.Length;

	for(i = len - 1; i > 0; i--)
	{
		if(Char.IsNumber(alias[i]) == false)
		{
			break;
		}
	}

	// check for zero length name, or a purely numeric name
	if(i <= 0)
	{
		lexer.Warning("Invalid animation name '{0}'", alias);
		MakeDefault();

		return false;
	}

	// remove the numeric suffix
	alias = alias.Substring(0, i + 1);

	// parse the anims from the string
	do
	{
		if((token = lexer.ReadToken()) == null)
		{
			lexer.Warning("Unexpected end of file");
			MakeDefault();

			return false;
		}

		// lookup the animation
		md5anim = idR.AnimManager.GetAnimation(token.ToString());

		if(md5anim == null)
		{
			lexer.Warning("Couldn't load anim '{0}'", token);
			return false;
		}

		md5anim.CheckModelHierarchy(_model);

		if(md5anims.Count > 0)
		{
			// make sure it's the same length as the other anims
			if(md5anim.Length != md5anims[0].Length)
			{
				lexer.Warning("Anim '{0}' does not match length of anim '{1}'", md5anim.Name, md5anims[0].Name);
				MakeDefault();

				return false;
			}
		}

		// add it to our list
		md5anims.Add(md5anim);
	}
	while(lexer.CheckTokenString(",") == true);

	if(md5anims.Count == 0)
	{
		lexer.Warning("No animation specified");
		MakeDefault();

		return false;
	}

	anim.SetAnimation(this, realName.ToString(), alias, md5anims.ToArray());

	// parse any frame commands or animflags
	if(lexer.CheckTokenString("{") == true)
	{
		while(true)
		{
			if((token = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return false;
			}

			string tokenValue = token.ToString();

			if(tokenValue == "}")
			{
				break;
			}
			else if(tokenValue == "prevent_idle_override")
			{
				flags.PreventIdleOverride = true;
			}
			else if(tokenValue == "random_cycle_start")
			{
				flags.RandomCycleStart = true;
			}
			else if(tokenValue == "ai_no_turn")
			{
				flags.AINoTurn = true;
			}
			else if(tokenValue == "anim_turn")
			{
				flags.AnimationTurn = true;
			}
			else if(tokenValue == "frame")
			{
				// create a frame command
				int frameIndex;
				string err;

				// make sure we don't have any line breaks while reading the frame command so the error line # will be correct
				if((token = lexer.ReadTokenOnLine()) == null)
				{
					lexer.Warning("Missing frame # after 'frame'");
					MakeDefault();

					return false;
				}
				else if((token.Type == TokenType.Punctuation) && (token.ToString() == "-"))
				{
					lexer.Warning("Invalid frame # after 'frame'");
					MakeDefault();

					return false;
				}
				else if((token.Type != TokenType.Number) || (token.SubType == TokenSubType.Float))
				{
					// NOTE(review): unlike the branches above this does not return;
					// execution falls through and still converts the token below —
					// confirm that lexer.Error aborts or defaults the decl
					lexer.Error("expected integer value, found '{0}'", token);
				}

				// get the frame number
				frameIndex = token.ToInt32();

				// put the command on the specified frame of the animation
				if((err = anim.AddFrameCommand(this, frameIndex, lexer, null)) != null)
				{
					// NOTE(review): err is passed as the format string; if a message
					// ever contains '{' or '}' this will throw — consider Warning("{0}", err)
					lexer.Warning(err.ToString());
					MakeDefault();

					return false;
				}
			}
			else
			{
				lexer.Warning("Unknown command '{0}'", token);
				MakeDefault();

				return false;
			}
		}
	}

	// set the flags
	anim.Flags = flags;

	return true;
}
/// <summary>
/// Parses a single frame command for this anim from the lexer and inserts it
/// into the per-frame command lists (_frameCommands / _frameLookups).
/// </summary>
/// <param name="modelDef">Model declaration used to validate joint names.</param>
/// <param name="frameIndex">1-based frame number as written in the .def file.</param>
/// <param name="lexer">Lexer positioned at the command name; arguments are read from the same line.</param>
/// <param name="def">Not read in this port; kept for parity with the original engine signature.</param>
/// <returns>null on success, otherwise a human-readable error message for the caller to report.</returns>
public string AddFrameCommand(idDeclModel modelDef, int frameIndex, idLexer lexer, idDict def)
{
	// make sure we're within bounds
	if((frameIndex < 1) || (frameIndex > _anims[0].FrameCount))
	{
		return string.Format("Frame {0} out of range", frameIndex);
	}

	// frame numbers are 1 based in .def files, but 0 based internally
	frameIndex--;

	idToken token;
	AnimationFrameCommand frameCommand = new AnimationFrameCommand();

	if((token = lexer.ReadTokenOnLine()) == null)
	{
		return "Unexpected end of line";
	}

	string tokenValue = token.ToString();

	if(tokenValue == "call")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		tokenValue = token.ToString();
		frameCommand.Type = AnimationFrameCommandType.ScriptFunction;

		idConsole.Warning("TODO: fc.function = gameLocal.program.FindFunction( token );");

		// NOTE(review): Function is never assigned above (the script lookup is
		// still TODO), so this branch currently always reports the function as
		// not found — confirm intended until scripting is ported.
		if(frameCommand.Function == null)
		{
			return string.Format("Function '{0}' not found", tokenValue);
		}
	}
	else if(tokenValue == "object_call")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		tokenValue = token.ToString();
		frameCommand.Type = AnimationFrameCommandType.ScriptFunctionObject;
		frameCommand.String = tokenValue;
	}
	else if(tokenValue == "event")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		tokenValue = token.ToString();
		frameCommand.Type = AnimationFrameCommandType.EventFunction;

		idConsole.Warning("TODO: idAnim Event");
		/*const idEventDef *ev = idEventDef::FindEvent( token ); if ( !ev ) { return va( "Event '%s' not found", token.c_str() ); } if ( ev->GetNumArgs() != 0 ) { return va( "Event '%s' has arguments", token.c_str() ); }*/

		frameCommand.String = tokenValue;
	}
	else if((tokenValue == "sound") || (tokenValue == "sound_voice") || (tokenValue == "sound_voice2") || (tokenValue == "sound_body") || (tokenValue == "sound_body2") || (tokenValue == "sound_body3") || (tokenValue == "sound_weapon") || (tokenValue == "sound_global") || (tokenValue == "sound_item") || (tokenValue == "sound_chatter"))
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		// map the keyword to the matching sound-channel command type
		switch(tokenValue)
		{
			case "sound": frameCommand.Type = AnimationFrameCommandType.Sound; break;
			case "sound_voice": frameCommand.Type = AnimationFrameCommandType.SoundVoice; break;
			case "sound_voice2": frameCommand.Type = AnimationFrameCommandType.SoundVoice2; break;
			case "sound_body": frameCommand.Type = AnimationFrameCommandType.SoundBody; break;
			case "sound_body2": frameCommand.Type = AnimationFrameCommandType.SoundBody2; break;
			case "sound_body3": frameCommand.Type = AnimationFrameCommandType.SoundBody3; break;
			case "sound_weapon": frameCommand.Type = AnimationFrameCommandType.SoundWeapon; break;
			case "sound_global": frameCommand.Type = AnimationFrameCommandType.SoundGlobal; break;
			case "sound_item": frameCommand.Type = AnimationFrameCommandType.SoundItem; break;
			case "sound_chatter": frameCommand.Type = AnimationFrameCommandType.SoundChatter; break;
		}

		tokenValue = token.ToString();

		// "snd_"-prefixed names are kept as strings (resolved later);
		// anything else is looked up as a sound shader immediately
		if(tokenValue.StartsWith("snd_") == true)
		{
			frameCommand.String = tokenValue;
		}
		else
		{
			frameCommand.SoundMaterial = idE.DeclManager.FindSound(tokenValue);

			if(frameCommand.SoundMaterial.State == DeclState.Defaulted)
			{
				idConsole.Warning("Sound '{0}' not found", tokenValue);
			}
		}
	}
	else if(tokenValue == "skin")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		tokenValue = token.ToString();
		frameCommand.Type = AnimationFrameCommandType.Skin;

		// "none" clears the skin rather than looking one up
		if(tokenValue == "none")
		{
			frameCommand.Skin = null;
		}
		else
		{
			frameCommand.Skin = idE.DeclManager.FindSkin(tokenValue);

			if(frameCommand.Skin == null)
			{
				return string.Format("Skin '{0}' not found", tokenValue);
			}
		}
	}
	else if(tokenValue == "fx")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		tokenValue = token.ToString();
		frameCommand.Type = AnimationFrameCommandType.Fx;

		if(idE.DeclManager.FindType(DeclType.Fx, tokenValue) == null)
		{
			return string.Format("fx '{0}' not found", tokenValue);
		}

		frameCommand.String = tokenValue;
	}
	else if(tokenValue == "trigger")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		tokenValue = token.ToString();
		frameCommand.Type = AnimationFrameCommandType.Trigger;
		frameCommand.String = tokenValue;
	}
	else if(tokenValue == "triggerSmokeParticle")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		tokenValue = token.ToString();
		frameCommand.Type = AnimationFrameCommandType.TriggerSmokeParticle;
		frameCommand.String = tokenValue;
	}
	else if((tokenValue == "melee") || (tokenValue == "direct_damage") || (tokenValue == "attack_begin"))
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		switch(tokenValue)
		{
			case "melee": frameCommand.Type = AnimationFrameCommandType.Melee; break;
			case "direct_damage": frameCommand.Type = AnimationFrameCommandType.DirectDamage; break;
			case "attack_begin": frameCommand.Type = AnimationFrameCommandType.BeginAttack; break;
		}

		tokenValue = token.ToString();

		// the argument must name a known entityDef
		if(idR.Game.FindEntityDef(tokenValue, false) == null)
		{
			return string.Format("Unknown entityDef '{0}'", tokenValue);
		}

		frameCommand.String = tokenValue;
	}
	else if(tokenValue == "attack_end")
	{
		frameCommand.Type = AnimationFrameCommandType.EndAttack;
	}
	else if(tokenValue == "muzzle_flash")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		tokenValue = token.ToString();

		// an empty joint name is allowed; a non-empty one must exist on the model
		if((tokenValue != string.Empty) && (modelDef.FindJoint(tokenValue) == null))
		{
			return string.Format("Joint '{0}' not found", tokenValue);
		}

		frameCommand.Type = AnimationFrameCommandType.MuzzleFlash;
		frameCommand.String = tokenValue;
	}
	else if((tokenValue == "create_missile") || (tokenValue == "launch_missile"))
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		switch(tokenValue)
		{
			case "create_missile": frameCommand.Type = AnimationFrameCommandType.CreateMissile; break;
			case "launch_missile": frameCommand.Type = AnimationFrameCommandType.LaunchMissile; break;
		}

		tokenValue = token.ToString();
		frameCommand.String = tokenValue;

		if(modelDef.FindJoint(tokenValue) == null)
		{
			return string.Format("Joint '{0}' not found", tokenValue);
		}
	}
	else if(tokenValue == "fire_missile_at_target")
	{
		// two arguments: launch joint, then target name
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		JointInfo jointInfo = modelDef.FindJoint(token.ToString());

		if(jointInfo == null)
		{
			return string.Format("Joint '{0}' not found", token.ToString());
		}

		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of line";
		}

		frameCommand.Type = AnimationFrameCommandType.FireMissileAtTarget;
		frameCommand.String = token.ToString();
		frameCommand.Index = jointInfo.Index;
	}
	else if(tokenValue == "footstep")
	{
		frameCommand.Type = AnimationFrameCommandType.Footstep;
	}
	else if(tokenValue == "leftfoot")
	{
		frameCommand.Type = AnimationFrameCommandType.LeftFoot;
	}
	else if(tokenValue == "rightfoot")
	{
		frameCommand.Type = AnimationFrameCommandType.RightFoot;
	}
	else if(tokenValue == "enableEyeFocus")
	{
		frameCommand.Type = AnimationFrameCommandType.EnableEyeFocus;
	}
	else if(tokenValue == "disableEyeFocus")
	{
		frameCommand.Type = AnimationFrameCommandType.DisableEyeFocus;
	}
	else if(tokenValue == "disableGravity")
	{
		frameCommand.Type = AnimationFrameCommandType.DisableGravity;
	}
	else if(tokenValue == "enableGravity")
	{
		frameCommand.Type = AnimationFrameCommandType.EnableGravity;
	}
	else if(tokenValue == "jump")
	{
		frameCommand.Type = AnimationFrameCommandType.Jump;
	}
	else if(tokenValue == "enableClip")
	{
		frameCommand.Type = AnimationFrameCommandType.EnableClip;
	}
	else if(tokenValue == "disableClip")
	{
		frameCommand.Type = AnimationFrameCommandType.DisableClip;
	}
	else if(tokenValue == "enableWalkIK")
	{
		frameCommand.Type = AnimationFrameCommandType.EnableWalkIk;
	}
	else if(tokenValue == "disableWalkIK")
	{
		frameCommand.Type = AnimationFrameCommandType.DisableWalkIk;
	}
	else if(tokenValue == "enableLegIK")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of file";
		}

		frameCommand.Type = AnimationFrameCommandType.EnableLegIk;
		frameCommand.Index = int.Parse(token.ToString());
	}
	else if(tokenValue == "disableLegIK")
	{
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			return "Unexpected end of file";
		}

		frameCommand.Type = AnimationFrameCommandType.DisableLegIk;
		frameCommand.Index = int.Parse(token.ToString());
	}
	else if(tokenValue == "recordDemo")
	{
		frameCommand.Type = AnimationFrameCommandType.RecordDemo;

		// the demo file name argument is optional
		if((token = lexer.ReadTokenOnLine()) != null)
		{
			frameCommand.String = token.ToString();
		}
	}
	else if(tokenValue == "aviGame")
	{
		frameCommand.Type = AnimationFrameCommandType.AviGame;

		// the avi name argument is optional
		if((token = lexer.ReadTokenOnLine()) != null)
		{
			frameCommand.String = token.ToString();
		}
	}
	else
	{
		return string.Format("Unknown command '{0}'", tokenValue);
	}

	// check if we've initialized the frame lookup table
	if(_frameLookups.Count == 0)
	{
		// we haven't, so allocate the table and initialize it
		for(int i = 0; i < _anims[0].FrameCount; i++)
		{
			_frameLookups.Add(new AnimationFrameLookup());
		}
	}

	// calculate the index of the new command
	int index = _frameLookups[frameIndex].FirstCommand + _frameLookups[frameIndex].Index;
	int count = _frameLookups.Count;

	_frameCommands.Insert(index, frameCommand);

	// fix the indices of any later frames to account for the inserted command
	for(int i = frameIndex + 1; i < count; i++)
	{
		_frameLookups[i].FirstCommand++;
	}

	// increase the number of commands on this frame
	_frameLookups[frameIndex].Index++;

	// return with no error
	return null;
}