public Texture2D ParseImageProgram(string source, ref DateTime timeStamp, ref TextureDepth depth)
{
    _lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);
    _lexer.LoadMemory(source, source);

    return ParseImageProgram(ref timeStamp, ref depth, false);
}
private void ParseJoint(idLexer lexer, idMD5Joint joint, ref idJointQuaternion defaultPose)
{
    //
    // parse name
    //
    joint.Name = lexer.ReadToken().ToString();

    //
    // parse parent
    //
    int parentIndex = lexer.ParseInt();

    if(parentIndex >= 0)
    {
        if(parentIndex >= (_joints.Length - 1))
        {
            lexer.Error("Invalid parent for joint '{0}'", joint.Name);
        }

        joint.Parent = _joints[parentIndex];
    }

    //
    // parse default pose
    //
    float[] tmp = lexer.Parse1DMatrix(3);
    defaultPose.Translation = new Vector3(tmp[0], tmp[1], tmp[2]);

    tmp = lexer.Parse1DMatrix(3);

    defaultPose.Quaternion = new Quaternion(tmp[0], tmp[1], tmp[2], 0);
    defaultPose.Quaternion.W = idHelper.CalculateW(defaultPose.Quaternion);
}
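// MD5 files store only the x, y and z of each unit quaternion; W is recovered from the
// unit-length constraint. A minimal sketch of the reconstruction idHelper.CalculateW is
// assumed to perform (its actual implementation is not shown here, so treat this
// standalone helper as illustrative only):
private static float CalculateWSketch(Quaternion q)
{
    // for a unit quaternion, w² = 1 - (x² + y² + z²); Abs() guards against floating point
    // rounding pushing the radicand slightly below zero
    return (float) Math.Sqrt(Math.Abs(1.0f - (q.X * q.X + q.Y * q.Y + q.Z * q.Z)));
}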
public Texture2D ParseImageProgram(idLexer lexer)
{
    _lexer = lexer;

    DateTime timeStamp = DateTime.Now;
    TextureDepth depth = TextureDepth.Default;

    return ParseImageProgram(ref timeStamp, ref depth, true);
}
private void MatchAndAppendToken(idLexer lexer, string match)
{
    if(lexer.ExpectTokenString(match) == false)
    {
        return;
    }

    // a matched token won't need a leading space
    _builder.Append(match);
}
public virtual bool HandleSingleGuiCommand(idEntity entityGui, idLexer lexer)
{
    return false;
}
public static idMapBrush ParseQ3(idLexer lexer, Vector3 origin) { int rotate; int[] shift = new int[2]; float[] scale = new float[2]; Vector3[] planePoints = new Vector3[3]; List<idMapBrushSide> sides = new List<idMapBrushSide>(); idMapBrushSide side; idToken token; do { if(lexer.CheckTokenString("}") == true) { break; } side = new idMapBrushSide(); sides.Add(side); // read the three point plane definition float[] tmp = lexer.Parse1DMatrix(3); float[] tmp2 = lexer.Parse1DMatrix(3); float[] tmp3 = lexer.Parse1DMatrix(3); if((tmp == null) || (tmp2 == null) || (tmp3 == null)) { lexer.Error("idMapBrush::ParseQ3: unable to read brush side plane definition"); return null; } planePoints[0] = new Vector3(tmp[0], tmp[1], tmp[2]) - origin; planePoints[1] = new Vector3(tmp2[0], tmp2[1], tmp2[2]) - origin; planePoints[2] = new Vector3(tmp3[0], tmp3[1], tmp3[2]) - origin; side.Plane.FromPoints(planePoints[0], planePoints[1], planePoints[2]); // read the material token = lexer.ReadTokenOnLine(); if(token == null) { lexer.Error("idMapBrush::ParseQ3: unable to read brush side material"); return null; } // we have an implicit 'textures/' in the old format side.Material = "textures/" + token.ToString(); // read the texture shift, rotate and scale shift[0] = lexer.ParseInt(); shift[1] = lexer.ParseInt(); rotate = lexer.ParseInt(); scale[0] = lexer.ParseFloat(); scale[1] = lexer.ParseFloat(); side.TextureMatrix[0] = new Vector3(0.03125f, 0.0f, 0.0f); side.TextureMatrix[1] = new Vector3(0.0f, 0.03125f, 0.0f); side.Origin = origin; // Q2 allowed override of default flags and values, but we don't any more if(lexer.ReadTokenOnLine() != null) { if(lexer.ReadTokenOnLine() != null) { if(lexer.ReadTokenOnLine() != null) { } } } } while(true); idMapBrush brush = new idMapBrush(); for(int i = 0; i < sides.Count; i++) { brush.AddSide(sides[i]); } brush.Dict = new idDict(); return brush; }
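// Note on ParseQ3 above: the shift/rotate/scale values are read from the old Quake 3 side
// definition but the side's texture matrix is then installed as a fixed 0.03125 (= 1/32)
// scale, which appears to correspond to the old 32-unit default texture scale; the parsed
// shift/rotate/scale values are otherwise discarded here.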
public static idMapEntity Parse(idLexer lexer, bool isWordSpawn = false, float version = idMapFile.CurrentMapVersion) { idToken token; if((token = lexer.ReadToken()) == null) { return null; } if(token.ToString() != "{") { lexer.Error("idMapEntity.Parse: {{ not found, found {0}", token.ToString()); return null; } idMapEntity mapEnt = new idMapEntity(); idMapBrush mapBrush = null; idMapPatch mapPatch = null; Vector3 origin = Vector3.Zero; bool worldEnt = false; string tokenValue; do { if((token = lexer.ReadToken()) == null) { lexer.Error("idMapEntity.Parse: EOF without closing brace"); return null; } if(token.ToString() == "}") { break; } if(token.ToString() == "{") { // parse a brush or patch if((token = lexer.ReadToken()) == null) { lexer.Error("idMapEntity.Parse: unexpected EOF"); return null; } if(worldEnt == true) { origin = Vector3.Zero; } tokenValue = token.ToString(); // if is it a brush: brush, brushDef, brushDef2, brushDef3 if(tokenValue.StartsWith("brush", StringComparison.OrdinalIgnoreCase) == true) { mapBrush = idMapBrush.Parse(lexer, origin, (tokenValue.Equals("brushDef2", StringComparison.OrdinalIgnoreCase) || tokenValue.Equals("brushDef3", StringComparison.OrdinalIgnoreCase)), version); if(mapBrush == null) { return null; } mapEnt.AddPrimitive(mapBrush); } // if is it a patch: patchDef2, patchDef3 else if(tokenValue.StartsWith("patch", StringComparison.OrdinalIgnoreCase) == true) { mapPatch = idMapPatch.Parse(lexer, origin, tokenValue.Equals("patchDef3", StringComparison.OrdinalIgnoreCase), version); if(mapPatch == null) { return null; } mapEnt.AddPrimitive(mapPatch); } // assume it's a brush in Q3 or older style else { lexer.UnreadToken = token; mapBrush = idMapBrush.ParseQ3(lexer, origin); if(mapBrush == null) { return null; } mapEnt.AddPrimitive(mapBrush); } } else { // parse a key / value pair string key = token.ToString(); token = lexer.ReadTokenOnLine(); string value = token.ToString(); // strip trailing spaces that sometimes get accidentally added in the editor value = value.Trim(); key = key.Trim(); mapEnt.Dict.Set(key, value); if(key.Equals("origin", StringComparison.OrdinalIgnoreCase) == true) { // scanf into doubles, then assign, so it is idVec size independent string[] parts = value.Split(' '); float.TryParse(parts[0], out origin.X); float.TryParse(parts[1], out origin.Y); float.TryParse(parts[2], out origin.Z); } else if((key.Equals("classname", StringComparison.OrdinalIgnoreCase) == true) && (value.Equals("worldspawn", StringComparison.OrdinalIgnoreCase) == true)) { worldEnt = true; } } } while(true); return mapEnt; }
public static idMapPatch Parse(idLexer lexer, Vector3 origin, bool patchDef3 = true, float version = idMapFile.CurrentMapVersion) { if(lexer.ExpectTokenString("{") == false) { return null; } // read the material (we had an implicit 'textures/' in the old format...) idToken token = lexer.ReadToken(); if(token == null) { lexer.Error("idMapPatch::Parse: unexpected EOF"); return null; } // Parse it float[] info; if(patchDef3 == true) { info = lexer.Parse1DMatrix(7); if(info == null) { lexer.Error("idMapPatch::Parse: unable to Parse patchDef3 info"); return null; } } else { info = lexer.Parse1DMatrix(5); if(info == null) { lexer.Error("idMapPatch::Parse: unable to parse patchDef2 info"); return null; } } idMapPatch patch = new idMapPatch((int) info[0], (int) info[1]); if(version < 2.0f) { patch.Material = "textures/" + token.ToString(); } else { patch.Material = token.ToString(); } if(patchDef3 == true) { patch.HorizontalSubdivisions = (int) info[2]; patch.VerticalSubdivisions = (int) info[3]; patch.ExplicitlySubdivided = true; } if((patch.Width < 0) || (patch.Height < 0)) { lexer.Error("idMapPatch::Parse: bad size"); return null; } // these were written out in the wrong order, IMHO if(lexer.ExpectTokenString("(") == false) { lexer.Error("idMapPatch::Parse: bad patch vertex data"); return null; } for(int j = 0; j < patch.Width; j++) { if(lexer.ExpectTokenString("(") == false) { lexer.Error("idMapPatch::Parse: bad vertex row data"); return null; } for(int i = 0; i < patch.Height; i++) { float[] v = lexer.Parse1DMatrix(5); if(v == null) { lexer.Error("idMapPatch::Parse: bad vertex column data"); return null; } Vertex vert = new Vertex(); vert.Position.X = v[0] - origin.X; vert.Position.Y = v[1] - origin.Y; vert.Position.Z = v[2] - origin.Z; vert.TextureCoordinates = new Vector2(v[3], v[4]); patch.SetVertex(i * patch.Width + j, vert); } if(lexer.ExpectTokenString(")") == false) { lexer.Error("idMapPatch::Parse: unable to parse patch control points"); return null; } } if(lexer.ExpectTokenString(")") == false) { lexer.Error("idMapPatch::Parse: unable to parse patch control points, no closure" ); return null; } // read any key/value pairs while((token = lexer.ReadToken()) != null) { if(token.ToString() == "}") { lexer.ExpectTokenString("}"); break; } if(token.Type == TokenType.String) { string key = token.ToString(); token = lexer.ExpectTokenType(TokenType.String, 0); patch.Dict.Set(key, token.ToString()); } } return patch; }
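// Patch control points above are stored row-major: the outer loop walks columns (j over
// patch.Width) and the inner loop rows (i over patch.Height), so each point lands at
// index i * patch.Width + j. For example, on a 3x3 patch the point in row 1, column 2 is
// stored at 1 * 3 + 2 = 5.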
/// <summary>
/// Load the given source.
/// </summary>
/// <returns></returns>
public bool LoadMemory(string content, string name)
{
    if(_loaded == true)
    {
        idConsole.FatalError("idScriptParser::LoadMemory: another source already loaded");
        return false;
    }

    idLexer script = new idLexer(_options);
    script.Punctuation = _punctuation;
    script.LoadMemory(content, name);

    if(script.IsLoaded == false)
    {
        return false;
    }

    _fileName = name;

    _scriptStack.Clear();
    _indentStack.Clear();
    _tokens.Clear();

    _skip = 0;
    _loaded = true;

    _scriptStack.Push(script);

    if(_defineDict == null)
    {
        _defines.Clear();

        _defineDict = new Dictionary<string, ScriptDefinition>(StringComparer.OrdinalIgnoreCase);

        AddGlobalDefinesToSource();
    }

    return true;
}
private void ParseVertices(idLexer lexer, CollisionModel model)
{
    lexer.ExpectTokenString("{");

    int vertexCount = lexer.ParseInt();

    model.Vertices = new CollisionModelVertex[vertexCount];

    for(int i = 0; i < vertexCount; i++)
    {
        float[] tmp = lexer.Parse1DMatrix(3);

        model.Vertices[i].Point = new Vector3(tmp[0], tmp[1], tmp[2]);
        model.Vertices[i].Side = 0;
        model.Vertices[i].SideSet = 0;
        model.Vertices[i].CheckCount = 0;
    }

    lexer.ExpectTokenString("}");
}
/// <summary>
/// Adds implicit stages to the material.
/// </summary>
/// <remarks>
/// If a material has diffuse or specular stages without any
/// bump stage, add an implicit _flat bumpmap stage.
/// <p/>
/// It is valid to have either a diffuse or specular without the other.
/// <p/>
/// It is valid to have a reflection map and a bump map for bumpy reflection.
/// </remarks>
/// <param name="textureRepeatDefault"></param>
private void AddImplicitStages(TextureRepeat textureRepeatDefault = TextureRepeat.Repeat)
{
    bool hasDiffuse = false;
    bool hasSpecular = false;
    bool hasBump = false;
    bool hasReflection = false;

    int count = _parsingData.Stages.Count;

    for(int i = 0; i < count; i++)
    {
        switch(_parsingData.Stages[i].Lighting)
        {
            case StageLighting.Bump:
                hasBump = true;
                break;

            case StageLighting.Diffuse:
                hasDiffuse = true;
                break;

            case StageLighting.Specular:
                hasSpecular = true;
                break;
        }

        if(_parsingData.Stages[i].Texture.TextureCoordinates == TextureCoordinateGeneration.ReflectCube)
        {
            hasReflection = true;
        }
    }

    // if it doesn't have an interaction at all, don't add anything
    if((hasBump == false) && (hasDiffuse == false) && (hasSpecular == false))
    {
        return;
    }

    idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames);

    if(hasBump == false)
    {
        // add an implicit _flat bump map stage
        string bump = "blend bumpmap\nmap _flat\n}\n";

        lexer.LoadMemory(bump, "bumpmap");
        ParseStage(lexer, textureRepeatDefault);
    }

    if((hasDiffuse == false) && (hasSpecular == false) && (hasReflection == false))
    {
        // add an implicit _white diffuse map stage
        string diffuse = "blend diffusemap\nmap _white\n}\n";

        lexer.LoadMemory(diffuse, "diffusemap");
        ParseStage(lexer, textureRepeatDefault);
    }
}
/// <summary> /// Parses the material, if there are any errors during parsing the defaultShader will be set. /// </summary> /// <param name="lexer"></param> private void ParseMaterial(idLexer lexer) { _registerCount = PredefinedRegisterCount; // leave space for the parms to be copied in. for(int i = 0; i < _registerCount; i++) { _parsingData.RegisterIsTemporary[i] = true; // they aren't constants that can be folded. } TextureRepeat textureRepeatDefault = TextureRepeat.Repeat; // allow a global setting for repeat. idToken token = null; string tokenValue; string tokenLower; int count; while(true) { if(TestMaterialFlag(Renderer.MaterialFlags.Defaulted) == true) { // we have a parse error. return; } if((token = lexer.ExpectAnyToken()) == null) { this.MaterialFlag = MaterialFlags.Defaulted; return; } tokenValue = token.ToString(); tokenLower = tokenValue.ToLower(); // end of material definition if(tokenLower == "}") { break; } else if(tokenLower == "qer_editorimage") { token = lexer.ReadTokenOnLine(); _editorImageName = (token != null) ? token.ToString() : string.Empty; lexer.SkipRestOfLine(); } else if(tokenLower == "description") { token = lexer.ReadTokenOnLine(); _description = (token != null) ? token.ToString() : string.Empty; } // check for the surface / content bit flags. else if(CheckSurfaceParameter(token) == true) { } else if(tokenLower == "polygonoffset") { this.MaterialFlag = Renderer.MaterialFlags.PolygonOffset; if((token = lexer.ReadTokenOnLine()) == null) { _polygonOffset = 1; } else { _polygonOffset = token.ToFloat(); } } // noshadow. else if(tokenLower == "noshadows") { this.MaterialFlag = MaterialFlags.NoShadows; } else if(tokenLower == "suppressinsubview") { _suppressInSubview = true; } else if(tokenLower == "portalsky") { _portalSky = true; } else if(tokenLower == "noselfshadow") { this.MaterialFlag = Renderer.MaterialFlags.NoSelfShadow; } else if(tokenLower == "noportalfog") { this.MaterialFlag = Renderer.MaterialFlags.NoPortalFog; } // forceShadows allows nodraw surfaces to cast shadows. else if(tokenLower == "forceshadows") { this.MaterialFlag = Renderer.MaterialFlags.ForceShadows; } // overlay / decal suppression. else if(tokenLower == "nooverlays") { _allowOverlays = false; } // moster blood overlay forcing for alpha tested or translucent surfaces. else if(tokenLower == "forceoverlays") { _parsingData.ForceOverlays = true; } else if(tokenLower == "translucent") { _coverage = MaterialCoverage.Translucent; } // global zero clamp. else if(tokenLower == "zeroclamp") { textureRepeatDefault = TextureRepeat.ClampToZero; } // global clamp. else if(tokenLower == "clamp") { textureRepeatDefault = TextureRepeat.Clamp; } // global clamp. else if(tokenLower == "alphazeroclamp") { textureRepeatDefault = TextureRepeat.ClampToZero; } // forceOpaque is used for skies-behind-windows. else if(tokenLower == "forceopaque") { _coverage = MaterialCoverage.Opaque; } else if(tokenLower == "twosided") { _cullType = CullType.TwoSided; // twoSided implies no-shadows, because the shadow // volume would be coplanar with the surface, giving depth fighting // we could make this no-self-shadows, but it may be more important // to receive shadows from no-self-shadow monsters. this.MaterialFlag = Renderer.MaterialFlags.NoShadows; } else if(tokenLower == "backsided") { _cullType = CullType.Back; // the shadow code doesn't handle this, so just disable shadows. // We could fix this in the future if there was a need. 
this.MaterialFlag = Renderer.MaterialFlags.NoShadows; } else if(tokenLower == "foglight") { _fogLight = true; } else if(tokenLower == "blendlight") { _blendLight = true; } else if(tokenLower == "ambientlight") { _ambientLight = true; } else if(tokenLower == "mirror") { _sort = (float) MaterialSort.Subview; _coverage = MaterialCoverage.Opaque; } else if(tokenLower == "nofog") { _noFog = true; } else if(tokenLower == "unsmoothedtangents") { _unsmoothedTangents = true; } // lightFallofImage <imageprogram> // specifies the image to use for the third axis of projected // light volumes. else if(tokenLower == "lightfalloffimage") { _lightFalloffImage = idE.ImageManager.ImageFromFile(ParsePastImageProgram(lexer), TextureFilter.Default, false, TextureRepeat.Clamp, TextureDepth.Default); } // guisurf <guifile> | guisurf entity // an entity guisurf must have an idUserInterface // specified in the renderEntity. else if(tokenLower == "guisurf") { token = lexer.ReadTokenOnLine(); tokenLower = token.ToString().ToLower(); if(tokenLower == "entity") { _entityGui = 1; } else if(tokenLower == "entity2") { _entityGui = 2; } else if(tokenLower == "entity3") { _entityGui = 3; } else { _userInterface = idE.UIManager.FindInterface(token.ToString(), true); } } // sort. else if(tokenLower == "sort") { ParseSort(lexer); } // spectrum <integer>. else if(tokenLower == "spectrum") { int.TryParse(lexer.ReadTokenOnLine().ToString(), out _spectrum); } // deform < sprite | tube | flare >. else if(tokenLower == "deform") { ParseDeform(lexer); } // decalInfo <staySeconds> <fadeSeconds> (<start rgb>) (<end rgb>). else if(tokenLower == "decalinfo") { ParseDecalInfo(lexer); } // renderbump <args...>. else if(tokenLower == "renderbump") { _renderBump = lexer.ParseRestOfLine(); } // diffusemap for stage shortcut. else if(tokenLower == "diffusemap") { idLexer newLexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames); newLexer.LoadMemory(string.Format("blend diffusemap\nmap {0}\n}}\n", ParsePastImageProgram(lexer)), "diffusemap"); ParseStage(newLexer, textureRepeatDefault); } // specularmap for stage shortcut. else if(tokenLower == "specularmap") { idLexer newLexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames); newLexer.LoadMemory(string.Format("blend specularmap\nmap {0}\n}}\n", ParsePastImageProgram(lexer)), "specularmap"); ParseStage(newLexer, textureRepeatDefault); } // normalmap for stage shortcut. else if(tokenLower == "bumpmap") { idLexer newLexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames); newLexer.LoadMemory(string.Format("blend bumpmap\nmap {0}\n}}\n", ParsePastImageProgram(lexer)), "bumpmap"); ParseStage(newLexer, textureRepeatDefault); } // DECAL_MACRO for backwards compatibility with the preprocessor macros. else if(tokenLower == "decal_macro") { // polygonOffset this.MaterialFlag = Renderer.MaterialFlags.PolygonOffset; _polygonOffset = -1; // discrete _surfaceFlags |= SurfaceFlags.Discrete; _contentFlags &= ~ContentFlags.Solid; // sort decal. _sort = (float) MaterialSort.Decal; // noShadows this.MaterialFlag = Renderer.MaterialFlags.NoShadows; } else if(tokenValue == "{") { // create the new stage. 
ParseStage(lexer, textureRepeatDefault); } else { idConsole.WriteLine("unknown general material parameter '{0}' in '{1}'", tokenValue, this.Name); return; } } // add _flat or _white stages if needed. AddImplicitStages(); // order the diffuse / bump / specular stages properly. SortInteractionStages(); // if we need to do anything with normals (lighting or environment mapping) // and two sided lighting was asked for, flag // shouldCreateBackSides() and change culling back to single sided, // so we get proper tangent vectors on both sides. // we can't just call ReceivesLighting(), because the stages are still // in temporary form. if(_cullType == CullType.TwoSided) { count = _parsingData.Stages.Count; for(int i = 0; i < count; i++) { if((_parsingData.Stages[i].Lighting != StageLighting.Ambient) || (_parsingData.Stages[i].Texture.TextureCoordinates != TextureCoordinateGeneration.Explicit)) { if(_cullType == CullType.TwoSided) { _cullType = CullType.Front; _shouldCreateBackSides = true; } break; } } } // currently a surface can only have one unique texgen for all the stages on old hardware. TextureCoordinateGeneration firstGen = TextureCoordinateGeneration.Explicit; count = _parsingData.Stages.Count; for(int i = 0; i < count; i++) { if(_parsingData.Stages[i].Texture.TextureCoordinates != TextureCoordinateGeneration.Explicit) { if(firstGen == TextureCoordinateGeneration.Explicit) { firstGen = _parsingData.Stages[i].Texture.TextureCoordinates; } else if(firstGen != _parsingData.Stages[i].Texture.TextureCoordinates) { idConsole.Warning("material '{0}' has multiple stages with a texgen", this.Name); break; } } } }
/// <summary> /// Parses the current material definition and finds all necessary images. /// </summary> /// <param name="text"></param> /// <returns></returns> public override bool Parse(string text) { idLexer lexer = new idLexer(idDeclFile.LexerOptions); lexer.LoadMemory(text, this.FileName, this.LineNumber); lexer.SkipUntilString("{"); // reset to the unparsed state. Clear(); _parsingData = new MaterialParsingData(); // this is only valid during parsing. // parse it ParseMaterial(lexer); // TODO: fs_copyFiles // if we are doing an fs_copyfiles, also reference the editorImage /*if ( cvarSystem->GetCVarInteger( "fs_copyFiles" ) ) { GetEditorImage(); }*/ // count non-lit stages. _ambientStageCount = 0; _stageCount = _parsingData.Stages.Count; for(int i = 0; i < _stageCount; i++) { if(_parsingData.Stages[i].Lighting == StageLighting.Ambient) { _ambientStageCount++; } } // see if there is a subview stage if(_sort == (float) MaterialSort.Subview) { _hasSubview = true; } else { _hasSubview = false; int count = _parsingData.Stages.Count; for(int i = 0; i < count; i++) { if(_parsingData.Stages[i].Texture.Dynamic != null) { _hasSubview = true; } } } // automatically determine coverage if not explicitly set. if(_coverage == MaterialCoverage.Bad) { // automatically set MC_TRANSLUCENT if we don't have any interaction stages and // the first stage is blended and not an alpha test mask or a subview. if(_stageCount == 0) { // non-visible. _coverage = MaterialCoverage.Translucent; } else if(_stageCount != _ambientStageCount) { // we have an interaction draw. _coverage = MaterialCoverage.Opaque; } else { MaterialStates drawStateBits = _parsingData.Stages[0].DrawStateBits & MaterialStates.SourceBlendBits; if(((drawStateBits & MaterialStates.DestinationBlendBits) != MaterialStates.DestinationBlendZero) || (drawStateBits == MaterialStates.SourceBlendDestinationColor) || (drawStateBits == MaterialStates.SourceBlendOneMinusDestinationColor) || (drawStateBits == MaterialStates.SourceBlendDestinationAlpha) || (drawStateBits == MaterialStates.SourceBlendOneMinusDestinationAlpha)) { // blended with the destination _coverage = MaterialCoverage.Translucent; } else { _coverage = MaterialCoverage.Opaque; } } } // translucent automatically implies noshadows. if(_coverage == MaterialCoverage.Translucent) { this.MaterialFlag = MaterialFlags.NoShadows; } else { // mark the contents as opaque. _contentFlags |= ContentFlags.Opaque; } // the sorts can make reasonable defaults. if(_sort == (float) MaterialSort.Bad) { if(TestMaterialFlag(MaterialFlags.PolygonOffset) == true) { _sort = (float) MaterialSort.Decal; } else if(_coverage == MaterialCoverage.Translucent) { _sort = (float) MaterialSort.Medium; } else { _sort = (float) MaterialSort.Opaque; } } // anything that references _currentRender will automatically get sort = SS_POST_PROCESS // and coverage = MC_TRANSLUCENT. 
for(int i = 0; i < _stageCount; i++) { MaterialStage stage = _parsingData.Stages[i]; if(stage.Texture.Image == idE.ImageManager.CurrentRenderImage) { if(_sort != (float) MaterialSort.PortalSky) { _sort = (float) MaterialSort.PostProcess; _coverage = MaterialCoverage.Translucent; } break; } if(stage.NewStage.IsEmpty == false) { NewMaterialStage newShaderStage = stage.NewStage; int imageCount = newShaderStage.FragmentProgramImages.Length; for(int j = 0; j < imageCount; j++) { if(newShaderStage.FragmentProgramImages[j] == idE.ImageManager.CurrentRenderImage) { if(_sort != (float) MaterialSort.PortalSky) { _sort = (float) MaterialSort.PostProcess; _coverage = MaterialCoverage.Translucent; } i = _stageCount; break; } } } } // set the drawStateBits depth flags. for(int i = 0; i < _stageCount; i++) { MaterialStage stage = _parsingData.Stages[i]; if(_sort == (float) MaterialSort.PostProcess) { // post-process effects fill the depth buffer as they draw, so only the // topmost post-process effect is rendered. stage.DrawStateBits |= MaterialStates.DepthFunctionLess; } else if((_coverage == MaterialCoverage.Translucent) || (stage.IgnoreAlphaTest == true)) { // translucent surfaces can extend past the exactly marked depth buffer. stage.DrawStateBits |= MaterialStates.DepthFunctionLess | MaterialStates.DepthMask; } else { // opaque and perforated surfaces must exactly match the depth buffer, // which gets alpha test correct. stage.DrawStateBits |= MaterialStates.DepthFunctionEqual | MaterialStates.DepthMask; } _parsingData.Stages[i] = stage; } // determine if this surface will accept overlays / decals. if(_parsingData.ForceOverlays == true) { // explicitly flaged in material definition _allowOverlays = true; } else { if(this.IsDrawn == false) { _allowOverlays = false; } if(this.Coverage != MaterialCoverage.Opaque) { _allowOverlays = false; } if((this.SurfaceFlags & Renderer.SurfaceFlags.NoImpact) == Renderer.SurfaceFlags.NoImpact) { _allowOverlays = false; } } // add a tiny offset to the sort orders, so that different materials // that have the same sort value will at least sort consistantly, instead // of flickering back and forth. /* this messed up in-game guis if ( sort != SS_SUBVIEW ) { int hash, l; l = name.Length(); hash = 0; for ( int i = 0 ; i < l ; i++ ) { hash ^= name[i]; } sort += hash * 0.01; } */ if(_stageCount > 0) { _stages = _parsingData.Stages.ToArray(); } if(_parsingData.Operations.Count > 0) { _ops = _parsingData.Operations.ToArray(); } if(_registerCount > 0) { _expressionRegisters = new float[_registerCount]; Array.Copy(_parsingData.ShaderRegisters, _expressionRegisters, _registerCount); } // see if the registers are completely constant, and don't need to be evaluated per-surface. CheckForConstantRegisters(); _parsingData = null; // finish things up if(TestMaterialFlag(MaterialFlags.Defaulted) == true) { MakeDefault(); return false; } return true; }
/// <summary>
/// Sets defaultShader and returns false if the next token doesn't match.
/// </summary>
/// <param name="lexer"></param>
/// <param name="match"></param>
/// <returns></returns>
private bool MatchToken(idLexer lexer, string match)
{
    if(lexer.ExpectTokenString(match) == false)
    {
        this.MaterialFlag = MaterialFlags.Defaulted;
        return false;
    }

    return true;
}
private string ParsePastImageProgram(idLexer lexer)
{
    idImageProgramParser parser = new idImageProgramParser();
    parser.ParseImageProgram(lexer);

    return parser.Source;
}
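// Image programs are the nested function expressions material files can use in place of a
// plain image path, along the lines of
//
//   addnormals( textures/base_wall/lfwall27d_local.tga, heightmap( textures/base_wall/lfwall27d_h.tga, 4 ) )
//
// ParsePastImageProgram consumes the whole expression from the lexer and hands back the
// raw text so it can be used as an image name. (The specific file names above are
// illustrative only.)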
private CollisionModelNode ParseNodes(idLexer lexer, CollisionModel model, CollisionModelNode parent)
{
    model.NodeCount++;

    lexer.ExpectTokenString("(");

    CollisionModelNode node = new CollisionModelNode();
    node.Parent = parent;
    node.PlaneType = lexer.ParseInt();
    node.PlaneDistance = lexer.ParseFloat();

    lexer.ExpectTokenString(")");

    if(node.PlaneType != -1)
    {
        node.Children[0] = ParseNodes(lexer, model, node);
        node.Children[1] = ParseNodes(lexer, model, node);
    }

    return node;
}
private void ParsePolygons(idLexer lexer, CollisionModel model)
{
    idToken token = lexer.CheckTokenType(TokenType.Number, 0);
    float[] tmp;
    Vector3 normal;

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        // parse polygon
        int edgeCount = lexer.ParseInt();

        CollisionModelPolygon p = new CollisionModelPolygon();
        p.Material = _traceModelMaterial;
        p.Contents = ContentFlags.All;
        p.Edges = new int[edgeCount];

        lexer.ExpectTokenString("(");

        for(int i = 0; i < edgeCount; i++)
        {
            p.Edges[i] = lexer.ParseInt();
        }

        lexer.ExpectTokenString(")");

        tmp = lexer.Parse1DMatrix(3);
        normal = new Vector3(tmp[0], tmp[1], tmp[2]);

        p.Plane.Normal = normal;
        p.Plane.D = lexer.ParseFloat();

        tmp = lexer.Parse1DMatrix(3);
        p.Bounds.Min = new Vector3(tmp[0], tmp[1], tmp[2]);

        tmp = lexer.Parse1DMatrix(3);
        p.Bounds.Max = new Vector3(tmp[0], tmp[1], tmp[2]);

        token = lexer.ExpectTokenType(TokenType.String, 0);

        // get material
        p.Material = idE.DeclManager.FindMaterial(token.ToString());
        p.Contents = p.Material.ContentFlags;
        p.CheckCount = 0;

        // filter polygon into tree
        FilterPolygonIntoTree(model, model.Node, p);
    }
}
private void UpdateChoicesAndValues() { idToken token; string str2 = string.Empty; if(_latchedChoices.Equals(_choicesStr.ToString(), StringComparison.OrdinalIgnoreCase) == true) { _choices.Clear(); idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination); if(lexer.LoadMemory(_choicesStr.ToString(), "<ChoiceList>") == true) { while((token = lexer.ReadToken()) != null) { if(token.ToString() == ";") { if(str2.Length > 0) { str2 = idE.Language.Get(str2.TrimEnd()); _choices.Add(str2); str2 = string.Empty; } continue; } str2 += token.ToString(); str2 += " "; } if(str2.Length > 0) { _choices.Add(str2.TrimEnd()); } } _latchedChoices = _choicesStr.ToString(); } if((_choiceValues.ToString() != string.Empty) && (_latchedChoices.Equals(_choiceValues.ToString(), StringComparison.OrdinalIgnoreCase) == false)) { _values.Clear(); str2 = string.Empty; bool negNum = false; idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination); if(lexer.LoadMemory(_choiceValues.ToString(), "<ChoiceVals>") == true) { while((token = lexer.ReadToken()) != null) { if(token.ToString() == "-") { negNum = true; } else if(token.ToString() == ";") { if(str2.Length > 0) { _values.Add(str2.TrimEnd()); str2 = string.Empty; } } else if(negNum == true) { str2 += "-"; negNum = false; } else { str2 += token.ToString(); str2 += " "; } } if(str2.Length > 0) { _values.Add(str2.TrimEnd()); } } if(_choices.Count != _values.Count) { idConsole.Warning("idChoiceWindow:: gui '{0}' window '{1}' has value count unequal to choices count", this.UserInterface.SourceFile, this.Name); } _latchedChoices = _choiceValues.ToString(); } }
private void PushScript(idLexer script)
{
    foreach(idLexer s in _scriptStack)
    {
        if(s.FileName.Equals(script.FileName, StringComparison.OrdinalIgnoreCase) == true)
        {
            Warning("'{0}' recursively included", script.FileName);
            return;
        }
    }

    // push the script on the script stack
    _scriptStack.Push(script);
}
public bool Load(string fileName, bool clear)
{
    if(clear == true)
    {
        Clear();
    }

    byte[] data = idE.FileSystem.ReadFile(fileName);

    if(data == null)
    {
        // let whoever called us deal with the failure (so sys_lang can be reset)
        return false;
    }

    idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);
    lexer.LoadMemory(Encoding.UTF8.GetString(data), fileName);

    if(lexer.IsLoaded == false)
    {
        return false;
    }

    idToken token, token2;

    lexer.ExpectTokenString("{");

    while((token = lexer.ReadToken()) != null)
    {
        if(token.ToString() == "}")
        {
            break;
        }
        else if((token2 = lexer.ReadToken()) != null)
        {
            if(token2.ToString() == "}")
            {
                break;
            }

            _regexReplaceIndex = 0;

            // stock d3 language files contain sprintf formatters, we need to replace them
            string val = token2.ToString();
            val = Regex.Replace(val, "%s|%d|%x", new MatchEvaluator(ReplaceHandler));

            _elements.Add(token.ToString(), val);
        }
    }

    idConsole.WriteLine("{0} strings read from {1}", _elements.Count, fileName);

    return true;
}
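// The language files this reads are brace-wrapped lists of quoted key/value strings,
// roughly of this shape (the keys and values shown are illustrative only):
//
//   {
//       "#str_02936"    "Main Menu"
//       "#str_02937"    "Loading %s..."
//   }
//
// The %s/%d/%x printf-style placeholders in the values are rewritten through
// ReplaceHandler before the pair is stored in _elements.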
private bool Directive_Include() { idLexer script; idToken token; string path; if((token = ReadSourceToken()) == null) { Error("#include without file name"); return false; } else if(token.LinesCrossed > 0) { Error("#include without file name"); return false; } else if(token.Type == TokenType.String) { script = new idLexer(); // try relative to the current file path = Path.Combine(Path.GetDirectoryName(_scriptStack.Peek().FileName), token.ToString()); if(script.LoadFile(path, _osPath) == false) { // try absolute path path = token.ToString(); if(script.LoadFile(path, _osPath) == false) { // try from the include path path = _includePath + token.ToString(); if(script.LoadFile(path, _osPath) == false) { script = null; } } } } else if((token.Type == TokenType.Punctuation) && (token.ToString() == "<")) { path = _includePath; while((token = ReadSourceToken()) != null) { if(token.LinesCrossed > 0) { UnreadSourceToken(token); break; } else if((token.Type == TokenType.Punctuation) && (token.ToString() == ">")) { break; } path += token.ToString(); } if(token.ToString() != ">") { Warning("#include missing trailing >"); } else if(path == string.Empty) { Error("#include without file name between < >"); return false; } else if((_options & LexerOptions.NoBaseIncludes) == LexerOptions.NoBaseIncludes) { return true; } script = new idLexer(); throw new Exception("ZZ"); /*if(script.LoadFile(_includePath + path, _osPath) == false) { script = null; }*/ } else { Error("#include without file name"); return false; } if(script == null) { Error("file '{0}' not found", path); return false; } script.Options = _options; script.Punctuation = _punctuation; PushScript(script); return true; }
private ContentFlags ContentsFromString(string str)
{
    idLexer lexer = new idLexer();
    lexer.LoadMemory(str, "ContentsFromString");

    idToken token;
    ContentFlags contents = ContentFlags.None;
    string tmp;

    while((token = lexer.ReadToken()) != null)
    {
        if(token.ToString() == ",")
        {
            continue;
        }

        tmp = token.ToString();

        switch(tmp)
        {
            case "aas_solid":
                tmp = "AasSolid";
                break;

            case "aas_obstacle":
                tmp = "AasObstacle";
                break;

            case "flashlight_trigger":
                tmp = "FlashlightTrigger";
                break;
        }

        contents |= (ContentFlags) Enum.Parse(typeof(ContentFlags), tmp, true);
    }

    return contents;
}
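// ContentsFromString turns the comma-separated contents list stored in a .cm file into
// ContentFlags; for example "solid,opaque" would come back as
// ContentFlags.Solid | ContentFlags.Opaque (sample string illustrative only). The switch
// above exists solely to map the underscored spellings ("aas_solid", "aas_obstacle",
// "flashlight_trigger") onto their enum member names; everything else is handled by the
// case-insensitive Enum.Parse.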
/// <summary> /// /// </summary> /// <remarks> /// Normally this will use a .reg file instead of a .map file if it exists, /// which is what the game and dmap want, but the editor will want to always /// load a .map file. /// </remarks> /// <param name="fileName">Does not require an extension.</param> /// <param name="ignoreRegion"></param> /// <param name="osPath"></param> /// <returns></returns> public bool Parse(string fileName, bool ignoreRegion = false, bool osPath = false) { if(this.Disposed == true) { throw new ObjectDisposedException(this.GetType().Name); } _hasPrimitiveData = false; _name = Path.Combine(Path.GetDirectoryName(fileName), Path.GetFileNameWithoutExtension(fileName)); string fullName = _name; // no string concatenation for epairs and allow path names for materials idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowPathNames); idMapEntity mapEnt; if(ignoreRegion == false) { // try loading a .reg file first lexer.LoadFile(fullName + ".reg", osPath); } if(lexer.IsLoaded == false) { // now try a .map file lexer.LoadFile(fullName + ".map", osPath); if(lexer.IsLoaded == false) { // didn't get anything at all return false; } } _version = idMapFile.OldMapVersion; _fileTime = lexer.FileTime; _entities.Clear(); if(lexer.CheckTokenString("Version") == true) { _version = lexer.ReadTokenOnLine().ToFloat(); } while(true) { if((mapEnt = idMapEntity.Parse(lexer, (_entities.Count == 0), _version)) == null) { break; } _entities.Add(mapEnt); } idConsole.Warning("TODO: SetGeometryCRC();"); // if the map has a worldspawn if(_entities.Count > 0) { // "removeEntities" "classname" can be set in the worldspawn to remove all entities with the given classname foreach(KeyValuePair<string, string> removeEntities in _entities[0].Dict.MatchPrefix("removeEntities")) { RemoveEntities(removeEntities.Value); } // "overrideMaterial" "material" can be set in the worldspawn to reset all materials string material; int entityCount = _entities.Count; int primitiveCount = 0; int sideCount = 0; if((material = (_entities[0].Dict.GetString("overrideMaterial", ""))) != string.Empty) { for(int i = 0; i < entityCount; i++) { mapEnt = _entities[i]; primitiveCount = mapEnt.Primitives.Count; for(int j = 0; j < primitiveCount; j++) { idMapPrimitive mapPrimitive = mapEnt.GetPrimitive(j); switch(mapPrimitive.Type) { case MapPrimitiveType.Brush: idMapBrush mapBrush = (idMapBrush) mapPrimitive; sideCount = mapBrush.SideCount; for(int k = 0; k < sideCount; k++) { mapBrush.GetSide(k).Material = material; } break; case MapPrimitiveType.Patch: idConsole.Warning("TODO: PATCH"); // TODO: ((idMapPatch) mapPrimitive).Material = material; break; } } } } // force all entities to have a name key/value pair if(_entities[0].Dict.GetBool("forceEntityNames") == true) { for(int i = 1; i < entityCount; i++) { mapEnt = _entities[i]; if(mapEnt.Dict.ContainsKey("name") == false) { mapEnt.Dict.Set("name", string.Format("{0}{1}", mapEnt.Dict.GetString("classname", "forcedName"), i)); } } } // move the primitives of any func_group entities to the worldspawn if(_entities[0].Dict.GetBool("moveFuncGroups") == true) { for(int i = 1; i < entityCount; i++) { mapEnt = _entities[i]; if(mapEnt.Dict.GetString("classname").ToLower() == "func_group") { _entities[0].Primitives.AddRange(mapEnt.Primitives); mapEnt.Primitives.Clear(); } } } } _hasPrimitiveData = true; return true; }
private bool LoadCollisionModelFile(string name, ulong mapFileCRC)
{
    // load it
    string fileName = Path.Combine(Path.GetDirectoryName(name), Path.GetFileNameWithoutExtension(name) + Extension);

    idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoDollarPrecompilation);

    if(lexer.LoadFile(fileName) == false)
    {
        return false;
    }

    idToken token;

    if(lexer.ExpectTokenString(TokenFileID) == false)
    {
        idConsole.Warning("{0} is not a CM file.", fileName);
    }
    else if(((token = lexer.ReadToken()) == null) || (token.ToString() != FileVersion))
    {
        idConsole.Warning("{0} has version {1} instead of {2}", fileName, token, FileVersion);
    }
    else if((token = lexer.ExpectTokenType(TokenType.Number, TokenSubType.Integer)) == null)
    {
        idConsole.Warning("{0} has no map file CRC", fileName);
    }
    else
    {
        ulong crc = token.ToUInt64();

        if((mapFileCRC != 0) && (crc != mapFileCRC))
        {
            idConsole.WriteLine("{0} is out of date", fileName);
        }
        else
        {
            // parse the file
            while(true)
            {
                if((token = lexer.ReadToken()) == null)
                {
                    break;
                }

                if(token.ToString().ToLower() == "collisionmodel")
                {
                    if(ParseCollisionModel(lexer) == false)
                    {
                        return false;
                    }
                }
                else
                {
                    lexer.Error("idCollisionModelManagerLocal::LoadCollisionModelFile: bad token \"{0}\"", token);
                }
            }

            return true;
        }
    }

    return false;
}
public static idMapBrush Parse(idLexer lexer, Vector3 origin, bool newFormat = true, float version = idMapFile.CurrentMapVersion) { idToken token; idMapBrushSide side; List<idMapBrushSide> sides = new List<idMapBrushSide>(); idDict dict = new idDict(); Vector3[] planePoints = new Vector3[3]; if(lexer.ExpectTokenString("{") == false) { return null; } do { if((token = lexer.ReadToken()) == null) { lexer.Error("idMapBrush::Parse: unexpected EOF"); return null; } if(token.ToString() == "}") { break; } // here we may have to jump over brush epairs ( only used in editor ) do { // if token is a brace if(token.ToString() == "(") { break; } // the token should be a key string for a key/value pair if(token.Type != TokenType.String) { lexer.Error("idMapBrush::Parse: unexpected {0}, expected ( or epair key string", token.ToString()); return null; } string key = token.ToString(); if(((token = lexer.ReadTokenOnLine()) == null) || (token.Type != TokenType.String)) { lexer.Error("idMapBrush::Parse: expected epair value string not found"); return null; } dict.Set(key, token.ToString()); // try to read the next key if((token = lexer.ReadToken()) == null) { lexer.Error("idMapBrush::Parse: unexpected EOF"); return null; } } while(true); lexer.UnreadToken = token; side = new idMapBrushSide(); sides.Add(side); if(newFormat == true) { float[] tmp = lexer.Parse1DMatrix(4); if(tmp == null) { lexer.Error("idMapBrush::Parse: unable to read brush side plane definition"); return null; } else { side.Plane = new Plane(tmp[0], tmp[1], tmp[2], tmp[3]); } } else { // read the three point plane definition float[] tmp, tmp2, tmp3; if(((tmp = lexer.Parse1DMatrix(3)) == null) || ((tmp2 = lexer.Parse1DMatrix(3)) == null) || ((tmp3 = lexer.Parse1DMatrix(3)) == null)) { lexer.Error("idMapBrush::Parse: unable to read brush side plane definition"); return null; } planePoints[0] = new Vector3(tmp[0], tmp[1], tmp[2]) - origin; planePoints[1] = new Vector3(tmp2[0], tmp2[1], tmp2[2]) - origin; planePoints[2] = new Vector3(tmp3[0], tmp3[1], tmp3[2]) - origin; side.Plane.FromPoints(planePoints[0], planePoints[1], planePoints[2]); } // read the texture matrix // this is odd, because the texmat is 2D relative to default planar texture axis float[,] tmp5 = lexer.Parse2DMatrix(2, 3); if(tmp5 == null) { lexer.Error("idMapBrush::Parse: unable to read brush side texture matrix"); return null; } side.TextureMatrix[0] = new Vector3(tmp5[0, 0], tmp5[0, 1], tmp5[0, 2]); side.TextureMatrix[1] = new Vector3(tmp5[1, 0], tmp5[1, 1], tmp5[1, 2]); side.Origin = origin; // read the material if((token = lexer.ReadTokenOnLine()) == null) { lexer.Error("idMapBrush::Parse: unable to read brush side material"); return null; } // we had an implicit 'textures/' in the old format... if(version < 2.0f) { side.Material = "textures/" + token.ToString(); } else { side.Material = token.ToString(); } // Q2 allowed override of default flags and values, but we don't any more if(lexer.ReadTokenOnLine() != null) { if(lexer.ReadTokenOnLine() != null) { if(lexer.ReadTokenOnLine() != null) { } } } } while(true); if(lexer.ExpectTokenString("}") == false) { return null; } idMapBrush brush = new idMapBrush(); foreach(idMapBrushSide s in sides) { brush.AddSide(s); } brush.Dict = dict; return brush; }
private void ParseBrushes(idLexer lexer, CollisionModel model)
{
    idToken token = lexer.CheckTokenType(TokenType.Number, 0);
    int planeCount;
    CollisionModelBrush b;
    float[] tmp;

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        // parse brush
        planeCount = lexer.ParseInt();

        b = new CollisionModelBrush();
        b.Contents = ContentFlags.All;
        b.Material = _traceModelMaterial;
        b.Planes = new Plane[planeCount];

        lexer.ExpectTokenString("{");

        for(int i = 0; i < planeCount; i++)
        {
            tmp = lexer.Parse1DMatrix(3);

            b.Planes[i].Normal = new Vector3(tmp[0], tmp[1], tmp[2]);
            b.Planes[i].D = lexer.ParseFloat();
        }

        lexer.ExpectTokenString("}");

        tmp = lexer.Parse1DMatrix(3);
        b.Bounds.Min = new Vector3(tmp[0], tmp[1], tmp[2]);

        tmp = lexer.Parse1DMatrix(3);
        b.Bounds.Max = new Vector3(tmp[0], tmp[1], tmp[2]);

        token = lexer.ReadToken();

        if(token.Type == TokenType.Number)
        {
            b.Contents = (ContentFlags) token.ToInt32(); // old .cm files use a single integer
        }
        else
        {
            b.Contents = ContentsFromString(token.ToString());
        }

        b.CheckCount = 0;
        b.PrimitiveCount = 0;

        // filter brush into tree
        FilterBrushIntoTree(model, model.Node, b);
    }
}
public override bool Parse(string text) { if(this.Disposed == true) { throw new ObjectDisposedException(this.GetType().Name); } idLexer lexer = new idLexer(idDeclFile.LexerOptions); lexer.LoadMemory(text, this.FileName, this.LineNumber); lexer.SkipUntilString("{"); idToken token; idToken token2; string value; while(true) { if((token = lexer.ReadToken()) == null) { break; } value = token.ToString(); if(value == "}") { break; } if(token.Type != TokenType.String) { lexer.Warning("Expected quoted string, but found '{0}'", value); MakeDefault(); return false; } if((token2 = lexer.ReadToken()) == null) { lexer.Warning("Unexpected end of file"); MakeDefault(); return false; } if(_dict.ContainsKey(value) == true) { lexer.Warning("'{0}' already defined", value); } _dict.Set(value, token2.ToString()); } // we always automatically set a "classname" key to our name _dict.Set("classname", this.Name); // "inherit" keys will cause all values from another entityDef to be copied into this one // if they don't conflict. We can't have circular recursions, because each entityDef will // never be parsed more than once // find all of the dicts first, because copying inherited values will modify the dict List<idDeclEntity> defList = new List<idDeclEntity>(); List<string> keysToRemove = new List<string>(); foreach(KeyValuePair<string, string> kvp in _dict.MatchPrefix("inherit")) { idDeclEntity copy = idE.DeclManager.FindType<idDeclEntity>(DeclType.EntityDef, kvp.Value, false); if(copy == null) { lexer.Warning("Unknown entityDef '{0}' inherited by '{1}'", kvp.Value, this.Name); } else { defList.Add(copy); } // delete this key/value pair keysToRemove.Add(kvp.Key); } _dict.Remove(keysToRemove.ToArray()); // now copy over the inherited key / value pairs foreach(idDeclEntity def in defList) { _dict.SetDefaults(def._dict); } // precache all referenced media // do this as long as we arent in modview idE.Game.CacheDictionaryMedia(_dict); return true; }
private bool ParseCollisionModel(idLexer lexer)
{
    CollisionModel model = new CollisionModel();

    _models[_modelCount++] = model;

    // parse the file
    idToken token = lexer.ExpectTokenType(TokenType.String, 0);
    string tokenLower;

    model.Name = token.ToString();
    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        token = lexer.ReadToken();
        tokenLower = token.ToString().ToLower();

        if(tokenLower == "vertices")
        {
            ParseVertices(lexer, model);
        }
        else if(tokenLower == "edges")
        {
            ParseEdges(lexer, model);
        }
        else if(tokenLower == "nodes")
        {
            lexer.ExpectTokenString("{");
            model.Node = ParseNodes(lexer, model, null);
            lexer.ExpectTokenString("}");
        }
        else if(tokenLower == "polygons")
        {
            ParsePolygons(lexer, model);
        }
        else if(tokenLower == "brushes")
        {
            ParseBrushes(lexer, model);
        }
        else
        {
            lexer.Error("ParseCollisionModel: bad token \"{0}\"", token);
        }
    }

    // calculate edge normals
    _checkCount++;

    idConsole.Warning("TODO: CalculateEdgeNormals(model, model.Node);");

    // get model bounds from brush and polygon bounds
    model.Bounds = GetNodeBounds(model.Node);

    // get model contents
    model.Contents = GetNodeContents(model.Node);

    idConsole.Warning("TODO: used memory");

    // total memory used by this model
    /*model->usedMemory = model->numVertices * sizeof(cm_vertex_t) +
        model->numEdges * sizeof(cm_edge_t) +
        model->polygonMemory +
        model->brushMemory +
        model->numNodes * sizeof(cm_node_t) +
        model->numPolygonRefs * sizeof(cm_polygonRef_t) +
        model->numBrushRefs * sizeof(cm_brushRef_t);*/

    return true;
}
public bool LoadAnimation(string fileName) { idToken token; idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters | LexerOptions.NoStringConcatination); if(lexer.LoadFile(fileName) == false) { return false; } Clear(); _name = fileName; lexer.ExpectTokenString(idRenderModel_MD5.VersionString); int version = lexer.ParseInt(); if(version != idRenderModel_MD5.Version) { lexer.Error("Invalid version {0}. Should be version {1}", version, idRenderModel_MD5.Version); } // skip the commandline lexer.ExpectTokenString("commandline"); lexer.ReadToken(); // parse num frames lexer.ExpectTokenString("numFrames"); int frameCount = lexer.ParseInt(); if(frameCount <= 0) { lexer.Error("Invalid number of frames: {0}", frameCount); } // parse num joints lexer.ExpectTokenString("numJoints"); int jointCount = lexer.ParseInt(); if(jointCount <= 0) { lexer.Error("Invalid number of joints: {0}", jointCount); } // parse frame rate lexer.ExpectTokenString("frameRate"); _frameRate = lexer.ParseInt(); if(_frameRate < 0) { lexer.Error("Invalid frame rate: {0}", _frameRate); } // parse number of animated components lexer.ExpectTokenString("numAnimatedComponents"); _animatedComponentCount = lexer.ParseInt(); if((_animatedComponentCount < 0) || (_animatedComponentCount > (jointCount * 6))) { lexer.Error("Invalid number of animated components: {0}", _animatedComponentCount); } // parse the hierarchy _jointInfo = new JointAnimationInfo[jointCount]; lexer.ExpectTokenString("hierarchy"); lexer.ExpectTokenString("{"); for(int i = 0; i < jointCount; i++) { token = lexer.ReadToken(); _jointInfo[i] = new JointAnimationInfo(); _jointInfo[i].NameIndex = idR.AnimManager.GetJointIndex(token.ToString()); // parse parent num _jointInfo[i].ParentIndex = lexer.ParseInt(); if(_jointInfo[i].ParentIndex >= i) { lexer.Error("Invalid parent num: {0}", _jointInfo[i].ParentIndex); } if((i != 0) && (_jointInfo[i].ParentIndex < 0)) { lexer.Error("Animations may have only one root joint"); } // parse anim bits _jointInfo[i].AnimationBits = (AnimationBits) lexer.ParseInt(); if(((int) _jointInfo[i].AnimationBits & ~63) != 0) { lexer.Error("Invalid anim bits: {0}", _jointInfo[i].AnimationBits); } // parse first component _jointInfo[i].FirstComponent = lexer.ParseInt(); if((_animatedComponentCount > 0) && ((_jointInfo[i].FirstComponent < 0) || (_jointInfo[i].FirstComponent >= _animatedComponentCount))) { lexer.Error("Invalid first component: {0}", _jointInfo[i].FirstComponent); } } lexer.ExpectTokenString("}"); // parse bounds lexer.ExpectTokenString("bounds"); lexer.ExpectTokenString("{"); _bounds = new idBounds[frameCount]; for(int i = 0; i < frameCount; i++) { float[] tmp = lexer.Parse1DMatrix(3); float[] tmp2 = lexer.Parse1DMatrix(3); _bounds[i] = new idBounds( new Vector3(tmp[0], tmp[1], tmp[2]), new Vector3(tmp2[0], tmp2[1], tmp2[2]) ); } lexer.ExpectTokenString("}"); // parse base frame _baseFrame = new idJointQuaternion[jointCount]; lexer.ExpectTokenString("baseframe"); lexer.ExpectTokenString("{"); for(int i = 0; i < jointCount; i++) { float[] tmp = lexer.Parse1DMatrix(3); float[] tmp2 = lexer.Parse1DMatrix(3); idCompressedQuaternion q = new idCompressedQuaternion(tmp2[0], tmp2[1], tmp2[2]); _baseFrame[i] = new idJointQuaternion(); _baseFrame[i].Translation = new Vector3(tmp[0], tmp[1], tmp[2]); _baseFrame[i].Quaternion = q.ToQuaternion(); } lexer.ExpectTokenString("}"); // parse frames _componentFrames = new float[_animatedComponentCount * frameCount]; int frameOffset = 0; for(int i = 0; i < 
frameCount; i++) { lexer.ExpectTokenString("frame"); int count = lexer.ParseInt(); if(count != i) { lexer.Error("Expected frame number {0}", i); } lexer.ExpectTokenString("{"); for(int j = 0; j < _animatedComponentCount; j++, frameOffset++) { _componentFrames[frameOffset] = lexer.ParseFloat(); } lexer.ExpectTokenString("}"); } // get total move delta if(_animatedComponentCount == 0) { _totalDelta = Vector3.Zero; } else { int componentOffset = _jointInfo[0].FirstComponent; if((_jointInfo[0].AnimationBits & AnimationBits.TranslationX) == AnimationBits.TranslationX) { for(int i = 0; i < frameCount; i++) { _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.X; } _totalDelta.X = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))]; componentOffset++; } else { _totalDelta.X = 0; } if((_jointInfo[0].AnimationBits & AnimationBits.TranslationY) == AnimationBits.TranslationY) { for(int i = 0; i < frameCount; i++) { _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Y; } _totalDelta.Y = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))]; componentOffset++; } else { _totalDelta.Y = 0; } if((_jointInfo[0].AnimationBits & AnimationBits.TranslationZ) == AnimationBits.TranslationZ) { for(int i = 0; i < frameCount; i++) { _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Z; } _totalDelta.Z = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))]; } else { _totalDelta.Z = 0; } } _baseFrame[0].Translation = Vector3.Zero; // we don't count last frame because it would cause a 1 frame pause at the end _animLength = ((frameCount - 1) * 1000 + _frameRate - 1) / _frameRate; // done return true; }
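// The animation length above is computed in milliseconds, with the "+ _frameRate - 1"
// term making the integer division round up; the last frame is not counted so playback
// does not pause for a frame at the end. For example (illustrative numbers), 16 frames at
// 24 fps gives _animLength = (15 * 1000 + 23) / 24 = 15023 / 24 = 625 ms.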
private void ParseEdges(idLexer lexer, CollisionModel model)
{
    lexer.ExpectTokenString("{");

    int edgeCount = lexer.ParseInt();

    model.Edges = new CollisionModelEdge[edgeCount];

    for(int i = 0; i < edgeCount; i++)
    {
        lexer.ExpectTokenString("(");
        model.Edges[i].VertexCount = new int[] { lexer.ParseInt(), lexer.ParseInt() };
        lexer.ExpectTokenString(")");

        model.Edges[i].Side = 0;
        model.Edges[i].SideSet = 0;
        model.Edges[i].Internal = (ushort) lexer.ParseInt();
        model.Edges[i].UserCount = (ushort) lexer.ParseInt();
        model.Edges[i].Normal = Vector3.Zero;
        model.Edges[i].CheckCount = 0;

        model.InternalEdgeCount += model.Edges[i].Internal;
    }

    lexer.ExpectTokenString("}");
}