/// <summary>
/// Reads one joint entry from an MD5 mesh file: the joint name, its parent
/// index and the default pose (translation + compressed unit quaternion).
/// </summary>
/// <param name="lexer">Lexer positioned at the start of a joint entry.</param>
/// <param name="joint">Joint instance to fill in.</param>
/// <param name="defaultPose">Receives the joint's default pose.</param>
private void ParseJoint(idLexer lexer, idMD5Joint joint, ref idJointQuaternion defaultPose)
{
	// joint name
	joint.Name = lexer.ReadToken().ToString();

	// parent index; a negative value means the joint is a root.
	int parentIndex = lexer.ParseInt();

	if(parentIndex >= 0)
	{
		// range check against the joint table; the upper bound excludes the
		// final slot — presumably the joint currently being parsed. TODO confirm.
		if(parentIndex >= (_joints.Length - 1))
		{
			lexer.Error("Invalid parent for joint '{0}'", joint.Name);
		}

		joint.Parent = _joints[parentIndex];
	}

	// default pose: a translation vector followed by the x/y/z of a unit
	// quaternion; W is reconstructed from the first three components.
	float[] translation = lexer.Parse1DMatrix(3);
	defaultPose.Translation = new Vector3(translation[0], translation[1], translation[2]);

	float[] rotation = lexer.Parse1DMatrix(3);
	defaultPose.Quaternion = new Quaternion(rotation[0], rotation[1], rotation[2], 0);
	defaultPose.Quaternion.W = idHelper.CalculateW(defaultPose.Quaternion);
}
/// <summary>
/// Converts a comma separated list of content flag names into a
/// <see cref="ContentFlags"/> bit mask.
/// </summary>
/// <param name="str">Comma separated flag names (e.g. "solid,opaque").</param>
/// <returns>The OR of all flags named in the string.</returns>
private ContentFlags ContentsFromString(string str)
{
	ContentFlags result = ContentFlags.None;

	idLexer lexer = new idLexer();
	lexer.LoadMemory(str, "ContentsFromString");

	idToken token;

	while((token = lexer.ReadToken()) != null)
	{
		string name = token.ToString();

		// entries are separated by commas
		if(name == ",")
		{
			continue;
		}

		// a few flag names don't map directly onto their enum member spelling
		if(name == "aas_solid")
		{
			name = "AasSolid";
		}
		else if(name == "aas_obstacle")
		{
			name = "AasObstacle";
		}
		else if(name == "flashlight_trigger")
		{
			name = "FlashlightTrigger";
		}

		// case-insensitive lookup; an unknown name will throw, as before
		result |= (ContentFlags) Enum.Parse(typeof(ContentFlags), name, true);
	}

	return result;
}
/// <summary>
/// Loads a language dictionary file and adds its key/value pairs to the
/// element table, replacing stock sprintf-style formatters in the values.
/// </summary>
/// <param name="fileName">Path of the language file, read through the engine file system.</param>
/// <param name="clear">True to discard any previously loaded strings first.</param>
/// <returns>False if the file could not be read or tokenized; otherwise true.</returns>
public bool Load(string fileName, bool clear) {
	if(clear == true) {
		Clear();
	}

	byte[] data = idE.FileSystem.ReadFile(fileName);

	if(data == null) {
		// let whoever called us deal with the failure (so sys_lang can be reset)
		return false;
	}

	idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);
	lexer.LoadMemory(Encoding.UTF8.GetString(data), fileName);

	if(lexer.IsLoaded == false) {
		return false;
	}

	idToken token, token2;

	// the whole file is one big "{ "key" "value" ... }" block
	lexer.ExpectTokenString("{");

	while((token = lexer.ReadToken()) != null) {
		// a "}" in either the key or the value position ends the block
		if(token.ToString() == "}") {
			break;
		} else if((token2 = lexer.ReadToken()) != null) {
			if(token2.ToString() == "}") {
				break;
			}

			// reset the replacement counter consumed by ReplaceHandler for this value
			_regexReplaceIndex = 0;

			// stock d3 language files contain sprintf formatters, we need to replace them
			string val = token2.ToString();
			val = Regex.Replace(val, "%s|%d|%x", new MatchEvaluator(ReplaceHandler));

			// NOTE(review): Add throws on duplicate keys — presumably stock language
			// files never repeat a key; confirm before loading modded files.
			_elements.Add(token.ToString(), val);
		}
	}

	idConsole.WriteLine("{0} strings read from {1}", _elements.Count, fileName);

	return true;
}
/// <summary>
/// Rebuilds the displayed choice list and the parallel value list from the
/// semicolon-separated source strings, re-parsing each only when it has
/// changed since the last update.
/// </summary>
private void UpdateChoicesAndValues()
{
	idToken token;
	string str2 = string.Empty;

	// BUGFIX: was "== true", which rebuilt the choices only when the source
	// string had NOT changed (inverted relative to the values branch below).
	if(_latchedChoices.Equals(_choicesStr.ToString(), StringComparison.OrdinalIgnoreCase) == false)
	{
		_choices.Clear();

		idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);

		if(lexer.LoadMemory(_choicesStr.ToString(), "<ChoiceList>") == true)
		{
			while((token = lexer.ReadToken()) != null)
			{
				// ";" terminates one choice entry
				if(token.ToString() == ";")
				{
					if(str2.Length > 0)
					{
						// choices are localized for display
						str2 = idE.Language.Get(str2.TrimEnd());
						_choices.Add(str2);
						str2 = string.Empty;
					}

					continue;
				}

				str2 += token.ToString();
				str2 += " ";
			}

			// flush a trailing entry with no closing ";"
			if(str2.Length > 0)
			{
				_choices.Add(str2.TrimEnd());
			}
		}

		_latchedChoices = _choicesStr.ToString();
	}

	// NOTE(review): both branches latch into _latchedChoices; the original
	// engine used a separate latched field for the values string — confirm
	// whether a distinct _latchedValues field should be introduced.
	if((_choiceValues.ToString() != string.Empty) && (_latchedChoices.Equals(_choiceValues.ToString(), StringComparison.OrdinalIgnoreCase) == false))
	{
		_values.Clear();

		str2 = string.Empty;
		bool negNum = false;

		idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);

		if(lexer.LoadMemory(_choiceValues.ToString(), "<ChoiceVals>") == true)
		{
			while((token = lexer.ReadToken()) != null)
			{
				if(token.ToString() == "-")
				{
					// remember the sign; the lexer hands "-" back as its own token
					negNum = true;
				}
				else if(token.ToString() == ";")
				{
					if(str2.Length > 0)
					{
						_values.Add(str2.TrimEnd());
						str2 = string.Empty;
					}
				}
				else
				{
					// BUGFIX: previously the token following "-" was dropped —
					// only the sign was appended and the number itself was lost.
					if(negNum == true)
					{
						str2 += "-";
						negNum = false;
					}

					str2 += token.ToString();
					str2 += " ";
				}
			}

			// flush a trailing entry with no closing ";"
			if(str2.Length > 0)
			{
				_values.Add(str2.TrimEnd());
			}
		}

		// the lists are used in parallel, so their lengths must agree
		if(_choices.Count != _values.Count)
		{
			idConsole.Warning("idChoiceWindow:: gui '{0}' window '{1}' has value count unequal to choices count", this.UserInterface.SourceFile, this.Name);
		}

		_latchedChoices = _choiceValues.ToString();
	}
}
/// <summary>
/// Parses a model declaration: keyword sections for inherit, skin, mesh,
/// remove, anim, offset and channel, terminated by a closing brace.
/// </summary>
/// <param name="text">Raw declaration text.</param>
/// <returns>False (after MakeDefault) on any parse error; otherwise true.</returns>
/// <exception cref="ObjectDisposedException">If this decl has been disposed.</exception>
public override bool Parse(string text) {
	if(this.Disposed == true) {
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	int defaultAnimationCount = 0;
	idToken token;
	idToken token2;
	string tokenValue;
	string fileName;
	string extension;
	int count;
	idMD5Joint[] md5Joints;

	// keyword dispatch loop; runs until EOF or the closing "}"
	while(true) {
		if((token = lexer.ReadToken()) == null) {
			break;
		}

		tokenValue = token.ToString();

		if(tokenValue == "}") {
			break;
		}

		if(tokenValue == "inherit") {
			// not yet ported; the original C++ implementation is kept below for reference
			idConsole.WriteLine("TODO: inherit");

			/*if( !src.ReadToken( &token2 ) ) { src.Warning( "Unexpected end of file" ); MakeDefault(); return false; } const idDeclModelDef *copy = static_cast<const idDeclModelDef *>( declManager->FindType( DECL_MODELDEF, token2, false ) ); if ( !copy ) { common->Warning( "Unknown model definition '%s'", token2.c_str() ); } else if ( copy->GetState() == DS_DEFAULTED ) { common->Warning( "inherited model definition '%s' defaulted", token2.c_str() ); MakeDefault(); return false; } else { CopyDecl( copy ); numDefaultAnims = anims.Num(); }*/
		} else if(tokenValue == "skin") {
			if((token2 = lexer.ReadToken()) == null) {
				lexer.Warning("Unexpected end of file");
				MakeDefault();
				return false;
			}

			_skin = idE.DeclManager.FindSkin(token2.ToString());

			if(_skin == null) {
				lexer.Warning("Skin '{0}' not found", token2.ToString());
				MakeDefault();
				return false;
			}
		} else if(tokenValue == "mesh") {
			if((token2 = lexer.ReadToken()) == null) {
				lexer.Warning("Unexpected end of file");
				MakeDefault();
				return false;
			}

			fileName = token2.ToString();
			extension = Path.GetExtension(fileName);

			// only MD5 meshes are valid here
			if(extension != idRenderModel_MD5.MeshExtension) {
				lexer.Warning("Invalid model for MD5 mesh");
				MakeDefault();
				return false;
			}

			_model = idE.RenderModelManager.FindModel(fileName);

			if(_model == null) {
				lexer.Warning("Model '{0}' not found", fileName);
				MakeDefault();
				return false;
			} else if(_model.IsDefault == true) {
				lexer.Warning("Model '{0}' defaulted", fileName);
				MakeDefault();
				return false;
			}

			// get the number of joints
			count = _model.JointCount;

			if(count == 0) {
				lexer.Warning("Model '{0}' has no joints", fileName);
			}

			// set up the joint hierarchy; every joint starts on the "all" channel
			// and channel 0 initially contains every joint index
			md5Joints = _model.Joints;

			_joints = new JointInfo[count];
			_jointParents = new int[count];
			_channelJoints = new int[(int) AnimationChannel.Count][];
			_channelJoints[0] = new int[count];

			for(int i = 0; i < count; i++) {
				_joints[i] = new JointInfo();
				_joints[i].Channel = AnimationChannel.All;
				_joints[i].Index = i;

				if(md5Joints[i].Parent != null) {
					_joints[i].ParentIndex = _model.GetJointIndex(md5Joints[i].Parent);
				} else {
					// root joint
					_joints[i].ParentIndex = -1;
				}

				_jointParents[i] = _joints[i].ParentIndex;
				_channelJoints[0][i] = i;
			}
		} else if(tokenValue == "remove") {
			// not yet ported; the original C++ implementation is kept below for reference
			idConsole.Warning("TODO: remove");

			// removes any anims whos name matches
			/*if( !src.ReadToken( &token2 ) ) { src.Warning( "Unexpected end of file" ); MakeDefault(); return false; } num = 0; for( i = 0; i < anims.Num(); i++ ) { if ( ( token2 == anims[ i ]->Name() ) || ( token2 == anims[ i ]->FullName() ) ) { delete anims[ i ]; anims.RemoveIndex( i ); if ( i >= numDefaultAnims ) { src.Warning( "Anim '%s' was not inherited. Anim should be removed from the model def.", token2.c_str() ); MakeDefault(); return false; } i--; numDefaultAnims--; num++; continue; } } if ( !num ) { src.Warning( "Couldn't find anim '%s' to remove", token2.c_str() ); MakeDefault(); return false; }*/
		} else if(tokenValue == "anim") {
			// anims require the joint hierarchy, so the mesh must come first
			if(_model == null) {
				lexer.Warning("Must specify mesh before defining anims");
				MakeDefault();
				return false;
			} else if(ParseAnimation(lexer, defaultAnimationCount) == false) {
				MakeDefault();
				return false;
			}
		} else if(tokenValue == "offset") {
			float[] tmp = lexer.Parse1DMatrix(3);

			if(tmp == null) {
				lexer.Warning("Expected vector following 'offset'");
				MakeDefault();
				return false;
			}

			_offset = new Vector3(tmp[0], tmp[1], tmp[2]);
		} else if(tokenValue == "channel") {
			if(_model == null) {
				lexer.Warning("Must specify mesh before defining channels");
				MakeDefault();
				return false;
			}

			// set the channel for a group of joints
			if((token2 = lexer.ReadToken()) == null) {
				lexer.Warning("Unexpected end of file");
				MakeDefault();
				return false;
			}

			// NOTE: the check is for "(" although the warning text says "{";
			// this mismatch is preserved from the original engine source.
			if(lexer.CheckTokenString("(") == false) {
				lexer.Warning("Expected { after '{0}'", token2.ToString());
				MakeDefault();
				return false;
			}

			// look up the named channel (skipping the implicit "all" channel)
			int i;
			int channelCount = (int) AnimationChannel.Count;

			for(i = (int) AnimationChannel.All + 1; i < channelCount; i++) {
				if(ChannelNames[i].Equals(token2.ToString(), StringComparison.OrdinalIgnoreCase) == true) {
					break;
				}
			}

			if(i >= channelCount) {
				lexer.Warning("Unknown channel '{0}'", token2.ToString());
				MakeDefault();
				return false;
			}

			int channel = i;

			// gather the joint name expression up to the closing ")"; the
			// "*" and "-" wildcard/range operators are not space-separated
			StringBuilder jointNames = new StringBuilder();
			string token2Value;

			while(lexer.CheckTokenString(")") == false) {
				if((token2 = lexer.ReadToken()) == null) {
					lexer.Warning("Unexpected end of file");
					MakeDefault();
					return false;
				}

				token2Value = token2.ToString();
				jointNames.Append(token2Value);

				if((token2Value != "*") && (token2Value != "-")) {
					jointNames.Append(" ");
				}
			}

			int[] jointList = GetJointList(jointNames.ToString());
			int jointLength = jointList.Length;

			List<int> channelJoints = new List<int>();

			// assign each listed joint to this channel; a joint may belong to
			// only one channel besides the implicit "all"
			for(count = i = 0; i < jointLength; i++) {
				int jointIndex = jointList[i];

				// NOTE: "Join" (sic) is preserved from the original warning text
				if(_joints[jointIndex].Channel != AnimationChannel.All) {
					lexer.Warning("Join '{0}' assigned to multiple channels", _model.GetJointName(jointIndex));
					continue;
				}

				_joints[jointIndex].Channel = (AnimationChannel) channel;
				channelJoints.Add(jointIndex);
			}

			_channelJoints[channel] = channelJoints.ToArray();
		} else {
			lexer.Warning("unknown token '{0}'", token.ToString());
			MakeDefault();
			return false;
		}
	}

	return true;
}
/// <summary>
/// Parses an entityDef declaration: a flat block of quoted key/value pairs,
/// then resolves any "inherit" keys by copying values from other entityDefs.
/// </summary>
/// <param name="text">Raw declaration text.</param>
/// <returns>False (after MakeDefault) on a parse error; otherwise true.</returns>
/// <exception cref="ObjectDisposedException">If this decl has been disposed.</exception>
public override bool Parse(string text) {
	if(this.Disposed == true) {
		throw new ObjectDisposedException(this.GetType().Name);
	}

	idLexer lexer = new idLexer(idDeclFile.LexerOptions);
	lexer.LoadMemory(text, this.FileName, this.LineNumber);
	lexer.SkipUntilString("{");

	idToken token;
	idToken token2;
	string value;

	// read "key" "value" pairs until the closing brace
	while(true) {
		if((token = lexer.ReadToken()) == null) {
			break;
		}

		value = token.ToString();

		if(value == "}") {
			break;
		}

		// keys must be quoted strings
		if(token.Type != TokenType.String) {
			lexer.Warning("Expected quoted string, but found '{0}'", value);
			MakeDefault();
			return false;
		}

		if((token2 = lexer.ReadToken()) == null) {
			lexer.Warning("Unexpected end of file");
			MakeDefault();
			return false;
		}

		// duplicates warn but the later definition wins
		if(_dict.ContainsKey(value) == true) {
			lexer.Warning("'{0}' already defined", value);
		}

		_dict.Set(value, token2.ToString());
	}

	// we always automatically set a "classname" key to our name
	_dict.Set("classname", this.Name);

	// "inherit" keys will cause all values from another entityDef to be copied into this one
	// if they don't conflict. We can't have circular recursions, because each entityDef will
	// never be parsed more than once

	// find all of the dicts first, because copying inherited values will modify the dict
	List<idDeclEntity> defList = new List<idDeclEntity>();
	List<string> keysToRemove = new List<string>();

	foreach(KeyValuePair<string, string> kvp in _dict.MatchPrefix("inherit")) {
		idDeclEntity copy = idE.DeclManager.FindType<idDeclEntity>(DeclType.EntityDef, kvp.Value, false);

		if(copy == null) {
			lexer.Warning("Unknown entityDef '{0}' inherited by '{1}'", kvp.Value, this.Name);
		} else {
			defList.Add(copy);
		}

		// delete this key/value pair
		keysToRemove.Add(kvp.Key);
	}

	_dict.Remove(keysToRemove.ToArray());

	// now copy over the inherited key / value pairs; existing keys are kept
	// (SetDefaults only fills in values this dict does not already define)
	foreach(idDeclEntity def in defList) {
		_dict.SetDefaults(def._dict);
	}

	// precache all referenced media
	// do this as long as we arent in modview
	idE.Game.CacheDictionaryMedia(_dict);

	return true;
}
/// <summary>
/// Parses one map entity: a brace-delimited block of key/value pairs plus
/// nested brush and patch primitives.
/// </summary>
/// <param name="lexer">Lexer positioned at the entity's opening brace.</param>
/// <param name="isWordSpawn">Unused here; kept for interface compatibility (sic — presumably "isWorldSpawn").</param>
/// <param name="version">Map file format version, forwarded to primitive parsers.</param>
/// <returns>The parsed entity, or null on any parse error.</returns>
public static idMapEntity Parse(idLexer lexer, bool isWordSpawn = false, float version = idMapFile.CurrentMapVersion) {
	idToken token;

	if((token = lexer.ReadToken()) == null) {
		return null;
	}

	if(token.ToString() != "{") {
		lexer.Error("idMapEntity.Parse: {{ not found, found {0}", token.ToString());
		return null;
	}

	idMapEntity mapEnt = new idMapEntity();
	idMapBrush mapBrush = null;
	idMapPatch mapPatch = null;
	Vector3 origin = Vector3.Zero;
	bool worldEnt = false;
	string tokenValue;

	do {
		if((token = lexer.ReadToken()) == null) {
			lexer.Error("idMapEntity.Parse: EOF without closing brace");
			return null;
		}

		if(token.ToString() == "}") {
			break;
		}

		if(token.ToString() == "{") {
			// parse a brush or patch
			if((token = lexer.ReadToken()) == null) {
				lexer.Error("idMapEntity.Parse: unexpected EOF");
				return null;
			}

			// worldspawn primitives are always in world space
			if(worldEnt == true) {
				origin = Vector3.Zero;
			}

			tokenValue = token.ToString();

			// if is it a brush: brush, brushDef, brushDef2, brushDef3
			if(tokenValue.StartsWith("brush", StringComparison.OrdinalIgnoreCase) == true) {
				mapBrush = idMapBrush.Parse(lexer, origin, (tokenValue.Equals("brushDef2", StringComparison.OrdinalIgnoreCase) || tokenValue.Equals("brushDef3", StringComparison.OrdinalIgnoreCase)), version);

				if(mapBrush == null) {
					return null;
				}

				mapEnt.AddPrimitive(mapBrush);
			}
			// if is it a patch: patchDef2, patchDef3
			else if(tokenValue.StartsWith("patch", StringComparison.OrdinalIgnoreCase) == true) {
				mapPatch = idMapPatch.Parse(lexer, origin, tokenValue.Equals("patchDef3", StringComparison.OrdinalIgnoreCase), version);

				if(mapPatch == null) {
					return null;
				}

				mapEnt.AddPrimitive(mapPatch);
			}
			// assume it's a brush in Q3 or older style
			else {
				lexer.UnreadToken = token;

				mapBrush = idMapBrush.ParseQ3(lexer, origin);

				if(mapBrush == null) {
					return null;
				}

				mapEnt.AddPrimitive(mapBrush);
			}
		} else {
			// parse a key / value pair
			string key = token.ToString();
			token = lexer.ReadTokenOnLine();

			// BUGFIX: tolerate a missing value on the line instead of
			// dereferencing a null token (matches the original engine, where a
			// failed ReadTokenOnLine left the value empty)
			string value = (token != null) ? token.ToString() : string.Empty;

			// strip trailing spaces that sometimes get accidentally added in the editor
			value = value.Trim();
			key = key.Trim();

			mapEnt.Dict.Set(key, value);

			if(key.Equals("origin", StringComparison.OrdinalIgnoreCase) == true) {
				// scanf into doubles, then assign, so it is idVec size independent
				// BUGFIX: guard against malformed origin strings — the previous
				// Split(' ') + unchecked indexing threw on fewer than three
				// components and mis-parsed on consecutive spaces
				string[] parts = value.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);

				if(parts.Length >= 3) {
					float.TryParse(parts[0], out origin.X);
					float.TryParse(parts[1], out origin.Y);
					float.TryParse(parts[2], out origin.Z);
				}
			} else if((key.Equals("classname", StringComparison.OrdinalIgnoreCase) == true) && (value.Equals("worldspawn", StringComparison.OrdinalIgnoreCase) == true)) {
				worldEnt = true;
			}
		}
	} while(true);

	return mapEnt;
}
/// <summary>
/// Parses one collision model block (vertices, edges, nodes, polygons and
/// brushes) and registers it in the model table.
/// </summary>
/// <param name="lexer">Lexer positioned at the model's quoted name.</param>
/// <returns>Always true; section errors are reported through the lexer.</returns>
private bool ParseCollisionModel(idLexer lexer)
{
	CollisionModel model = new CollisionModel();
	_models[_modelCount++] = model;

	// model name is a quoted string
	idToken token = lexer.ExpectTokenType(TokenType.String, 0);
	model.Name = token.ToString();

	lexer.ExpectTokenString("{");

	// section dispatch, case-insensitive
	while(lexer.CheckTokenString("}") == false)
	{
		token = lexer.ReadToken();

		switch(token.ToString().ToLower())
		{
			case "vertices":
				ParseVertices(lexer, model);
				break;

			case "edges":
				ParseEdges(lexer, model);
				break;

			case "nodes":
				lexer.ExpectTokenString("{");
				model.Node = ParseNodes(lexer, model, null);
				lexer.ExpectTokenString("}");
				break;

			case "polygons":
				ParsePolygons(lexer, model);
				break;

			case "brushes":
				ParseBrushes(lexer, model);
				break;

			default:
				lexer.Error("ParseCollisionModel: bad token \"{0}\"", token);
				break;
		}
	}

	// calculate edge normals
	_checkCount++;
	idConsole.Warning("TODO: CalculateEdgeNormals(model, model.Node);");

	// get model bounds from brush and polygon bounds
	model.Bounds = GetNodeBounds(model.Node);

	// get model contents
	model.Contents = GetNodeContents(model.Node);

	// total memory used by this model
	idConsole.Warning("TODO: used memory");
	/*model->usedMemory = model->numVertices * sizeof(cm_vertex_t) + model->numEdges * sizeof(cm_edge_t) + model->polygonMemory + model->brushMemory + model->numNodes * sizeof(cm_node_t) + model->numPolygonRefs * sizeof(cm_polygonRef_t) + model->numBrushRefs * sizeof(cm_brushRef_t);*/

	return true;
}
/// <summary>
/// Parses one MD5 mesh section: shader, texture coordinates, triangles and
/// joint weights, then builds the pre-scaled weight tables and initial
/// vertex data used by all animations of this mesh.
/// </summary>
/// <param name="lexer">Lexer positioned inside the mesh block.</param>
/// <param name="joints">Bind-pose joint matrices used to transform the vertices.</param>
public void Parse(idLexer lexer, idJointMatrix[] joints) {
	lexer.ExpectTokenString("{");

	//
	// parse name (optional; the value is read and discarded)
	//
	if(lexer.CheckTokenString("name") == true) {
		lexer.ReadToken();
	}

	//
	// parse shader
	//
	lexer.ExpectTokenString("shader");

	idToken token = lexer.ReadToken();
	string materialName = token.ToString();

	_material = idE.DeclManager.FindMaterial(materialName);

	//
	// parse texture coordinates
	//
	lexer.ExpectTokenString("numverts");
	int count = lexer.ParseInt();

	if(count < 0) {
		lexer.Error("Invalid size: {0}", token.ToString());
	}

	_texCoords = new Vector2[count];

	// per-vertex weight span (first index + count into the weight list)
	int[] firstWeightForVertex = new int[count];
	int[] weightCountForVertex = new int[count];
	int maxWeight = 0;
	int coordCount = _texCoords.Length;

	_weightCount = 0;

	for(int i = 0; i < coordCount; i++) {
		lexer.ExpectTokenString("vert");
		lexer.ParseInt(); // vertex index in the file; sequential, so ignored

		float[] tmp = lexer.Parse1DMatrix(2);
		_texCoords[i] = new Vector2(tmp[0], tmp[1]);

		firstWeightForVertex[i] = lexer.ParseInt();
		weightCountForVertex[i] = lexer.ParseInt();

		if(weightCountForVertex[i] == 0) {
			lexer.Error("Vertex without any joint weights.");
		}

		_weightCount += weightCountForVertex[i];

		// track the highest weight slot referenced, to validate against numweights later
		if((weightCountForVertex[i] + firstWeightForVertex[i]) > maxWeight) {
			maxWeight = weightCountForVertex[i] + firstWeightForVertex[i];
		}
	}

	//
	// parse tris
	//
	lexer.ExpectTokenString("numtris");
	_triangleCount = lexer.ParseInt();

	if(_triangleCount < 0) {
		lexer.Error("Invalid size: {0}", _triangleCount);
	}

	int[] tris = new int[_triangleCount * 3];

	for(int i = 0; i < _triangleCount; i++) {
		lexer.ExpectTokenString("tri");
		lexer.ParseInt(); // triangle index in the file; sequential, so ignored

		tris[i * 3 + 0] = lexer.ParseInt();
		tris[i * 3 + 1] = lexer.ParseInt();
		tris[i * 3 + 2] = lexer.ParseInt();
	}

	//
	// parse weights
	//
	lexer.ExpectTokenString("numweights");
	count = lexer.ParseInt();

	if(count < 0) {
		lexer.Error("Invalid size: {0}", count);
	}

	if(maxWeight > count) {
		lexer.Warning("Vertices reference out of range weights in model ({0} of {1} weights).", maxWeight, count);
	}

	VertexWeight[] tempWeights = new VertexWeight[count];

	for(int i = 0; i < count; i++) {
		lexer.ExpectTokenString("weight");
		lexer.ParseInt(); // weight index in the file; sequential, so ignored

		int jointIndex = lexer.ParseInt();

		if((jointIndex < 0) || (jointIndex >= joints.Length)) {
			lexer.Error("Joint index out of range({0}): {1}", joints.Length, jointIndex);
		}

		tempWeights[i].JointIndex = jointIndex;
		tempWeights[i].JointWeight = lexer.ParseFloat();

		float[] tmp = lexer.Parse1DMatrix(3);
		tempWeights[i].Offset = new Vector3(tmp[0], tmp[1], tmp[2]);
	}

	// create pre-scaled weights and an index for the vertex/joint lookup
	_scaledWeights = new Vector4[_weightCount];
	_weightIndex = new int[_weightCount * 2];

	count = 0;
	coordCount = _texCoords.Length;

	for(int i = 0; i < coordCount; i++) {
		int num = firstWeightForVertex[i];
		int weightCount = weightCountForVertex[i];

		for(int j = 0; j < weightCount; j++, num++, count++) {
			// offset pre-multiplied by the weight; W carries the weight itself
			Vector3 tmp = tempWeights[num].Offset * tempWeights[num].JointWeight;

			_scaledWeights[count].X = tmp.X;
			_scaledWeights[count].Y = tmp.Y;
			_scaledWeights[count].Z = tmp.Z;
			_scaledWeights[count].W = tempWeights[num].JointWeight;

			_weightIndex[count * 2 + 0] = tempWeights[num].JointIndex;
		}

		// flag the odd slot of the vertex's last weight — presumably the
		// "end of this vertex's weights" marker consumed during skinning;
		// mirrors the original engine's weightIndex layout
		_weightIndex[count * 2 - 1] = 1;
	}

	lexer.ExpectTokenString("}");

	// update counters
	idConsole.Warning("TODO: idRenderModel_MD5 update counters");
	/*c_numVerts += texCoords.Num(); c_numWeights += numWeights; c_numWeightJoints++; for ( i = 0; i < numWeights; i++ ) { c_numWeightJoints += weightIndex[i*2+1]; }*/

	//
	// build the information that will be common to all animations of this mesh:
	// silhouette edge connectivity and normal / tangent generation information
	//
	Vertex[] verts = new Vertex[_texCoords.Length];
	int vertCount = verts.Length;

	for(int i = 0; i < vertCount; i++) {
		verts[i].TextureCoordinates = _texCoords[i];
	}

	TransformVertices(verts, joints);

	idConsole.Warning("TODO: idMD5Mesh Deform");
	//_deformInfo = idE.RenderSystem.BuildDeformInformation(verts, tris, _material.UseUnsmoothedTangents);
}
/// <summary>
/// Takes a string and breaks it up into arg tokens.
/// </summary>
/// <param name="text">Command text to tokenize; an empty string clears the args.</param>
/// <param name="keepAsStrings">true to only seperate tokens from whitespace and comments, ignoring punctuation.</param>
public void TokenizeString(string text, bool keepAsStrings)
{
	// clear previous args.
	_args = new string[] { };

	if(text.Length == 0)
	{
		return;
	}

	idLexer lexer = new idLexer();
	lexer.LoadMemory(text, "idCmdSystem.TokenizeString");
	lexer.Options = LexerOptions.NoErrors | LexerOptions.NoWarnings | LexerOptions.NoStringConcatination | LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters | LexerOptions.AllowIPAddresses | ((keepAsStrings == true) ? LexerOptions.OnlyStrings : 0);

	idToken token = null, number = null;
	List<string> newArgs = new List<string>();
	string tokenValue;

	// NOTE: the previous version also tracked per-token and total lengths
	// (vestigial buffer-sizing from the C++ original); they were never read
	// and have been removed.
	while(true)
	{
		if(newArgs.Count == idE.MaxCommandArgs)
		{
			break; // this is usually something malicious.
		}

		if((token = lexer.ReadToken()) == null)
		{
			break;
		}

		tokenValue = token.ToString();

		if((keepAsStrings == false) && (tokenValue == "-"))
		{
			// check for negative numbers: fold "-" and a following number
			// token into a single argument.
			if((number = lexer.CheckTokenType(TokenType.Number, 0)) != null)
			{
				token.Set("-" + number);
			}
		}

		// check for cvar expansion: "$name" is replaced by the cvar's value
		if(tokenValue == "$")
		{
			if((token = lexer.ReadToken()) == null)
			{
				break;
			}

			if(idE.CvarSystem.IsInitialized == true)
			{
				token.Set(idE.CvarSystem.GetString(token.ToString()));
			}
			else
			{
				token.Set("<unknown>");
			}
		}

		// regular token
		newArgs.Add(token.ToString());
	}

	_args = newArgs.ToArray();
}
/// <summary>
/// Used for initial loads, reloadModel, and reloading the data of purged models.
/// Reads the MD5 mesh file: version header, joint hierarchy with default poses,
/// and the meshes, then computes the model bounds.
/// </summary>
/// <remarks>
/// Upon exit, the model will absolutely be valid, but possibly as a default model.
/// </remarks>
/// <exception cref="ObjectDisposedException">If this model has been disposed.</exception>
public override void Load() {
	if(this.Disposed == true) {
		throw new ObjectDisposedException(this.GetType().Name);
	}

	// free any currently held data before (re)loading
	if(_purged == false) {
		Purge();
	}

	_purged = false;

	idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters);

	if(lexer.LoadFile(Name) == false) {
		MakeDefault();
		return;
	}

	lexer.ExpectTokenString(VersionString);

	int version = lexer.ParseInt();
	int count = 0;
	idToken token;

	if(version != Version) {
		lexer.Error("Invalid version {0}. Should be version {1}", version, Version);
	}

	//
	// skip commandline
	//
	lexer.ExpectTokenString("commandline");
	lexer.ReadToken();

	// parse num joints
	lexer.ExpectTokenString("numJoints");
	count = lexer.ParseInt();

	_joints = new idMD5Joint[count];
	_defaultPose = new idJointQuaternion[count];

	// bind-pose matrices, also handed to each mesh for vertex transformation
	idJointMatrix[] poseMat3 = new idJointMatrix[count];

	// parse num meshes
	lexer.ExpectTokenString("numMeshes");
	count = lexer.ParseInt();

	if(count < 0) {
		lexer.Error("Invalid size: {0}", count);
	}

	_meshes = new idMD5Mesh[count];

	//
	// parse joints
	//
	lexer.ExpectTokenString("joints");
	lexer.ExpectTokenString("{");

	int jointCount = _joints.Length;

	for(int i = 0; i < jointCount; i++) {
		idMD5Joint joint = _joints[i] = new idMD5Joint();
		idJointQuaternion pose = new idJointQuaternion();

		ParseJoint(lexer, joint, ref pose);

		poseMat3[i] = idJointMatrix.Zero;
		poseMat3[i].Rotation = Matrix.CreateFromQuaternion(pose.Quaternion);
		poseMat3[i].Translation = pose.Translation;

		if(joint.Parent != null) {
			int parentIndex = GetJointIndex(joint.Parent);

			// rebase the pose against the parent's matrix (multiplying by the
			// transposed parent transform) — presumably converting the file's
			// absolute joint pose into a parent-relative default pose
			pose.Quaternion = Quaternion.CreateFromRotationMatrix(poseMat3[i].ToMatrix() * Matrix.Transpose(poseMat3[parentIndex].ToMatrix()));
			pose.Translation = Vector3.Transform(poseMat3[i].ToVector3() - poseMat3[parentIndex].ToVector3(), Matrix.Transpose(poseMat3[parentIndex].ToMatrix()));
		}

		_defaultPose[i] = pose;
	}

	lexer.ExpectTokenString("}");

	//
	// parse meshes
	//
	int meshCount = _meshes.Length;

	for(int i = 0; i < meshCount; i++) {
		lexer.ExpectTokenString("mesh");

		_meshes[i] = new idMD5Mesh();
		_meshes[i].Parse(lexer, poseMat3);
	}

	//
	// calculate the bounds of the model
	//
	CalculateBounds(poseMat3);

	// set the timestamp for reloadmodels
	idConsole.Warning("TODO: fileSystem->ReadFile( name, NULL, &timeStamp );");
}
/// <summary>
/// This is used during both the initial load, and any reloads.
/// Scans the decl file, identifying each declaration by [type] name { ... },
/// registering it with the decl manager and reparsing any decl already in use.
/// </summary>
/// <returns>
/// The file checksum (NOTE(review): currently never computed — see the
/// MD5_BlockChecksum TODO below — so this returns the field's default).
/// </returns>
public int LoadAndParse() {
	// load the text
	idConsole.DeveloperWriteLine("...loading '{0}'", this.FileName);

	byte[] data = idE.FileSystem.ReadFile(this.FileName);

	if(data == null) {
		idConsole.FatalError("couldn't load {0}", this.FileName);
		return 0;
	}

	string content = UTF8Encoding.UTF8.GetString(data);

	idLexer lexer = new idLexer();
	lexer.Options = LexerOptions;

	if(lexer.LoadMemory(content, this.FileName) == false) {
		idConsole.Error("Couldn't parse {0}", this.FileName);
		return 0;
	}

	// mark all the defs that were from the last reload of this file
	foreach(idDecl decl in _decls) {
		decl.RedefinedInReload = false;
	}

	// TODO: checksum = MD5_BlockChecksum( buffer, length );

	_fileSize = content.Length;

	int startMarker, sourceLine;
	int size;
	string name;
	bool reparse;
	idToken token;
	idDecl newDecl;
	DeclType identifiedType;
	string tokenValue;

	// scan through, identifying each individual declaration
	while(true) {
		// remember where this decl's source text starts
		startMarker = lexer.FileOffset;
		sourceLine = lexer.LineNumber;

		// parse the decl type name
		if((token = lexer.ReadToken()) == null) {
			break;
		}

		tokenValue = token.ToString();

		// get the decl type from the type name
		identifiedType = idE.DeclManager.GetDeclTypeFromName(tokenValue);

		if(identifiedType == DeclType.Unknown) {
			if(tokenValue == "{") {
				// if we ever see an open brace, we somehow missed the [type] <name> prefix
				lexer.Warning("Missing decl name");
				lexer.SkipBracedSection(false);

				continue;
			} else {
				if(this.DefaultType == DeclType.Unknown) {
					lexer.Warning("No type");
					continue;
				}

				// not a type name: push the token back and treat it as the decl name
				lexer.UnreadToken = token;

				// use the default type
				identifiedType = this.DefaultType;
			}
		}

		// now parse the name
		if((token = lexer.ReadToken()) == null) {
			lexer.Warning("Type without definition at the end of file");
			break;
		}

		tokenValue = token.ToString();

		if(tokenValue == "{") {
			// if we ever see an open brace, we somehow missed the [type] <name> prefix
			lexer.Warning("Missing decl name");
			lexer.SkipBracedSection(false);

			continue;
		}

		// FIXME: export decls are only used by the model exporter, they are skipped here for now
		if(identifiedType == DeclType.ModelExport) {
			lexer.SkipBracedSection();
			continue;
		}

		name = tokenValue;

		// make sure there's a '{'
		if((token = lexer.ReadToken()) == null) {
			lexer.Warning("Type without definition at end of file");
			break;
		}

		tokenValue = token.ToString();

		if(tokenValue != "{") {
			lexer.Warning("Expecting '{{' but found '{0}'", tokenValue);
			continue;
		}

		lexer.UnreadToken = token;

		// now take everything until a matched closing brace
		lexer.SkipBracedSection();
		size = lexer.FileOffset - startMarker;

		// look it up, possibly getting a newly created default decl
		reparse = false;
		newDecl = idE.DeclManager.FindTypeWithoutParsing(identifiedType, name, false);

		if(newDecl != null) {
			// update the existing copy
			if((newDecl.SourceFile != this) || (newDecl.RedefinedInReload == true)) {
				lexer.Warning("{0} '{1}' previously defined at {2}:{3}", identifiedType.ToString().ToLower(), name, newDecl.FileName, newDecl.LineNumber);
				continue;
			}

			// only decls that were already parsed need an immediate reparse
			if(newDecl.State != DeclState.Unparsed) {
				reparse = true;
			}
		} else {
			// allow it to be created as a default, then add it to the per-file list
			newDecl = idE.DeclManager.FindTypeWithoutParsing(identifiedType, name, true);

			if(newDecl == null) {
				lexer.Warning("could not instanciate decl '{0}' with name '{1}'", identifiedType.ToString().ToLower(), name);
				continue;
			}

			_decls.Add(newDecl);
		}

		// record where this decl's source text lives so it can be (re)parsed lazily
		newDecl.RedefinedInReload = true;
		newDecl.SourceText = content.Substring(startMarker, size);
		newDecl.SourceFile = this;
		newDecl.SourceTextOffset = startMarker;
		newDecl.SourceTextLength = size;
		newDecl.SourceLine = sourceLine;
		newDecl.State = DeclState.Unparsed;

		// if it is currently in use, reparse it immedaitely
		if(reparse) {
			newDecl.ParseLocal();
		}
	}

	_lineCount = lexer.LineNumber;

	// any defs that weren't redefinedInReload should now be defaulted
	foreach(idDecl decl in _decls) {
		if(decl.RedefinedInReload == false) {
			decl.MakeDefault();
			decl.SourceTextOffset = decl.SourceFile.FileSize;
			decl.SourceTextLength = 0;
			decl.SourceLine = decl.SourceFile.LineCount;
		}
	}

	return _checksum;
}
/// <summary>
/// Initializes the render world from a compiled map (.proc) file, loading its
/// models, shadow models, inter-area portals and BSP nodes.
/// </summary>
/// <remarks>
/// A NULL or empty name will make a world without a map model, which
/// is still useful for displaying a bare model.
/// </remarks>
/// <param name="name">Map name without the proc extension; null/empty creates an empty world.</param>
/// <returns>False if the proc file is missing or has a bad id; otherwise true.</returns>
/// <exception cref="ObjectDisposedException">If this world has been disposed.</exception>
public bool InitFromMap(string name) {
	if(this.Disposed == true) {
		throw new ObjectDisposedException(this.GetType().Name);
	}

	// if this is an empty world, initialize manually
	if((name == null) || (name == string.Empty)) {
		FreeWorld();
		ClearWorld();

		_mapName = string.Empty;

		return true;
	}

	// load it: swap the map's extension for the proc file extension
	string fileName = string.Format("{0}.{1}", Path.Combine(Path.GetDirectoryName(name), Path.GetFileNameWithoutExtension(name)), idE.ProcFileExtension);

	// if we are reloading the same map, check the timestamp
	// and try to skip all the work
	// TODO: timestamps
	/*ID_TIME_T currentTimeStamp; fileSystem->ReadFile( filename, NULL, &currentTimeStamp );*/

	/*if ( name == mapName ) { if ( currentTimeStamp != FILE_NOT_FOUND_TIMESTAMP && currentTimeStamp == mapTimeStamp ) { common->Printf( "idRenderWorldLocal::InitFromMap: retaining existing map\n" ); FreeDefs(); TouchWorldModels(); AddWorldModelEntities(); ClearPortalStates(); return true; } common->Printf( "idRenderWorldLocal::InitFromMap: timestamp has changed, reloading.\n" ); }*/

	FreeWorld();

	idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoDollarPrecompilation);

	if(lexer.LoadFile(fileName) == false) {
		idConsole.WriteLine("idRenderWorld.InitFromMap: {0} not found", fileName);
		ClearWorld();

		return false;
	}

	_mapName = name;

	// TODO: mapTimeStamp = currentTimeStamp;

	// if we are writing a demo, archive the load command
	// TODO: demo
	/*if ( session->writeDemo ) { WriteLoadMap(); }*/

	idToken token;
	idRenderModel lastModel;
	string tokenValue;

	// the file must start with the expected proc file identifier
	if(((token = lexer.ReadToken()) == null) || (token.ToString().Equals(idE.ProcFileID, StringComparison.OrdinalIgnoreCase) == false)) {
		idConsole.WriteLine("idRenderWorld.InitFromMap: bad id '{0}' instead of '{1}'", token, idE.ProcFileID);
		return false;
	}

	// parse the file: a sequence of model/shadowModel/interAreaPortals/nodes sections
	while(true) {
		if((token = lexer.ReadToken()) == null) {
			break;
		}

		tokenValue = token.ToString();

		if((tokenValue == "model") || (tokenValue == "shadowModel")) {
			if(tokenValue == "model") {
				lastModel = ParseModel(lexer);
			} else {
				lastModel = ParseShadowModel(lexer);
			}

			// add it to the model manager list
			idE.RenderModelManager.AddModel(lastModel);

			// save it in the list to free when clearing this map
			_localModels.Add(lastModel);
		} else if(tokenValue == "interAreaPortals") {
			ParseInterAreaPortals(lexer);
		} else if(tokenValue == "nodes") {
			ParseNodes(lexer);
		} else {
			lexer.Error("idRenderWorld.InitFromMap: bad token \"{0}\"", tokenValue);
		}
	}

	// if it was a trivial map without any areas, create a single area
	if(_portalAreaCount == 0) {
		ClearWorld();
	}

	// find the points where we can early-our of reference pushing into the BSP tree
	CommonChildrenArea(_areaNodes[0]);

	AddWorldModelEntities();
	ClearPortalStates();

	// done!
	return true;
}
/// <summary>
/// Parses the body of a sound material (sound shader) decl: distances, volume,
/// speaker masks, flags and the referenced wave files.
/// </summary>
/// <param name="lexer">Lexer positioned just inside the decl's opening brace.</param>
/// <returns>True if the body parsed cleanly up to the closing brace, false on any malformed token.</returns>
private bool ParseMaterial(idLexer lexer)
{
	// establish defaults before any keyword overrides them
	_parameters.MinDistance = 1;
	_parameters.MaxDistance = 10;
	_parameters.Volume = 1;
	_speakerMask = 0;
	_altSound = null;

	idToken token;
	string tokenValue;
	int sampleCount = 0;

	while(true)
	{
		if((token = lexer.ExpectAnyToken()) == null)
		{
			return false;
		}

		// keywords are matched case-insensitively by lower-casing the token
		tokenValue = token.ToString().ToLower();

		if(tokenValue == "}")
		{
			break;
		}
		// minimum number of sounds
		else if(tokenValue == "minsamples")
		{
			sampleCount = lexer.ParseInt();
		}
		else if(tokenValue == "description")
		{
			_description = lexer.ReadTokenOnLine().ToString();
		}
		else if(tokenValue == "mindistance")
		{
			_parameters.MinDistance = lexer.ParseFloat();
		}
		else if(tokenValue == "maxdistance")
		{
			_parameters.MaxDistance = lexer.ParseFloat();
		}
		else if(tokenValue == "shakes")
		{
			token = lexer.ExpectAnyToken();

			if(token.Type == TokenType.Number)
			{
				_parameters.Shakes = token.ToFloat();
			}
			else
			{
				// bare 'shakes' with no number: put the token back and use the default amount
				lexer.UnreadToken = token;
				_parameters.Shakes = 1.0f;
			}
		}
		else if(tokenValue == "reverb")
		{
			// two comma-separated floats are still consumed to stay in sync with old assets
			float reg0 = lexer.ParseFloat();

			if(lexer.ExpectTokenString(",") == false)
			{
				return false;
			}

			float reg1 = lexer.ParseFloat();
			// no longer supported
		}
		else if(tokenValue == "volume")
		{
			_parameters.Volume = lexer.ParseFloat();
		}
		// leadinVolume is used to allow light breaking leadin sounds to be much louder than the broken loop
		else if(tokenValue == "leadinvolume")
		{
			_leadInVolume = lexer.ParseFloat();
		}
		else if(tokenValue == "mask_center")
		{
			_speakerMask |= 1 << (int) Speakers.Center;
		}
		else if(tokenValue == "mask_left")
		{
			_speakerMask |= 1 << (int) Speakers.Left;
		}
		else if(tokenValue == "mask_right")
		{
			_speakerMask |= 1 << (int) Speakers.Right;
		}
		else if(tokenValue == "mask_backright")
		{
			_speakerMask |= 1 << (int) Speakers.BackRight;
		}
		else if(tokenValue == "mask_backleft")
		{
			_speakerMask |= 1 << (int) Speakers.BackLeft;
		}
		else if(tokenValue == "mask_lfe")
		{
			_speakerMask |= 1 << (int) Speakers.Lfe;
		}
		else if(tokenValue == "soundclass")
		{
			_parameters.SoundClass = lexer.ParseInt();

			if(_parameters.SoundClass < 0)
			{
				lexer.Warning("SoundClass out of range");
				return false;
			}
		}
		else if(tokenValue == "altsound")
		{
			if((token = lexer.ExpectAnyToken()) == null)
			{
				return false;
			}

			_altSound = idE.DeclManager.FindSound(token.ToString());
		}
		else if(tokenValue == "ordered")
		{
			// no longer supported
		}
		else if(tokenValue == "no_dups")
		{
			_parameters.Flags |= SoundMaterialFlags.NoDuplicates;
		}
		else if(tokenValue == "no_flicker")
		{
			_parameters.Flags |= SoundMaterialFlags.NoFlicker;
		}
		else if(tokenValue == "plain")
		{
			// no longer supported
		}
		else if(tokenValue == "looping")
		{
			_parameters.Flags |= SoundMaterialFlags.Looping;
		}
		else if(tokenValue == "no_occlusion")
		{
			_parameters.Flags |= SoundMaterialFlags.NoOcclusion;
		}
		else if(tokenValue == "private")
		{
			_parameters.Flags |= SoundMaterialFlags.PrivateSound;
		}
		else if(tokenValue == "antiprivate")
		{
			_parameters.Flags |= SoundMaterialFlags.AntiPrivateSound;
		}
		else if(tokenValue == "playonce")
		{
			_parameters.Flags |= SoundMaterialFlags.PlayOnce;
		}
		else if(tokenValue == "global")
		{
			_parameters.Flags |= SoundMaterialFlags.Global;
		}
		else if(tokenValue == "unclamped")
		{
			_parameters.Flags |= SoundMaterialFlags.Unclamped;
		}
		else if(tokenValue == "omnidirectional")
		{
			_parameters.Flags |= SoundMaterialFlags.OmniDirectional;
		}
		// onDemand can't be a parms, because we must track all references and overrides would confuse it
		else if(tokenValue == "ondemand")
		{
			// no longer loading sounds on demand
			// _onDemand = true;
		}
		// the wave files
		else if(tokenValue == "leadin")
		{
			// add to the leadin list
			if((token = lexer.ReadToken()) == null)
			{
				lexer.Warning("Expected sound after leadin");
				return false;
			}

			idConsole.Warning("TODO: leadin");
			/*if(soundSystemLocal.soundCache && numLeadins < maxSamples) { leadins[numLeadins] = soundSystemLocal.soundCache->FindSound(token.c_str(), onDemand); numLeadins++; }*/
		}
		else if((tokenValue.EndsWith(".wav") == true) || (tokenValue.EndsWith(".ogg") == true))
		{
			idConsole.Warning("TODO: .wav|.ogg");
			/*// add to the wav list
			if(soundSystemLocal.soundCache && numEntries < maxSamples) {
				token.BackSlashesToSlashes();
				idStr lang = cvarSystem->GetCVarString("sys_lang");
				if(lang.Icmp("english") != 0 && token.Find("sound/vo/", false) >= 0) {
					idStr work = token;
					work.ToLower();
					work.StripLeading("sound/vo/");
					work = va("sound/vo/%s/%s", lang.c_str(), work.c_str());
					if(fileSystem->ReadFile(work, NULL, NULL) > 0) {
						token = work;
					} else {
						// also try to find it with the .ogg extension
						work.SetFileExtension(".ogg");
						if(fileSystem->ReadFile(work, NULL, NULL) > 0) {
							token = work;
						}
					}
				}
				entries[numEntries] = soundSystemLocal.soundCache->FindSound(token.c_str(), onDemand);
				numEntries++;
			}*/
		}
		else
		{
			lexer.Warning("unknown token '{0}'", token.ToString());
			return false;
		}
	}

	if(_parameters.Shakes > 0.0f)
	{
		idConsole.Warning("TODO: CheckShakesAndOgg()");
	}

	return true;
}
private bool LoadCollisionModelFile(string name, ulong mapFileCRC) { // load it string fileName = Path.Combine(Path.GetDirectoryName(name), Path.GetFileNameWithoutExtension(name) + Extension); idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoDollarPrecompilation); if(lexer.LoadFile(fileName) == false) { return false; } idToken token; if(lexer.ExpectTokenString(TokenFileID) == false) { idConsole.Warning("{0} is not a CM file.", fileName); } else if(((token = lexer.ReadToken()) == null) || (token.ToString() != FileVersion)) { idConsole.Warning("{0} has version {1} instead of {2}", fileName, token, FileVersion); } else if((token = lexer.ExpectTokenType(TokenType.Number, TokenSubType.Integer)) == null) { idConsole.Warning("{0} has no map file CRC", fileName); } else { ulong crc = token.ToUInt64(); if((mapFileCRC != 0) && (crc != mapFileCRC)) { idConsole.WriteLine("{0} is out of date", fileName); } else { // parse the file while(true) { if((token = lexer.ReadToken()) == null) { break; } if(token.ToString().ToLower() == "collisionmodel") { if(ParseCollisionModel(lexer) == false) { return false; } } else { lexer.Error("idCollisionModelManagerLocal::LoadCollisionModelFile: bad token \"{0}\"", token); } } return true; } } return false; }
/// <summary>
/// Recursively parses a material expression at the given operator-precedence
/// level, emitting expression ops and returning the result register.
/// </summary>
/// <remarks>
/// Priority 0 parses a bare term; 1 = * / %, 2 = + -, 3 = comparisons, 4 = &amp;&amp; ||.
/// BUGFIX: the original chain repeated the priority-3 "&gt;" and "&gt;=" tests a
/// second time; those duplicates were unreachable dead code and were removed.
/// </remarks>
/// <param name="lexer">Lexer positioned at the expression.</param>
/// <param name="priority">Maximum operator precedence to consume at this level.</param>
/// <returns>A register index holding the expression result (0 on parse error).</returns>
private int ParseExpressionPriority(idLexer lexer, int priority)
{
	idToken token;

	if(priority == 0)
	{
		return ParseTerm(lexer);
	}

	int a = ParseExpressionPriority(lexer, priority - 1);

	if(TestMaterialFlag(MaterialFlags.Defaulted) == true)
	{
		// we have a parse error.
		return 0;
	}

	if((token = lexer.ReadToken()) == null)
	{
		// we won't get EOF in a real file, but we can
		// when parsing from generated strings
		return a;
	}

	string tokenValue = token.ToString();

	if((priority == 1) && (tokenValue == "*"))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.Multiply, priority);
	}
	else if((priority == 1) && (tokenValue == "/"))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.Divide, priority);
	}
	else if((priority == 1) && (tokenValue == "%"))
	{
		// implied truncate both to integer.
		return ParseEmitOp(lexer, a, ExpressionOperationType.Modulo, priority);
	}
	else if((priority == 2) && (tokenValue == "+"))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.Add, priority);
	}
	else if((priority == 2) && (tokenValue == "-"))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.Subtract, priority);
	}
	else if((priority == 3) && (tokenValue == ">"))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.GreaterThan, priority);
	}
	else if((priority == 3) && (tokenValue == ">="))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.GreaterThanOrEquals, priority);
	}
	else if((priority == 3) && (tokenValue == "<"))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.LessThan, priority);
	}
	else if((priority == 3) && (tokenValue == "<="))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.LessThanOrEquals, priority);
	}
	else if((priority == 3) && (tokenValue == "=="))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.Equals, priority);
	}
	else if((priority == 3) && (tokenValue == "!="))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.NotEquals, priority);
	}
	else if((priority == 4) && (tokenValue == "&&"))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.And, priority);
	}
	else if((priority == 4) && (tokenValue == "||"))
	{
		return ParseEmitOp(lexer, a, ExpressionOperationType.Or, priority);
	}

	// assume that anything else terminates the expression
	// not too robust error checking...
	lexer.UnreadToken = token;

	return a;
}
private void ParseBrushes(idLexer lexer, CollisionModel model) { idToken token = lexer.CheckTokenType(TokenType.Number, 0); int planeCount; CollisionModelBrush b; float[] tmp; lexer.ExpectTokenString("{"); while(lexer.CheckTokenString("}") == false) { // parse brush planeCount = lexer.ParseInt(); b = new CollisionModelBrush(); b.Contents = ContentFlags.All; b.Material = _traceModelMaterial; b.Planes = new Plane[planeCount]; lexer.ExpectTokenString("{"); for(int i = 0; i < planeCount; i++) { tmp = lexer.Parse1DMatrix(3); b.Planes[i].Normal = new Vector3(tmp[0], tmp[1], tmp[2]); b.Planes[i].D = lexer.ParseFloat(); } lexer.ExpectTokenString("}"); tmp = lexer.Parse1DMatrix(3); b.Bounds.Min = new Vector3(tmp[0], tmp[1], tmp[2]); tmp = lexer.Parse1DMatrix(3); b.Bounds.Max = new Vector3(tmp[0], tmp[1], tmp[2]); token = lexer.ReadToken(); if(token.Type == TokenType.Number) { b.Contents = (ContentFlags) token.ToInt32(); // old .cm files use a single integer } else { b.Contents = ContentsFromString(token.ToString()); } b.CheckCount = 0; b.PrimitiveCount = 0; // filter brush into tree FilterBrushIntoTree(model, model.Node, b); } }
/// <summary> /// /// </summary> /// <param name="lexer"></param> /// <returns>A register index.</returns> private int ParseTerm(idLexer lexer) { idToken token = lexer.ReadToken(); string tokenValue = token.ToString(); string tokenLower = tokenValue.ToLower(); if(tokenValue == "(") { int a = ParseExpression(lexer); MatchToken(lexer, ")"); return a; } ExpressionRegister tmpReg; if(Enum.TryParse<ExpressionRegister>(tokenValue, true, out tmpReg) == true) { _parsingData.RegistersAreConstant = false; return (int) tmpReg; } if(tokenLower == "fragmentPrograms") { return GetExpressionConstant(1.0f); } else if(tokenLower == "sound") { _parsingData.RegistersAreConstant = false; return EmitOp(0, 0, ExpressionOperationType.Sound); } // parse negative numbers else if(tokenLower == "-") { token = lexer.ReadToken(); if((token.Type == TokenType.Number) || (token.ToString() == ".")) { return GetExpressionConstant(-token.ToFloat()); } lexer.Warning("Bad negative number '{0}'", token.ToString()); this.MaterialFlag = MaterialFlags.Defaulted; return 0; } else if((token.Type == TokenType.Number) || (tokenValue == ".") || (tokenValue == "-")) { return GetExpressionConstant(token.ToFloat()); } // see if it is a table name idDeclTable table = idE.DeclManager.FindType<idDeclTable>(DeclType.Table, tokenValue, false); if(table == null) { lexer.Warning("Bad term '{0}'", tokenValue); this.MaterialFlag = MaterialFlags.Defaulted; return 0; } // parse a table expression MatchToken(lexer, "["); int b = ParseExpression(lexer); MatchToken(lexer, "]"); return EmitOp(table.Index, b, ExpressionOperationType.Table); }
/// <summary>
/// Parses a patchDef2/patchDef3 primitive from a .map file.
/// </summary>
/// <param name="lexer">Lexer positioned just before the primitive's opening brace.</param>
/// <param name="origin">Entity origin subtracted from every control point.</param>
/// <param name="patchDef3">True for patchDef3 (7 info values, explicit subdivisions), false for patchDef2 (5 values).</param>
/// <param name="version">Map format version; below 2.0 material names get an implicit 'textures/' prefix.</param>
/// <returns>The parsed patch, or null on any parse error.</returns>
public static idMapPatch Parse(idLexer lexer, Vector3 origin, bool patchDef3 = true, float version = idMapFile.CurrentMapVersion)
{
	if(lexer.ExpectTokenString("{") == false)
	{
		return null;
	}

	// read the material (we had an implicit 'textures/' in the old format...)
	idToken token = lexer.ReadToken();

	if(token == null)
	{
		lexer.Error("idMapPatch::Parse: unexpected EOF");
		return null;
	}

	// Parse it
	// info layout: [0]=width, [1]=height, then (patchDef3 only) [2]=hSubdiv, [3]=vSubdiv
	float[] info;

	if(patchDef3 == true)
	{
		info = lexer.Parse1DMatrix(7);

		if(info == null)
		{
			lexer.Error("idMapPatch::Parse: unable to Parse patchDef3 info");
			return null;
		}
	}
	else
	{
		info = lexer.Parse1DMatrix(5);

		if(info == null)
		{
			lexer.Error("idMapPatch::Parse: unable to parse patchDef2 info");
			return null;
		}
	}

	idMapPatch patch = new idMapPatch((int) info[0], (int) info[1]);

	if(version < 2.0f)
	{
		patch.Material = "textures/" + token.ToString();
	}
	else
	{
		patch.Material = token.ToString();
	}

	if(patchDef3 == true)
	{
		patch.HorizontalSubdivisions = (int) info[2];
		patch.VerticalSubdivisions = (int) info[3];
		patch.ExplicitlySubdivided = true;
	}

	if((patch.Width < 0) || (patch.Height < 0))
	{
		lexer.Error("idMapPatch::Parse: bad size");
		return null;
	}

	// these were written out in the wrong order, IMHO
	if(lexer.ExpectTokenString("(") == false)
	{
		lexer.Error("idMapPatch::Parse: bad patch vertex data");
		return null;
	}

	// outer loop walks columns (j over width), inner loop rows (i over height);
	// each control point is (x y z s t)
	for(int j = 0; j < patch.Width; j++)
	{
		if(lexer.ExpectTokenString("(") == false)
		{
			lexer.Error("idMapPatch::Parse: bad vertex row data");
			return null;
		}

		for(int i = 0; i < patch.Height; i++)
		{
			float[] v = lexer.Parse1DMatrix(5);

			if(v == null)
			{
				lexer.Error("idMapPatch::Parse: bad vertex column data");
				return null;
			}

			Vertex vert = new Vertex();
			vert.Position.X = v[0] - origin.X;
			vert.Position.Y = v[1] - origin.Y;
			vert.Position.Z = v[2] - origin.Z;
			vert.TextureCoordinates = new Vector2(v[3], v[4]);

			// row-major storage: index = row * width + column
			patch.SetVertex(i * patch.Width + j, vert);
		}

		if(lexer.ExpectTokenString(")") == false)
		{
			lexer.Error("idMapPatch::Parse: unable to parse patch control points");
			return null;
		}
	}

	if(lexer.ExpectTokenString(")") == false)
	{
		lexer.Error("idMapPatch::Parse: unable to parse patch control points, no closure" );
		return null;
	}

	// read any key/value pairs
	while((token = lexer.ReadToken()) != null)
	{
		if(token.ToString() == "}")
		{
			// first '}' closes the patch def; the second closes the primitive block
			lexer.ExpectTokenString("}");
			break;
		}

		if(token.Type == TokenType.String)
		{
			string key = token.ToString();
			token = lexer.ExpectTokenType(TokenType.String, 0);

			patch.Dict.Set(key, token.ToString());
		}
	}

	return patch;
}
/// <summary>
/// Parses a single material stage body (everything between '{' and '}'),
/// building a MaterialStage and loading its image with the accumulated
/// filter/repeat/depth/cubemap options.
/// </summary>
/// <param name="lexer">Lexer positioned just inside the stage's opening brace.</param>
/// <param name="textureRepeatDefault">Repeat mode used unless the stage overrides it.</param>
private void ParseStage(idLexer lexer, TextureRepeat textureRepeatDefault)
{
	// per-stage image options, accumulated while parsing and applied at the end
	TextureFilter textureFilter = TextureFilter.Default;
	TextureRepeat textureRepeat = textureRepeatDefault;
	TextureDepth textureDepth = TextureDepth.Default;
	CubeFiles cubeMap = CubeFiles.TwoD;

	bool allowPicmip = true;
	string imageName = string.Empty;

	NewMaterialStage newStage = new NewMaterialStage();
	newStage.VertexParameters = new int[4, 4];

	MaterialStage materialStage = new MaterialStage();
	materialStage.ConditionRegister = GetExpressionConstant(1);
	// color defaults to constant white/opaque
	materialStage.Color.Registers = new int[] { GetExpressionConstant(1), GetExpressionConstant(1), GetExpressionConstant(1), GetExpressionConstant(1) };

	// 2x3 texture matrix of register indices used by the transform keywords
	int[,] matrix = new int[2, 3];

	idToken token;
	int a, b;

	string tokenValue;
	string tokenLower;

	while(true)
	{
		if(TestMaterialFlag(MaterialFlags.Defaulted) == true)
		{
			// we have a parse error
			return;
		}
		else if((token = lexer.ExpectAnyToken()) == null)
		{
			this.MaterialFlag = MaterialFlags.Defaulted;
			return;
		}

		tokenValue = token.ToString();
		tokenLower = tokenValue.ToLower();

		// the close brace for the entire material ends the draw block
		if(tokenLower == "}")
		{
			break;
		}
		// BSM Nerve: Added for stage naming in the material editor
		else if(tokenLower == "name")
		{
			lexer.SkipRestOfLine();
		}
		// image options
		else if(tokenLower == "blend")
		{
			ParseBlend(lexer, ref materialStage);
		}
		else if(tokenLower == "map")
		{
			imageName = ParsePastImageProgram(lexer);
		}
		else if(tokenLower == "remoterendermap")
		{
			materialStage.Texture.Dynamic = DynamicImageType.RemoteRender;
			materialStage.Texture.Width = lexer.ParseInt();
			materialStage.Texture.Height = lexer.ParseInt();
		}
		else if(tokenLower == "mirrorrendermap")
		{
			materialStage.Texture.Dynamic = DynamicImageType.MirrorRender;
			materialStage.Texture.Width = lexer.ParseInt();
			materialStage.Texture.Height = lexer.ParseInt();
			materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.Screen;
		}
		else if(tokenLower == "xrayrendermap")
		{
			materialStage.Texture.Dynamic = DynamicImageType.XRayRender;
			materialStage.Texture.Width = lexer.ParseInt();
			materialStage.Texture.Height = lexer.ParseInt();
			materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.Screen;
		}
		else if(tokenLower == "screen")
		{
			materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.Screen;
		}
		else if(tokenLower == "screen2")
		{
			// NOTE(review): maps to the same coordinate mode as "screen" in this port
			materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.Screen;
		}
		else if(tokenLower == "glasswarp")
		{
			materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.GlassWarp;
		}
		else if(tokenLower == "videomap")
		{
			// note that videomaps will always be in clamp mode, so texture
			// coordinates had better be in the 0 to 1 range
			if((token = lexer.ReadToken()) == null)
			{
				idConsole.Warning("missing parameter for 'videoMap' keyword in material '{0}'", this.Name);
			}
			else
			{
				bool loop = false;

				// an optional leading 'loop' flag precedes the video file name
				if(token.ToString().Equals("loop", StringComparison.OrdinalIgnoreCase) == true)
				{
					loop = true;

					if((token = lexer.ReadToken()) == null)
					{
						idConsole.Warning("missing parameter for 'videoMap' keyword in material '{0}'", this.Name);
						continue;
					}
				}

				idConsole.Warning("TODO: material videoMap keyword");

				// TODO: cinematic
				/*ts->cinematic = idCinematic::Alloc(); ts->cinematic->InitFromFile( token.c_str(), loop );*/
			}
		}
		else if(tokenLower == "soundmap")
		{
			if((token = lexer.ReadToken()) == null)
			{
				idConsole.Warning("missing parameter for 'soundMap' keyword in material '{0}'", this.Name);
			}
			else
			{
				idConsole.Warning("TODO: material soundMap keyword");

				// TODO
				/*ts->cinematic = new idSndWindow(); ts->cinematic->InitFromFile( token.c_str(), true );*/
			}
		}
		else if(tokenLower == "cubemap")
		{
			imageName = ParsePastImageProgram(lexer);
			cubeMap = CubeFiles.Native;
		}
		else if(tokenLower == "cameracubemap")
		{
			imageName = ParsePastImageProgram(lexer);
			cubeMap = CubeFiles.Camera;
		}
		else if(tokenLower == "ignorealphatest")
		{
			materialStage.IgnoreAlphaTest = true;
		}
		else if(tokenLower == "nearest")
		{
			textureFilter = TextureFilter.Nearest;
		}
		else if(tokenLower == "linear")
		{
			textureFilter = TextureFilter.Linear;
		}
		else if(tokenLower == "clamp")
		{
			textureRepeat = TextureRepeat.Clamp;
		}
		else if(tokenLower == "noclamp")
		{
			textureRepeat = TextureRepeat.Repeat;
		}
		else if(tokenLower == "zeroclamp")
		{
			textureRepeat = TextureRepeat.ClampToZero;
		}
		else if(tokenLower == "alphazeroclamp")
		{
			textureRepeat = TextureRepeat.ClampToZeroAlpha;
		}
		else if((tokenLower == "uncompressed") || (tokenLower == "highquality"))
		{
			// cvar can globally disable high-quality requests
			if(idE.CvarSystem.GetInteger("image_ignoreHighQuality") == 0)
			{
				textureDepth = TextureDepth.HighQuality;
			}
		}
		else if(tokenLower == "forcehighquality")
		{
			textureDepth = TextureDepth.HighQuality;
		}
		else if(tokenLower == "nopicmip")
		{
			allowPicmip = false;
		}
		else if(tokenLower == "vertexcolor")
		{
			materialStage.VertexColor = StageVertexColor.Modulate;
		}
		else if(tokenLower == "inversevertexcolor")
		{
			materialStage.VertexColor = StageVertexColor.InverseModulate;
		}
		// privatePolygonOffset
		else if(tokenLower == "privatepolygonoffset")
		{
			if((token = lexer.ReadTokenOnLine()) == null)
			{
				materialStage.PrivatePolygonOffset = 1;
			}
			else
			{
				// explicit larger (or negative) offset
				lexer.UnreadToken = token;
				materialStage.PrivatePolygonOffset = lexer.ParseFloat();
			}
		}
		// texture coordinate generation
		else if(tokenLower == "texgen")
		{
			token = lexer.ExpectAnyToken();

			tokenValue = token.ToString();
			tokenLower = tokenValue.ToLower();

			if(tokenLower == "normal")
			{
				materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.DiffuseCube;
			}
			else if(tokenLower == "reflect")
			{
				materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.ReflectCube;
			}
			else if(tokenLower == "skybox")
			{
				materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.SkyboxCube;
			}
			else if(tokenLower == "wobblesky")
			{
				materialStage.Texture.TextureCoordinates = TextureCoordinateGeneration.WobbleSkyCube;

				// wobblesky takes three expressions (only registers 0-2 are filled)
				_texGenRegisters = new int[4];
				_texGenRegisters[0] = ParseExpression(lexer);
				_texGenRegisters[1] = ParseExpression(lexer);
				_texGenRegisters[2] = ParseExpression(lexer);
			}
			else
			{
				idConsole.Warning("bad texGen '{0}' in material {1}", tokenValue, this.Name);
				this.MaterialFlag = MaterialFlags.Defaulted;
			}
		}
		else if((tokenLower == "scroll") || (tokenLower == "translate"))
		{
			a = ParseExpression(lexer);
			MatchToken(lexer, ",");
			b = ParseExpression(lexer);

			matrix[0, 0] = GetExpressionConstant(1);
			matrix[0, 1] = GetExpressionConstant(0);
			matrix[0, 2] = a;
			matrix[1, 0] = GetExpressionConstant(0);
			matrix[1, 1] = GetExpressionConstant(1);
			matrix[1, 2] = b;

			MultiplyTextureMatrix(ref materialStage.Texture, matrix);
		}
		else if(tokenLower == "scale")
		{
			a = ParseExpression(lexer);
			MatchToken(lexer, ",");
			b = ParseExpression(lexer);

			// this just scales without a centering
			matrix[0, 0] = a;
			matrix[0, 1] = GetExpressionConstant(0);
			matrix[0, 2] = GetExpressionConstant(0);
			matrix[1, 0] = GetExpressionConstant(0);
			matrix[1, 1] = b;
			matrix[1, 2] = GetExpressionConstant(0);

			MultiplyTextureMatrix(ref materialStage.Texture, matrix);
		}
		else if(tokenLower == "centerscale")
		{
			a = ParseExpression(lexer);
			MatchToken(lexer, ",");
			b = ParseExpression(lexer);

			// this subtracts 0.5, then scales, then adds 0.5
			matrix[0, 0] = a;
			matrix[0, 1] = GetExpressionConstant(0);
			matrix[0, 2] = EmitOp(GetExpressionConstant(0.5f), EmitOp(GetExpressionConstant(0.5f), a, ExpressionOperationType.Multiply), ExpressionOperationType.Subtract);
			matrix[1, 0] = GetExpressionConstant(0);
			matrix[1, 1] = b;
			matrix[1, 2] = EmitOp(GetExpressionConstant(0.5f), EmitOp(GetExpressionConstant(0.5f), b, ExpressionOperationType.Multiply), ExpressionOperationType.Subtract);

			MultiplyTextureMatrix(ref materialStage.Texture, matrix);
		}
		else if(tokenLower == "shear")
		{
			a = ParseExpression(lexer);
			MatchToken(lexer, ",");
			b = ParseExpression(lexer);

			// this subtracts 0.5, then shears, then adds 0.5
			matrix[0, 0] = GetExpressionConstant(1);
			matrix[0, 1] = a;
			matrix[0, 2] = EmitOp(GetExpressionConstant(-0.5f), a, ExpressionOperationType.Multiply);
			matrix[1, 0] = b;
			matrix[1, 1] = GetExpressionConstant(1);
			matrix[1, 2] = EmitOp(GetExpressionConstant(-0.5f), b, ExpressionOperationType.Multiply);

			MultiplyTextureMatrix(ref materialStage.Texture, matrix);
		}
		else if(tokenLower == "rotate")
		{
			int sinReg, cosReg;

			// in cycles
			a = ParseExpression(lexer);

			// sin/cos come from the builtin table decls; missing tables default the material
			idDeclTable table = idE.DeclManager.FindType<idDeclTable>(DeclType.Table, "sinTable", false);

			if(table == null)
			{
				idConsole.Warning("no sinTable for rotate defined");
				this.MaterialFlag = MaterialFlags.Defaulted;

				return;
			}

			sinReg = EmitOp(table.Index, a, ExpressionOperationType.Table);

			table = idE.DeclManager.FindType<idDeclTable>(DeclType.Table, "cosTable", false);

			if(table == null)
			{
				idConsole.Warning("no cosTable for rotate defined");
				this.MaterialFlag = MaterialFlags.Defaulted;

				return;
			}

			cosReg = EmitOp(table.Index, a, ExpressionOperationType.Table);

			// this subtracts 0.5, then rotates, then adds 0.5
			matrix[0, 0] = cosReg;
			matrix[0, 1] = EmitOp(GetExpressionConstant(0), sinReg, ExpressionOperationType.Subtract);
			matrix[0, 2] = EmitOp(EmitOp(EmitOp(GetExpressionConstant(-0.5f), cosReg, ExpressionOperationType.Multiply), EmitOp(GetExpressionConstant(0.5f), sinReg, ExpressionOperationType.Multiply), ExpressionOperationType.Add), GetExpressionConstant(0.5f), ExpressionOperationType.Add);
			matrix[1, 0] = sinReg;
			matrix[1, 1] = cosReg;
			matrix[1, 2] = EmitOp(EmitOp(EmitOp(GetExpressionConstant(-0.5f), sinReg, ExpressionOperationType.Multiply), EmitOp(GetExpressionConstant(-0.5f), cosReg, ExpressionOperationType.Multiply), ExpressionOperationType.Add), GetExpressionConstant(0.5f), ExpressionOperationType.Add);

			MultiplyTextureMatrix(ref materialStage.Texture, matrix);
		}
		// color mask options
		else if(tokenLower == "maskred")
		{
			materialStage.DrawStateBits |= MaterialStates.RedMask;
		}
		else if(tokenLower == "maskgreen")
		{
			materialStage.DrawStateBits |= MaterialStates.GreenMask;
		}
		else if(tokenLower == "maskblue")
		{
			materialStage.DrawStateBits |= MaterialStates.BlueMask;
		}
		else if(tokenLower == "maskalpha")
		{
			materialStage.DrawStateBits |= MaterialStates.AlphaMask;
		}
		else if(tokenLower == "maskcolor")
		{
			materialStage.DrawStateBits |= MaterialStates.ColorMask;
		}
		else if(tokenLower == "maskdepth")
		{
			materialStage.DrawStateBits |= MaterialStates.DepthMask;
		}
		else if(tokenLower == "alphatest")
		{
			materialStage.HasAlphaTest = true;
			materialStage.AlphaTestRegister = ParseExpression(lexer);

			// alpha testing makes coverage perforated for shadow/interaction purposes
			_coverage = MaterialCoverage.Perforated;
		}
		// shorthand for 2D modulated
		else if(tokenLower == "colored")
		{
			materialStage.Color.Registers[0] = (int) ExpressionRegister.Parm0;
			materialStage.Color.Registers[1] = (int) ExpressionRegister.Parm1;
			materialStage.Color.Registers[2] = (int) ExpressionRegister.Parm2;
			materialStage.Color.Registers[3] = (int) ExpressionRegister.Parm3;

			_parsingData.RegistersAreConstant = false;
		}
		else if(tokenLower == "color")
		{
			materialStage.Color.Registers[0] = ParseExpression(lexer);
			MatchToken(lexer, ",");
			materialStage.Color.Registers[1] = ParseExpression(lexer);
			MatchToken(lexer, ",");
			materialStage.Color.Registers[2] = ParseExpression(lexer);
			MatchToken(lexer, ",");
			materialStage.Color.Registers[3] = ParseExpression(lexer);
		}
		else if(tokenLower == "red")
		{
			materialStage.Color.Registers[0] = ParseExpression(lexer);
		}
		else if(tokenLower == "green")
		{
			materialStage.Color.Registers[1] = ParseExpression(lexer);
		}
		else if(tokenLower == "blue")
		{
			materialStage.Color.Registers[2] = ParseExpression(lexer);
		}
		else if(tokenLower == "alpha")
		{
			materialStage.Color.Registers[3] = ParseExpression(lexer);
		}
		else if(tokenLower == "rgb")
		{
			materialStage.Color.Registers[0] = materialStage.Color.Registers[1] = materialStage.Color.Registers[2] = ParseExpression(lexer);
		}
		else if(tokenLower == "rgba")
		{
			materialStage.Color.Registers[0] = materialStage.Color.Registers[1] = materialStage.Color.Registers[2] = materialStage.Color.Registers[3] = ParseExpression(lexer);
		}
		else if(tokenLower == "if")
		{
			materialStage.ConditionRegister = ParseExpression(lexer);
		}
		else if(tokenLower == "program")
		{
			if((token = lexer.ReadTokenOnLine()) != null)
			{
				idConsole.Warning("TODO: material program keyword");

				// TODO
				/*newStage.vertexProgram = R_FindARBProgram( GL_VERTEX_PROGRAM_ARB, token.c_str() ); newStage.fragmentProgram = R_FindARBProgram( GL_FRAGMENT_PROGRAM_ARB, token.c_str() );*/
			}
		}
		else if(tokenLower == "fragmentprogram")
		{
			if((token = lexer.ReadTokenOnLine()) != null)
			{
				idConsole.Warning("TODO: material fragmentProgram keyword");

				// TODO
				//newStage.fragmentProgram = R_FindARBProgram( GL_FRAGMENT_PROGRAM_ARB, token.c_str() );
			}
		}
		else if(tokenLower == "vertexprogram")
		{
			if((token = lexer.ReadTokenOnLine()) != null)
			{
				idConsole.Warning("TODO: material vertexProgram keyword");

				// TODO
				//newStage.vertexProgram = R_FindARBProgram( GL_VERTEX_PROGRAM_ARB, token.c_str() );
			}
		}
		else if(tokenLower == "megatexture")
		{
			if((token = lexer.ReadTokenOnLine()) != null)
			{
				idConsole.Warning("TODO: material megaTexture keyword");

				// TODO
				/*newStage.megaTexture = new idMegaTexture; if ( !newStage.megaTexture->InitFromMegaFile( token.c_str() ) ) { delete newStage.megaTexture; SetMaterialFlag( MF_DEFAULTED ); continue; } newStage.vertexProgram = R_FindARBProgram( GL_VERTEX_PROGRAM_ARB, "megaTexture.vfp" ); newStage.fragmentProgram = R_FindARBProgram( GL_FRAGMENT_PROGRAM_ARB, "megaTexture.vfp" );*/
			}
		}
		else if(tokenLower == "vertexparm")
		{
			ParseVertexParameter(lexer, ref newStage);
		}
		else if(tokenLower == "fragmentmap")
		{
			ParseFragmentMap(lexer, ref newStage);
		}
		else
		{
			idConsole.Warning("unknown token '{0}' in material '{1}'", tokenValue, this.Name);
			this.MaterialFlag = MaterialFlags.Defaulted;

			return;
		}
	}

	// if we are using newStage, allocate a copy of it
	if((newStage.FragmentProgram != 0) || (newStage.VertexProgram != 0))
	{
		materialStage.NewStage = newStage;
	}

	// select a compressed depth based on what the stage is
	if(textureDepth == TextureDepth.Default)
	{
		switch(materialStage.Lighting)
		{
			case StageLighting.Bump:
				textureDepth = TextureDepth.Bump;
				break;

			case StageLighting.Diffuse:
				textureDepth = TextureDepth.Diffuse;
				break;

			case StageLighting.Specular:
				textureDepth = TextureDepth.Specular;
				break;
		}
	}

	// now load the image with all the parms we parsed
	if((imageName != null) && (imageName != string.Empty))
	{
		materialStage.Texture.Image = idE.ImageManager.ImageFromFile(imageName, textureFilter, allowPicmip, textureRepeat, textureDepth, cubeMap);

		if(materialStage.Texture.Image == null)
		{
			materialStage.Texture.Image = idE.ImageManager.DefaultImage;
		}
	}
	else if(/*TODO: !ts->cinematic &&*/ (materialStage.Texture.Dynamic == 0) && (materialStage.NewStage.IsEmpty == true))
	{
		idConsole.Warning("material '{0}' had stage with no image", this.Name);
		materialStage.Texture.Image = idE.ImageManager.DefaultImage;
	}

	// successfully parsed a stage.
	_parsingData.Stages.Add(materialStage);
}
/// <summary>
/// Parses a brush primitive from a .map file, reading each side's plane,
/// texture matrix and material, plus any editor-only epair key/values.
/// </summary>
/// <param name="lexer">Lexer positioned just before the brush's opening brace.</param>
/// <param name="origin">Entity origin subtracted from plane points (old 3-point format only).</param>
/// <param name="newFormat">True for brushDef plane form (4 floats); false for the old 3-point form.</param>
/// <param name="version">Map format version; below 2.0 material names get an implicit 'textures/' prefix.</param>
/// <returns>The parsed brush, or null on any parse error.</returns>
public static idMapBrush Parse(idLexer lexer, Vector3 origin, bool newFormat = true, float version = idMapFile.CurrentMapVersion)
{
	idToken token;
	idMapBrushSide side;
	List<idMapBrushSide> sides = new List<idMapBrushSide>();
	idDict dict = new idDict();
	Vector3[] planePoints = new Vector3[3];

	if(lexer.ExpectTokenString("{") == false)
	{
		return null;
	}

	do
	{
		if((token = lexer.ReadToken()) == null)
		{
			lexer.Error("idMapBrush::Parse: unexpected EOF");
			return null;
		}

		if(token.ToString() == "}")
		{
			break;
		}

		// here we may have to jump over brush epairs ( only used in editor )
		do
		{
			// if token is a brace
			if(token.ToString() == "(")
			{
				break;
			}

			// the token should be a key string for a key/value pair
			if(token.Type != TokenType.String)
			{
				lexer.Error("idMapBrush::Parse: unexpected {0}, expected ( or epair key string", token.ToString());
				return null;
			}

			string key = token.ToString();

			if(((token = lexer.ReadTokenOnLine()) == null) || (token.Type != TokenType.String))
			{
				lexer.Error("idMapBrush::Parse: expected epair value string not found");
				return null;
			}

			dict.Set(key, token.ToString());

			// try to read the next key
			if((token = lexer.ReadToken()) == null)
			{
				lexer.Error("idMapBrush::Parse: unexpected EOF");
				return null;
			}
		}
		while(true);

		// push the "(" back so the matrix parsers below see it
		lexer.UnreadToken = token;

		side = new idMapBrushSide();
		sides.Add(side);

		if(newFormat == true)
		{
			// plane given directly as (a b c d)
			float[] tmp = lexer.Parse1DMatrix(4);

			if(tmp == null)
			{
				lexer.Error("idMapBrush::Parse: unable to read brush side plane definition");
				return null;
			}
			else
			{
				side.Plane = new Plane(tmp[0], tmp[1], tmp[2], tmp[3]);
			}
		}
		else
		{
			// read the three point plane definition
			float[] tmp, tmp2, tmp3;

			if(((tmp = lexer.Parse1DMatrix(3)) == null) || ((tmp2 = lexer.Parse1DMatrix(3)) == null) || ((tmp3 = lexer.Parse1DMatrix(3)) == null))
			{
				lexer.Error("idMapBrush::Parse: unable to read brush side plane definition");
				return null;
			}

			planePoints[0] = new Vector3(tmp[0], tmp[1], tmp[2]) - origin;
			planePoints[1] = new Vector3(tmp2[0], tmp2[1], tmp2[2]) - origin;
			planePoints[2] = new Vector3(tmp3[0], tmp3[1], tmp3[2]) - origin;

			side.Plane.FromPoints(planePoints[0], planePoints[1], planePoints[2]);
		}

		// read the texture matrix
		// this is odd, because the texmat is 2D relative to default planar texture axis
		float[,] tmp5 = lexer.Parse2DMatrix(2, 3);

		if(tmp5 == null)
		{
			lexer.Error("idMapBrush::Parse: unable to read brush side texture matrix");
			return null;
		}

		side.TextureMatrix[0] = new Vector3(tmp5[0, 0], tmp5[0, 1], tmp5[0, 2]);
		side.TextureMatrix[1] = new Vector3(tmp5[1, 0], tmp5[1, 1], tmp5[1, 2]);
		side.Origin = origin;

		// read the material
		if((token = lexer.ReadTokenOnLine()) == null)
		{
			lexer.Error("idMapBrush::Parse: unable to read brush side material");
			return null;
		}

		// we had an implicit 'textures/' in the old format...
		if(version < 2.0f)
		{
			side.Material = "textures/" + token.ToString();
		}
		else
		{
			side.Material = token.ToString();
		}

		// Q2 allowed override of default flags and values, but we don't any more
		// (up to three trailing tokens on the line are consumed and discarded)
		if(lexer.ReadTokenOnLine() != null)
		{
			if(lexer.ReadTokenOnLine() != null)
			{
				if(lexer.ReadTokenOnLine() != null)
				{
				}
			}
		}
	}
	while(true);

	if(lexer.ExpectTokenString("}") == false)
	{
		return null;
	}

	idMapBrush brush = new idMapBrush();

	foreach(idMapBrushSide s in sides)
	{
		brush.AddSide(s);
	}

	brush.Dict = dict;

	return brush;
}
private void ParseBlend(idLexer lexer, ref MaterialStage stage) { idToken token; if((token = lexer.ReadToken()) == null) { return; } string tokenValue = token.ToString(); string tokenLower = tokenValue.ToLower(); // blending combinations if(tokenLower == "blend") { stage.DrawStateBits = MaterialStates.SourceBlendSourceAlpha | MaterialStates.DestinationBlendOneMinusSourceAlpha; } else if(tokenLower == "add") { stage.DrawStateBits = MaterialStates.SourceBlendOne | MaterialStates.DestinationBlendOne; } else if((tokenLower == "filter") || (tokenLower == "modulate")) { stage.DrawStateBits = MaterialStates.SourceBlendDestinationColor | MaterialStates.DestinationBlendZero; } else if(tokenLower == "none") { // none is used when defining an alpha mask that doesn't draw stage.DrawStateBits = MaterialStates.SourceBlendZero | MaterialStates.DestinationBlendOne; } else if(tokenLower == "bumpmap") { stage.Lighting = StageLighting.Bump; } else if(tokenLower == "diffusemap") { stage.Lighting = StageLighting.Diffuse; } else if(tokenLower == "specularmap") { stage.Lighting = StageLighting.Specular; } else { MaterialStates sourceBlendMode = GetSourceBlendMode(tokenLower); MatchToken(lexer, ","); if((token = lexer.ReadToken()) == null) { return; } tokenLower = token.ToString().ToLower(); MaterialStates destinationBlendMode = GetDestinationBlendMode(tokenLower); stage.DrawStateBits = sourceBlendMode | destinationBlendMode; } }
/// <summary>
/// Loads an md5anim file: header, joint hierarchy, per-frame bounds, the base
/// frame pose, and the compressed per-frame animated components.  Finally
/// removes the root joint's translation from the component frames and records
/// it as the total move delta for the animation.
/// </summary>
/// <param name="fileName">Path of the md5anim file to load.</param>
/// <returns>True if the file could be opened; parse errors are reported through the lexer.</returns>
public bool LoadAnimation(string fileName)
{
	idToken token;
	idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters | LexerOptions.NoStringConcatination);

	if(lexer.LoadFile(fileName) == false)
	{
		return false;
	}

	Clear();

	_name = fileName;

	// version must match the MD5 version this code understands
	lexer.ExpectTokenString(idRenderModel_MD5.VersionString);
	int version = lexer.ParseInt();

	if(version != idRenderModel_MD5.Version)
	{
		lexer.Error("Invalid version {0}. Should be version {1}", version, idRenderModel_MD5.Version);
	}

	// skip the commandline
	lexer.ExpectTokenString("commandline");
	lexer.ReadToken();

	// parse num frames
	lexer.ExpectTokenString("numFrames");
	int frameCount = lexer.ParseInt();

	if(frameCount <= 0)
	{
		lexer.Error("Invalid number of frames: {0}", frameCount);
	}

	// parse num joints
	lexer.ExpectTokenString("numJoints");
	int jointCount = lexer.ParseInt();

	if(jointCount <= 0)
	{
		lexer.Error("Invalid number of joints: {0}", jointCount);
	}

	// parse frame rate.  must be > 0 (not just >= 0): a zero frame rate would
	// divide by zero in the _animLength calculation at the end of this method.
	lexer.ExpectTokenString("frameRate");
	_frameRate = lexer.ParseInt();

	if(_frameRate <= 0)
	{
		lexer.Error("Invalid frame rate: {0}", _frameRate);
	}

	// parse number of animated components (at most 6 per joint: 3 translation + 3 rotation)
	lexer.ExpectTokenString("numAnimatedComponents");
	_animatedComponentCount = lexer.ParseInt();

	if((_animatedComponentCount < 0) || (_animatedComponentCount > (jointCount * 6)))
	{
		lexer.Error("Invalid number of animated components: {0}", _animatedComponentCount);
	}

	// parse the hierarchy
	_jointInfo = new JointAnimationInfo[jointCount];

	lexer.ExpectTokenString("hierarchy");
	lexer.ExpectTokenString("{");

	for(int i = 0; i < jointCount; i++)
	{
		token = lexer.ReadToken();

		_jointInfo[i] = new JointAnimationInfo();
		_jointInfo[i].NameIndex = idR.AnimManager.GetJointIndex(token.ToString());

		// parse parent num; parents must precede their children in the file
		_jointInfo[i].ParentIndex = lexer.ParseInt();

		if(_jointInfo[i].ParentIndex >= i)
		{
			lexer.Error("Invalid parent num: {0}", _jointInfo[i].ParentIndex);
		}

		if((i != 0) && (_jointInfo[i].ParentIndex < 0))
		{
			lexer.Error("Animations may have only one root joint");
		}

		// parse anim bits (6 flag bits: Tx/Ty/Tz/Qx/Qy/Qz)
		_jointInfo[i].AnimationBits = (AnimationBits) lexer.ParseInt();

		if(((int) _jointInfo[i].AnimationBits & ~63) != 0)
		{
			lexer.Error("Invalid anim bits: {0}", _jointInfo[i].AnimationBits);
		}

		// parse first component
		_jointInfo[i].FirstComponent = lexer.ParseInt();

		if((_animatedComponentCount > 0) && ((_jointInfo[i].FirstComponent < 0) || (_jointInfo[i].FirstComponent >= _animatedComponentCount)))
		{
			lexer.Error("Invalid first component: {0}", _jointInfo[i].FirstComponent);
		}
	}

	lexer.ExpectTokenString("}");

	// parse per-frame bounds (min, max)
	lexer.ExpectTokenString("bounds");
	lexer.ExpectTokenString("{");

	_bounds = new idBounds[frameCount];

	for(int i = 0; i < frameCount; i++)
	{
		float[] tmp = lexer.Parse1DMatrix(3);
		float[] tmp2 = lexer.Parse1DMatrix(3);

		_bounds[i] = new idBounds(
			new Vector3(tmp[0], tmp[1], tmp[2]),
			new Vector3(tmp2[0], tmp2[1], tmp2[2])
		);
	}

	lexer.ExpectTokenString("}");

	// parse base frame: translation + compressed quaternion per joint
	_baseFrame = new idJointQuaternion[jointCount];

	lexer.ExpectTokenString("baseframe");
	lexer.ExpectTokenString("{");

	for(int i = 0; i < jointCount; i++)
	{
		float[] tmp = lexer.Parse1DMatrix(3);
		float[] tmp2 = lexer.Parse1DMatrix(3);

		idCompressedQuaternion q = new idCompressedQuaternion(tmp2[0], tmp2[1], tmp2[2]);

		_baseFrame[i] = new idJointQuaternion();
		_baseFrame[i].Translation = new Vector3(tmp[0], tmp[1], tmp[2]);
		_baseFrame[i].Quaternion = q.ToQuaternion();
	}

	lexer.ExpectTokenString("}");

	// parse frames: a flat float array, _animatedComponentCount entries per frame
	_componentFrames = new float[_animatedComponentCount * frameCount];

	int frameOffset = 0;

	for(int i = 0; i < frameCount; i++)
	{
		lexer.ExpectTokenString("frame");
		int count = lexer.ParseInt();

		if(count != i)
		{
			lexer.Error("Expected frame number {0}", i);
		}

		lexer.ExpectTokenString("{");

		for(int j = 0; j < _animatedComponentCount; j++, frameOffset++)
		{
			_componentFrames[frameOffset] = lexer.ParseFloat();
		}

		lexer.ExpectTokenString("}");
	}

	// get total move delta: strip the root joint's animated translation down to
	// a delta relative to the base frame, and remember the final frame's delta.
	if(_animatedComponentCount == 0)
	{
		_totalDelta = Vector3.Zero;
	}
	else
	{
		int componentOffset = _jointInfo[0].FirstComponent;

		if((_jointInfo[0].AnimationBits & AnimationBits.TranslationX) == AnimationBits.TranslationX)
		{
			for(int i = 0; i < frameCount; i++)
			{
				_componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.X;
			}

			_totalDelta.X = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))];
			componentOffset++;
		}
		else
		{
			_totalDelta.X = 0;
		}

		if((_jointInfo[0].AnimationBits & AnimationBits.TranslationY) == AnimationBits.TranslationY)
		{
			for(int i = 0; i < frameCount; i++)
			{
				_componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Y;
			}

			_totalDelta.Y = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))];
			componentOffset++;
		}
		else
		{
			_totalDelta.Y = 0;
		}

		if((_jointInfo[0].AnimationBits & AnimationBits.TranslationZ) == AnimationBits.TranslationZ)
		{
			for(int i = 0; i < frameCount; i++)
			{
				_componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Z;
			}

			_totalDelta.Z = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))];
		}
		else
		{
			_totalDelta.Z = 0;
		}
	}

	// the root translation now lives in the component frames as deltas
	_baseFrame[0].Translation = Vector3.Zero;

	// we don't count last frame because it would cause a 1 frame pause at the end
	_animLength = ((frameCount - 1) * 1000 + _frameRate - 1) / _frameRate;

	// done
	return true;
}
/// <summary>
/// Parses a single anim declaration from a model def: the alias name, one or
/// more comma-separated md5anim references (random anims end with a numeric
/// suffix that is stripped from the alias), and an optional { } block of
/// frame commands and anim flags.
/// </summary>
/// <param name="lexer">Lexer positioned at the anim's name token.</param>
/// <param name="defaultAnimCount">Number of anims inherited from the parent def; those slots may be overridden, later ones may not be duplicated.</param>
/// <returns>True on success; on any failure the declaration is reset via MakeDefault() and false is returned.</returns>
private bool ParseAnimation(idLexer lexer, int defaultAnimCount)
{
	List<idMD5Anim> md5anims = new List<idMD5Anim>();
	idMD5Anim md5anim;
	idAnim anim;
	AnimationFlags flags = new AnimationFlags();

	idToken token;
	idToken realName = lexer.ReadToken();

	if(realName == null)
	{
		lexer.Warning("Unexpected end of file");
		MakeDefault();

		return false;
	}

	string alias = realName.ToString();

	int i;
	int count = _anims.Count;

	// see if this name is already in use
	for(i = 0; i < count; i++)
	{
		if(_anims[i].FullName.Equals(alias, StringComparison.OrdinalIgnoreCase) == true)
		{
			break;
		}
	}

	// a name may only be reused if it overrides an inherited default anim
	if((i < count) && (i >= defaultAnimCount))
	{
		lexer.Warning("Duplicate anim '{0}'", realName);
		MakeDefault();

		return false;
	}

	if(i < defaultAnimCount)
	{
		anim = _anims[i];
	}
	else
	{
		// create the alias associated with this animation
		anim = new idAnim();
		_anims.Add(anim);
	}

	// random anims end with a number. find the numeric suffix of the animation.
	int len = alias.Length;

	for(i = len - 1; i > 0; i--)
	{
		if(Char.IsNumber(alias[i]) == false)
		{
			break;
		}
	}

	// check for zero length name, or a purely numeric name
	if(i <= 0)
	{
		lexer.Warning("Invalid animation name '{0}'", alias);
		MakeDefault();

		return false;
	}

	// remove the numeric suffix
	alias = alias.Substring(0, i + 1);

	// parse the anims from the string
	do
	{
		if((token = lexer.ReadToken()) == null)
		{
			lexer.Warning("Unexpected end of file");
			MakeDefault();

			return false;
		}

		// lookup the animation
		md5anim = idR.AnimManager.GetAnimation(token.ToString());

		if(md5anim == null)
		{
			lexer.Warning("Couldn't load anim '{0}'", token);
			// reset like every other failure path in this method; previously
			// this path returned without MakeDefault(), leaving the
			// declaration half-parsed
			MakeDefault();

			return false;
		}

		md5anim.CheckModelHierarchy(_model);

		if(md5anims.Count > 0)
		{
			// make sure it's the same length as the other anims
			if(md5anim.Length != md5anims[0].Length)
			{
				lexer.Warning("Anim '{0}' does not match length of anim '{1}'", md5anim.Name, md5anims[0].Name);
				MakeDefault();

				return false;
			}
		}

		// add it to our list
		md5anims.Add(md5anim);
	}
	while(lexer.CheckTokenString(",") == true);

	if(md5anims.Count == 0)
	{
		lexer.Warning("No animation specified");
		MakeDefault();

		return false;
	}

	anim.SetAnimation(this, realName.ToString(), alias, md5anims.ToArray());

	// parse any frame commands or animflags
	if(lexer.CheckTokenString("{") == true)
	{
		while(true)
		{
			if((token = lexer.ReadToken()) == null)
			{
				lexer.Warning("Unexpected end of file");
				MakeDefault();

				return false;
			}

			string tokenValue = token.ToString();

			if(tokenValue == "}")
			{
				break;
			}
			else if(tokenValue == "prevent_idle_override")
			{
				flags.PreventIdleOverride = true;
			}
			else if(tokenValue == "random_cycle_start")
			{
				flags.RandomCycleStart = true;
			}
			else if(tokenValue == "ai_no_turn")
			{
				flags.AINoTurn = true;
			}
			else if(tokenValue == "anim_turn")
			{
				flags.AnimationTurn = true;
			}
			else if(tokenValue == "frame")
			{
				// create a frame command
				int frameIndex;
				string err;

				// make sure we don't have any line breaks while reading the frame command so the error line # will be correct
				if((token = lexer.ReadTokenOnLine()) == null)
				{
					lexer.Warning("Missing frame # after 'frame'");
					MakeDefault();

					return false;
				}
				else if((token.Type == TokenType.Punctuation) && (token.ToString() == "-"))
				{
					lexer.Warning("Invalid frame # after 'frame'");
					MakeDefault();

					return false;
				}
				else if((token.Type != TokenType.Number) || (token.SubType == TokenSubType.Float))
				{
					lexer.Error("expected integer value, found '{0}'", token);
				}

				// get the frame number
				frameIndex = token.ToInt32();

				// put the command on the specified frame of the animation
				if((err = anim.AddFrameCommand(this, frameIndex, lexer, null)) != null)
				{
					lexer.Warning(err);
					MakeDefault();

					return false;
				}
			}
			else
			{
				lexer.Warning("Unknown command '{0}'", token);
				MakeDefault();

				return false;
			}
		}
	}

	// set the flags
	anim.Flags = flags;

	return true;
}