public AntlrGrammarDetails(Workspaces.Document item) : base(item)
{
    // Passes executed in order for all files.
    Passes.Add(() =>
    {
        // Gather Imports from grammars.
        // Gather _dependent_grammars map.
        int before_count = 0;
        foreach (KeyValuePair<string, List<string>> x in AntlrGrammarDetails._dependent_grammars)
        {
            before_count++;
            before_count = before_count + x.Value.Count;
        }
        ParseTreeWalker.Default.Walk(new Pass3Listener(this), ParseTree);
        int after_count = 0;
        foreach (KeyValuePair<string, List<string>> dep in AntlrGrammarDetails._dependent_grammars)
        {
            string name = dep.Key;
            Workspaces.Document x = Workspaces.Workspace.Instance.FindDocument(name);
            if (x == null)
            {
                // Add document.
                Workspaces.Container proj = Item.Parent;
                Workspaces.Document new_doc = new Workspaces.Document(name);
                proj.AddChild(new_doc);
                // Count the added document so this pass reports a change below.
                after_count++;
            }
            after_count++;
            after_count = after_count + dep.Value.Count;
        }
        // A changed count means the import graph grew; returning true restarts the passes.
        return before_count != after_count;
    });
    Passes.Add(() =>
    {
        // For all imported grammars across the entire universe,
        // make sure all are loaded in the workspace,
        // then restart.
        foreach (KeyValuePair<string, List<string>> dep in AntlrGrammarDetails._dependent_grammars)
        {
            string name = dep.Key;
            Workspaces.Document x = Workspaces.Workspace.Instance.FindDocument(name);
            if (x == null)
            {
                // Add document.
                Workspaces.Container proj = Item.Parent;
                Workspaces.Document new_doc = new Workspaces.Document(name);
                proj.AddChild(new_doc);
                return true;
            }
            foreach (string y in dep.Value)
            {
                Workspaces.Document z = Workspaces.Workspace.Instance.FindDocument(y);
                if (z == null)
                {
                    // Add document.
                    Workspaces.Container proj = Item.Parent;
                    Workspaces.Document new_doc = new Workspaces.Document(y);
                    proj.AddChild(new_doc);
                    return true;
                }
            }
        }
        // The workspace is completely loaded. Create scopes for all files in the workspace
        // if they don't already exist.
        foreach (KeyValuePair<string, List<string>> dep in _dependent_grammars)
        {
            string name = dep.Key;
            _scopes.TryGetValue(name, out IScope file_scope);
            if (file_scope != null)
            {
                continue;
            }
            _scopes[name] = new FileScope(name, null);
        }
        // Set up search path scopes for the Imports relationship.
        IScope root = _scopes[FullFileName];
        foreach (string dep in Imports)
        {
            // Don't add if we already have this search path.
            IScope dep_scope = _scopes[dep];
            bool found = false;
            foreach (IScope scope in root.NestedScopes)
            {
                if (scope is SearchPathScope spc)
                {
                    if (spc.NestedScopes.First() == dep_scope)
                    {
                        found = true;
                        break;
                    }
                }
            }
            if (!found)
            {
                SearchPathScope import = new SearchPathScope(root);
                import.nest(dep_scope);
                root.nest(import);
            }
        }
        root.empty();
        RootScope = root;
        return false;
    });
    Passes.Add(() =>
    {
        ParseTreeWalker.Default.Walk(new Pass1Listener(this), ParseTree);
        return false;
    });
    Passes.Add(() =>
    {
        ParseTreeWalker.Default.Walk(new Pass2Listener(this), ParseTree);
        return false;
    });
}
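// The lambdas registered above follow a simple contract: a pass returns true
// when it changed the workspace (e.g. discovered and added an imported
// grammar), signalling that the whole pass list must be run again from the
// start. A minimal driver sketch for that contract (hypothetical; the
// scheduler that actually runs Passes is not part of this excerpt):
private static void RunPasses(List<Func<bool>> passes)
{
    bool restart;
    do
    {
        restart = false;
        foreach (Func<bool> pass in passes)
        {
            if (pass())
            {
                // The workspace changed; start over from the first pass.
                restart = true;
                break;
            }
        }
    } while (restart);
}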
/// <summary>
/// Parses an instance from a stream containing a CGP file.
/// </summary>
public RetroShaderPreset(Stream stream)
{
    var content = new StreamReader(stream).ReadToEnd();
    var dict = new Dictionary<string, string>();

    // Parse the key-value-pair format of the file.
    content = content.Replace("\r", "");
    foreach (var splitLine in content.Split('\n'))
    {
        var line = splitLine.Trim();
        if (line.StartsWith("#"))
        {
            continue; // lines that are solely comments
        }
        if (line == "")
        {
            continue; // empty line
        }
        int eq = line.IndexOf('=');
        if (eq == -1)
        {
            continue; // malformed line without a key=value separator
        }
        var key = line.Substring(0, eq).Trim();
        var value = line.Substring(eq + 1).Trim();
        int quote = value.IndexOf('\"');
        if (quote != -1)
        {
            value = value.Substring(quote + 1, value.IndexOf('\"', quote + 1) - (quote + 1));
        }
        else
        {
            // Remove comments from the end of the value. Exclusive from the above condition,
            // since comments after quoted strings would be snipped by the quoted string extraction.
            int hash = value.IndexOf('#');
            if (hash != -1)
            {
                value = value.Substring(0, hash);
            }
            value = value.Trim();
        }
        dict[key.ToLower()] = value;
    }

    // Process the keys.
    int nShaders = FetchInt(dict, "shaders", 0);
    for (int i = 0; i < nShaders; i++)
    {
        var sp = new ShaderPass { Index = i };
        Passes.Add(sp);

        // Should this value not be defined, the filtering option is implementation defined.
        sp.InputFilterLinear = FetchBool(dict, $"filter_linear{i}", false);
        sp.OutputFloat = FetchBool(dict, $"float_framebuffer{i}", false);
        sp.FrameCountMod = FetchInt(dict, $"frame_count_mod{i}", 1);
        // TODO: change extension to .cg for better compatibility? Just change .cg to .glsl transparently at the last second?
        sp.ShaderPath = FetchString(dict, $"shader{i}", "?");

        // If no scale type is given, it is assumed to be "source" with scaleN set to 1.0.
        // scale_type_xN and scale_type_yN specialize the scaling type in either direction;
        // scale_typeN, however, overrides both of these.
        sp.ScaleTypeX = (ScaleType)Enum.Parse(typeof(ScaleType), FetchString(dict, $"scale_type_x{i}", "Source"), true);
        sp.ScaleTypeY = (ScaleType)Enum.Parse(typeof(ScaleType), FetchString(dict, $"scale_type_y{i}", "Source"), true);
        ScaleType st = (ScaleType)Enum.Parse(typeof(ScaleType), FetchString(dict, $"scale_type{i}", "NotSet"), true);
        if (st != ScaleType.NotSet)
        {
            sp.ScaleTypeX = sp.ScaleTypeY = st;
        }

        // scaleN controls the scale in both horizontal and vertical directions.
        // If scaleN is defined, scale_xN and scale_yN have no effect.
        sp.Scale.X = FetchFloat(dict, $"scale_x{i}", 1);
        sp.Scale.Y = FetchFloat(dict, $"scale_y{i}", 1);
        float scale = FetchFloat(dict, $"scale{i}", -999);
        if (scale != -999)
        {
            sp.Scale.X = sp.Scale.Y = scale;
        }

        // TODO: LUTs
    }
}
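// The constructor above relies on FetchInt/FetchFloat/FetchBool/FetchString
// helpers that are not part of this excerpt. A minimal sketch, assuming each
// helper looks up the (lowercased) key and falls back to the given default
// (requires using System.Globalization for culture-invariant parsing):
private static string FetchString(Dictionary<string, string> dict, string key, string @default)
    => dict.TryGetValue(key, out var s) ? s : @default;

private static int FetchInt(Dictionary<string, string> dict, string key, int @default)
    => dict.TryGetValue(key, out var s) ? int.Parse(s, CultureInfo.InvariantCulture) : @default;

private static float FetchFloat(Dictionary<string, string> dict, string key, float @default)
    => dict.TryGetValue(key, out var s) ? float.Parse(s, CultureInfo.InvariantCulture) : @default;

private static bool FetchBool(Dictionary<string, string> dict, string key, bool @default)
    => dict.TryGetValue(key, out var s) ? (s == "true" || s == "1") : @default;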
public lbnfParsingResults(Document item) : base(item)
{
    Passes.Add(() =>
    {
        int before_count = 0;
        if (!ParsingResults.InverseImports.ContainsKey(this.FullFileName))
        {
            ParsingResults.InverseImports.Add(this.FullFileName, new HashSet<string>());
        }
        foreach (KeyValuePair<string, HashSet<string>> x in ParsingResults.InverseImports)
        {
            before_count++;
            before_count = before_count + x.Value.Count;
        }
        if (ParseTree == null)
        {
            return false;
        }
        int after_count = 0;
        foreach (KeyValuePair<string, HashSet<string>> dep in ParsingResults.InverseImports)
        {
            string name = dep.Key;
            Workspaces.Document x = item.Workspace.FindDocument(name);
            if (x == null)
            {
                // Add document.
                Workspaces.Container proj = Item.Parent;
                Workspaces.Document new_doc = new Workspaces.Document(name);
                proj.AddChild(new_doc);
                after_count++;
            }
            after_count++;
            after_count = after_count + dep.Value.Count;
        }
        return before_count != after_count;
    });
    Passes.Add(() =>
    {
        // The workspace is completely loaded. Create scopes for all files in the workspace
        // if they don't already exist.
        foreach (KeyValuePair<string, HashSet<string>> dep in InverseImports)
        {
            string name = dep.Key;
            _scopes.TryGetValue(name, out IScope file_scope);
            if (file_scope != null)
            {
                continue;
            }
            _scopes[name] = new FileScope(name, null);
        }
        // Set up search path scopes for the Imports relationship.
        IScope root = _scopes[FullFileName];
        foreach (string dep in Imports)
        {
            // Don't add if we already have this search path.
            IScope dep_scope = _scopes[dep];
            bool found = false;
            foreach (IScope scope in root.NestedScopes)
            {
                if (scope is SearchPathScope spc)
                {
                    if (spc.NestedScopes.First() == dep_scope)
                    {
                        found = true;
                        break;
                    }
                }
            }
            if (!found)
            {
                SearchPathScope import = new SearchPathScope(root);
                import.nest(dep_scope);
                root.nest(import);
            }
        }
        root.empty();
        RootScope = root;
        return false;
    });
    Passes.Add(() =>
    {
        if (ParseTree == null)
        {
            return false;
        }
        ParseTreeWalker.Default.Walk(new Pass2Listener(this), ParseTree);
        return false;
    });
    Passes.Add(() =>
    {
        if (ParseTree == null)
        {
            return false;
        }
        ParseTreeWalker.Default.Walk(new Pass3Listener(this), ParseTree);
        return false;
    });
}
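// Hypothetical helper making the before/after bookkeeping in the first pass of
// both lbnfParsingResults and AntlrGrammarDetails explicit: the "size" of the
// import graph is one unit per grammar file plus one per dependency edge (plus
// one per document added during the pass), so a changed size signals that the
// passes must be rerun.
private static int ImportGraphSize(Dictionary<string, HashSet<string>> inverseImports)
{
    int count = 0;
    foreach (KeyValuePair<string, HashSet<string>> entry in inverseImports)
    {
        count++;                    // one per grammar file
        count += entry.Value.Count; // one per dependency edge
    }
    return count;
}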
public static void Init()
{
    Passes.Add(new FreeProxyPass());
}
private void LoadSkins()
{
    string skinFile = FileDirectory + ModelName + "00.skin";
    Stormlib.MPQFile skin = new Stormlib.MPQFile(skinFile);

    SKINView mView = skin.Read<SKINView>();

    ushort[] indexLookup = new ushort[mView.nIndices];
    skin.Position = mView.ofsIndices;
    skin.Read(indexLookup);

    ushort[] triangles = new ushort[mView.nTriangles];
    skin.Position = mView.ofsTriangles;
    skin.Read(triangles);

    SKINSubMesh[] SubMeshes = new SKINSubMesh[mView.nSubMeshes];
    skin.Position = mView.ofsSubMeshes;
    skin.Read(SubMeshes);

    SKINTexUnit[] TexUnits = new SKINTexUnit[mView.nTexUnits];
    skin.Position = mView.ofsTexUnits;
    skin.Read(TexUnits);

    // All skin data has been read; release the file.
    skin.Dispose();

    ushort[] texLookUp = new ushort[Header.nTexLookups];
    mFile.Position = Header.ofsTexLookups;
    mFile.Read(texLookUp);

    ushort[] texUnitLookUp = new ushort[Header.nTexUnits];
    mFile.Position = Header.ofsTexUnits;
    mFile.Read(texUnitLookUp);

    M2RenderFlags[] renderFlags = new M2RenderFlags[Header.nRenderFlags];
    mFile.Position = Header.ofsRenderFlags;
    mFile.Read(renderFlags);

    // Resolve the two-level triangle -> index lookup into a flat index list.
    ushort[] indices = new ushort[mView.nTriangles];
    for (int i = 0; i < mView.nTriangles; ++i)
    {
        indices[i] = indexLookup[triangles[i]];
    }

    var bones = new M2Bone[Header.nBones];
    mFile.Position = Header.ofsBones;
    mFile.Read(bones);

    M2BoneAnimator mba = new M2BoneAnimator(mFile, this);

    BoneLookupTable = new ushort[Header.nBoneLookupTables];
    mFile.Position = Header.ofsBoneLookupTables;
    mFile.Read(BoneLookupTable);

    GlobalSequences = new uint[Header.nGlobalSequences];
    mFile.Position = Header.ofsGlobalSequences;
    mFile.Read(GlobalSequences);

    for (int i = 0; i < mView.nTexUnits; ++i)
    {
        M2RenderPass pass = new M2RenderPass();
        SKINSubMesh mesh = SubMeshes[TexUnits[i].SubMesh1];
        pass.Vertices = new MdxVertex[mesh.nTriangles];
        pass.Texture = Textures[(int)texLookUp[TexUnits[i].Texture]];
        pass.BlendMode = renderFlags[TexUnits[i].RenderFlags];
        pass.BoneMatrices = new SlimDX.Matrix[mesh.nBones];
        pass.BoneBaseIndex = mesh.startBone;
        pass.ParentModel = this;
        pass.Index = TexUnits[i].TexUnitNumber;
        for (uint q = 0; q < mesh.nBones; ++q)
        {
            pass.BoneMatrices[q] = mba.GetBone((short)BoneLookupTable[mesh.startBone + q]).Matrix;
        }
        for (ushort t = mesh.startTriangle, k = 0; k < mesh.nTriangles; ++t, ++k)
        {
            ushort index = indices[t];
            pass.Vertices[k] = Vertices[index];
            // Copy the bone indices from the source vertex (indexed by the
            // resolved vertex index, not by the texture-unit loop counter).
            pass.Vertices[k].bi1 = (byte)Vertices[index].bi1;
            pass.Vertices[k].bi2 = (byte)Vertices[index].bi2;
            pass.Vertices[k].bi3 = (byte)Vertices[index].bi3;
            pass.Vertices[k].bi4 = (byte)Vertices[index].bi4;
        }
        pass.SetVertexIndices();
        Passes.Add(pass);
    }

    // Sort passes so alpha-blended geometry (blend mode 2) is drawn last,
    // then by ascending blend mode, then by texture-unit index.
    Passes.Sort((g1, g2) =>
    {
        if (g1.BlendMode.blend == 2 && g2.BlendMode.blend != 2)
        {
            return 1;
        }
        if (g2.BlendMode.blend == 2 && g1.BlendMode.blend != 2)
        {
            return -1;
        }
        if (g1.BlendMode.blend < g2.BlendMode.blend)
        {
            return -1;
        }
        if (g2.BlendMode.blend < g1.BlendMode.blend)
        {
            return 1;
        }
        return g1.Index.CompareTo(g2.Index);
    });

    BoneAnimator = mba;
}
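// An equivalent, declarative formulation of the sort above (a sketch, assuming
// Passes is an assignable List<M2RenderPass> and using System.Linq): blended
// passes (blend == 2) last, then ascending blend mode, then texture-unit index.
private void SortPassesDeclarative()
{
    Passes = Passes
        .OrderBy(p => p.BlendMode.blend == 2 ? 1 : 0)
        .ThenBy(p => p.BlendMode.blend)
        .ThenBy(p => p.Index)
        .ToList();
}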
private void LoadSkins(BinaryReader reader)
{
    mSkin = new M2SkinFile(ModelRoot, mModelName, 0);
    if (mSkin.Load() == false)
    {
        throw new InvalidOperationException("Unable to load skin file");
    }

    Indices = mSkin.Indices;

    var texLookup = ReadArrayOf<ushort>(reader, mHeader.OfsTexLookup, mHeader.NTexLookup);
    var renderFlags = ReadArrayOf<uint>(reader, mHeader.OfsRenderFlags, mHeader.NRenderFlags);
    var uvAnimLookup = ReadArrayOf<short>(reader, mHeader.OfsUvAnimLookup, mHeader.NUvAnimLookup);

    mSubMeshes = mSkin.SubMeshes.Select(sm => new M2SubMeshInfo
    {
        BoundingSphere = new BoundingSphere(new Vector3(sm.centerBoundingBox.X, -sm.centerBoundingBox.Y, sm.centerBoundingBox.Z), sm.radius),
        NumIndices = sm.nTriangles,
        // The low bit of unk1 extends the 16-bit start triangle past 65535.
        StartIndex = sm.startTriangle + (((sm.unk1 & 1) != 0) ? (ushort.MaxValue + 1) : 0)
    }).ToArray();

    foreach (var texUnit in mSkin.TexUnits)
    {
        var mesh = mSkin.SubMeshes[texUnit.submeshIndex];

        int uvIndex;
        if (texUnit.textureAnimIndex >= uvAnimLookup.Length || uvAnimLookup[texUnit.textureAnimIndex] < 0)
        {
            uvIndex = -1;
        }
        else
        {
            uvIndex = uvAnimLookup[texUnit.textureAnimIndex];
        }

        var startTriangle = (int)mesh.startTriangle;
        if ((mesh.unk1 & 1) != 0)
        {
            startTriangle += ushort.MaxValue + 1;
        }

        var textures = new List<Graphics.Texture>();
        var texIndices = new List<int>();
        // op_count values of 2 through 4 reference that many consecutive entries
        // in the texture lookup table; any other value uses a single texture.
        int textureCount = (texUnit.op_count >= 2 && texUnit.op_count <= 4) ? texUnit.op_count : 1;
        for (var j = 0; j < textureCount; ++j)
        {
            textures.Add(mTextures[texLookup[texUnit.texture + j]]);
            texIndices.Add(texLookup[texUnit.texture + j]);
        }

        var flags = renderFlags[texUnit.renderFlags];
        var blendMode = flags >> 16;
        var flag = flags & 0xFFFF;

        if (mRemapBlend && texUnit.shaderId < mBlendMap.Length)
        {
            blendMode = mBlendMap[texUnit.shaderId];
        }

        blendMode %= 7;
        // Blend modes 0 and 1 render opaquely; everything else needs a blend pass.
        if (blendMode != 0 && blendMode != 1)
        {
            HasBlendPass = true;
        }
        else
        {
            HasOpaquePass = true;
        }

        Passes.Add(new M2RenderPass
        {
            TextureIndices = texIndices,
            Textures = textures,
            AlphaAnimIndex = texUnit.transparencyIndex,
            ColorAnimIndex = texUnit.colorIndex,
            IndexCount = mesh.nTriangles,
            RenderFlag = flag,
            BlendMode = blendMode,
            StartIndex = startTriangle,
            TexAnimIndex = uvIndex,
            TexUnitNumber = texUnit.texUnitNumber,
            OpCount = texUnit.op_count,
            VertexShaderType = M2ShadersClass.GetVertexShaderTypeOld(texUnit.shaderId, texUnit.op_count),
            PixelShaderType = M2ShadersClass.GetPixelShaderTypeOld(texUnit.shaderId, texUnit.op_count),
        });
    }

    SortPasses();
}
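// SortPasses() is called above but not defined in this excerpt. A plausible
// sketch, assuming the same ordering goal as the comparator in the other
// LoadSkins overload: opaque passes (blend modes 0 and 1) are drawn before
// blended ones, with the texture-unit number as a tie-breaker.
private void SortPasses()
{
    Passes.Sort((a, b) =>
    {
        bool aBlends = a.BlendMode != 0 && a.BlendMode != 1;
        bool bBlends = b.BlendMode != 0 && b.BlendMode != 1;
        if (aBlends != bBlends)
        {
            return aBlends ? 1 : -1; // opaque geometry first
        }
        return a.TexUnitNumber.CompareTo(b.TexUnitNumber);
    });
}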