/// <summary>
/// For testing purposes while I code the Script Engine
/// </summary>
private async void aboutToolStripMenuItem_Click(object sender, EventArgs e)
{
    string formated;
    try
    {
        //var type = new ReferenceType("test", typeof(TestObj));
        //type.Mappings.Add("create", TestObj.Create);
        //ReferenceManager.AddType(type);

        Scope scope = new Scope();
        //scope.Execute("test.create objname");

        string path = @"D:\Programming\C#\Projects\Bf2Editor\Bf2Editor\bin\Debug\Temp\Server Objects\bf2\Kits\US";

        ConFile file = await ScriptEngine.LoadFileAsync(Path.Combine(path, "us_kits.con"), scope);
        formated = file.ToFileFormat();
        Clipboard.SetText(formated);

        ConFile file3 = await ScriptEngine.LoadFileAsync(Path.Combine(path, "us_common.con"), scope);
        formated = file3.ToFileFormat();

        ConFile file2 = await ScriptEngine.LoadFileAsync(Path.Combine(path, "US_Specops.con"), scope);
        formated = file2.ToFileFormat();
        Clipboard.SetText(formated);
    }
    catch
    {
        // Ignore failures while testing; the logged errors are inspected below.
    }

    var c = Logger.Errors;
}
private void AddDirectory(string DirectoryName)
{
    // Recursively add a directory of simulations to the run queue.
    foreach (string FileName in Directory.GetFiles(DirectoryName))
    {
        string Extension = Path.GetExtension(FileName).ToLower();
        if (Extension == ".con")
        {
            foreach (string SimulationName in ConFile.GetSimsInConFile(FileName))
            {
                _JobRunner.Add(new RunConJob(FileName, SimulationName, _JobRunner));
            }
        }
        else if (Extension == ".apsim")
        {
            _JobRunner.Add(new RunEntireApsimFileJob(FileName, _JobRunner));
        }
    }

    foreach (string ChildDirectoryName in Directory.GetDirectories(DirectoryName))
    {
        // GetDirectories returns full paths, so compare the directory name only.
        if (Path.GetFileName(ChildDirectoryName) != ".svn")
        {
            AddDirectory(ChildDirectoryName);
        }
    }
}
/// <summary>
/// Create a job for each simulation in the specified .con file.
/// </summary>
private void CreateJobsFromCON(string FileName, string[] SimulationPaths, ref List<IJob> jobs)
{
    // Run ConToSim first to turn the .con file into .sim files.
    string ConToSimExe = Path.Combine(Configuration.ApsimBinDirectory(), "ConToSim.exe");
    Process ConToSim = Utility.RunProcess(ConToSimExe, StringManip.DQuote(FileName), Path.GetDirectoryName(FileName));
    Utility.CheckProcessExitedProperly(ConToSim);

    // If no paths were specified then get a list of all paths.
    if (SimulationPaths == null || SimulationPaths.Length == 0)
    {
        List<String> AllPaths = ConFile.GetSimsInConFile(FileName);
        SimulationPaths = AllPaths.ToArray();
    }

    // Create a series of jobs (run + cleanup) for each simulation in the .con file.
    foreach (string SimulationPath in SimulationPaths)
    {
        string SimFileName = Path.Combine(Path.GetDirectoryName(FileName),
                                          Path.GetFileNameWithoutExtension(FileName) + "." + SimulationPath + ".sim");
        Job J = CreateJob(SimFileName, SimFileName.Replace(".sim", ".sum"));
        jobs.Add(J);
        J = CleanupJob(SimFileName, J.Name);
        jobs.Add(J);
    }
}
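For reference, a sketch of the file names the loop above produces, using a hypothetical wheat.con that contains a simulation path "Continuous Wheat" (both names are invented for illustration):

// Illustration only -- hypothetical file and simulation names.
string FileName = @"C:\Apsim\wheat.con";
string SimulationPath = "Continuous Wheat";

string SimFileName = Path.Combine(Path.GetDirectoryName(FileName),
    Path.GetFileNameWithoutExtension(FileName) + "." + SimulationPath + ".sim");

// SimFileName                          -> C:\Apsim\wheat.Continuous Wheat.sim   (run by the job)
// SimFileName.Replace(".sim", ".sum")  -> C:\Apsim\wheat.Continuous Wheat.sum   (summary output)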
/// <summary>
/// Creates a new, empty object of this type
/// </summary>
/// <returns></returns>
public ConFileObject Clone(ConFile file = null)
{
    // Create a new token, with no reference to our current one
    Type type = GetType();
    Token newToken = CreateToken(Name, type, file ?? new ConFile(this.File?.FilePath));

    // Return a new instance of this object, with the same name
    return (ConFileObject)Activator.CreateInstance(type, this.Name, newToken);
}
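A minimal usage sketch, assuming CopyInto is a hypothetical helper and that obj was previously loaded by the ScriptEngine:

// Hypothetical helper: duplicate an object definition into another .con file.
public static ConFileObject CopyInto(ConFileObject obj, ConFile target)
{
    // Clone() keeps the object's name but builds a brand-new token bound to "target",
    // so edits to the copy never touch the original file's token stream.
    return obj.Clone(target);
}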
/// <summary>
/// Breaks an array of indexed strings into recognizable tokens
/// </summary>
/// <param name="File">The ConFile object where these lines are located</param>
/// <param name="Lines">The indexed input strings to break up into tokens</param>
/// <returns>
/// The set of tokens located within the string.
/// <paramref name="Lines"/> is updated as a result of this call, and will contain entries that did not
/// match any of the token expressions
/// </returns>
public static Token[] Tokenize(ConFile File, ref Dictionary<int, string> Lines)
{
    List<Token> tokens = new List<Token>();

    // Tokenize and add each token to the list of matched rules
    foreach (KeyValuePair<TokenType, string> token in TokenExpressions)
    {
        tokens.AddRange(Tokenize(token.Key, token.Value, File, ref Lines));
    }

    return tokens.OrderBy(x => x.Position).ToArray();
}
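A rough usage sketch of this method, assuming the caller indexes a file's raw lines by line number before tokenizing; the path and the surrounding caller code are hypothetical, only the Tokenizer.Tokenize and ConFile members shown elsewhere in this listing are relied on:

// Hypothetical caller: index the raw lines by line number, then tokenize.
var file = new ConFile(@"C:\mod\us_kits.con");                  // hypothetical path
string[] raw = System.IO.File.ReadAllLines(file.FilePath);

var lines = new Dictionary<int, string>();
for (int i = 0; i < raw.Length; i++)
    lines.Add(i, raw[i]);

Token[] tokens = Tokenizer.Tokenize(file, ref lines);

// "lines" now holds only the entries that matched no token expression,
// which makes unparsed input easy to report.
foreach (var leftover in lines)
    Console.WriteLine($"Line {leftover.Key} not recognized: {leftover.Value}");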
public void CreateConFile(string documentSetId, string fileName, string parentFileName, DateTime received)
{
    try
    {
        ConFile conFile = new ConFile(documentSetId, fileName, string.Empty, received);
        _conFileRepository.Create(conFile);
    }
    catch (Exception e)
    {
        throw new UnityException("Unable to create con file", e);
    }
}
static void Main(string[] args)
{
    // Emit the class header, compile game.con (which appends member code to con_code and
    // initialization calls to con_construct_code), then wrap the initialization code in
    // the generated constructor and write the result out.
    con_code = "internal class ConScript\n";
    con_code += "{\n";

    ConFile conFile = new ConFile("game.con");

    con_code += "\tpublic ConScript()\n";
    con_code += "\t{\n";
    con_code += con_construct_code;
    con_code += "\t}\n";
    con_code += "}\n";

    File.WriteAllText("conscript.cs", con_code);
}
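Roughly, the emitted conscript.cs has the shape below; the members shown are invented for illustration, and the actual contents depend entirely on which keywords appear in game.con:

// Illustration only -- the approximate shape of the generated file:
internal class ConScript
{
    public const int LIZTROOP = 1680;      // emitted by a "define" line (hypothetical value)

    public ConScript()
    {
        GlobalMembers.ConActions.definequote(0, "AMMO FOR DEVASTATOR ");   // hypothetical quote
    }
}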
/// <summary>
/// Creates a new Token
/// </summary>
/// <param name="kind">The kind of this token</param>
/// <param name="match">The Match the token is to be generated from</param>
/// <param name="file">The con file this token is generating from</param>
/// <param name="index">The line number this match is found on</param>
/// <returns>The newly created token</returns>
internal static Token Create(TokenType kind, Match match, ConFile file, int index)
{
    Token token = new Token()
    {
        File = file,
        Position = match.Index + index,
        Kind = kind,
        Match = match,
        Value = match.Value
    };

    // We only create token args for object property types
    if (token.Kind == TokenType.ObjectProperty || token.Kind == TokenType.ActiveSwitch)
        SetTokenArgs(token);

    return token;
}
/// <summary>
/// Creates a new token instance, that can be used to create a new
/// instance of this object type.
/// </summary>
/// <param name="name">The unique name of the object</param>
/// <param name="type">The derived type of this object</param>
/// <returns></returns>
private static Token CreateToken(string name, Type type, ConFile file)
{
    Token token;

    // NOTE: an exception will be thrown in this method if this object
    // type isn't added to the ReferenceManager
    var refType = ReferenceManager.GetReferenceType(type);
    var method = refType.Mappings.FirstOrDefault();

    // If we have a Mapping, we assume a non-static object
    if (method.Key != null && method.Value != null)
    {
        // Build a create string... this is pretty generic
        // and may not cover custom types very well!!!
        string input = $"{refType.Name}.{method.Key} {type.Name} {name}";

        // Create a new Token
        token = Tokenizer.Tokenize(input);
        token.File = file;
    }
    else
    {
        // Must be a static class, just create a very generic token!
        token = new Token()
        {
            Kind = TokenType.StaticObject,
            File = file,
            Value = name,
            TokenArgs = new TokenArgs()
            {
                Arguments = new string[0],
                ReferenceType = refType
            }
        };
    }

    return token;
}
/// <summary>
/// Performs tokenization of a collection of non-tokenized data parts with a specific pattern
/// </summary>
/// <param name="tokenKind">The name to give the located tokens</param>
/// <param name="pattern">The pattern to use to match the tokens</param>
/// <param name="untokenizedParts">The portions of the input that have yet to be tokenized (organized as position vs. text in source)</param>
/// <returns>
/// The set of tokens matching the given pattern located in the untokenized portions of the input,
/// <paramref name="untokenizedParts"/> is updated as a result of this call
/// </returns>
private static IEnumerable<Token> Tokenize(
    TokenType tokenKind,
    string pattern,
    ConFile file,
    ref Dictionary<int, string> untokenizedParts)
{
    // Do a bit of setup
    var unMatchedParts = new Dictionary<int, string>();
    var resultTokens = new List<Token>();
    var regex = new Regex(pattern, RegexOptions.Multiline | RegexOptions.IgnoreCase);

    // Look through all of our currently untokenized data
    foreach (KeyValuePair<int, string> part in untokenizedParts)
    {
        // Trim our line (the empty-line skip is currently disabled)
        string line = part.Value.Trim();
        //if (line.Length == 0) continue;

        // Check to see if we have a match
        Match match = regex.Match(line);

        // If we don't have one, keep the data as untokenized and move to the next chunk
        if (!match.Success)
        {
            unMatchedParts.Add(part.Key, line);
            continue;
        }

        // Create a token for the match, recording the position it was found at in the source file
        resultTokens.Add(Token.Create(tokenKind, match, file, part.Key));
    }

    // Update the untokenized data to contain what we couldn't process with this pattern
    untokenizedParts = unMatchedParts;

    // Return the tokens we were able to extract
    return resultTokens;
}
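The design here is a sieve: each pattern gets one pass over the remaining lines and removes whatever it matched, so later, more general patterns never see lines an earlier pattern already claimed. A self-contained sketch of the same idea with plain regexes follows; the patterns and input lines are invented, not the real TokenExpressions:

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

static class SievePassDemo
{
    static void Main()
    {
        // Invented patterns, ordered most-specific first (not the real TokenExpressions).
        var patterns = new Dictionary<string, string>
        {
            ["Comment"]  = @"^rem\s+.*$",
            ["Property"] = @"^\w+\.\w+\s+.*$"
        };

        var remaining = new Dictionary<int, string>
        {
            [0] = "rem US kit definitions",
            [1] = "ObjectTemplate.create Kit US_Specops",
            [2] = "this line matches nothing"
        };

        foreach (var p in patterns)
        {
            var regex = new Regex(p.Value, RegexOptions.IgnoreCase);
            var unmatched = new Dictionary<int, string>();

            foreach (var part in remaining)
            {
                if (regex.IsMatch(part.Value))
                    Console.WriteLine($"{p.Key}: line {part.Key}");
                else
                    unmatched.Add(part.Key, part.Value);   // leave for the next pattern
            }

            remaining = unmatched;   // shrink the working set each pass
        }

        // Whatever is left in "remaining" matched no pattern (here: line 2).
    }
}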
private void AddFile(string FileName, bool JustDoIt)
{
    List<string> SimulationsToRun = null;

    if (Directory.Exists(FileName))
    {
        _JobRunner.Add(new RunApsimDirectory(FileName, _JobRunner));
        return;
    }

    string Extension = Path.GetExtension(FileName).ToLower();
    if (Extension == ".con")
    {
        SimulationsToRun = ConFile.GetSimsInConFile(FileName);
    }
    else if (Extension == ".apsim")
    {
        SimulationsToRun = ApsimFile.ApsimFile.GetSimsInApsimFile(FileName);
    }
    else if (Extension == ".txt")
    {
        // A .txt file is a list of file names to run - add each one recursively.
        using (StreamReader In = new StreamReader(FileName))
        {
            string Line = In.ReadLine();
            while (Line != "" && Line != null)
            {
                AddFile(Line, JustDoIt);   // recursion
                Line = In.ReadLine();
            }
        }
    }
    else
    {
        throw new Exception("Unknown simulation file type: " + FileName);
    }

    // Display a selection form if there is more than one simulation and this isn't an AutoRun.
    if (SimulationsToRun != null && SimulationsToRun.Count > 1 && !JustDoIt)
    {
        SelectionForm Form = new SelectionForm(SimulationsToRun);
        if (Form.ShowDialog() == DialogResult.OK)
        {
            SimulationsToRun = Form.Selections;
        }
        else
        {
            return;
        }
    }

    if (SimulationsToRun != null)
    {
        foreach (string SimulationName in SimulationsToRun)
        {
            if (Extension == ".con")
            {
                _JobRunner.Add(new RunConJob(FileName, SimulationName, _JobRunner));
            }
            else if (Extension == ".apsim")
            {
                _JobRunner.Add(new RunApsimFileJob(FileName, SimulationName, _JobRunner));
            }
        }
    }
    else if (Extension != ".txt")
    {
        _JobRunner.Add(new RunEntireApsimFileJob(FileName, _JobRunner));
    }
}
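For reference, a .txt run list that this method would accept might look like the following (the paths are hypothetical). Each line is fed back through AddFile, so entries can be .con files, .apsim files, or directories, and reading stops at the first blank line:

C:\Apsim\Examples\Continuous Wheat.apsim
C:\Apsim\Examples\millet.con
C:\Apsim\MyRuns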
public ConFile(string fileName) { Console.Write("Compiling " + fileName + " \n"); Dictionary <string, Action> keywords = new Dictionary <string, Action> { ["define"] = () => { string _name = token; string _value = token; con_code += "public const int " + _name + " = " + _value + ";\n"; }, ["include"] = () => { ConFile file = new ConFile(token); }, ["definequote"] = () => { int id = int.Parse(token); string str = ""; while (_tokens[currentToken] != "NEWLINE") { str += _tokens[currentToken]; str += " "; currentToken++; } con_construct_code += "\tGlobalMembers.ConActions.definequote("; con_construct_code += id; con_construct_code += ","; con_construct_code += "\"" + str + "\""; con_construct_code += ");\n"; }, ["definevolumename"] = () => { int id = int.Parse(token); string str = ""; while (_tokens[currentToken] != "NEWLINE") { str += _tokens[currentToken]; str += " "; currentToken++; } con_construct_code += "\tGlobalMembers.ConActions.definevolumename("; con_construct_code += id; con_construct_code += ","; con_construct_code += "\"" + str + "\""; con_construct_code += ");\n"; }, ["defineskillname"] = () => { int id = int.Parse(token); string str = ""; while (_tokens[currentToken] != "NEWLINE") { str += _tokens[currentToken]; str += " "; currentToken++; } con_construct_code += "\tGlobalMembers.ConActions.defineskillname("; con_construct_code += id; con_construct_code += ","; con_construct_code += "\"" + str + "\""; con_construct_code += ");\n"; }, ["music"] = () => { int id = int.Parse(token); con_construct_code += "\tGlobalMembers.ConActions.definemusic("; con_construct_code += id; while (_tokens[currentToken] != "NEWLINE") { con_construct_code += ","; con_construct_code += "\"" + token + "\""; } con_construct_code += ");\n"; }, ["definelevelname"] = () => { int id = int.Parse(token); int id2 = int.Parse(token); string mapname = token; string time1 = token; string time2 = token; string str = ""; while (_tokens[currentToken] != "NEWLINE") { str += _tokens[currentToken]; str += " "; currentToken++; } con_construct_code += "\tGlobalMembers.ConActions.definelevelname("; con_construct_code += id; con_construct_code += ","; con_construct_code += id2; con_construct_code += ","; con_construct_code += "\"" + mapname + "\""; con_construct_code += ","; con_construct_code += "\"" + time1 + "\""; con_construct_code += ","; con_construct_code += "\"" + time2 + "\""; con_construct_code += ","; con_construct_code += "\"" + str + "\""; con_construct_code += ");\n"; }, ["action"] = () => { string name = token; if (inState == false) { con_code += "static GlobalMembers.ConActions.ConAction " + name + " = new GlobalMembers.ConActions.ConAction("; while (_tokens[currentToken] != "NEWLINE") { con_code += _tokens[currentToken]; if (_tokens[currentToken + 1] != "NEWLINE") { con_code += ","; } currentToken++; } con_code += ");\n"; } else { con_code += "\tGlobalMembers.ConActions.SetAction(" + name + ");\n"; } }, ["actor"] = () => { string name = token; if (inState == false) { inState = true; con_construct_code += "GlobalMembers.ConActions.RegisterActor(A_" + name + "," + name + ", enemy"; while (_tokens[currentToken] != "NEWLINE" && _tokens[currentToken] != "enda" && _tokens[currentToken] != "state") { con_construct_code += ","; con_construct_code += _tokens[currentToken]; currentToken++; } con_construct_code += ");\n"; con_code += "private void A_"; con_code += name; con_code += "()\n"; con_code += "{\n"; } else { con_code += name; con_code += "();\n"; } }, ["useractor"] = () => { string aitype = token; string name 
= token; inState = true; con_construct_code += "GlobalMembers.ConActions.RegisterActor(A_" + name + "," + name + ", " + aitype; while (_tokens[currentToken] != "NEWLINE" && _tokens[currentToken] != "enda" && _tokens[currentToken] != "state") { con_construct_code += ","; con_construct_code += _tokens[currentToken]; currentToken++; } con_construct_code += ");\n"; con_code += "private void A_"; con_code += name; con_code += "()\n"; con_code += "{\n"; }, ["seekplayer"] = () => { // Not sure what to do here? }, ["move"] = () => { if (inState) { con_code += "GlobalMembers.ConActions.Move("; while (_tokens[currentToken] != "NEWLINE") { con_code += _tokens[currentToken]; if (_tokens[currentToken + 1] != "NEWLINE") { con_code += ","; } currentToken++; } con_code += ");\n"; } else { string name = token; con_code += "static GlobalMembers.ConActions.MoveAction " + name + " = new GlobalMembers.ConActions.MoveAction("; while (_tokens[currentToken] != "NEWLINE") { con_code += _tokens[currentToken]; if (_tokens[currentToken + 1] != "NEWLINE") { con_code += ","; } currentToken++; } con_code += ");\n"; } }, ["ai"] = () => { if (inState) { con_code += "GlobalMembers.ConActions.Ai("; while (_tokens[currentToken] != "NEWLINE") { con_code += _tokens[currentToken]; if (_tokens[currentToken + 1] != "NEWLINE") { con_code += ","; } currentToken++; } con_code += ");\n"; } else { string name = token; con_code += "static GlobalMembers.ConActions.AIAction " + name + " = new GlobalMembers.ConActions.AIAction("; while (_tokens[currentToken] != "NEWLINE") { con_code += _tokens[currentToken]; if (_tokens[currentToken + 1] != "NEWLINE") { con_code += ","; } currentToken++; } con_code += ");\n"; } }, ["state"] = () => { string name = token; if (!inState) { inState = true; con_code += "private void "; con_code += name; con_code += "()"; con_code += "{\n"; } else { con_code += name; con_code += "();\n"; } }, ["ifpdistl"] = () => { string name = token; con_code += "if(GlobalMembers.ConActions.ifpdistl(" + name + "))\n"; }, ["ifpdistg"] = () => { string name = token; con_code += "if(GlobalMembers.ConActions.ifpdistg(" + name + "))\n"; }, ["ifcansee"] = () => { con_code += "if(GlobalMembers.ConActions.ifcansee())\n"; }, ["ifhitweapon"] = () => { con_code += "if(GlobalMembers.ConActions.ifhitweapon())\n"; }, ["{"] = () => { con_code += "{\n"; }, ["}"] = () => { con_code += "}\n"; }, ["else"] = () => { con_code += "else\n"; }, ["break"] = () => { con_code += "return;\n"; }, ["fall"] = () => { con_code += "GlobalMembers.ConActions.fall();\n"; }, ["pstomp"] = () => { con_code += "GlobalMembers.ConActions.pstomp();\n"; }, ["killit"] = () => { con_code += "GlobalMembers.ConActions.killit();\n"; }, ["wackplayer"] = () => { con_code += "GlobalMembers.ConActions.wackplayer();\n"; }, ["resetcount"] = () => { con_code += "GlobalMembers.ConActions.resetcount();\n"; }, ["resetplayer"] = () => { con_code += "GlobalMembers.ConActions.resetplayer();\n"; }, ["operate"] = () => { con_code += "GlobalMembers.ConActions.operate();\n"; }, ["mikesnd"] = () => { con_code += "GlobalMembers.ConActions.mikesnd();\n"; }, ["tossweapon"] = () => { con_code += "GlobalMembers.ConActions.tossweapon();\n"; }, ["respawnhitag"] = () => { con_code += "GlobalMembers.ConActions.respawnhitag();\n"; }, ["pkick"] = () => { con_code += "GlobalMembers.ConActions.pkick();\n"; }, ["tip"] = () => { con_code += "GlobalMembers.ConActions.tip();\n"; }, ["getlastpal"] = () => { con_code += "GlobalMembers.ConActions.getlastpal();\n"; }, ["resetactioncount"] = () => { con_code += 
"GlobalMembers.ConActions.resetactioncount();\n"; }, ["endofgame"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.endofgame(" + v1 + ");\n"; }, ["spritepal"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.spritepal(" + v1 + ");\n"; }, ["quote"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.quote(" + v1 + ");\n"; }, ["shoot"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.shoot(" + v1 + ");\n"; }, ["cstator"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.cstator(" + v1 + ");\n"; }, ["stopsound"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.stopsound(" + v1 + ");\n"; }, ["lotsofglass"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.lotsofglass(" + v1 + ");\n"; }, ["mail"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.mail(" + v1 + ");\n"; }, ["clipdist"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.clipdist(" + v1 + ");\n"; }, ["cstat"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.cstat(" + v1 + ");\n"; }, ["paper"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.paper(" + v1 + ");\n"; }, ["addstrength"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.addstrength(" + v1 + ");\n"; }, ["money"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.money(" + v1 + ");\n"; }, ["soundonce"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.soundonce(" + v1 + ");\n"; }, ["addkills"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.addkills(" + v1 + ");\n"; }, ["sleeptime"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.sleeptime(" + v1 + ");\n"; }, ["count"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.count(" + v1 + ");\n"; }, ["cactor"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.cactor(" + v1 + ");\n"; }, ["debris"] = () => { string v1 = token; string v2 = token; con_code += "\tGlobalMembers.ConActions.debris(" + v1 + "," + v2 + ");\n"; }, ["sizeat"] = () => { string v1 = token; string v2 = token; con_code += "\tGlobalMembers.ConActions.sizeat(" + v1 + "," + v2 + ");\n"; }, ["sizeto"] = () => { string v1 = token; string v2 = token; con_code += "\tGlobalMembers.ConActions.sizeto(" + v1 + "," + v2 + ");\n"; }, ["addinventory"] = () => { string v1 = token; string v2 = token; con_code += "\tGlobalMembers.ConActions.addinventory(" + v1 + "," + v2 + ");\n"; }, ["sound"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.sound(" + v1 + ");\n"; }, ["addphealth"] = () => { string v1 = token; con_code += "\tGlobalMembers.ConActions.addphealth(" + v1 + ");\n"; }, ["palfrom"] = () => { string parms = token; while (_tokens[currentToken] != "NEWLINE") { parms += ","; parms += token; } con_code += "\tGlobalMembers.ConActions.palfrom(" + parms + ");\n"; }, ["ifrnd"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifrnd(" + v1 + "))\n"; }, ["ifcount"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifcount(" + v1 + "))\n"; }, ["ifai"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifai(" + v1 + "))\n"; }, ["ifwasweapon"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifwasweapon(" + v1 + "))\n"; }, ["ifpinventory"] = () => { string v1 = token; string v2 = token; con_code += 
"\tif(GlobalMembers.ConActions.ifwasweapon(" + v1 + "," + v2 + "))\n"; }, ["ifaction"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifaction(" + v1 + "))\n"; }, ["ifp"] = () => { string parms = token; while (_tokens[currentToken] != "NEWLINE" && _tokens[currentToken] != "nullop") { parms += ","; parms += token; } con_code += "\tif(GlobalMembers.ConActions.ifp(" + parms + "))\n"; }, ["ifphealthl"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifphealthl(" + v1 + "))\n"; }, ["ifspritepal"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifspritepal(" + v1 + "))\n"; }, ["ifgotweaponce"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifgotweaponce(" + v1 + "))\n"; }, ["ifactor"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifactor(" + v1 + "))\n"; }, ["ifangdiffl"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifangdiffl(" + v1 + "))\n"; }, ["ifactioncount"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifactioncount(" + v1 + "))\n"; }, ["ifmove"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifmove(" + v1 + "))\n"; }, ["ifstrength"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifstrength(" + v1 + "))\n"; }, ["iffloordistl"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.iffloordistl(" + v1 + "))\n"; }, ["ifceilingdistl"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifceilingdistl(" + v1 + "))\n"; }, ["ifspawnedby"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifspawnedby(" + v1 + "))\n"; }, ["ifgapzl"] = () => { string v1 = token; con_code += "\tif(GlobalMembers.ConActions.ifgapzl(" + v1 + "))\n"; }, ["ifhitspace"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifhitspace())\n"; }, ["ifbulletnear"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifbulletnear())\n"; }, ["ifrespawn"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifrespawn())\n"; }, ["ifcanshoottarget"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifcanshoottarget())\n"; }, ["ifoutside"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifoutside())\n"; }, ["ifmultiplayer"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifmultiplayer())\n"; }, ["ifnosounds"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifnosounds())\n"; }, ["ifinspace"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifinspace())\n"; }, ["ifawayfromwall"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifawayfromwall())\n"; }, ["ifactornotstayput"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifactornotstayput())\n"; }, ["ifonwater"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifonwater())\n"; }, ["ifinwater"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifinwater())\n"; }, ["ifcanseetarget"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifcanseetarget())\n"; }, ["ifinouterspace"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifinouterspace())\n"; }, ["ifnotmoving"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifnotmoving())\n"; }, ["nullop"] = () => { con_code += "{\n }\n"; }, ["ifdead"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifdead())\n"; }, ["ifsquished"] = () => { con_code += "\tif(GlobalMembers.ConActions.ifsquished())\n"; }, ["ends"] = () => { inState = false; con_code += "}\n"; }, ["enda"] = () => { inState = false; con_code += "}\n"; 
}, ["strength"] = () => { string v1 = token; con_code += "GlobalMembers.ConActions.strength(" + v1 + ");\n"; }, ["spawn"] = () => { string v1 = token; con_code += "GlobalMembers.ConActions.spawn(" + v1 + ");\n"; }, ["globalsound"] = () => { string v1 = token; con_code += "GlobalMembers.ConActions.globalsound(" + v1 + ");\n"; }, ["addkills"] = () => { string v1 = token; con_code += "GlobalMembers.ConActions.addkills(" + v1 + ");\n"; }, ["guts"] = () => { string v1 = token; string v2 = token; con_code += "GlobalMembers.ConActions.guts(" + v1 + "," + v2 + ");\n"; }, ["hitradius"] = () => { string v1 = token; string v2 = token; string v3 = token; string v4 = token; string v5 = token; con_code += "GlobalMembers.ConActions.hitradius(" + v1 + "," + v2 + "," + v3 + "," + v4 + "," + v5 + ");\n"; }, ["addweapon"] = () => { string name = token; string v1 = token; con_code += "\tGlobalMembers.ConActions.addweapon("; con_code += name; con_code += ","; con_code += v1; con_code += ");\n"; }, ["addammo"] = () => { string name = token; string v1 = token; con_code += "\tGlobalMembers.ConActions.addammo("; con_code += name; con_code += ","; con_code += v1; con_code += ");\n"; }, ["definesound"] = () => { string name = token; string sndfile = token; int v1 = int.Parse(token); int v2 = int.Parse(token); int v3 = int.Parse(token); int v4 = int.Parse(token); int v5 = int.Parse(token); con_construct_code += "\tGlobalMembers.ConActions.definesound("; con_construct_code += name; con_construct_code += ","; con_construct_code += "\"" + sndfile + "\""; con_construct_code += ","; con_construct_code += v1; con_construct_code += ","; con_construct_code += v2; con_construct_code += ","; con_construct_code += v3; con_construct_code += ","; con_construct_code += v4; con_construct_code += ","; con_construct_code += v5; con_construct_code += ");\n"; }, ["gamestartup"] = () => { con_construct_code += "\tGlobalMembers.ConActions.gamestartup("; con_construct_code += token; for (int i = 1; i < 30; i++) { con_construct_code += ","; con_construct_code += token; } con_construct_code += ");\n"; } }; byte[] script = File.ReadAllBytes(fileName); char[] cArray = System.Text.Encoding.ASCII.GetString(script).ToCharArray(); string parseData = NukeComments(cArray); _tokens = parseData.Split(new char[] { '\n', '\r', '\t', ' ' }, StringSplitOptions.RemoveEmptyEntries); currentToken = 0; string lastValidToken = ""; while (currentToken < _tokens.Length) { string t = token; if (t.Length <= 0) { break; } try { if (keywords[t] == null) { throw new Exception("Unknown token " + keywords[t]); } } catch { Console.WriteLine("Aborting fatal error! " + t + " " + lastValidToken); return; } lastValidToken = t; keywords[t](); } }