// Re-parses the input whenever the text changes and refreshes the output box,
// the error log and the syntax highlighting. The selectChanged flag guards
// against re-entrancy while Paint() manipulates the text-box selection.
private void InputTextBox_TextChanged(object sender, EventArgs e)
{
    if (!selectChanged)
    {
        return;
    }

    if (changed == false)
    {
        changed = true;
    }

    var analizer = new Analizer(InputTextBox.Text);
    selectChanged = false;

    OutputTextBox.Text = analizer.Parse();

    if (analizer.errorDescription != null)
    {
        // Gray out the (possibly partial) output and surface the error.
        OutputTextBox.ForeColor = Color.Gray;
        ErrorLogTextBox.Text = analizer.errorDescription + "\r\nDouble click to set position onto the error.";
    }
    else
    {
        OutputTextBox.ForeColor = Color.Black;
        ErrorLogTextBox.Text = "";
    }

    analizer.Paint(InputTextBox);
    selectChanged = true;
}
// Timer callback: shows the cursor position in the title bar, captures a
// fresh screenshot, analyzes the game grid, and updates the preview images
// and (when present) the overlay window position/content.
private void Tick(object sender, EventArgs e)
{
    User32.GetCursorPos(out var cursor);
    this.Text = $"({cursor.X}, {cursor.Y})";

    using (var screenshot = this.tracker.GetScreenshot())
    {
        var grid = GridUtil.GetGridFromScreeshot(screenshot, this.CurrentGameMode);
        var blocks = Analizer.AnalizeGridImage(this.templates, grid);
        this.lastGrid = blocks;

        var (output, overlay) = GenerateOutput(blocks);
        this.pcGrid.Image = grid;
        this.pcOutput.Image = output;

        if (this.overlayForm == null)
        {
            return;
        }

        // Reposition the overlay relative to the tracked game window.
        this.overlayForm.SetImage(overlay);
        var offset = GridLocator.LocateGrid(this.CurrentGameMode);
        var rect = this.tracker.GetWindowRect();
        this.overlayForm.Location = new Point(rect.Left + offset.X, rect.Top + offset.Y);
    }
}
// Unit tests for Analizer.GetStatsForRun: each case builds an order (Zakaz),
// computes the stats for a given run size and compares with the expected
// OutStats. MSTest's Assert.AreEqual takes (expected, actual) in that order.
public void GetStatsForRunTests()
{
    Zakaz zak;
    int run;
    OutStats result;

    // 16000 labels at run 1000 -> 16 per sheet, no overprint.
    zak = new Zakaz(16000);
    run = 1000;
    result = new OutStats(zak, run, 0, 16);
    Assert.AreEqual(result, Analizer.GetStatsForRun(zak, run));

    // 15000 labels at run 1000 -> 15 per sheet, no overprint.
    zak = new Zakaz(15000);
    run = 1000;
    result = new OutStats(zak, run, 0, 15);
    Assert.AreEqual(result, Analizer.GetStatsForRun(zak, run));

    // Mixed order at run 2000 -> 16 per sheet, 1200 overprint.
    zak = new Zakaz(800, 2000, 2000, 26000);
    run = 2000;
    result = new OutStats(zak, run, 1200, 16);
    Assert.AreEqual(result, Analizer.GetStatsForRun(zak, run));

    // BUG FIX: the original built this case but never asserted it, so the
    // test silently checked nothing here. The expected values were copied
    // verbatim from the previous case — TODO confirm they are correct for
    // the (69600, 91500) order.
    zak = new Zakaz(69600, 91500);
    run = 2000;
    result = new OutStats(zak, run, 1200, 16);
    Assert.AreEqual(result, Analizer.GetStatsForRun(zak, run));
}
// Entry point: loads a GPX track and prints its distance statistics.
static void Main(string[] args)
{
    const string gpxPath = "../../../twister.gpx.txt";
    const string gpxNamespace = "http://www.topografix.com/GPX/1/1";

    var analizer = new Analizer(gpxPath, gpxNamespace);

    Console.WriteLine($"Climbing distance {analizer.ClimbingDistance()}");
    Console.WriteLine($"Descent distance {analizer.DescentDistance()}");
    Console.WriteLine($"Flat distance {analizer.FlatDistance()}");
    Console.WriteLine($"Total distance {analizer.TotalDistance()}");

    // Keep the console window open until a key is pressed.
    Console.ReadKey();
}
// Verifies Analizer.DecreaseRun for a single-item order. The comparison is
// done on string forms, matching the original test's equality strategy.
public void DecreaseRunTests()
{
    var zak = new Zakaz(4500);
    const int run = 1000;
    var expected = new OutStats(zak, run, 0, 16);

    var actual = Analizer.DecreaseRun(zak, run);

    Assert.AreEqual(actual.ToString(), expected.ToString());
}
// Verifies Analizer.FirstPass on a nine-label order that fits a single
// sheet: one split, run 10000, 15 positions, overprint sums as computed.
public void FirstPassTests()
{
    var zak = new Zakaz(10000, 10000, 20000, 15000, 15000, 10000, 20000, 30000, 10000);

    var actual = Analizer.FirstPass(zak);

    Assert.AreEqual(1, actual.Count, "FirstPass cannot split zakaz with count <= ETS_ON_SHEET");
    Assert.AreEqual(10000, actual[0].Run, "Optimal run for this zakaz is 10000");
    Assert.AreEqual(15, actual[0].EtsOnSheetCount, "For run 10000 ets on sheet count is 15");
    Assert.AreEqual(5000 + 5000, actual[0].OverprintsSum, "For run 10000 overprint sum is 10000");
    Assert.AreEqual(5000 + 5000 + 10000, actual[0].CorrectedOverprintsSum, "For run 10000 corrected overprint sum is 20000");
}
// Entry point: analyzes the resource database and writes the formatted
// lexem categories (identifiers, constants, other words) to the result file.
public static void Main(string[] args)
{
    // Paths are relative to the build output directory.
    string pathToResourceFile = "..\\..\\resources\\resource.db";
    string pathToResultFile = "..\\..\\result.db";

    var analizer = new Analizer(pathToResourceFile);
    var writer = new ResultsWriter(pathToResultFile);

    writer.WriteResultToFile(analizer.GetIdentsFormattedString());
    writer.WriteResultToFile(analizer.GetConstantsFormattedString());
    writer.WriteResultToFile(analizer.GetOtherLexemsWordsFormattedString());
    writer.Finish();
}
}//end:Main

// Runs the splitting algorithms on one order, prints the better of the two
// results (StatsList defines its own ordering operators), and — when the
// whole order fits on one sheet — also prints the shifting-based result.
private static void Analyze(Zakaz ets)
{
    Console.WriteLine("============================================");
    Console.WriteLine("Given data: ");
    Console.WriteLine("\t");
    foreach (Etyketka et in ets)
    {
        Console.Write("{0} ", et.Run);
    }
    Console.WriteLine();

    Console.WriteLine("Results:");
    StatsList results = Analizer.FirstPass(ets);
    StatsList results2 = Analizer.SplitByGcd(ets);
    if (results > results2)
    {
        results = results2;
    }

    foreach (var r in results)
    {
        Console.WriteLine("\n\t{0}", r);
        foreach (var e in r.Ets)
        {
            Console.WriteLine("\t\t{0}", e);
        }
    }
    Console.WriteLine("============================================");
    Console.WriteLine();

    // Shifting search is only attempted when everything fits on one sheet.
    if (ets.Count <= Constants.ETS_ON_SHEET)
    {
        results = Analizer.FindBestRunWithShifting(ets);
        foreach (var r in results)
        {
            Console.WriteLine("\n\t{0}", r);
            foreach (var e in r.Ets)
            {
                Console.WriteLine("\t\t{0}", e);
            }
        }
        Console.WriteLine("============================================");
        Console.WriteLine();
    }
}
/// <summary>
/// Simple usage example: resolves printer settings for the document's title
/// and either sends the file to the printer or opens it in a viewer.
/// </summary>
/// <param name="fileName">Path of the PDF file to process.</param>
public void Execute(string fileName)
{
    string title = Parser.Parse(fileName);

    // Temporary stub: take the viewer process name from the .pdf association.
    var associationManager = new FileAssociationManager();
    string processName = associationManager.GetAssociatedApplication(".pdf").Path;

    using (var context = new Model_PDFFinder())
    {
        Report_Template printerSettings = Analizer.GetPrinterSettings(title, context);
        if (printerSettings == null)
        {
            Viewer.View(fileName, processName);
            Logger.LogOpenForView();
        }
        else
        {
            Printer.Print(fileName, printerSettings);
            Logger.LogOpenForPrinting(title);
        }
    }
}
}//end:printButton_Click

/// <summary>
/// Handles the click on the calculateButton.
/// Calculates results and shows them in printPreview.
/// </summary>
private void calculateButton_Click(object sender, EventArgs e)
{
    // Collect input data; bail out when there is nothing to analyze.
    Zakaz z = new Zakaz();
    if (!ParseDataTable(ref z))
    {
        ShowErrorMessage("Немає даних для обчислення");
        return;
    }

    // Run every splitting algorithm on the order.
    Dictionary<String, StatsList> results = new Dictionary<String, StatsList>
    {
        { "SplitFromMinToMax", Analizer.FirstPass(z) },
        { "SplitByGcd", Analizer.SplitByGcd(z) },
        { "FindBestRunWithShifting", Analizer.FindBestRunWithShifting(z) }
    };

    // Text styles used throughout the report.
    Font titleFont = new Font("Calibri", 10, FontStyle.Bold);
    Font textFont = new Font("Calibri", 10, FontStyle.Regular);
    Font addedTextFont = new Font("Calibri", 8, FontStyle.Regular);

    richTextBox.ResetText();
    richTextBox.SelectionIndent = 5;
    richTextBox.SelectionRightIndent = 5;

    // Report title and order summary.
    richTextBox.SelectionFont = titleFont;
    richTextBox.SelectionAlignment = HorizontalAlignment.Center;
    richTextBox.AppendText("Аналіз замовлення\n");

    var header = new StringBuilder();
    header.Append(DateTime.Now.ToLongDateString());
    header.Append(" / ");
    header.Append(DateTime.Now.ToLongTimeString());
    header.Append("\n");
    header.Append(string.Format("В замовленні {0} етикеток.", (from et in z select et.Run).Sum()));
    header.Append(" Наклади етикеток: ");
    foreach (Etyketka et in z)
    {
        header.Append(string.Format("{0} ", et.Run));
    }
    header.Append("\n\n");
    richTextBox.SelectionFont = addedTextFont;
    richTextBox.SelectionAlignment = HorizontalAlignment.Left;
    richTextBox.AppendText(header.ToString());

    // One section per algorithm with a per-sheet breakdown.
    foreach (var item in results)
    {
        richTextBox.SelectionFont = titleFont;
        richTextBox.SelectionAlignment = HorizontalAlignment.Left;
        richTextBox.AppendText(string.Format("Результати за алгоритмом {0} (спусків {1}, передрук {2} тис. етикеток).\n", item.Key, item.Value.Count, ((float)(item.Value.OverprintsSum)) / 1000));

        int spuskCounter = 1;
        foreach (var r in item.Value)
        {
            richTextBox.SelectionFont = textFont;
            richTextBox.AppendText(string.Format("Спуск №{0}. Зайнято {1} з {2} позицій на спуску. Наклад {3} аркушів. Передрук {4} етикеток.\n", spuskCounter++, r.EtsOnSheetCount, Constants.ETS_ON_SHEET, r.Run, r.OverprintsSum));

            var details = new StringBuilder();
            details.Append(string.Format("\t№ позицій передрук (наклад) назва\n"));
            foreach (var et in r.Ets)
            {
                details.Append(string.Format("\t{1} \t{3} \t{4} \t({2}) \t{0}.\n", et.Name, et.Id, et.Run, et.CountOnSheet, et.Overprint));
            }
            richTextBox.SelectionFont = addedTextFont;
            richTextBox.AppendText(details.ToString());
        }
        richTextBox.AppendText("\n");
    }
}//end:calculateButton_Click
// Worker routine for one scan task: probes the URL variants produced by
// Analizer.BuildTraject with a "'A=0" injection payload; when the response
// contains a database syntax-error signature, records the original URL as a
// finding (subject to the Trash* filters on the task).
// NOTE(review): this is decompiled code — the goto/label state machine is
// preserved exactly; do not restructure without re-deriving the control flow.
private void ScannerExploit(ThreadScanner o)
{
    bool flag = false;  // true => URL will be reported as a finding
    try
    {
        Types types = new Types();
        // Candidate URLs with the injection payload applied.
        List<string> list = Analizer.BuildTraject(o.URL, "'A=0", true);
        using (List<string>.Enumerator enumerator = list.GetEnumerator())
        {
            while (enumerator.MoveNext())
            {
                int num = 0;  // retry counter for empty responses
                string current = enumerator.Current;
                goto Label_004F;  // first attempt skips the retry bookkeeping
            Label_002A:
                num++;
                if (num > 5)
                {
                    continue;  // give up on this URL after 5 empty replies
                }
                if (this.bckWorkerSQL.CancellationPending | !__RunningWorker)
                {
                    return;  // scan was cancelled
                }
            Label_004F:
                this.ScannerExploitCheckDelay(o.Delay);
                string sPostData = "";
                string sErrDesc = "";
                string str = o.HTTP.GetHTML(current, enHTTPMethod.GET, ref sPostData, null, null, false, ref sErrDesc, true);
                if (string.IsNullOrEmpty(str))
                {
                    goto Label_002A;  // empty body: retry
                }
                types = Utls.CheckSyntaxError(str);
                switch (types)
                {
                    case Types.None:
                        {
                            continue;  // no DB error signature — next URL
                        }
                    case Types.Unknown:
                        goto Label_00E3;
                    case Types.MsAccess:
                        goto Label_00F3;
                }
                goto Label_00EF;  // any other recognized engine
            }
            goto Label_0112;  // exhausted all URLs without a hit
        Label_00E3:
            flag = !o.TrashUnknown;    // unknown engine: keep unless filtered
            goto Label_0112;
        Label_00EF:
            flag = true;               // recognized engine: always keep
            goto Label_0112;
        Label_00F3:
            flag = !o.TrashMsAccess;   // MS Access: keep unless filtered
        }
    Label_0112:
        if (flag)
        {
            this.AddURL(o.OriginalUrl, new string[] { Utls.TypeToString(types) });
            this.__SearchSummary.Found++;
        }
    }
    catch (Exception exception1)
    {
        // VB-style error bookkeeping from the decompiler; errors are swallowed.
        ProjectData.SetProjectError(exception1);
        ProjectData.ClearProjectError();
    }
    finally
    {
        try
        {
            Console.WriteLine("Thread Count =>" + __ThreadPoolScanner.ThreadCount);
            this.__ThreadPoolScanner.Close(o.Thread);
        }
        catch (Exception exception2)
        {
            ProjectData.SetProjectError(exception2);
            ProjectData.ClearProjectError();
        }
        Application.DoEvents();
    }
}
// Runs the full experiment script: loads the data set, optionally reduces
// dimensionality with global PCA, builds the per-run method configurations
// from the form fields, then trains/evaluates one Analizer per configuration
// (plain train/test split or K-fold cross-validation).
// NOTE(review): long, order-dependent setup touching many form fields and
// globals — code kept byte-for-byte; only comments were added.
private void btnScript_Click(object sender, EventArgs e, StreamWriter statusWriter)
{
    set2Config();
    Refresh();
    u_config.printConfig(@"C:\Wavelets decomposition\config.txt", null);
    AmazonS3Client client = Helpers.configAmazonS3ClientS3Client();

    // Mirror checkbox state into the run flags.
    UseS3 = UseS3CB.Checked;
    rumPrallel = rumPrallelCB.Checked;
    runBoosting = runBoostingCB.Checked;
    runProoning = runProoningCB.Checked;
    runBoostingProoning = runBoostingProoningCB.Checked;
    runRFProoning = runRFProoningCB.Checked;
    runRf = runRfCB.Checked;
    runBoostingLearningRate = runBoostingLearningRateCB.Checked;
    bucketName = bucketTB.Text;
    string results_path = @ResultsTB.Text;
    string db_path = @DBTB.Text + "\\";
    MainFolderName = results_path;
    Helpers.createMainDirectoryOrResultPath(results_path, bucketName, client);

    // READ DATA: six tab-separated files next to db_path.
    DB db = new DB();
    db.training_dt = db.getDataTable(db_path + "trainingData.txt");
    db.testing_dt = db.getDataTable(db_path + "testingData.txt");
    db.validation_dt = db.getDataTable(db_path + "ValidData.txt");
    db.training_label = db.getDataTable(db_path + "trainingLabel.txt");
    db.testing_label = db.getDataTable(db_path + "testingLabel.txt");
    db.validation_label = db.getDataTable(db_path + "ValidLabel.txt");
    upper_label = db.training_label.Max();
    lower_label = db.training_label.Min();

    // Keep only the first trainingPercent fraction of the rows of each table.
    double trainingPercent = double.Parse(trainingPercentTB.Text);
    long rowToRemoveFrom = Convert.ToInt64(db.training_dt.Count() * trainingPercent);
    db.training_dt = db.training_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.training_label = db.training_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_dt = db.testing_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_label = db.testing_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    // NOTE(review): validation_dt is sliced from training_dt while
    // validation_label keeps its own file's labels — looks inconsistent;
    // confirm whether this is intentional.
    db.validation_dt = db.training_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.validation_label = db.validation_label.Where((el, i) => i < rowToRemoveFrom).ToArray();

    // REDUCE DIM, GLOBAL PCA (or pass raw tables through when disabled).
    if (usePCA.Checked)
    {
        DimReduction dimreduction = new DimReduction(db.training_dt);
        db.PCAtraining_dt = dimreduction.getGlobalPca(db.training_dt);
        db.PCAtesting_dt = dimreduction.getGlobalPca(db.testing_dt);
        db.PCAvalidation_dt = dimreduction.getGlobalPca(db.validation_dt);
    }
    else
    {
        // de-activate pca for dbg
        db.PCAtraining_dt = db.training_dt;
        db.PCAtesting_dt = db.testing_dt;
        db.PCAvalidation_dt = db.validation_dt;
    }

    // Per-row grid-index storage, one slot per feature.
    db.PCAtraining_GridIndex_dt = new long[db.PCAtraining_dt.Count()][];
    for (int i = 0; i < db.PCAtraining_dt.Count(); i++)
    {
        db.PCAtraining_GridIndex_dt[i] = new long[db.PCAtraining_dt[i].Count()];
    }

    // BOUNDING BOX AND MAIN GRID
    boundingBox = db.getboundingBox(db.PCAtraining_dt);
    MainGrid = db.getMainGrid(db.PCAtraining_dt, boundingBox, ref db.PCAtraining_GridIndex_dt);

    // READ CONFIG: number of runs, overridden by the K-fold count if given.
    methodConfig mc = new methodConfig(true);
    int Nloops = int.Parse(NloopsTB.Text) - 1;
    int Kfolds = 0;
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        Nloops = Kfolds - 1;
    }
    for (int k = 0; k < Nloops; k++)
    {
        mc.boostlamda_0.Add(3.8); // - create variant in number of pixels
    }
    mc.generateRecordConfigArr();

    // Fill every record config from the form fields; evaluateString allows
    // per-index (k) expressions in a text box.
    for (int k = 0; k < mc.recArr.Count(); k++)
    {
        mc.recArr[k].dim = NfeaturesTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesTB.Text, k));
        mc.recArr[k].approxThresh = double.Parse(evaluateString(approxThreshTB.Text, k));
        mc.recArr[k].partitionErrType = int.Parse(evaluateString(partitionTypeTB.Text, k));
        mc.recArr[k].minWaveSize = int.Parse(evaluateString(minNodeSizeTB.Text, k));
        mc.recArr[k].hopping_size = int.Parse(evaluateString(waveletsSkipEstimationTB.Text, k));
        mc.recArr[k].test_error_size = double.Parse(evaluateString(waveletsPercentEstimationTB.Text, k)); // percent of waves to check
        mc.recArr[k].NskipsinKfunc = double.Parse(evaluateString(boostingKfuncPercentTB.Text, k));
        mc.recArr[k].rfBaggingPercent = double.Parse(evaluateString(bagginPercentTB.Text, k));
        mc.recArr[k].rfNum = int.Parse(evaluateString(NrfTB.Text, k));
        mc.recArr[k].boostNum = int.Parse(evaluateString(NboostTB.Text, k));
        mc.recArr[k].boostProoning_0 = int.Parse(evaluateString(NfirstPruninginBoostingTB.Text, k));
        mc.recArr[k].boostlamda_0 = double.Parse(evaluateString(boostingLamda0TB.Text, k));
        mc.recArr[k].NwaveletsBoosting = int.Parse(evaluateString(NfirstwaveletsBoostingTB.Text, k));
        mc.recArr[k].boostNumLearningRate = int.Parse(evaluateString(NboostingLearningRateTB.Text, k));
        mc.recArr[k].percent_training_db = trainingPercent;
        mc.recArr[k].BoundLevel = int.Parse(evaluateString(boundLevelTB.Text, k));
        mc.recArr[k].NDimsinRF = NfeaturesrfTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesrfTB.Text, k));
        mc.recArr[k].split_type = int.Parse(evaluateString(splitTypeTB.Text, k));
        mc.recArr[k].NormLPType = int.Parse(evaluateString(errTypeEstimationTB.Text, k));
        mc.recArr[k].RFpruningTestRange[1] = int.Parse(evaluateString(RFpruningEstimationRange1TB.Text, k));
        mc.recArr[k].boundDepthTree = int.Parse(evaluateString(boundDepthTB.Text, k));
        mc.recArr[k].CrossValidFold = k;
        // save labels dim in config
        mc.recArr[k].labelDim = db.training_label[0].Count();
    }

    Helpers.createOutputDirectories(mc.recArr, client, u_config, bucketName, results_path);

    // SET ID ARRAY LIST
    List<int> trainingID = Enumerable.Range(0, db.PCAtraining_dt.Count()).ToList();
    List<int> testingID = Enumerable.Range(0, db.PCAtesting_dt.Count()).ToList();

    // cross validation: shuffle with a fixed seed for reproducibility.
    List<List<int>> trainingFoldId = new List<List<int>>();
    List<List<int>> testingFoldId = new List<List<int>>();
    Random ran = new Random(2);
    List<int> training_rand = trainingID.OrderBy(x => ran.Next()).ToList().GetRange(0, trainingID.Count); // THE LARGEST GROUP IS TRAINING
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        createCrossValid(Kfolds, training_rand, trainingFoldId, testingFoldId);
    }

    // bounding intervals: per-dimension grid-index range [0, last].
    int[][] BB = new int[2][];
    BB[0] = new int[boundingBox[0].Count()];
    BB[1] = new int[boundingBox[0].Count()];
    for (int i = 0; i < boundingBox[0].Count(); i++)
    {
        BB[1][i] = MainGrid[i].Count() - 1; // set last index in each dim
    }

    // Train/evaluate one Analizer per configuration.
    for (int i = 0; i < mc.recArr.Count; i++)
    {
        Analizer Analizer = new Analizer(MainFolderName + "\\" + mc.recArr[i].getShortName(), MainGrid, db, mc.recArr[i]);
        if (!croosValidCB.Checked)
        {
            Analizer.analize(trainingID, testingID, BB);
        }
        else
        {
            Analizer.analize(trainingFoldId[i], testingFoldId[i], BB); // cross validation
        }
        statusWriter.WriteLine("fold " + i + " ready!!!!");
    }
    //btnScript.BackColor = Color.Green;
}
// Builds the full engine pipeline (tokenizer -> parsers -> analizer ->
// executor), exposes the native System object as a constant in the global
// scope, and registers the token rules. Rule registration order matters:
// the tokenizer tries rules in the order they are added.
public SkryptEngine(string code = "")
{
    _code = code;

    Tokenizer = new Tokenizer(this);
    TokenProcessor = new TokenProcessor(this);
    StatementParser = new StatementParser(this);
    ExpressionParser = new ExpressionParser(this);
    GeneralParser = new GeneralParser(this);
    MethodParser = new FunctionParser(this);
    ModifierChecker = new ModifierChecker(this);
    ClassParser = new ClassParser(this);
    Analizer = new Analizer(this);
    Executor = new Executor(this);

    // Expose the native System library as a constant global.
    var systemObject = ObjectGenerator.MakeObjectFromClass(typeof(Library.Native.System), this);
    GlobalScope.SetVariable(systemObject.Name, systemObject, Modifier.Const);
    CurrentScope = GlobalScope;

    // Tokens that are found using a token rule with type defined as 'null' won't get added to the token list.
    // This means you can ignore certain characters, like whitespace in this case, that way.
    Tokenizer.AddRule(new Regex(@"\s"), TokenTypes.None);
    // Numbers: integers, decimals, and scientific notation.
    Tokenizer.AddRule(new Regex(@"\d+(\.\d+)?([eE][-+]?\d+)?"), TokenTypes.NumericLiteral);
    Tokenizer.AddRule(new Regex(@"0x([A-Fa-f\d])*"), TokenTypes.HexadecimalLiteral);
    Tokenizer.AddRule(new Regex(@"0b([01])*"), TokenTypes.BinaryLiteral);
    Tokenizer.AddRule(new Regex(@"[_a-zA-Z]+[_a-zA-Z0-9]*"), TokenTypes.Identifier);
    Tokenizer.AddRule(new Regex(@"include|const|using|public|private|strong|in|class|fn|if|elseif|else|while"), TokenTypes.Keyword);
    Tokenizer.AddRule(new Regex("true|false"), TokenTypes.BooleanLiteral);
    Tokenizer.AddRule(new Regex("null"), TokenTypes.NullLiteral);
    Tokenizer.AddRule(new Regex(@"[;]"), TokenTypes.EndOfExpression);
    // Operators and punctuation; multi-character operators listed first so
    // they match before their single-character prefixes.
    Tokenizer.AddRule(new Regex(@"(import)|(return)|(continue)|(break)|(&&)|(\+=)|(\-=)|(\/=)|(\*=)|(\%=)|(\^=)|(\&=)|(\|=)|(\|\|\|=)|(\|\|\|)|(\|\|)|(=>)|(==)|(!=)|(>=)|(<=)|(<<)|(>>>)|(>>)|(\+\+)|(--)|[~=<>+\-*/%^&|!\[\]\(\)\.\,{}\?\:]"), TokenTypes.Punctuator);
    // Single line comment
    Tokenizer.AddRule(new Regex(@"\/\/.*\n?"), TokenTypes.None);
    // Multi line comment
    Tokenizer.AddRule(new Regex(@"\/\*.*?\*\/", RegexOptions.Singleline), TokenTypes.None);
    // String literal: double-quoted, non-greedy, ignoring escaped quotes.
    Tokenizer.AddRule(new Regex(@""".*?(?<!\\)""", RegexOptions.Singleline), TokenTypes.StringLiteral);
}
// Wraps the given height map and creates the analyzer bound to this map.
public Map(T[,] heightmap)
{
    _initialMap = heightmap;
    _analizer = new Analizer(this);
}
//List<SkryptClass> Classes = new List<SkryptClass>();

// Default engine setup: builds the parser/executor pipeline, registers the
// standard methods, copies the native System object's properties into the
// global scope, and installs the token rules. Rule registration order
// matters: the tokenizer tries rules in the order they are added.
public SkryptEngine()
{
    tokenizer = new Tokenizer(this);
    statementParser = new StatementParser(this);
    expressionParser = new ExpressionParser(this);
    generalParser = new GeneralParser(this);
    methodParser = new MethodParser(this);
    analizer = new Analizer(this);
    executor = new Executor(this);
    standardMethods = new StandardMethods(this);
    standardMethods.AddMethodsToEngine();

    //Constants["_PI"] = new Numeric(Math.PI);
    //Constants["_E"] = new Numeric(Math.E);

    // Expose each property of the native System object as a global variable.
    SkryptObject SystemObject = ObjectGenerator.MakeObjectFromClass(typeof(Library.Native.System));
    foreach (SkryptProperty property in SystemObject.Properties)
    {
        GlobalScope.AddVariable(property.Name, property.Value, true);
    }

    // Tokens that are found using a token rule with type defined as 'null' won't get added to the token list.
    // This means you can ignore certain characters, like whitespace in this case, that way.
    tokenizer.AddRule(new Regex(@"\s"), TokenTypes.None);
    tokenizer.AddRule(new Regex(@"\d+(\.\d+)?"), TokenTypes.NumericLiteral);
    tokenizer.AddRule(new Regex(@"[_a-zA-Z]+[_a-zA-Z0-9]*"), TokenTypes.Identifier);
    tokenizer.AddRule(new Regex(@"class|func|if|elseif|else|while"), TokenTypes.Keyword);
    tokenizer.AddRule(new Regex("true|false"), TokenTypes.BooleanLiteral);
    tokenizer.AddRule(new Regex("null"), TokenTypes.NullLiteral);
    tokenizer.AddRule(new Regex(@"[;]"), TokenTypes.EndOfExpression);
    // Operators and punctuation; multi-character operators listed first so
    // they match before their single-character prefixes.
    tokenizer.AddRule(new Regex(@"(return)|(&&)|(\|\|)|(\|\|\|)|(==)|(!=)|(>=)|(<=)|(<<)|(>>)|(>>>)|(\+\+)|(--)|[~=:<>+\-*/%^&|!\[\]\(\)\.\,{}]"), TokenTypes.Punctuator);
    // String literal: double-quoted, non-greedy, ignoring escaped quotes.
    tokenizer.AddRule(new Regex(@""".*?(?<!\\)"""), TokenTypes.StringLiteral);
    // Multi line comment
    tokenizer.AddRule(new Regex(@"\/\*(.|\n)*\*\/"), TokenTypes.None);
    // Single line comment
    tokenizer.AddRule(new Regex(@"\/\/.*\n"), TokenTypes.None);
}