// Performance benchmark: pre-tokenizes mock-entity JSON of the requested size,
// then measures the median time of a full NodeParser.Step pass and reports
// throughput in MB/s.
public unsafe void PerformanceTest_NodeParser_Step_MockEntities(int count)
{
    var json = JsonTestData.GetMockEntities(count);
    // Pin the managed string so the tokenizer can consume it through a raw pointer.
    fixed (char* ptr = json)
    {
        m_Tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);
    }
    // Each sample steps a fresh parser over the same pre-tokenized stream.
    Measure.Method(() =>
    {
        using (var parser = new NodeParser(m_Tokenizer))
        {
            parser.Step(NodeType.None);
        }
    })
        .Definition("NodeParserStep")
        .WarmupCount(1)
        .MeasurementCount(100)
        .Run();
    PerformanceTest.Active.CalculateStatisticalValues();
    // Median is in milliseconds; convert bytes -> MB and ms -> s for MB/s.
    var size = json.Length / (double)1024 / 1024;
    Debug.Log($"MB/s=[{size / (PerformanceTest.Active.SampleGroups.First().Median / 1000)}]");
}
/// <summary>
/// Test helper: parses <paramref name="representation"/> with the given parser
/// (defaults to the whole-program parser) and asserts success — or, when
/// <paramref name="shouldFail"/> is set, asserts that parsing failed.
/// </summary>
/// <param name="representation">Source text to parse.</param>
/// <param name="parser">Parser to use; defaults to Parsers.Node.Program.</param>
/// <param name="shouldFail">When true, the parse is expected to fail.</param>
public static void TestParsing(string representation, NodeParser parser = null, bool shouldFail = false)
{
    SourceCode source = new SourceCode(representation);
    TokenWalker walker = new Compiler(source).Walker;
    NodeParser nodeParser = parser ?? Parsers.Node.Program;
    Node node = null;

    Console.WriteLine(source.WithLineNumbers());
    //Console.WriteLine(nodeParser.ToStringRecursively(enableHighlighting: false));

    ParseResultHistory resultHistory = null;
    bool failed;
    try
    {
        nodeParser.Parse(ref node, walker, out resultHistory);
        failed = false;
    }
    catch (FailedParsingNodeException)
    {
        // The exception itself carries no extra assertion value here.
        failed = true;
    }

    // The parser may throw before producing any history; guard against NRE.
    if (resultHistory != null)
    {
        Console.WriteLine(resultHistory.ParseHistory(source));
    }

    if (shouldFail)
    {
        // Expected-failure path. The original fell through to the success
        // assertions below, which made every shouldFail test fail spuriously.
        Assert.IsNull(node);
        Assert.IsTrue(failed);
    }
    else
    {
        Assert.IsNotNull(node);
        Assert.IsTrue(walker.IsLast());
        Assert.IsFalse(failed);
    }
}
// Wraps an ITextBuffer and keeps a parsed node tree in sync with buffer edits.
public BufferParser(ITextBuffer text)
{
    Buffer = text;
    parser = new NodeParser();
    // Re-parse on every change, and once up-front for the initial content.
    text.Changed += Text_Changed;
    Reparse();
}
// Verifies that NodeParser.Parse persists the freshly parsed vertex via the cache.
public void Saves_To_Cache()
{
    // Arrange: stub out every collaborator the parser touches.
    var vertexFactory = new Mock<IVertexFactory>();
    var profileFactory = new Mock<IProfileFactory>();
    var hyperlinkParser = new Mock<IHyperLinkParser>();
    var vertexCache = new Mock<IVertexCache>();
    var pause = new Mock<IPause>();

    vertexFactory
        .Setup(v => v.Create(It.IsAny<IProfile>(), It.IsAny<IEnumerable<IProfile>>()))
        .Returns(new Vertex("johndoe", Create_Profiles()));
    profileFactory
        .Setup(p => p.Create(It.IsAny<Uri>()))
        .Returns(new Profile("johndoe", @"http://steamcommunity.com/id/johndoe/friends/"));
    hyperlinkParser
        .Setup(h => h.ParseUser(It.IsAny<IProfile>()))
        .Returns(new List<string>
        {
            @"http://steamcommunity.com/id/one/friends/",
            "http://steamcommunity.com/id/two/friends/"
        });
    // A cache miss forces the parser down the "save newly parsed vertex" path.
    vertexCache
        .Setup(v => v.Retrieve(It.IsAny<string>()))
        .Returns((IVertex)null);

    var sut = new NodeParser(
        hyperlinkParser.Object,
        vertexFactory.Object,
        profileFactory.Object,
        vertexCache.Object,
        pause.Object);

    // Act.
    var vertex = Create_Vertex();
    var friends = sut.Parse(new Uri($"http://steamcommunity.com/id/{vertex.Id}"));

    // Assert: the parsed vertex was written back to the cache.
    vertexCache.Verify(v => v.Save(vertex));
}
/// <summary>
/// Bundles a node's parse callback with its field parsers (and their
/// associated default objects), keyed by field symbol.
/// </summary>
/// <param name="parseFunction">Callback invoked to parse this node.</param>
/// <param name="fields">Field parsers plus per-field payload, by symbol.</param>
public NodeParseInfo(
    NodeParser parseFunction,
    SymbolDict<Tup<FieldParser, object>> fields)
{
    m_parseFunction = parseFunction;
    FieldDefs = fields;
}
/// <summary>
/// Builds the transform-rule table: tokenizes each rule's match/criteria/transform
/// strings and parses them into node trees plus a criteria expression.
/// </summary>
protected override void Initialize()
{
    worklist = new Stack<InstructionNode>();
    transformRules = new List<TransformRule>();

    // Symbols resolvable inside rule text: the target architecture and the IR instruction set.
    var symbols = new SymbolDictionary();
    symbols.Add(Architecture);
    symbols.Add(IRInstructionList.List);

    foreach (var rule in Rules.List)
    {
        var matchTokens = Tokenizer.Parse(rule.Match, ParseType.Instructions);
        var criteriaTokens = Tokenizer.Parse(rule.Criteria, ParseType.Expression);
        var transformTokens = Tokenizer.Parse(rule.Transform, ParseType.Instructions);

        transformRules.Add(new TransformRule(
            NodeParser.Parse(matchTokens, symbols),
            ExpressionParser.Parse(criteriaTokens),
            NodeParser.Parse(transformTokens, symbols)));
    }
}
// Runs the lexer over the subject text and returns the raw lexical elements.
protected static IList<LexicalElement> ParseAndGetElements(string subject)
{
    IList<LexicalElement> elements;
    new NodeParser().Parse(subject, null, out elements);
    return elements;
}
// Incrementally parses JSON delivered in several chunks, yielding each
// meaningful node as soon as enough tokens are available.
private static IEnumerable<NodeType> StepNodes(IEnumerable<string> parts)
{
    using (var tokenizer = new JsonTokenizer())
    using (var parser = new NodeParser(tokenizer))
    {
        foreach (var chunk in parts)
        {
            // Feed the next slice of input into the tokenizer.
            Write(tokenizer, chunk);

            // Drain every token produced so far.
            while (parser.TokenNextIndex < tokenizer.TokenNextIndex)
            {
                var stepped = parser.Step();
                if (stepped != NodeType.None)
                {
                    yield return stepped;
                }
            }
        }

        // Flush whatever parser state remains after the final chunk.
        while (parser.NodeType != NodeType.None)
        {
            yield return parser.Step();
        }
    }
}
// Parses a complete JSON document in one shot, yielding each meaningful node.
private static IEnumerable<NodeType> StepNodes(string json)
{
    using (var tokenizer = new JsonTokenizer())
    using (var parser = new NodeParser(tokenizer))
    {
        // Tokenize the whole input up front.
        Write(tokenizer, json);

        // Step through all available tokens.
        while (parser.TokenNextIndex < tokenizer.TokenNextIndex)
        {
            var stepped = parser.Step();
            if (stepped != NodeType.None)
            {
                yield return stepped;
            }
        }

        // Flush whatever parser state remains.
        while (parser.NodeType != NodeType.None)
        {
            yield return parser.Step();
        }
    }
}
// Start is called before the first frame update
void Start()
{
    // Cache the scene singletons and reset node bookkeeping.
    saveLoad = SaveLoad.instance;
    parser = NodeParser.instance;
    ids = 0;
    nodes = new Dictionary<int, Node>();
}
// Lexes the given text and returns every lexical element found.
protected static IList<LexicalElement> ParseAndGetElements(string subject)
{
    var lexer = new NodeParser();
    IList<LexicalElement> result;
    lexer.Parse(subject, null, out result);
    return result;
}
/// <summary>
/// Parsing a single mock event must produce at least one node in the
/// application-wide node collection.
/// </summary>
public void NodeCollectionCreationTest()
{
    List<IEvent> eventList = new List<IEvent>();
    IEvent aEvent = new MockEvent("item1", DateTime.Now, null, "Access");
    eventList.Add(aEvent);

    App.nodeCollection.nodeList = NodeParser.GetNodes(eventList);

    // Argument order fixed: Assert.AreNotEqual(notExpected, actual). The
    // original passed (actual, notExpected), which yields a misleading
    // failure message even though the comparison result is the same.
    Assert.AreNotEqual(0, App.nodeCollection.nodeList.Count);
}
/// <summary>
/// Loads the lantern viewer HTML fixture and prepares the parser and
/// document instances used by the tests.
/// </summary>
public void SetupTests()
{
    // Path.Combine instead of hand-built "\\" separators keeps the fixture
    // path valid on non-Windows test runners.
    var path = Path.Combine(TestContext.CurrentContext.TestDirectory, "TestFiles", "lantern", "viewer.html");
    _htmlMarkup = File.ReadAllText(path);
    _nodeParser = new NodeParser();
    _htmlDocument = new HtmlDocument();
    _htmlDocument.LoadHtml(_htmlMarkup);
}
// Imports shell items from a user-selected CSV file, rebuilds the event/node
// collections, and navigates to (or rebuilds) the timeline page.
private void Import_Click(object sender, RoutedEventArgs e)
{
    OpenFileDialog openFileDialog = new OpenFileDialog
    {
        InitialDirectory = Directory.GetCurrentDirectory(),
        Filter = "CSV File (*.csv)|*.csv",
        ReadOnlyChecked = true
    };
    // User cancelled the dialog: nothing to do.
    if (openFileDialog.ShowDialog() != true)
    {
        return;
    }
    var file = openFileDialog.FileName;
    List<IShellItem> csvShelltems = CsvIO.ImportCSVFile(file);
    // Nothing imported: surface any accumulated log messages and stop.
    if (csvShelltems.Count == 0)
    {
        LogAggregator.Instance.ShowIfNotEmpty();
        return;
    }
    // Replace any previously loaded items/nodes with the fresh import.
    if (App.ShellItems != null)
    {
        App.ShellItems.Clear();
    }
    if (App.nodeCollection.nodeList != null)
    {
        App.nodeCollection.nodeList.Clear();
    }
    App.ShellItems = csvShelltems;
    List<IEvent> events = EventParser.GetEvents(App.ShellItems);
    App.nodeCollection.ClearAllFilters();
    App.nodeCollection.nodeList.AddRange(NodeParser.GetNodes(events));
    // First import: create the timeline page and navigate to it.
    if (Home.timelinePage == null)
    {
        Home.timelinePage = new TimelinePage();
        App.NavigationService.Navigate(Home.timelinePage);
    }
    else
    {
        // Timeline already exists: rebuild it and prefer the registered page
        // instance when one is cached under the well-known key.
        Home.timelinePage.RebuildTimeline();
        string timelinePageKey = "timelinepage";
        if (App.pages.ContainsKey(timelinePageKey))
        {
            App.NavigationService.Navigate(App.pages[timelinePageKey]);
        }
        else
        {
            App.NavigationService.Navigate(Home.timelinePage);
        }
    }
    LogAggregator.Instance.ShowIfNotEmpty();
}
// Reads nodes.dat from the data files folder and returns the parsed nodes as JSON.
public ActionResult GetNodes()
{
    Reader documentReader = new DatDocumentReader();
    InputParser<Node> parser = new NodeParser();

    List<string> rawLines = documentReader.GetData(Server.MapPath("~/DataFiles/nodes.dat"));
    List<Node> nodes = parser.ParseInput(rawLines);

    // AllowGet: this endpoint is read-only and safe to expose over GET.
    return Json(nodes, JsonRequestBehavior.AllowGet);
}
// Builds a post-time action from the first DECIMAL token; a second DECIMAL,
// when present, supplies the optional remaining-time value.
public virtual object EvalPostTimeClause(ParseNode node)
{
    var secondDecimal = node[TokenType.DECIMAL, 1];
    decimal? timeLeft = secondDecimal != null
        ? (decimal?)NodeParser.GetTime(secondDecimal)
        : null;

    var postTime = NodeParser.GetTime(node[TokenType.DECIMAL, 0]);
    return ObjectFactory.GetInstance<IActionFactory>().CreatePostTimeAction(postTime, timeLeft);
}
/// <summary>
/// A single identifier token must parse into a non-null node.
/// </summary>
public void TestParseSingleToken()
{
    // Locals renamed to camelCase per C# conventions (were PascalCase).
    string source = "variable123";
    TokenWalker walker = TestTools.GetWalker(source);
    NodeParser parser = Keep(SyntaxKeyword.Identifier).Name("Identifier");

    Node expression = null;
    parser.Parse(ref expression, walker, out _);

    Assert.IsNotNull(expression);
}
/// <summary>
/// A simple acyclic dependency graph must resolve in dependency-first order.
/// </summary>
public void SimpleCase()
{
    var nodes = ExpressionsBuild(
        "A>B,C",
        "B>C,D",
        "D>",
        "C>"
        );

    var result = NodeParser.Parse(nodes);

    // string.Join replaces the original Aggregate-based concatenation:
    // linear instead of quadratic on string building, and it does not throw
    // InvalidOperationException on an empty result sequence.
    Assert.AreEqual("D,C,B,A", string.Join(",", result.Select(a => a.Id)));
}
// Update is called once per frame
void Update()
{
    // Re-acquire the singletons if the references were lost (e.g. scene reload).
    // Explicit '== null' checks are kept deliberately: if these types derive from
    // UnityEngine.Object, '??=' would bypass Unity's overloaded null semantics —
    // NOTE(review): confirm SaveLoad/NodeParser are MonoBehaviours.
    if (saveLoad == null)
    {
        saveLoad = SaveLoad.instance;
    }
    if (parser == null)
    {
        parser = NodeParser.instance;
    }
}
/// <summary>
/// Parsing an identifier with an assignment-only parser must throw
/// FailedParsingNodeException and leave the node null.
/// </summary>
public void TestParsingMissingToken()
{
    // Locals renamed to camelCase per C# conventions (were PascalCase).
    string source = "variable123";
    TokenWalker walker = TestTools.GetWalker(source);
    NodeParser parser = Keep(SyntaxKeyword.Assignment).Name("Identifier");

    Node expression = null;
    Assert.Throws<FailedParsingNodeException>(() => parser.Parse(ref expression, walker, out _));
    Assert.IsNull(expression);
}
/// <summary>
/// A continuous parser over "[" then "]" must consume both tokens and yield a node.
/// </summary>
public void TestAllParser()
{
    // Locals renamed to camelCase per C# conventions (were PascalCase).
    string source = "[]";
    TokenWalker walker = TestTools.GetWalker(source);

    NodeParser openParser = Keep(SyntaxKeyword.OpenSquareBracket).Name("OpenSquareBracket");
    NodeParser closeParser = Keep(SyntaxKeyword.CloseSquareBracket).Name("CloseSquareBracket");
    NodeParser allParser = Continuous(openParser, closeParser).Name("OpenCloseParser");

    Node expression = null;
    allParser.Parse(ref expression, walker, out ParseResultHistory resultHistory);

    Assert.IsNotNull(expression);
}
// An empty argument list must trigger the "no arguments" warning on the logger.
public void NodeParserFailsWithEmptyPath()
{
    // arrange
    ILog log = MockRepository.GenerateMock<ILog>();
    NodeParser sut = new NodeParser(log);
    log.Expect(l => l.Warn(Arg<string>.Is.Equal(sut.messageNoArgs)));

    // act
    var result = sut.ParseNodes(new string[0]);

    // assert
    log.VerifyAllExpectations();
}
// A non-existent path must be reported through ErrorFormat with the invalid-path message.
public void NodeParserErrorsWithInvalidPath()
{
    // arrange
    ILog log = MockRepository.GenerateMock<ILog>();
    NodeParser sut = new NodeParser(log);
    log.Expect(l => l.ErrorFormat(
        Arg<string>.Is.Equal(sut.messageInvalidPath),
        Arg<object>.Is.Equal("invalid")));

    // act
    var result = sut.ParseNodes(new string[1] { "invalid" });

    // assert
    log.VerifyAllExpectations();
}
/// <summary>
/// Validates a node and puts the gathered information into the report.
/// </summary>
/// <param name="parser">The parser to validate.</param>
/// <param name="node">The node to validate.</param>
internal void Validate(NodeParser parser, Node node)
{
    // Warn on attributes that are neither required nor optional.
    foreach (var attribute in node.Attributes.Keys)
    {
        bool known = IsInStringArray(attribute, parser.RequiredAttributes)
                     || IsInStringArray(attribute, parser.OptionalAttributes);
        if (!known)
        {
            report.AddWarning("Unknown attribute in \"" + node.Name + "\" node: " + attribute + "=" + node.Attributes[attribute]);
        }
    }

    // Error on required attributes that are absent.
    foreach (var attribute in parser.RequiredAttributes)
    {
        if (!node.Attributes.ContainsKey(attribute))
        {
            report.AddError("Missing required attribute in \"" + node.Name + "\" node: " + attribute);
        }
    }

    if (parser.CheckNodes)
    {
        // Warn on child nodes that are neither required nor optional.
        foreach (var child in node.Children)
        {
            bool known = IsInStringArray(child.Name, parser.RequiredChildNodes)
                         || IsInStringArray(child.Name, parser.OptionalChildNodes);
            if (!known)
            {
                report.AddWarning("Unknown child node in \"" + node.Name + "\" node: " + child.Name);
            }
        }

        // Error on required child nodes that are absent.
        foreach (string childName in parser.RequiredChildNodes)
        {
            if (!HasChildNodeWithName(node, childName))
            {
                report.AddError("Missing required child node in \"" + node.Name + "\" node: " + childName);
            }
        }
    }

    // Tag usage: required tags must be present; disallowed tags must be absent.
    if (parser.TagUsageType == NodeParser.TagUsage.Required && string.IsNullOrEmpty(node.Tag))
    {
        report.AddError("Missing required tag in \"" + node.Name + "\" node.");
    }
    else if (parser.TagUsageType == NodeParser.TagUsage.NotAllowed && !string.IsNullOrEmpty(node.Tag))
    {
        report.AddWarning("Found tag where none is allowed in \"" + node.Name + "\" node: " + node.Tag);
    }
}
// Reads a local HTML file, parses it into a node tree, and echoes the raw text.
static void Main(string[] args)
{
    string path = @"htmlToRead.html";
    string readText = File.ReadAllText(path);

    var parser = new NodeParser();
    INode root = new NodeBase();
    root.Add(parser.Parse(readText));

    //NodeWriter.Write(nodeTree);
    Console.WriteLine(readText);
}
// Concatenates every ANY_TEXT token into a multi-line comment body and wraps
// it in a post-comment action.
public virtual object EvalPostCommentClause(ParseNode node)
{
    int i = 0;
    StringBuilder result = new StringBuilder();
    // Collect consecutive ANY_TEXT segments, one per line.
    while (node[TokenType.ANY_TEXT, i] != null)
    {
        result.AppendLine(NodeParser.GetCommentSegment(node[TokenType.ANY_TEXT, i]));
        i++;
    }
    // Strip the trailing newline added by the last AppendLine.
    // NOTE(review): the '>' comparison means a single empty segment (length
    // exactly one newline) keeps its trailing newline — confirm intended.
    if (result.Length > Environment.NewLine.Length)
    {
        result.Remove(result.Length - Environment.NewLine.Length, Environment.NewLine.Length);
    }
    return (ObjectFactory.GetInstance<IActionFactory>().CreatePostCommentAction(result.ToString()));
}
// Enforces the singleton: the first instance survives scene loads; duplicates destroy themselves.
private void Start()
{
    // A second copy in the scene is redundant — remove it and bail out.
    if (instance != null)
    {
        Destroy(this.gameObject);
        return;
    }

    instance = this;
    DontDestroyOnLoad(this.gameObject);

    projectSave = SaveLoad.instance;
    PrepareDataForParsing();
}
/// <summary>
/// Parses the specified route into a sequence of nodes.
/// </summary>
/// <param name="routeUrl">The route URL to add.</param>
/// <param name="parameters">The parameters to capture.</param>
/// <returns>The parsed nodes.</returns>
public IMatchNode[] Parse(string routeUrl, IEnumerable<ParameterInfo> parameters)
{
    IReadOnlyDictionary<string, Type> parameterTypes =
        parameters.ToDictionary(p => p.Name, p => p.ParameterType, StringComparer.Ordinal);

    var nodeParser = new NodeParser(this.specializedCaptureNodes);
    nodeParser.ParseUrl(routeUrl, parameterTypes);

    // Two routes normalizing to the same shape would be ambiguous at match time.
    string normalizedUrl = GetNormalizedRoute(routeUrl, nodeParser.Nodes);
    if (!this.normalizedUrls.Add(normalizedUrl))
    {
        throw new InvalidOperationException("The route produces an ambiguous match.");
    }

    return nodeParser.Nodes.ToArray();
}
// Parsing the TestInput fixture must yield two nodes with the expected adjacency.
public void NodeParserWorks()
{
    // arrange
    ILog log = MockRepository.GenerateMock<ILog>();
    NodeParser sut = new NodeParser(log);

    // act
    var result = sut.ParseNodes(new string[1] { "TestInput" });

    // assert
    Assert.AreEqual(2, result.Count);
    var microsoftNode = result.First(r => r.label == "Microsoft");
    var ibmNode = result.First(r => r.label == "IBM");
    Assert.IsNotNull(ibmNode);
    Assert.IsNotNull(microsoftNode);
    // IBM is a leaf; Microsoft points at one neighbour.
    Assert.AreEqual(0, ibmNode.adjacentNodes.Count);
    Assert.AreEqual(1, microsoftNode.adjacentNodes.Count);
}
/// <summary>
/// Enumerates all files under the local "Resources" directory and returns
/// their absolute paths as transfer paths, printing a search summary.
/// </summary>
/// <param name="token">Cancels the enumeration.</param>
/// <returns>The discovered transfer paths.</returns>
public async Task<IList<TransferPath>> SearchLocalSourcePathsAsync(CancellationToken token)
{
    Console2.WriteStartHeader("Search Paths");
    string searchLocalPath = Path.Combine(Environment.CurrentDirectory, "Resources");
    var paths = new List<TransferPath>();
    long totalFileCount = 0;
    long totalByteCount = 0;
    var logger = this.CreateTransferLog();
    // Build the root directory node the enumeration starts from.
    var sourceNode = NodeParser.Node()
        .WithContext(NullNodeContext.Instance)
        .WithPath(searchLocalPath)
        .Parse<IDirectory>();
    INode[] sourceNodes = { sourceNode };
    // The statistics handler captures running totals as enumeration progresses.
    var pathEnumerator = EnumerationBuilder.ForUpload(logger, Guid.NewGuid())
        .StartFrom(sourceNodes)
        .WithStatistics(new SynchronousHandler<EnumerationStatistic>(
            statistic =>
            {
                totalFileCount = statistic.TotalFiles;
                totalByteCount = statistic.TotalBytes;
            }))
        .Create();
    var stopWatch = new Stopwatch();
    stopWatch.Start();
    // LazyEnumerate is synchronous; run it on a pool thread so the caller isn't blocked.
    await Task.Run(() =>
    {
        paths.AddRange(pathEnumerator.LazyEnumerate(token)
            .Select(node => new TransferPath(node.AbsolutePath)));
    }, token).ConfigureAwait(false);
    stopWatch.Stop();
    this._consolePrinter.DisplaySearchSummary(sourceNode, stopWatch, totalFileCount, totalByteCount);
    return (paths);
}
// Interactive four-function calculator REPL; an empty line or EOF terminates.
public static void RunParser()
{
    Console.WriteLine("Four Function Calculator (Ctrl-D to quit, Enter after each line, ';' to end expression)");

    while (true)
    {
        var line = Console.ReadLine();
        // A null (EOF) or empty line ends the session.
        if (string.IsNullOrEmpty(line))
        {
            break;
        }

        var parser = new NodeParser(new Lexer(line));
        try
        {
            parser.Parse();
        }
        catch
        {
            // Deliberate best-effort: report the parse error and keep the REPL alive.
            Console.WriteLine("Doh!!");
        }
    }

    Console.WriteLine("Finished.");
}
/// <summary>
/// A graph containing the cycle A→C→A must fail with CycleDependencyException
/// reporting the unresolved nodes.
/// </summary>
public void CycleDependency()
{
    var nodes = ExpressionsBuild(
        "A>B,C",
        "B>C,D",
        "D>",
        "C>A"
        );

    try
    {
        NodeParser.Parse(nodes);
        // The original passed silently when no exception was thrown; the test
        // must fail explicitly in that case.
        Assert.Fail("Expected CycleDependencyException was not thrown.");
    }
    catch (CycleDependencyException e)
    {
        CollectionAssert.AreEquivalent(new List<string>() { "A", "B", "C" }, e.UnresolvedNodes);
    }
}
/// <summary>
/// References to undeclared nodes (E, F) must fail with ExceedDependencyException
/// listing exactly those nodes.
/// </summary>
public void ExceedExpression()
{
    var nodes = ExpressionsBuild(
        "A>B,E",
        "B>C,D",
        "D>F",
        "C>"
        );

    try
    {
        NodeParser.Parse(nodes);
        // The original passed silently when no exception was thrown; the test
        // must fail explicitly in that case.
        Assert.Fail("Expected ExceedDependencyException was not thrown.");
    }
    catch (ExceedDependencyException e)
    {
        CollectionAssert.AreEquivalent(new List<string>() { "E", "F" }, e.ExceedNodes);
    }
}
/// <summary>
/// MSBuild task entry point: lexes each input file, flags identifiers
/// containing underscores as errors, and runs general semantic validation
/// as warnings.
/// </summary>
/// <returns>true when the task succeeded (no errors logged, or errors tolerated).</returns>
public override bool Execute()
{
    var parser = new NodeParser();
    var validator = new GeneralSemanticValidator();

    foreach (var file in Files)
    {
        var nodes = File.ReadAllText(file.ItemSpec);
        IList<LexicalElement> tokens;
        parser.Parse(nodes, file.ItemSpec, out tokens);

        // Identifiers containing '_' violate the naming rules — report as errors.
        foreach (var token in tokens.Where(token => token.Name.Contains("_")))
        {
            LogError(new Warning(file.ItemSpec, token.StartCursor, token.EndCursor, "Syntax", token.Name));
        }

        var ast = new Builder(nodes, tokens).ast;
        var warnings = validator.Validate(file.ItemSpec, ast);
        foreach (var warning in warnings)
        {
            LogWarning(warning);
        }
    }

    // MSBuild expects Execute to return true on SUCCESS. The original returned
    // 'Log.HasLoggedErrors && !ContinueOnError' — true exactly when errors were
    // logged and not tolerated, i.e. inverted. Succeed when nothing was logged
    // as an error, or when ContinueOnError tolerates the errors.
    return !Log.HasLoggedErrors || ContinueOnError;
}
// Read the data from streets.dat and node.dat and pass them to the view
public ActionResult ProcessDrawingData()
{
    Reader reader = new DatDocumentReader();

    InputParser<Street> streetInput = new StreetParser();
    List<string> streetData = reader.GetData(Server.MapPath("~/DataFiles/streets.dat"));
    List<Street> streetList = streetInput.ParseInput(streetData);

    // The original allocated a second DatDocumentReader ('reader2') here but
    // never used it — removed as dead code; the first reader serves both files.
    InputParser<Node> nodeInput = new NodeParser();
    List<string> nodesData = reader.GetData(Server.MapPath("~/DataFiles/nodes.dat"));
    List<Node> nodeList = nodeInput.ParseInput(nodesData);

    var lineList = new List<LineViewModel>();
    foreach (var s in streetList)
    {
        // Node indices in the .dat file are 1-based.
        LineViewModel line = new LineViewModel();
        line.X = nodeList[s.StartNode - 1].X;
        line.Y = nodeList[s.StartNode - 1].Y;
        line.X2 = nodeList[s.EndNode - 1].X;
        line.Y2 = nodeList[s.EndNode - 1].Y;
        // TR == 0 maps to rule set 1, anything else to 2.
        line.Rules = s.TR == 0 ? 1 : 2;
        lineList.Add(line);
    }

    return View(lineList);
}
/// <summary>
/// MSBuild task entry point: lexes each input file, flags identifiers
/// containing underscores as errors, and runs general semantic validation
/// as warnings.
/// </summary>
/// <returns>true when the task succeeded (no errors logged, or errors tolerated).</returns>
public override bool Execute()
{
    var parser = new NodeParser();
    var validator = new GeneralSemanticValidator();

    foreach (var file in Files)
    {
        var nodes = File.ReadAllText(file.ItemSpec);
        IList<LexicalElement> tokens;
        parser.Parse(nodes, file.ItemSpec, out tokens);

        // Identifiers containing '_' violate the naming rules — report as errors.
        foreach (var token in tokens.Where(token => token.Name.Contains("_")))
        {
            LogError(new Warning(file.ItemSpec, token.StartCursor, token.EndCursor, "Syntax", token.Name));
        }

        var ast = new Builder(nodes, tokens).ast;
        var warnings = validator.Validate(file.ItemSpec, ast);
        foreach (var warning in warnings)
        {
            LogWarning(warning);
        }
    }

    // MSBuild expects Execute to return true on SUCCESS. The original returned
    // 'Log.HasLoggedErrors && !ContinueOnError' — true exactly when errors were
    // logged and not tolerated, i.e. inverted. Succeed when nothing was logged
    // as an error, or when ContinueOnError tolerates the errors.
    return !Log.HasLoggedErrors || ContinueOnError;
}
// Demo: feeds a template string containing a placeholder through the
// lexer/parser pipeline.
public static void RunTemplateParser()
{
    var input = @" <div>{{ }}</div> ";

    // NOTE(review): 'getFullName' and 'scope' are constructed but never passed
    // to the parser — presumably placeholders for future template-scope
    // binding; confirm before removing.
    Func<string, string, string> getFullName = (f, l) => { return l + f; };
    var scope = new { FirstName = "", LastName = "", GetFullName = getFullName };

    Lexer lexer = new Lexer(input);
    NodeParser parser = new NodeParser(lexer);
    try
    {
        parser.Parse();
    }
    catch
    {
        // Best-effort demo: swallow parse errors and report.
        Console.WriteLine("Doh!!");
    }
}