// Parses a YAML front-matter header from the beginning of `source`.
// On success, `pageContext` receives the header mapping and `source` is
// rewritten to the remaining content (with the CR/LF run right after the
// header stripped). Returns false when no header mapping is present.
// NOTE(review): relies on the pre-5.x YamlDotNet API where Accept<T>()
// peeks without consuming and Current.End.Index is a character offset
// into the original string — TODO confirm against the referenced version.
public bool ParseHeader(ref string source, out Dictionary <string, object> pageContext) {
    var input = new StringReader(source);
    var parser = new Parser(input);
    pageContext = null;
    int i;
    parser.Expect <StreamStart>();
    // No document at all => no front matter.
    if (!parser.Accept <DocumentStart>()) { return(false); }
    var doc = _deserializer.Deserialize(parser);
    if (doc == null) { return(false); }
    // Header must convert to a string-keyed dictionary to count as front matter.
    pageContext = ConvertDoc(doc) as Dictionary <string, object>;
    if (pageContext == null) { return(false); }
    // A second DocumentStart must follow the header document.
    if (!parser.Accept <DocumentStart>()) { return(false); }
    // Index of the last character of the current event; scan forward from it.
    i = parser.Current.End.Index - 1;
    char c;
    do {
        i++;
        // Header consumed the whole input: nothing remains.
        if (i >= source.Length) { source = string.Empty; return(true); }
        c = source[i];
    } while (c == '\r' || c == '\n');  // skip the newline run after the header
    source = source.Substring(i);
    return(true);
}
/// <summary>
/// Recursively sums every number inside one bracketed scope. Within an
/// object scope ({...}) the ident "red" discards that scope's whole total;
/// within an array scope ([...]) "red" is ignored.
/// </summary>
private static int Parse(Parser p)
{
    // Decide which closing symbol terminates this scope.
    Symbol closer;
    if (p.Accept(Symbol.LBracket))
    {
        closer = Symbol.RBracket;       // object scope
    }
    else
    {
        p.Expect(Symbol.LSqBracket);
        closer = Symbol.RSqBracket;     // array scope
    }

    var values = new List<int>();
    var sawRed = false;

    while (!p.Done() && p.PeekSymbol() != closer)
    {
        var sym = p.PeekSymbol();
        if (sym == Symbol.LBracket || sym == Symbol.LSqBracket)
        {
            // Nested scope: recurse and keep its total.
            values.Add(Parse(p));
        }
        else if (sym == Symbol.Ident)
        {
            if (p.Accept("red"))
            {
                // "red" only poisons object scopes.
                sawRed = closer == Symbol.RBracket;
            }
            else if (p.AcceptNumber(out var n))
            {
                values.Add(n);
            }
            else
            {
                p.Burn();               // unrecognized ident: skip it
            }
        }
        else
        {
            p.Burn();                   // punctuation/other: skip it
        }
    }

    p.Burn();                           // consume the closing symbol
    return sawRed ? 0 : values.Sum();
}
/// <summary>
/// Load a collection of objects from a string
/// </summary>
/// <param name="content">
/// The string to load the objects from.
/// </param>
/// <param name="typeMap">
/// A map from apiVersion/kind to Type. For example "v1/Pod" -> typeof(V1Pod)
/// </param>
/// <returns>collection of objects</returns>
public static List <object> LoadAllFromString(string content, Dictionary <string, Type> typeMap)
{
    if (typeMap == null)
    {
        throw new ArgumentNullException(nameof(typeMap));
    }

    // First pass: read each document just far enough to learn its
    // apiVersion/kind, and resolve the concrete CLR type for it.
    var resolvedTypes = new List<Type>();
    var scanner = new Parser(new StringReader(content));
    scanner.Consume<StreamStart>();
    while (scanner.Accept<DocumentStart>(out _))
    {
        var header = Deserializer.Deserialize<KubernetesObject>(scanner);
        resolvedTypes.Add(typeMap[header.ApiVersion + "/" + header.Kind]);
    }

    // Second pass: re-parse the same content, deserializing each document
    // into the type resolved above.
    var reparser = new Parser(new StringReader(content));
    reparser.Consume<StreamStart>();
    var results = new List<object>();
    var index = 0;
    while (reparser.Accept<DocumentStart>(out _))
    {
        results.Add(Deserializer.Deserialize(reparser, resolvedTypes[index++]));
    }

    return results;
}
/// <summary>
/// Load a collection of objects from a string
/// </summary>
/// <param name="content">
/// The string to load the objects from.
/// </param>
/// <param name="typeMap">
/// A map from apiVersion/kind to Type. For example "v1/Pod" -> typeof(V1Pod). If null, a default mapping will
/// be used.
/// </param>
/// <returns>collection of objects</returns>
public static List <object> LoadAllFromString(string content, IDictionary <string, Type> typeMap = null)
{
    // Start from the built-in model map, then let caller-supplied entries
    // override any defaults.
    var mergedTypeMap = new Dictionary<string, Type>(ModelTypeMap);
    if (typeMap != null)
    {
        foreach (var entry in typeMap)
        {
            mergedTypeMap[entry.Key] = entry.Value;
        }
    }

    // First pass: discover each document's apiVersion/kind and resolve
    // the concrete CLR type for it.
    var resolvedTypes = new List<Type>();
    var scanner = new Parser(new StringReader(content));
    scanner.Consume<StreamStart>();
    while (scanner.Accept<DocumentStart>(out _))
    {
        var header = Deserializer.Deserialize<KubernetesObject>(scanner);
        resolvedTypes.Add(mergedTypeMap[header.ApiVersion + "/" + header.Kind]);
    }

    // Second pass: re-parse the stream, deserializing each document into
    // the type resolved above.
    var reparser = new Parser(new StringReader(content));
    reparser.Consume<StreamStart>();
    var results = new List<object>();
    var index = 0;
    while (reparser.Accept<DocumentStart>(out _))
    {
        results.Add(Deserializer.Deserialize(reparser, resolvedTypes[index++]));
    }

    return results;
}
/// <summary>
/// Parses <paramref name="input"/>, pretty-prints the resulting AST via
/// FormattingVisitor, and asserts the output equals <paramref name="expected"/>.
/// </summary>
private static void AssertParse(string input, string expected)
{
    var tree = new Parser(Tokenizer.Create()).Parse(input);
    var printer = new FormattingVisitor();
    tree.Accept(printer);
    printer.GetString().Should().Be(expected);
}
/// <summary>
/// Dispatches the visitor to this node, then to its two children.
/// </summary>
public void Accept(ParserVisitor visitor)
{
    // FIX: the children were previously visited with the outer `visitor`,
    // ignoring the visitor `x` supplied to the callback. Every sibling
    // Accept implementation in this file forwards `x`, and Visit may hand
    // the callback a different/decorated visitor — forward it here too.
    visitor.Visit(this, x =>
    {
        _first.Accept(x);
        _second.Accept(x);
    });
}
// [Theory]
// [InlineData("{a}")]
// [InlineData("{")]
// [InlineData("{a")]
// [InlineData("graph{a")]
// public void InvalidSyntax(string sourceText)
//     => Assert.ThrowsAny<Exception>(() => AssertRoundTrip(sourceText));

/// <summary>
/// Parses <paramref name="sourceText"/>, verifies every token in the tree
/// carries its source text, and asserts the tree prints back to exactly
/// the original input.
/// </summary>
SyntaxTree AssertRoundTrip(string sourceText)
{
    var tree = new Parser(sourceText).Parse();
    tree.Accept(new AssertTokensHaveSourceText());
    Assert.Equal(sourceText, tree.ToString());
    return tree;
}
/// <summary>Dispatches the visitor to this node, then to both child parsers.</summary>
void AcceptParserVisitor.Accept(ParserVisitor visitor) =>
    visitor.Visit(this, v =>
    {
        _parser.Accept(v);
        _except.Accept(v);
    });
/// <summary>Dispatches the visitor to this node, then to the element and separator parsers.</summary>
public void Accept(ParserVisitor visitor) =>
    visitor.Visit(this, v =>
    {
        _element.Accept(v);
        _separator.Accept(v);
    });
/// <summary>Reads a YAML sequence of node-style mappings into the meta model.</summary>
private void DeserializeNodeStyles(MetaModel.MetaModel metaModel, Parser r)
{
    r.Expect <SequenceStart>();
    // One mapping per node style; Accept peeks for the next MappingStart.
    for (;;)
    {
        if (!r.Accept <MappingStart>())
        {
            break;
        }
        DeserializeNodeStyle(metaModel, r);
    }
    r.Expect <SequenceEnd>();
}
/// <summary>Reads a YAML sequence of scalar file paths into metaModel.RecentFiles.</summary>
private void DeserializeRecentFiles(MetaModel.MetaModel metaModel, Parser r)
{
    r.Expect <SequenceStart>();
    // Accept peeks for the next scalar; Expect then consumes it.
    for (;;)
    {
        if (!r.Accept <Scalar>())
        {
            break;
        }
        metaModel.RecentFiles.Add(r.Expect <Scalar>().Value);
    }
    r.Expect <SequenceEnd>();
}
/// <summary>
/// Lazily deserializes each YAML document in <paramref name="reader"/> as a
/// Transformation, one per document.
/// </summary>
public IEnumerable <Transformation> ParseManifests(TextReader reader)
{
    var yaml = new Parser(reader);
    yaml.Consume<StreamStart>();
    for (;;)
    {
        if (!yaml.Accept<DocumentStart>(out _))
        {
            yield break;
        }
        yield return deserializer.Deserialize<Transformation>(yaml);
    }
}
/// <summary>
/// Plays a scripted dialogue sequence from ./config/{file.Sequence}: each
/// YAML document is one SequenceStep, typed out character by character with
/// per-step color, speed, delay, and optional line break. The speaker name
/// and color persist across steps until a step overrides them.
/// </summary>
private async Task ParseSequence(ICommandHost host, Data.File file)
{
    // FIX: the StreamReader was never disposed, leaking the file handle.
    using var reader = new StreamReader($"./config/{file.Sequence}");
    var deserializer = new DeserializerBuilder()
        .WithNamingConvention(CamelCaseNamingConvention.Instance)
        .Build();
    var parser = new Parser(reader);
    parser.Consume<StreamStart>();

    string? speaker = null;
    string speakerColor = "68C355";   // default speaker-name color
    bool isNewLine = true;

    while (parser.Accept<DocumentStart>(out _))
    {
        var step = deserializer.Deserialize<SequenceStep>(parser);

        // Sticky speaker/color: only update when the step provides them.
        if (step.Speaker != null)
        {
            speaker = step.Speaker;
        }
        if (step.SpeakerColor != null)
        {
            speakerColor = step.SpeakerColor;
        }

        // Prefix each new line with "Speaker: " when a speaker is set.
        if (!string.IsNullOrWhiteSpace(speaker) && isNewLine)
        {
            await host.Write($"{speaker}: ".Pastel(speakerColor));
        }

        // Type the text one character at a time; Speed is chars per second.
        foreach (var c in step.Text)
        {
            await host.Write(c.ToString().Pastel(step.Color));
            await Task.Delay(1000 / step.Speed);
        }

        if (step.LineBreak)
        {
            isNewLine = true;
            await host.WriteLine();
        }
        else
        {
            isNewLine = false;
        }

        await Task.Delay(step.Delay);
    }
}
/// <summary>
/// Deserializes every YAML document in <paramref name="content"/> as a
/// Document and appends it to the documents collection.
/// </summary>
public void FromString(string content)
{
    var deserializer = new DeserializerBuilder().Build();
    var parser = new Parser(new StringReader(content));
    // Consume the stream-start event before iterating documents.
    parser.Expect <StreamStart>();
    while (parser.Accept <DocumentStart>())
    {
        documents.Add(deserializer.Deserialize <Document>(parser));
    }
}
/// <summary>
/// Deserializes the first YAML document found in the text.
/// </summary>
/// <typeparam name="TResult">Target type for the first document.</typeparam>
/// <param name="reader">Reader positioned at the start of a YAML stream.</param>
/// <returns>The deserialized first document.</returns>
/// <exception cref="ArgumentException">The input contains no YAML document.</exception>
private static TResult DeserializeFirst <TResult>(TextReader reader)
{
    var parser = new Parser(reader);
    parser.Expect <StreamStart>();
    var deserializer = new DeserializerBuilder().Build();
    if (parser.Accept <DocumentStart>())
    {
        return deserializer.Deserialize <TResult>(parser);
    }
    // FIX: the bare ArgumentException carried no diagnostic information.
    throw new ArgumentException("The input contains no YAML document.", nameof(reader));
}
/// <summary>
/// Deserializes every YAML document in <paramref name="data"/> as a TType
/// and returns them in document order.
/// </summary>
public List <TType> FromString <TType>(string data)
{
    var result = new List<TType>();
    var parser = new Parser(new StringReader(data));
    // Consume the stream start event "manually"
    parser.Expect <StreamStart>();
    while (parser.Accept <DocumentStart>())
    {
        result.Add(deserializer.Deserialize <TType>(parser));
    }
    return result;
}
/// <summary>
/// Pass in YAML in the form of a string and return a ParseResult with one or more Actions deserialized.
/// </summary>
/// <param name="yaml">YAML text containing zero or more Action documents.</param>
/// <returns>A ParseResult holding the parsed Actions and any errors recorded.</returns>
public ParseResult Parse(string yaml)
{
    ParseResult result = new ParseResult();
    if (string.IsNullOrWhiteSpace(yaml))
    {
        Exception e = new Exception("Empty or null YAML string.");
        result.CatchError(e);
    }
    else
    {
        yaml = ParsingHelper.CleanseYaml(yaml);
        StringReader input = new StringReader(yaml);
        var deserializer = new DeserializerBuilder()
            .WithNamingConvention(new CamelCaseNamingConvention())
            .Build();
        var parser = new Parser(input);
        parser.Expect <StreamStart>();
        while (parser.Accept <DocumentStart>())
        {
            try
            {
                var doc = deserializer.Deserialize <Action>(parser);
                doc.YamlArtifactID = -1;
                result.Library.Actions.Add(doc);
            }
            catch (Exception e)
            {
                if (e.InnerException != null && e.InnerException.Message.Contains("not found"))
                {
                    // An unknown action type name appeared in the YAML.
                    result.CatchError(Constants.ParseErrors.YamlParsingErrorInvalidTypeName,
                        Constants.ParseErrors.YamlParsingErrorInvalidTypeComment, e);
                }
                else
                {
                    // FIX: previously an exception whose InnerException message
                    // did NOT contain "not found" was silently swallowed (the
                    // inner if had no else). Record every other failure too.
                    result.CatchError(e);
                }
            }
        }
    }
    return result;
}
/// <summary>
/// Load a collection of objects from a string
/// </summary>
/// <param name="content">
/// The string to load the objects from.
/// </param>
/// <param name="typeMap">
/// A map from apiVersion/kind to Type. For example "v1/Pod" -> typeof(V1Pod)
/// </param>
/// <returns>collection of objects</returns>
public static List <object> LoadAllFromString(string content, Dictionary <string, Type> typeMap)
{
    if (typeMap == null)
    {
        throw new ArgumentNullException(nameof(typeMap));
    }

    // First pass: a lenient deserializer (unmatched properties ignored)
    // reads each document as a bare KubernetesObject just to discover its
    // apiVersion/kind and resolve the concrete CLR type.
    var scanDeserializer = new DeserializerBuilder()
        .WithNamingConvention(CamelCaseNamingConvention.Instance)
        .WithTypeInspector(ti => new AutoRestTypeInspector(ti))
        .WithTypeConverter(new IntOrStringYamlConverter())
        .WithTypeConverter(new ByteArrayStringYamlConverter())
        .IgnoreUnmatchedProperties()
        .Build();

    var resolvedTypes = new List<Type>();
    var scanner = new Parser(new StringReader(content));
    scanner.Consume<StreamStart>();
    while (scanner.Accept<DocumentStart>(out _))
    {
        var header = scanDeserializer.Deserialize<KubernetesObject>(scanner);
        resolvedTypes.Add(typeMap[header.ApiVersion + "/" + header.Kind]);
    }

    // Second pass: a strict deserializer (unmatched properties NOT ignored)
    // materializes each document into the type resolved above.
    var strictDeserializer = new DeserializerBuilder()
        .WithNamingConvention(CamelCaseNamingConvention.Instance)
        .WithTypeInspector(ti => new AutoRestTypeInspector(ti))
        .WithTypeConverter(new IntOrStringYamlConverter())
        .WithTypeConverter(new ByteArrayStringYamlConverter())
        .Build();

    var reparser = new Parser(new StringReader(content));
    reparser.Consume<StreamStart>();
    var results = new List<object>();
    var index = 0;
    while (reparser.Accept<DocumentStart>(out _))
    {
        results.Add(strictDeserializer.Deserialize(reparser, resolvedTypes[index++]));
    }

    return results;
}
/// <summary>Reads every YAML document in the stream as a YamlOctopusModel.</summary>
public YamlOctopusModel[] Read(Stream stream)
{
    using (var reader = new StreamReader(stream))
    {
        var parser = new Parser(reader);
        parser.Expect <StreamStart>();
        var models = new List<YamlOctopusModel>();
        while (parser.Accept <DocumentStart>())
        {
            models.Add(_deserializer.Deserialize <YamlOctopusModel>(parser));
        }
        return models.ToArray();
    }
}
/// <summary>
/// Enumerates every stub document found under <paramref name="endpointFolder"/>
/// (recursively, per StubFilePattern), normalizing string-property line endings
/// and tagging each stub with its file location and document index.
/// </summary>
public static IEnumerable <TStub> ReadStubs <TStub>(string endpointFolder) where TStub : Stub
{
    var stringProperties = typeof(TStub)
        .GetProperties()
        .Where(p => p.PropertyType == typeof(String))
        .ToArray();

    foreach (var stubFile in Directory.GetFiles(endpointFolder, StubFilePattern, SearchOption.AllDirectories))
    {
        // Stubs parked under a "_Missing" folder are skipped.
        if (stubFile.Contains(@"\_Missing\"))
        {
            continue;
        }

        using (var fileContent = new StringReader(File.ReadAllText(stubFile)))
        {
            var parser = new Parser(fileContent);
            parser.Expect <StreamStart>();
            var docIndex = 1;
            while (parser.Accept <DocumentStart>())
            {
                var stub = YamlDesirializer.Deserialize <TStub>(parser);

                // Normalize LF to CRLF on every non-blank string property.
                // NOTE(review): assumes deserialized values never already hold
                // "\r\n" — if they did, Replace would yield "\r\r\n". Confirm.
                foreach (var property in stringProperties)
                {
                    if (property.GetValue(stub) is String value && !String.IsNullOrWhiteSpace(value))
                    {
                        property.SetValue(stub, value.TrimEnd().Replace("\n", "\r\n"));
                    }
                }

                stub.FilePath = stubFile;
                stub.Name = Path.GetFileName(stubFile);
                stub.FolderPath = endpointFolder;
                stub.DocumentIndex = docIndex;
                yield return stub;
                docIndex++;
            }
        }
    }
}
// Verifies that a LINQ-composed parser (where + select over AnyParser<int>)
// renders through ParserVisualizer as the expected tree description.
// NOTE(review): `expected` is a verbatim string whose exact whitespace and
// newline layout is significant and must not be reformatted.
public void Should_support_a_combined_parser() {
    var anyParser = new AnyParser <int>();
    // Combined parser: filter to values equal to 1, then project them.
    Parser <int[], int> parser = from x in anyParser where x == 1 select x;
    var visualizer = new ParserVisualizer();
    parser.Accept(visualizer);
    string text = visualizer.ToString();
    Console.WriteLine(text);
    string expected = @" (Int32) * (Int32) Where x => (x == 1)";
    Assert.AreEqual(expected, text);
}
/// <summary>Loads every level document from the YAML file at LevelsPath into Levels.</summary>
public void LoadLevelData()
{
    // Build the YAML deserializer with camel-case naming.
    var deserializer = new DeserializerBuilder()
        .WithNamingConvention(new CamelCaseNamingConvention())
        .Build();

    var fileContents = File.ReadAllText(LevelsPath);
    var parser = new Parser(new StringReader(fileContents));
    parser.Expect <StreamStart>();

    Levels = new List<LevelYaml>();
    while (parser.Accept <DocumentStart>())
    {
        // NOTE: when the map is read in as a single string, an extra space
        // appears after the first line and every subsequent line, so map
        // parsing must skip that extra space.
        Levels.Add(deserializer.Deserialize <LevelYaml>(parser));
    }
}
/// <summary>
/// Parses a string into a collection of YAML documents.
/// </summary>
/// <param name="text">A string containing one or more YAML documents.</param>
/// <returns>List of parsed documents.</returns>
protected virtual IList <object> ParseYamlObjects(string text)
{
    var parser = new Parser(new StringReader(text));
    var deserializer = new Deserializer();
    IList<object> documents = new List<object>();

    // Consume the stream start event "manually"
    parser.Expect <StreamStart>();
    while (parser.Accept <DocumentStart>())
    {
        // Deserialize one document and collect it.
        documents.Add(deserializer.Deserialize(parser));
    }
    return documents;
}
/// <summary>
/// Deserializes each document in Document as a list of strings and writes
/// every item to the output, preceded by a "## Document" header.
/// </summary>
public void Main()
{
    var deserializer = new DeserializerBuilder().Build();
    var parser = new Parser(new StringReader(Document));

    // Consume the stream start event "manually"
    parser.Expect <StreamStart>();
    while (parser.Accept <DocumentStart>())
    {
        var doc = deserializer.Deserialize <List <string> >(parser);
        output.WriteLine("## Document");
        foreach (var item in doc)
        {
            output.WriteLine(item);
        }
    }
}
/// <summary>
/// Loads settings from the YAML file at <paramref name="path"/> and copies
/// every deserialized property value onto this instance via reflection.
/// </summary>
/// <exception cref="Exceptions.FileNotFoundException">The file does not exist.</exception>
public Settings(string path)
{
    if (!System.IO.File.Exists(path))
    {
        throw new Exceptions.FileNotFoundException();
    }

    Deserializer deserializer = new DeserializerBuilder()
        .WithNamingConvention(new CamelCaseNamingConvention())
        .Build();

    using (StreamReader sr = new StreamReader(path))
    {
        var parser = new Parser(sr);
        parser.Expect <StreamStart>();
        while (parser.Accept <DocumentStart>())
        {
            var loaded = deserializer.Deserialize <Settings>(parser);
            // Mirror each property of the deserialized instance onto `this`.
            foreach (PropertyInfo pi in loaded.GetType().GetProperties())
            {
                pi.SetValue(this, pi.GetValue(loaded));
            }
        }
    }
}
/// <summary>
/// Name and Title are mandatory while shortcut is optional
/// </summary>
/// <param name="metaModel">Model that receives the parsed icon.</param>
/// <param name="r">YAML parser positioned at the icon mapping.</param>
private void DeserializeIcon(MetaModel.MetaModel metaModel, Parser r) {
    string name = null, title = null, shortcut = null;
    r.Expect <MappingStart>();
    r.Expect <Scalar>(); // name key
    name = r.Expect <Scalar>().Value;
    r.Expect <Scalar>(); // title key
    title = r.Expect <Scalar>().Value;
    // Optional third entry: only attempt it when another scalar is pending.
    if (r.Accept <Scalar>()) {
        // NOTE(review): this Expect consumes the key scalar even when it is
        // NOT "shortcut"; a mapping with a different third key would then
        // fail on the MappingEnd below — confirm inputs only use "shortcut".
        if (r.Expect <Scalar>().Value.Equals(Shortcut)) {
            shortcut = r.Expect <Scalar>().Value;
        }
    }
    r.Expect <MappingEnd>();
    metaModel.IconsList.Add(new ModelIcon(name, title, shortcut));
}
/// <summary>
/// Deserializes each YAML document in Document as a list of strings and
/// prints every item to the console under a "##Document" header.
/// </summary>
public void Run()
{
    var deserializer = new DeserializerBuilder().Build();
    var parser = new Parser(new StringReader(Document));

    // Consume the stream-start event manually before iterating documents.
    parser.Consume <StreamStart>();
    while (parser.Accept <DocumentStart>())
    {
        var doc = deserializer.Deserialize <List <string> >(parser);
        Console.WriteLine("##Document");
        foreach (var item in doc)
        {
            Console.WriteLine(item);
        }
    }
}
/// <summary>
/// In my implementation the cache object is a List of nodes meta-information.
/// </summary>
/// <param name="path">The path to an existing file on a disk</param>
/// <param name="interruptChecker">The function, that checks, if cache building should be interrupted.
/// It throws <c>OperationCanceledException</c> in case of interrupt is necessary</param>
/// <returns>Cache data for the given file,
/// that is actually a <c>List&lt;NodeDescription&gt;</c></returns>
public object Build(string path, Action interruptChecker)
{
    var cache = new List<NodeDescription>();
    using var input = new StreamReader(path);
    var deserializer = new DeserializerBuilder()
        .IgnoreUnmatchedProperties()
        .WithNodeTypeResolver(new UnityNodeTypeResolver())
        .Build();
    var parser = new Parser(input);
    parser.Consume<StreamStart>();

    var docsSinceCheck = 0;
    while (parser.Accept<DocumentStart>(out _))
    {
        docsSinceCheck++;
        cache.AddRange(deserializer.Deserialize<Dictionary<string, NodeDescription>>(parser).Values);

        // Only poll the interrupt checker once every _checkFrequency documents.
        if (docsSinceCheck != _checkFrequency)
        {
            continue;
        }
        try
        {
            interruptChecker();
        }
        catch (OperationCanceledException)
        {
            // Interrupted: hand back the partial cache built so far.
            return cache;
        }
        docsSinceCheck = 0;
    }
    return cache;
}
/// <summary>
/// Reads every StoryNode document from the YAML asset and registers each
/// non-null node on the scene's GameController script table, keyed by id.
/// </summary>
private void ParseYaml(TextAsset text)
{
    if (text == null)
    {
        return;
    }

    GameController gameController = FindObjectOfType <GameController> ();
    // NOTE(review): `nodes` is never used below — kept to preserve behavior;
    // a candidate for removal.
    List<StoryNode> nodes = new List<StoryNode>();

    var deserializer = new DeserializerBuilder()
        .WithNamingConvention(new CamelCaseNamingConvention())
        .IgnoreUnmatchedProperties()
        .Build();
    var parser = new Parser(new StringReader(NormalizeToAscii(text.bytes)));

    // Consume the stream start event "manually"
    parser.Expect <StreamStart>();
    while (parser.Accept <DocumentStart>())
    {
        var doc = deserializer.Deserialize <StoryNode>(parser);
        if (doc != null)
        {
            gameController.script.Add(doc.id, doc);
        }
    }
    Debug.Log("finished a parse");
}
/// <summary>Dispatches the visitor to this node, then to the wrapped parser.</summary>
public void Accept(ParserVisitor visitor) =>
    visitor.Visit(this, v => _parser.Accept(v));