public static void Add(string file, ITextSnapshot snapshot)
{
    var fileInCache = false;

    lock (CacheLock)
    {
        fileInCache = Cache.ContainsKey(file);
    }

    if (fileInCache)
    {
        Update(file, snapshot);
    }
    else
    {
        var doc = RapidXamlDocument.Create(snapshot, file, vsa);

        lock (CacheLock)
        {
            Cache.Add(file, doc);
        }

        Parsed?.Invoke(null, new RapidXamlParsingEventArgs(doc, file, snapshot, ParsedAction.Add));
    }
}
/// <summary>
/// Main entry point to checker command line tool.
/// </summary>
/// <param name="args">The command line arguments.</param>
private static void Main(string[] args)
{
    ParserResult<Options> result = Parser.Default.ParseArguments<Options>(args);
    if (ParserResultType.NotParsed == result.Tag)
    {
        return;
    }

    Parsed<Options> success = (Parsed<Options>)result;
    Options options = success.Value;

    List<WindowsFirewallInconsistency> inconsistencies = Program.CheckFirewalls(options);
    if (!inconsistencies.Any())
    {
        Console.ForegroundColor = ConsoleColor.Green;
        Console.WriteLine("Firewalls are equivalent.");
        Console.ResetColor();
    }
    else
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("Firewalls are NOT equivalent.");
        Console.ResetColor();
        Console.WriteLine();
        Program.PrintInconsistentPackets(inconsistencies);
        Console.WriteLine();
        Program.PrintRuleMatches(inconsistencies);
    }
}
private void ReturnIdOrTokenOrLabel(LexerState lexerState, Symbol symbol)
{
    if (_tokens.Contains(CurrentToken.ToString()))
    {
        CurrentToken.TokenIndex = _tokens.IndexOf(CurrentToken.ToString()) + 1;
        Log(LogEventLevel.Information, "Found token {0}", false, CurrentToken);
        ReturnToken(CurrentToken, symbol);
    }
    else if (symbol.Class?.Class == Class.Colon || Parsed.Last().Substring == "goto")
    {
        // Label
        ReturnLabel(new StateMachine.Transition(lexerState, LexerState.LabelDefinition, symbol));
    }
    else
    {
        Log(LogEventLevel.Information, "Not found token - treat as ID: {0}", false, CurrentToken);

        var identifier = Identifiers.FirstOrDefault(x => x.Name == CurrentToken.ToString())?.Clone() as IdentifierToken;
        if (identifier == null)
        {
            identifier = new IdentifierToken(CurrentToken.ToString()) { TokenIndex = IdIndex };
            Identifiers.Add(identifier);
        }

        identifier.Line = Line;
        ReturnToken(identifier, symbol);
    }
}
/// <summary>
/// The entry point of the program.
/// </summary>
/// <param name="args">Command line arguments.</param>
/// <returns>The program's exit code.</returns>
static async Task<int> Main(string[] args)
{
    try
    {
        var settings = Parser.Default.ParseArguments<Settings>(args) switch
        {
            Parsed<Settings> success => success.Value,
            _ => throw new ArgumentException("The command line arguments are not valid")
        };

        var magnification = (int)Math.Ceiling(Math.Log(settings.DataCount) * 0.5 + settings.DataCount * 0.0000007); // rough heuristic
        var outputTickCount = settings.DataCount / magnification + 1;

        using (var pbar = new ProgressBar(settings.DataCount + outputTickCount, "", new ProgressBarOptions { ProgressCharacter = '-' }))
        {
            var creater = new RandamDataCreater(settings);
            var outputer = new Outputer();

            creater.DataCreatedEvent += (sender, e) => pbar.Tick();

            int i = 0;
            outputer.WroteDataEvent += (sender, e) =>
            {
                if (i++ % magnification == 0)
                {
                    pbar.Tick();
                }
            };

            await outputer.OutputFile(await creater.CreateAsync(), settings.FilePath, settings.Formatting);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }

    return 0;
}
public static void Main(string[] args)
{
#if DEBUG
    if (args == null || args.Length < 1)
    {
        args = new string[] { "-i", ".\\Test", "--extensionw", ".h", ".cpp", "--outputlog" };
    }
#endif
    AppDomain.CurrentDomain.UnhandledException += OnException;

    ParserResult<Options> result = Parser.Default.ParseArguments<Options>(args);
    switch (result.Tag)
    {
        case ParserResultType.Parsed:
            Parsed<Options> parsed = (Parsed<Options>)result;
            Action action = new Action(parsed.Value);
            break;
        case ParserResultType.NotParsed:
        default:
            Options template = new Options();
            template.OutputEncoding = "gbk";
            template.ExtensionWhiteList = new string[] { ".c", ".h", ".cpp" };
            template.ExtensionBlackList = new string[] { ".txt", ".png" };
            template.InputDirectory = "D:\\";
            template.OutputLog = true;

            Console.WriteLine("Example:");
            Console.WriteLine("\tbash " + Parser.Default.FormatCommandLine<Options>(template));
            break;
    }

    Console.ReadKey();
}
public Reply<T> Parse(TokenStream tokens)
{
    var start = tokens.Position;
    var reply = parsers[0].Parse(tokens);
    var newPosition = reply.UnparsedTokens.Position;
    var errors = ErrorMessageList.Empty;
    var i = 1;

    // Try each alternative in turn, but only while the previous alternative
    // failed without consuming any input; accumulate its error messages.
    while (!reply.Success && (start == newPosition) && i < parsers.Length)
    {
        errors = errors.Merge(reply.ErrorMessages);
        reply = parsers[i].Parse(tokens);
        newPosition = reply.UnparsedTokens.Position;
        i++;
    }

    if (start == newPosition)
    {
        errors = errors.Merge(reply.ErrorMessages);

        if (reply.Success)
        {
            reply = new Parsed<T>(reply.Value, reply.UnparsedTokens, errors);
        }
        else
        {
            reply = new Error<T>(reply.UnparsedTokens, errors);
        }
    }

    return reply;
}
/// <summary>
/// Checks the command line params.<para/>
/// arguments format: key=value or --key value
/// </summary>
/// <param name="args">The args.</param>
public FlexibleOptions Parse(string[] args)
{
    // 1. parse local configuration file
    // display the options listed in the configuration file
    FlexibleOptions localOptions = ParseAppSettings();

    // 2. parse console arguments
    // parse arguments like: key=value
    FlexibleOptions argsOptions = ParseCommandLineArguments(args);

    // 3. merge arguments with app.config options. Priority: arguments > app.config
    Parsed = FlexibleOptions.Merge(localOptions, argsOptions);

    // 4. check for external config file
    // set config alias
    Parsed.SetAlias("config", "S3ConfigurationPath", "webConfigurationFile");

    // load and parse web hosted configuration file (priority order: argsOptions > localOptions)
    ExternalFiles = Parsed.GetAsList("config", new char[] { ',', ';' });
    FlexibleOptions externalLoadedOptions = ParseExternalFiles(ExternalFiles);

    // 5. merge options with the following priority:
    //    1. console arguments
    //    2. external file with json configuration object (local or web)
    //    3. local configuration file (app.config or web.config)
    Parsed = FlexibleOptions.Merge(Parsed, externalLoadedOptions, argsOptions);

    // return final merged options
    return Parsed;
}
public override void ParseAttribute(string attribute, string value)
{
    ParsedData p = new ParsedData(attribute, value);
    if (Parsed.Contains(p))
    {
        return;
    }

    Parsed.Add(p);

    if (attribute.ToLower() == "content")
    {
        Text = (c) => c.GetWrapper().TryLoadText(value, c, out string text) ? text : "";
    }
    else if (attribute.ToLower() == "font")
    {
        Font = (c) => c.GetWrapper().TryGet(value, out Font font) ? font : null;
    }
    else if (attribute.ToLower() == "textscale")
    {
        Scale = float.TryParse(value, out float scale) ? scale : 1f;
    }
    else if (attribute.ToLower() == "textlimit")
    {
        TextLimit = int.TryParse(value, out int limit) ? limit : -1;
    }
    else
    {
        Parsed.Remove(p);
        base.ParseAttribute(attribute, value);
    }
}
public static void Add(string file, ITextSnapshot snapshot)
{
    var fileInCache = false;

    lock (CacheLock)
    {
        fileInCache = Cache.ContainsKey(file);
    }

    if (fileInCache)
    {
        Update(file, snapshot);
    }
    else
    {
        // Don't worry about timing this call as it's only repeated calls to analyze a document that might cause a user prompt.
        // This only happens on document open. Repeated analysis of a document will happen through TryUpdate.
        var doc = RapidXamlDocument.Create(snapshot, file, vsa, string.Empty);

        lock (CacheLock)
        {
            Cache.Add(file, doc);
        }

        Parsed?.Invoke(null, new RapidXamlParsingEventArgs(doc, file, snapshot, ParsedAction.Add));
    }
}
public void Should_pull_the_optional_segments_without_a_glitch()
{
    const string message = @"MSH|^~\&|LIFTLAB||UBERMED||201701131234||ORU^R01|K113|P|
EVN|A08|201701131234|||12901";

    Parsed<HL7Entity> parsed = Parser.Parse(message);

    ILayout<MessageLayout, HL7Entity> layout;
    Assert.That(Structure.TryGetLayout(out layout), Is.True);

    Parser<HL7Entity, MessageLayout> query = parsed.CreateQuery(q => layout.CreateQuery(TemplateQueryOptions.None, q));

    Result<Cursor<HL7Entity>, MessageLayout> result = parsed.Query(query);

    Assert.That(result.HasValue, Is.True);
    Assert.That(result.Value.MSH, Is.Not.Null);
    Assert.That(result.Value.MSH.IsPresent, Is.True);
    Assert.That(result.Value.Optional.IsPresent, Is.True);
    Assert.That(result.Value.Optional.Value.EVN, Is.Not.Null);
    Assert.That(result.Value.Optional.Value.EVN.IsPresent, Is.True);
    Assert.That(result.Value.Optional.Value.EVN.HasValue, Is.True);
    Assert.That(result.Value.Optional.Value.EVN.Value.SegmentId.HasValue, Is.True);
    Assert.That(result.Value.Optional.Value.EVN.Value.RecordedDateTime.HasValue, Is.True);
}
///////////////////////
// Overridden functions

public override string ToString()
{
    switch (_type)
    {
        case HqlWordType.UNKNOWN:
        case HqlWordType.LITERAL_STRING:
        case HqlWordType.TEXT:
            return Data;
        case HqlWordType.END_OF_LINE:
            return "\\n";
        case HqlWordType.FIELD:
        case HqlWordType.FUNCTION:
        case HqlWordType.SCALAR:
            return Field.ToString();
        case HqlWordType.FLOAT:
        case HqlWordType.INT:
        case HqlWordType.ROWNUM:
            return Parsed.ToString();
        case HqlWordType.KEYWORD:
            return Data;
        case HqlWordType.NULL:
            return "NULL";
        default:
            throw new Exception("Unknown type of token");
    }
}
static void Main(string[] args)
{
    var parseResult = Parser.Default.ParseArguments<Options>(args);
    if (parseResult.Tag == ParserResultType.Parsed)
    {
        Parsed<Options> parsed = (Parsed<Options>)parseResult;
        var logger = new Logger(parsed.Value.Verbose);

        try
        {
            var trainer = trainerFactory(parsed.Value, logger);
            trainer.Train();
        }
        catch (ArgumentException ex)
        {
            Console.WriteLine(new StringBuilder("Argument exception: ").Append(ex.Message));
        }
        catch (Exception ex)
        {
            Console.WriteLine(new StringBuilder("Generic exception: ").Append(ex.Message));
        }
    }

    Console.WriteLine("...END");
    Console.ReadLine();
}
public static void Update(string file, ITextSnapshot snapshot)
{
    var snapshotText = snapshot.GetText();

    bool alreadyCached = false;

    lock (CacheLock)
    {
        alreadyCached = Cache.ContainsKey(file) && Cache[file].RawText == snapshotText;
    }

    if (!alreadyCached)
    {
        if (!CurrentlyProcessing.Contains(snapshotText))
        {
            try
            {
                CurrentlyProcessing.Add(snapshotText);

                var doc = RapidXamlDocument.Create(snapshot, file, vsa);

                lock (CacheLock)
                {
                    Cache[file] = doc;
                }

                Parsed?.Invoke(null, new RapidXamlParsingEventArgs(doc, file, snapshot, ParsedAction.Update));
            }
            finally
            {
                CurrentlyProcessing.Remove(snapshotText);
            }
        }
    }
}
public static IAttribute<string> GetString(this IElement element, string property, string defaultValue)
{
    return element[property].Convert(
        parse: p => Parsed.Success(p, p),
        serialize: d => d,
        defaultValue: defaultValue);
}
static void Main(string[] args)
{
    var supportedOptions = new Type[] { typeof(SwiftClassifyCmdOptions), typeof(OCClassifyCmdOptions) };
    var parseResult = CommandLine.Parser.Default.ParseArguments(args, supportedOptions);

    if (parseResult.Tag == ParserResultType.Parsed)
    {
        Parsed<Object> pr = parseResult as Parsed<Object>;
        if (pr.Value.GetType() == typeof(SwiftClassifyCmdOptions))
        {
            SwiftClassifyCmd.TranformBaseType(pr.Value as SwiftClassifyCmdOptions);
        }
        else if (pr.Value.GetType() == typeof(OCClassifyCmdOptions))
        {
            OCClassifyCmd.UpdateDefinition(pr.Value as OCClassifyCmdOptions);
        }
    }
    else if (parseResult.Tag == ParserResultType.NotParsed)
    {
        HelpText.AutoBuild(parseResult);
    }

    Console.ReadLine();
}
// Use this for initialization
/// <summary>
/// When the script starts, the data of the text asset is parsed.
/// </summary>
private void Awake()
{
    string content = asset.text;
    string[] lines = content.Split('\n');

    foreach (var item in lines)
    {
        string[] columns = item.Split(',');
        string[] date = columns[1].Split('-');

        if (date[1].Equals("01") && date[2].Equals("01"))
        {
            Parsed parsedLine = new Parsed()
            {
                date = new Date(short.Parse(date[0])),
                X = double.Parse(columns[2]),
                Z = double.Parse(columns[3]),
                Y = double.Parse(columns[4]),
                VX = double.Parse(columns[5]),
                VY = double.Parse(columns[6]),
                VZ = double.Parse(columns[7])
            };

            parsedPositions.Add(parsedLine.date.Year, parsedLine);
        }
    }
}
public void Verify_can_convert_DateTime_to_different_time_zone_given_TimeZoneInfo()
{
    const string message = @"MSH|^~\&|MACHETELAB||UBERMED||201701131234||ORU^R01|K113|P|
ZHX|20170113|201705221530";

    Parsed<HL7Entity> parsed = Parser.Parse(message);

    var query = parsed.CreateQuery(q =>
        from msh in q.Select<MSHSegment>()
        from zhx in q.Select<DateTimeSegment>()
        select new { MSH = msh, ZHX = zhx });

    var result = parsed.Query(query);

    Value<DateTimeOffset> dt = result.Value.ZHX.TestDateTimeOffsetWithTime;

    TimeSpan offset = new TimeSpan(0, 8, 0, 0);
    DateTimeOffset dateTime = dt.ValueOrDefault();
    DateTimeOffset expected = new DateTimeOffset(dateTime.DateTime, offset);
    DateTimeOffset actual = dt.ToOffset(offset).ValueOrDefault();

    Assert.AreEqual(expected, actual);
}
ParsedCursor(Parsed<TSchema> parsed, int index, TSchema entity)
{
    _parsed = parsed;
    _index = index;

    Value = entity;
    HasValue = true;
}
public void Verify_IsEqualTo_can_evaluate_component_field_correctly()
{
    const string message1 = @"MSH|^~\&|LIFTLAB||UBERMED||201701131234||ORU^R01|K113|P|";
    Parsed<HL7Entity> parsed1 = Parser.Parse(message1);
    var query1 = parsed1.CreateQuery(q =>
        from x in q.Select<MSHSegment>()
        select x);
    var result1 = parsed1.Query(query1);

    const string message2 = @"MSH|^~\&|LIFTLAB||UBERMED||201701131234||ORU^R02|K113|P|";
    Parsed<HL7Entity> parsed2 = Parser.Parse(message2);
    var query2 = parsed2.CreateQuery(q =>
        from x in q.Select<MSHSegment>()
        select x);
    var result2 = parsed2.Query(query2);

    bool actual = result1.Value.MessageType.IsEqualTo(result2.Value.MessageType.Value);

    Assert.AreEqual(false, actual);
}
/// <summary>
/// Parse the parsed input from the beginning, create a new cursor, building the query on the fly
/// </summary>
/// <param name="parsed">The parsed input</param>
/// <param name="query">The query parser</param>
/// <typeparam name="TSchema"></typeparam>
/// <typeparam name="TResult"></typeparam>
/// <returns></returns>
public static Result<Cursor<TSchema>, TResult> Query<TSchema, TResult>(this Parsed<TSchema> parsed, Parser<TSchema, TResult> query)
    where TSchema : Entity
{
    var cursor = parsed.GetCursor();

    return query.Parse(cursor);
}
private static ParserResult<object> OnVerbSetParsed(Parser parser, Parsed<object> parsed, IEnumerable<string> argsToParse, bool containedHelpOrVersion)
{
    return parsed.MapResult(
        (AnalyzeVerbSet _) => parser.ParseArguments<ApproximateCommand, DetectTreesCommand, FilterCommand, SliceCommand, TerrainCommand, TreeHeightCommand>(argsToParse),
        _ => parsed);
}
/// <summary>
/// Parse the parsed input from the beginning, create a new cursor, building the query on the fly
/// </summary>
/// <param name="parsed">The parsed input</param>
/// <param name="buildQuery">The query builder callback</param>
/// <typeparam name="TSchema"></typeparam>
/// <typeparam name="TResult"></typeparam>
/// <returns></returns>
public static Result<Cursor<TSchema>, TResult> Query<TSchema, TResult>(this Parsed<TSchema> parsed, QueryBuilderCallback<TSchema, TResult> buildQuery)
    where TSchema : Entity
{
    var query = Query<TSchema>.Create(buildQuery);

    var cursor = parsed.GetCursor();

    return query.Parse(cursor);
}
public CommandLinePlayer (Story story, bool autoPlay = false, Parsed.Story parsedStory = null, bool keepOpenAfterStoryFinish = false)
{
    this.story = story;
    this.autoPlay = autoPlay;
    this.parsedStory = parsedStory;
    this.keepOpenAfterStoryFinish = keepOpenAfterStoryFinish;

    _debugSourceRanges = new List<DebugSourceRange> ();
}
public Divert (Parsed.Path target, List<Expression> arguments = null)
{
    this.target = target;
    this.arguments = arguments;

    if (arguments != null) {
        AddContent (arguments.Cast<Parsed.Object> ().ToList ());
    }
}
public static void Main(params string[] args)
{
    ParserResult<Options> result = Parser.Default.ParseArguments<Options>(args);

    Parsed<Options> options = result as Parsed<Options>;
    if (options != null)
    {
        Run(options.Value);
    }
}
static Parsed<double> TryParseAngle(string str)
{
    double value;
    if (double.TryParse(str.StripSuffix("deg"), out value))
    {
        return Parsed.Success(value, str);
    }

    return Parsed.Failure<double>(str);
}
public bool Run(Parsed<CommandLineOptions> values)
{
    var result = false;

    if (values != null && CheckArguments(values))
    {
        result = ConvertPgs();
    }

    return result;
}
private void TestParsedEchoVerb<T>(ParserResult<object> result, string input)
    where T : EchoVerb
{
    result.Should().BeOfType(typeof(Parsed<object>), "the args should have been parsed.");

    Parsed<object> parsedResult = (Parsed<object>)result;
    parsedResult.Value.Should().BeOfType(typeof(T), $"the args should have been mapped to the {typeof(T).Name} type.");

    ((T)parsedResult.Value).Input.Should().Be(input);
}
private void ParseHtmlContent(HtmlNode node, Parsed parsed)
{
    if (!node.HasChildNodes)
    {
        return;
    }

    foreach (var subnode in node.ChildNodes)
    {
        ParseHtml(subnode, parsed);
    }
}
public IEnumerable<T> As<T>(string name)
{
    if (!HasArg(name))
    {
        yield break;
    }

    foreach (var value in Parsed.First(x => x.Key.Equals(name, StringComparison.OrdinalIgnoreCase)).Value)
    {
        yield return Convert<T>(value);
    }
}
public void Should_parse_a_series_of_segments_but_not_match()
{
    const string message = @"MSH|^~\&|MACHETELAB||UBERMED||201701131234||ORU^R01|K113|P|";

    Parsed<HL7Entity> parsed = Parser.Parse(message);

    var result = parsed.Query(q =>
        from msh in q.Select<MSHSegment>()
        from evn in q.Select<EVNSegment>()
        select new { MSH = msh, EVN = evn });

    Assert.That(result.HasValue, Is.False);
}
public static MarkdownDocument ParseToMarkdown(this ITextSnapshot snapshot, string file = null)
{
    lock (_syncRoot)
    {
        return CachedDocuments.GetValue(snapshot, key =>
        {
            var text = key.GetText();
            var markdownDocument = ParseToMarkdown(text);

            Parsed?.Invoke(snapshot, new ParsingEventArgs(markdownDocument, file, snapshot));

            return markdownDocument;
        });
    }
}
public override string GenerateCode(uint n = 0)
{
    var sb = new StringBuilder ();

    sb.AppendLine ("<blockquote>", n);

    var parsed = new Parsed (new Tokenized (pq.lines));
    foreach (var exp in parsed)
        sb.AppendLine (exp.GenerateCode (n + 1).TrimEnd());

    sb.AppendLine ("</blockquote>", n);

    return sb.ToString ();
}
public void Test()
{
    var parser = CreateFullParser();
    var result = new Parsed();

    parser.Parse(TestJson, result);

    Assert.AreEqual("Bob", result.StringField);
    Assert.AreEqual(91321, result.IntField);
    Assert.AreEqual(-8, result.NegativeField);
    Assert.IsTrue(result.BoolFieldTrue);
    Assert.IsFalse(result.BoolFieldFalse);
    Assert.AreEqual("\"value\"", result.EscapedQuotes);
    CollectionAssert.AreEquivalent(new List<string> { "Jim", "Joe" }, result.StringArray);
    CollectionAssert.AreEquivalent(new List<int> { 0, 1 }, result.ArrayOfArrays[0]);
    CollectionAssert.AreEquivalent(new List<int> { 2, 3 }, result.ArrayOfArrays[1]);
    Assert.AreEqual(321, result.SubGroup.SubField);
}
public Parsed.Object ResolveFromContext(Parsed.Object context)
{
    if (_components == null || _components.Count == 0) {
        return null;
    }

    // Find base target of path from current context. e.g.
    //   ==> BASE.sub.sub
    var baseTargetObject = ResolveBaseTarget (context);
    if (baseTargetObject == null) {
        return null;
    }

    // Given base of path, resolve final target by working deeper into hierarchy
    //   e.g. ==> base.mid.FINAL
    if (_components.Count > 1) {
        return ResolveTailComponents (baseTargetObject);
    }

    return baseTargetObject;
}
public override void Error(string message, Parsed.Object source, bool isWarning)
{
    ErrorType errorType = isWarning ? ErrorType.Warning : ErrorType.Error;

    var sb = new StringBuilder ();
    if (source is AuthorWarning) {
        sb.Append ("TODO: ");
        errorType = ErrorType.Author;
    } else if (isWarning) {
        sb.Append ("WARNING: ");
    } else {
        sb.Append ("ERROR: ");
    }

    if (source && source.debugMetadata != null && source.debugMetadata.startLineNumber >= 1) {
        if (source.debugMetadata.fileName != null) {
            sb.AppendFormat ("'{0}' ", source.debugMetadata.fileName);
        }
        sb.AppendFormat ("line {0}: ", source.debugMetadata.startLineNumber);
    }

    sb.Append (message);

    message = sb.ToString ();

    if (_errorHandler != null) {
        _errorHandler (message, errorType);
    } else {
        Console.WriteLine (message);
    }

    _hadError = errorType == ErrorType.Error;
    _hadWarning = errorType == ErrorType.Warning;
}
private Parsed ParseDocument(Stream stream)
{
    try
    {
        var document = new HtmlDocument();
        document.Load(stream);

        var parsed = new Parsed();
        ParseHtml(document.DocumentNode, parsed);

        return parsed;
    }
    catch
    {
        return null;
    }
}
static void Main(string[] args)
{
    using (var sr = new StreamReader (@"C:\Users\rod\Documents\projects\Becoming Gracenote\Becoming Gracenote\test.txt"))
    {
        //var tokenized = new Tokenized (sr.ReadToEnd ());
        //foreach (var token in tokenized)
        //    Console.WriteLine (token);

        var parsed = new Parsed (sr.ReadToEnd ());
        var sw = new StreamWriter (@"c:\users\rod\desktop\gracenote_test.html", false, Encoding.UTF8);

        foreach (var expression in parsed)
        {
            //Console.WriteLine (expression.GenerateCode ());
            sw.WriteLine (Encoding.UTF8.GetString (Encoding.UTF8.GetBytes (expression.GenerateCode ())));
        }

        sw.Close ();

        System.Diagnostics.Process.Start (new System.Diagnostics.ProcessStartInfo ("iexplore.exe", @"c:\users\rod\desktop\gracenote_test.html"));
    }

    //var xs = new[] { 2, 4, 6, 8, 10 };
    //var index = new Indexical<int> (xs);
    //while (index)
    //{
    //    Console.WriteLine (index.Value);
    //    index++;
    //}
    //index = index - 2;
    ////Console.WriteLine (index.Value);
    //foreach (var x in index)
    //    Console.WriteLine (x);

    //var xs = Enumerable.Range (1, 100);
    //var index = new Indexical<int> (xs.ToArray());
    //var old = Console.ForegroundColor;
    //while (index)
    //{
    //    Console.WriteLine (index.Value);
    //    Console.WriteLine ((index + 30).IsValid);
    //    index++;
    //}
    //Console.ReadLine ();

    //var text = "\"Today, August \\26th, is the *best* day of the rest of our lives\" \r\n --me";
    //var tokenized = new TokenizedSpans (text);
    ////foreach (var token in tokenized)
    ////    Console.WriteLine (token);
    ////text = "(__hello world__) __&__ \"world\" **sdfsdf**";
    //var parsed = new ParsedTextRun (text);
    //var code = parsed.GenerateCode ();
    //var sw = new StreamWriter (@"c:\users\rod\desktop\gracenote.html");
    //sw.Write (code);
    //sw.Close ();
    //Console.WriteLine (code);
    //Console.ReadLine ();
}
public Divert (Parsed.Object targetContent)
{
    this.targetContent = targetContent;
}
public IncludedFile (Parsed.Story includedStory)
{
    this.includedStory = includedStory;
}
public VariableResolveResult ResolveVariableWithName(string varName, Parsed.Object fromNode)
{
    var result = new VariableResolveResult ();

    if (fromNode == null) {
        fromNode = this;
    }

    var ancestor = fromNode;
    while (ancestor) {
        if (ancestor is FlowBase) {
            var ancestorFlow = (FlowBase)ancestor;

            if (ancestorFlow.arguments != null) {
                foreach (var arg in ancestorFlow.arguments) {
                    if (arg.name.Equals (varName)) {
                        result.found = true;
                        result.isArgument = true;
                        result.ownerFlow = ancestorFlow;
                        return result;
                    }
                }
            }

            if (ancestorFlow.variableDeclarations.ContainsKey (varName)) {
                result.found = true;
                result.ownerFlow = ancestorFlow;
                if (!(ancestorFlow is Story)) {
                    result.isTemporary = true;
                }
                return result;
            }
        }

        ancestor = ancestor.parent;
    }

    result.found = false;
    return result;
}
// See whether "context" contains a child with a given name at a given flow level // Can either be a named knot/stitch (a FlowBase) or a weave point within a Weave (Choice or Gather) // This function also ignores any other object types that are neither FlowBase nor Weave. // Called from both ResolveBase (force deep) and ResolveTail for the individual components. Parsed.Object TryGetChildFromContext(Parsed.Object context, string childName, FlowLevel? minimumLevel, bool forceDeepSearch = false) { // null childLevel means that we don't know where to find it bool ambiguousChildLevel = minimumLevel == null; // Search for WeavePoint within Weave var weaveContext = context as Weave; if ( weaveContext != null && (ambiguousChildLevel || minimumLevel == FlowLevel.WeavePoint)) { return (Parsed.Object) weaveContext.WeavePointNamed (childName); } // Search for content within Flow (either a sub-Flow or a WeavePoint) var flowContext = context as FlowBase; if (flowContext != null) { // When searching within a Knot, allow a deep searches so that // named weave points (choices and gathers) can be found within any stitch // Otherwise, we just search within the immediate object. var shouldDeepSearch = forceDeepSearch || flowContext.flowLevel == FlowLevel.Knot; return flowContext.ContentWithNameAtLevel (childName, minimumLevel, shouldDeepSearch); } return null; }
// Find the final child from path given root, i.e.:
//   root.sub.finalChild
Parsed.Object ResolveTailComponents(Parsed.Object rootTarget)
{
    Parsed.Object foundComponent = rootTarget;

    for (int i = 1; i < _components.Count; ++i) {
        var compName = _components [i];

        FlowLevel minimumExpectedLevel;
        var foundFlow = foundComponent as FlowBase;
        if (foundFlow != null)
            minimumExpectedLevel = (FlowLevel)(foundFlow.flowLevel + 1);
        else
            minimumExpectedLevel = FlowLevel.WeavePoint;

        foundComponent = TryGetChildFromContext (foundComponent, compName, minimumExpectedLevel);
        if (foundComponent == null)
            break;
    }

    return foundComponent;
}
public void PostParse(Parsed.Story parsedStory)
{
    foreach (var plugin in _plugins) {
        plugin.PostParse (parsedStory);
    }
}
private void ParseHtml(HtmlNode node, Parsed parsed)
{
    switch (node.NodeType)
    {
        case HtmlNodeType.Document:
            ParseHtmlContent(node, parsed);
            break;

        case HtmlNodeType.Text:
            string html = ((HtmlTextNode)node).Text;

            switch (node.ParentNode.Name)
            {
                case "script":
                case "style":
                    // Ignore script and style blocks.
                    return;

                case "title":
                    // The title is handled specially.
                    parsed.Title = HtmlEntity.DeEntitize(html.Trim());
                    return;
            }

            // Is it in fact a special closing node output as text?
            if (!HtmlNode.IsOverlappedClosingElement(html))
                parsed.Append(HtmlEntity.DeEntitize(html)).Append(' ');
            break;

        case HtmlNodeType.Element:
            ParseHtmlContent(node, parsed);
            break;
    }
}
protected Expression ExpressionInfixRight(Parsed.Expression left, InfixOperator op)
{
    Whitespace ();

    var right = Parse(() => Expression (op.precedence));
    if (right) {
        // We assume that the character we use for the operator's type is the same
        // as that used internally by e.g. Runtime.Expression.Add, Runtime.Expression.Multiply etc
        var expr = new BinaryExpression (left, right, op.type);
        return expr;
    }

    return null;
}
void WarningInTermination(Parsed.Object terminatingObject, string additionalExplanation = null)
{
    string message = "Apparent loose end exists where the flow runs out. Do you need a '-> DONE' statement, choice or divert?";
    if (additionalExplanation != null) {
        message = message + " " + additionalExplanation;
    }
    if (_firstChildFlow) {
        message = message + " Note that if you intend to enter '" + _firstChildFlow.name + "' next, you need to divert to it explicitly.";
    }

    // additionalExplanation has already been appended above, so pass the message through as-is
    // rather than appending it a second time.
    Warning (message, terminatingObject);
}
private void ParseHtmlContent(HtmlNode node, Parsed parsed)
{
    if (!node.HasChildNodes)
        return;

    foreach (var subnode in node.ChildNodes)
    {
        ParseHtml(subnode, parsed);
    }
}
// Find the root object from the base, i.e. root from:
//   root.sub1.sub2
Parsed.Object ResolveBaseTarget(Parsed.Object originalContext)
{
    var firstComp = firstComponent;

    // Work up the ancestry to find the node that has the named object
    Parsed.Object ancestorContext = originalContext;
    while (ancestorContext != null) {

        // Only allow deep search when searching deeper from original context.
        // Don't allow search upward *then* downward, since that's searching *everywhere*!
        // Allowed examples:
        //  - From an inner gather of a stitch, you should search up to find a knot called 'x'
        //    at the root of a story, but not a stitch called 'x' in that knot.
        //  - However, from within a knot, you should be able to find a gather/choice
        //    anywhere called 'x'
        // (that latter example is quite loose, but we allow it)
        bool deepSearch = ancestorContext == originalContext;

        var foundBase = TryGetChildFromContext (ancestorContext, firstComp, null, deepSearch);
        if (foundBase != null)
            return foundBase;

        ancestorContext = ancestorContext.parent;
    }

    return null;
}
public void PostExport(Parsed.Story parsedStory, Runtime.Story runtimeStory)
{
    foreach (var plugin in _plugins) {
        plugin.PostExport (parsedStory, runtimeStory);
    }
}
public override string GenerateCode(uint n = 0)
{
    var sb = new StringBuilder ();

    var parsed = new Parsed (addr.lines);
    foreach (var exp in parsed)
    {
        sb.AppendLine ("<address>", n);
        sb.AppendLine (exp.GenerateCode (n + 1).TrimEnd ());
        sb.AppendLine ("</address>", n);
    }

    return sb.ToString ();
}