/// <summary>
/// Creates a new <see cref="PreprocessorThrowDirective"/> with the originating
/// <paramref name="source"/> file, the <paramref name="error"/> raised, its
/// <paramref name="arguments"/>, and the <paramref name="column"/>,
/// <paramref name="line"/>, and <paramref name="position"/> at which it was declared.
/// </summary>
/// <param name="source">The <see cref="IOilexerGrammarFile"/> in which the
/// <see cref="PreprocessorThrowDirective"/> was declared.</param>
/// <param name="error">The error identifier the throw directive raises.</param>
/// <param name="arguments">The arguments used for extra information about the error.</param>
/// <param name="column">The column at the current <paramref name="line"/> the
/// <see cref="PreprocessorThrowDirective"/> was declared at.</param>
/// <param name="line">The line index the <see cref="PreprocessorThrowDirective"/> was declared at.</param>
/// <param name="position">The position in the file the <see cref="PreprocessorThrowDirective"/>
/// was declared at.</param>
public PreprocessorThrowDirective(IOilexerGrammarFile source, string error, IOilexerGrammarToken[] arguments, int column, int line, long position)
    : base(column, line, position)
{
    this.source = source;
    this.error = error;
    this.arguments = arguments;
}
/// <summary>
/// Produces a copy of <paramref name="target"/> in which every item present in
/// <paramref name="elementToElementList"/> is replaced by its mapped counterpart,
/// recursing into group items that contain such references.
/// </summary>
/// <param name="source">The grammar file the rule originates from.</param>
/// <param name="target">The production rule whose items are rewritten.</param>
/// <param name="elementToElementList">Maps original items to their replacements.</param>
/// <returns>A new <see cref="ProductionRule"/> at the same file location as <paramref name="target"/>.</returns>
public static IProductionRule ReplaceReferences(IOilexerGrammarFile source, IProductionRule target, IDictionary<IProductionRuleItem, IProductionRuleItem> elementToElementList)
{
    List<IProductionRuleItem> result = new List<IProductionRuleItem>();
    foreach (var item in target)
    {
        // TryGetValue avoids the ContainsKey + indexer double lookup.
        IProductionRuleItem replacement;
        if (elementToElementList.TryGetValue(item, out replacement))
        {
            result.Add(replacement);
        }
        else
        {
            if (item is IProductionRuleGroupItem && ContainsReferences(source, item, elementToElementList))
            {
                // Rebuild the group with its references replaced, preserving its
                // name, repeat options, and declared location.
                var gItem = ((IProductionRuleGroupItem)(item));
                var rItemElements = ReplaceReferences(source, gItem.ToArray(), elementToElementList);
                ProductionRuleGroupItem rItem = new ProductionRuleGroupItem(rItemElements.ToArray(), gItem.Column, gItem.Line, gItem.Position);
                rItem.Name = gItem.Name;
                rItem.RepeatOptions = gItem.RepeatOptions;
                result.Add(rItem);
            }
            else
            {
                // Item neither maps nor contains references; keep it as-is.
                result.Add(item);
            }
        }
    }
    return new ProductionRule(result, target.FileName, target.Column, target.Line, target.Position);
}
/// <summary>
/// Obtains the tokens associated to a grammar description file.
/// </summary>
/// <param name="file">The <see cref="IOilexerGrammarFile"/> which contains the tokens
/// to enumerate.</param>
/// <returns>A <see cref="IEnumerable{T}"/> associated to the tokens of the
/// <paramref name="file"/> provided.</returns>
public static IEnumerable<OilexerGrammarTokenEntry> GetTokens(this IOilexerGrammarFile file)
{
    // OfType filters out every entry that is not a token entry, which is
    // exactly what the original "as ... where != null" query expressed.
    return file.OfType<OilexerGrammarTokenEntry>();
}
/// <summary>
/// Obtains the rules associated to a grammar description file.
/// </summary>
/// <param name="file">The <see cref="IOilexerGrammarFile"/> which contains the rules
/// to enumerate.</param>
/// <returns>A <see cref="IEnumerable{T}"/> associated to the rules of the
/// <paramref name="file"/> provided, ordered by name.</returns>
public static IEnumerable<IOilexerGrammarProductionRuleEntry> GetRules(this IOilexerGrammarFile file)
{
    // Keep only the production-rule entries, sorted by their name.
    return file.OfType<IOilexerGrammarProductionRuleEntry>()
               .OrderBy(ruleEntry => ruleEntry.Name);
}
/// <summary>
/// Primes the <see cref="OilexerGrammarLinkerCore"/> working sets (errors, tokens,
/// rules, and rule templates) from the given <paramref name="file"/>.
/// </summary>
/// <param name="file">The grammar file whose entry enumerators seed the linker.</param>
/// <param name="resolutionAid">Optional assistant consulted during resolution; may be null.</param>
public static void InitLookups(this IOilexerGrammarFile file, _OilexerGrammarResolutionAssistant resolutionAid = null)
{
    OilexerGrammarLinkerCore.resolutionAid = resolutionAid;
    OilexerGrammarLinkerCore.errorEntries = file.GetErrorEnumerator();
    OilexerGrammarLinkerCore.tokenEntries = file.GetTokenEnumerator();
    OilexerGrammarLinkerCore.ruleEntries = file.GetRulesEnumerator();
    OilexerGrammarLinkerCore.ruleTemplEntries = file.GetTemplateRulesEnumerator();
}
/// <summary>
/// Formats a human-readable location string for <paramref name="entry"/>, trimming
/// the file's relative root from the path when the entry lives beneath it.
/// </summary>
/// <param name="entry">The inlined token entry whose location is described.</param>
/// <param name="file">The grammar file supplying the relative root path.</param>
/// <returns>A "name in relative-path at location: line L, column C" string when the
/// entry is under the relative root; otherwise the entry's full file name.</returns>
public static string ToLocationDetails(this InlinedTokenEntry entry, IOilexerGrammarFile file)
{
    // Ordinal, case-insensitive prefix check replaces the previous ToLower()
    // comparison, which was culture-sensitive and allocated two temporary strings.
    if (entry.FileName.StartsWith(file.RelativeRoot, StringComparison.OrdinalIgnoreCase))
    {
        return string.Format(@"{0}.{1} at location: line {2}, column {3}",
            string.IsNullOrEmpty(entry.Name) ? string.Empty : string.Format("{0} in ", entry.Name),
            entry.FileName.Substring(file.RelativeRoot.Length),
            entry.Line,
            entry.Column);
    }
    return entry.FileName;
}
/// <summary>
/// Creates a new <see cref="InlinedTokenEntry"/> with the <paramref name="source"/>
/// provided.
/// </summary>
/// <param name="source">The <see cref="IOilexerGrammarTokenEntry"/> from which the
/// current <see cref="InlinedTokenEntry"/> derives.</param>
/// <param name="file">The <see cref="IOilexerGrammarFile"/> in which the
/// <paramref name="source"/> entry is contained.</param>
public InlinedTokenEntry(IOilexerGrammarTokenEntry source, IOilexerGrammarFile file)
    : base(source.Name, null, source.ScanMode, source.FileName, source.Column, source.Line, source.Position, source.Unhinged, source.LowerPrecedenceTokens, source.ForcedRecognizer)
{
    // Records which original token items map to their inlined counterparts;
    // populated by the Inline call below.
    this.OldNewLookup = new Dictionary<ITokenItem, ITokenItem>();
    this.branches = OilexerGrammarInliningCore.Inline(source.Branches, source, this, this.OldNewLookup);
    this.Source = source;
    this.file = file;
    this.Contextual = source.Contextual;
}
/// <summary>
/// Rewrites every rule in <paramref name="target"/> through the single-rule
/// <see cref="ReplaceReferences(IOilexerGrammarFile, IProductionRule, IDictionary{IProductionRuleItem, IProductionRuleItem})"/>
/// overload and bundles the results into a new series.
/// </summary>
/// <param name="source">The grammar file the rules originate from.</param>
/// <param name="target">The rules whose references are replaced.</param>
/// <param name="elementToElementList">Maps original items to their replacements.</param>
/// <returns>A <see cref="ProductionRuleSeries"/> over the rewritten rules.</returns>
public static IProductionRuleSeries ReplaceReferences(IOilexerGrammarFile source, IProductionRule[] target, IDictionary<IProductionRuleItem, IProductionRuleItem> elementToElementList)
{
    var rewrittenRules = target
        .Select(rule => ReplaceReferences(source, rule, elementToElementList))
        .ToList();
    return new ProductionRuleSeries(rewrittenRules);
}
/// <summary>
/// Determines whether any item of <paramref name="target"/> contains a reference
/// present in <paramref name="elementToElementList"/>.
/// </summary>
/// <param name="source">The grammar file the rule originates from.</param>
/// <param name="target">The production rule to scan.</param>
/// <param name="elementToElementList">Maps original items to their replacements.</param>
/// <returns>true when at least one item (possibly nested) is mapped; false otherwise.</returns>
public static bool ContainsReferences(IOilexerGrammarFile source, IProductionRule target, IDictionary<IProductionRuleItem, IProductionRuleItem> elementToElementList)
{
    // Short-circuits on the first item that carries a mapped reference.
    return target.Any(ruleItem => ContainsReferences(source, ruleItem, elementToElementList));
}
/// <summary>
/// Rewrites <paramref name="target"/> in place: its rules are rebuilt with every
/// mapped reference replaced, then swapped back into the entry.
/// </summary>
/// <param name="source">The grammar file the entry originates from.</param>
/// <param name="target">The production-rule entry to rewrite; mutated.</param>
/// <param name="elementToElementList">Maps original items to their replacements.</param>
public static void ReplaceReferences(IOilexerGrammarFile source, IOilexerGrammarProductionRuleEntry target, IDictionary<IProductionRuleItem, IProductionRuleItem> elementToElementList)
{
    var concreteEntry = (OilexerGrammarProductionRuleEntry)target;
    var rewrittenSeries = ReplaceReferences(source, ((IProductionRuleSeries)concreteEntry).ToArray(), elementToElementList);
    // Replace the entry's contents wholesale with the rewritten rules.
    concreteEntry.Clear();
    foreach (var rewrittenRule in rewrittenSeries)
    {
        concreteEntry.Add(rewrittenRule);
    }
}
/// <summary>
/// Determines whether <paramref name="target"/> itself, or (when it is a group)
/// any nested item, appears in <paramref name="elementToElementList"/>.
/// </summary>
/// <param name="source">The grammar file the item originates from.</param>
/// <param name="target">The production-rule item to inspect.</param>
/// <param name="elementToElementList">Maps original items to their replacements.</param>
/// <returns>true when the item or one of its descendants is mapped.</returns>
public static bool ContainsReferences(IOilexerGrammarFile source, IProductionRuleItem target, IDictionary<IProductionRuleItem, IProductionRuleItem> elementToElementList)
{
    // A direct hit in the substitution map means the item itself is a reference.
    if (elementToElementList.ContainsKey(target))
        return true;
    // Group items are rule series; recurse to find nested references.
    var groupItem = target as IProductionRuleGroupItem;
    if (groupItem != null)
        return ContainsReferences(source, (IProductionRuleSeries)groupItem, elementToElementList);
    return false;
}
/// <summary>
/// Determines whether any production-rule entry within <paramref name="target"/>
/// still requires expansion.
/// </summary>
/// <param name="target">The grammar file whose entries are inspected.</param>
/// <returns>true when at least one rule entry reports it needs expansion.</returns>
internal static bool NeedsExpansion(this IOilexerGrammarFile target)
{
    // Filter to rule entries and short-circuit on the first that needs work.
    return target.OfType<IOilexerGrammarProductionRuleEntry>()
                 .Any(ruleEntry => ruleEntry.NeedsExpansion());
}
/// <summary>
/// Builds this token's NFA from its expression branches, unioning each branch's
/// state machine into a single root state.
/// </summary>
/// <param name="source">The grammar file used when extracting capture structure.</param>
public void BuildNFA(IOilexerGrammarFile source)
{
    // End-of-file token entries carry no expressions; nothing to build.
    if (this.Source is IOilexerGrammarTokenEofEntry)
    {
        return;
    }
    this.captureType = this.DetermineKind();
    Dictionary<ITokenSource, ICaptureTokenStructuralItem> replacements = new Dictionary<ITokenSource, ICaptureTokenStructuralItem>();
    if (captureType.Value != RegularCaptureType.Recognizer)
    {
        // Capturing/transducing tokens need the structural breakdown so sources
        // can be redirected to their structural replacements.
        this.structure = TokenStructuralExtractionCore.BuildStructureFor(this, source);
        replacements = TokenStructuralExtractionCore.ObtainReplacements(this.structure);
    }
    else
    {
        // Recognizers use an empty replacement map.
        replacements = new Dictionary<ITokenSource, ICaptureTokenStructuralItem>();
    }
    this.nfaState = new RegularLanguageNFARootState(this);
    bool first = true;
    foreach (var expression in this.Branches.Cast<InlinedTokenExpression>())
    {
        expression.BuildState(replacements);
        var expressionNFA = expression.NFAState;
        if (first)
        {
            // Capture the first branch's edge flag before the union, then undo
            // any edge status the union imposed that the branch did not have.
            bool isEdge = expressionNFA.IsEdge;
            first = false;
            nfaState.Union(expression.NFAState);
            if (nfaState.IsEdge && !isEdge)
            {
                nfaState.IsEdge = isEdge;
            }
        }
        else
        {
            nfaState.Union(expression.NFAState);
        }
    }
    if (ForcedRecognizer)
    {
        // Forced recognizers discard per-state source tracking across the
        // flattened state machine.
        List<RegularLanguageNFAState> flatline = new List<RegularLanguageNFAState>();
        RegularLanguageNFAState.FlatlineState(this.nfaState, flatline);
        foreach (var state in flatline)
        {
            state.IgnoreSources = true;
        }
    }
}
/// <summary>
/// Replaces every token entry in <paramref name="file"/> with its inlined
/// counterpart, rewriting references so later phases see only inlined tokens.
/// </summary>
/// <param name="file">The grammar file whose token entries are inlined; mutated.</param>
/// <returns>true on success; false when <paramref name="file"/> is not an
/// <see cref="OilexerGrammarEntryCollection"/> and cannot be edited in place.</returns>
public static bool InlineTokens(IOilexerGrammarFile file)
{
    // Snapshot the current token entries; the file collection is mutated below.
    IOilexerGrammarTokenEntry[] originals = OilexerGrammarLinkerCore.tokenEntries.ToArray();
    InlinedTokenEntry[] result = new InlinedTokenEntry[originals.Length];
    Dictionary<IOilexerGrammarTokenEntry, InlinedTokenEntry> originalNewLookup = new Dictionary<IOilexerGrammarTokenEntry, InlinedTokenEntry>();
    // Pass 1: build the inlined counterpart of every token entry.
    for (int i = 0; i < result.Length; i++)
    {
        result[i] = OilexerGrammarInliningCore.Inline(originals[i], file);
    }
    // Pass 2: map each original entry to its inlined replacement.
    for (int i = 0; i < result.Length; i++)
    {
        originalNewLookup.Add(originals[i], result[i]);
    }
    // Pass 3: re-resolve lower-precedence links against the inlined entries.
    for (int i = 0; i < result.Length; i++)
    {
        result[i].ResolveLowerPrecedencesAgain(originalNewLookup);
    }
    OilexerGrammarEntryCollection gdec = file as OilexerGrammarEntryCollection;
    if (gdec == null)
    {
        return false;
    }
    for (int i = 0; i < originals.Length; i++)
    {
        // Insert the inlined entry at the original's index, then remove the original.
        gdec.Insert(gdec.IndexOf(originals[i]), result[i]);
        /* *
         * The rule building phase will require direct references
         * to the tokens, because it uses them in a context lookup.
         * *
         * Time to replace every reference for the token with
         * the inlined version.
         * *
         * This includes literal reference members for the same reason.
         * */
        ReplaceTokenReference(originals[i], result[i]);
        file.Remove(originals[i]);
    }
    return true;
}
/// <summary>
/// Builds the capture-structure item for a single token <paramref name="item"/>,
/// classifying how its matched text is surfaced (character, string, enum value,
/// flag, or pass-through).
/// </summary>
/// <param name="entry">The inlined token entry being analyzed.</param>
/// <param name="expressionSeries">The series containing <paramref name="expression"/>.</param>
/// <param name="expression">The expression containing <paramref name="item"/>.</param>
/// <param name="item">The token item to build structure for.</param>
/// <param name="source">The grammar file the entry originates from.</param>
/// <returns>The structural item for <paramref name="item"/>; may be null for
/// item kinds not handled here.</returns>
private static ICaptureTokenStructuralItem BuildStructureFor(InlinedTokenEntry entry, ITokenExpressionSeries expressionSeries, ITokenExpression expression, ITokenItem item, IOilexerGrammarFile source)
{
    ICaptureTokenStructuralItem result = null;
    if (item is ITokenGroupItem)
    {
        var tokenGroup = ((ITokenGroupItem)(item));
        result = BuildStructureFor(entry, tokenGroup, source);
        if (result.ResultType == ResultedDataType.None && !string.IsNullOrEmpty(item.Name))
        {
            // A named group over a single char item captures a character when
            // neither the item nor the group repeats; every other shape is a string.
            if (tokenGroup.Count == 1 && tokenGroup[0].Count == 1)
            {
                var singleItem = tokenGroup[0][0];
                if (singleItem is ILiteralCharTokenItem || singleItem is ILiteralCharReferenceTokenItem)
                {
                    if (singleItem.RepeatOptions == ScannableEntryItemRepeatInfo.None && tokenGroup.RepeatOptions == ScannableEntryItemRepeatInfo.None)
                    {
                        result.ResultType = ResultedDataType.Character;
                    }
                    else
                    {
                        result.ResultType = ResultedDataType.String;
                    }
                }
                else
                {
                    result.ResultType = ResultedDataType.String;
                }
            }
            else
            {
                result.ResultType = ResultedDataType.String;
            }
        }
        // The group is optional when its repeat options allow zero occurrences
        // (?, *, or a specific range whose minimum is absent or zero).
        bool groupOptional = tokenGroup != null &&
            ((tokenGroup.RepeatOptions.Options & ScannableEntryItemRepeatOptions.ZeroOrMore) == ScannableEntryItemRepeatOptions.ZeroOrMore ||
             (tokenGroup.RepeatOptions.Options & ScannableEntryItemRepeatOptions.ZeroOrOne) == ScannableEntryItemRepeatOptions.ZeroOrOne ||
             (((tokenGroup.RepeatOptions.Options & ScannableEntryItemRepeatOptions.Specific) == ScannableEntryItemRepeatOptions.Specific) &&
              (!tokenGroup.RepeatOptions.Min.HasValue || tokenGroup.RepeatOptions.Min.Value == 0)));
        if (groupOptional)
        {
            result.GroupOptional = groupOptional;
        }
        if (item.Name.IsEmptyOrNull())
        {
            // Unnamed groups contribute their children directly.
            result.ResultType = ResultedDataType.PassThrough;
            /*
             * if ((item.RepeatOptions.Options & ScannableEntryItemRepeatOptions.ZeroOrMore) == ScannableEntryItemRepeatOptions.ZeroOrMore ||
             *     (item.RepeatOptions.Options & ScannableEntryItemRepeatOptions.ZeroOrOne) == ScannableEntryItemRepeatOptions.ZeroOrOne ||
             *     ((item.RepeatOptions.Options & ScannableEntryItemRepeatOptions.Specific) == ScannableEntryItemRepeatOptions.Specific &&
             *     (!item.RepeatOptions.Min.HasValue || item.RepeatOptions.Min.Value == 0)))
             *     foreach (var element in ((ICaptureTokenStructure)result).Values)
             *         element.Optional = true;*/
        }
        ((ControlledCollection<ITokenSource>)(result.Sources)).baseList.Add(item);
    }
    else if (item is ILiteralTokenItem)
    {
        result = new CaptureTokenLiteralStructuralItem((ILiteralTokenItem)item);
        // Transducers surface literals as enumeration values; contextful
        // transducers surface them as combinable flags.
        if (entry.CaptureKind == RegularCaptureType.Transducer)
        {
            result.ResultType = ResultedDataType.EnumerationItem;
        }
        else if (entry.CaptureKind == RegularCaptureType.ContextfulTransducer)
        {
            result.ResultType = ResultedDataType.FlagEnumerationItem;
        }
    }
    else if (item is ICharRangeTokenItem)
    {
        result = new CaptureTokenCharRangeStructuralItem((ICharRangeTokenItem)item);
        if (entry.CaptureKind == RegularCaptureType.Transducer)
        {
            result.ResultType = ResultedDataType.EnumerationItem;
        }
        else if (entry.CaptureKind == RegularCaptureType.ContextfulTransducer)
        {
            result.ResultType = ResultedDataType.FlagEnumerationItem;
        }
    }
    // NOTE(review): Deform's exact semantics are not visible in this file —
    // presumably it adjusts the result for the item's repeat options; confirm.
    Deform(item, result, expression);
    return (result);
}
/// <summary>
/// Determines the capture kind implied by a single token <paramref name="target"/>
/// expression: transducer for singular named literals, capturer when named
/// sub-elements exist, recognizer otherwise.
/// </summary>
/// <param name="entry">The token entry the expression belongs to.</param>
/// <param name="target">The expression to classify.</param>
/// <param name="file">The grammar file, passed through to reference lookups.</param>
/// <returns>The <see cref="RegularCaptureType"/> for this expression.</returns>
internal static RegularCaptureType DetermineKind(IOilexerGrammarTokenEntry entry, ITokenExpression target, IOilexerGrammarFile file)
{
    if (target.Count == 1)
    {
        /* *
         * If the expression consists of a single named literal, then
         * for the sake of this one expression, the best result is a
         * transducer type state machine.
         * *
         * When mixed with a set of singular named literals, the resulted
         * state machine is a transducer which reports back the exact
         * phrase matched. Since the strings are all known, they can be
         * boiled down to a finite set of numbers.
         * */
        var first = target[0];
        if (first is ILiteralTokenItem && !string.IsNullOrEmpty(first.Name) && first.RepeatOptions == ScannableEntryItemRepeatInfo.None)
        {
            return (RegularCaptureType.Transducer);
        }
        if (first is ITokenReferenceTokenItem)
        {
            // Classify by the referenced token's own kind.
            var refItem = first as ITokenReferenceTokenItem;
            var refKind = DetermineKind(refItem.Reference, file);
            if (refKind == RegularCaptureType.Recognizer)
            {
                // Recognizer references fall through to the name-based check below.
                goto other;
            }
            else if (refKind == RegularCaptureType.Capturer)
            {
                return (RegularCaptureType.Capturer);
            }
            return (RegularCaptureType.Transducer);
        }
        else if (first is ILiteralReferenceTokenItem)
        {
            // A reference to a singular named literal behaves like the literal itself.
            var lFirst = (first as ILiteralReferenceTokenItem).Literal;
            if (lFirst is ILiteralTokenItem && !string.IsNullOrEmpty(lFirst.Name) && lFirst.RepeatOptions == ScannableEntryItemRepeatInfo.None)
            {
                return (RegularCaptureType.Transducer);
            }
        }
        else if (first is ITokenGroupItem)
        {
            var gFirst = first as ITokenGroupItem;
            var gRepOpt = DetermineKind(entry, gFirst, file);
            // A repeated transducer group cannot report a single phrase anymore,
            // so it degrades to a recognizer.
            if (gRepOpt == RegularCaptureType.Transducer && gFirst.RepeatOptions != ScannableEntryItemRepeatInfo.None)
            {
                return (RegularCaptureType.Recognizer);
            }
            return (gRepOpt);
        }
    }
other:
    // Any named element anywhere in the expression forces capturing;
    // otherwise simple recognition suffices.
    if (AnyContainAName(target))
    {
        return (RegularCaptureType.Capturer);
    }
    return (RegularCaptureType.Recognizer);
}
/// <summary>
/// Builds the aggregate capture structure of a token <paramref name="expression"/>
/// by concatenating the structural item of each of its elements.
/// </summary>
/// <param name="entry">The inlined token entry being analyzed.</param>
/// <param name="expressionSeries">The series containing <paramref name="expression"/>.</param>
/// <param name="expression">The expression whose items are folded together.</param>
/// <param name="source">The grammar file the entry originates from.</param>
/// <returns>The concatenated structure of every item in the expression.</returns>
private static ICaptureTokenStructure BuildStructureFor(InlinedTokenEntry entry, ITokenExpressionSeries expressionSeries, ITokenExpression expression, IOilexerGrammarFile source)
{
    ICaptureTokenStructure merged = new CaptureTokenStructure();
    foreach (var element in expression)
    {
        var elementStructure = BuildStructureFor(entry, expressionSeries, expression, element, source);
        merged = merged.Concat(elementStructure);
    }
    return merged;
}
/// <summary>
/// Builds the aggregate capture structure of a production-rule
/// <paramref name="expression"/> by concatenating each item's structural item.
/// </summary>
/// <param name="entry">The rule entry being analyzed.</param>
/// <param name="expressionSeries">The series containing <paramref name="expression"/>.</param>
/// <param name="expression">The rule expression whose items are folded together.</param>
/// <param name="source">The grammar file the entry originates from.</param>
/// <returns>The concatenated structure of every item in the expression.</returns>
private static IProductionRuleCaptureStructure BuildStructureFor(OilexerGrammarProductionRuleEntry entry, IProductionRuleSeries expressionSeries, IProductionRule expression, IOilexerGrammarFile source)
{
    IProductionRuleCaptureStructure merged = new ProductionRuleCaptureStructure(entry);
    foreach (var element in expression)
    {
        var elementStructure = BuildStructureFor(entry, expressionSeries, expression, element, source);
        merged = merged.Concat(elementStructure);
    }
    return merged;
}
/// <summary>
/// Determines a token entry's capture kind by classifying its full branch series.
/// </summary>
/// <param name="target">The token entry to classify.</param>
/// <param name="file">The grammar file, passed through to the series overload.</param>
/// <returns>The <see cref="RegularCaptureType"/> of the entry's branches.</returns>
private static RegularCaptureType DetermineKind(IOilexerGrammarTokenEntry target, IOilexerGrammarFile file)
{
    // Convenience wrapper over the expression-series overload.
    return DetermineKind(target, target.Branches, file);
}
/// <summary>
/// Builds the capture structure for an entire production-rule entry, treating
/// the entry itself as the expression series to analyze.
/// </summary>
/// <param name="entry">The rule entry to analyze.</param>
/// <param name="source">The grammar file the entry originates from.</param>
/// <returns>The entry's aggregate capture structure.</returns>
internal static IProductionRuleCaptureStructure BuildStructureFor(OilexerGrammarProductionRuleEntry entry, IOilexerGrammarFile source)
{
    // The entry doubles as its own series; delegate to the series overload.
    return BuildStructureFor(entry, entry, source);
}
/// <summary>
/// Builds the merged capture structure of a rule <paramref name="expressionSeries"/>:
/// unions the per-expression structures and records each distinct capture-name
/// variant the series can produce.
/// </summary>
/// <param name="entry">The rule entry being analyzed.</param>
/// <param name="expressionSeries">The expressions to merge; may be the entry itself.</param>
/// <param name="source">The grammar file supplying the rule prefix/suffix options.</param>
/// <returns>The unioned structure, annotated with its variants and resulting type name.</returns>
private static IProductionRuleCaptureStructure BuildStructureFor(OilexerGrammarProductionRuleEntry entry, IProductionRuleSeries expressionSeries, IOilexerGrammarFile source)
{
    IProductionRuleCaptureStructure result = null;
    // The distinct sets of capture keys produced across the series' expressions.
    HashList<HashList<string>> currentResultVariants = new HashList<HashList<string>>();
    foreach (var expression in expressionSeries)
    {
        var current = BuildStructureFor(entry, expressionSeries, expression, source);
        if (result == null)
        {
            result = current;
        }
        else
        {
            result = result.Union(current);
        }
        // Record this expression's key set unless an order-identical set exists.
        var dataSet = new HashList<string>(current.Keys);
        if (!currentResultVariants.Any(k => k.SequenceEqual(dataSet)))
        {
            currentResultVariants.Add(dataSet);
        }
    }
    // NOTE(review): result stays null when expressionSeries is empty — presumably
    // a parsed rule always has at least one expression; confirm upstream.
    foreach (var variant in currentResultVariants)
    {
        result.Structures.Add(variant);
    }
    // When analyzing the whole entry, register the entry itself as a source.
    if (expressionSeries == entry)
    {
        ((ControlledCollection<IProductionRuleSource>)result.Sources).baseList.Add(entry);
    }
    // Generated type name follows the file's configured rule prefix/suffix.
    result.ResultedTypeName = string.Format("{0}{1}{2}", source.Options.RulePrefix, entry.Name, source.Options.RuleSuffix);
    return (result);
}
/// <summary>
/// Determines the capture kind of an entire expression <paramref name="series"/>
/// by merging the kinds of its individual expressions; a transducer additionally
/// becomes contextful when any production rule references this token as a literal.
/// </summary>
/// <param name="entry">The token entry the <paramref name="series"/> belongs to.</param>
/// <param name="series">The expressions to classify and merge.</param>
/// <param name="file">The grammar file scanned for rule-side literal references.</param>
/// <returns>The merged <see cref="RegularCaptureType"/> for the series.</returns>
internal static RegularCaptureType DetermineKind(IOilexerGrammarTokenEntry entry, ITokenExpressionSeries series, IOilexerGrammarFile file)
{
    RegularCaptureType result = RegularCaptureType.Undecided;
    foreach (var expression in series)
    {
        var currentKind = DetermineKind(entry, expression, file);
        switch (currentKind)
        {
            case RegularCaptureType.Recognizer:
                // Recognizer only takes effect when nothing stronger was seen.
                if (result == RegularCaptureType.Undecided)
                {
                    result = currentKind;
                }
                break;
            case RegularCaptureType.Capturer:
                // Capturer dominates every other kind.
                result = currentKind;
                break;
            case RegularCaptureType.Transducer:
                if (result == RegularCaptureType.Undecided)
                {
                    result = RegularCaptureType.Transducer;
                }
                else if (result == RegularCaptureType.Recognizer)
                {
                    // Mixed recognizer/transducer branches force capturing.
                    result = RegularCaptureType.Capturer;
                }
                break;
        }
    }
    if (result == RegularCaptureType.Undecided)
    {
        result = RegularCaptureType.Recognizer;
    }
    if (result == RegularCaptureType.Transducer)
    {
        // Any() short-circuits on the first match instead of the old
        // Count() > 0, which enumerated the entire query.
        if ((from gdEntry in file
             where gdEntry is IOilexerGrammarProductionRuleEntry
             let ruleEntry = (IOilexerGrammarProductionRuleEntry)gdEntry
             from productionRuleItem in GetProductionRuleSeriesItems(ruleEntry)
             where productionRuleItem is ILiteralReferenceProductionRuleItem
             let literalItem = (ILiteralReferenceProductionRuleItem)productionRuleItem
             where literalItem.Source == entry
             select literalItem).Any())
        {
            return RegularCaptureType.ContextfulTransducer;
        }
    }
    return result;
}
/// <summary>
/// Builds the capture-structure item for a single production-rule
/// <paramref name="item"/>, classifying how its matched content is surfaced
/// (character, string, enum value, flag, import, rule capture, or pass-through).
/// </summary>
/// <param name="entry">The rule entry being analyzed.</param>
/// <param name="expressionSeries">The series containing <paramref name="expression"/>.</param>
/// <param name="expression">The rule expression containing <paramref name="item"/>.</param>
/// <param name="item">The rule item to build structure for.</param>
/// <param name="source">The grammar file the entry originates from.</param>
/// <returns>The structural item for <paramref name="item"/>; may be null for
/// item kinds not handled here.</returns>
private static IProductionRuleCaptureStructuralItem BuildStructureFor(OilexerGrammarProductionRuleEntry entry, IProductionRuleSeries expressionSeries, IProductionRule expression, IProductionRuleItem item, IOilexerGrammarFile source)
{
    IProductionRuleCaptureStructuralItem result = null;
    if (item is IProductionRuleGroupItem)
    {
        var ruleGroup = ((IProductionRuleGroupItem)(item));
        // Push the group's name down onto equivalent unnamed children so the
        // nested structure captures under the group's name.
        if (!ruleGroup.Name.IsEmptyOrNull() && AllAreUnnamedEquivalents(ruleGroup))
        {
            SetAllNames(ruleGroup, ruleGroup.Name);
        }
        result = BuildStructureFor(entry, ruleGroup, source);
        if (result.ResultType == ResultedDataType.None && !item.Name.IsEmptyOrNull())
        {
            // A named group over a single char-reference captures a character
            // when neither level repeats; every other shape captures a string.
            if (ruleGroup.Count == 1 && ruleGroup[0].Count == 1)
            {
                var singleItem = ruleGroup[0][0];
                if (singleItem is ILiteralCharReferenceProductionRuleItem)
                {
                    if (singleItem.RepeatOptions == ScannableEntryItemRepeatInfo.None && ruleGroup.RepeatOptions == ScannableEntryItemRepeatInfo.None)
                    {
                        result.ResultType = ResultedDataType.Character;
                    }
                    else
                    {
                        result.ResultType = ResultedDataType.String;
                    }
                }
                else
                {
                    result.ResultType = ResultedDataType.String;
                }
            }
            else
            {
                result.ResultType = ResultedDataType.String;
            }
        }
        else if (item.Name.IsEmptyOrNull())
        {
            // Unnamed groups contribute their children directly.
            result.ResultType = ResultedDataType.PassThrough;
        }
        ((ControlledCollection<IProductionRuleSource>)(result.Sources)).baseList.Add(item);
    }
    else if (item is ILiteralReferenceProductionRuleItem)
    {
        var literalRefItem = (ILiteralReferenceProductionRuleItem)item;
        var inlinedRef = ((InlinedTokenEntry)(literalRefItem.Source));
        result = new ProductionRuleLiteralTokenItemReferenceStructuralItem(literalRefItem.Source, literalRefItem, entry);
        // Transducer tokens surface literals as enumeration values; contextful
        // transducers surface them as combinable flags.
        if (inlinedRef.CaptureKind == RegularCaptureType.Transducer)
        {
            result.ResultType = ResultedDataType.EnumerationItem;
        }
        else if (inlinedRef.CaptureKind == RegularCaptureType.ContextfulTransducer)
        {
            result.ResultType = ResultedDataType.FlagEnumerationItem;
        }
    }
    else if (item is ITokenReferenceProductionRuleItem)
    {
        var tokenRefItem = ((ITokenReferenceProductionRuleItem)(item));
        var inlinedRef = ((InlinedTokenEntry)(tokenRefItem.Reference));
        result = new ProductionRuleTokenReferenceStructuralItem((ITokenReferenceProductionRuleItem)item, entry);
        // Whole-token references become enumerations for transducers and
        // imported types otherwise.
        if (inlinedRef.CaptureKind == RegularCaptureType.Transducer)
        {
            result.ResultType = ResultedDataType.Enumeration;
        }
        else
        {
            result.ResultType = ResultedDataType.ImportType;
        }
    }
    else if (item is IRuleReferenceProductionRuleItem)
    {
        var ruleItem = (IRuleReferenceProductionRuleItem)item;
        result = new ProductionRuleCaptureReferenceStructuralItem(ruleItem, entry);
    }
    // NOTE(review): Deform's exact semantics are not visible in this file —
    // presumably it adjusts the result for the item's repeat options; confirm.
    Deform(item, result, expression);
    return (result);
}
/// <summary>
/// Builds the capture structure for an entire token entry from its branch series.
/// </summary>
/// <param name="entry">The inlined token entry to analyze.</param>
/// <param name="source">The grammar file the entry originates from.</param>
/// <returns>The entry's aggregate capture structure.</returns>
internal static ICaptureTokenStructure BuildStructureFor(InlinedTokenEntry entry, IOilexerGrammarFile source)
{
    // Delegate to the series overload using the entry's branches.
    return BuildStructureFor(entry, entry.Branches, source);
}