private List<IToken> TokenizeBody(MethodDefinition method)
{
    var lines = new List<IToken>();
    IToken parametersList = TokenizeParameters(method);
    if (method.IsConstructor)
    {
        // Constructor body: call the native "_new" function and wrap the returned pointer.
        var nativePtrParts = new List<IToken>
        {
            new WordToken("IntPtr"),
            new WordToken("native"),
            new WordToken("="),
            new WordToken(method.ClonedFrom.Parent.Name + "_new("),
            parametersList,
            new StringToken(");")
        };
        var nativePtr = new LineToken(nativePtrParts);
        lines.Add(nativePtr);
        lines.Add(new LineToken("InitializeUserOwned(native);"));
    }
    else
    {
        // Regular method body: forward the call to the native "Class_Method" function.
        var lineParts = new List<IToken>
        {
            new WordToken(method.ClonedFrom.Parent.Name + "_" + method.ClonedFrom.Name + "("),
            parametersList,
            new StringToken(");")
        };
        lines.Add(new LineToken(lineParts));
    }
    return lines;
}
private IToken Tokenize(MethodDefinition method)
{
    if (method.IsExcluded)
    {
        return NullToken.Instance;
    }

    LineToken header = TokenizeHeader(method);
    IToken methodToken;
    if (method.IsExtern)
    {
        // P/Invoke declaration: just the attribute line plus the header, no body.
        methodToken = new LinesToken(new LineToken[]
        {
            new LineToken("[DllImport(Native.Dll, CallingConvention = Native.Conv)]"),
            header
        });
    }
    else
    {
        IList<IToken> bodyTokens = TokenizeBody(method);
        methodToken = new BlockToken(header, bodyTokens);
    }
    return methodToken;
}
public Line(BinaryReader br)
{
    int remaining = 1;
    while (remaining > 0)
    {
        var raw = br.ReadUInt16();
        // The top two bits of the word select the opcode; the remaining bits carry the operand.
        var opcode = (RleOpcode)(byte)(raw >> 14);
        remaining--;
        switch (opcode)
        {
            case RleOpcode.Packets:
                Tokens = new LineToken[raw];
                for (int i = 0; i < raw; i++)
                {
                    Tokens[i] = new LineToken(br);
                }
                break;
            case RleOpcode.StoreLowByteInLastPixel:
                LastPixel = (byte)(0xff & raw);
                remaining++;
                break;
            case RleOpcode.LineSkipCount:
                // The skip count is stored negated, so negate the raw word to recover it.
                Skip = (ushort)-raw;
                remaining++;
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }
    }
}
public SyntaxNode Complete(LineToken token, ref int i)
{
    parentStack.Pop ();
    parentStack.Push (new CompeteQuoteExpression (this));
    return token.StartNode (ref i);
}
public void Write(LineToken token, string expectedText)
{
    var writer = new StringWriter();
    token.Write(writer);
    Assert.That(writer.ToString(), Is.EqualTo(expectedText));
}
private IToken Tokenize(ClassDefinition @class)
{
    var abstractSpecifier = @class.IsAbstract ? "abstract " : null;
    var baseClassSpecifier = @class.BaseClass != null ? (" : " + @class.BaseClass.ManagedName) : null;
    var header = new LineToken($"public {abstractSpecifier}class {@class.Name}{baseClassSpecifier}");
    IList<IToken> children = GetClassMembers(@class);
    return new BlockToken(header, children);
}
public override SyntaxNode Parse(LineToken token, ref int i)
{
    if (token is BlockQuote)
    {
        lines.Add (StripQuoteCharacter (token));
        return this;
    }
    return Complete (token, ref i);
}
private void WriteLine(LineToken line)
{
    WriteIndent();
    IToken precedingToken = null;
    foreach (IToken element in line.Elements)
    {
        WriteToken(element, precedingToken);
        precedingToken = element;
    }
    _writer.WriteLine();
}
public void Two_token_instances_are_equal_if_all_properties_are_equal(
    LineTokenKind kind,
    [CombinatorialValues(null, "", "\t", "some value")] string? value,
    [CombinatorialValues(1, 5)] int lineNumber)
{
    var token1 = new LineToken(kind, value, lineNumber);
    var token2 = new LineToken(kind, value, lineNumber);

    Assert.Equal(token1.GetHashCode(), token2.GetHashCode());
    Assert.Equal(token1, token2);
    Assert.Equal(token2, token1);
    Assert.True(token1.Equals(token2));
    Assert.True(token1.Equals((object)token2));
    Assert.True(token2.Equals(token1));
    Assert.True(token2.Equals((object)token1));
}
public async Task<IActionResult> OnGetAsync(int? id)
{
    Employee = HttpContext.Session.GetLogin(_context.Employee);
    if (id == null)
    {
        return NotFound();
    }

    LineToken = await _context.LineToken
        .Include(l => l.Company)
        .FirstOrDefaultAsync(m => m.LineTokenID == id);
    if (LineToken == null)
    {
        return NotFound();
    }
    return Page();
}
public async Task<IActionResult> OnPostAsync(int? id)
{
    Employee = HttpContext.Session.GetLogin(_context.Employee);
    if (id == null)
    {
        return NotFound();
    }

    LineToken = await _context.LineToken.FindAsync(id);
    if (LineToken != null)
    {
        _context.LineToken.Remove(LineToken);
        await _context.SaveChangesAsync();
    }
    return RedirectToPage("./Index");
}
private IToken Tokenize(PropertyDefinition property)
{
    if (property.Setter == null)
    {
        // Read-only property: emit an expression-bodied getter that calls the native accessor.
        var expressionBody = new ExpressionBodyToken(new LineToken(new IToken[]
        {
            new WordToken($"public {property.Getter.ReturnType} {property.Name} =>"),
            new WordToken($"{property.Getter.Parent.Name}_{property.Getter.Name}(Native);")
        }));
        return expressionBody;
    }

    var header = new LineToken($"public {property.Getter.ReturnType} {property.Name}");
    var propertyToken = new BlockToken(header, new IToken[]
    {
        new LineToken($"get => {property.Getter.Parent.Name}_{property.Getter.Name}(Native);"),
        new LineToken($"set => {property.Setter.Parent.Name}_{property.Setter.Name}(Native, value);")
    });
    return propertyToken;
}
private BlockToken TokenizeNamespace(NamespaceTreeNode node)
{
    string name;
    if (node.Children.Count == 1 && node.Nodes.Count == 0)
    {
        // Collapse a namespace that only contains a single child namespace into a dotted name.
        var child = node.Children[0];
        if (node.Namespace.IsGlobal)
        {
            name = child.Namespace.Name;
        }
        else
        {
            name = $"{node.Namespace.Name}.{child.Namespace.Name}";
        }
        node = child;
    }
    else
    {
        name = node.Namespace.Name;
    }

    LineToken header = new LineToken($"namespace {name}");
    var children = new List<IToken>();
    foreach (ModelNodeDefinition childNode in node.Nodes)
    {
        IToken childToken = TokenizeNode(childNode);
        children.Add(childToken);
    }
    foreach (var childNamespace in node.Children)
    {
        IToken childNamespaceToken = TokenizeNamespace(childNamespace);
        children.Add(childNamespaceToken);
    }
    BlockToken @namespace = new BlockToken(header, children);
    return @namespace;
}
public override SyntaxNode Parse(LineToken token, ref int i)
{
    if (token is Paragraph)
    {
        ps.Push (token as Paragraph);
        return this;
    }

    parentStack.Pop ();
    var header_factory = (token as HorizontalRule).GetHeaderFactory (ps.Peek ());
    if (header_factory != null)
    {
        if (ps.Count > 1)
        {
            var top = ps.Pop ();
            parentStack.Push (new CompleteParagraph (this));
            return header_factory (top);
        }
        return header_factory (ps.Peek ());
    }

    parentStack.Push (new CompleteParagraph (this));
    return parentStack.Top.Parse (token, ref i);
}
public IToken Tokenize(ModelNodeDefinition node)
{
    var @enum = (EnumDefinition)node;
    var definition = new LineToken($"public enum {@enum.Name}");
    IToken headerToken;
    if (@enum.IsFlags())
    {
        // Flags enums get a [Flags] attribute line above the declaration.
        headerToken = new LinesToken(new LineToken[]
        {
            new LineToken("[Flags]"),
            definition
        });
    }
    else
    {
        headerToken = definition;
    }

    var enumerators = new List<IToken>();
    var lastEnumerator = @enum.Enumerators.Last();
    foreach (EnumeratorDefinition enumerator in @enum.Enumerators)
    {
        // Every enumerator except the last one is followed by a comma.
        string comma = enumerator == lastEnumerator ? "" : ",";
        string enumeratorLine = enumerator.Value != null
            ? $"{enumerator.Name} = {enumerator.Value}{comma}"
            : enumerator.Name + comma;
        enumerators.Add(new LineToken(enumeratorLine));
    }
    BlockToken enumToken = new BlockToken(headerToken, enumerators);
    return enumToken;
}
public override SyntaxNode Parse(LineToken token, ref int i)
{
    if (token.NumberOfBlankLines == 1 && !seenLine)
    {
        seenLine = true;
        return this;
    }
    return base.Parse (token, ref i);
}
public override SyntaxNode Parse(LineToken token, ref int i) { return this; }
public static IEnumerable<LineToken> GetTokens(string input, bool treatWhitespaceOnlyLinesAsBlankLines)
{
    if (input is null)
    {
        throw new ArgumentNullException(nameof(input));
    }

    var currentLine = new StringBuilder();
    var currentLineIsWhitespace = true;
    var lineNumber = 1;

    LineToken GetCurrentToken()
    {
        // In "Loose Mode", lines that consist only of whitespace characters are considered blank lines
        if (currentLine.Length == 0 || (treatWhitespaceOnlyLinesAsBlankLines && currentLineIsWhitespace))
        {
            // clear current value of line
            currentLine.Clear();
            return LineToken.Blank(lineNumber++);
        }
        else
        {
            // get current value and reset state
            var value = currentLine.GetValueAndClear();
            currentLineIsWhitespace = true;
            return LineToken.Line(value, lineNumber++);
        }
    }

    for (var i = 0; i < input.Length; i++)
    {
        var currentChar = input[i];
        var nextChar = i + 1 < input.Length ? (char?)input[i + 1] : null;

        switch ((currentChar, nextChar))
        {
            // a "\r\n" pair ends the line; skip the '\n' that follows
            case ('\r', '\n'):
                yield return GetCurrentToken();
                i += 1;
                break;
            case ('\n', _):
                yield return GetCurrentToken();
                break;
            default:
                currentLineIsWhitespace &= char.IsWhiteSpace(currentChar);
                currentLine.Append(currentChar);
                break;
        }
    }

    // emit the final (unterminated) line, if any, followed by the end-of-file token
    if (currentLine.Length > 0)
    {
        yield return GetCurrentToken();
    }
    yield return LineToken.Eof(lineNumber);
}
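// Hedged usage sketch (not part of the original sources above): it shows how the
// GetTokens tokenizer treats a whitespace-only line when loose mode is enabled.
// The containing type name "LineTokenizer" and the Kind/Value/LineNumber property
// names are assumptions for illustration; only GetTokens and the LineToken.Line /
// LineToken.Blank / LineToken.Eof factories appear in the snippet above.
public static class LineTokenizerUsageSketch
{
    public static void Run()
    {
        var input = "first line\r\n   \nsecond line";
        foreach (var token in LineTokenizer.GetTokens(input, treatWhitespaceOnlyLinesAsBlankLines: true))
        {
            // With loose mode on, the expected sequence is:
            // Line("first line", 1), Blank(2), Line("second line", 3), Eof(4)
            System.Console.WriteLine($"{token.Kind}: '{token.Value}' (line {token.LineNumber})");
        }
    }
}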
public static LineToken Parse(Pullenti.Ner.Token t, int maxChar, LineToken prev) { if (t == null || t.EndChar > maxChar) { return(null); } LineToken res = new LineToken(t, t); for (; t != null && t.EndChar <= maxChar; t = t.Next) { if (t.IsChar(':')) { if (res.IsNewlineBefore && res.BeginToken.IsValue("ПРИЛОЖЕНИЕ", "ДОДАТОК")) { res.IsListHead = true; } res.EndToken = t; break; } if (t.IsChar(';')) { if (!t.IsWhitespaceAfter) { } if (t.Previous != null && (t.Previous.GetReferent() is Pullenti.Ner.Decree.DecreeReferent)) { if (!t.IsWhitespaceAfter) { continue; } if (t.Next != null && (t.Next.GetReferent() is Pullenti.Ner.Decree.DecreeReferent)) { continue; } } res.IsListItem = true; res.EndToken = t; break; } if (t.IsChar('(')) { Pullenti.Ner.Core.BracketSequenceToken br = Pullenti.Ner.Core.BracketHelper.TryParse(t, Pullenti.Ner.Core.BracketParseAttr.No, 100); if (br != null) { res.EndToken = (t = br.EndToken); continue; } } if (t.IsNewlineBefore && t != res.BeginToken) { bool next = true; if (t.Previous.IsComma || t.Previous.IsAnd || t.IsCharOf("(")) { next = false; } else if (t.Chars.IsLetter || (t is Pullenti.Ner.NumberToken)) { if (t.Chars.IsAllLower) { next = false; } else if (t.Previous.Chars.IsLetter) { next = false; } } if (next) { break; } } res.EndToken = t; } if (res.BeginToken.IsHiphen) { res.IsListItem = res.BeginToken.Next != null && !res.BeginToken.Next.IsHiphen; } else if (res.BeginToken.IsCharOf("·")) { res.IsListItem = true; res.BeginToken = res.BeginToken.Next; } else if (res.BeginToken.Next != null && ((res.BeginToken.Next.IsChar(')') || ((prev != null && ((prev.IsListItem || prev.IsListHead))))))) { if (res.BeginToken.LengthChar == 1 || (res.BeginToken is Pullenti.Ner.NumberToken)) { res.IsListItem = true; if ((res.BeginToken is Pullenti.Ner.NumberToken) && (res.BeginToken as Pullenti.Ner.NumberToken).IntValue != null) { res.Number = (res.BeginToken as Pullenti.Ner.NumberToken).IntValue.Value; } else if ((res.BeginToken is Pullenti.Ner.TextToken) && res.BeginToken.LengthChar == 1) { string te = (res.BeginToken as Pullenti.Ner.TextToken).Term; if (Pullenti.Morph.LanguageHelper.IsCyrillicChar(te[0])) { res.Number = ((int)te[0]) - ((int)'А'); } else if (Pullenti.Morph.LanguageHelper.IsLatinChar(te[0])) { res.Number = ((int)te[0]) - ((int)'A'); } } } } return(res); }
public override SyntaxNode Parse(LineToken token, ref int i)
{
    if (token is Directive
        && token.LeadingWhiteSpaceCount == leading
        && token.line.ToUpper().Contains ("END GRID"))
        return new Grid (this);
    if (token is EndOfFileToken)
        return new Grid (this);
    if (token.NumberOfBlankLines > 2)
        return new Grid (this);

    lines.Add (token);
    return this;
}
static object[] TestCase(string input, params FooterToken[] tokens) =>
    new object[]
    {
        new XunitSerializableLineToken(LineToken.Line(input, 1)),
        tokens.Select(t => new XunitSerializableFooterToken(t)).ToArray()
    };
public override SyntaxNode Parse(LineToken token, ref int i)
{
    bool end_directive = false;
    if (token is EndOfFileToken
        || (token is Directive && (end_directive = ((string)token).StartsWith ("END", System.StringComparison.OrdinalIgnoreCase))))
    {
        // An explicit END directive is kept as part of the block; end-of-file is not.
        if (end_directive)
            lines.Add (token);
        return new CompleteCode (this);
    }

    lines.Add (token);
    return this;
}
internal XunitSerializableLineToken(LineToken value) => Value = value;
public static List<LineToken> ParseList(Pullenti.Ner.Token t, int maxChar, LineToken prev)
{
    LineToken lt = Parse(t, maxChar, prev);
    if (lt == null)
    {
        return null;
    }
    List<LineToken> res = new List<LineToken>();
    res.Add(lt);
    string ss = lt.ToString();
    for (t = lt.EndToken.Next; t != null; t = t.Next)
    {
        LineToken lt0 = Parse(t, maxChar, lt);
        if (lt0 == null)
        {
            break;
        }
        res.Add((lt = lt0));
        t = lt0.EndToken;
    }
    if ((res.Count < 2) && !res[0].IsListItem)
    {
        if ((prev != null && prev.IsListItem && res[0].EndToken.IsChar('.')) && !res[0].BeginToken.Chars.IsCapitalUpper)
        {
            res[0].IsListItem = true;
            return res;
        }
        return null;
    }
    int i;
    for (i = 0; i < res.Count; i++)
    {
        if (res[i].IsListItem)
        {
            break;
        }
    }
    if (i >= res.Count)
    {
        return null;
    }
    int j;
    int cou = 0;
    for (j = i; j < res.Count; j++)
    {
        if (!res[j].IsListItem)
        {
            if (res[j - 1].IsListItem && res[j].EndToken.IsChar('.'))
            {
                if (res[j].BeginToken.GetSourceText() == res[i].BeginToken.GetSourceText() || res[j].BeginToken.Chars.IsAllLower)
                {
                    res[j].IsListItem = true;
                    j++;
                    cou++;
                }
            }
        }
        else
        {
            cou++;
        }
    }
    return res;
}
public static List<LineToken> TokenizeFilterLineString(string input)
{
    // Initialization
    bool quoteMode = false;
    bool firstIdentTest = true;
    LineParsingCommentState CommentState = LineParsingCommentState.None;
    string currentString = "";
    LineToken currentToken = new LineToken();
    List<LineToken> tokens = new List<LineToken>();

    for (int i = 0; i < input.Length; i++)
    {
        char currentChar = input[i];

        // Handle inComments
        if (CommentState == LineParsingCommentState.Comment)
        {
            currentString += currentChar;
            continue;
        }

        // Handle inQuotes
        if (quoteMode)
        {
            if (currentChar == '"')
            {
                quoteMode = false;
                AddCurrentToken();
            }
            else
            {
                currentString += currentChar;
            }
            continue;
        }

        switch (currentChar)
        {
            case '#':
                AddCurrentToken();
                if (CommentState == LineParsingCommentState.None && firstIdentTest == true)
                {
                    CommentState = LineParsingCommentState.Testing;
                }
                else
                {
                    CommentState = LineParsingCommentState.Comment;
                }
                break;
            case '"':
                quoteMode = true;
                currentToken.isQuoted = true;
                break;
            case ' ':
            case '\t':
                TestForIdentToken();
                AddCurrentToken();
                break;
            default:
                currentString += currentChar;
                break;
        }
    }

    if (tokens.Count == 0)
    {
        TestForIdentToken();
    }
    AddCurrentToken();

    if (tokens.Count == 0 && CommentState != LineParsingCommentState.None)
    {
        tokens.Add(LineToken.CreateFluffComment(" "));
    }
    return tokens;

    // Local functions
    void TestForIdentToken()
    {
        if (firstIdentTest)
        {
            var trimmed = currentString.Trim();
            if (trimmed.Length > 0)
            {
                if (CommentState == LineParsingCommentState.Testing)
                {
                    if (FilterConstants.LineTypes.ContainsKey(trimmed))
                    {
                        currentString = trimmed;
                        currentToken.isIdent = true;
                        CommentState = LineParsingCommentState.Value;
                    }
                    else
                    {
                        CommentState = LineParsingCommentState.Comment;
                    }
                }
                else
                {
                    if (FilterConstants.LineTypes.ContainsKey(trimmed))
                    {
                        currentToken.isIdent = true;
                    }
                    else
                    {
                        throw new Exception("Unknown Ident during Check Phase: " + currentString);
                    }
                }
                firstIdentTest = false;
            }
        }
    }

    void AddCurrentToken()
    {
        if (currentString != string.Empty)
        {
            if (CommentState == LineParsingCommentState.Value)
            {
                currentToken.isCommented = true;
            }
            if (CommentState == LineParsingCommentState.Comment)
            {
                currentToken.isFluffComment = true;
                currentToken.isCommented = true;
            }
            currentToken.value = currentString;
            tokens.Add(currentToken);
            currentString = string.Empty;
            currentToken = new LineToken();
        }
    }
}
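// Hedged usage sketch (not part of the original sources above): it shows the kind of
// token stream TokenizeFilterLineString produces for a simple filter line. The
// containing type name "LineParser" is an assumption, and "Show" is assumed to be a
// key in FilterConstants.LineTypes; the value/isIdent/isQuoted/isFluffComment fields
// are the ones used in the snippet above.
public static class FilterLineTokenizerUsageSketch
{
    public static void Run()
    {
        // Expected tokens: "Show" (ident), "My Base Type" (quoted), " comment" (fluff comment).
        var tokens = LineParser.TokenizeFilterLineString("Show \"My Base Type\" # comment");
        foreach (var token in tokens)
        {
            System.Console.WriteLine($"'{token.value}' ident={token.isIdent} quoted={token.isQuoted} fluff={token.isFluffComment}");
        }
    }
}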
public override SyntaxNode Parse(LineToken token, ref int i)
{
    if (token.NumberOfBlankLines > 2
        || token is EndOfFileToken
        || (token is Directive && token.line.Equals("Column", StringComparison.OrdinalIgnoreCase)))
    {
        parentStack.Pop ();
        parentStack.Push (new CompleteColumn (this));
        return token.StartNode (ref i);
    }
    tokens.Add (token);
    return this;
}
public ExpressionBodyToken(LineToken line) { Line = line; }
static int _analizeListItems(List <FragToken> chi, int ind) { if (ind >= chi.Count) { return(-1); } FragToken res = chi[ind]; Pullenti.Ner.Instrument.InstrumentKind ki = res.Kind; if (((ki == Pullenti.Ner.Instrument.InstrumentKind.Chapter || ki == Pullenti.Ner.Instrument.InstrumentKind.Clause || ki == Pullenti.Ner.Instrument.InstrumentKind.Content) || ki == Pullenti.Ner.Instrument.InstrumentKind.Item || ki == Pullenti.Ner.Instrument.InstrumentKind.Subitem) || ki == Pullenti.Ner.Instrument.InstrumentKind.ClausePart || ki == Pullenti.Ner.Instrument.InstrumentKind.Indention) { } else { return(-1); } if (res.HasChanges && res.MultilineChangesValue != null) { Pullenti.Ner.MetaToken ci = res.MultilineChangesValue; FragToken cit = new FragToken(ci.BeginToken, ci.EndToken) { Kind = Pullenti.Ner.Instrument.InstrumentKind.Citation }; res.Children.Add(cit); if (Pullenti.Ner.Core.BracketHelper.IsBracket(cit.BeginToken.Previous, true)) { cit.BeginToken = cit.BeginToken.Previous; } if (Pullenti.Ner.Core.BracketHelper.IsBracket(cit.EndToken.Next, true)) { cit.EndToken = cit.EndToken.Next; if (cit.EndToken.Next != null && cit.EndToken.Next.IsCharOf(";.")) { cit.EndToken = cit.EndToken.Next; } } res.FillByContentChildren(); if (res.Children[0].HasChanges) { } Pullenti.Ner.Instrument.InstrumentKind citKind = Pullenti.Ner.Instrument.InstrumentKind.Undefined; if (ci.Tag is Pullenti.Ner.Decree.DecreeChangeReferent) { Pullenti.Ner.Decree.DecreeChangeReferent dcr = ci.Tag as Pullenti.Ner.Decree.DecreeChangeReferent; if (dcr.Value != null && dcr.Value.NewItems.Count > 0) { string mnem = dcr.Value.NewItems[0]; int i; if ((((i = mnem.IndexOf(' ')))) > 0) { mnem = mnem.Substring(0, i); } citKind = Pullenti.Ner.Decree.Internal.PartToken._getInstrKindByTyp(Pullenti.Ner.Decree.Internal.PartToken._getTypeByAttrName(mnem)); } else if (dcr.Owners.Count > 0 && (dcr.Owners[0] is Pullenti.Ner.Decree.DecreePartReferent) && dcr.Kind == Pullenti.Ner.Decree.DecreeChangeKind.New) { Pullenti.Ner.Decree.DecreePartReferent pat = dcr.Owners[0] as Pullenti.Ner.Decree.DecreePartReferent; int min = 0; foreach (Pullenti.Ner.Slot s in pat.Slots) { Pullenti.Ner.Decree.Internal.PartToken.ItemType ty = Pullenti.Ner.Decree.Internal.PartToken._getTypeByAttrName(s.TypeName); if (ty == Pullenti.Ner.Decree.Internal.PartToken.ItemType.Undefined) { continue; } int l = Pullenti.Ner.Decree.Internal.PartToken._getRank(ty); if (l == 0) { continue; } if (l > min || min == 0) { min = l; citKind = Pullenti.Ner.Decree.Internal.PartToken._getInstrKindByTyp(ty); } } } } FragToken sub = null; if (citKind != Pullenti.Ner.Instrument.InstrumentKind.Undefined && citKind != Pullenti.Ner.Instrument.InstrumentKind.Appendix) { sub = new FragToken(ci.BeginToken, ci.EndToken); ContentAnalyzeWhapper wr = new ContentAnalyzeWhapper(); wr.Analyze(sub, null, true, citKind); sub.Kind = Pullenti.Ner.Instrument.InstrumentKind.Content; } else { sub = FragToken.CreateDocument(ci.BeginToken, ci.EndChar, citKind); } if (sub == null || sub.Children.Count == 0) { } else if ((sub.Kind == Pullenti.Ner.Instrument.InstrumentKind.Content && sub.Children.Count > 0 && sub.Children[0].BeginToken == sub.BeginToken) && sub.Children[sub.Children.Count - 1].EndToken == sub.EndToken) { cit.Children.AddRange(sub.Children); } else { cit.Children.Add(sub); } return(1); } int endChar = res.EndChar; if (res.Itok == null) { res.Itok = InstrToken1.Parse(res.BeginToken, true, null, 0, null, false, res.EndChar, false, false); } List <LineToken> lines = LineToken.ParseList(res.BeginToken, endChar, null); 
if (lines == null || (lines.Count < 1)) { return(-1); } int ret = 1; if (res.Kind == Pullenti.Ner.Instrument.InstrumentKind.Content) { for (int j = ind + 1; j < chi.Count; j++) { if (chi[j].Kind == Pullenti.Ner.Instrument.InstrumentKind.Content) { List <LineToken> lines2 = LineToken.ParseList(chi[j].BeginToken, chi[j].EndChar, lines[lines.Count - 1]); if (lines2 == null || (lines2.Count < 1)) { break; } if (!lines2[0].IsListItem) { if ((lines2.Count > 1 && lines2[1].IsListItem && lines2[0].EndToken.IsCharOf(":")) && !lines2[0].BeginToken.Chars.IsCapitalUpper) { lines2[0].IsListItem = true; } else { break; } } lines.AddRange(lines2); ret = (j - ind) + 1; } else if (chi[j].Kind != Pullenti.Ner.Instrument.InstrumentKind.Editions && chi[j].Kind != Pullenti.Ner.Instrument.InstrumentKind.Comment) { break; } } } if (lines.Count < 2) { return(-1); } if ((lines.Count > 1 && lines[0].IsListItem && lines[1].IsListItem) && lines[0].Number != 1) { if (lines.Count == 2 || !lines[2].IsListItem) { lines[0].IsListItem = (lines[1].IsListItem = false); } } for (int i = 0; i < lines.Count; i++) { if (lines[i].IsListItem) { if (i > 0 && lines[i - 1].IsListItem) { continue; } if (((i + 1) < lines.Count) && lines[i + 1].IsListItem) { } else { lines[i].IsListItem = false; continue; } int j; bool newLine = false; for (j = i + 1; j < lines.Count; j++) { if (!lines[j].IsListItem) { break; } else if (lines[j].IsNewlineBefore) { newLine = true; } } if (newLine) { continue; } if (i > 0 && lines[i - 1].EndToken.IsChar(':')) { continue; } for (j = i; j < lines.Count; j++) { if (!lines[j].IsListItem) { break; } else { lines[j].IsListItem = false; } } } } if (lines.Count > 2) { LineToken last = lines[lines.Count - 1]; LineToken last2 = lines[lines.Count - 2]; if ((!last.IsListItem && last.EndToken.IsChar('.') && last2.IsListItem) && last2.EndToken.IsChar(';')) { if ((last.LengthChar < (last2.LengthChar * 2)) || last.BeginToken.Chars.IsAllLower) { last.IsListItem = true; } } } for (int i = 0; i < (lines.Count - 1); i++) { if (!lines[i].IsListItem && !lines[i + 1].IsListItem) { if (((i + 2) < lines.Count) && lines[i + 2].IsListItem && lines[i + 1].EndToken.IsChar(':')) { } else { lines[i].EndToken = lines[i + 1].EndToken; lines.RemoveAt(i + 1); i--; } } } for (int i = 0; i < (lines.Count - 1); i++) { if (lines[i].IsListItem) { if (lines[i].Number == 1) { bool ok = true; int num = 1; int nonum = 0; for (int j = i + 1; j < lines.Count; j++) { if (!lines[j].IsListItem) { ok = false; break; } else if (lines[j].Number > 0) { num++; if (lines[j].Number != num) { ok = false; break; } } else { nonum++; } } if (!ok || nonum == 0 || (num < 2)) { break; } LineToken lt = lines[i]; for (int j = i + 1; j < lines.Count; j++) { if (lines[j].Number > 0) { lt = lines[j]; } else { List <LineToken> chli = lt.Tag as List <LineToken>; if (chli == null) { lt.Tag = (chli = new List <LineToken>()); } lt.EndToken = lines[j].EndToken; chli.Add(lines[j]); lines.RemoveAt(j); j--; } } } } } int cou = 0; foreach (LineToken li in lines) { if (li.IsListItem) { cou++; } } if (cou < 2) { return(-1); } for (int i = 0; i < lines.Count; i++) { if (lines[i].IsListItem) { int i0 = i; bool ok = true; cou = 1; for (; i < lines.Count; i++, cou++) { if (!lines[i].IsListItem) { break; } else if (lines[i].Number != cou) { ok = false; } } if (!ok) { for (i = i0; i < lines.Count; i++) { if (!lines[i].IsListItem) { break; } else { lines[i].Number = 0; } } } if (cou > 3 && lines[i0].BeginToken.GetSourceText() != lines[i0 + 1].BeginToken.GetSourceText() && lines[i0 + 
1].BeginToken.GetSourceText() == lines[i0 + 2].BeginToken.GetSourceText()) { string pref = lines[i0 + 1].BeginToken.GetSourceText(); ok = true; for (int j = i0 + 2; j < i; j++) { if (pref != lines[j].BeginToken.GetSourceText()) { ok = false; break; } } if (!ok) { continue; } Pullenti.Ner.Token tt = null; ok = false; for (tt = lines[i0].EndToken.Previous; tt != null && tt != lines[i0].BeginToken; tt = tt.Previous) { if (tt.GetSourceText() == pref) { ok = true; break; } } if (ok) { LineToken li0 = new LineToken(lines[i0].BeginToken, tt.Previous); lines[i0].BeginToken = tt; lines.Insert(i0, li0); i++; } } } } foreach (LineToken li in lines) { li.CorrectBeginToken(); FragToken ch = new FragToken(li.BeginToken, li.EndToken) { Kind = (li.IsListItem ? Pullenti.Ner.Instrument.InstrumentKind.ListItem : Pullenti.Ner.Instrument.InstrumentKind.Content), Number = li.Number }; if (ch.Kind == Pullenti.Ner.Instrument.InstrumentKind.Content && ch.EndToken.IsChar(':')) { ch.Kind = Pullenti.Ner.Instrument.InstrumentKind.ListHead; } res.Children.Add(ch); List <LineToken> chli = li.Tag as List <LineToken>; if (chli != null) { foreach (LineToken lt in chli) { ch.Children.Add(new FragToken(lt.BeginToken, lt.EndToken) { Kind = Pullenti.Ner.Instrument.InstrumentKind.ListItem }); } if (ch.BeginChar < ch.Children[0].BeginChar) { ch.Children.Insert(0, new FragToken(ch.BeginToken, ch.Children[0].BeginToken.Previous) { Kind = Pullenti.Ner.Instrument.InstrumentKind.Content }); } } } return(ret); }
public void Equals_returns_false_if_the_argument_if_not_a_LineToken()
{
    var sut = new LineToken(LineTokenKind.Line, "Some Value", 1);
    Assert.False(sut.Equals(new object()));
}
public override SyntaxNode Parse(LineToken token, ref int i)
{
    if (token.NumberOfBlankLines > 1
        || token is EndOfFileToken
        || (token is Directive && ((string)token).StartsWith("END", System.StringComparison.OrdinalIgnoreCase)))
        return Complete (token, ref i);

    lines.Add (LineToken.TEXT_FRAGMENT_SIGNAL + token);
    return this;
}