public Block Parse(string id, Line line, Block b, int _cmt, int start, int end, bool ispart) {
    tokentype = TokenType.TXT;
    tokens = new List<Token>();
    List<Tuple<int, int, bool>> cmstrulrs = new List<Tuple<int, int, bool>>();
    line.Block.isLineHeadCmt = _cmt;
    lex.Src = line.Text;
    if (ispart) {
        // Restrict the lexer to the [start, end) sub-range of the line.
        lex.SetRange(start, end);
    }
    // Consume tokens until the lexer runs out of input for this line (or range).
    while (lex.advance(b, line.Block)) {
        tokentype = lex.token;
        switch (tokentype) {
        case TokenType.EndLine:
        case TokenType.Line:
        case TokenType.Enclose:
        case TokenType.Keyword: {
            tokens.Add(new Token { id = id, ad = lex.OffsetLenAttr.t1, len = lex.OffsetLenAttr.t2, attr = lex.OffsetLenAttr.t3.attr });
        }
        break;
        case TokenType.MultiLineStart: {
            // A multi-line region opens here; assume it runs to the end of the range
            // until a matching MultiLineEnd is seen.
            int off = lex.Offset;
            int len = end - lex.OffsetLenAttr.t1;
            lex.isNextLine = true;
            cmstrulrs.Add(new Tuple<int, int, bool> { t1 = off, t2 = len, t3 = lex.isNextLine });
            var parid = ((MultiLineRule)(lex.OffsetLenAttr.t3)).id;
            tokens.Add(new Token { id = parid, type = TokenType.MultiLine, mtype = MultiLineType.Start, ad = lex.OffsetLenAttr.t1, len = len, attr = lex.OffsetLenAttr.t3.attr });
        }
        break;
        case TokenType.MultiLineAllLine: {
            int off = lex.Offset;
            int len = end - lex.OffsetLenAttr.t1;
            lex.isNextLine = true;
            cmstrulrs.Add(new Tuple<int, int, bool> { t1 = off, t2 = len, t3 = lex.isNextLine });
            var parid = ((MultiLineRule)(lex.OffsetLenAttr.t3)).id;
            if (line.Length == 0) {
                // Empty line: the whole (zero-length) line sits inside the region.
                tokens.Add(new Token { id = parid, type = TokenType.MultiLine, mtype = MultiLineType.All, ad = 0, len = 0, attr = lex.OffsetLenAttr.t3.attr });
            } else {
                tokens.Add(new Token { id = parid, type = TokenType.MultiLine, mtype = MultiLineType.All, ad = lex.OffsetLenAttr.t1, len = len, attr = lex.OffsetLenAttr.t3.attr });
            }
        }
        break;
        case TokenType.MultiLineEnd: {
            int len = end - lex.OffsetLenAttr.t1;
            bool isnext = false;
            lex.isNextLine = false;
            if (cmstrulrs.Count > 0) {
                // The region opened on this line closes here as well.
                cmstrulrs[cmstrulrs.Count - 1].t3 = isnext;
            } else {
                int off = lex.Offset;
                cmstrulrs.Add(new Tuple<int, int, bool> { t1 = off, t2 = len, t3 = isnext });
            }
            if (tokens.Count > 0 && (tokens[tokens.Count - 1].mtype == MultiLineType.Start || tokens[tokens.Count - 1].mtype == MultiLineType.All)) {
                // Merge the open Start/All token with this end into a single-line region token.
                int off = tokens[tokens.Count - 1].ad;
                tokens[tokens.Count - 1].mtype = MultiLineType.Line;
                tokens[tokens.Count - 1].len = lex.OffsetLenAttr.t2 - off;
            } else if (line.Block.isLineHeadCmt != 0) {
                // The line started inside a region carried over from a previous line.
                var parid = ((MultiLineRule)(lex.OffsetLenAttr.t3)).id;
                tokens.Add(new Token { id = parid, type = TokenType.MultiLine, mtype = MultiLineType.End, ad = lex.OffsetLenAttr.t1, len = lex.OffsetLenAttr.t2, attr = lex.OffsetLenAttr.t3.attr });
            }
        }
        break;
        default:
            break;
        }
        if (line.Length == 0) {
            break;
        }
    }
    // Record whether a multi-line region continues onto the next line:
    // 2 = no region touched this line, 3 = region left open, 0 = region closed.
    if (cmstrulrs.Count == 0) {
        line.Block.commentTransition = 2;
    } else {
        bool next = cmstrulrs[cmstrulrs.Count - 1].t3;
        line.Block.commentTransition = next ? 3 : 0;
    }
    cmt = (line.Block.commentTransition >> _cmt) & 1;
    if (tokens.Count > 0) {
        // Pad the tail of the range with a default-attribute token if needed.
        var lastrule = tokens[tokens.Count - 1];
        if (lastrule.ad + lastrule.len < end) {
            tokens.Add(new Token { id = id, ad = lastrule.ad + lastrule.len, len = end - (lastrule.ad + lastrule.len), attr = defaultAttr });
        }
        // Fill any gaps between recognized tokens with default-attribute tokens,
        // then restore offset order.
        List<Token> defaultRules = new List<Token>();
        int index = 0;
        for (int i = 0; i < tokens.Count; i++) {
            if (tokens[i].ad - index > 0) {
                defaultRules.Add(new Token { id = id, ad = index, len = tokens[i].ad - index, attr = defaultAttr });
            }
            index = tokens[i].ad + tokens[i].len;
        }
        if (defaultRules.Count > 0) {
            tokens.AddRange(defaultRules);
            tokens.Sort((x, y) => x.ad.CompareTo(y.ad));
        }
    } else if (ispart) {
        // Nothing matched: the whole range is a single default token.
        tokens.Add(new Token { id = id, ad = start, len = end - start, attr = defaultAttr });
    } else {
        tokens.Add(new Token { id = id, ad = 0, len = end, attr = defaultAttr });
    }
    if (ispart) {
        // Splice the re-parsed range back into the line's existing token list,
        // replacing the single token that exactly covers [start, end).
        foreach (var token in line.Tokens) {
            if (token.ad == start && (token.ad + token.len) == end) {
                int index = line.Tokens.IndexOf(token);
                line.Tokens.Remove(token);
                tokens[0].type = token.type;
                line.Tokens.InsertRange(index, tokens);
                break;
            }
        }
    } else {
        line.Tokens = tokens;
    }
    return line.Block;
}
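// Usage sketch for the range-limited overload above: re-lex one token's span
// in place after an edit. Illustrative only; `parser`, `doc`, and `tok` are
// assumed names, not defined in this file, and how the caller obtains the
// preceding block state is an assumption.
//
//   var tok = line.Tokens[k];                         // token whose span changed
//   Block prev = doc.PrevBlock(line);                 // hypothetical lookup of the prior block state
//   parser.Parse(tok.id, line, prev, prev.isLineHeadCmt,
//                tok.ad, tok.ad + tok.len, true);     // ispart = true splices the result into line.Tokens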
public Block Parse(Line line, Block b, int _cmt, int _sccmt) {
    tokentype = TokenType.TXT;
    tokens = new List<Token>();
    List<Tuple<int, int, bool>> cmstrulrs = new List<Tuple<int, int, bool>>();
    line.Block.isLineHeadCmt = _cmt;
    bool? isscnext = null;
    line.Block.isLineHeadPart = _sccmt;
    if (line.Block.isLineHeadPart == 0) {
        // The line starts outside any partition.
        line.Block.PartID = Document.DEFAULT_ID;
        setd(b.PartID);
    } else {
        // The line starts inside a partition carried over from the previous block.
        line.Block.PartID = b.PartID;
        setd(b.PartID);
    }
    lex.Src = line.Text;
    while (tokentype != TokenType.EOS) {
        if (lex.advance(b, line.Block)) {
            tokentype = lex.token;
        } else {
            tokentype = TokenType.EOS;
        }
        switch (tokentype) {
        case TokenType.EndLine:
        case TokenType.Line:
        case TokenType.Enclose:
        case TokenType.Keyword: {
            tokens.Add(new Token { ad = lex.OffsetLenAttr.t1, len = lex.OffsetLenAttr.t2, attr = lex.OffsetLenAttr.t3 });
        }
        break;
        case TokenType.MultiLineStart: {
            // A multi-line region opens here; assume it runs to the end of the line
            // until a matching MultiLineEnd is seen.
            int off = lex.Offset;
            int len = line.Length - lex.OffsetLenAttr.t1;
            lex.isNextLine = true;
            cmstrulrs.Add(new Tuple<int, int, bool> { t1 = off, t2 = len, t3 = lex.isNextLine });
            tokens.Add(new Token { ad = lex.OffsetLenAttr.t1, len = len, attr = lex.OffsetLenAttr.t3 });
        }
        break;
        case TokenType.MultiLineEnd: {
            int len = line.Length - lex.OffsetLenAttr.t1;
            bool isnext = false;
            lex.isNextLine = false;
            if (cmstrulrs.Count > 0) {
                // The region opened on this line closes here as well.
                cmstrulrs[cmstrulrs.Count - 1].t3 = isnext;
            } else {
                int off = lex.Offset;
                cmstrulrs.Add(new Tuple<int, int, bool> { t1 = off, t2 = len, t3 = isnext });
            }
            if (tokens.Count > 0) {
                // Shrink the opening token so it ends where the region closes.
                int off = tokens[tokens.Count - 1].ad;
                tokens[tokens.Count - 1].len = lex.OffsetLenAttr.t2 - off;
            } else if (line.Block.isLineHeadCmt != 0) {
                // The line started inside a region carried over from a previous line.
                tokens.Add(new Token { ad = lex.OffsetLenAttr.t1, len = lex.OffsetLenAttr.t2, attr = lex.OffsetLenAttr.t3 });
            }
        }
        break;
        case TokenType.PartitionStart:
            isscnext = lex.scisNextLine;
            if (line.Block.PartID != Document.DEFAULT_ID) {
                setd(line.Block.PartID);
            }
            break;
        case TokenType.Partition:
            isscnext = lex.scisNextLine;
            setd(line.Block.PartID);
            break;
        case TokenType.PartitionEnd:
            isscnext = lex.scisNextLine;
            setd(Document.DEFAULT_ID);
            break;
        default:
            break;
        }
    }
    // Comment transition: 2 = no region touched this line, 3 = region left open,
    // 0 = region closed.
    if (cmstrulrs.Count == 0) {
        line.Block.commentTransition = 2;
    } else {
        bool next = cmstrulrs[cmstrulrs.Count - 1].t3;
        line.Block.commentTransition = next ? 3 : 0;
    }
    cmt = (line.Block.commentTransition >> _cmt) & 1;
    // Partition transition uses the same encoding.
    if (isscnext == null) {
        line.Block.partTransition = 2;
    } else {
        line.Block.partTransition = isscnext.Value ? 3 : 0;
    }
    sccmt = (line.Block.partTransition >> _sccmt) & 1;
    if (tokens.Count > 0) {
        // Pad the tail of the line with a default-attribute token if needed.
        var lastrule = tokens[tokens.Count - 1];
        if (lastrule.ad + lastrule.len < line.Length) {
            tokens.Add(new Token { ad = lastrule.ad + lastrule.len, len = line.Length - (lastrule.ad + lastrule.len), attr = defaultAttr });
        }
        // Fill any gaps between recognized tokens with default-attribute tokens,
        // then restore offset order.
        List<Token> defaultRules = new List<Token>();
        int index = 0;
        for (int i = 0; i < tokens.Count; i++) {
            if (tokens[i].ad - index > 0) {
                defaultRules.Add(new Token { ad = index, len = tokens[i].ad - index, attr = defaultAttr });
            }
            index = tokens[i].ad + tokens[i].len;
        }
        if (defaultRules.Count > 0) {
            tokens.AddRange(defaultRules);
            tokens.Sort((x, y) => x.ad.CompareTo(y.ad));
        }
    } else {
        // Nothing matched: the whole line is a single default token.
        tokens.Add(new Token { ad = 0, len = line.Length, attr = defaultAttr });
    }
    line.Tokens = tokens;
    return line.Block;
}
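// Usage sketch for the full-line overload: parse a document line by line,
// feeding each line's comment/partition carry bits into the next call. The
// loop shape and the `doc`/`parser`/`initialBlock` names are assumptions for
// illustration; `cmt` and `sccmt` are the fields this method derives from
// commentTransition/partTransition above.
//
//   Block prev = initialBlock;
//   int cmt = 0, sccmt = 0;               // start outside any comment or partition
//   foreach (Line ln in doc.Lines) {
//       prev = parser.Parse(ln, prev, cmt, sccmt);
//       cmt = parser.cmt;                 // 1 if a multi-line region spans into the next line
//       sccmt = parser.sccmt;             // 1 if a partition spans into the next line
//   }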