/// <summary>
/// Opens a connection to the target DSNS, authenticating with the
/// current passport token content.
/// </summary>
private void DoConnect()
{
    // Wrap the passport content so it can be handed to the connection layer.
    InternalToken it = new InternalToken(DSAServices.PassportToken.PassportContent);
    InternalConnection = new Connection();
    // Use the full "SystemMaintenance" service name for consistency with the
    // other DoConnect variants in this file, which all pass the long form;
    // "sm" appears to be an abbreviated alias of the same service.
    InternalConnection.Connect(AccessPoint.Parse(TargetDSNS), "SystemMaintenance", it);
}
/// <summary>
/// Gets the part of the text at the given position. The returned string can
/// be composed of one or several words, all sharing the same style.
/// </summary>
private void _populateToken(int lineIndex, int tokenIndex, InternalToken output)
{
    // Resolve this token's absolute index in the tag table from the
    // line's base offset.
    int tagStart = _info.LineArray[lineIndex] + tokenIndex;
    int tagEnd = tagStart + 1;

    // Extract the substring covered by [tagStart, tagEnd); when tagEnd falls
    // past the index table, the token runs to the end of the text.
    int begin = _info.IndexArray[tagStart];
    string res = tagEnd < _info.IndexArray.Count
        ? _info.Text.Substring(begin, _info.IndexArray[tagEnd] - begin)
        : _info.Text.Substring(begin);

    // Trim trailing whitespace only when this string closes the current line
    // (it ends with '\n'). Intermediate strings must keep their spacing,
    // otherwise the display would shift subsequent text sequences.
    if (res.EndsWith("\n"))
    {
        res = res.TrimEnd();
    }

    // Decode the byte code into a ClassificationTag and populate the result.
    output.Setup(res, _getTagFromByteValue(_info.TagArray[tagStart]));
}
/// <summary>
/// Opens a connection to the "SystemMaintenance" service on the target DSNS,
/// authenticating with the current passport token content. Any failure is
/// silently ignored (best-effort connect).
/// </summary>
private void DoConnect()
{
    InternalToken it = new InternalToken(DSAServices.PassportToken.PassportContent);
    try
    {
        InternalConnection = new Connection();
        InternalConnection.Connect(AccessPoint.Parse(TargetDSNS), "SystemMaintenance", it);
    }
    catch
    {
        // NOTE(review): this bare catch swallows every exception, including
        // AccessPoint.Parse failures, with no logging. If the connect is
        // deliberately best-effort, consider at least logging the failure;
        // otherwise let the exception propagate — confirm intent with callers.
    }
}
/// <summary>
/// Call this method to visit iteratively all tokens in the source text.
/// Each time a token has been identified, the method returns true and the
/// identified token is placed under the CurrentToken property.
/// When there is no more token to visit, the method returns false
/// and null is set in the CurrentToken property.
/// </summary>
public bool Next()
{
    char c;
    LexToken token;
    string prediction;
    int pos;
    int count;
    int prediction_length;

    _token = null;

    if (!HasNext())
    {
        return(false);
    }

    pos = _position;
    _token = new InternalToken(pos);

    // Width of the look-ahead window handed to the dictionary; taken from
    // entry 0, which presumably is the longest entry — TODO confirm the
    // dictionary is non-empty and ordered that way.
    prediction_length = _dictionary[0].Text.Length;

    while (pos < _text.Length)
    {
        // Grow the candidate token one character at a time.
        c = _text[pos];
        _token.AppendsChar(c);

        // Build the look-ahead string from the characters that follow,
        // clamped to the remaining text length.
        prediction = "";
        if (pos + 1 < _text.Length)
        {
            count = Math.Min(prediction_length, _text.Length - pos - 1);
            prediction = _text.Substring(pos + 1, count);
        }

        token = _dictionary.TryMatch(_token.Text, prediction);
        if (token != null)
        {
            // Commit the matched token and advance the visitor past it.
            _token.SetText(token.Text);
            _token.SetIndex(_position);
            _token.SetLexerTag(token.Tag);
            _position += _token.Text.Length;
            break;
        }
        pos++;
    }

    // NOTE(review): if the loop exhausts the text without a match, _position
    // is never advanced yet true is still returned with a partially built
    // token — a later HasNext() would then report more input and this method
    // could loop forever. Confirm TryMatch is guaranteed to match eventually.
    return(true);
}
/// <summary>
/// Gets the part of the text at the given position.
/// The returned string can be composed of one or several words
/// all with the same style.
/// </summary>
private void _populateToken(int lineIndex, int tokenIndex, InternalToken output)
{
    // Translate (line, token) into an absolute position in the tag table.
    int tagPos = _info.LineArray[lineIndex] + tokenIndex;
    int next = tagPos + 1;
    int start = _info.IndexArray[tagPos];

    string piece;
    if (next >= _info.IndexArray.Count)
    {
        // Last token of the text: take everything up to the end.
        piece = _info.Text.Substring(start);
    }
    else
    {
        piece = _info.Text.Substring(start, _info.IndexArray[next] - start);
    }

    // Only the final part of a line (marked by a trailing '\n') is trimmed;
    // trimming intermediate parts would badly shift the displayed text.
    if (piece.EndsWith("\n"))
    {
        piece = piece.TrimEnd();
    }

    // Decode the byte tag and hand the result back through the caller's token.
    ClassificationTag tag = _getTagFromByteValue(_info.TagArray[tagPos]);
    output.Setup(piece, tag);
}
/// <summary>
/// Connects to the "SystemMaintenance" service on the target DSNS using
/// the current passport content as the authentication token.
/// </summary>
private void DoConnect()
{
    // Wrap the passport content for the connection layer.
    InternalToken passport = new InternalToken(DSAServices.PassportToken.PassportContent);

    Connection connection = new Connection();
    InternalConnection = connection;
    connection.Connect(AccessPoint.Parse(TargetDSNS), "SystemMaintenance", passport);
}
/// <summary>
/// Registers the given internal token in the shared token table,
/// keyed by its identifier.
/// </summary>
internal static void AddToken(InternalToken token)
{
    Token entry = new Token(token, TokenManager.Instance.ContinuousMeanSquare);
    _tokens[token.Id] = entry;
}
/// <summary>
/// Advances the visitor to the next token in the source text. Returns true
/// and stores the identified token under the CurrentToken property when one
/// is found; returns false and leaves the current token null when there is
/// no more text to visit.
/// </summary>
public bool Next()
{
    _token = null;
    if (!HasNext())
    {
        return false;
    }

    int cursor = _position;
    _token = new InternalToken(cursor);

    // Width of the look-ahead window the dictionary uses to disambiguate.
    int lookahead = _dictionary[0].Text.Length;

    while (cursor < _text.Length)
    {
        // Accumulate the next character into the candidate token.
        _token.AppendsChar(_text[cursor]);

        // Build the prediction string from the characters that follow,
        // clamped to the remaining text length.
        string prediction = "";
        int remaining = _text.Length - cursor - 1;
        if (remaining > 0)
        {
            prediction = _text.Substring(cursor + 1, Math.Min(lookahead, remaining));
        }

        LexToken match = _dictionary.TryMatch(_token.Text, prediction);
        if (match != null)
        {
            // Commit the match and move the visitor past it.
            _token.SetText(match.Text);
            _token.SetIndex(_position);
            _token.SetLexerTag(match.Tag);
            _position += _token.Text.Length;
            break;
        }

        cursor++;
    }

    return true;
}