private bool AppendTextToken(TextToken textToken, string lastText, int stopTokenID, string stopText, ref bool firstText)
{
    bool flag = false;

    if (textToken.ID == stopTokenID && !string.IsNullOrEmpty(stopText))
    {
        textToken = new TextToken(stopTokenID, stopText + "-") { Part = stopText };
        flag = true;
    }

    if (firstText && !string.IsNullOrEmpty(lastText) && textToken.Text.StartsWith(lastText))
    {
        string text = textToken.Text.Remove(0, lastText.Length);
        if (text.StartsWith("-"))
        {
            text = text.Remove(0, 1);
        }

        textToken = new TextToken(textToken.ID, text);
    }

    AppendToLine(textToken);
    firstText = false;
    return flag;
}
private void AppendToLine(TextToken token, double textWidth)
{
    string text = token.Text;
    if (string.IsNullOrEmpty(text))
    {
        return;
    }

    _block = _block ?? new TextTokenBlock
    {
        TextAlign = _lastOpenTag.TextProperties.TextAlign,
        MarginLeft = _marginLeft,
        MarginRight = _marginRight,
        FirstTokenID = _firstTokenID,
        TextIndent = _textIndent
    };

    _block.LastTokenID = token.ID;
    _block.UpdateHeight(GetTextHeight(text));

    if (_separator)
    {
        TextVisualProperties properties = _lastOpenTag.TextProperties.Clone();
        var inlineItem = _block.Inlines.OfType<TextElement>().LastOrDefault();
        if (inlineItem != null && string.IsNullOrEmpty(inlineItem.LinkID))
        {
            properties.LinkID = string.Empty;
        }

        _block.AddText(" ", properties, _fontSize, GetTextSize(" ", properties));
    }

    _block.AddText(text, _lastOpenTag.TextProperties, _fontSize, GetTextSize(text, _lastOpenTag.TextProperties), token.Part, token.ID);
    _textWidth += textWidth;
}
protected override bool ParseMatch(DatesRawData data, Match match, DateTime userDate)
{
    var token = data.Tokens[match.Index];
    data.RemoveRange(match.Index, 1);

    if (Morph.HasLemma(token.Value, Keywords.Holiday[0], Morph.LemmaSearchOptions.OnlySingular))
    {
        // singular
        var saturday = new TextToken(Keywords.Saturday[0]) { Start = token.Start, End = token.End };
        data.ReturnTokens(match.Index, "D", saturday);
    }
    else
    {
        // plural
        var holidays = new[] { Keywords.Saturday[0], Keywords.TimeTo[0], Keywords.Sunday[0] }
            .Select(k => new TextToken(k, token.Start, token.End))
            .ToArray();
        data.ReturnTokens(match.Index, "DtD", holidays);
    }

    return true;
}
public List<RichTextData> Parse(List<TextToken> tokens)
{
    _attrStack.Clear();

    int len = tokens.Count;
    int i = 0;
    while (i < len)
    {
        TextToken token = tokens[i];
        switch (token.Type)
        {
            case TextToken.TextTokenType.String:
                DoString(token);
                break;
            case TextToken.TextTokenType.Attribute:
                DoAttribute(token);
                break;
            default:
                break;
        }

        i++;
    }

    while (_textStack.Count > 0)
    {
        _datas.Add(_textStack.Pop());
    }

    _datas.Reverse();
    return _datas;
}
private void DoString(TextToken token)
{
    RichTextData data = new RichTextData(token.Value);
    AddAttributeToText(data);
    _textStack.Push(data);
}
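// Taken together with Parse above (and the DoAttribute method later in this section), DoString is
// one half of a stack-based rich-text walk: attribute tokens push and pop an attribute stack, and
// each string token is recorded with whatever attributes are open at that moment. Below is a
// minimal, self-contained sketch of that pattern; every name in it (Tok, Run, RichTextSketch) is a
// hypothetical stand-in, not the library's API, and it collects runs into a list rather than the
// snippet's reversed stack.

using System;
using System.Collections.Generic;

enum TokKind { Text, OpenAttr, CloseAttr }

record Tok(TokKind Kind, string Value);
record Run(string Text, IReadOnlyList<string> Attributes);

static class RichTextSketch
{
    // Single pass over the tokens: text runs capture the attributes open at that point;
    // open/close tokens only manipulate the attribute stack.
    public static List<Run> Parse(IEnumerable<Tok> tokens)
    {
        var attrStack = new Stack<string>();
        var runs = new List<Run>();

        foreach (var tok in tokens)
        {
            switch (tok.Kind)
            {
                case TokKind.Text:
                    runs.Add(new Run(tok.Value, attrStack.ToArray()));
                    break;
                case TokKind.OpenAttr:
                    attrStack.Push(tok.Value);
                    break;
                case TokKind.CloseAttr:
                    // Mirror DoAttribute: only pop when the close matches the innermost open attribute.
                    if (attrStack.Count > 0 && attrStack.Peek() == tok.Value)
                    {
                        attrStack.Pop();
                    }
                    break;
            }
        }

        return runs;
    }

    static void Main()
    {
        var runs = Parse(new[]
        {
            new Tok(TokKind.Text, "plain "),
            new Tok(TokKind.OpenAttr, "bold"),
            new Tok(TokKind.Text, "emphasised"),
            new Tok(TokKind.CloseAttr, "bold"),
            new Tok(TokKind.Text, " plain again"),
        });

        foreach (var run in runs)
        {
            Console.WriteLine($"\"{run.Text}\" [{string.Join(", ", run.Attributes)}]");
        }
    }
}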
public void Test_StackdrvierFormatter_FormatLong()
{
    // Creates a large string > 200kb
    var token = new TextToken(new string('*', 51200));
    var logEvent = new LogEvent(
        DateTimeOffset,
        LogEventLevel.Debug,
        new Exception(),
        new MessageTemplate("{0}", new MessageTemplateToken[] { token }),
        new LogEventProperty[0]);

    using var writer = new StringWriter();
    new StackdriverJsonFormatter().Format(logEvent, writer);

    var lines = SplitLogLogs(writer.ToString());

    // The log created was longer than Stackdriver's soft limit of 256 bytes.
    // This means the json will be spread out onto two lines, breaking search.
    // In this scenario the library should add an additional log event informing
    // the user of this issue.
    Assert.True(lines.Length == 2);

    // Validate each line is valid json
    var ourLogLineDict = GetLogLineAsDictionary(lines[0]);
    AssertValidLogLine(ourLogLineDict);

    var errorLogLineDict = GetLogLineAsDictionary(lines[1]);
    AssertValidLogLine(errorLogLineDict, hasException: false);
}
public void When_Has2DistinctSections()
{
    var sectionA1 = new ConfigIniSection("A");
    var tokenA1_1 = new TextToken();
    var tokenA1_2 = new TextToken();
    var tokenA1_3 = new TextToken();
    sectionA1.Tokens.Add(tokenA1_1);
    sectionA1.Tokens.Add(tokenA1_2);
    sectionA1.Tokens.Add(tokenA1_3);

    var sectionB1 = new ConfigIniSection("B");
    var tokenB1_1 = new TextToken();
    var tokenB1_2 = new TextToken();
    var tokenB1_3 = new TextToken();
    sectionB1.Tokens.Add(tokenB1_1);
    sectionB1.Tokens.Add(tokenB1_2);
    sectionB1.Tokens.Add(tokenB1_3);

    var config = new ConfigIni();
    config.Sections.Add(sectionA1);
    config.Sections.Add(sectionB1);

    config.MergeDuplicateSections();

    Assert.That(config.Sections, Is.EquivalentTo(new[] { sectionA1, sectionB1 }));
    Assert.That(sectionA1.Tokens, Is.EquivalentTo(new[] { tokenA1_1, tokenA1_2, tokenA1_3 }));
    Assert.That(sectionB1.Tokens, Is.EquivalentTo(new[] { tokenB1_1, tokenB1_2, tokenB1_3 }));
}
public IToken Produce()
{
    var context = this.Context;
    var text = context.Text;
    var length = text.Length;
    var c = text[context.Index];

    if (c == '\'')
    {
        var initialIndex = context.Index;
        var initialLine = context.Line;
        var index = initialIndex + 1; // skip the opening '

        int delta;

        while (true)
        {
            if (index == length)
            {
                delta = index - initialIndex;
                var column = context.Column + delta;
                throw LexingHelper.CreateUnclosedStringException(new Position(initialLine, column));
            }

            c = text[index];
            if (LexingHelper.IsCaretControl(c))
            {
                delta = index - initialIndex;
                var column = context.Column + delta;
                throw LexingHelper.CreateNewLineInStringException(new Position(initialLine, column));
            }

            index++;
            if (c == '\'')
            {
                break;
            }
        }

        delta = index - initialIndex;
        var str = text.Substring(initialIndex + 1, delta - 2);

        var token = new TextToken(
            StringTextClass.Instance,
            SingleQuoteTextDecoration.Instance,
            str,
            new Position(context.Line, context.Column),
            delta);

        context.Advance(delta, 0, context.Column + delta);
        return token;
    }

    return null;
}
int RenderTextToken(TextToken tt, TextWriter output)
{
    var count = 0;
    using (_theme.Apply(output, ConsoleThemeStyle.Text, ref count))
        output.Write(tt.Text);

    return count;
}
public void When_ConstructedWithTokens()
{
    ConfigIniSection section = null;
    var token1 = new TextToken();
    var token2 = new TextToken();

    Assert.That(() => { section = new ConfigIniSection(new[] { token1, token2 }); }, Throws.Nothing);
    Assert.That(section.Name, Is.Null);
    Assert.That(section.Tokens, Is.EquivalentTo(new[] { token1, token2 }));
}
private void ParseToken(TextToken readToken)
{
    switch (readToken.TokenType)
    {
        case TokenType.ChangeTiming:
            int newTime = Convert.ToInt32(readToken.Value);
            _characterTickTimer.ChangeInterval(newTime);
            BattleTextState = DialogueState.Writing;
            break;

        case TokenType.Pause:
            int pauseLength = Convert.ToInt32(readToken.Value);
            _characterTickTimer.Delay(pauseLength);
            BattleTextState = DialogueState.Writing;
            break;

        case TokenType.Stop:
            _characterTickTimer.Stop();
            _justConsumedStop = true;
            BattleTextState = DialogueState.Halted;
            break;

        case TokenType.String:
        case TokenType.Character:
            BackingLabel.Text += readToken.Value;

            if (BackingLabel.Height > _currentPageMaxHeight)
            {
                if (_justConsumedStop)
                {
                    PageDown();
                    BattleTextState = DialogueState.Writing;
                    break;
                }
                else
                {
                    _characterTickTimer.Stop();
                    BattleTextState = DialogueState.Halted;
                    break;
                }
            }

            if (_mostRecentToken?.IsPrintable == true && !String.IsNullOrWhiteSpace(_mostRecentToken?.Value))
            {
                _justConsumedStop = false;
            }

            BattleTextState = DialogueState.Writing;
            break;

        default:
            Debug.WriteLine($"Found unhandled token: {readToken.Value} of type: {readToken.TokenType}. Skipping...");
            break;
    }

    Debug.WriteLine("Parsed: " + readToken.Value);
    _mostRecentToken = readToken;
}
public IToken Produce()
{
    var context = this.Context;
    var text = context.Text;
    var length = text.Length;
    var c = text[context.Index];

    if (IsPathFirstChar(c))
    {
        var initialIndex = context.Index;
        var index = initialIndex + 1;

        while (true)
        {
            if (index == length)
            {
                break;
            }

            c = text[index];

            if (IsPathInnerChar(c))
            {
                index++;
                continue;
            }

            if (LexingHelper.IsInlineWhiteSpaceOrCaretControl(c))
            {
                break;
            }

            return null;
        }

        var delta = index - initialIndex;
        var str = text.Substring(initialIndex, delta);
        var position = new Position(context.Line, context.Column);

        context.Advance(delta, 0, context.Column + delta);

        var token = new TextToken(
            PathTextClass.Instance,
            NoneTextDecoration.Instance,
            str,
            position,
            delta);

        return token;
    }

    return null;
}
public void TextToken_MarkerIsEmpty_PropertiesAsExpected()
{
    TextToken actual = this.CreateInstance(42, new StringBuilder());

    Assert.That(actual.Offset, Is.EqualTo(-1));
    Assert.That(actual.Rating, Is.EqualTo(-1));
    Assert.That(actual.Marker, Is.Empty);
    Assert.That(actual.Symbol, Is.Empty);
    Assert.That(actual.Lining, Is.Empty);
    Assert.That(actual.Format, Is.Empty);
}
public void TextToken_ParametersValid_PropertiesAsExpected()
{
    TextToken actual = this.CreateInstance(42, new StringBuilder("some value"));

    Assert.That(actual.Offset, Is.EqualTo(42));
    Assert.That(actual.Rating, Is.EqualTo(-1));
    Assert.That(actual.Marker, Is.EqualTo("some value"));
    Assert.That(actual.Symbol, Is.Empty);
    Assert.That(actual.Lining, Is.Empty);
    Assert.That(actual.Format, Is.Empty);
}
public void When_ConstructedWithNameAndTokens()
{
    string name = "/Script/Engine.PlayerInput";
    ConfigIniSection section = null;
    var token1 = new TextToken();
    var token2 = new TextToken();

    Assert.That(() => { section = new ConfigIniSection(name, new[] { token1, token2 }); }, Throws.Nothing);
    Assert.That(section.Name, Is.EqualTo(name));
    Assert.That(section.Tokens, Is.EquivalentTo(new[] { token1, token2 }));
}
public void Should_render_correctly_with_give_offset_and_length(string text, int offset, int length, string expected)
{
    var writer = new StringWriter();
    var token = new TextToken(text, offset, length);

    token.Render(new LogEvent(LogLevel.Debug, DateTimeOffset.UtcNow, null), writer, null);

    token.ToString().Should().Be(expected);
    writer.ToString().Should().Be(expected);
}
private int RenderTextToken(TextToken tt, TextWriter output)
{
    var count = 0;

    using (_theme.Apply(output, RichTextBoxThemeStyle.Text, ref count))
    {
        var text = SpecialCharsEscaping.Apply(tt.Text, ref count);
        output.Write(text);
    }

    return count;
}
private TextToken ReadTextToken(IEnumerable<IExpressionToken> expressionTokens)
{
    var result = new TextToken();

    foreach (var expressionToken in expressionTokens)
    {
        // Only the first expression token is inspected: the loop exits
        // unconditionally after one iteration, whether or not it was a TextToken.
        if (expressionToken is TextToken textToken)
        {
            result.Concat(textToken);
        }

        break;
    }

    return result;
}
public TextToken Stop()
{
    if (builder.Length == 0)
    {
        return null;
    }

    TextToken token = new TextToken
    {
        Index = stream.Index,
        Value = builder.ToString()
    };

    builder.Clear();
    return token;
}
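// Stop() above flushes whatever has been buffered into a single TextToken. Below is a minimal,
// self-contained sketch of how such an accumulator is typically driven by a scanning loop. All
// names here (SimpleToken, TextAccumulator, ScannerSketch) are hypothetical stand-ins, not the
// snippet's types; the snippet's stream.Index is approximated by the loop position.

using System;
using System.Collections.Generic;
using System.Text;

record SimpleToken(int Index, string Value);

class TextAccumulator
{
    private readonly StringBuilder builder = new StringBuilder();

    public void Append(char c) => builder.Append(c);

    // Mirrors Stop(): returns null when nothing was buffered, otherwise flushes the run.
    public SimpleToken Stop(int index)
    {
        if (builder.Length == 0)
        {
            return null;
        }

        var token = new SimpleToken(index, builder.ToString());
        builder.Clear();
        return token;
    }
}

static class ScannerSketch
{
    // Splits input into word tokens, flushing the accumulator at every whitespace boundary.
    static List<SimpleToken> Split(string input)
    {
        var tokens = new List<SimpleToken>();
        var accumulator = new TextAccumulator();

        for (int i = 0; i < input.Length; i++)
        {
            if (char.IsWhiteSpace(input[i]))
            {
                var token = accumulator.Stop(i);
                if (token != null)
                {
                    tokens.Add(token);
                }
            }
            else
            {
                accumulator.Append(input[i]);
            }
        }

        var last = accumulator.Stop(input.Length);
        if (last != null)
        {
            tokens.Add(last);
        }

        return tokens;
    }

    static void Main()
    {
        foreach (var token in Split("hello token world"))
        {
            Console.WriteLine($"{token.Index}: {token.Value}");
        }
    }
}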
private void CreateLine(TextToken token)
{
    CreateEmptyLine(token);

    string text = token.Text;
    _block.UpdateHeight(GetTextHeight(text));
    _textWidth = GetTextWidth(text, false, _lastOpenTag.TextProperties);

    _block.AddText(
        text,
        _lastOpenTag.TextProperties,
        _fontSize,
        GetTextSize(text, _lastOpenTag.TextProperties),
        token.Part,
        token.ID);
}
private void DoAttribute(TextToken token)
{
    TextAttributeBase attr = RichTextParseHelper.ParseTextAttribute(token.Value);

    if (attr.Type == TextAttributeBase.TextAttributeType.Close)
    {
        TextAttributeBase prevAttr = _attrStack.Peek();
        if (prevAttr.Name == attr.Name && prevAttr.Type == TextAttributeBase.TextAttributeType.Open)
        {
            _attrStack.Pop();
        }
    }
    else
    {
        _attrStack.Push(attr);
    }
}
private Task PrintNonfiscalText()
{
    // Use a StringBuilder to append strings more efficiently.
    StringBuilder sb = new StringBuilder();

    // Plain text.
    sb.AppendLine("Hello world!");

    // Text with formatting.
    sb.Append(TextToken.Create("This line is bold.", TextFormats.Bold, TextAlignment.Center).ToString());
    sb.AppendLine("After a text token, a newline is created automatically.");
    sb.Append(TextToken.Create("This line is underlined.", TextFormats.Underlined).ToString());

    // TextFormats values can be combined with the | operator.
    sb.Append(TextToken.Create("This line is bold and underlined.", TextFormats.Underlined | TextFormats.Bold).ToString());
    sb.AppendLine(TextToken.Create(
        "Hello world!",
        TextFormats.DoubleHeight | TextFormats.DoubleWidth | TextFormats.Underlined | TextFormats.Inverted | TextFormats.Bold).ToString());

    // Paper cut follows now.
    sb.Append(new PageBreakToken().ToString());

    // Either barcodes or QR codes can be printed easily.
    sb.AppendLine(BarcodeToken.Create("1234567", BarcodeType.EAN8, BarcodeHriPosition.Above).ToString());
    sb.AppendLine(BarcodeToken.Create("123456789012", BarcodeType.EAN13, BarcodeHriPosition.Below).ToString());
    sb.AppendLine(BarcodeToken.Create("1234ABCD39", BarcodeType.Code39, BarcodeHriPosition.Both).ToString());
    sb.AppendLine(BarcodeToken.Create("1234ABCD93", BarcodeType.Code93, height: 30).ToString());
    sb.AppendLine(BarcodeToken.Create("1234567890", BarcodeType.Code128, elementWidth: 2).ToString());
    sb.AppendLine(QrCodeToken.Create("https://www.ninedigit.sk").ToString());

    string text = sb.ToString();

    if (!ReceiptText.IsValid(text))
    {
        throw new InvalidOperationException("We have used some forbidden characters in our text output!");
    }

    ReceiptText receiptText = new ReceiptText(text);
    TextPrintContext context = new TextPrintContext(receiptText);

    return this.client.PrintTextAsync(context, CancellationToken.None);
}
public void When_Has3DuplicateSections()
{
    var sectionA1 = new ConfigIniSection("A");
    var tokenA1_1 = new TextToken();
    var tokenA1_2 = new TextToken();
    var tokenA1_3 = new TextToken();
    sectionA1.Tokens.Add(tokenA1_1);
    sectionA1.Tokens.Add(tokenA1_2);
    sectionA1.Tokens.Add(tokenA1_3);

    var sectionA2 = new ConfigIniSection("A");
    var tokenA2_1 = new TextToken();
    var tokenA2_2 = new TextToken();
    var tokenA2_3 = new TextToken();
    sectionA2.Tokens.Add(tokenA2_1);
    sectionA2.Tokens.Add(tokenA2_2);
    sectionA2.Tokens.Add(tokenA2_3);

    var sectionA3 = new ConfigIniSection("A");
    var tokenA3_1 = new TextToken();
    var tokenA3_2 = new TextToken();
    var tokenA3_3 = new TextToken();
    sectionA3.Tokens.Add(tokenA3_1);
    sectionA3.Tokens.Add(tokenA3_2);
    sectionA3.Tokens.Add(tokenA3_3);

    var config = new ConfigIni();
    config.Sections.Add(sectionA1);
    config.Sections.Add(sectionA2);
    config.Sections.Add(sectionA3);

    config.MergeDuplicateSections();

    Assert.That(config.Sections, Is.EquivalentTo(new[] { sectionA1 }));
    Assert.That(sectionA1.Tokens, Is.EquivalentTo(new[]
    {
        tokenA1_1, tokenA1_2, tokenA1_3,
        tokenA2_1, tokenA2_2, tokenA2_3,
        tokenA3_1, tokenA3_2, tokenA3_3
    }));
}
internal static IEnumerable<int> GetRuleHandleIndexes(Reduction ruleDeclaration)
{
    if (ruleDeclaration == null)
    {
        throw new ArgumentNullException("ruleDeclaration");
    }

    Reduction handle = (Reduction)ruleDeclaration.Children[2];
    int index = 0;
    List<int> emittedIndexes = new List<int>();

    while (handle.Children.Count == 2)
    {
        Reduction handleSymbol = (Reduction)handle.Children[0];
        TextToken offset = handleSymbol.Children[0] as TextToken;

        if (offset != null)
        {
            if (offset.Text == "~")
            {
                yield return -1;
            }
            else
            {
                int result = int.Parse(offset.Text.TrimEnd(' ', ':'), NumberFormatInfo.InvariantInfo);
                emittedIndexes.Add(result);
                yield return result;
            }
        }
        else
        {
            while (emittedIndexes.Contains(index))
            {
                index++;
            }

            emittedIndexes.Add(index);
            yield return index++;
        }

        handle = (Reduction)handle.Children[1];
    }
}
public OutputTemplateRenderer(string outputTemplate, IFormatProvider formatProvider)
{
    if (outputTemplate is null)
    {
        throw new ArgumentNullException(nameof(outputTemplate));
    }

    var template = new MessageTemplateParser().Parse(outputTemplate);

    // Map each output-template token to the renderer responsible for it.
    _renderers = template.Tokens
        .Select(token => token switch
        {
            TextToken tt => new TextTokenRenderer(tt.Text),
            PropertyToken pt => pt.PropertyName switch
            {
                OutputProperties.LevelPropertyName => new LevelTokenRenderer(pt) as OutputTemplateTokenRenderer,
                OutputProperties.NewLinePropertyName => new NewLineTokenRenderer(pt.Alignment),
                OutputProperties.ExceptionPropertyName => new ExceptionTokenRenderer(),
                OutputProperties.MessagePropertyName => new MessageTemplateOutputTokenRenderer(),
                OutputProperties.TimestampPropertyName => new TimestampTokenRenderer(pt, formatProvider),
                OutputProperties.PropertiesPropertyName => new PropertiesTokenRenderer(pt, template),
                _ => new EventPropertyTokenRenderer(pt, formatProvider)
            },
        })
        .ToArray();
}
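// A hedged sketch of how the _renderers array built above would presumably be consumed: each
// token renderer is replayed, in order, for every log event. The Render(LogEvent, TextWriter)
// signature is an assumption inferred from the other renderer snippets in this section, not
// something this constructor shows.
public void Format(LogEvent logEvent, TextWriter output)
{
    foreach (var renderer in _renderers)
    {
        renderer.Render(logEvent, output);
    }
}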
public Utilisateur CreateConseiller(List<XmlNode> Nodes)
{
    SectionsExtractor CvSectionsExtractor = new SectionsExtractor();

    List<IXmlToken> matchTokens = new List<IXmlToken>();
    matchTokens.Add(TextToken.CreateTextToken());
    matchTokens.Add(FormatationToken.CreateFormatationToken(new KeyValuePair<string, string>("w:val", "Titre1")));

    List<CVSection> Sections = null;

    try
    {
        Sections = CvSectionsExtractor.GetCVSections(Nodes, matchTokens, "IDENTIFICATION");
        conseiller = new Conseiller();
        AssemblerConseiller(Sections);
    }
    catch (Exception ex)
    {
        WriteToErrorLog(ex);
    }

    return utilisateur;
}
public IToken Produce()
{
    var context = this.Context;
    var text = context.Text;
    var length = text.Length;

    string start = null;
    foreach (var possibleStart in PossibleStarts)
    {
        if (context.StartsWith(possibleStart))
        {
            start = possibleStart;
            break;
        }
    }

    if (start == null)
    {
        return null;
    }

    var initialIndex = context.Index;
    var startIndex = initialIndex + start.Length;
    var index = startIndex;

    while (true)
    {
        if (index == length)
        {
            break;
        }

        var c = text[index];
        if (LexingHelper.IsInlineWhiteSpaceOrCaretControl(c))
        {
            break;
        }

        index++;
    }

    if (index == startIndex)
    {
        return null;
    }

    var delta = index - initialIndex;
    var str = text.Substring(initialIndex, delta);
    var position = new Position(context.Line, context.Column);

    context.Advance(delta, 0, context.Column + delta);

    var token = new TextToken(
        UrlTextClass.Instance,
        NoneTextDecoration.Instance,
        str,
        position,
        delta);

    return token;
}
private static void RenderTextToken(TextToken textToken, TextWriter output) => output.Write(textToken.Text);
public static bool GetSyntaxTree(Stream data, SyntaxTree<PreprocessorToken> tree, PreprocessorToken root = null)
{
    while (!data.Eof())
    {
        char c = data.Peek();

        switch ((TokenTypes)c)
        {
            case TokenTypes.IfDefined:
            case TokenTypes.IfNotDefined:
            case TokenTypes.If:
            case TokenTypes.ElseIf:
            {
                ConditionToken token = new ConditionToken((TokenTypes)c);
                token.Deserialize(data);

                if (root == null)
                {
                    tree.Add(token);
                }
                else
                {
                    tree.AddAppend(root, token);
                }

                if (!GetSyntaxTree(data, tree, token))
                {
                    return false;
                }
            }
            break;

            case TokenTypes.Else:
            {
                PreprocessorToken token = new PreprocessorToken(TokenTypes.Else);
                token.Deserialize(data);

                if (root == null)
                {
                    tree.Add(token);
                }
                else
                {
                    tree.AddAppend(root, token);
                }

                if (!GetSyntaxTree(data, tree, token))
                {
                    return false;
                }
            }
            break;

            case TokenTypes.Define:
            case TokenTypes.Undefine:
            case TokenTypes.Include:
            case TokenTypes.Error:
            case TokenTypes.Pragma:
            {
                TextToken token = new TextToken((TokenTypes)c);
                token.Deserialize(data);

                if (root == null)
                {
                    tree.Add(token);
                }
                else
                {
                    tree.AddAppend(root, token);
                }
            }
            break;

            case TokenTypes.Text:
            {
                TextToken token = new TextToken();
                token.Deserialize(data);

                if (root == null)
                {
                    tree.Add(token);
                }
                else
                {
                    tree.AddAppend(root, token);
                }
            }
            break;

            default:
            {
                if (root != null && c == 0)
                {
                    data.Get();
                    return true;
                }
                else
                {
                    return ThrowError(new CodeProcessorContext(new CodeProcessorConfig()), PreprocessorCodes.UnexpectedCharacter);
                }
            }
        }
    }

    return true;
}
public static void RenderTextToken(TextToken tt, TextWriter output)
{
    output.Write(tt.Text);
}