Example #1
		private static void ProcessTokens(List<Tokenizer.Token> tokens,
										  OpenMarkdown doc, XmlNode context)
		{
			// Clear the context's existing child nodes, then rebuild them
			// from the token stream
			OpenMarkdown.StripChildNodes(context);

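			// Plain character data accumulates here until a special element
			// forces it to be flushed into the XML tree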
			StringBuilder accum = new StringBuilder();

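			// The previous token is used to decide whether quotes open or close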
			Tokenizer.Token lastToken = null;
			for (int i = 0; i < tokens.Count; i++) {
				Tokenizer.Token tok = tokens[i];
				switch (tok.TokenKind) {
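				// A backslash escapes the next token: its text is emitted
				// verbatim and the token itself is skipped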
				case Tokenizer.Token.Kind.Backslash:
					if (i + 1 < tokens.Count) {
						accum.Append(tokens[i + 1].Content);
						i++;
					}
					break;

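				// Referrals are appended through AppendNode rather than
				// accumulated as text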
				case Tokenizer.Token.Kind.Referral:
					AppendNode(tok.Referral, doc, context, ref accum);
					break;

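				// "--" and "---" map to an em-dash or en-dash according to the
				// configured SmartyDashes style ("---" is dropped when the
				// style defines no en-dash)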
				case Tokenizer.Token.Kind.DoubleDash:
					switch (doc.Config.DashesStyle) {
					case Configuration.SmartyDashes.DoubleEmdashNoEndash:
					case Configuration.SmartyDashes.DoubleEmdashTripleEndash:
						AppendSpecial(OpenMarkdown.SpecialKind.Emdash,
									  doc, context, ref accum);
						break;
					case Configuration.SmartyDashes.TripleEmdashDoubleEndash:
						AppendSpecial(OpenMarkdown.SpecialKind.Endash,
									  doc, context, ref accum);
						break;
					}
					break;

				case Tokenizer.Token.Kind.TripleDash:
					switch (doc.Config.DashesStyle) {
					case Configuration.SmartyDashes.DoubleEmdashTripleEndash:
						AppendSpecial(OpenMarkdown.SpecialKind.Endash,
									  doc, context, ref accum);
						break;
					case Configuration.SmartyDashes.TripleEmdashDoubleEndash:
						AppendSpecial(OpenMarkdown.SpecialKind.Emdash,
									  doc, context, ref accum);
						break;
					}
					break;

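				// "..." becomes an ellipsis entity; AppendSentenceEnd then
				// checks whether this also ends a sentence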
				case Tokenizer.Token.Kind.Ellipsis:
					AppendSpecial(OpenMarkdown.SpecialKind.Ellipsis,
								  doc, context, ref accum);
					AppendSentenceEnd(tokens, ref i, ref tok,
									  doc, context, ref accum);
					break;

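				// Tokens the tokenizer has already classified map straight to
				// their corresponding special entities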
				case Tokenizer.Token.Kind.UnbreakableSpace:
					AppendSpecial(OpenMarkdown.SpecialKind.UnbreakableSpace,
								  doc, context, ref accum);
					break;

				case Tokenizer.Token.Kind.OpenDoubleQuote:
					AppendSpecial(OpenMarkdown.SpecialKind.OpenDoubleQuote,
								  doc, context, ref accum);
					break;

				case Tokenizer.Token.Kind.CloseDoubleQuote:
					AppendSpecial(OpenMarkdown.SpecialKind.CloseDoubleQuote,
								  doc, context, ref accum);
					AppendSentenceEnd(tokens, ref i, ref tok,
									  doc, context, ref accum);
					break;

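				// A bare single quote opens after whitespace (or at the start),
				// closes before punctuation, whitespace, or the end of the
				// stream, and is otherwise kept literally as an apostrophe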
				case Tokenizer.Token.Kind.SingleQuote:
					if (lastToken == null ||
						lastToken.TokenKind == Tokenizer.Token.Kind.Whitespace) {
						AppendSpecial(OpenMarkdown.SpecialKind.OpenSingleQuote,
									  doc, context, ref accum);
						break;
					}
					else if (i + 1 == tokens.Count) {
						AppendSpecial(OpenMarkdown.SpecialKind.CloseSingleQuote,
									  doc, context, ref accum);
						AppendSentenceEnd(tokens, ref i, ref tok,
										  doc, context, ref accum);
						break;
					}
					else {
						Tokenizer.Token.Kind kind = tokens[i + 1].TokenKind;
						switch (kind) {
						case Tokenizer.Token.Kind.QuestionMark:
						case Tokenizer.Token.Kind.ExclamationMark:
						case Tokenizer.Token.Kind.Comma:
						case Tokenizer.Token.Kind.Period:
						case Tokenizer.Token.Kind.Semicolon:
						case Tokenizer.Token.Kind.Colon:
						case Tokenizer.Token.Kind.CloseParen:
						case Tokenizer.Token.Kind.Whitespace:
							AppendSpecial(OpenMarkdown.SpecialKind.CloseSingleQuote,
										  doc, context, ref accum);
							AppendSentenceEnd(tokens, ref i, ref tok,
											  doc, context, ref accum);
							break;
						default:
							accum.Append(tok.Content);
							break;
						}
					}
					break;

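				// A straight double quote opens after whitespace (or at the
				// start); it closes after sentence punctuation, before closing
				// punctuation or whitespace, or at the end of the stream;
				// anything else keeps the literal character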
				case Tokenizer.Token.Kind.DoubleQuote:
					if (lastToken == null ||
						lastToken.TokenKind == Tokenizer.Token.Kind.Whitespace) {
						AppendSpecial(OpenMarkdown.SpecialKind.OpenDoubleQuote,
									  doc, context, ref accum);
						break;
					}
					else if (lastToken.TokenKind == Tokenizer.Token.Kind.QuestionMark ||
							 lastToken.TokenKind == Tokenizer.Token.Kind.ExclamationMark ||
							 lastToken.TokenKind == Tokenizer.Token.Kind.Ellipsis ||
							 lastToken.TokenKind == Tokenizer.Token.Kind.Period) {
						AppendSpecial(OpenMarkdown.SpecialKind.CloseDoubleQuote,
									  doc, context, ref accum);
						AppendSentenceEnd(tokens, ref i, ref tok,
										  doc, context, ref accum);
						break;
					}
					else if (i + 1 == tokens.Count) {
						AppendSpecial(OpenMarkdown.SpecialKind.CloseDoubleQuote,
									  doc, context, ref accum);
						AppendSentenceEnd(tokens, ref i, ref tok,
										  doc, context, ref accum);
						break;
					}
					else {
						Tokenizer.Token.Kind kind = tokens[i + 1].TokenKind;
						switch (kind) {
						case Tokenizer.Token.Kind.Whitespace:
						case Tokenizer.Token.Kind.UnbreakableSpace:
						case Tokenizer.Token.Kind.QuestionMark:
						case Tokenizer.Token.Kind.ExclamationMark:
						case Tokenizer.Token.Kind.Comma:
						case Tokenizer.Token.Kind.Period:
						case Tokenizer.Token.Kind.Semicolon:
						case Tokenizer.Token.Kind.Colon:
						case Tokenizer.Token.Kind.CloseParen:
						case Tokenizer.Token.Kind.SingleQuote:
						case Tokenizer.Token.Kind.SingleDash:
						case Tokenizer.Token.Kind.DoubleDash:
						case Tokenizer.Token.Kind.TripleDash:
						case Tokenizer.Token.Kind.Ellipsis:
						case Tokenizer.Token.Kind.Referral:
							AppendSpecial(OpenMarkdown.SpecialKind.CloseDoubleQuote,
										  doc, context, ref accum);
							AppendSentenceEnd(tokens, ref i, ref tok,
											  doc, context, ref accum);
							break;
						default:
							accum.Append(tok.Content);
							break;
						}
					}
					break;

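				// Ordinary punctuation is passed through unchanged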
				case Tokenizer.Token.Kind.SingleDash:
				case Tokenizer.Token.Kind.BackQuote:
				case Tokenizer.Token.Kind.Comma:
				case Tokenizer.Token.Kind.Semicolon:
				case Tokenizer.Token.Kind.Colon:
				case Tokenizer.Token.Kind.OpenParen:
					accum.Append(tok.Content);
					break;

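				// Sentence-ending punctuation is passed through, then
				// AppendSentenceEnd handles the sentence boundary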
				case Tokenizer.Token.Kind.QuestionMark:
				case Tokenizer.Token.Kind.ExclamationMark:
				case Tokenizer.Token.Kind.CloseParen:
				case Tokenizer.Token.Kind.Period:
					accum.Append(tok.Content);
					AppendSentenceEnd(tokens, ref i, ref tok,
									  doc, context, ref accum);
					break;

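				// Plain text: when wiki-links are enabled and the token matches
				// the wiki-link pattern, flush the pending text and wrap the
				// token in a <wikilink> element; otherwise just accumulate it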
				case Tokenizer.Token.Kind.Text:
					if (doc.Config.UseWikiLinks) {
						Match m = wikiLinkRe.Match(tok.Content);
						if (m.Success) {
							AppendText(doc, context, ref accum);

							XmlElement elem = doc.CreateElement("wikilink");
							XmlText value = doc.Document.CreateTextNode(tok.Content);
							elem.AppendChild(value);
							context.AppendChild(elem);
							break;
						}
					}
					accum.Append(tok.Content);
					break;

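				// Whitespace normally accumulates, but when SpacesAroundDashes
				// is set and a dash follows, the surrounding spaces are
				// swallowed and the dash entity is emitted in their place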
				case Tokenizer.Token.Kind.Whitespace:
					bool append = true;
					if (doc.Config.SpacesAroundDashes && i + 1 < tokens.Count) {
						Tokenizer.Token.Kind kind = tokens[i + 1].TokenKind;
						switch (kind) {
						case Tokenizer.Token.Kind.DoubleDash:
							switch (doc.Config.DashesStyle) {
							case Configuration.SmartyDashes.DoubleEmdashNoEndash:
							case Configuration.SmartyDashes.DoubleEmdashTripleEndash:
								AppendSpecial(OpenMarkdown.SpecialKind.Emdash,
											  doc, context, ref accum);
								break;
							case Configuration.SmartyDashes.TripleEmdashDoubleEndash:
								AppendSpecial(OpenMarkdown.SpecialKind.Endash,
											  doc, context, ref accum);
								break;
							}
							append = false;
							break;

						case Tokenizer.Token.Kind.TripleDash:
							switch (doc.Config.DashesStyle) {
							case Configuration.SmartyDashes.DoubleEmdashTripleEndash:
								AppendSpecial(OpenMarkdown.SpecialKind.Endash,
											  doc, context, ref accum);
								break;
							case Configuration.SmartyDashes.TripleEmdashDoubleEndash:
								AppendSpecial(OpenMarkdown.SpecialKind.Emdash,
											  doc, context, ref accum);
								break;
							}
							append = false;
							break;
						}

						if (! append) {
							i++;
							if (i + 1 < tokens.Count &&
								tokens[i + 1].TokenKind == Tokenizer.Token.Kind.Whitespace)
								i++;
						}
					}

					if (append)
						accum.Append(tok.Content);
					break;
				}

				lastToken = tok;
			}

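			// Flush any text still sitting in the accumulator into the context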
			AppendText(doc, context, ref accum);
		}