/// <summary>
/// Verifies that <c>Detokenizer.GetLines</c> converts a tokenized BASIC memory
/// image (embedded here as base64) back into a readable source listing.
/// The expected string shows this is Apple II BASIC ("BE SURE CAPS LOCK IS DOWN").
/// </summary>
public void GetLinesTest()
{
    // Stage: raw tokenized program bytes. Convert.FromBase64String ignores
    // the whitespace embedded in the verbatim literal.
    var example = Convert.FromBase64String(@"owEJCAoAiTqXAB4IFABEJNDnKDQpOrIgQ1RSTC1EADkIHgCiMjpBJNAiQVBQTEUgSUkiOrAxMDAw AGoIKACiNDpBJNAiRE9TIFZFUlNJT04gMy4zICBTWVNURU0gTUFTVEVSIjqwMTAwMACMCDIAojc6 QSTQIkpBTlVBUlkgMSwgMTk4MyI6sDEwMDAAqAg8ALpEJDsiQkxPQUQgTE9BREVSLk9CSjAiAM8I RgCMNDA5NjqyIEZBU1QgTE9BRCBJTiBJTlRFR0VSIEJBU0lDABAJUACiMTA6jMk5NTg6QSTQIkNP UFlSSUdIVCBBUFBMRSBDT01QVVRFUixJTkMuIDE5ODAsMTk4MiI6sDEwMDAATwlaAEPQ4ijJMTEw MSk6rUPQNsS6Op46QSTQIkJFIFNVUkUgQ0FQUyBMT0NLIElTIERPV04iOrAxMDAwOp0AXglkALrn KDQpOyJGUCIAdQnoA7IgQ0VOVEVSIFNUUklORyBBJACVCfIDQtDTKDIwySjjKEEkKcsyKSk6rULQ 0TDEQtAxAKIJ/AOWQjq6QSQ6sQAAAIcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=");
    // (Removed dead local: `var bytes = example.Count(b => b != 0);` was
    // computed but never used.)

    //Test
    var detokenizer = new Detokenizer();
    var lines = detokenizer.GetLines(example);

    //Output: dump the full detokenized listing for diagnostics.
    var result = string.Join("\r\n", lines);
    this.TestContext.WriteLine(result);

    //Assert: the tenth line (index 9) must round-trip to the expected statement.
    var line9 = lines.ElementAt(9);
    var expected = "90 C= PEEK ( - 1101):IF C= 6 THEN PRINT :INVERSE :A$= \"BE SURE CAPS LOCK IS DOWN\":GOSUB 1000:NORMAL ";
    Assert.AreEqual(expected, line9);
}
/// <summary>
/// Wraps a <c>POSSample</c> stream; the supplied detokenizer is stored for use
/// when samples are converted.
/// </summary>
/// <param name="detokenizer">Detokenizer to apply to samples; must not be null.</param>
/// <param name="samples">The underlying POS sample stream to wrap.</param>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="detokenizer"/> is null. (Subclass of the previously thrown
/// <c>ArgumentException</c>, so existing catch blocks still match.)
/// </exception>
public POSToTokenSampleStream(Detokenizer detokenizer, ObjectStream <POSSample> samples) : base(samples)
{
    if (detokenizer == null)
    {
        throw new System.ArgumentNullException(nameof(detokenizer), "detokenizer must not be null!");
    }

    this.detokenizer = detokenizer;
}
static void Main(string[] args)
{
    // The delimiter wraps each placeholder on both sides, e.g. %FirstName%.
    string tokenEnd = "%";
    string template = $"Firstname: {tokenEnd}FirstName{tokenEnd}. Age: {tokenEnd}Age{tokenEnd}. Created: {tokenEnd}Created{tokenEnd}";

    // Demo 1: placeholder values come from a strongly-typed model.
    Console.WriteLine("Using model");
    var model = new CustomModel
    {
        FirstName = "Anthony",
        Age = 39,
        Created = new DateTime(2019, 1, 1)
    };
    var result = Detokenizer.Detokenize(model, template, tokenEnd);
    Console.WriteLine(result);
    Console.WriteLine();

    // Demo 2: placeholder values come from a plain key/value dictionary.
    Console.WriteLine("Using dictionary");
    var plainEntries = new Dictionary<string, string>
    {
        { "FirstName", "Anthony" },
        { "Age", "39" },
        { "Created", new DateTime(2019, 1, 1).ToString() }
    };
    result = Detokenizer.Detokenize(plainEntries, template, tokenEnd);
    Console.WriteLine(result);
    Console.WriteLine();

    // Demo 3: the keys already carry the delimiter themselves, so no token
    // argument is passed to Detokenize.
    Console.WriteLine("Using dictionary with explicit token as part of dictionary items");
    var tokenedEntries = new Dictionary<string, string>
    {
        { $"{tokenEnd}FirstName{tokenEnd}", "Anthony" },
        { $"{tokenEnd}Age{tokenEnd}", "39" },
        { $"{tokenEnd}Created{tokenEnd}", new DateTime(2019, 1, 1).ToString() }
    };
    result = Detokenizer.Detokenize(tokenedEntries, template);
    Console.WriteLine(result);

    // Keep the console window open until the user presses Enter.
    Console.ReadLine();
}
public void Template_Is_Detokenized_With_Model()
{
    // Arrange: placeholders are wrapped on both sides by the token delimiter.
    string tokenEnd = "%%";
    string template = $"Firstname: {tokenEnd}FirstName{tokenEnd}. Age: {tokenEnd}Age{tokenEnd}. Created: {tokenEnd}Created{tokenEnd}";
    var model = new CustomModel
    {
        FirstName = "Anthony",
        Age = 39,
        Created = new DateTime(2019, 1, 1)
    };

    // Act
    var actual = Detokenizer.Detokenize(model, template, tokenEnd);

    // Assert: each placeholder was replaced by the matching model property.
    var expected = $"Firstname: {model.FirstName}. Age: {model.Age}. Created: {model.Created}";
    Assert.Equal(expected, actual);
}
public void Template_Is_Detokenized_With_Tokened_Dictionary()
{
    // Arrange: dictionary keys already include the wrapping token on both
    // sides, so Detokenize is called without an explicit token argument.
    string tokenEnd = "%%";
    string template = $"Firstname: {tokenEnd}FirstName{tokenEnd}. Age: {tokenEnd}Age{tokenEnd}. Created: {tokenEnd}Created{tokenEnd}";
    string firstNameKey = $"{tokenEnd}FirstName{tokenEnd}";
    string ageKey = $"{tokenEnd}Age{tokenEnd}";
    string createdKey = $"{tokenEnd}Created{tokenEnd}";
    var dictionary = new Dictionary<string, string>
    {
        { firstNameKey, "Anthony" },
        { ageKey, "39" },
        { createdKey, new DateTime(2019, 1, 1).ToString() }
    };

    // Act
    var actual = Detokenizer.Detokenize(dictionary, template);

    // Assert: every tokened key was substituted with its dictionary value.
    var expected = $"Firstname: {dictionary[firstNameKey]}. Age: {dictionary[ageKey]}. Created: {dictionary[createdKey]}";
    Assert.Equal(expected, actual);
}
/// <summary>
/// Base stream that folds samples into sentence chunks.
/// </summary>
/// <param name="detokenizer">Detokenizer to apply to samples; must not be null.</param>
/// <param name="samples">The underlying sample stream to wrap.</param>
/// <param name="chunkSize">
/// Number of samples per chunk; zero means unlimited (mapped to <c>int.MaxValue</c>).
/// </param>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="detokenizer"/> is null.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// If <paramref name="chunkSize"/> is negative.
/// </exception>
/// <remarks>
/// Both exception types derive from the previously thrown
/// <c>ArgumentException</c>, so existing catch blocks still match.
/// </remarks>
internal AbstractToSentenceSampleStream(Detokenizer detokenizer, ObjectStream <T> samples, int chunkSize) : base(samples)
{
    if (detokenizer == null)
    {
        throw new System.ArgumentNullException(nameof(detokenizer), "detokenizer must not be null!");
    }
    this.detokenizer = detokenizer;

    if (chunkSize < 0)
    {
        throw new System.ArgumentOutOfRangeException(nameof(chunkSize), "chunkSize must be zero or larger but was " + chunkSize + "!");
    }
    // Zero means "no chunk limit": fold the whole stream into one chunk.
    this.chunkSize = chunkSize > 0 ? chunkSize : int.MaxValue;
}
/// <summary>
/// Wraps a <c>NameSample</c> stream; the supplied detokenizer is stored for
/// use when samples are converted.
/// </summary>
/// <param name="detokenizer">Detokenizer to apply to samples; must not be null.</param>
/// <param name="samples">The underlying name sample stream to wrap.</param>
/// <exception cref="System.ArgumentNullException">
/// If <paramref name="detokenizer"/> is null.
/// </exception>
public NameToTokenSampleStream(Detokenizer detokenizer, ObjectStream <NameSample> samples) : base(samples)
{
    // Validate eagerly, matching the sibling sample-stream constructors
    // (e.g. POSToTokenSampleStream), instead of failing later with a
    // NullReferenceException when the detokenizer is first used.
    if (detokenizer == null)
    {
        throw new System.ArgumentNullException(nameof(detokenizer), "detokenizer must not be null!");
    }

    this.detokenizer = detokenizer;
}
/// <summary>
/// Pairs a tokenizer with a detokenizer for round-trip conversion.
/// </summary>
/// <param name="serializer">
/// The tokenizer; stored in the <c>tokenizer</c> field. NOTE(review): the
/// parameter name does not match the field — kept as-is because parameter
/// names are part of the public interface (named arguments).
/// </param>
/// <param name="detokenizer">The detokenizer; stored in the <c>detokenizer</c> field.</param>
public Converter(Tokenizer serializer, Detokenizer detokenizer)
{
    this.detokenizer = detokenizer;
    this.tokenizer = serializer;
}
/// <summary>
/// Stream of <c>NameSample</c>s converted to sentence samples; all work is
/// delegated to the base class — this constructor only forwards its arguments.
/// </summary>
/// <param name="detokenizer">Detokenizer forwarded to the base stream (base class rejects null).</param>
/// <param name="samples">Underlying name sample stream, forwarded to the base stream.</param>
/// <param name="chunkSize">Chunk size forwarded to the base stream (base class validates the range).</param>
public NameToSentenceSampleStream(Detokenizer detokenizer, ObjectStream <NameSample> samples, int chunkSize) : base(detokenizer, samples, chunkSize) { }