// A null delimiter passed to GetTokenSequence should fall back to the
// default single-space delimiter between tokens.
public void DefaultTokenDelimiterUsedWhenTokenSequenceSpecifiesNullDelimiter()
{
    var sut = new FileNameTokenizer("File_Name_Test_32B");

    string sequence = sut.GetTokenSequence(0, 4, null);

    Assert.AreEqual("File Name Test 32 B", sequence);
}
// A word immediately followed by a number should split into two tokens,
// each retrievable by index, with the tokenized name "%0%1".
public void MultipleTokensCreatedWhenNumbersAndWordsExist()
{
    const string wordPart = "Test";
    const string numberPart = "21";
    var sut = new FileNameTokenizer(wordPart + numberPart);

    Assert.AreEqual(2, sut.TokenCount);
    Assert.AreEqual(wordPart, sut[0]);
    Assert.AreEqual(numberPart, sut[1]);
    Assert.AreEqual("%0%1", sut.TokenizedFileName);
}
// The tokenizer's enumerator must yield tokens in their original order.
// NOTE(review): "Currect" typo in the method name is kept deliberately —
// renaming a test method changes its externally visible identity (test
// discovery / reporting history).
public void EnumeratorEnumeratesInCurrectOrder()
{
    const string token = "Test";
    const string token2 = "21";
    FileNameTokenizer tokenizer = new FileNameTokenizer(token + token2);

    // The previous foreach/flip-variable version passed vacuously when the
    // enumerator yielded nothing and could not detect extra trailing items.
    // Comparing by index and asserting the final count closes both holes.
    string[] expectedOrder = { token, token2 };
    int index = 0;
    foreach (string tokenValue in tokenizer)
    {
        Assert.AreEqual(expectedOrder[index], tokenValue);
        index++;
    }

    Assert.AreEqual(expectedOrder.Length, index, "Enumerator yielded an unexpected number of tokens.");
}
// A name made purely of symbols should produce zero tokens and leave the
// tokenized form identical to the original input.
// NOTE(review): the method name reads "...WhenNumbersOrAlphaCharsExist" but
// the input contains neither — it presumably should say "...WhenNoNumbersOr
// AlphaCharsExist"; kept as-is to preserve the test's external identity.
public void NoTokensCreatedWhenNumbersOrAlphaCharsExist()
{
    var sut = new FileNameTokenizer("^#*@(#*");

    Assert.AreEqual(sut.OriginalFileName, sut.TokenizedFileName);
    Assert.AreEqual(0, sut.TokenCount, "No tokens created.");
}
// Mixed words, digits, and symbols: words and digit runs become %N tokens
// while the symbol characters are passed through verbatim.
public void FileNameInCorrectFormatWhenManySymbolsWordsAndNumbersUsed()
{
    var sut = new FileNameTokenizer("this is *** a t3st. 201 ok!");

    string tokenized = sut.TokenizedFileName;

    Assert.AreEqual("%0 %1 *** %2 %3%4%5. %6 %7!", tokenized);
}
// A negative start index is outside the valid token range and must be rejected.
// NOTE(review): the original body only invoked GetTokenSequence with no
// assertion and no visible [ExpectedException] attribute, so the test could
// never verify an exception was actually thrown. Asserting explicitly fixes
// that and documents the expected failure mode at the call site.
public void ExceptionThrownWhenStartIndexNotValidForTokenSequence()
{
    FileNameTokenizer tokenizer = new FileNameTokenizer("test");

    // Assert.Catch accepts ArgumentException or any derived type
    // (e.g. ArgumentOutOfRangeException) — TODO(review): confirm the
    // concrete exception type GetTokenSequence throws and tighten this.
    Assert.Catch<ArgumentException>(() => tokenizer.GetTokenSequence(-1, 1));
}
// A start index past the end index is an invalid range and must be rejected.
// NOTE(review): as with the sibling start-index test, the original body had
// no assertion and no visible [ExpectedException] attribute, so it could not
// verify that an exception was thrown. ("GreaterThen" typo in the name is
// kept to preserve the test's external identity.)
public void ExceptionThrownWhenStartIndexGreaterThenEndIndexForTokenSequence()
{
    FileNameTokenizer tokenizer = new FileNameTokenizer("test name");

    // TODO(review): confirm the concrete exception type and tighten if needed.
    Assert.Catch<ArgumentException>(() => tokenizer.GetTokenSequence(1, 0));
}
// A file name that is a single word becomes exactly one token, and the
// tokenized name is the lone marker "%0".
public void TokenUsedWhenFileNameIsWord()
{
    const string word = "Test";
    var sut = new FileNameTokenizer(word);

    Assert.AreEqual(1, sut.TokenCount);
    Assert.AreEqual(word, sut[0]);
    Assert.AreEqual("%0", sut.TokenizedFileName);
}
// ToString should report the original name and the tokenized form on
// separate lines: "Original: <name><newline>Tokenized: <tokenized>".
public void ToStringInCorrectFormat()
{
    const string token = "Test";
    const string token2 = "21";
    FileNameTokenizer tokenizer = new FileNameTokenizer(token + token2);

    // String interpolation over String.Format: same resulting string,
    // clearer at the call site (no positional placeholder bookkeeping).
    string expected = $"Original: {token + token2}{Environment.NewLine}Tokenized: %0%1";

    Assert.AreEqual(expected, tokenizer.ToString());
}
// An explicit delimiter passed to GetTokenSequence should be used verbatim
// between every pair of tokens.
public void TokenSequenceHasCorrectDelimiterWhenTokenSequenceDelimiterSpecified()
{
    var sut = new FileNameTokenizer("File_Name_Test");

    string sequence = sut.GetTokenSequence(0, 2, "*");

    Assert.AreEqual("File*Name*Test", sequence);
}
// Requesting the full token range should join every token with the default
// single-space delimiter.
public void TokenSequenceHasAllTokensWhenTokenSequenceRequested()
{
    var sut = new FileNameTokenizer("File_Name_Test_32B");

    string sequence = sut.GetTokenSequence(0, 4);

    Assert.AreEqual("File Name Test 32 B", sequence);
}
// A literal '%' in the original name must be escaped (doubled) in the
// tokenized form so it cannot be confused with a %N token marker.
public void TokenMarkerEscapedWhenUsedInOriginalString()
{
    var sut = new FileNameTokenizer("%");

    string tokenized = sut.TokenizedFileName;

    Assert.AreEqual("%%", tokenized);
}
// An empty file name produces zero tokens, and the tokenized form equals
// the (empty) original.
public void TokenCountZeroWhenFileNameEmpty()
{
    var sut = new FileNameTokenizer("");

    Assert.AreEqual(sut.OriginalFileName, sut.TokenizedFileName);
    Assert.AreEqual(0, sut.TokenCount, "No tokens created.");
}
// When the start and end indices coincide, GetTokenSequence should return
// just that single token with no delimiter.
public void OneTokenReturnedWhenTokenSequencesStartAndEndIndexTheSame()
{
    const string expected = "test";
    var sut = new FileNameTokenizer(expected);

    string sequence = sut.GetTokenSequence(0, 0);

    Assert.AreEqual(expected, sequence);
}