updated test because the new tokenizer removes URLs

Former-commit-id: 35f472fcf9
Joshua Chin 2015-06-18 11:38:28 -04:00
parent b7f125e1d9
commit 529aa9afde


@@ -105,7 +105,7 @@ def test_not_really_random():
     # This not only tests random_ascii_words, it makes sure we didn't end
     # up with 'eos' as a very common Japanese word
     eq_(random_ascii_words(nwords=4, lang='ja', bits_per_word=0),
-        'http http http http')
+        'rt rt rt rt')
 
 
 @raises(ValueError)
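
For context on the change above: once the tokenizer strips URLs, 'http' stops being counted as an extremely common word, and another token ('rt', likely a retweet marker in Twitter-derived data) presumably takes its place in the test's expected output. Below is a minimal sketch of what URL-stripping tokenization could look like; the regex, function name, and splitting rule are illustrative assumptions, not the project's actual implementation.

import re

# Hypothetical illustration of URL removal during tokenization; the regex,
# function name, and splitting rule are assumptions, not the project's
# actual tokenizer.
URL_RE = re.compile(r'https?://\S+|www\.\S+')

def tokenize_without_urls(text):
    """Drop URL-like spans, then split the remaining text into lowercase word tokens."""
    text = URL_RE.sub(' ', text)
    return [tok for tok in re.split(r'\W+', text.lower()) if tok]

# With URLs stripped, 'http' no longer appears as a token at all.
print(tokenize_without_urls('RT @user see http://example.com'))
# -> ['rt', 'user', 'see']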