no Thai because we can't tokenize it

This commit is contained in:
Rob Speer 2015-12-02 12:38:03 -05:00
parent 8f6cd0e57b
commit 95f53e295b

View File

@ -49,7 +49,7 @@ def cld2_surface_tokenizer(text):
  # list of languages we're allowed to use here.
  KEEP_THESE_LANGUAGES = {
      'ar', 'de', 'el', 'en', 'es', 'fr', 'hr', 'id', 'it', 'ja', 'ko', 'ms',
-     'nl', 'pl', 'pt', 'ro', 'ru', 'sv', 'th'
+     'nl', 'pl', 'pt', 'ro', 'ru', 'sv'
  }