initial commit

This commit is contained in:
Jong Wook Kim
2022-09-22 00:43:20 +09:00
commit 6e3be77e1a
39 changed files with 107388 additions and 0 deletions

14
tests/test_tokenizer.py Normal file
View File

@@ -0,0 +1,14 @@
from whisper.tokenizer import get_tokenizer
def test_tokenizer():
    """Round-trip a Korean sentence through both tokenizers.

    Checks that encoding followed by decoding is lossless for the
    English-only (GPT-2) tokenizer and the multilingual tokenizer, and
    that the multilingual vocabulary encodes Korean more compactly.
    """
    text = "다람쥐 헌 쳇바퀴에 타고파"

    english_only = get_tokenizer(multilingual=False)
    multilingual = get_tokenizer(multilingual=True)

    tokens_en = english_only.encode(text)
    tokens_multi = multilingual.encode(text)

    # Both tokenizers must reproduce the original text exactly.
    assert english_only.decode(tokens_en) == text
    assert multilingual.decode(tokens_multi) == text
    # The multilingual tokenizer should need fewer tokens for Korean.
    assert len(tokens_en) > len(tokens_multi)