fix: Update count_tokens function in utils.py

This commit is contained in:
Alex
2024-05-04 10:39:23 +01:00
parent de0193fffc
commit 431755144e

View File

@@ -1,6 +1,6 @@
from transformers import GPT2TokenizerFast

# Load the tokenizer exactly once, at import time. `from_pretrained` is
# expensive (reads vocab/merges files from disk and may hit the network on a
# cold cache), so it must not run on every count_tokens call.
tokenizer = GPT2TokenizerFast.from_pretrained('gpt2')


def count_tokens(string):
    """Return the number of GPT-2 BPE tokens in *string*.

    Parameters
    ----------
    string : str
        Text to tokenize.

    Returns
    -------
    int
        Length of the ``input_ids`` sequence produced by the GPT-2
        tokenizer for *string*.
    """
    # Reuse the module-level tokenizer instead of re-instantiating it
    # per call; tokenization itself is cheap, loading is not.
    return len(tokenizer(string)['input_ids'])