Update README.md, increase max_new_tokens

This commit is contained in:
Gavin Li
2023-11-29 20:14:41 -06:00
committed by GitHub
parent 456e4f09b5
commit 04da4fcfe9

View File

@@ -54,7 +54,7 @@ input_tokens = model.tokenizer(input_text,
generation_output = model.generate(
input_tokens['input_ids'].cuda(),
-max_new_tokens=2,
+max_new_tokens=20,
use_cache=True,
return_dict_in_generate=True)