Add no_repeat_ngram_size to config to avoid repeatedly generating <s>.

Zhengbao Jiang 2022-10-26 20:36:18 +00:00 committed by huggingface-web
parent 36a2c16a6c
commit 141727c63c
1 changed file with 2 additions and 1 deletion


@@ -33,5 +33,6 @@
   "torch_dtype": "float32",
   "transformers_version": "4.17.0.dev0",
   "use_cache": true,
-  "vocab_size": 50265
+  "vocab_size": 50265,
+  "no_repeat_ngram_size": 3
 }
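
With no_repeat_ngram_size set in config.json, transformers' generate() uses it as the default whenever the argument is not passed explicitly, so no 3-gram (including a run of repeated <s> tokens) can appear twice in the output. A minimal sketch of loading and generating with this config; "your-org/your-model" is a placeholder for the actual model repo, and the seq2seq model class is an assumption:

from transformers import AutoConfig, AutoModelForSeq2SeqLM, AutoTokenizer

repo = "your-org/your-model"  # placeholder; substitute the real repo id

config = AutoConfig.from_pretrained(repo)
print(config.no_repeat_ngram_size)  # 3 after this commit

tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForSeq2SeqLM.from_pretrained(repo)  # assumed seq2seq head

inputs = tokenizer("An example input.", return_tensors="pt")
# generate() falls back to config.no_repeat_ngram_size when the argument
# is omitted, blocking any repeated 3-gram during decoding.
outputs = model.generate(**inputs)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))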