Change BOS token from 0 to 2 as BOS token is equal to EOS for OPT. See: https://github.com/huggingface/transformers/issues/17431 (#1)

- Change BOS token from 0 to 2 as BOS token is equal to EOS for OPT. See: https://github.com/huggingface/transformers/issues/17431 (0a3f77286e1f0367b3b698f2b43696f36ac439a0)
This commit is contained in:
patrickvonplaten 2022-05-26 15:44:17 +00:00 committed by system
parent fdb605569e
commit 80fcb577f9
1 changed file with 1 addition and 1 deletion

View File

@@ -5,7 +5,7 @@
 "OPTForCausalLM"
 ],
 "attention_dropout": 0.0,
-"bos_token_id": 0,
+"bos_token_id": 2,
 "hidden_size": 2048,
 "do_layer_norm_before": true,
 "dropout": 0.1,