gpt2-large/config.json

{
  "activation_function": "gelu_new",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt2",
  "n_ctx": 1024,
  "n_embd": 1280,
  "n_head": 20,
  "n_layer": 36,
  "n_positions": 1024,
  "resid_pdrop": 0.1,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "task_specific_params": {
    "text-generation": {
      "do_sample": true,
      "max_length": 50
    }
  },
  "vocab_size": 50257
}
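
A minimal sketch, assuming the Hugging Face transformers library is installed, of how this config can be loaded and used to instantiate the model it describes. The JSON keys above surface as attributes on the config object.

# Sketch: load the gpt2-large config from the Hub (or pass a local
# path to config.json) and build the corresponding model.
from transformers import GPT2Config, GPT2LMHeadModel

config = GPT2Config.from_pretrained("gpt2-large")

# Fields from the JSON above are exposed as attributes.
print(config.n_layer, config.n_head, config.n_embd)    # 36 20 1280
print(config.task_specific_params["text-generation"])  # {'do_sample': True, 'max_length': 50}

# Constructing from the config alone gives randomly initialized weights
# with these shapes; GPT2LMHeadModel.from_pretrained("gpt2-large") would
# also download the trained weights.
model = GPT2LMHeadModel(config)

Note that "task_specific_params" holds per-task generation defaults; the values under "text-generation" (do_sample, max_length) are picked up as defaults by the text-generation pipeline when it runs this model.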