distilbert-base-uncased/config.json

{
  "activation": "gelu",
  "architectures": [
    "DistilBertForMaskedLM"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "initializer_range": 0.02,
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "pad_token_id": 0,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "transformers_version": "4.10.0.dev0",
  "vocab_size": 30522
}
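
For context, a minimal sketch of consuming this configuration with the Hugging Face transformers library (assuming transformers is installed; the model id and attribute names below are taken from this file):

from transformers import DistilBertConfig, DistilBertForMaskedLM

# Fetch the hosted config by model id; for a local copy of this file,
# DistilBertConfig.from_json_file("config.json") works instead.
config = DistilBertConfig.from_pretrained("distilbert-base-uncased")

print(config.model_type)  # "distilbert"
print(config.n_layers)    # 6
print(config.dim)         # 768

# Building the model from the config alone yields randomly initialized
# weights; DistilBertForMaskedLM.from_pretrained(...) loads trained ones.
model = DistilBertForMaskedLM(config)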