Update config.json
parent f815297dfe
commit f43722e239
@@ -0,0 +1,50 @@
{
  "_num_labels": 3,
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "add_bias_logits": false,
  "add_final_layer_norm": false,
  "architectures": [
    "BartForSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "bos_token_id": 0,
  "classif_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 1,
  "dropout": 0.1,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 12,
  "eos_token_id": 2,
  "extra_pos_embeddings": 2,
  "finetuning_task": "mnli",
  "force_bos_token_to_be_generated": false,
  "id2label": {
    "0": "contradiction",
    "1": "neutral",
    "2": "entailment"
  },
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "label2id": {
    "contradiction": 0,
    "entailment": 2,
    "neutral": 1
  },
  "max_position_embeddings": 1024,
  "model_type": "bart",
  "normalize_before": false,
  "normalize_embedding": true,
  "num_hidden_layers": 12,
  "output_past": false,
  "pad_token_id": 1,
  "scale_embedding": false,
  "static_position_embeddings": false,
  "total_flos": 153130534133111808,
  "vocab_size": 50265
}
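This is the standard Hugging Face transformers config format for a BART sequence-classification checkpoint fine-tuned on MNLI, so a checkpoint directory containing this config.json alongside its weights and tokenizer files could be loaded roughly as sketched below. The local path "./bart-mnli-checkpoint" and the premise/hypothesis pair are illustrative assumptions, not part of this commit.

from transformers import BartConfig, BartForSequenceClassification, BartTokenizer

# Assumed local directory holding this config.json plus the matching
# model weights and tokenizer files (hypothetical path).
model_dir = "./bart-mnli-checkpoint"

config = BartConfig.from_pretrained(model_dir)
model = BartForSequenceClassification.from_pretrained(model_dir, config=config)
tokenizer = BartTokenizer.from_pretrained(model_dir)

# Example NLI pair (made up for illustration).
premise = "A soccer game with multiple males playing."
hypothesis = "Some men are playing a sport."

inputs = tokenizer(premise, hypothesis, return_tensors="pt")
logits = model(**inputs).logits  # shape (1, 3): contradiction / neutral / entailment

# id2label in the config maps class indices back to MNLI label names.
print(config.id2label[logits.argmax(dim=-1).item()])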