diff --git a/config.json b/config.json
index 016e349..9e61bcd 100644
--- a/config.json
+++ b/config.json
@@ -1,16 +1,22 @@
 {
-  "vocab_size": 30522,
-  "max_position_embeddings": 512,
-  "sinusoidal_pos_embds": true,
-  "n_layers": 6,
-  "n_heads": 12,
-  "dim": 768,
-  "hidden_dim": 3072,
-  "dropout": 0.1,
-  "attention_dropout": 0.1,
-  "activation": "gelu",
-  "initializer_range": 0.02,
-  "tie_weights_": true,
-  "seq_classif_dropout": 0.2,
-  "qa_dropout": 0.1
-}
\ No newline at end of file
+  "activation": "gelu",
+  "attention_dropout": 0.1,
+  "dim": 768,
+  "dropout": 0.1,
+  "finetuning_task": null,
+  "hidden_dim": 3072,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "n_heads": 12,
+  "n_layers": 6,
+  "num_labels": 2,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pruned_heads": {},
+  "qa_dropout": 0.1,
+  "seq_classif_dropout": 0.2,
+  "sinusoidal_pos_embds": true,
+  "tie_weights_": true,
+  "torchscript": false,
+  "vocab_size": 30522
+}
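
For reference, a minimal sketch of loading the updated config, assuming it is consumed through the Transformers DistilBertConfig class (the "config.json" path and the surrounding script are illustrative, not part of this change):

    import json
    from transformers import DistilBertConfig

    # Load the serialized configuration from disk (path is illustrative).
    config = DistilBertConfig.from_json_file("config.json")

    # The newer serialization also carries base-class fields such as
    # num_labels, output_attentions, output_hidden_states, pruned_heads,
    # finetuning_task, and torchscript, with keys written in sorted order.
    print(json.dumps(json.loads(config.to_json_string()), indent=2, sort_keys=True))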