diff --git a/config.json b/config.json
index 3b2be1c..1c51480 100644
--- a/config.json
+++ b/config.json
@@ -1 +1,47 @@
-{"_num_labels": 3, "activation_dropout": 0.0, "activation_function": "swish", "add_bias_logits": false, "add_final_layer_norm": false, "architectures": ["MarianMTModel"], "attention_dropout": 0.0, "bos_token_id": 0, "classif_dropout": 0.0, "d_model": 512, "decoder_attention_heads": 8, "decoder_ffn_dim": 2048, "decoder_layerdrop": 0.0, "decoder_layers": 6, "dropout": 0.1, "encoder_attention_heads": 8, "encoder_ffn_dim": 2048, "encoder_layerdrop": 0.0, "encoder_layers": 6, "eos_token_id": 0, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "init_std": 0.02, "is_encoder_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "max_position_embeddings": 512, "model_type": "marian", "normalize_before": false, "normalize_embedding": false, "num_beams": 6, "num_hidden_layers": 6, "pad_token_id": 58100, "scale_embedding": true, "static_position_embeddings": true, "vocab_size": 58101}
\ No newline at end of file
+{
+  "_num_labels": 3,
+  "activation_dropout": 0.0,
+  "activation_function": "swish",
+  "add_bias_logits": false,
+  "add_final_layer_norm": false,
+  "architectures": [
+    "MarianMTModel"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classif_dropout": 0.0,
+  "d_model": 512,
+  "decoder_attention_heads": 8,
+  "decoder_ffn_dim": 2048,
+  "decoder_layerdrop": 0.0,
+  "decoder_layers": 6,
+  "decoder_start_token_id": 58100,
+  "dropout": 0.1,
+  "encoder_attention_heads": 8,
+  "encoder_ffn_dim": 2048,
+  "encoder_layerdrop": 0.0,
+  "encoder_layers": 6,
+  "eos_token_id": 0,
+  "id2label": {
+    "0": "LABEL_0",
+    "1": "LABEL_1",
+    "2": "LABEL_2"
+  },
+  "init_std": 0.02,
+  "is_encoder_decoder": true,
+  "label2id": {
+    "LABEL_0": 0,
+    "LABEL_1": 1,
+    "LABEL_2": 2
+  },
+  "max_position_embeddings": 512,
+  "model_type": "marian",
+  "normalize_before": false,
+  "normalize_embedding": false,
+  "num_beams": 6,
+  "num_hidden_layers": 6,
+  "pad_token_id": 58100,
+  "scale_embedding": true,
+  "static_position_embeddings": true,
+  "vocab_size": 58101
+}
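As a quick sanity check on the updated config, a minimal sketch (assuming only that `config.json` sits in the working directory; everything it asserts is read straight from the fields in this diff) can confirm that the values the commit adds or touches stay mutually consistent:

```python
import json

# Load the updated config from the current directory, as laid out in this diff.
with open("config.json") as f:
    cfg = json.load(f)

# Marian decoders start generation from the pad token, so the newly added
# decoder_start_token_id should equal pad_token_id (58100 in this file).
assert cfg["decoder_start_token_id"] == cfg["pad_token_id"] == 58100

# model_type must stay "marian" so that transformers' AutoConfig resolves the
# checkpoint to MarianConfig/MarianMTModel rather than a BART class.
assert cfg["model_type"] == "marian"
assert cfg["architectures"] == ["MarianMTModel"]

# The pad token is the last id in the embedding matrix, hence vocab_size
# is pad_token_id + 1 (58101).
assert cfg["vocab_size"] == cfg["pad_token_id"] + 1
```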