diff --git a/config.json b/config.json
new file mode 100644
index 0000000..464334c
--- /dev/null
+++ b/config.json
@@ -0,0 +1,48 @@
+{
+  "activation_dropout": 0.1,
+  "activation_function": "relu",
+  "add_bias_logits": false,
+  "add_final_layer_norm": true,
+  "architectures": [
+    "PegasusForConditionalGeneration"
+  ],
+  "attention_dropout": 0.1,
+  "bos_token_id": 0,
+  "classif_dropout": 0.0,
+  "d_model": 1024,
+  "decoder_attention_heads": 16,
+  "decoder_ffn_dim": 4096,
+  "decoder_layerdrop": 0.0,
+  "decoder_layers": 16,
+  "dropout": 0.1,
+  "encoder_attention_heads": 16,
+  "encoder_ffn_dim": 4096,
+  "encoder_layerdrop": 0.0,
+  "encoder_layers": 16,
+  "eos_token_id": 1,
+  "extra_pos_embeddings": 1,
+  "id2label": {
+    "0": "LABEL_0",
+    "1": "LABEL_1",
+    "2": "LABEL_2"
+  },
+  "init_std": 0.02,
+  "is_encoder_decoder": true,
+  "label2id": {
+    "LABEL_0": 0,
+    "LABEL_1": 1,
+    "LABEL_2": 2
+  },
+  "length_penalty": 0.8,
+  "max_length": 64,
+  "max_position_embeddings": 512,
+  "model_type": "pegasus",
+  "normalize_before": true,
+  "normalize_embedding": false,
+  "num_beams": 8,
+  "num_hidden_layers": 16,
+  "pad_token_id": 0,
+  "scale_embedding": true,
+  "static_position_embeddings": true,
+  "vocab_size": 96103
+}
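
For context, a minimal sketch of consuming this config with the Hugging Face transformers library; it assumes transformers is installed and is not part of the diff itself:

from transformers import PegasusConfig, PegasusForConditionalGeneration

# Build the architecture described by the JSON added above.
# Weights are randomly initialized here, not pretrained.
config = PegasusConfig.from_json_file("config.json")
model = PegasusForConditionalGeneration(config)

# Sanity-check a few of the hyperparameters set in this file.
assert config.is_encoder_decoder
assert config.d_model == 1024
assert config.num_beams == 8       # default beam width used at generation time
assert config.vocab_size == 96103

Note that generation defaults (num_beams, max_length, length_penalty) are baked into the config alongside the architecture fields, so model.generate() picks them up without extra arguments.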