diff --git a/config.json b/config.json
new file mode 100644
index 0000000..b3acc6f
--- /dev/null
+++ b/config.json
@@ -0,0 +1,45 @@
+{
+  "activation": "gelu",
+  "attention_dropout": 0.1,
+  "bos_token_id": 0,
+  "dim": 768,
+  "do_sample": false,
+  "dropout": 0.1,
+  "eos_token_ids": 0,
+  "finetuning_task": null,
+  "hidden_dim": 3072,
+  "id2label": {
+    "0": "LABEL_0",
+    "1": "LABEL_1"
+  },
+  "initializer_range": 0.02,
+  "is_decoder": false,
+  "label2id": {
+    "LABEL_0": 0,
+    "LABEL_1": 1
+  },
+  "length_penalty": 1.0,
+  "max_length": 20,
+  "max_position_embeddings": 512,
+  "n_heads": 12,
+  "n_layers": 6,
+  "num_beams": 1,
+  "num_labels": 2,
+  "num_return_sequences": 1,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "output_past": true,
+  "pad_token_id": 0,
+  "pruned_heads": {},
+  "qa_dropout": 0.1,
+  "repetition_penalty": 1.0,
+  "seq_classif_dropout": 0.2,
+  "sinusoidal_pos_embds": false,
+  "temperature": 1.0,
+  "tie_weights_": true,
+  "top_k": 50,
+  "top_p": 1.0,
+  "torchscript": false,
+  "use_bfloat16": false,
+  "vocab_size": 28996
+}
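
The key names (`dim`, `hidden_dim`, `n_heads`, `n_layers`, `sinusoidal_pos_embds`, `qa_dropout`, `seq_classif_dropout`) match the DistilBERT config schema from the Hugging Face Transformers library, and the vocabulary size of 28996 corresponds to a cased BERT/DistilBERT tokenizer. Below is a minimal sketch of one way this file could be consumed, assuming it is indeed a Transformers-style DistilBERT config; the `"config.json"` path simply refers to the file added in this diff.

```python
# A minimal sketch, assuming the file above is a DistilBERT config
# for the Hugging Face Transformers library (an assumption based on
# its key names, not stated in the diff itself).
from transformers import DistilBertConfig, DistilBertModel

# Parse the JSON file into a config object.
config = DistilBertConfig.from_json_file("config.json")

# Instantiate a randomly initialized model with these dimensions:
# 6 layers, 12 attention heads, hidden size 768, vocab size 28996.
model = DistilBertModel(config)

print(config.n_layers, config.n_heads, config.dim)  # 6 12 768
```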