Update config.json
parent 5e0fbb6164
commit 7289c30e30
@@ -0,0 +1,56 @@
{
  "_num_labels": 2,
  "architectures": [
    "RobertaForQuestionAnswering"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "decoder_start_token_id": null,
  "do_sample": false,
  "early_stopping": false,
  "eos_token_id": 2,
  "finetuning_task": null,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "is_decoder": false,
  "is_encoder_decoder": false,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1
  },
  "language": "english",
  "layer_norm_eps": 1e-05,
  "length_penalty": 1.0,
  "max_length": 20,
  "max_position_embeddings": 514,
  "min_length": 0,
  "model_type": "roberta",
  "name": "Roberta",
  "no_repeat_ngram_size": 0,
  "num_attention_heads": 12,
  "num_beams": 1,
  "num_hidden_layers": 12,
  "num_return_sequences": 1,
  "output_attentions": false,
  "output_hidden_states": false,
  "output_past": true,
  "pad_token_id": 1,
  "prefix": null,
  "pruned_heads": {},
  "repetition_penalty": 1.0,
  "task_specific_params": null,
  "temperature": 1.0,
  "top_k": 50,
  "top_p": 1.0,
  "torchscript": false,
  "type_vocab_size": 1,
  "use_bfloat16": false,
  "vocab_size": 50265
}