correct config.json

patrickvonplaten 2021-01-30 22:03:26 +00:00
parent 9fc4550a36
commit 8f79ee3b90
1 changed file with 7 additions and 7 deletions

config.json

@@ -2,7 +2,7 @@
   "architectures": [
     "Wav2Vec2ForMaskedLM"
   ],
-  "conv_bias": false,
+  "conv_bias": true,
   "conv_dim": [
     512,
     512,
@@ -30,22 +30,22 @@
     2,
     2
   ],
-  "do_stable_layer_norm": false,
+  "do_stable_layer_norm": true,
   "feat_extract_activation": "gelu",
   "feat_extract_dropout": 0.0,
-  "feat_extract_norm": "group",
+  "feat_extract_norm": "layer",
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
-  "hidden_size": 768,
+  "hidden_size": 1024,
   "initializer_range": 0.02,
-  "intermediate_size": 3072,
+  "intermediate_size": 4096,
   "layer_norm_eps": 1e-05,
   "model_type": "wav2vec2",
-  "num_attention_heads": 12,
+  "num_attention_heads": 16,
   "num_conv_pos_embedding_groups": 16,
   "num_conv_pos_embeddings": 128,
   "num_feat_extract_layers": 7,
-  "num_hidden_layers": 12,
+  "num_hidden_layers": 24,
   "transformers_version": "4.3.0.dev0",
   "vocab_size": 32
 }
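The corrected values (hidden_size 1024, 24 layers, 16 heads, intermediate_size 4096, pre-attention layer norm) match the wav2vec 2.0 "large" architecture rather than the "base" one. Below is a minimal sketch, assuming the transformers Wav2Vec2Config API, of recreating a config with the same corrected fields; it is illustrative only and not the exact file from this commit.

from transformers import Wav2Vec2Config

# Illustrative sketch: set the fields changed in this commit to their corrected values.
config = Wav2Vec2Config(
    conv_bias=True,             # feature-extractor conv layers use a bias term
    do_stable_layer_norm=True,  # layer norm applied before attention, as in the large variant
    feat_extract_norm="layer",  # layer norm after every conv layer instead of group norm
    hidden_size=1024,
    intermediate_size=4096,
    num_attention_heads=16,
    num_hidden_layers=24,
    vocab_size=32,
)
config.save_pretrained(".")  # writes a config.json containing these values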