Upload config

Arthur Zucker authored on 2022-10-04 14:58:21 +00:00; committed by huggingface-web
parent 80bf224040
commit d13d0e4bec
1 changed file with 73 additions and 71 deletions

config.json

@@ -1,11 +1,11 @@
 {
-  "_name_or_path": "openai/whisper-tiny.en",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
-  "architectures": [
-    "WhisperForConditionalGeneration"
-  ],
   "attention_dropout": 0.0,
+  "begin_suppress_tokens": [
+    220,
+    50256
+  ],
   "bos_token_id": 50257,
   "d_model": 384,
   "decoder_attention_heads": 6,
@@ -19,14 +19,16 @@
   "encoder_layerdrop": 0.0,
   "encoder_layers": 4,
   "eos_token_id": 50256,
-  "feature_size": 1,
   "init_std": 0.02,
-  "input_channels": 1,
   "is_encoder_decoder": true,
   "max_source_positions": 1500,
   "max_target_positions": 448,
   "model_type": "whisper",
-  "non_speech_tokens": [
+  "num_hidden_layers": 4,
+  "num_mel_bins": 80,
+  "pad_token_id": 0,
+  "scale_embedding": false,
+  "suppress_tokens": [
     1,
     2,
     6,
@@ -52,71 +54,71 @@
     91,
     92,
     93,
-    359,
-    503,
-    522,
-    542,
-    873,
-    893,
-    902,
-    918,
-    922,
-    931,
-    1350,
-    1853,
-    1982,
-    2460,
-    2627,
-    3246,
-    3253,
-    3268,
-    3536,
-    3846,
-    3961,
-    4183,
-    4667,
-    6585,
-    6647,
-    7273,
-    9061,
-    9383,
-    10428,
-    10929,
-    11938,
-    12033,
-    12331,
-    12562,
-    13793,
-    14157,
-    14635,
-    15265,
-    15618,
-    16553,
-    16604,
-    18362,
-    18956,
-    20075,
-    21675,
-    22520,
-    26130,
-    26161,
-    26435,
-    28279,
-    29464,
-    31650,
-    32302,
-    32470,
-    36865,
-    42863,
-    47425,
-    49870,
-    50254
+    357,
+    366,
+    438,
+    532,
+    685,
+    705,
+    796,
+    930,
+    1058,
+    1220,
+    1267,
+    1279,
+    1303,
+    1343,
+    1377,
+    1391,
+    1635,
+    1782,
+    1875,
+    2162,
+    2361,
+    2488,
+    3467,
+    4008,
+    4211,
+    4600,
+    4808,
+    5299,
+    5855,
+    6329,
+    7203,
+    9609,
+    9959,
+    10563,
+    10786,
+    11420,
+    11709,
+    11907,
+    13163,
+    13697,
+    13700,
+    14808,
+    15306,
+    16410,
+    16791,
+    17992,
+    19203,
+    19510,
+    20724,
+    22305,
+    22935,
+    27007,
+    30109,
+    30420,
+    33409,
+    34949,
+    40283,
+    40493,
+    40549,
+    47282,
+    49146,
+    50257,
+    50359,
+    50360
   ],
-  "num_hidden_layers": 4,
-  "num_mel_bins": 80,
-  "pad_token_id": 0,
-  "scale_embedding": false,
-  "torch_dtype": "float32",
   "transformers_version": "4.23.0.dev0",
   "use_cache": true,
   "vocab_size": 51864