Update `max_length` param

This commit is contained in:
Arthur Zucker 2022-10-17 11:06:59 +00:00 committed by huggingface-web
parent 27a139f678
commit 8662dcd0ff
1 changed file with 7 additions and 1 deletion

View File

@@ -1,6 +1,10 @@
 {
+  "_name_or_path": "openai/whisper-tiny",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
+  "architectures": [
+    "WhisperForConditionalGeneration"
+  ],
   "attention_dropout": 0.0,
   "begin_suppress_tokens": [
     220,
@@ -35,6 +39,7 @@
   ],
   "init_std": 0.02,
   "is_encoder_decoder": true,
+  "max_length": 448,
   "max_source_positions": 1500,
   "max_target_positions": 448,
   "model_type": "whisper",
@@ -132,7 +137,8 @@
     50361,
     50362
   ],
-  "transformers_version": "4.23.0.dev0",
+  "torch_dtype": "float32",
+  "transformers_version": "4.24.0.dev0",
   "use_cache": true,
   "vocab_size": 51865
 }