Update `max_length` param

Arthur Zucker 2022-10-17 11:09:07 +00:00 committed by huggingface-web
parent ea8f5d0f78
commit 407a3a8d4f
2 changed files with 9 additions and 3 deletions

config.json

@@ -1,6 +1,10 @@
 {
+  "_name_or_path": "openai/whisper-tiny.en",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
+  "architectures": [
+    "WhisperForConditionalGeneration"
+  ],
   "attention_dropout": 0.0,
   "begin_suppress_tokens": [
     220,
@@ -27,6 +31,7 @@
   ],
   "init_std": 0.02,
   "is_encoder_decoder": true,
+  "max_length": 448,
   "max_source_positions": 1500,
   "max_target_positions": 448,
   "model_type": "whisper",
@@ -126,7 +131,8 @@
     50360,
     50361
   ],
-  "transformers_version": "4.23.0.dev0",
+  "torch_dtype": "float32",
+  "transformers_version": "4.24.0.dev0",
   "use_cache": true,
   "vocab_size": 51864
 }

pytorch_model.bin (Stored with Git LFS)

Binary file not shown.