Upload processor

commit 12d1e73a3c (parent 1d75d8b027)
Author: Younes Belkada, 2022-12-13 11:29:04 +00:00 (committed by huggingface-web)
5 changed files with 61,243 additions and 0 deletions

preprocessor_config.json (new file, +25 lines)

@@ -0,0 +1,25 @@
{
  "do_normalize": true,
  "do_pad": true,
  "do_rescale": true,
  "do_resize": true,
  "image_mean": [
    0.48145466,
    0.4578275,
    0.40821073
  ],
  "image_processor_type": "BlipImageProcessor",
  "image_std": [
    0.26862954,
    0.26130258,
    0.27577711
  ],
  "processor_class": "BlipProcessor",
  "resample": 3,
  "rescale_factor": 0.00392156862745098,
  "size": {
    "height": 384,
    "width": 384
  },
  "size_divisor": 32
}
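
For reference, these settings drive BlipImageProcessor: do_resize scales images to 384x384 with bicubic resampling (resample 3 is PIL's BICUBIC), do_rescale multiplies pixel values by 1/255 (the rescale_factor), do_normalize applies the listed mean/std, and do_pad pads to a multiple of size_divisor. A minimal loading sketch with transformers, assuming the repo id ybelkada/blip-image-captioning-base taken from name_or_path in tokenizer_config.json below, and a hypothetical local example.jpg:

from PIL import Image
from transformers import BlipImageProcessor

# Load the settings from this preprocessor_config.json.
image_processor = BlipImageProcessor.from_pretrained(
    "ybelkada/blip-image-captioning-base"  # assumed repo id (from name_or_path below)
)

image = Image.open("example.jpg")  # hypothetical local image
inputs = image_processor(images=image, return_tensors="pt")

# Resized, rescaled, and normalized tensor ready for a BLIP model.
print(inputs.pixel_values.shape)  # torch.Size([1, 3, 384, 384])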

special_tokens_map.json (new file, +7 lines)

@@ -0,0 +1,7 @@
{
  "cls_token": "[CLS]",
  "mask_token": "[MASK]",
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
  "unk_token": "[UNK]"
}
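
These are the standard BERT special tokens. Once the tokenizer is loaded they surface as attributes; a quick check, under the same repo-id assumption:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "ybelkada/blip-image-captioning-base"  # assumed repo id
)

# Mirrors the five entries in this special_tokens_map.json.
print(tokenizer.special_tokens_map)
print(tokenizer.cls_token, tokenizer.sep_token)  # [CLS] [SEP]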

tokenizer.json (new file, +30,672 lines)

File diff suppressed because it is too large.

tokenizer_config.json (new file, +17 lines)

@@ -0,0 +1,17 @@
{
  "cls_token": "[CLS]",
  "do_basic_tokenize": true,
  "do_lower_case": true,
  "mask_token": "[MASK]",
  "model_max_length": 512,
  "name_or_path": "ybelkada/blip-image-captioning-base",
  "never_split": null,
  "pad_token": "[PAD]",
  "processor_class": "BlipProcessor",
  "sep_token": "[SEP]",
  "special_tokens_map_file": null,
  "strip_accents": null,
  "tokenize_chinese_chars": true,
  "tokenizer_class": "BertTokenizer",
  "unk_token": "[UNK]"
}
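
The text side is a plain lowercasing BertTokenizer capped at 512 tokens (model_max_length). A short encoding sketch, again assuming the repo id from name_or_path:

from transformers import BertTokenizer

tokenizer = BertTokenizer.from_pretrained(
    "ybelkada/blip-image-captioning-base"  # assumed repo id
)

# do_lower_case: input is lowercased before WordPiece; [CLS]/[SEP] are added automatically.
ids = tokenizer("A photo of a cat").input_ids
print(tokenizer.convert_ids_to_tokens(ids))
# ['[CLS]', 'a', 'photo', 'of', 'a', 'cat', '[SEP]']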

vocab.txt (new file, +30,522 lines)

File diff suppressed because it is too large.
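
Together these five files define the BlipProcessor named by processor_class: the image processor from preprocessor_config.json plus the BertTokenizer built from tokenizer.json, tokenizer_config.json, special_tokens_map.json, and vocab.txt. An end-to-end sketch, with the same assumed repo id and a hypothetical local image:

from PIL import Image
from transformers import BlipProcessor

processor = BlipProcessor.from_pretrained(
    "ybelkada/blip-image-captioning-base"  # assumed repo id
)

image = Image.open("example.jpg")  # hypothetical local image
inputs = processor(images=image, text="a photo of", return_tensors="pt")

# One call yields both vision and text inputs for a BLIP captioning model.
print(sorted(inputs.keys()))  # ['attention_mask', 'input_ids', 'pixel_values']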