Upload processor

Younes Belkada 2022-12-12 15:19:03 +00:00 committed by huggingface-web
parent 08a482e3e3
commit f656a373b0
4 changed files with 30570 additions and 0 deletions

preprocessor_config.json (new file, 24 lines)

@@ -0,0 +1,24 @@
{
  "do_normalize": true,
  "do_pad": true,
  "do_rescale": true,
  "do_resize": true,
  "image_mean": [
    0.48145466,
    0.4578275,
    0.40821073
  ],
  "image_processor_type": "BlipImageProcessor",
  "image_std": [
    0.26862954,
    0.26130258,
    0.27577711
  ],
  "processor_class": "BlipProcessor",
  "resample": 3,
  "rescale_factor": 0.00392156862745098,
  "size": {
    "shortest_edge": 384
  },
  "size_divisor": 32
}
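
The values above determine the image preprocessing: resize so the shortest edge is 384 (resample: 3 is PIL's bicubic filter), rescale by 1/255 (rescale_factor), and normalize per channel with image_mean and image_std. A minimal NumPy sketch of the rescale and normalize steps only, illustrative rather than the BlipImageProcessor implementation, with a random array standing in for a real resized image:

import numpy as np

# Values copied from preprocessor_config.json above.
rescale_factor = 0.00392156862745098  # 1 / 255
image_mean = np.array([0.48145466, 0.4578275, 0.40821073])
image_std = np.array([0.26862954, 0.26130258, 0.27577711])

# Stand-in for an RGB image already resized so its shortest edge is 384.
pixels = np.random.randint(0, 256, size=(384, 512, 3), dtype=np.uint8)

# do_rescale: map [0, 255] to [0, 1].
pixels = pixels.astype(np.float32) * rescale_factor

# do_normalize: per-channel (x - mean) / std.
pixels = (pixels - image_mean) / image_std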

special_tokens_map.json (new file, 7 lines)

@@ -0,0 +1,7 @@
{
  "cls_token": "[CLS]",
  "mask_token": "[MASK]",
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
  "unk_token": "[UNK]"
}
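
These are the standard BERT special tokens. As an illustration, a short sketch (assuming the bert-base-uncased tokenizer referenced in tokenizer_config.json below) of how they wrap an encoded caption:

from transformers import BertTokenizer

# bert-base-uncased is the base checkpoint named in tokenizer_config.json.
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")

ids = tokenizer("a photo of a cat")["input_ids"]
print(tokenizer.convert_ids_to_tokens(ids))
# ['[CLS]', 'a', 'photo', 'of', 'a', 'cat', '[SEP]']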

tokenizer_config.json (new file, 17 lines)

@@ -0,0 +1,17 @@
{
  "cls_token": "[CLS]",
  "do_basic_tokenize": true,
  "do_lower_case": true,
  "mask_token": "[MASK]",
  "model_max_length": 512,
  "name_or_path": "bert-base-uncased",
  "never_split": null,
  "pad_token": "[PAD]",
  "processor_class": "BlipProcessor",
  "sep_token": "[SEP]",
  "special_tokens_map_file": null,
  "strip_accents": null,
  "tokenize_chinese_chars": true,
  "tokenizer_class": "BertTokenizer",
  "unk_token": "[UNK]"
}
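
Together with preprocessor_config.json, special_tokens_map.json, and vocab.txt, this is what BlipProcessor.from_pretrained needs to rebuild the paired image processor and tokenizer. A minimal loading sketch; the path is a placeholder for a local copy of this repository (or its Hub repo id), and the component class names printed are what would be expected, not guaranteed:

from transformers import BlipProcessor

# Placeholder path: any directory containing the four files in this commit.
processor = BlipProcessor.from_pretrained("path/to/processor")

# The processor bundles the two components configured above.
print(type(processor.image_processor).__name__)  # e.g. BlipImageProcessor
print(type(processor.tokenizer).__name__)        # e.g. BertTokenizerFast

# Text-only call: BERT tokenization using the special tokens above.
encoding = processor(text="a photo of a cat")
print(encoding["input_ids"])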

vocab.txt (new file, 30522 lines)

File diff suppressed because it is too large.