Upload processor

Alara Dirik 2023-02-24 15:23:04 +00:00 committed by huggingface-web
parent 76291f95e4
commit e2b0c7690e
4 changed files with 30575 additions and 0 deletions

preprocessor_config.json (new file, 30 lines)

@@ -0,0 +1,30 @@
{
"crop_size": {
"height": 289,
"width": 289
},
"do_center_crop": true,
"do_normalize": false,
"do_rescale": true,
"do_resize": true,
"image_mean": [
0.5,
0.5,
0.5
],
"image_processor_type": "EfficientNetImageProcessor",
"image_std": [
0.5,
0.5,
0.5
],
"include_top": false,
"processor_class": "ALIGNProcessor",
"resample": 2,
"rescale_factor": 0.00784313725490196,
"rescale_offset": true,
"size": {
"height": 346,
"width": 346
}
}
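
For reference, a minimal sketch of how this config drives image preprocessing, assuming the file sits in a local directory (./align-processor is a hypothetical path): images are resized to 346x346 with bilinear resampling (resample 2), center-cropped to 289x289, and rescaled by 2/255 (0.00784...) with an offset, which should map pixel values into [-1, 1]; do_normalize is false, so image_mean and image_std are unused here.

from PIL import Image
import numpy as np
from transformers import EfficientNetImageProcessor

# Hypothetical local directory holding this preprocessor_config.json.
image_processor = EfficientNetImageProcessor.from_pretrained("./align-processor")

# Any RGB image works; here a random 500x500 one.
image = Image.fromarray((np.random.rand(500, 500, 3) * 255).astype(np.uint8))
inputs = image_processor(images=image, return_tensors="pt")

# Resized to 346x346, center-cropped to 289x289, rescaled toward [-1, 1].
print(inputs["pixel_values"].shape)  # torch.Size([1, 3, 289, 289])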

special_tokens_map.json (new file, 7 lines)

@@ -0,0 +1,7 @@
{
"cls_token": "[CLS]",
"mask_token": "[MASK]",
"pad_token": "[PAD]",
"sep_token": "[SEP]",
"unk_token": "[UNK]"
}
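
These are the standard BERT special tokens. A small sketch of how they surface after loading, again assuming the same hypothetical local directory holding all three tokenizer files:

from transformers import BertTokenizer

# Hypothetical directory with special_tokens_map.json, tokenizer_config.json,
# and vocab.txt side by side.
tokenizer = BertTokenizer.from_pretrained("./align-processor")

print(tokenizer.special_tokens_map)
# {'unk_token': '[UNK]', 'sep_token': '[SEP]', 'pad_token': '[PAD]',
#  'cls_token': '[CLS]', 'mask_token': '[MASK]'}
print(tokenizer.pad_token_id)  # 0 in the standard BERT vocab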

tokenizer_config.json (new file, 16 lines)

@@ -0,0 +1,16 @@
{
"cls_token": "[CLS]",
"do_basic_tokenize": true,
"do_lower_case": true,
"mask_token": "[MASK]",
"model_max_length": 64,
"never_split": null,
"pad_token": "[PAD]",
"processor_class": "ALIGNProcessor",
"sep_token": "[SEP]",
"special_tokens_map_file": null,
"strip_accents": null,
"tokenize_chinese_chars": true,
"tokenizer_class": "BertTokenizer",
"unk_token": "[UNK]"
}
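
A minimal usage sketch, assuming the same hypothetical local directory: with model_max_length set to 64, padding to max length yields fixed 64-token sequences, the length the processor feeds to the text encoder.

from transformers import BertTokenizer

tokenizer = BertTokenizer.from_pretrained("./align-processor")  # hypothetical path

enc = tokenizer(
    "a photo of a cat",
    padding="max_length",  # pads up to model_max_length (64)
    truncation=True,
)
print(len(enc["input_ids"]))  # 64
print(tokenizer.decode(enc["input_ids"][:8]))
# [CLS] a photo of a cat [SEP] [PAD]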

vocab.txt (new file, 30522 lines)

File diff suppressed because it is too large.
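
Taken together, the four files define the full processor: the 30522-entry vocab.txt matches the standard BERT uncased vocabulary, and both configs name ALIGNProcessor as the processor class. A minimal end-to-end sketch, assuming transformers' AlignProcessor corresponds to that class and using a hypothetical repo id:

import requests
from PIL import Image
from transformers import AlignProcessor

# Hypothetical repo id; substitute the repository these files were uploaded to.
processor = AlignProcessor.from_pretrained("your-org/align-base")

url = "http://images.cocodataset.org/val2017/000000039769.jpg"
image = Image.open(requests.get(url, stream=True).raw)

# Tokenizes the text and preprocesses the image in one call.
inputs = processor(text=["a photo of two cats"], images=image, return_tensors="pt")
print(inputs.keys())
# dict_keys(['input_ids', 'token_type_ids', 'attention_mask', 'pixel_values'])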