Upload tiny models for ViltForQuestionAnswering

Yih-Dar SHIEH 2022-11-23 20:08:10 +00:00
parent dc0b5a3b49
commit faeffbf43d
7 changed files with 2486 additions and 0 deletions

38
config.json Normal file

@@ -0,0 +1,38 @@
{
"architectures": [
"ViltForQuestionAnswering"
],
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 32,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1",
"2": "LABEL_2"
},
"image_size": 30,
"initializer_range": 0.02,
"intermediate_size": 37,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1,
"LABEL_2": 2
},
"layer_norm_eps": 1e-12,
"max_image_length": -1,
"max_position_embeddings": 512,
"modality_type_vocab_size": 2,
"model_type": "vilt",
"num_attention_heads": 4,
"num_channels": 3,
"num_hidden_layers": 5,
"num_images": -1,
"patch_size": 2,
"qkv_bias": true,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.25.0.dev0",
"type_vocab_size": 16,
"vocab_size": 1124
}
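For orientation, a minimal sketch (not part of the commit) of how this configuration maps onto the transformers API; it assumes transformers >= 4.25 with PyTorch, and "tiny-vilt-vqa" is a hypothetical local directory holding the config.json above:

# Minimal sketch: instantiate the tiny ViLT VQA model described by the config above.
# Assumes transformers >= 4.25; "tiny-vilt-vqa" is a hypothetical local directory.
from transformers import ViltConfig, ViltForQuestionAnswering

config = ViltConfig.from_pretrained("tiny-vilt-vqa")
model = ViltForQuestionAnswering(config)   # randomly initialized tiny model

print(config.hidden_size, config.num_hidden_layers)  # 32, 5
print(model.config.num_labels)                       # 3 (LABEL_0..LABEL_2)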

24
preprocessor_config.json Normal file

@@ -0,0 +1,24 @@
{
"crop_size": 30,
"do_normalize": true,
"do_pad": true,
"do_rescale": true,
"do_resize": true,
"image_mean": [
0.5,
0.5,
0.5
],
"image_processor_type": "ViltImageProcessor",
"image_std": [
0.5,
0.5,
0.5
],
"resample": 3,
"rescale_factor": 0.00392156862745098,
"size": {
"shortest_edge": 30
},
"size_divisor": 32
}
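A minimal sketch of how these image-processing settings are used (again an assumption, not part of the commit): the dummy image is synthetic, and do_resize is turned off in the sketch so the 30x30 input passes through unchanged, whereas the config above enables it.

# Minimal sketch: rescale, normalize and pad a dummy 30x30 image with ViltImageProcessor.
# Assumes transformers >= 4.25; do_resize=False is an assumption for this sketch only.
import numpy as np
from transformers import ViltImageProcessor

image_processor = ViltImageProcessor(
    do_resize=False,                      # sketch assumption; the config above sets do_resize=true
    do_rescale=True, rescale_factor=1 / 255,
    do_normalize=True, image_mean=[0.5, 0.5, 0.5], image_std=[0.5, 0.5, 0.5],
    do_pad=True, size_divisor=32,
)
image = np.random.randint(0, 256, (30, 30, 3), dtype=np.uint8)  # dummy 30x30 RGB image
encoding = image_processor(image, return_tensors="np")

print(encoding["pixel_values"].shape)  # (1, 3, 30, 30)
print(encoding["pixel_mask"].shape)    # (1, 30, 30)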

BIN
pytorch_model.bin (Stored with Git LFS) Normal file

Binary file not shown.

7
special_tokens_map.json Normal file

@@ -0,0 +1,7 @@
{
"cls_token": "[CLS]",
"mask_token": "[MASK]",
"pad_token": "[PAD]",
"sep_token": "[SEP]",
"unk_token": "[UNK]"
}

1274
tokenizer.json Normal file

File diff suppressed because it is too large

16
tokenizer_config.json Normal file

@@ -0,0 +1,16 @@
{
"cls_token": "[CLS]",
"do_basic_tokenize": true,
"do_lower_case": true,
"mask_token": "[MASK]",
"model_max_length": 512,
"name_or_path": "temp/dummy/vilt/processors",
"never_split": null,
"pad_token": "[PAD]",
"sep_token": "[SEP]",
"special_tokens_map_file": null,
"strip_accents": null,
"tokenize_chinese_chars": true,
"tokenizer_class": "BertTokenizer",
"unk_token": "[UNK]"
}
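Putting the pieces together, a sketch of a forward pass through the tiny model, under the same assumptions as above (transformers >= 4.25 with PyTorch; "tiny-vilt-vqa" is a hypothetical local directory containing all files from this commit):

# End-to-end sketch: tokenize a question, pair it with a dummy 30x30 image tensor,
# and run the tiny ViltForQuestionAnswering model.
# "tiny-vilt-vqa" is a hypothetical local directory with the files from this commit.
import torch
from transformers import AutoTokenizer, ViltForQuestionAnswering

tokenizer = AutoTokenizer.from_pretrained("tiny-vilt-vqa")         # BertTokenizer files above
model = ViltForQuestionAnswering.from_pretrained("tiny-vilt-vqa")  # loads pytorch_model.bin
model.eval()

encoding = tokenizer("What is shown in the picture?", return_tensors="pt")
pixel_values = torch.rand(1, 3, 30, 30)  # dummy image tensor matching image_size=30

with torch.no_grad():
    outputs = model(**encoding, pixel_values=pixel_values)

print(outputs.logits.shape)  # torch.Size([1, 3]) -- one score per LABEL_0..LABEL_2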

1124
vocab.txt Normal file

File diff suppressed because it is too large