{
  "add_prefix_space": false,
  "bos_token": {
    "__type": "AddedToken",
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "do_lower_case": true,
  "eos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "errors": "replace",
  "model_max_length": 16,
  "name_or_path": "openai/clip-vit-base-patch32",
  "pad_token": "!",
  "processor_class": "OwlViTProcessor",
  "special_tokens_map_file": "/Users/adirik/.cache/huggingface/transformers/18a566598f286c9139f88160c99f84eec492a26bd22738fa9cb44d5b7e0a5c76.cce1206abbad28826f000510f22f354e53e66a97f7c23745a7dfe27609cc07f5",
  "tokenizer_class": "CLIPTokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
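
For context, this is the file the transformers library reads when the tokenizer is loaded with from_pretrained. A minimal sketch of how the fields above take effect, assuming the config belongs to an OWL-ViT checkpoint (the "processor_class": "OwlViTProcessor" entry suggests a repo such as google/owlvit-base-patch32; the exact checkpoint name is an assumption):

from transformers import AutoTokenizer

# Assumption: this config ships with google/owlvit-base-patch32, inferred from
# the processor_class field above; substitute your own checkpoint if it differs.
tokenizer = AutoTokenizer.from_pretrained("google/owlvit-base-patch32")

# "model_max_length": 16 caps each text query at 16 tokens, so
# padding="max_length" pads/truncates to that length using the "!" pad_token.
batch = tokenizer(
    ["a photo of a cat"],
    padding="max_length",
    truncation=True,
)
print(batch["input_ids"])   # <|startoftext|> ... <|endoftext|> padded to 16
print(tokenizer.bos_token)  # <|startoftext|>
print(tokenizer.eos_token)  # <|endoftext|>
print(tokenizer.pad_token)  # !

The short 16-token limit reflects OWL-ViT's use of the CLIP text encoder for brief object queries rather than long captions; note also that the unk_token is mapped to the same <|endoftext|> content as the eos_token, as recorded in the config.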