Upload tokenizer

Niels Rogge 2022-09-07 17:54:08 +00:00 committed by huggingface-web
parent a7c4d01c0b
commit b2f673e182
3 changed files with 49411 additions and 3 deletions


merges.txt

@@ -1,4 +1,4 @@
-#version: 0.2 - Trained by `huggingface/tokenizers`
+#version: 0.2
 i n
 t h
 a n
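
The only change in this file is the header: the trailing "Trained by `huggingface/tokenizers`" note is dropped, leaving the bare "#version: 0.2" line. As a minimal sketch of how a BPE merges file like this is typically read (the helper name and local path are illustrative, not part of this commit): the header line is skipped, and each remaining line is a space-separated merge pair whose priority is its position in the file.

def load_merges(path="merges.txt"):
    # The first line is the "#version: ..." header and carries no merges.
    with open(path, encoding="utf-8") as f:
        lines = f.read().strip().split("\n")
    # Each remaining line is a space-separated token pair;
    # earlier lines are merged first, so rank = line position.
    pairs = [tuple(line.split()) for line in lines[1:]]
    return {pair: rank for rank, pair in enumerate(pairs)}

bpe_ranks = load_merges()  # e.g. bpe_ranks[("i", "n")] == 0 for the file above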


tokenizer_config.json

@@ -21,7 +21,6 @@
 "model_max_length": 77,
 "name_or_path": "openai/clip-vit-base-patch32",
 "pad_token": "<|endoftext|>",
-"processor_class": "XCLIPProcessor",
 "special_tokens_map_file": "/home/niels/.cache/huggingface/hub/models--openai--clip-vit-base-patch32/snapshots/f4881ba48ee4d21b7ed5602603b9e3e92eb1b346/special_tokens_map.json",
 "tokenizer_class": "CLIPTokenizer",
 "unk_token": {

vocab.json (49411 changed lines)

File diff suppressed because one or more lines are too long
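
Although the vocab.json diff is suppressed, the file itself is just a JSON map from BPE token strings to integer ids. A minimal sketch of inspecting it locally, assuming a downloaded copy at the path below (for the CLIP vocabulary, the map has 49408 entries and <|endoftext|> carries the highest id):

import json

# "vocab.json" is an assumed local path to the uploaded file.
with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)

print(len(vocab))              # 49408 entries for the CLIP BPE vocabulary
print(vocab["<|endoftext|>"])  # 49407, the eos/pad token id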