Update config.json
parent e9ce49e76b
commit 451955c798
1 changed file: config.json (32 changed lines: 14 additions, 18 deletions)
@@ -1,19 +1,15 @@
 {
-  "architectures": [
-    "GPT2LMHeadModel"
-  ],
-  "bos_token_id": 50256,
-  "eos_token_ids": [
-    50256
-  ],
-  "initializer_range": 0.02,
-  "layer_norm_epsilon": 1e-05,
-  "model_type": "gpt2",
-  "n_ctx": 1024,
-  "n_embd": 1024,
-  "n_head": 16,
-  "n_layer": 24,
-  "n_positions": 1024,
-  "pad_token_id": 50256,
-  "vocab_size": 50257
-}
+  "architectures": [
+    "GPT2LMHeadModel"
+  ],
+  "bos_token_id": 50256,
+  "initializer_range": 0.02,
+  "layer_norm_epsilon": 1e-05,
+  "model_type": "gpt2",
+  "n_ctx": 1024,
+  "n_embd": 1024,
+  "n_head": 16,
+  "n_layer": 24,
+  "n_positions": 1024,
+  "vocab_size": 50257
+}
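The fields in this file ("model_type": "gpt2", the GPT2LMHeadModel architecture, n_layer 24 / n_embd 1024 / n_head 16) match a Hugging Face transformers GPT-2 configuration. A minimal sketch of how the updated file could be consumed, assuming the transformers library is the intended consumer and that a local copy of config.json is available (the path below is illustrative):

    # Assumption: this config.json is read by the Hugging Face `transformers` library.
    from transformers import GPT2Config, GPT2LMHeadModel

    # Parse the updated file into a config object.
    config = GPT2Config.from_json_file("config.json")
    print(config.n_layer, config.n_embd, config.n_head)  # 24 1024 16

    # Build a randomly initialized model with this architecture.
    model = GPT2LMHeadModel(config)

After this commit the file no longer sets eos_token_ids or pad_token_id, so a loader like this would fall back to the library's own defaults for those fields.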