From 268a43b3b40ae0417e8fc84424723ecdcf4b7da9 Mon Sep 17 00:00:00 2001
From: patil-suraj
Date: Fri, 4 Mar 2022 13:45:43 +0000
Subject: [PATCH] add flax model

---
 .ipynb_checkpoints/config-checkpoint.json | 25 +++++++++++++++++++++++
 config.json                               |  3 +++
 flax_model.msgpack                        |  3 +++
 3 files changed, 31 insertions(+)
 create mode 100644 .ipynb_checkpoints/config-checkpoint.json
 create mode 100644 flax_model.msgpack

diff --git a/.ipynb_checkpoints/config-checkpoint.json b/.ipynb_checkpoints/config-checkpoint.json
new file mode 100644
index 0000000..8e5fb14
--- /dev/null
+++ b/.ipynb_checkpoints/config-checkpoint.json
@@ -0,0 +1,25 @@
+{
+  "architectures": [
+    "XLMRobertaForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 1024,
+  "initializer_range": 0.02,
+  "intermediate_size": 4096,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 514,
+  "model_type": "xlm-roberta",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
+  "output_past": true,
+  "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.17.0.dev0",
+  "type_vocab_size": 1,
+  "use_cache": true,
+  "vocab_size": 250002
+}
diff --git a/config.json b/config.json
index 3acc6bc..8e5fb14 100644
--- a/config.json
+++ b/config.json
@@ -17,6 +17,9 @@
   "num_hidden_layers": 24,
   "output_past": true,
   "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.17.0.dev0",
   "type_vocab_size": 1,
+  "use_cache": true,
   "vocab_size": 250002
 }
diff --git a/flax_model.msgpack b/flax_model.msgpack
new file mode 100644
index 0000000..c13a365
--- /dev/null
+++ b/flax_model.msgpack
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:96d19a73ca044be7c23518d2d23154eb0a1e6fb301d3b086e2d80bdfff1391ce
+size 2240584013
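
Note: the flax_model.msgpack above is a Git LFS pointer (the ~2.2 GB weights live in LFS storage), and the config's "architectures" entry names XLMRobertaForMaskedLM. A minimal sketch of loading such a checkpoint with the transformers Flax API follows; the repo id "xlm-roberta-large" is an assumption inferred from the config (hidden_size=1024, 24 layers, vocab_size=250002), since the patch itself does not name the repository:

    # Sketch only; assumes the repo id "xlm-roberta-large", which the patch
    # does not state. FlaxXLMRobertaForMaskedLM is the Flax counterpart of
    # the XLMRobertaForMaskedLM architecture listed in config.json.
    from transformers import AutoTokenizer, FlaxXLMRobertaForMaskedLM

    tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-large")
    model = FlaxXLMRobertaForMaskedLM.from_pretrained("xlm-roberta-large")

    # Tokenize with numpy tensors, which Flax models accept directly.
    inputs = tokenizer("The capital of France is <mask>.", return_tensors="np")
    outputs = model(**inputs)

    # Pick the highest-scoring token at the <mask> position.
    mask_index = int((inputs["input_ids"][0] == tokenizer.mask_token_id).argmax())
    predicted_id = int(outputs.logits[0, mask_index].argmax(-1))
    print(tokenizer.decode(predicted_id))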