From 8524a13ea3d58428a3def9153c4877ef8807047a Mon Sep 17 00:00:00 2001
From: patil-suraj
Date: Fri, 4 Mar 2022 13:26:41 +0000
Subject: [PATCH] add flax model

---
 .ipynb_checkpoints/config-checkpoint.json | 25 +++++++++++++++++++++++
 config.json                               |  3 +++
 flax_model.msgpack                        |  3 +++
 3 files changed, 31 insertions(+)
 create mode 100644 .ipynb_checkpoints/config-checkpoint.json
 create mode 100644 flax_model.msgpack

diff --git a/.ipynb_checkpoints/config-checkpoint.json b/.ipynb_checkpoints/config-checkpoint.json
new file mode 100644
index 0000000..1960141
--- /dev/null
+++ b/.ipynb_checkpoints/config-checkpoint.json
@@ -0,0 +1,25 @@
+{
+  "architectures": [
+    "XLMRobertaForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 514,
+  "model_type": "xlm-roberta",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "output_past": true,
+  "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.17.0.dev0",
+  "type_vocab_size": 1,
+  "use_cache": true,
+  "vocab_size": 250002
+}
diff --git a/config.json b/config.json
index 49dd628..1960141 100644
--- a/config.json
+++ b/config.json
@@ -17,6 +17,9 @@
   "num_hidden_layers": 12,
   "output_past": true,
   "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.17.0.dev0",
   "type_vocab_size": 1,
+  "use_cache": true,
   "vocab_size": 250002
 }
diff --git a/flax_model.msgpack b/flax_model.msgpack
new file mode 100644
index 0000000..30d744f
--- /dev/null
+++ b/flax_model.msgpack
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:311b6941e02128b01c6a429f55b47b351a86fe53e6802774d87696bcbc465992
+size 1113187999
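
Note: flax_model.msgpack is committed as a Git LFS pointer; the actual weights (~1.1 GB) are fetched by git-lfs on checkout. Below is a minimal sketch of how the Flax weights added by this patch could be loaded, assuming a local checkout of this model repo with git-lfs installed and transformers[flax] available; the local path and the use of the xlm-roberta-base tokenizer are assumptions for illustration, not part of the patch.

    # Minimal sketch: load the Flax checkpoint added by this commit.
    # Assumes flax_model.msgpack was pulled via git-lfs (not the pointer file).
    from transformers import AutoTokenizer, FlaxXLMRobertaForMaskedLM

    model_dir = "./"  # hypothetical path to a local checkout of this repo
    model = FlaxXLMRobertaForMaskedLM.from_pretrained(model_dir)

    # The patch adds only config + weights, so a matching tokenizer is
    # assumed to come from the base xlm-roberta-base vocabulary.
    tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")

    inputs = tokenizer("The capital of France is <mask>.", return_tensors="np")
    outputs = model(**inputs)
    print(outputs.logits.shape)  # (1, sequence_length, 250002) per the config's vocab_size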