From 760b167cd71bee1ce6526044b09ebe967d8e6dbd Mon Sep 17 00:00:00 2001
From: Niels Rogge <niels.rogge1@gmail.com>
Date: Mon, 14 Mar 2022 15:12:59 +0000
Subject: [PATCH] Upload config.json

---
 config.json | 26 ++++++++++++++++++--------
 1 file changed, 18 insertions(+), 8 deletions(-)

diff --git a/config.json b/config.json
index 1c9c2c5..689be59 100644
--- a/config.json
+++ b/config.json
@@ -3,16 +3,23 @@
     "DPTForDepthEstimation"
   ],
   "attention_probs_dropout_prob": 0.0,
+  "auxiliary_loss_weight": 0.4,
   "channels": 256,
-  "expand_channels": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
   "hidden_size": 1024,
   "image_size": 384,
+  "in_index": -1,
   "initializer_range": 0.02,
   "intermediate_size": 4096,
   "layer_norm_eps": 1e-12,
   "model_type": "dpt",
+  "neck_hidden_sizes": [
+    256,
+    512,
+    1024,
+    1024
+  ],
   "num_attention_heads": 16,
   "num_channels": 3,
   "num_hidden_layers": 24,
@@ -23,15 +30,18 @@
     23
   ],
   "patch_size": 16,
-  "post_process_channels": [
-    256,
-    512,
-    1024,
-    1024
-  ],
   "qkv_bias": true,
   "readout_type": "project",
+  "reassemble_factors": [
+    4,
+    2,
+    1,
+    0.5
+  ],
+  "semantic_classifier_dropout": 0.1,
+  "semantic_loss_ignore_index": 255,
   "torch_dtype": "float32",
-  "transformers_version": "4.17.0.dev0",
+  "transformers_version": "4.18.0.dev0",
+  "use_auxiliary_head": true,
   "use_batch_norm": false
 }
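
Note: the fields this patch introduces (neck_hidden_sizes, reassemble_factors,
use_auxiliary_head, auxiliary_loss_weight, semantic_classifier_dropout,
semantic_loss_ignore_index) surface as attributes of DPTConfig in transformers.
Below is a minimal sketch of loading the patched file, assuming a transformers
build with DPT support (the config targets 4.18.0.dev0); the local
"config.json" path is illustrative.

    from transformers import DPTConfig, DPTForDepthEstimation

    # Load the config file as updated by this patch (path is illustrative).
    config = DPTConfig.from_json_file("config.json")

    # Fields added by this patch are plain config attributes:
    print(config.neck_hidden_sizes)      # [256, 512, 1024, 1024]
    print(config.reassemble_factors)     # [4, 2, 1, 0.5]
    print(config.use_auxiliary_head)     # True
    print(config.auxiliary_loss_weight)  # 0.4

    # Instantiate the architecture named in the config's "architectures"
    # field; this creates randomly initialized weights.
    model = DPTForDepthEstimation(config)

from_json_file is the generic PretrainedConfig loader; in practice the Hub
checkpoint would be loaded with DPTForDepthEstimation.from_pretrained, which
reads this config together with the model weights.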