Commit 7c99f7a (verified) by eyad-silx
Parent(s): 6551eef

Update config.json

Files changed (1): config.json (+8, -3)
config.json CHANGED
@@ -1,8 +1,13 @@
+
 {
-  "architectures": [
+  "architectures": [
     "InfinityFormerForCausalLM"
   ],
   "attention_probs_dropout_prob": 0.1,
+  "auto_map": {
+    "AutoConfig": "silx-ai/QuasarV4-Tiny--configuration_quasrav4.QuasraV4Config",
+    "AutoModelForCausalLM": "silx-ai/QuasarV4-Tiny--modeling_quasrav4.QuasraV4ForCausalLM"
+  },
   "gate_init_bias": -2.0,
   "gradient_checkpointing_frequency": 1,
   "gradient_checkpointing_use_reentrant": true,
@@ -16,7 +21,7 @@
   "max_position_embeddings": 812,
   "memory_compression_frequency": 100,
   "memory_compression_ratio": 0.5,
-  "model_type": "infinityformer",
+  "model_type": "quasarv4",
   "num_attention_heads": 12,
   "num_hidden_layers": 54,
   "num_memory_scales": 3,
@@ -27,7 +32,7 @@
   "use_gating": true,
   "use_gradient_checkpointing": false,
   "use_multi_scale_memory": true,
-  "use_return_dict": true,
   "use_rotary_embeddings": true,
   "vocab_size": 151669
 }
+
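
Note: the new "auto_map" block redirects the Transformers Auto* classes to custom code shipped in the silx-ai/QuasarV4-Tiny repo (the "repo--module.Class" syntax), which means loading now requires trust_remote_code=True. A minimal sketch of how a consumer would load the model after this commit, assuming the repo actually ships configuration_quasrav4.py and modeling_quasrav4.py alongside this config.json:

    from transformers import AutoConfig, AutoModelForCausalLM

    # "auto_map" points AutoConfig/AutoModelForCausalLM at custom code in
    # the repo, so remote-code execution must be explicitly opted into.
    config = AutoConfig.from_pretrained(
        "silx-ai/QuasarV4-Tiny",
        trust_remote_code=True,
    )
    print(config.model_type)  # "quasarv4" after this commit

    model = AutoModelForCausalLM.from_pretrained(
        "silx-ai/QuasarV4-Tiny",
        trust_remote_code=True,
    )

Without the "auto_map" entry (and a model_type unknown to the installed Transformers version, as "quasarv4" is here), AutoModelForCausalLM would have no way to resolve a model class for this checkpoint.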