eyad-silx committed
Commit d5cfeec · verified · 1 parent: 038bacd

Update config.json

Files changed (1): config.json (+6, -10)
config.json CHANGED

@@ -1,16 +1,13 @@
-
 {
-  "architectures": [
+  "architectures": [
     "InfinityFormerForCausalLM"
   ],
   "attention_probs_dropout_prob": 0.1,
   "auto_map": {
-    "AutoConfig": "silx-ai/QuasarV4-Tiny--configuration_quasrav4.QuasraV4Config",
-    "AutoModelForCausalLM": "silx-ai/QuasarV4-Tiny--modeling_quasrav4.QuasraV4ForCausalLM"
+    "AutoConfig": "configuration_quasrav4.InfinityFormerConfig",
+    "AutoModelForCausalLM": "modeling_quasrav4.InfinityFormerForCausalLM"
   },
   "gate_init_bias": -2.0,
-  "gradient_checkpointing_frequency": 1,
-  "gradient_checkpointing_use_reentrant": true,
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "initializer_range": 0.02,
@@ -21,18 +18,17 @@
   "max_position_embeddings": 812,
   "memory_compression_frequency": 100,
   "memory_compression_ratio": 0.5,
-  "model_type": "quasarv4",
+  "model_type": "infinity_former",
   "num_attention_heads": 12,
   "num_hidden_layers": 54,
   "num_memory_scales": 3,
+  "pad_token_id": 0,
   "rotary_embedding_base": 10000,
   "torch_dtype": "float32",
-  "transformers_version": "4.52.4",
+  "transformers_version": "4.40.0",
   "use_cache": true,
   "use_gating": true,
-  "use_gradient_checkpointing": false,
   "use_multi_scale_memory": true,
   "use_rotary_embeddings": true,
   "vocab_size": 151669
 }
-
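
The substantive fix is the auto_map block: the old entries pointed at repo-qualified classes (silx-ai/QuasarV4-Tiny--configuration_quasrav4.QuasraV4Config) whose names did not match the "InfinityFormerForCausalLM" architecture declared above, while the new entries reference the local configuration_quasrav4.py and modeling_quasrav4.py modules and the matching InfinityFormerConfig / InfinityFormerForCausalLM classes, with model_type updated to "infinity_former" to agree. Below is a minimal sketch of how transformers consumes such a config; the repo id silx-ai/QuasarV4-Tiny is taken from the old auto_map paths above, and the rest is the standard trust_remote_code loading flow, not something this commit itself confirms.

# Minimal sketch: loading a model whose config carries a custom auto_map.
# Assumption: the repo id "silx-ai/QuasarV4-Tiny" (taken from the old auto_map
# paths above). trust_remote_code=True is required because InfinityFormerConfig
# and InfinityFormerForCausalLM ship inside the repository, not with transformers.
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained(
    "silx-ai/QuasarV4-Tiny",
    trust_remote_code=True,  # resolves "AutoConfig": "configuration_quasrav4.InfinityFormerConfig"
)
print(config.model_type)         # "infinity_former"
print(config.num_hidden_layers)  # 54
print(config.vocab_size)         # 151669

model = AutoModelForCausalLM.from_pretrained(
    "silx-ai/QuasarV4-Tiny",
    trust_remote_code=True,  # resolves "AutoModelForCausalLM": "modeling_quasrav4.InfinityFormerForCausalLM"
)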