woodchen7 committed
Commit 6aac973 · verified · 1 Parent(s): 7790d8a

Upload config.json with huggingface_hub

Files changed (1):
config.json +144 -0
config.json ADDED
@@ -0,0 +1,144 @@
+ {
+   "architectures": [
+     "Qwen2_5_VLForConditionalGeneration"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 3584,
+   "image_token_id": 151655,
+   "initializer_range": 0.02,
+   "intermediate_size": 18944,
+   "max_position_embeddings": 128000,
+   "max_window_layers": 28,
+   "model_type": "qwen2_5_vl",
+   "num_attention_heads": 28,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 4,
+   "quantization_config": {
+     "bits": 4,
+     "group_size": 128,
+     "modules_to_not_convert": [
+       "visual"
+     ],
+     "quant_method": "awq",
+     "version": "gemm",
+     "zero_point": true
+   },
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": {
+     "mrope_section": [
+       16,
+       24,
+       24
+     ],
+     "rope_type": "default",
+     "type": "default"
+   },
+   "rope_theta": 1000000.0,
+   "sliding_window": 32768,
+   "text_config": {
+     "architectures": [
+       "Qwen2_5_VLForConditionalGeneration"
+     ],
+     "attention_dropout": 0.0,
+     "bos_token_id": 151643,
+     "eos_token_id": 151645,
+     "hidden_act": "silu",
+     "hidden_size": 3584,
+     "image_token_id": null,
+     "initializer_range": 0.02,
+     "intermediate_size": 18944,
+     "layer_types": [
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention",
+       "full_attention"
+     ],
+     "max_position_embeddings": 128000,
+     "max_window_layers": 28,
+     "model_type": "qwen2_5_vl_text",
+     "num_attention_heads": 28,
+     "num_hidden_layers": 28,
+     "num_key_value_heads": 4,
+     "rms_norm_eps": 1e-06,
+     "rope_scaling": {
+       "mrope_section": [
+         16,
+         24,
+         24
+       ],
+       "rope_type": "default",
+       "type": "default"
+     },
+     "rope_theta": 1000000.0,
+     "sliding_window": null,
+     "torch_dtype": "bfloat16",
+     "use_cache": true,
+     "use_sliding_window": false,
+     "video_token_id": null,
+     "vision_end_token_id": 151653,
+     "vision_start_token_id": 151652,
+     "vision_token_id": 151654,
+     "vocab_size": 152064
+   },
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.55.0.dev0",
+   "use_cache": false,
+   "use_sliding_window": false,
+   "video_token_id": 151656,
+   "vision_config": {
+     "depth": 32,
+     "fullatt_block_indexes": [
+       7,
+       15,
+       23,
+       31
+     ],
+     "hidden_act": "silu",
+     "hidden_size": 1280,
+     "in_channels": 3,
+     "in_chans": 3,
+     "initializer_range": 0.02,
+     "intermediate_size": 3420,
+     "model_type": "qwen2_5_vl",
+     "num_heads": 16,
+     "out_hidden_size": 3584,
+     "patch_size": 14,
+     "spatial_merge_size": 2,
+     "spatial_patch_size": 14,
+     "temporal_patch_size": 2,
+     "tokens_per_second": 2,
+     "window_size": 112
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652,
+   "vision_token_id": 151654,
+   "vocab_size": 152064
+ }
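
The dimensions above (hidden_size 3584, 28 layers, grouped-query attention with 4 KV heads) match the 7B Qwen2.5-VL variant, and the quantization_config marks this as a 4-bit AWQ checkpoint with the vision tower left unquantized ("visual" is listed in modules_to_not_convert). A minimal loading sketch follows; the repo id is a placeholder (an assumption, not taken from this commit), and it presumes a transformers build with Qwen2.5-VL support plus the autoawq package installed:

```python
# Minimal loading sketch for this AWQ checkpoint.
from transformers import AutoProcessor, Qwen2_5_VLForConditionalGeneration

model_id = "your-namespace/qwen2.5-vl-awq"  # hypothetical repo id; substitute the real one

# torch_dtype="float16" mirrors the top-level "torch_dtype" field in config.json.
# The AWQ settings are read from "quantization_config" in the file itself.
model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype="float16",
    device_map="auto",
)
processor = AutoProcessor.from_pretrained(model_id)
```

Because the quantization_config lives inside config.json, from_pretrained should detect the AWQ weights on its own; no extra quantization arguments are needed at load time.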