ronx-labs committed on
Commit fe8070f · verified · 1 Parent(s): 856dc9d

Add files using upload-large-folder tool

Files changed (50)
  1. .gitattributes +1 -0
  2. README.md +63 -0
  3. chat_template.jinja +118 -0
  4. config.json +76 -0
  5. generation_config.json +14 -0
  6. model-00001-of-00046.safetensors +3 -0
  7. model-00002-of-00046.safetensors +3 -0
  8. model-00003-of-00046.safetensors +3 -0
  9. model-00004-of-00046.safetensors +3 -0
  10. model-00005-of-00046.safetensors +3 -0
  11. model-00006-of-00046.safetensors +3 -0
  12. model-00007-of-00046.safetensors +3 -0
  13. model-00008-of-00046.safetensors +3 -0
  14. model-00009-of-00046.safetensors +3 -0
  15. model-00010-of-00046.safetensors +3 -0
  16. model-00011-of-00046.safetensors +3 -0
  17. model-00012-of-00046.safetensors +3 -0
  18. model-00013-of-00046.safetensors +3 -0
  19. model-00014-of-00046.safetensors +3 -0
  20. model-00015-of-00046.safetensors +3 -0
  21. model-00017-of-00046.safetensors +3 -0
  22. model-00018-of-00046.safetensors +3 -0
  23. model-00019-of-00046.safetensors +3 -0
  24. model-00020-of-00046.safetensors +3 -0
  25. model-00021-of-00046.safetensors +3 -0
  26. model-00022-of-00046.safetensors +3 -0
  27. model-00023-of-00046.safetensors +3 -0
  28. model-00025-of-00046.safetensors +3 -0
  29. model-00028-of-00046.safetensors +3 -0
  30. model-00029-of-00046.safetensors +3 -0
  31. model-00031-of-00046.safetensors +3 -0
  32. model-00033-of-00046.safetensors +3 -0
  33. model-00034-of-00046.safetensors +3 -0
  34. model-00035-of-00046.safetensors +3 -0
  35. model-00036-of-00046.safetensors +3 -0
  36. model-00037-of-00046.safetensors +3 -0
  37. model-00038-of-00046.safetensors +3 -0
  38. model-00039-of-00046.safetensors +3 -0
  39. model-00040-of-00046.safetensors +3 -0
  40. model-00041-of-00046.safetensors +3 -0
  41. model-00042-of-00046.safetensors +3 -0
  42. model-00043-of-00046.safetensors +3 -0
  43. model-00044-of-00046.safetensors +3 -0
  44. model-00045-of-00046.safetensors +3 -0
  45. model-00046-of-00046.safetensors +3 -0
  46. model.safetensors.index.json +0 -0
  47. preprocessor_config.json +11 -0
  48. tokenizer.json +3 -0
  49. tokenizer_config.json +325 -0
  50. video_preprocessor_config.json +11 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,63 @@
+ ---
+ license: mit
+ language:
+ - zh
+ - en
+ base_model:
+ - zai-org/GLM-4.5-Air-Base
+ pipeline_tag: image-text-to-text
+ library_name: transformers
+ ---
+
+ # GLM-4.5V
+
+ <div align="center">
+ <img src=https://raw.githubusercontent.com/zai-org/GLM-V/refs/heads/main/resources/logo.svg width="40%"/>
+ </div>
+ <p align="center">
+ 👋 Join our <a href="https://discord.com/invite/8cnQKdAprg" target="_blank">Discord</a> communities.
+ <br>
+ 📖 Check out the <a href="https://arxiv.org/abs/2507.01006" target="_blank">paper</a>.
+ <br>
+ 📍 Access the GLM-V series models via API on the <a href="https://docs.z.ai/guides/vlm/glm-4.5v">ZhipuAI Open Platform</a>.
+ </p>
+
+ ## Introduction
+
+ Vision-language models (VLMs) have become a key cornerstone of intelligent systems. As real-world AI tasks grow increasingly complex, VLMs urgently need to enhance reasoning capabilities beyond basic multimodal perception — improving accuracy, comprehensiveness, and intelligence — to enable complex problem solving, long-context understanding, and multimodal agents.
+
+ Through our open-source work, we aim to explore the technological frontier together with the community while empowering more developers to create exciting and innovative applications.
+
+ GLM-4.5V is based on ZhipuAI’s next-generation flagship text foundation model GLM-4.5-Air (106B parameters, 12B active). It continues the technical approach of GLM-4.1V-Thinking, achieving SOTA performance among models of the same scale on 42 public vision-language benchmarks. It covers common tasks such as image, video, and document understanding, as well as GUI agent operations.
+
+ ![bench_45](https://raw.githubusercontent.com/zai-org/GLM-V/refs/heads/main/resources/bench_45v.jpeg)
+
+ Beyond benchmark performance, GLM-4.5V focuses on real-world usability. Through efficient hybrid training, it can handle diverse types of visual content, enabling full-spectrum vision reasoning, including:
+ - **Image reasoning** (scene understanding, complex multi-image analysis, spatial recognition)
+ - **Video understanding** (long video segmentation and event recognition)
+ - **GUI tasks** (screen reading, icon recognition, desktop operation assistance)
+ - **Complex chart & long document parsing** (research report analysis, information extraction)
+ - **Grounding** (precise visual element localization)
+
+ The model also introduces a **Thinking Mode** switch, allowing users to balance between quick responses and deep reasoning. This switch works the same as in the `GLM-4.5` language model.
+
+ ## Quick Start
+
+ For more code information, please visit our [GitHub](https://github.com/zai-org/GLM-V/).
+
+ ## Citation
+
+ If you use this model, please cite the following paper:
+
+ ```bibtex
+ @misc{glmvteam2025glm41vthinkingversatilemultimodalreasoning,
+ title={GLM-4.1V-Thinking: Towards Versatile Multimodal Reasoning with Scalable Reinforcement Learning},
+ author={GLM-V Team and Wenyi Hong and Wenmeng Yu and Xiaotao Gu and Guo Wang and Guobing Gan and Haomiao Tang and Jiale Cheng and Ji Qi and Junhui Ji and Lihang Pan and Shuaiqi Duan and Weihan Wang and Yan Wang and Yean Cheng and Zehai He and Zhe Su and Zhen Yang and Ziyang Pan and Aohan Zeng and Baoxu Wang and Boyan Shi and Changyu Pang and Chenhui Zhang and Da Yin and Fan Yang and Guoqing Chen and Jiazheng Xu and Jiali Chen and Jing Chen and Jinhao Chen and Jinghao Lin and Jinjiang Wang and Junjie Chen and Leqi Lei and Letian Gong and Leyi Pan and Mingzhi Zhang and Qinkai Zheng and Sheng Yang and Shi Zhong and Shiyu Huang and Shuyuan Zhao and Siyan Xue and Shangqin Tu and Shengbiao Meng and Tianshu Zhang and Tianwei Luo and Tianxiang Hao and Wenkai Li and Wei Jia and Xin Lyu and Xuancheng Huang and Yanling Wang and Yadong Xue and Yanfeng Wang and Yifan An and Yifan Du and Yiming Shi and Yiheng Huang and Yilin Niu and Yuan Wang and Yuanchang Yue and Yuchen Li and Yutao Zhang and Yuxuan Zhang and Zhanxiao Du and Zhenyu Hou and Zhao Xue and Zhengxiao Du and Zihan Wang and Peng Zhang and Debing Liu and Bin Xu and Juanzi Li and Minlie Huang and Yuxiao Dong and Jie Tang},
+ year={2025},
+ eprint={2507.01006},
+ archivePrefix={arXiv},
+ primaryClass={cs.CV},
+ url={https://arxiv.org/abs/2507.01006},
+ }
+ ```
+
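Since the README's Quick Start only links to GitHub, here is a minimal inference sketch for the files in this commit. It assumes a recent `transformers` build with GLM-4.5V support (the config below pins `4.55.0.dev0`), the repo id `zai-org/GLM-4.5V`, and a placeholder image URL; it is illustrative, not an official snippet.

```python
# Minimal GLM-4.5V inference sketch (assumptions: transformers >= 4.55 with
# Glm4vMoe support, repo id zai-org/GLM-4.5V, placeholder image URL).
import torch
from transformers import AutoProcessor, Glm4vMoeForConditionalGeneration

model_id = "zai-org/GLM-4.5V"  # assumed repo id for this upload
processor = AutoProcessor.from_pretrained(model_id)
model = Glm4vMoeForConditionalGeneration.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [
    {
        "role": "user",
        "content": [
            {"type": "image", "url": "https://example.com/demo.jpg"},  # placeholder
            {"type": "text", "text": "Describe this image."},
        ],
    }
]

# The processor applies chat_template.jinja (shown below) and prepares pixel inputs.
inputs = processor.apply_chat_template(
    messages, add_generation_prompt=True, tokenize=True,
    return_dict=True, return_tensors="pt"
).to(model.device)

out = model.generate(**inputs, max_new_tokens=512)
print(processor.decode(out[0][inputs["input_ids"].shape[1]:], skip_special_tokens=False))
```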
chat_template.jinja ADDED
@@ -0,0 +1,118 @@
+ [gMASK]<sop>
+ {%- if tools -%}
+ <|system|>
+ # Tools
+
+ You may call one or more functions to assist with the user query.
+
+ You are provided with function signatures within <tools></tools> XML tags:
+ <tools>
+ {% for tool in tools %}
+ {{ tool | tojson(ensure_ascii=False) }}
+ {% endfor %}
+ </tools>
+
+ For each function call, output the function name and arguments within the following XML format:
+ <tool_call>{function-name}
+ <arg_key>{arg-key-1}</arg_key>
+ <arg_value>{arg-value-1}</arg_value>
+ <arg_key>{arg-key-2}</arg_key>
+ <arg_value>{arg-value-2}</arg_value>
+ ...
+ </tool_call>{%- endif -%}
+ {%- macro visible_text(content) -%}
+ {%- if content is string -%}
+ {{- content }}
+ {%- elif content is iterable and content is not mapping -%}
+ {%- for item in content -%}
+ {%- if item is mapping and item.type == 'text' -%}
+ {{- item.text }}
+ {%- elif item is mapping and (item.type == 'image' or 'image' in item) -%}
+ <|begin_of_image|><|image|><|end_of_image|>
+ {%- elif item is mapping and (item.type == 'video' or 'video' in item) -%}
+ <|begin_of_video|><|video|><|end_of_video|>
+ {%- elif item is string -%}
+ {{- item }}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- else -%}
+ {{- content }}
+ {%- endif -%}
+ {%- endmacro -%}
+ {%- set ns = namespace(last_user_index=-1) %}
+ {%- for m in messages %}
+ {%- if m.role == 'user' %}
+ {% set ns.last_user_index = loop.index0 -%}
+ {%- endif %}
+ {%- endfor %}
+ {% for m in messages %}
+ {%- if m.role == 'user' -%}<|user|>
+ {% if m.content is string %}
+ {{ m.content }}
+ {%- else %}
+ {%- for item in m.content %}
+ {% if item.type == 'video' or 'video' in item %}
+ <|begin_of_video|><|video|><|end_of_video|>{% elif item.type == 'image' or 'image' in item %}
+ <|begin_of_image|><|image|><|end_of_image|>{% elif item.type == 'text' %}
+ {{ item.text }}
+ {%- endif %}
+ {%- endfor %}
+ {%- endif %}
+ {{- '/nothink' if (enable_thinking is defined and not enable_thinking and not visible_text(m.content).endswith("/nothink")) else '' -}}
+ {%- elif m.role == 'assistant' -%}
+ <|assistant|>
+ {%- set reasoning_content = '' %}
+ {%- set content = visible_text(m.content) %}
+ {%- if m.reasoning_content is string %}
+ {%- set reasoning_content = m.reasoning_content %}
+ {%- else %}
+ {%- if '</think>' in content %}
+ {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+ {%- set content = content.split('</think>')[-1].lstrip('\n') %}
+ {%- endif %}
+ {%- endif %}
+ {%- if loop.index0 > ns.last_user_index and reasoning_content -%}
+ {{ '\n<think>' + reasoning_content.strip() + '</think>'}}
+ {%- else -%}
+ {{ '\n<think></think>' }}
+ {%- endif -%}
+ {%- if content.strip() -%}
+ {{ '\n' + content.strip() }}
+ {%- endif -%}
+ {% if m.tool_calls %}
+ {% for tc in m.tool_calls %}
+ {%- if tc.function %}
+ {%- set tc = tc.function %}
+ {%- endif %}
+ {{ '\n<tool_call>' + tc.name }}
+ {% set _args = tc.arguments %}
+ {% for k, v in _args.items() %}
+ <arg_key>{{ k }}</arg_key>
+ <arg_value>{{ v | tojson(ensure_ascii=False) if v is not string else v }}</arg_value>
+ {% endfor %}
+ </tool_call>{% endfor %}
+ {% endif %}
+ {%- elif m.role == 'tool' -%}
+ {%- if m.content is string -%}
+ {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|observation|>' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- m.content }}
+ {{- '\n</tool_response>' }}
+ {%- else -%}
+ <|observation|>{% for tr in m.content %}
+
+ <tool_response>
+ {{ tr.output if tr.output is defined else tr }}
+ </tool_response>{% endfor -%}
+ {% endif -%}
+ {%- elif m.role == 'system' -%}
+ <|system|>
+ {{ visible_text(m.content) }}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- if add_generation_prompt -%}
+ <|assistant|>
+ {{'<think></think>\n' if (enable_thinking is defined and not enable_thinking) else ''}}
+ {%- endif -%}
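To see how this template behaves, it can be rendered directly with the tokenizer without running the model. The sketch below assumes the repo id `zai-org/GLM-4.5V` and that the tokenizer picks up `chat_template.jinja`; per the template logic above, `enable_thinking=False` appends `/nothink` to the last user turn and pre-fills an empty `<think></think>` block in the generation prompt.

```python
# Sketch: render the GLM-4.5V chat template to plain text (no model needed).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("zai-org/GLM-4.5V")  # assumed repo id

messages = [{"role": "user", "content": "What is the capital of France?"}]

# Default: Thinking Mode on; the assistant may open with a <think>...</think> block.
print(tok.apply_chat_template(messages, add_generation_prompt=True, tokenize=False))

# Thinking Mode off: '/nothink' is appended to the user turn and the prompt
# ends with an empty <think></think>, matching the branches in the template above.
print(tok.apply_chat_template(
    messages, add_generation_prompt=True, tokenize=False, enable_thinking=False
))
```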
config.json ADDED
@@ -0,0 +1,76 @@
+ {
+ "architectures": [
+ "Glm4vMoeForConditionalGeneration"
+ ],
+ "model_type": "glm4v_moe",
+ "text_config": {
+ "pad_token_id": 151329,
+ "vocab_size": 151552,
+ "eos_token_id": [
+ 151329,
+ 151336,
+ 151338
+ ],
+ "image_end_token_id": 151340,
+ "image_start_token_id": 151339,
+ "image_token_id": 151363,
+ "head_dim": 128,
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "first_k_dense_replace": 1,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 10944,
+ "max_position_embeddings": 65536,
+ "model_type": "glm4v_moe_text",
+ "moe_intermediate_size": 1408,
+ "n_group": 1,
+ "n_routed_experts": 128,
+ "n_shared_experts": 1,
+ "norm_topk_prob": true,
+ "num_attention_heads": 96,
+ "num_experts_per_tok": 8,
+ "num_hidden_layers": 46,
+ "num_key_value_heads": 8,
+ "partial_rotary_factor": 0.5,
+ "rms_norm_eps": 1e-05,
+ "torch_dtype": "bfloat16",
+ "rope_scaling": {
+ "rope_type": "default",
+ "mrope_section": [
+ 8,
+ 12,
+ 12
+ ]
+ },
+ "rope_theta": 10000.0,
+ "routed_scaling_factor": 1.0,
+ "topk_group": 1,
+ "use_cache": true,
+ "use_qk_norm": false
+ },
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.55.0.dev0",
+ "video_end_token_id": 151342,
+ "video_start_token_id": 151341,
+ "video_token_id": 151364,
+ "vision_config": {
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "depth": 24,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "image_size": 336,
+ "in_channels": 3,
+ "initializer_range": 0.02,
+ "intermediate_size": 10944,
+ "model_type": "glm4v_moe",
+ "num_heads": 12,
+ "out_hidden_size": 4096,
+ "patch_size": 14,
+ "rms_norm_eps": 1e-05,
+ "spatial_merge_size": 2,
+ "temporal_patch_size": 2
+ }
+ }
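The nested layout above (a `text_config` MoE decoder plus a `vision_config` encoder) can be inspected programmatically without downloading weights. A small sketch, assuming the repo id `zai-org/GLM-4.5V` and that the attribute names mirror the JSON keys:

```python
# Sketch: inspect the composite GLM-4.5V config (no weights downloaded).
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("zai-org/GLM-4.5V")  # assumed repo id
text, vision = cfg.text_config, cfg.vision_config

print(text.num_hidden_layers, text.n_routed_experts, text.num_experts_per_tok)
# -> 46 decoder layers, 128 routed experts, 8 experts active per token
print(vision.depth, vision.patch_size, vision.spatial_merge_size)
# -> 24-layer vision encoder, 14x14 patches, 2x2 patch merging
```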
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "_from_model_config": true,
+ "do_sample": true,
+ "eos_token_id": [
+ 151329,
+ 151336,
+ 151338
+ ],
+ "pad_token_id": 151329,
+ "temperature": 1.0,
+ "top_k": 1,
+ "top_p": 0.0001,
+ "transformers_version": "4.55.0.dev"
+ }
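Note that `do_sample: true` combined with `top_k: 1` and `top_p: 0.0001` makes the default decoding effectively greedy. These defaults can be overridden per call; continuing the quick-start sketch shown after README.md above, with illustrative values that are not repository recommendations:

```python
# Sketch: override the repo's default (effectively greedy) decoding settings.
# `model` and `inputs` come from the quick-start sketch earlier on this page.
out = model.generate(
    **inputs,
    max_new_tokens=1024,
    do_sample=True,
    temperature=0.8,   # illustrative value, not from this repo
    top_p=0.9,         # illustrative value
    top_k=0,           # 0 disables the top-k filter in transformers
)
```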
model-00001-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:549bacfeaa020f0a74cbaa4d8179a629ca2f02d30ce68a227a9fafeb14d157db
3
+ size 5170151456
model-00002-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6c8e9b7f3e6e95fe04016fe97d1c71f64587fd6d77f97900997d437754b2693e
3
+ size 4683041216
model-00003-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:35ad4d6592dce9410b2595b985bc28a701e700b22c3dad3de1f1815eb246ac5c
3
+ size 4683041216
model-00004-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7cb2c87a0fafb33ef32a2cfed51f6272d9bcaf720732fd8cb4faa18caf930709
3
+ size 4683041216
model-00005-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6bd6a184c20c39e3491d064a59641ddbf6712afe9110b97838ddc60cb70085e2
3
+ size 4683041216
model-00006-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:45eeff6cfb262878bef8b7d8860e728c86d40ed1b2df4e697733b1c9a8731ab4
3
+ size 4683041216
model-00007-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:02c9069ddcd9d7cdb16a90ad54f27d93283c4714846d4a154d82f2a547882183
3
+ size 4683041216
model-00008-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a973abb0f32501c3bf0c4237f18317dfa1a4e4b2d82d030843cfa04d2f8d76c6
3
+ size 4683041216
model-00009-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c7f1232d934e84cafdaf8393e2aa546c405c0dd23ba46ceb4927060b9d5ad6c0
3
+ size 4683041216
model-00010-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:65a1eefe3bf9c70c68e92b62efa0aede3a734cf593535295679c39992e73d06a
3
+ size 4683041616
model-00011-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:37180e0abad54a244ad54947d51274f92fb35c4fb64baaa2e4dc5640bee9a63c
3
+ size 4683041616
model-00012-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9dc41b9b8908bf6ad06c496370a6bbad9616183ff8a8424d96debaa970f2cb2c
3
+ size 4683041616
model-00013-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1dc9aabc30644f97482919e24e00c89ff19c47b46b83063bc6e8ba26b3ede811
3
+ size 4683041616
model-00014-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:018bf381229f6138fcb57a96536130907360000df2b8198730d815ebd33bddac
3
+ size 4683041616
model-00015-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:90610e7a10f28f00fc90d14d03cbea52e588f3cd2975949a31a1f326ae5d25fe
3
+ size 4683041616
model-00017-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:95f11f3943347740912601957978e054cee2ae121abe2518be6f4c5e34424e14
3
+ size 4683041616
model-00018-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9970c0afcf9a9f2fc1831409879fee3ab757f02e34b83a6fbc0279f48a71831e
3
+ size 4683041616
model-00019-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e29a12490be4dba8938372d5ed492978d35953ed9a4ce589ae6219c8b6e282bc
3
+ size 4683041616
model-00020-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2ee6b6244991436d1b067d97055c63abc7afc28ad09ec3b0d7847b75608c4bf1
3
+ size 4683041616
model-00021-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f126362b9bd249e9919d3d96b684eec50a2bed051ecc122440c382ccc57e8dbc
3
+ size 4683041616
model-00022-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ae4dba898b357f807c69b7cf8abc447c05404391dff6fec26366f545dc63210f
3
+ size 4683041616
model-00023-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:201eb981d300dcacd3704ea8930c5be5fdf958ff6e60553e77ed2ecfb23190f3
3
+ size 4683041616
model-00025-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ec9ee3be9f078b0fdeb1b29508bd4af2102fc899384c30edc4b5518e8e404acc
3
+ size 4683041616
model-00028-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:221d62d06cd6240a0e610672bea0ba5e11cd75348da3f88494e0bff8d6877483
3
+ size 4683041616
model-00029-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:417fcc09f7a1f5754e50529eba378416885c6e6906f8c1517803295787bccc68
3
+ size 4683041616
model-00031-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:50e12aa40bc9c6c9831e490455cc199cf2a1c5df9fd51d1abaad1df9473bc6eb
3
+ size 4683041616
model-00033-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:17f070f529a9855989acba99eaf5b2d1bc4895739f3a72673db74443a5cb1e29
3
+ size 4683041616
model-00034-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a9a2984ac840c3c1ed1d63188dfdc9559c14436427607b6e5324d4a307b2f743
3
+ size 4683041616
model-00035-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6269d9874e3bc340a2dfd5e0589bbca585d6b5b5177f4775c2357e620dabd72b
3
+ size 4683041616
model-00036-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9873d75be9e8b38152f1602c10153d309ca312c51b1378e3372175145f9c7da3
3
+ size 4683041616
model-00037-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dd4e4cf2af244bc69f8de5888da82e4ede4dd0386307a9a3e81ddff96361bf6c
3
+ size 4683041616
model-00038-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:680c71e7a39c9e6380e4c8ca6d1fa9cbcf6e0c6d60028a518be14fa878a605ec
3
+ size 4683041616
model-00039-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3f72a83a54f976088856bf8f5c8a8029e4e012b1836604ab8ca0f5d8e41246aa
3
+ size 4683041616
model-00040-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6ed932cfd371d50628c12895ce5cd49e9b27fc32ab8dbd61864783927cf8c677
3
+ size 4683041616
model-00041-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3848ce53ca69179052e972a421e16c105d41ce0088d6aa307e34221d5b028bc3
3
+ size 4683041616
model-00042-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6765bda5e7f2c01db0968c86ef6613f8787783f2ce737f5b2785c0fa056549d1
3
+ size 4683041616
model-00043-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6672c4756a3b459a4d0a2a160f90697839b9ed250039f0ed997d529b1e270461
3
+ size 4683041616
model-00044-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f835737cd4c1da8a5fe0250a9e9c7b8f8b564a7b878bf7ff73b634f232b9b069
3
+ size 4683041616
model-00045-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9340f22a9466f65b9b42b37235d0d4ea8ea4bc582615769d3b341a3ec9ffd69e
3
+ size 5362602328
model-00046-of-00046.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e42f8c540b7eaa349cc6dac69d9c066bc863328867c3291e1f47af20781b3f96
3
+ size 3520860544
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
preprocessor_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "size": {"shortest_edge": 12544, "longest_edge": 9633792},
+ "do_rescale": true,
+ "patch_size": 14,
+ "temporal_patch_size": 2,
+ "merge_size": 2,
+ "image_mean": [0.48145466, 0.4578275, 0.40821073],
+ "image_std": [0.26862954, 0.26130258, 0.27577711],
+ "image_processor_type": "Glm4vImageProcessor",
+ "processor_class": "Glm4vProcessor"
+ }
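Assuming the `shortest_edge`/`longest_edge` values above act as min/max total-pixel budgets (as in similar `transformers` image processors) rather than literal edge lengths, they bound the number of visual tokens per image; a quick sanity check of that reading:

```python
# Sketch: relate the assumed pixel budget above to visual token counts.
# After 2x2 patch merging, each visual token covers (14 * 2) ** 2 = 784 pixels.
patch, merge = 14, 2
pixels_per_token = (patch * merge) ** 2          # 784

min_pixels, max_pixels = 12544, 9633792          # values from this config
print(min_pixels // pixels_per_token)            # 16    tokens minimum per image
print(max_pixels // pixels_per_token)            # 12288 tokens maximum per image
```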
tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9340665016419c825c4bdabbcc9acc43b7ca2c68ce142724afa829abb1be5efd
3
+ size 19970699
tokenizer_config.json ADDED
@@ -0,0 +1,325 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "151329": {
4
+ "content": "<|endoftext|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "151330": {
12
+ "content": "[MASK]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "151331": {
20
+ "content": "[gMASK]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "151332": {
28
+ "content": "[sMASK]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "151333": {
36
+ "content": "<sop>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "151334": {
44
+ "content": "<eop>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "151335": {
52
+ "content": "<|system|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "151336": {
60
+ "content": "<|user|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "151337": {
68
+ "content": "<|assistant|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "151338": {
76
+ "content": "<|observation|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "151339": {
84
+ "content": "<|begin_of_image|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "151340": {
92
+ "content": "<|end_of_image|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "151341": {
100
+ "content": "<|begin_of_video|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "151342": {
108
+ "content": "<|end_of_video|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "151343": {
116
+ "content": "<|begin_of_audio|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "151344": {
124
+ "content": "<|end_of_audio|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "151345": {
132
+ "content": "<|begin_of_transcription|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "151346": {
140
+ "content": "<|end_of_transcription|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "151347": {
148
+ "content": "<|code_prefix|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "151348": {
156
+ "content": "<|code_middle|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "151349": {
164
+ "content": "<|code_suffix|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "151350": {
172
+ "content": "<think>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": false
178
+ },
179
+ "151351": {
180
+ "content": "</think>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": false
186
+ },
187
+ "151352": {
188
+ "content": "<tool_call>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": false
194
+ },
195
+ "151353": {
196
+ "content": "</tool_call>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": false
202
+ },
203
+ "151354": {
204
+ "content": "<tool_response>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": false
210
+ },
211
+ "151355": {
212
+ "content": "</tool_response>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": false
218
+ },
219
+ "151356": {
220
+ "content": "<arg_key>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": false
226
+ },
227
+ "151357": {
228
+ "content": "</arg_key>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": false
234
+ },
235
+ "151358": {
236
+ "content": "<arg_value>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": false
242
+ },
243
+ "151359": {
244
+ "content": "</arg_value>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": false
250
+ },
251
+ "151360": {
252
+ "content": "/nothink",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "151361": {
260
+ "content": "<|begin_of_box|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": false
266
+ },
267
+ "151362": {
268
+ "content": "<|end_of_box|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": false
274
+ },
275
+ "151363": {
276
+ "content": "<|image|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": false
282
+ },
283
+ "151364": {
284
+ "content": "<|video|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": false
290
+ }
291
+ },
292
+ "additional_special_tokens": [
293
+ "<|endoftext|>",
294
+ "[MASK]",
295
+ "[gMASK]",
296
+ "[sMASK]",
297
+ "<sop>",
298
+ "<eop>",
299
+ "<|system|>",
300
+ "<|user|>",
301
+ "<|assistant|>",
302
+ "<|observation|>",
303
+ "<|begin_of_image|>",
304
+ "<|end_of_image|>",
305
+ "<|begin_of_video|>",
306
+ "<|end_of_video|>",
307
+ "<|begin_of_audio|>",
308
+ "<|end_of_audio|>",
309
+ "<|begin_of_transcription|>",
310
+ "<|end_of_transcription|>",
311
+ "<|code_prefix|>",
312
+ "<|code_middle|>",
313
+ "<|code_suffix|>",
314
+ "/nothink"
315
+ ],
316
+ "clean_up_tokenization_spaces": false,
317
+ "do_lower_case": false,
318
+ "eos_token": "<|endoftext|>",
319
+ "extra_special_tokens": {},
320
+ "model_max_length": 128000,
321
+ "pad_token": "<|endoftext|>",
322
+ "padding_side": "left",
323
+ "remove_space": false,
324
+ "tokenizer_class": "PreTrainedTokenizer"
325
+ }
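The token ids registered here line up with the multimodal ids in config.json above (for example `<|image|>` -> 151363 and `<|video|>` -> 151364, matching `image_token_id` and `video_token_id`). A small check, assuming the repo id `zai-org/GLM-4.5V`:

```python
# Sketch: confirm the special-token ids used by the chat template.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("zai-org/GLM-4.5V")  # assumed repo id
for t in ["<|user|>", "<|assistant|>", "<|image|>", "<|video|>", "/nothink"]:
    print(t, tok.convert_tokens_to_ids(t))
# Expected from the tables above: 151336, 151337, 151363, 151364, 151360
```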
video_preprocessor_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "size": {"shortest_edge": 12544, "longest_edge": 47040000},
+ "do_rescale": true,
+ "patch_size": 14,
+ "temporal_patch_size": 2,
+ "merge_size": 2,
+ "image_mean": [0.48145466, 0.4578275, 0.40821073],
+ "image_std": [0.26862954, 0.26130258, 0.27577711],
+ "video_processor_type": "Glm4vVideoProcessor",
+ "processor_class": "Glm4vProcessor"
+ }