Xenova (HF Staff) committed
Commit cab364e · verified · 1 Parent(s): f2956e8

Upload optimized ONNX model (#3)


- Upload optimized ONNX model (9cf7801d45a15fb6191945a023478ee51d6d6f26)

.gitattributes CHANGED
@@ -41,3 +41,15 @@ onnx/model_q4.onnx_data filter=lfs diff=lfs merge=lfs -text
  onnx/model_q4f16.onnx_data filter=lfs diff=lfs merge=lfs -text
  onnx/model_quantized.onnx_data filter=lfs diff=lfs merge=lfs -text
  onnx/model_uint8.onnx_data filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_1 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_2 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_3 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_4 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_5 filter=lfs diff=lfs merge=lfs -text
+ onnx/model.onnx_data_6 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_fp16.onnx_data_1 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_fp16.onnx_data_2 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_fp16.onnx_data_3 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_q4.onnx_data_1 filter=lfs diff=lfs merge=lfs -text
+ onnx/model_q4f16.onnx_data_1 filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
chat_template.jinja ADDED
@@ -0,0 +1,93 @@
+ {{- bos_token }}
+ {%- if custom_tools is defined %}
+     {%- set tools = custom_tools %}
+ {%- endif %}
+ {%- if not tools_in_user_message is defined %}
+     {%- set tools_in_user_message = true %}
+ {%- endif %}
+ {%- if not date_string is defined %}
+     {%- if strftime_now is defined %}
+         {%- set date_string = strftime_now("%d %b %Y") %}
+     {%- else %}
+         {%- set date_string = "26 Jul 2024" %}
+     {%- endif %}
+ {%- endif %}
+ {%- if not tools is defined %}
+     {%- set tools = none %}
+ {%- endif %}
+
+ {#- This block extracts the system message, so we can slot it into the right place. #}
+ {%- if messages[0]['role'] == 'system' %}
+     {%- set system_message = messages[0]['content']|trim %}
+     {%- set messages = messages[1:] %}
+ {%- else %}
+     {%- set system_message = "" %}
+ {%- endif %}
+
+ {#- System message #}
+ {{- "<|start_header_id|>system<|end_header_id|>\n\n" }}
+ {%- if tools is not none %}
+     {{- "Environment: ipython\n" }}
+ {%- endif %}
+ {{- "Cutting Knowledge Date: December 2023\n" }}
+ {{- "Today Date: " + date_string + "\n\n" }}
+ {%- if tools is not none and not tools_in_user_message %}
+     {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+ {%- endif %}
+ {{- system_message }}
+ {{- "<|eot_id|>" }}
+
+ {#- Custom tools are passed in a user message with some extra guidance #}
+ {%- if tools_in_user_message and not tools is none %}
+     {#- Extract the first user message so we can plug it in here #}
+     {%- if messages | length != 0 %}
+         {%- set first_user_message = messages[0]['content']|trim %}
+         {%- set messages = messages[1:] %}
+     {%- else %}
+         {{- raise_exception("Cannot put tools in the first user message when there's no first user message!") }}
+ {%- endif %}
+     {{- '<|start_header_id|>user<|end_header_id|>\n\n' -}}
+     {{- "Given the following functions, please respond with a JSON for a function call " }}
+     {{- "with its proper arguments that best answers the given prompt.\n\n" }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+     {{- first_user_message + "<|eot_id|>"}}
+ {%- endif %}
+
+ {%- for message in messages %}
+     {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}
+         {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' }}
+     {%- elif 'tool_calls' in message %}
+         {%- if not message.tool_calls|length == 1 %}
+             {{- raise_exception("This model only supports single tool-calls at once!") }}
+         {%- endif %}
+         {%- set tool_call = message.tool_calls[0].function %}
+         {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' -}}
+         {{- '{"name": "' + tool_call.name + '", ' }}
+         {{- '"parameters": ' }}
+         {{- tool_call.arguments | tojson }}
+         {{- "}" }}
+         {{- "<|eot_id|>" }}
+     {%- elif message.role == "tool" or message.role == "ipython" %}
+         {{- "<|start_header_id|>ipython<|end_header_id|>\n\n" }}
+         {%- if message.content is mapping or message.content is iterable %}
+             {{- message.content | tojson }}
+         {%- else %}
+             {{- message.content }}
+         {%- endif %}
+         {{- "<|eot_id|>" }}
+     {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+     {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' }}
+ {%- endif %}
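
Note (not part of the commit): the added chat_template.jinja is the Llama 3.1/3.2-style template with single tool-call support, and the same template is inlined into tokenizer_config.json further down. As a rough sketch of how it could be rendered client-side, assuming the Transformers.js v3 apply_chat_template API and a placeholder repo id (not named in this commit):

import { AutoTokenizer } from "@huggingface/transformers";

// Hypothetical repo id for illustration only.
const tokenizer = await AutoTokenizer.from_pretrained("onnx-community/Llama-3.2-3B-Instruct");

const messages = [
  { role: "system", content: "You are a helpful assistant." },
  { role: "user", content: "What is the capital of France?" },
];

// Render the template to a plain string so the prompt can be inspected;
// add_generation_prompt appends the trailing assistant header the template emits.
const prompt = tokenizer.apply_chat_template(messages, {
  tokenize: false,
  add_generation_prompt: true,
});

console.log(prompt); // <|begin_of_text|><|start_header_id|>system<|end_header_id|> ...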
config.json CHANGED
@@ -1,11 +1,11 @@
  {
- "_name_or_path": "nltpt/Llama-3.2-3B-Instruct",
  "architectures": [
  "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
+ "dtype": "bfloat16",
  "eos_token_id": [
  128001,
  128008,
@@ -24,28 +24,29 @@
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
- "rope_scaling": {
+ "rope_parameters": {
  "factor": 32.0,
  "high_freq_factor": 4.0,
  "low_freq_factor": 1.0,
  "original_max_position_embeddings": 8192,
+ "rope_theta": 500000.0,
  "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
- "transformers_version": "4.43.4",
+ "transformers_version": "5.0.0.dev0",
  "use_cache": true,
  "vocab_size": 128256,
  "transformers.js_config": {
  "use_external_data_format": {
- "model.onnx": true,
- "model_bnb4.onnx": true,
- "model_fp16.onnx": true,
- "model_int8.onnx": true,
- "model_q4.onnx": true,
- "model_q4f16.onnx": true,
- "model_quantized.onnx": true,
- "model_uint8.onnx": true
+ "model.onnx": 7,
+ "model_fp16.onnx": 4,
+ "model_q4.onnx": 2,
+ "model_q4f16.onnx": 2
+ },
+ "kv_cache_dtype": {
+ "q4f16": "float16",
+ "fp16": "float16"
  }
  }
  }
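
Note (not part of the commit): in the updated transformers.js_config, use_external_data_format now maps each graph to a count of external-data chunks (model.onnx → 7, matching onnx/model.onnx_data plus _1 through _6 below; fp16 → 4; q4 and q4f16 → 2), and kv_cache_dtype keeps the fp16/q4f16 KV caches in float16. A minimal sketch of how a consumer might pick one of these variants, assuming the @huggingface/transformers v3 API and a placeholder repo id:

import { AutoModelForCausalLM } from "@huggingface/transformers";

// Placeholder repo id; the commit itself does not name the repository.
const model_id = "onnx-community/Llama-3.2-3B-Instruct";

// dtype selects which ONNX variant is fetched (e.g. "q4f16" -> onnx/model_q4f16.onnx
// plus the external-data chunks declared in use_external_data_format).
const model = await AutoModelForCausalLM.from_pretrained(model_id, {
  dtype: "q4f16",
  device: "webgpu", // or "wasm"
});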
generation_config.json CHANGED
@@ -8,5 +8,6 @@
  ],
  "temperature": 0.6,
  "top_p": 0.9,
- "transformers_version": "4.43.4"
+ "transformers_version": "5.0.0.dev0",
+ "trust_remote_code": false
  }
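
Note (not part of the commit): generation_config.json keeps the sampling defaults (temperature 0.6, top_p 0.9) and mainly bumps the transformers version. A sketch of passing the same settings explicitly through a Transformers.js text-generation pipeline (assumed v3 options, placeholder repo id):

import { pipeline } from "@huggingface/transformers";

// Placeholder repo id for illustration.
const generator = await pipeline("text-generation", "onnx-community/Llama-3.2-3B-Instruct", {
  dtype: "q4f16",
});

const output = await generator("Write a haiku about ONNX.", {
  max_new_tokens: 128,
  do_sample: true, // sampling must be enabled for temperature/top_p to take effect
  temperature: 0.6,
  top_p: 0.9,
});

console.log(output[0].generated_text);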
onnx/model.onnx CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2031434d0666f7dae5f17624e0b356eef1afe9a9292134ccc072f7058bec7da1
- size 907316
+ oid sha256:12257b4dd97dd44a429f80927e5f7ecf73a4e8286f6ba160c8695dcfdb923af6
+ size 187026
onnx/model.onnx_data CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:655e8a3902336ecdbf29016cb53111208eefa7bab1dc039cbeca5f6ff06fb594
- size 12850999296
+ oid sha256:7b52e1c59497bfe9fa9a9e7af77d4e6b8f3bdc842811120681601a02d768e28a
+ size 2096140288
onnx/model.onnx_data_1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d710350eef90de5e258a8b69f17a32274e91ad74334ab5321c5e3f8aeae852b4
+ size 2063732736
onnx/model.onnx_data_2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63ff2d850b3a545cda8caab0442f6e5f8c91469e13ec0e79d065c3d9502d6cdc
+ size 2013388800
onnx/model.onnx_data_3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65f239149550108e98797b3e58df16492d513a6759edd10cc4ba576f27774a2b
+ size 2013388800
onnx/model.onnx_data_4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1750167903456855f1dc0eae90fa7879207829bdbe1946beda841a9a962daaa2
+ size 2013388800
onnx/model.onnx_data_5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8716799b7471afa5915e419c8dca7598000aea3dcac50dd8ac3cb8c052108f47
+ size 2013388800
onnx/model.onnx_data_6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e078a90cf8cb624e1a68a14a995553e78c3357cd58266b6bc53973b35d197e4b
+ size 704679936
onnx/model_fp16.onnx CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:12a4a9e5a004af928bef05e0341d28686a14e2fac77b9301a554848057884d54
- size 922260
+ oid sha256:e015a9818899a852febc9c6892f92a9f748eab93be1546c040821c700330374d
+ size 188347
onnx/model_fp16.onnx_data CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f6b3794069ce717ce909de0e7008148ecf51983bc664bb7dc85be8272d8cca9c
- size 6425499648
+ oid sha256:d99b13254ee875a53602a4c68d4863cd884eba298ec99a54b17a5fbb3d6eca58
+ size 2079936512
onnx/model_fp16.onnx_data_1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:329ca281650c53770534f34599de247facc76218b7d8cdd7bd1be0aec02839c7
+ size 2063720448
onnx/model_fp16.onnx_data_2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30d32902f02a8120f69fbbc09adb593949ae24d651750514e300520a1e0226ec
+ size 2063720448
onnx/model_fp16.onnx_data_3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f12db62ad73a49cbd3f4e016887d1074706cb641634984642bb904b2c083b2f
+ size 251676672
onnx/model_q4.onnx CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3ec253e403912e9f3ff677e882e8fd39d30e4e8a1ff2d238f4850698e77d9c48
- size 950733
+ oid sha256:06366bfcd5c1cc2580b2a602b3628b512d3c7404987508e3b4fb7550e820f0ba
+ size 259880
onnx/model_q4.onnx_data CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8c9830cb8795f3cc5cc95ec9057afe4024e98ae2b3c2a153f1ad26ccd0275143
- size 3338317824
+ oid sha256:1e8bde953a04cc6fd5d6a046205b97b6d69e556d34675592f87d17f176ea5245
+ size 2094051328
onnx/model_q4.onnx_data_1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d8666d2aa4678fd8b6e427b11147792368bbac1bbcc28ecc454203e00d00509
+ size 1311375360
onnx/model_q4f16.onnx CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4abb0fc3af197c3b22b4c684b1ba3d1ba6d2aab5b416b1990ed707798e87b951
- size 965860
+ oid sha256:43648be8ff45ed7bc75c75ea0d495beffa8a8632910e53d3aa824d6bfffaae46
+ size 260899
onnx/model_q4f16.onnx_data CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6805a8950564a96e9cc98c236a0597342747f339fd3ec5e88d621a1a13da4da7
- size 2373801984
+ oid sha256:0669c8c258ea5437b82cc17e5ca87bb91a9ede5b2f5ff80675c0b8e51f1b6043
+ size 2095929344
onnx/model_q4f16.onnx_data_1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63b1b82298ad66f940b4f918f81c386fbe4e15a4e178efb14bc558d127185113
+ size 311427072
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -2050,13 +2050,14 @@
  }
  },
  "bos_token": "<|begin_of_text|>",
- "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
  "model_input_names": [
  "input_ids",
  "attention_mask"
  ],
  "model_max_length": 131072,
- "tokenizer_class": "PreTrainedTokenizerFast"
- }
+ "tokenizer_class": "PreTrainedTokenizerFast",
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n    {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n    {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n    {%- if strftime_now is defined %}\n        {%- set date_string = strftime_now(\"%d %b %Y\") %}\n    {%- else %}\n        {%- set date_string = \"26 Jul 2024\" %}\n    {%- endif %}\n{%- endif %}\n{%- if not tools is defined %}\n    {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n    {%- set system_message = messages[0]['content']|trim %}\n    {%- set messages = messages[1:] %}\n{%- else %}\n    {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if tools is not none %}\n    {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n    {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n    {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n    {{- \"Do not use variables.\\n\\n\" }}\n    {%- for t in tools %}\n        {{- t | tojson(indent=4) }}\n        {{- \"\\n\\n\" }}\n    {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n    {#- Extract the first user message so we can plug it in here #}\n    {%- if messages | length != 0 %}\n        {%- set first_user_message = messages[0]['content']|trim %}\n        {%- set messages = messages[1:] %}\n    {%- else %}\n        {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n    {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n    {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n    {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n    {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n    {{- \"Do not use variables.\\n\\n\" }}\n    {%- for t in tools %}\n        {{- t | tojson(indent=4) }}\n        {{- \"\\n\\n\" }}\n    {%- endfor %}\n    {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n    {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n        {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n    {%- elif 'tool_calls' in message %}\n        {%- if not message.tool_calls|length == 1 %}\n            {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n        {%- endif %}\n        {%- set tool_call = message.tool_calls[0].function %}\n        {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n        {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n        {{- '\"parameters\": ' }}\n        {{- tool_call.arguments | tojson }}\n        {{- \"}\" }}\n        {{- \"<|eot_id|>\" }}\n    {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n        {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n        {%- if message.content is mapping or message.content is iterable %}\n            {{- message.content | tojson }}\n        {%- else %}\n            {{- message.content }}\n        {%- endif %}\n        {{- \"<|eot_id|>\" }}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n"
+ }