lightx2v commited on
Commit
2739d76
·
verified ·
1 Parent(s): 6abbc41

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +9 -0
  2. README.md +65 -0
  3. config.json +14 -0
  4. distill_fp8/Wan2.1_VAE.pth +3 -0
  5. distill_fp8/block_0.safetensors +3 -0
  6. distill_fp8/block_1.safetensors +3 -0
  7. distill_fp8/block_10.safetensors +3 -0
  8. distill_fp8/block_11.safetensors +3 -0
  9. distill_fp8/block_12.safetensors +3 -0
  10. distill_fp8/block_13.safetensors +3 -0
  11. distill_fp8/block_14.safetensors +3 -0
  12. distill_fp8/block_15.safetensors +3 -0
  13. distill_fp8/block_16.safetensors +3 -0
  14. distill_fp8/block_17.safetensors +3 -0
  15. distill_fp8/block_18.safetensors +3 -0
  16. distill_fp8/block_19.safetensors +3 -0
  17. distill_fp8/block_2.safetensors +3 -0
  18. distill_fp8/block_20.safetensors +3 -0
  19. distill_fp8/block_21.safetensors +3 -0
  20. distill_fp8/block_22.safetensors +3 -0
  21. distill_fp8/block_23.safetensors +3 -0
  22. distill_fp8/block_24.safetensors +3 -0
  23. distill_fp8/block_25.safetensors +3 -0
  24. distill_fp8/block_26.safetensors +3 -0
  25. distill_fp8/block_27.safetensors +3 -0
  26. distill_fp8/block_28.safetensors +3 -0
  27. distill_fp8/block_29.safetensors +3 -0
  28. distill_fp8/block_3.safetensors +3 -0
  29. distill_fp8/block_30.safetensors +3 -0
  30. distill_fp8/block_31.safetensors +3 -0
  31. distill_fp8/block_32.safetensors +3 -0
  32. distill_fp8/block_33.safetensors +3 -0
  33. distill_fp8/block_34.safetensors +3 -0
  34. distill_fp8/block_35.safetensors +3 -0
  35. distill_fp8/block_36.safetensors +3 -0
  36. distill_fp8/block_37.safetensors +3 -0
  37. distill_fp8/block_38.safetensors +3 -0
  38. distill_fp8/block_39.safetensors +3 -0
  39. distill_fp8/block_4.safetensors +3 -0
  40. distill_fp8/block_5.safetensors +3 -0
  41. distill_fp8/block_6.safetensors +3 -0
  42. distill_fp8/block_7.safetensors +3 -0
  43. distill_fp8/block_8.safetensors +3 -0
  44. distill_fp8/block_9.safetensors +3 -0
  45. distill_fp8/clip-fp8.pth +3 -0
  46. distill_fp8/config.json +14 -0
  47. distill_fp8/diffusion_pytorch_model.safetensors.index.json +0 -0
  48. distill_fp8/google/umt5-xxl/special_tokens_map.json +308 -0
  49. distill_fp8/google/umt5-xxl/spiece.model +3 -0
  50. distill_fp8/google/umt5-xxl/tokenizer.json +3 -0
.gitattributes CHANGED
@@ -33,3 +33,12 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ examples/i2v_input.JPG filter=lfs diff=lfs merge=lfs -text
37
+ xlm-roberta-large/tokenizer.json filter=lfs diff=lfs merge=lfs -text
38
+ google/umt5-xxl/tokenizer.json filter=lfs diff=lfs merge=lfs -text
39
+ distill_models/xlm-roberta-large/tokenizer.json filter=lfs diff=lfs merge=lfs -text
40
+ distill_models/umt5-xxl/tokenizer.json filter=lfs diff=lfs merge=lfs -text
41
+ distill_fp8/xlm-roberta-large/tokenizer.json filter=lfs diff=lfs merge=lfs -text
42
+ distill_fp8/google/umt5-xxl/tokenizer.json filter=lfs diff=lfs merge=lfs -text
43
+ distill_int8/xlm-roberta-large/tokenizer.json filter=lfs diff=lfs merge=lfs -text
44
+ distill_int8/google/umt5-xxl/tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: apache-2.0
3
+ language:
4
+ - en
5
+ - zh
6
+ pipeline_tag: image-to-video
7
+ tags:
8
+ - video generation
9
+ library_name: diffusers
10
+ inference:
11
+ parameters:
12
+ num_inference_steps: 4
13
+ base_model:
14
+ - Wan-AI/Wan2.1-I2V-14B-480P
15
+ ---
16
+ # Wan2.1-I2V-14B-480P-StepDistill-CfgDistill-Lightx2v
17
+
18
+ <p align="center">
19
+ <img src="assets/img_lightx2v.png" width=75%/>
20
+ </p>
21
+
22
+ ## **⚠️Important Notice**
23
+
24
+ > **Please note:** The 720P distilled model = **720P original model** + (**480P step distillation** - **480P original model**), which means **we did not train a native 720P model**.
25
+ >
26
+ > **The LoRA in this repository is completely identical to the LoRA in the 480P repository.**
27
+
28
+ ## Overview
29
+
30
+ Wan2.1-I2V-14B-480P-StepDistill-CfgDistill-Lightx2v is an advanced image-to-video generation model built upon the Wan2.1-I2V-14B-480P foundation. This approach allows the model to generate videos with significantly fewer inference steps (4 steps) and without classifier-free guidance, substantially reducing video generation time while maintaining high quality outputs.
31
+
32
+ In this version, we added the following features:
33
+ 1. Trained with higher quality datasets for extended iterations.
34
+ 2. New fp8 and int8 quantized distillation models have been added, which enable fast inference using lightx2v on RTX 4060.
35
+
36
+
37
+ ## Training
38
+
39
+ Our training code is modified based on the [Self-Forcing](https://github.com/guandeh17/Self-Forcing) repository. We extended support for the Wan2.1-14B-I2V-480P model and performed a 4-step bidirectional distillation process. The modified code is available at [Self-Forcing-Plus](https://github.com/GoatWu/Self-Forcing-Plus).
40
+
41
+ ## Inference
42
+
43
+ Our inference framework utilizes [lightx2v](https://github.com/ModelTC/lightx2v), a highly efficient inference engine that supports multiple models. This framework significantly accelerates the video generation process while maintaining high quality output.
44
+
45
+ ```bash
46
+ bash scripts/wan/run_wan_i2v_distill_4step_cfg.sh
47
+ ```
48
+
49
+ or use the LoRA version:
50
+
51
+ ```bash
52
+ bash scripts/wan/run_wan_i2v_distill_4step_cfg_lora.sh
53
+ ```
54
+
55
+ We recommend using the **LCM scheduler** with the following settings:
56
+ - `shift=5.0`
57
+ - `guidance_scale=1.0 (i.e., without CFG)`
58
+
59
+ ## License Agreement
60
+ The models in this repository are licensed under the Apache 2.0 License. We claim no rights over your generated contents, granting you the freedom to use them while ensuring that your usage complies with the provisions of this license. You are fully accountable for your use of the models, which must not involve sharing any content that violates applicable laws, causes harm to individuals or groups, disseminates personal information intended for harm, spreads misinformation, or targets vulnerable populations. For a complete list of restrictions and details regarding your rights, please refer to the full text of the [license](LICENSE.txt).
61
+
62
+
63
+ ## Acknowledgements
64
+
65
+ We would like to thank the contributors to the [Wan2.1](https://huggingface.co/Wan-AI/Wan2.1-T2V-14B) and [Self-Forcing](https://huggingface.co/gdhe17/Self-Forcing/tree/main) repositories for their open research.
config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_class_name": "WanModel",
3
+ "_diffusers_version": "0.30.0",
4
+ "dim": 5120,
5
+ "eps": 1e-06,
6
+ "ffn_dim": 13824,
7
+ "freq_dim": 256,
8
+ "in_dim": 36,
9
+ "model_type": "i2v",
10
+ "num_heads": 40,
11
+ "num_layers": 40,
12
+ "out_dim": 16,
13
+ "text_len": 512
14
+ }
distill_fp8/Wan2.1_VAE.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:38071ab59bd94681c686fa51d75a1968f64e470262043be31f7a094e442fd981
3
+ size 507609880
distill_fp8/block_0.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2123c99153790109fcd07cd72ae0adb1c473d26a8e9a4675a01e9a839b58de35
3
+ size 404393192
distill_fp8/block_1.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:55b217db7dbfac00b467a48f3b9a298f1802491a7ff78e7041a7e47ec76d6c83
3
+ size 404393192
distill_fp8/block_10.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:65721ed3fea6321431538db65b1dac0c8a579c5e3479f488b95f0d2eae70fd21
3
+ size 404393232
distill_fp8/block_11.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:14d303eadb851b1568029f78f3f12d9fcd1bae606150cbca6406e9b142bd8e25
3
+ size 404393232
distill_fp8/block_12.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8125dc21c068342b51e594bdeb71de960a2b55a63ca54d3ee483462d6dad8fb
3
+ size 404393232
distill_fp8/block_13.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7dc2b92a12e1b5ca72bb67dc146b53464256d846cafb9de2850e8d8c5a5d0de2
3
+ size 404393232
distill_fp8/block_14.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1e73013fc60e2977a91a70abf04dc6ef9616100787b13c5fadb7ccfa278e1bda
3
+ size 404393232
distill_fp8/block_15.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:76b533561c4fbbe11edc3115c2b7e7d196d8ee0311bc4dde82cb679af49cd36a
3
+ size 404393232
distill_fp8/block_16.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9f9d0120e65d47c752fd6db3626099879bcb9a155a1865a94be2be21cde2bcb7
3
+ size 404393232
distill_fp8/block_17.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:976471f401a0ca67250078deec1021a8323b2d5663d11f38036e13c3e13c8b1d
3
+ size 404393232
distill_fp8/block_18.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:640c334c1fb28b1100ca2c650313a050ae37eb346395d40bc82dd6907b7d3d1c
3
+ size 404393232
distill_fp8/block_19.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8a2f6d4e7ccd59582f1654c12f493c9e8692d76946e5e8bd33aa1ce317b9f424
3
+ size 404393232
distill_fp8/block_2.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0d1c00e7abb7aac6ecce10e00d73cc3f7b2f4af720cf275777dd8dc1682bb136
3
+ size 404393192
distill_fp8/block_20.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ad0abcf3f673fe9d34fc1089b685049c84e0e4e44b42071862044b6b1f921e13
3
+ size 404393232
distill_fp8/block_21.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2b9008699dc6d7b80d0104bce70dafa60f8f622ab2194b2937ea8dba54dc0e3d
3
+ size 404393232
distill_fp8/block_22.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ac4c246dfdc894b5b83881f661195ceed3480d112b69698808262cce232db6b3
3
+ size 404393232
distill_fp8/block_23.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e17898b66d3d377b1529fd85225310ce92bd498369fb13e3bde5323bd6857811
3
+ size 404393232
distill_fp8/block_24.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e5ce7f7f8b814a5ae8ad8a585713892e0573951723d6345b4ff5ca73b36016ab
3
+ size 404393232
distill_fp8/block_25.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:04eaea0915f70a84d7e1b244d4a2f48b2386b8d0f9631c7cd98f3e5b7273fa9d
3
+ size 404393232
distill_fp8/block_26.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:300bdef84a180a533f77e8f8ce69a6ed198ec8954f5c3ba90e77a6c0d5b8a86d
3
+ size 404393232
distill_fp8/block_27.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3b5dc14d9883126872731151a52a8f095dea641c844df1427d1036b50b26ac7b
3
+ size 404393232
distill_fp8/block_28.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0089fd5ee46a0bcd5c1988b3d6f0048d256041c2307d0e0e590b7474ffcbcad0
3
+ size 404393232
distill_fp8/block_29.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9e8fa1ac0513953055d5fc4016ad78000a83a24006fe23924a73d4a4af9c993b
3
+ size 404393232
distill_fp8/block_3.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e7d47a678d06e2a9aeb9dae8c1f52b76e458023b2e6953be6207ed7cc6b03bce
3
+ size 404393192
distill_fp8/block_30.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:38e47d4d8acaf0c656bfcf5bd4cfecdeef3812ab88171f483835c62ca7e8427c
3
+ size 404393232
distill_fp8/block_31.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d7ba0dfb86de274c0e05aeca2fcda9a8de01f5a4c685d5590cdd2e09152e4528
3
+ size 404393232
distill_fp8/block_32.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7e0c908aac89d97964558622c53ab3e37044c9a493285090d8c930a0868e28b5
3
+ size 404393232
distill_fp8/block_33.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3564de128ae9bc6e225152c78e5a1f401d14b8c2a3a8b6dd4b693962da13ceea
3
+ size 404393232
distill_fp8/block_34.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a82fcb1c479d602b5740e8dd9cf716fdbc7b793ce992f5f3d97d60e75a42ef61
3
+ size 404393232
distill_fp8/block_35.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c7cef668410f942e6aa0c8eff4c8c657719e3e404623988c7e9cc9260f8a67ea
3
+ size 404393232
distill_fp8/block_36.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bed2641f67f2fc30b87ddba9dea76c6dc172ea88bf0cdfe9aa07ea533696c9d2
3
+ size 404393232
distill_fp8/block_37.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a8658a869592967f68a5fca1b75e64576dd6ee9742d54f2fa16ddb9b6441ec97
3
+ size 404393232
distill_fp8/block_38.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e371b2fed25b9fc409b209ca93406ee4d998414342b4399062e4ddde1d9ffffb
3
+ size 404393232
distill_fp8/block_39.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:95b1c34739871ad3b4c70a1ed9bb606da8de9bbe7a74e3e3d9b2d5d726d606da
3
+ size 404393232
distill_fp8/block_4.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2c3b1565d375ed86be3ad5df39c3ce8b6577277254bae089f2906423b579c6b1
3
+ size 404393192
distill_fp8/block_5.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ced3f8d64997d1ba687361e3483cb12892532593023763a0a8972d18d9ce0d07
3
+ size 404393192
distill_fp8/block_6.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f96da2ac1aa8822c9d7ac3b1724a971ae53dc7a24869e5992d4ccaa5a80a8492
3
+ size 404393192
distill_fp8/block_7.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1afd0a1bd1de0c4cd0a8038449153160dcdbd4aab23daed2141f62b36ef9fd8f
3
+ size 404393192
distill_fp8/block_8.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ff1d716da1f0d061c96c33d2c90e271d1b2e10498bd8daba164e88f4ccfc7727
3
+ size 404393192
distill_fp8/block_9.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1baa7734dc405f575c9ba330b1a38e4e75e8a1795604582cb0d5f2f6488a954f
3
+ size 404393192
distill_fp8/clip-fp8.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f20da116c0ee6b0aeb1e73033996efec8c8b51640ba9926fb51372a1b199d289
3
+ size 636647292
distill_fp8/config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_class_name": "WanModel",
3
+ "_diffusers_version": "0.30.0",
4
+ "dim": 5120,
5
+ "eps": 1e-06,
6
+ "ffn_dim": 13824,
7
+ "freq_dim": 256,
8
+ "in_dim": 36,
9
+ "model_type": "i2v",
10
+ "num_heads": 40,
11
+ "num_layers": 40,
12
+ "out_dim": 16,
13
+ "text_len": 512
14
+ }
distill_fp8/diffusion_pytorch_model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
distill_fp8/google/umt5-xxl/special_tokens_map.json ADDED
@@ -0,0 +1,308 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<extra_id_0>",
4
+ "<extra_id_1>",
5
+ "<extra_id_2>",
6
+ "<extra_id_3>",
7
+ "<extra_id_4>",
8
+ "<extra_id_5>",
9
+ "<extra_id_6>",
10
+ "<extra_id_7>",
11
+ "<extra_id_8>",
12
+ "<extra_id_9>",
13
+ "<extra_id_10>",
14
+ "<extra_id_11>",
15
+ "<extra_id_12>",
16
+ "<extra_id_13>",
17
+ "<extra_id_14>",
18
+ "<extra_id_15>",
19
+ "<extra_id_16>",
20
+ "<extra_id_17>",
21
+ "<extra_id_18>",
22
+ "<extra_id_19>",
23
+ "<extra_id_20>",
24
+ "<extra_id_21>",
25
+ "<extra_id_22>",
26
+ "<extra_id_23>",
27
+ "<extra_id_24>",
28
+ "<extra_id_25>",
29
+ "<extra_id_26>",
30
+ "<extra_id_27>",
31
+ "<extra_id_28>",
32
+ "<extra_id_29>",
33
+ "<extra_id_30>",
34
+ "<extra_id_31>",
35
+ "<extra_id_32>",
36
+ "<extra_id_33>",
37
+ "<extra_id_34>",
38
+ "<extra_id_35>",
39
+ "<extra_id_36>",
40
+ "<extra_id_37>",
41
+ "<extra_id_38>",
42
+ "<extra_id_39>",
43
+ "<extra_id_40>",
44
+ "<extra_id_41>",
45
+ "<extra_id_42>",
46
+ "<extra_id_43>",
47
+ "<extra_id_44>",
48
+ "<extra_id_45>",
49
+ "<extra_id_46>",
50
+ "<extra_id_47>",
51
+ "<extra_id_48>",
52
+ "<extra_id_49>",
53
+ "<extra_id_50>",
54
+ "<extra_id_51>",
55
+ "<extra_id_52>",
56
+ "<extra_id_53>",
57
+ "<extra_id_54>",
58
+ "<extra_id_55>",
59
+ "<extra_id_56>",
60
+ "<extra_id_57>",
61
+ "<extra_id_58>",
62
+ "<extra_id_59>",
63
+ "<extra_id_60>",
64
+ "<extra_id_61>",
65
+ "<extra_id_62>",
66
+ "<extra_id_63>",
67
+ "<extra_id_64>",
68
+ "<extra_id_65>",
69
+ "<extra_id_66>",
70
+ "<extra_id_67>",
71
+ "<extra_id_68>",
72
+ "<extra_id_69>",
73
+ "<extra_id_70>",
74
+ "<extra_id_71>",
75
+ "<extra_id_72>",
76
+ "<extra_id_73>",
77
+ "<extra_id_74>",
78
+ "<extra_id_75>",
79
+ "<extra_id_76>",
80
+ "<extra_id_77>",
81
+ "<extra_id_78>",
82
+ "<extra_id_79>",
83
+ "<extra_id_80>",
84
+ "<extra_id_81>",
85
+ "<extra_id_82>",
86
+ "<extra_id_83>",
87
+ "<extra_id_84>",
88
+ "<extra_id_85>",
89
+ "<extra_id_86>",
90
+ "<extra_id_87>",
91
+ "<extra_id_88>",
92
+ "<extra_id_89>",
93
+ "<extra_id_90>",
94
+ "<extra_id_91>",
95
+ "<extra_id_92>",
96
+ "<extra_id_93>",
97
+ "<extra_id_94>",
98
+ "<extra_id_95>",
99
+ "<extra_id_96>",
100
+ "<extra_id_97>",
101
+ "<extra_id_98>",
102
+ "<extra_id_99>",
103
+ "<extra_id_100>",
104
+ "<extra_id_101>",
105
+ "<extra_id_102>",
106
+ "<extra_id_103>",
107
+ "<extra_id_104>",
108
+ "<extra_id_105>",
109
+ "<extra_id_106>",
110
+ "<extra_id_107>",
111
+ "<extra_id_108>",
112
+ "<extra_id_109>",
113
+ "<extra_id_110>",
114
+ "<extra_id_111>",
115
+ "<extra_id_112>",
116
+ "<extra_id_113>",
117
+ "<extra_id_114>",
118
+ "<extra_id_115>",
119
+ "<extra_id_116>",
120
+ "<extra_id_117>",
121
+ "<extra_id_118>",
122
+ "<extra_id_119>",
123
+ "<extra_id_120>",
124
+ "<extra_id_121>",
125
+ "<extra_id_122>",
126
+ "<extra_id_123>",
127
+ "<extra_id_124>",
128
+ "<extra_id_125>",
129
+ "<extra_id_126>",
130
+ "<extra_id_127>",
131
+ "<extra_id_128>",
132
+ "<extra_id_129>",
133
+ "<extra_id_130>",
134
+ "<extra_id_131>",
135
+ "<extra_id_132>",
136
+ "<extra_id_133>",
137
+ "<extra_id_134>",
138
+ "<extra_id_135>",
139
+ "<extra_id_136>",
140
+ "<extra_id_137>",
141
+ "<extra_id_138>",
142
+ "<extra_id_139>",
143
+ "<extra_id_140>",
144
+ "<extra_id_141>",
145
+ "<extra_id_142>",
146
+ "<extra_id_143>",
147
+ "<extra_id_144>",
148
+ "<extra_id_145>",
149
+ "<extra_id_146>",
150
+ "<extra_id_147>",
151
+ "<extra_id_148>",
152
+ "<extra_id_149>",
153
+ "<extra_id_150>",
154
+ "<extra_id_151>",
155
+ "<extra_id_152>",
156
+ "<extra_id_153>",
157
+ "<extra_id_154>",
158
+ "<extra_id_155>",
159
+ "<extra_id_156>",
160
+ "<extra_id_157>",
161
+ "<extra_id_158>",
162
+ "<extra_id_159>",
163
+ "<extra_id_160>",
164
+ "<extra_id_161>",
165
+ "<extra_id_162>",
166
+ "<extra_id_163>",
167
+ "<extra_id_164>",
168
+ "<extra_id_165>",
169
+ "<extra_id_166>",
170
+ "<extra_id_167>",
171
+ "<extra_id_168>",
172
+ "<extra_id_169>",
173
+ "<extra_id_170>",
174
+ "<extra_id_171>",
175
+ "<extra_id_172>",
176
+ "<extra_id_173>",
177
+ "<extra_id_174>",
178
+ "<extra_id_175>",
179
+ "<extra_id_176>",
180
+ "<extra_id_177>",
181
+ "<extra_id_178>",
182
+ "<extra_id_179>",
183
+ "<extra_id_180>",
184
+ "<extra_id_181>",
185
+ "<extra_id_182>",
186
+ "<extra_id_183>",
187
+ "<extra_id_184>",
188
+ "<extra_id_185>",
189
+ "<extra_id_186>",
190
+ "<extra_id_187>",
191
+ "<extra_id_188>",
192
+ "<extra_id_189>",
193
+ "<extra_id_190>",
194
+ "<extra_id_191>",
195
+ "<extra_id_192>",
196
+ "<extra_id_193>",
197
+ "<extra_id_194>",
198
+ "<extra_id_195>",
199
+ "<extra_id_196>",
200
+ "<extra_id_197>",
201
+ "<extra_id_198>",
202
+ "<extra_id_199>",
203
+ "<extra_id_200>",
204
+ "<extra_id_201>",
205
+ "<extra_id_202>",
206
+ "<extra_id_203>",
207
+ "<extra_id_204>",
208
+ "<extra_id_205>",
209
+ "<extra_id_206>",
210
+ "<extra_id_207>",
211
+ "<extra_id_208>",
212
+ "<extra_id_209>",
213
+ "<extra_id_210>",
214
+ "<extra_id_211>",
215
+ "<extra_id_212>",
216
+ "<extra_id_213>",
217
+ "<extra_id_214>",
218
+ "<extra_id_215>",
219
+ "<extra_id_216>",
220
+ "<extra_id_217>",
221
+ "<extra_id_218>",
222
+ "<extra_id_219>",
223
+ "<extra_id_220>",
224
+ "<extra_id_221>",
225
+ "<extra_id_222>",
226
+ "<extra_id_223>",
227
+ "<extra_id_224>",
228
+ "<extra_id_225>",
229
+ "<extra_id_226>",
230
+ "<extra_id_227>",
231
+ "<extra_id_228>",
232
+ "<extra_id_229>",
233
+ "<extra_id_230>",
234
+ "<extra_id_231>",
235
+ "<extra_id_232>",
236
+ "<extra_id_233>",
237
+ "<extra_id_234>",
238
+ "<extra_id_235>",
239
+ "<extra_id_236>",
240
+ "<extra_id_237>",
241
+ "<extra_id_238>",
242
+ "<extra_id_239>",
243
+ "<extra_id_240>",
244
+ "<extra_id_241>",
245
+ "<extra_id_242>",
246
+ "<extra_id_243>",
247
+ "<extra_id_244>",
248
+ "<extra_id_245>",
249
+ "<extra_id_246>",
250
+ "<extra_id_247>",
251
+ "<extra_id_248>",
252
+ "<extra_id_249>",
253
+ "<extra_id_250>",
254
+ "<extra_id_251>",
255
+ "<extra_id_252>",
256
+ "<extra_id_253>",
257
+ "<extra_id_254>",
258
+ "<extra_id_255>",
259
+ "<extra_id_256>",
260
+ "<extra_id_257>",
261
+ "<extra_id_258>",
262
+ "<extra_id_259>",
263
+ "<extra_id_260>",
264
+ "<extra_id_261>",
265
+ "<extra_id_262>",
266
+ "<extra_id_263>",
267
+ "<extra_id_264>",
268
+ "<extra_id_265>",
269
+ "<extra_id_266>",
270
+ "<extra_id_267>",
271
+ "<extra_id_268>",
272
+ "<extra_id_269>",
273
+ "<extra_id_270>",
274
+ "<extra_id_271>",
275
+ "<extra_id_272>",
276
+ "<extra_id_273>",
277
+ "<extra_id_274>",
278
+ "<extra_id_275>",
279
+ "<extra_id_276>",
280
+ "<extra_id_277>",
281
+ "<extra_id_278>",
282
+ "<extra_id_279>",
283
+ "<extra_id_280>",
284
+ "<extra_id_281>",
285
+ "<extra_id_282>",
286
+ "<extra_id_283>",
287
+ "<extra_id_284>",
288
+ "<extra_id_285>",
289
+ "<extra_id_286>",
290
+ "<extra_id_287>",
291
+ "<extra_id_288>",
292
+ "<extra_id_289>",
293
+ "<extra_id_290>",
294
+ "<extra_id_291>",
295
+ "<extra_id_292>",
296
+ "<extra_id_293>",
297
+ "<extra_id_294>",
298
+ "<extra_id_295>",
299
+ "<extra_id_296>",
300
+ "<extra_id_297>",
301
+ "<extra_id_298>",
302
+ "<extra_id_299>"
303
+ ],
304
+ "bos_token": "<s>",
305
+ "eos_token": "</s>",
306
+ "pad_token": "<pad>",
307
+ "unk_token": "<unk>"
308
+ }
distill_fp8/google/umt5-xxl/spiece.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e3909a67b780650b35cf529ac782ad2b6b26e6d1f849d3fbb6a872905f452458
3
+ size 4548313
distill_fp8/google/umt5-xxl/tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6e197b4d3dbd71da14b4eb255f4fa91c9c1f2068b20a2de2472967ca3d22602b
3
+ size 16837417