Yudi Xue committed (verified)
Commit 09e5d42 · 1 Parent(s): ba85e25

Upload Qwen2ForCausalLM

Files changed (3):
  1. config.json +3 -3
  2. generation_config.json +1 -1
  3. recipe.yaml +2 -2
config.json CHANGED
@@ -55,7 +55,7 @@
       }
     },
     "format": "float-quantized",
-    "global_compression_ratio": 1.5268082955288436,
+    "global_compression_ratio": null,
     "ignore": [
       "lm_head"
     ],
@@ -66,10 +66,10 @@
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000000.0,
-  "sliding_window": null,
+  "sliding_window": 32768,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.48.1",
+  "transformers_version": "4.49.0",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 152064
generation_config.json CHANGED
@@ -10,5 +10,5 @@
   "temperature": 0.7,
   "top_k": 20,
   "top_p": 0.8,
-  "transformers_version": "4.48.1"
+  "transformers_version": "4.49.0"
 }
recipe.yaml CHANGED
@@ -1,5 +1,5 @@
-DEFAULT_stage:
-  DEFAULT_modifiers:
+default_stage:
+  default_modifiers:
     QuantizationModifier:
      ignore: [lm_head]
      targets: [Linear]
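Recipes of this shape are consumed by llm-compressor's one-shot entry point. The following is only a sketch of how such a recipe might be applied, assuming a data-free quantization scheme (no calibration set passed); the base model id and output directory are placeholders, not values recorded in this commit.

# Hypothetical sketch: applying recipe.yaml with llm-compressor.
# MODEL_ID and OUTPUT_DIR are placeholders (assumptions), not from this commit.
# Older llm-compressor releases expose this as llmcompressor.transformers.oneshot.
from llmcompressor import oneshot

MODEL_ID = "Qwen/Qwen2-7B-Instruct"  # placeholder base model
OUTPUT_DIR = "qwen2-fp8"             # placeholder output directory

oneshot(
    model=MODEL_ID,
    recipe="recipe.yaml",  # the recipe changed in this commit
    output_dir=OUTPUT_DIR,
)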