models:
  - model: "merged_model"
    # no parameters necessary for the base model

  - model: Qwen/Qwen3-30B-A3B-Thinking-2507
    parameters:
      density: 0.35  # fraction of delta weights (vs. the base) retained
      weight: 0.35   # relative weight of this model's task vector

  - model: unsloth/Qwen3-Coder-30B-A3B-Instruct
    parameters:
      density: 0.25
      weight: 0.25

merge_method: dare_ties

base_model: "merged_model"
parameters:
  int8_mask: true  # store intermediate masks in int8 to save memory
dtype: bfloat16    # precision of the merged output weights
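
# Usage sketch (assumption: the standard mergekit CLI; the config file name and
# output path below are placeholders, not taken from this repo):
#
#   mergekit-yaml config.yml ./output-model-directory --cuda
#
# `config.yml` stands for this file, `./output-model-directory` receives the
# merged weights, and `--cuda` is optional (runs the merge on GPU if available).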