Phr00tyMix-v1-32B / mergekit_config.yml
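# DARE-TIES merge of five Qwen2.5-32B-family finetunes onto the
# rombodawg/Rombos-LLM-V2.5-Qwen-32b base. For each listed model,
# "weight" sets its relative contribution to the merged deltas and
# "density" is the fraction of delta parameters retained by DARE's
# drop-and-rescale step.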
merge_method: dare_ties
dtype: bfloat16
base_model: rombodawg/Rombos-LLM-V2.5-Qwen-32b
parameters:
  normalize_weights: true
models:
  - model: Delta-Vector/Hamanasu-QwQ-V1.5-Instruct
    parameters:
      weight: 0.3
      density: 1
  - model: zetasepic/Qwen2.5-32B-Instruct-abliterated-v2
    parameters:
      weight: 0.1
      density: 0.8
  - model: THU-KEG/LongWriter-Zero-32B
    parameters:
      weight: 0.1
      density: 0.8
  - model: Delta-Vector/Hamanasu-Magnum-QwQ-32B
    parameters:
      weight: 0.3
      density: 0.8
  - model: allura-org/Qwen2.5-32b-RP-Ink
    parameters:
      weight: 0.2
      density: 0.5
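
# To reproduce the merge (a sketch; assumes mergekit is installed and this
# file is saved as mergekit_config.yml; the output directory name is an example):
#   mergekit-yaml mergekit_config.yml ./Phr00tyMix-v1-32B --cuda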