"""
llama.cpp Hugging Face to GGUF conversion script.

Converts the merged Hugging Face checkpoint to a single f16 GGUF file by
invoking llama.cpp's convert_hf_to_gguf.py as a subprocess.
"""
|

import os
import subprocess
import sys

|
def main():
    model_path = "trained_models/isaac_sim_hf/complete_merged_model"
    output_path = "trained_models/isaac_sim_hf/gguf_final/isaac_sim_qwen2.5_coder.gguf"

    print(f"Converting {model_path} to {output_path}")

    # Path to llama.cpp's conversion script.
    convert_script = "/mnt/s/Train a Reasoning Model with NeMo/llama.cpp/convert_hf_to_gguf.py"
    if not os.path.exists(convert_script):
        print(f"❌ Convert script not found: {convert_script}")
        return False

    # Make sure the output directory exists before the converter tries to write to it.
    os.makedirs(os.path.dirname(output_path), exist_ok=True)

    # convert_hf_to_gguf.py detects the model architecture from the checkpoint's
    # config.json, so only the output path and dtype are passed explicitly.
    cmd = [
        sys.executable, convert_script,
        model_path,
        "--outfile", output_path,
        "--outtype", "f16",
    ]

    try:
        print(f"Running: {' '.join(cmd)}")
        result = subprocess.run(cmd, capture_output=True, text=True, check=True)
        print("✅ Conversion completed successfully")
        print(f"Output: {result.stdout}")

        # Report the size of the generated GGUF file.
        if os.path.exists(output_path):
            size_mb = os.path.getsize(output_path) / (1024 * 1024)
            print(f"📁 Output file size: {size_mb:.1f} MB")

    except subprocess.CalledProcessError as e:
        print(f"❌ Conversion failed: {e.stderr}")
        return False
    except Exception as e:
        print(f"❌ Unexpected error: {e}")
        return False

    return True

|

if __name__ == "__main__":
    success = main()
    sys.exit(0 if success else 1)
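
# Optional follow-up (a sketch, not part of the original script): the resulting
# f16 GGUF can be quantized with llama.cpp's llama-quantize tool (called
# "quantize" in older builds), e.g.
#   ./llama-quantize trained_models/isaac_sim_hf/gguf_final/isaac_sim_qwen2.5_coder.gguf \
#       trained_models/isaac_sim_hf/gguf_final/isaac_sim_qwen2.5_coder.Q4_K_M.gguf Q4_K_M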
|