Allow chatting with SFT models
Browse files — eval_models.py: +2 −2
eval_models.py
CHANGED
@@ -69,7 +69,7 @@ class ModelEvaluator:
 69          model.to("mps")
 70
 71          # Create pipeline - use conversational for chat models, text-generation for others
 72 -        if "chat" in model_dir.lower():
 73          pipe = pipeline(
 74              "text-generation",
 75              model=model,

@@ -122,7 +122,7 @@ class ModelEvaluator:
 122         pipe = self.pipelines[model_name]
 123
 124         # Check if this is a conversational pipeline
 125 -       if "chat" in model_name.lower():
 126             # For conversational models, use the chat format
 127             chat_input = [{"role": "user", "content": prompt}]
 128         outputs = pipe(
|
|
 69          model.to("mps")
 70
 71          # Create pipeline - use conversational for chat models, text-generation for others
 72 +        if "chat" in model_dir.lower() or "sft" in model_dir.lower():
 73          pipe = pipeline(
 74              "text-generation",
 75              model=model,

 122         pipe = self.pipelines[model_name]
 123
 124         # Check if this is a conversational pipeline
 125 +       if "chat" in model_name.lower() or "sft" in model_name.lower():
 126             # For conversational models, use the chat format
 127             chat_input = [{"role": "user", "content": prompt}]
 128         outputs = pipe(