Commit f9fe258 (verified) · ariG23498 (HF Staff)
1 Parent(s): 39c0e5d

Upload arcee-ai_AFM-4.5B_1.py with huggingface_hub

Files changed (1)
  1. arcee-ai_AFM-4.5B_1.py +4 -14
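
The commit message says the file was uploaded with huggingface_hub. A minimal sketch of that kind of upload, assuming HfApi.upload_file and a hypothetical target repo (the repo_id and repo_type below are not taken from this commit):

# Sketch of an upload like the one described in the commit message.
# repo_id and repo_type are assumptions, not from this commit.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="arcee-ai_AFM-4.5B_1.py",
    path_in_repo="arcee-ai_AFM-4.5B_1.py",
    repo_id="user/example-repo",   # hypothetical target repository
    repo_type="dataset",           # assumption; "model" or "space" are also possible
    commit_message="Upload arcee-ai_AFM-4.5B_1.py with huggingface_hub",
)
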
arcee-ai_AFM-4.5B_1.py CHANGED
@@ -7,24 +7,14 @@
 # ///
 
 try:
-    # Load model directly
-    from transformers import AutoTokenizer, AutoModelForCausalLM
+    # Use a pipeline as a high-level helper
+    from transformers import pipeline
 
-    tokenizer = AutoTokenizer.from_pretrained("arcee-ai/AFM-4.5B")
-    model = AutoModelForCausalLM.from_pretrained("arcee-ai/AFM-4.5B")
+    pipe = pipeline("text-generation", model="arcee-ai/AFM-4.5B")
     messages = [
         {"role": "user", "content": "Who are you?"},
     ]
-    inputs = tokenizer.apply_chat_template(
-        messages,
-        add_generation_prompt=True,
-        tokenize=True,
-        return_dict=True,
-        return_tensors="pt",
-    ).to(model.device)
-
-    outputs = model.generate(**inputs, max_new_tokens=40)
-    print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+    pipe(messages)
     with open('arcee-ai_AFM-4.5B_1.txt', 'w') as f:
         f.write('Everything was good in arcee-ai_AFM-4.5B_1.txt')
 except Exception as e:
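
Usage note on the new version: unlike the removed generate/decode code, the text-generation pipeline returns its results rather than printing them, so the reply has to be read out of the return value. A sketch of doing that, assuming a recent transformers release (the max_new_tokens value is an arbitrary choice, not from this commit):

# Sketch of consuming the pipeline output; max_new_tokens is an assumption.
from transformers import pipeline

pipe = pipeline("text-generation", model="arcee-ai/AFM-4.5B")
messages = [{"role": "user", "content": "Who are you?"}]
result = pipe(messages, max_new_tokens=40)
# For chat-style input the pipeline returns the conversation with the
# generated assistant turn appended under "generated_text".
print(result[0]["generated_text"][-1]["content"])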