ariG23498 HF Staff committed on
Commit
39e3c83
·
verified ·
1 Parent(s): 36e6f6c

Upload HuggingFaceTB_SmolLM3-3B_1.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. HuggingFaceTB_SmolLM3-3B_1.py +3 -18
HuggingFaceTB_SmolLM3-3B_1.py CHANGED
@@ -7,25 +7,10 @@
7
  # ///
8
 
9
  try:
10
- # prepare the model input
11
- prompt = "Give me a brief explanation of gravity in simple terms."
12
- messages_think = [
13
- {"role": "user", "content": prompt}
14
  ]
15
-
16
- text = tokenizer.apply_chat_template(
17
- messages_think,
18
- tokenize=False,
19
- add_generation_prompt=True,
20
- )
21
- model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
22
-
23
- # Generate the output
24
- generated_ids = model.generate(**model_inputs, max_new_tokens=32768)
25
-
26
- # Get and decode the output
27
- output_ids = generated_ids[0][len(model_inputs.input_ids[0]) :]
28
- print(tokenizer.decode(output_ids, skip_special_tokens=True))
29
  with open('HuggingFaceTB_SmolLM3-3B_1.txt', 'w') as f:
30
  f.write('Everything was good in HuggingFaceTB_SmolLM3-3B_1.txt')
31
  except Exception as e:
 
7
  # ///
8
 
9
  try:
10
+ messages = [
11
+ {"role": "user", "content": "Give me a brief explanation of gravity in simple terms."},
 
 
12
  ]
13
+ pipe(messages)
 
 
 
 
 
 
 
 
 
 
 
 
 
14
  with open('HuggingFaceTB_SmolLM3-3B_1.txt', 'w') as f:
15
  f.write('Everything was good in HuggingFaceTB_SmolLM3-3B_1.txt')
16
  except Exception as e: