ariG23498 (HF Staff) committed on
Commit 07cc3de · verified · 1 Parent(s): 74ba1bc

Upload Qwen_Qwen3-8B_0.py with huggingface_hub

Files changed (1)
  1. Qwen_Qwen3-8B_0.py +19 -0
Qwen_Qwen3-8B_0.py CHANGED
@@ -20,6 +20,25 @@ try:
     {"role": "user", "content": "Who are you?"},
 ]
 pipe(messages)
+
+# Load model directly
+from transformers import AutoTokenizer, AutoModelForCausalLM
+
+tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen3-8B")
+model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen3-8B")
+messages = [
+    {"role": "user", "content": "Who are you?"},
+]
+inputs = tokenizer.apply_chat_template(
+    messages,
+    add_generation_prompt=True,
+    tokenize=True,
+    return_dict=True,
+    return_tensors="pt",
+).to(model.device)
+
+outputs = model.generate(**inputs, max_new_tokens=40)
+print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
 with open('Qwen_Qwen3-8B_0.txt', 'w') as f:
     f.write('Everything was good in Qwen_Qwen3-8B_0.txt')
 except Exception as e:
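
The commit message states the file was uploaded with huggingface_hub. As a rough illustration only (not the exact call used for this commit), a minimal upload sketch follows; the repo_id is a placeholder and the repo_type is an assumption.

# Minimal sketch of pushing this script with huggingface_hub.
# Assumptions: repo_id is a placeholder; repo_type may differ for the real repository.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="Qwen_Qwen3-8B_0.py",   # local file to push
    path_in_repo="Qwen_Qwen3-8B_0.py",      # destination path inside the repo
    repo_id="your-namespace/your-repo",     # placeholder, not the actual target repo
    repo_type="dataset",                    # assumed; could be "model" or "space"
    commit_message="Upload Qwen_Qwen3-8B_0.py with huggingface_hub",
)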