ariG23498 HF Staff committed on
Commit
7b4d6fc
·
verified ·
1 Parent(s): ecd47c0

Upload google_medgemma-4b-it_1.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. google_medgemma-4b-it_1.py +4 -14
google_medgemma-4b-it_1.py CHANGED
@@ -7,11 +7,10 @@
7
  # ///
8
 
9
  try:
10
- # Load model directly
11
- from transformers import AutoProcessor, AutoModelForImageTextToText
12
 
13
- processor = AutoProcessor.from_pretrained("google/medgemma-4b-it")
14
- model = AutoModelForImageTextToText.from_pretrained("google/medgemma-4b-it")
15
  messages = [
16
  {
17
  "role": "user",
@@ -21,16 +20,7 @@ try:
21
  ]
22
  },
23
  ]
24
- inputs = processor.apply_chat_template(
25
- messages,
26
- add_generation_prompt=True,
27
- tokenize=True,
28
- return_dict=True,
29
- return_tensors="pt",
30
- ).to(model.device)
31
-
32
- outputs = model.generate(**inputs, max_new_tokens=40)
33
- print(processor.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
34
  with open('google_medgemma-4b-it_1.txt', 'w') as f:
35
  f.write('Everything was good in google_medgemma-4b-it_1.txt')
36
  except Exception as e:
 
7
  # ///
8
 
9
  try:
10
+ # Use a pipeline as a high-level helper
11
+ from transformers import pipeline
12
 
13
+ pipe = pipeline("image-text-to-text", model="google/medgemma-4b-it")
 
14
  messages = [
15
  {
16
  "role": "user",
 
20
  ]
21
  },
22
  ]
23
+ pipe(text=messages)
 
 
 
 
 
 
 
 
 
24
  with open('google_medgemma-4b-it_1.txt', 'w') as f:
25
  f.write('Everything was good in google_medgemma-4b-it_1.txt')
26
  except Exception as e: