jeff-RQ committed · Commit 32f759e · 1 Parent(s): 931cc9a

Update handler.py

Files changed (1): handler.py +3 -4
handler.py CHANGED
@@ -1,5 +1,4 @@
 from typing import Any, Dict
-import torch
 from transformers import Blip2Processor, Blip2ForConditionalGeneration


@@ -7,8 +6,8 @@ class EndpointHandler:
     def __init__(self, path=""):
         # load model and processor from path
         self.processor = Blip2Processor.from_pretrained(path)
-        self.model = Blip2ForConditionalGeneration.from_pretrained(path, torch_dtype=torch.float16)
-        self.device = "cuda" if torch.cuda.is_available() else "cpu"
+        self.model = Blip2ForConditionalGeneration.from_pretrained(path)
+        self.device = "cuda"

         self.model.to(self.device)

@@ -17,7 +16,7 @@ class EndpointHandler:
         image = data.pop("image", data)
         text = data.pop("text", data)

-        inputs = self.processor(images=image, text=text, return_tensors="pt").to(self.device, torch.float16)
+        inputs = self.processor(images=image, text=prompt, return_tensors="pt").to(self.device)
         generated_ids = self.model.generate(**inputs)
         generated_text = self.processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
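For reference, below is a minimal sketch of what handler.py looks like after this commit, assuming the usual Hugging Face Inference Endpoints custom-handler interface (a __call__(self, data) method returning a dict), since the diff only shows the changed hunks. Two caveats worth noting: the new processor call passes text=prompt, but no prompt variable appears in the shown hunk, so the sketch assumes text (read from the request just above) is the intended argument; and hardcoding self.device = "cuda" assumes the endpoint always runs on a GPU.

from typing import Any, Dict

from transformers import Blip2Processor, Blip2ForConditionalGeneration


class EndpointHandler:
    def __init__(self, path=""):
        # load model and processor from path
        self.processor = Blip2Processor.from_pretrained(path)
        # full precision: the commit drops the torch_dtype=torch.float16 argument
        self.model = Blip2ForConditionalGeneration.from_pretrained(path)
        self.device = "cuda"  # hardcoded by this commit; assumes a GPU is present

        self.model.to(self.device)

    def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]:
        # method signature and return shape are assumed from the standard
        # custom-handler convention; the diff only shows the method body
        image = data.pop("image", data)
        text = data.pop("text", data)

        # the commit writes text=prompt, but `prompt` is undefined in the shown code;
        # `text` is assumed to be the intended variable here
        inputs = self.processor(images=image, text=text, return_tensors="pt").to(self.device)
        generated_ids = self.model.generate(**inputs)
        generated_text = self.processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()

        return {"generated_text": generated_text}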