jeff-RQ committed
Commit d94fef7 · Parent: 1e131a4

Update handler.py

Files changed (1): handler.py (+3, -2)
handler.py CHANGED
@@ -3,12 +3,13 @@ from transformers import Blip2Processor, Blip2ForConditionalGeneration
 import io
 from PIL import Image
 import base64
+import torch
 
 class EndpointHandler:
     def __init__(self, path=""):
         # load model and processor from path
         self.processor = Blip2Processor.from_pretrained(path)
-        self.model = Blip2ForConditionalGeneration.from_pretrained(path)
+        self.model = Blip2ForConditionalGeneration.from_pretrained(path, torch_dtype=torch.float16)
         self.device = "cuda"
 
         self.model.to(self.device)
@@ -21,7 +22,7 @@ class EndpointHandler:
         image_string = base64.b64decode(data["image"])
         image = Image.open(io.BytesIO(image_string))
 
-        inputs = self.processor(images=image, text=text, return_tensors="pt").to(self.device)
+        inputs = self.processor(images=image, text=text, return_tensors="pt").to(self.device, torch.float16)
         generated_ids = self.model.generate(**inputs)
         generated_text = self.processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
 
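For context, a minimal local smoke test of the updated handler could look like the sketch below. It is not part of this commit: the checkpoint path, the __call__ entry point, and the "text" payload key are assumptions (the prompt variable text is assigned outside the hunks shown above), while the base64-encoded "image" key, the float16 weights, and the hard-coded CUDA device follow directly from the diff.

# Local smoke test for the updated EndpointHandler -- a sketch, not part of the commit.
# Assumptions: the BLIP-2 checkpoint path, the __call__ entry point, and the "text"
# payload key; requires a CUDA GPU because the handler sets self.device = "cuda".
import base64
import io

from PIL import Image

from handler import EndpointHandler

# Build a small in-memory image and base64-encode it, matching the handler's
# expectation that data["image"] holds a base64-encoded image string.
image = Image.new("RGB", (224, 224), color="white")
buffer = io.BytesIO()
image.save(buffer, format="PNG")

payload = {
    "image": base64.b64encode(buffer.getvalue()).decode("utf-8"),
    "text": "a photo of",  # assumed key; prompt handling is outside the shown hunks
}

handler = EndpointHandler(path="Salesforce/blip2-opt-2.7b")  # assumed checkpoint path
print(handler(payload))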