Commit bea38d5 · 1 Parent(s): aaef4ba
dailei.127 committed

Minor typo changed vllm_model.model to llm

Files changed (1): README.md (+3, -3)
README.md CHANGED
@@ -32,7 +32,7 @@ model_name = "mistralai/Pixtral-12B-2409"
 
 sampling_params = SamplingParams(max_tokens=8192)
 
-llm = LLM(model=model_name, tokenizer_mode="mistral")
+llm = LLM(model=model_name, tokenizer_mode="mistral", max_model_len=1024)
 
 prompt = "Describe this image in one sentence."
 image_url = "https://picsum.photos/id/237/200/300"
@@ -44,7 +44,7 @@ messages = [
     },
 ]
 
-outputs = vllm_model.model.chat(messages, sampling_params=sampling_params)
+outputs = llm.chat(messages, sampling_params=sampling_params)
 
 print(outputs[0].outputs[0].text)
 ```
@@ -123,4 +123,4 @@ curl --location 'http://<your-node-url>:8000/v1/chat/completions' \
     }
   ]
 }'
-```
+```
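For readability, here is a sketch of how the full vLLM snippet would read after this commit. Only the lines visible in the hunks above come from the README; the imports and the exact layout of the `messages` block are assumptions based on vLLM's standard OpenAI-style chat interface for multimodal models.

```python
# Sketch of the corrected snippet after this commit. Lines not shown in the
# diff (imports, the messages block) are assumed, not taken from the README.
from vllm import LLM
from vllm.sampling_params import SamplingParams

model_name = "mistralai/Pixtral-12B-2409"

sampling_params = SamplingParams(max_tokens=8192)

# max_model_len=1024 mirrors the "+" line in the first hunk above.
llm = LLM(model=model_name, tokenizer_mode="mistral", max_model_len=1024)

prompt = "Describe this image in one sentence."
image_url = "https://picsum.photos/id/237/200/300"

# Assumed OpenAI-style multimodal message; the README's exact layout of this
# block is not visible in the diff.
messages = [
    {
        "role": "user",
        "content": [
            {"type": "text", "text": prompt},
            {"type": "image_url", "image_url": {"url": image_url}},
        ],
    },
]

# The commit's fix: call chat() on the LLM instance instead of the
# undefined vllm_model.model reference.
outputs = llm.chat(messages, sampling_params=sampling_params)

print(outputs[0].outputs[0].text)
```

Calling `chat()` directly on the `LLM` object is vLLM's offline entry point for chat-style prompts, which is why replacing `vllm_model.model` with `llm` makes the snippet self-contained.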