Update README.md
README.md (changed)

````diff
@@ -18,9 +18,11 @@ or execute the following test code:
 ```python
 from transformers import AutoTokenizer, AutoModelForCausalLM
 from generate_model import *
-
+
+hf_path = 'tinyllava/TinyLLaVA-Phi-2-SigLIP-3.1B'
+model = AutoModelForCausalLM.from_pretrained(hf_path, trust_remote_code=True)
 config = model.config
-tokenizer = AutoTokenizer.from_pretrained(
+tokenizer = AutoTokenizer.from_pretrained(hf_path, use_fast=False, model_max_length=config.tokenizer_model_max_length, padding_side=config.tokenizer_padding_side)
 prompt="you want to ask"
 image="/path/to/related/image"
 output_text, generation_time = generate(prompt=prompt, image=image, model=model, tokenizer=tokenizer)
````
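For reference, a minimal sketch of the test snippet as it reads after this change, with brief comments. It assumes the repo's `generate_model` module provides the `generate` helper used below; the prompt and image path are placeholders to replace with your own question and a local image file.

```python
from transformers import AutoTokenizer, AutoModelForCausalLM
from generate_model import *  # assumed to provide the generate() helper used below

# Load the TinyLLaVA checkpoint from the Hugging Face Hub; trust_remote_code is
# required because the model ships its own modeling code.
hf_path = 'tinyllava/TinyLLaVA-Phi-2-SigLIP-3.1B'
model = AutoModelForCausalLM.from_pretrained(hf_path, trust_remote_code=True)
config = model.config

# Build the tokenizer with the length and padding settings stored in the model config.
tokenizer = AutoTokenizer.from_pretrained(
    hf_path,
    use_fast=False,
    model_max_length=config.tokenizer_model_max_length,
    padding_side=config.tokenizer_padding_side,
)

prompt = "you want to ask"        # replace with your question
image = "/path/to/related/image"  # replace with a path to a related image

# generate() returns the decoded answer and the time generation took.
output_text, generation_time = generate(prompt=prompt, image=image, model=model, tokenizer=tokenizer)
print(output_text)
```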