Update README.md
Browse files
README.md
CHANGED
group_by_length: False
```
## How to Get Started with the Model

```python
import torch
from transformers import PhiForCausalLM, AutoModelForCausalLM, AutoTokenizer

torch.set_default_device("cuda")

# Load model and tokenizer
model = PhiForCausalLM.from_pretrained("daekeun-ml/phi-2-ko-v0.1", torch_dtype="auto")
tokenizer = AutoTokenizer.from_pretrained("daekeun-ml/phi-2-ko-v0.1", trust_remote_code=True)

# Korean
inputs = tokenizer("머신러닝은 ", return_tensors="pt", return_attention_mask=False)

outputs = model.generate(**inputs, max_length=200)
text = tokenizer.batch_decode(outputs)[0]
print(text)

# English
inputs = tokenizer('''def print_prime(n):
   """
   Print all primes between 1 and n
   """''', return_tensors="pt", return_attention_mask=False)

outputs = model.generate(**inputs, max_length=200)
text = tokenizer.batch_decode(outputs)[0]
print(text)
```

### References
- Base model: [microsoft/phi-2](https://huggingface.co/microsoft/phi-2)