Update README.md
README.md CHANGED
````diff
@@ -65,8 +65,8 @@ You can utilize our newly contributed HF integration to run inference on our Bamba models
 ```python
 from transformers import AutoModelForCausalLM, AutoTokenizer

-model = AutoModelForCausalLM.from_pretrained("ibm-
-tokenizer = AutoTokenizer.from_pretrained("ibm-
+model = AutoModelForCausalLM.from_pretrained("ibm-ai-platform/Bamba-9B-v2")
+tokenizer = AutoTokenizer.from_pretrained("ibm-ai-platform/Bamba-9B-v2")

 message = ["Mamba is a snake with following properties "]
 inputs = tokenizer(message, return_tensors='pt', return_token_type_ids=False)
````
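For reference, here is a minimal sketch of how the corrected snippet can be completed into an end-to-end inference call using the standard `transformers` `generate` API. The `max_new_tokens` value and the use of `batch_decode` are illustrative choices and are not part of the README diff above.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the checkpoint referenced by the corrected README lines
model = AutoModelForCausalLM.from_pretrained("ibm-ai-platform/Bamba-9B-v2")
tokenizer = AutoTokenizer.from_pretrained("ibm-ai-platform/Bamba-9B-v2")

# Tokenize the prompt exactly as in the README snippet
message = ["Mamba is a snake with following properties "]
inputs = tokenizer(message, return_tensors='pt', return_token_type_ids=False)

# Generate a continuation and decode it back to text
# (max_new_tokens is an illustrative setting, not prescribed by the README)
response = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.batch_decode(response, skip_special_tokens=True)[0])
```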