Update README.md
README.md CHANGED

@@ -1,5 +1,4 @@
 ---
-license: apache-2.0
 base_model: ibm-granite/granite-3.2-8b-instruct
 library_name: peft
 ---
@@ -46,7 +45,7 @@ device=torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
 ANSWERABILITY_PROMPT = "<|start_of_role|>answerability<|end_of_role|>"
 BASE_NAME = "ibm-granite/granite-3.2-8b-instruct"
-LORA_NAME = "ibm-granite/granite-
+LORA_NAME = "ibm-granite/granite-3.2-8b-lora-rag-answerability-prediction"
 
 tokenizer = AutoTokenizer.from_pretrained(BASE_NAME, padding_side='left',trust_remote_code=True)
 model_base = AutoModelForCausalLM.from_pretrained(BASE_NAME,device_map="auto")
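For context, the constants touched by this commit are consumed a few lines further down in the README's quick-start snippet. Below is a minimal sketch of that continuation, assuming the adapter named by the updated LORA_NAME is attached with peft's PeftModel and that the answerability prompt is appended after the chat template; the example conversation, generation settings, and decoding step are illustrative assumptions, not part of the diff.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel

# Constants as they appear in the README after this commit.
BASE_NAME = "ibm-granite/granite-3.2-8b-instruct"
LORA_NAME = "ibm-granite/granite-3.2-8b-lora-rag-answerability-prediction"
ANSWERABILITY_PROMPT = "<|start_of_role|>answerability<|end_of_role|>"

tokenizer = AutoTokenizer.from_pretrained(BASE_NAME, padding_side='left', trust_remote_code=True)
model_base = AutoModelForCausalLM.from_pretrained(BASE_NAME, device_map="auto")

# Attach the LoRA adapter referenced by the renamed LORA_NAME constant.
model_answerability = PeftModel.from_pretrained(model_base, LORA_NAME)

# Hypothetical user turn; the actual README defines its own example inputs.
convo = [{"role": "user", "content": "What is the visibility policy for repositories?"}]
input_text = tokenizer.apply_chat_template(convo, tokenize=False) + ANSWERABILITY_PROMPT

inputs = tokenizer(input_text, return_tensors="pt").to(model_base.device)
with torch.no_grad():
    output = model_answerability.generate(**inputs, max_new_tokens=5)

# The adapter is trained to emit an answerability label after the prompt.
print(tokenizer.decode(output[0, inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```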