Update README.md
README.md
````diff
@@ -226,7 +226,7 @@ This is a simple example of how to use **Granite-34B-Code-Instruct** model.
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 device = "cuda" # or "cpu"
-model_path = "granite-34b-code-instruct"
+model_path = "ibm-granite/granite-34b-code-instruct"
 tokenizer = AutoTokenizer.from_pretrained(model_path)
 # drop device_map if running on CPU
 model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device)
@@ -250,6 +250,7 @@ for i in output:
 print(i)
 ```
 
+
 <!-- TO DO: Check this part -->
 ## Training Data
 Granite Code Instruct models are trained on the following types of data.
````
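The diff shows only the edges of the README's usage example (lines 233-249 are elided). For context, a minimal, self-contained sketch of the standard `transformers` chat workflow that the visible fragments suggest; the prompt text and the `max_new_tokens` value are illustrative assumptions, not the README's exact contents.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

device = "cuda"  # or "cpu"
model_path = "ibm-granite/granite-34b-code-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_path)
# drop device_map if running on CPU
model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device)
model.eval()

# Build a prompt with the model's chat template.
# The user message below is an illustrative placeholder.
chat = [
    {"role": "user", "content": "Write a function to find the maximum value in a list."},
]
prompt = tokenizer.apply_chat_template(chat, tokenize=False, add_generation_prompt=True)

# Tokenize and move the inputs to the target device.
input_tokens = tokenizer(prompt, return_tensors="pt").to(device)

# Generate and decode; max_new_tokens=128 is an assumed value.
output = model.generate(**input_tokens, max_new_tokens=128)
output = tokenizer.batch_decode(output)

# Print each sequence in the batch (batch size 1 here),
# matching the `for i in output: print(i)` loop visible in the diff.
for i in output:
    print(i)
```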