fix model name
Browse files
README.md
CHANGED
|
@@ -218,7 +218,7 @@ This is a simple example of how to use **Granite-3B-Code-Base-128K** model.
|
|
| 218 |
import torch
|
| 219 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 220 |
device = "cuda" # or "cpu"
|
| 221 |
-
model_path = "ibm-granite/granite-3b-code-base-128K"
|
| 222 |
tokenizer = AutoTokenizer.from_pretrained(model_path)
|
| 223 |
# drop device_map if running on CPU
|
| 224 |
model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device)
|
|
|
|
| 218 |
import torch
|
| 219 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 220 |
device = "cuda" # or "cpu"
|
| 221 |
+
model_path = "ibm-granite/granite-3b-code-base-128k"
|
| 222 |
tokenizer = AutoTokenizer.from_pretrained(model_path)
|
| 223 |
# drop device_map if running on CPU
|
| 224 |
model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device)
|