Update README.md
README.md CHANGED
@@ -3,7 +3,7 @@ tags:
 - generated_from_trainer
 - code
 - coding
--
+- gemma
 model-index:
 - name: gemma-2b-coder
   results: []
@@ -79,21 +79,23 @@ WIP
 
 
 ### Example of usage 👩‍💻
+
+I recommend installing the following version of `torch`:
+
+```sh
+pip install "torch>=2.1.1" -U
+```
+
 ```py
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
 
-model_id = "
+model_id = "MAISAAI/gemma-2b-coder"
 
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 
 model = AutoModelForCausalLM.from_pretrained(model_id).to("cuda")
 
-def create_prompt(instruction):
-    system = "You are a coding assistant that will help the user to resolve the following instruction:"
-    instruction = "### Instruction: " + instruction
-    return system + "\n" + instruction + "\n\n" + "### Solution:" + "\n"
-
 def generate(
     instruction,
     max_new_tokens=256,
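The second hunk ends mid-signature, so the card's `generate(...)` helper is not shown in full here. As a rough, hypothetical sketch (not part of this commit) of how such a helper could be completed for `MAISAAI/gemma-2b-coder`: the prompt format below reuses the `create_prompt` logic this commit removes, and the `float16` loading and sampling values (`temperature`, `top_p`) are assumptions rather than values taken from the card.

```py
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

model_id = "MAISAAI/gemma-2b-coder"

tokenizer = AutoTokenizer.from_pretrained(model_id)
# float16 is an assumption to keep the 2B model comfortably on a single GPU.
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16).to("cuda")


def generate(instruction, max_new_tokens=256, temperature=0.3, top_p=0.75):
    # Prompt format mirrors the create_prompt() helper removed in this commit;
    # the updated card may build the prompt differently.
    prompt = (
        "You are a coding assistant that will help the user to resolve the following instruction:\n"
        f"### Instruction: {instruction}\n\n"
        "### Solution:\n"
    )
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    generation_config = GenerationConfig(
        max_new_tokens=max_new_tokens,
        temperature=temperature,  # assumed sampling settings, not from the card
        top_p=top_p,
        do_sample=True,
        pad_token_id=tokenizer.eos_token_id,
    )
    with torch.no_grad():
        outputs = model.generate(**inputs, generation_config=generation_config)
    # Return only the newly generated tokens, without the echoed prompt.
    return tokenizer.decode(outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True)


print(generate("Write a Python function that checks whether a number is prime."))
```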