Update README.md
README.md
@@ -73,14 +73,32 @@ Users (both direct and downstream) should be made aware of the risks, biases and
 
 Use the code below to get started with the model.
 ```python
-from transformers import
+from transformers import (
+    AutoModelForCausalLM,
+    AutoTokenizer,
+    BitsAndBytesConfig,
+    HfArgumentParser,
+    TrainingArguments,
+    pipeline,
+    logging,
+)
+
+
 model_id = "zephyr-7b-beta-Agent-Instruct-math"
+base_model = "HuggingFaceH4/zephyr-7b-beta"
+tokenizer = AutoTokenizer.from_pretrained(base_model, trust_remote_code=True)
+tokenizer.padding_side = 'right'
+tokenizer.pad_token = tokenizer.eos_token
+tokenizer.add_eos_token = True
+tokenizer.add_bos_token, tokenizer.add_eos_token
+
+
 pipe = pipeline(task="text-generation", model=model, tokenizer=tokenizer, max_length=600)
 prompt = "Enter your query"
 result = pipe(prompt)
 print(result[0]['generated_text'])
 
-
+```
 
 [More Information Needed]
 
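Note that in the snippet as committed, `pipe` is built from a `model` variable that is never defined, several imported names (`BitsAndBytesConfig`, `HfArgumentParser`, `TrainingArguments`, `logging`) go unused, and `tokenizer.add_bos_token, tokenizer.add_eos_token` is a no-op expression. Below is a minimal runnable sketch of the intended quick start; which weights to load (the `HuggingFaceH4/zephyr-7b-beta` base versus the `zephyr-7b-beta-Agent-Instruct-math` checkpoint, whose Hub namespace the diff does not show) is an assumption.

```python
# Minimal sketch of the quick-start code with the missing pieces filled in.
# Assumption: the fine-tuned weights load with AutoModelForCausalLM; the diff
# never shows how `model` is created, so the base model is used here.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

base_model = "HuggingFaceH4/zephyr-7b-beta"
model_id = "zephyr-7b-beta-Agent-Instruct-math"  # fine-tuned checkpoint; full Hub path not shown in the diff

tokenizer = AutoTokenizer.from_pretrained(base_model, trust_remote_code=True)
tokenizer.padding_side = "right"
tokenizer.pad_token = tokenizer.eos_token

# Swap `base_model` for `model_id` once the fine-tuned checkpoint's location is known.
model = AutoModelForCausalLM.from_pretrained(
    base_model,
    torch_dtype=torch.bfloat16,
    device_map="auto",  # requires the `accelerate` package
)

pipe = pipeline(
    task="text-generation",
    model=model,
    tokenizer=tokenizer,
    max_length=600,
)

prompt = "Enter your query"
result = pipe(prompt)
print(result[0]["generated_text"])
```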