Use correct llama-cpp-python example
README.md CHANGED
@@ -119,7 +119,7 @@ For other parameters and how to use them, please refer to [the llama.cpp documen
 
 ## How to run from Python code
 
-You can use GGUF models from Python using the [llama-cpp-python](https://github.com/abetlen/llama-cpp-python).
+You can use GGUF models from Python using the [llama-cpp-python](https://github.com/abetlen/llama-cpp-python) module.
 
 ### How to load this model in Python code, using llama-cpp-python
 
@@ -167,41 +167,15 @@ from llama_cpp import Llama
 
 # Chat Completion API
 
-llm = Llama(model_path="./
+llm = Llama(model_path="./OpenCodeInterpreter-DS-6.7B.IQ3_M.gguf", n_gpu_layers=33, n_ctx=16384)
 print(llm.create_chat_completion(
-
+    messages = [
+        {"role": "system", "content": "You are an expert AI coding assistant."},
         {
-
-
+            "role": "user",
+            "content": "Pick a LeetCode challenge and solve it in Python."
         }
-
-    tools=[{
-        "type": "function",
-        "function": {
-            "name": "get_current_weather",
-            "description": "Get the current weather in a given location",
-            "parameters": {
-                "type": "object",
-                "properties": {
-                    "location": {
-                        "type": "string",
-                        "description": "The city and state, e.g. San Francisco, CA"
-                    },
-                    "unit": {
-                        "type": "string",
-                        "enum": [ "celsius", "fahrenheit" ]
-                    }
-                },
-                "required": [ "location" ]
-            }
-        }
-    }],
-    tool_choice=[{
-        "type": "function",
-        "function": {
-            "name": "get_current_weather"
-        }
-    }]
+    ]
 ))
 ```
 
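Taken together with the `from llama_cpp import Llama` context line in the second hunk, the updated example can be run end to end roughly as below. This is a minimal sketch, assuming llama-cpp-python is installed (`pip install llama-cpp-python`) and the `OpenCodeInterpreter-DS-6.7B.IQ3_M.gguf` file from this repo has been downloaded into the working directory; the final `print` of the extracted reply is not part of the diff and simply reads the OpenAI-style dictionary that `create_chat_completion` returns.

```python
from llama_cpp import Llama

# Load the quantized model. n_gpu_layers only has an effect if llama-cpp-python
# was built with GPU support; n_ctx sets the context window used at load time.
llm = Llama(
    model_path="./OpenCodeInterpreter-DS-6.7B.IQ3_M.gguf",
    n_gpu_layers=33,
    n_ctx=16384,
)

# Chat Completion API, as in the updated README example.
response = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are an expert AI coding assistant."},
        {"role": "user", "content": "Pick a LeetCode challenge and solve it in Python."},
    ]
)

# create_chat_completion returns an OpenAI-style dict; the generated text is
# under choices[0]["message"]["content"].
print(response["choices"][0]["message"]["content"])
```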