Commit
·
997d9f9
1
Parent(s):
b05f7a5
clarify prompt syntax
Browse files
README.md
CHANGED
@@ -18,6 +18,7 @@ tags:
|
|
18 |
- The model responds with a structured JSON object containing the function name and arguments.
|
19 |
|
20 |
**Recent Updates**
|
|
|
21 |
- October 11th 2023 -> added Mistral 7B with function calling
|
22 |
- October 11th 2023 -> new models pushed, trained on an improved underlying dataset
|
23 |
|
@@ -60,6 +61,8 @@ The dataset used for training this model can be found at [Trelis Function Callin
|
|
60 |
|
61 |
## Inference
|
62 |
|
|
|
|
|
63 |
**Quick Start in Google Colab**
|
64 |
Try out this notebook [fLlama_Inference notebook](https://colab.research.google.com/drive/1Ow5cQ0JNv-vXsT-apCceH6Na3b4L7JyW?usp=sharing)
|
65 |
|
@@ -81,8 +84,9 @@ import requests
|
|
81 |
import json
|
82 |
|
83 |
# Define the roles and markers
|
84 |
-
B_INST, E_INST = "[INST]", "[/INST]"
|
85 |
B_FUNC, E_FUNC = "<FUNCTIONS>", "</FUNCTIONS>\n\n"
|
|
|
|
|
86 |
|
87 |
# Define the function metadata
|
88 |
function_metadata = {
|
@@ -102,7 +106,7 @@ user_prompt = 'Search for the latest news on AI.'
|
|
102 |
|
103 |
# Format the function list and prompt
|
104 |
function_list = json.dumps(function_metadata, indent=4)
|
105 |
-
prompt = f"{B_FUNC}{function_list.strip()}{E_FUNC}{B_INST}
|
106 |
|
107 |
# Define the API endpoint
|
108 |
url = "http://localhost:8080/completion"
|
@@ -123,21 +127,23 @@ The function descriptions must be wrapped within a function block. You can put t
|
|
123 |
Example without a system message:
|
124 |
```
|
125 |
# Define the roles and markers
|
126 |
-
B_INST, E_INST = "[INST]", "[/INST]"
|
127 |
B_FUNC, E_FUNC = "<FUNCTIONS>", "</FUNCTIONS>\n\n"
|
|
|
|
|
128 |
|
129 |
functionList = {function_1_metadata}{function_2_metadata}...
|
130 |
user_prompt = '...'
|
131 |
|
132 |
# Format your prompt template
|
133 |
-
prompt = f"{B_FUNC}{functionList.strip()}{E_FUNC}{B_INST}
|
134 |
```
|
135 |
|
136 |
Example with a system message:
|
137 |
```
|
138 |
# Define the roles and markers
|
139 |
-
B_INST, E_INST = "[INST]", "[/INST]"
|
140 |
B_FUNC, E_FUNC = "<FUNCTIONS>", "</FUNCTIONS>\n\n"
|
|
|
|
|
141 |
B_SYS, E_SYS = "<<SYS>>\n", "\n<</SYS>>\n\n"
|
142 |
|
143 |
# assuming functionList is defined as above
|
@@ -145,7 +151,7 @@ Example with a system message:
|
|
145 |
user_prompt = '...'
|
146 |
|
147 |
# Format your prompt template
|
148 |
-
prompt = f"{B_FUNC}{functionList.strip()}{E_FUNC}{B_INST}
|
149 |
|
150 |
```
|
151 |
Notice that the function block is placed at the very start of the sequence, before 'B_INST'.
|
|
|
18 |
- The model responds with a structured JSON object containing the function name and arguments.
|
19 |
|
20 |
**Recent Updates**
|
21 |
+
- November 6th 2023 -> added Deepseek Coder 6.7B and 33B
|
22 |
- October 11th 2023 -> added Mistral 7B with function calling
|
23 |
- October 11th 2023 -> new models pushed, trained on an improved underlying dataset
|
24 |
|
|
|
61 |
|
62 |
## Inference
|
63 |
|
64 |
+
!!! Make sure to check the prompt format below and adjust inference accordingly !!!
|
65 |
+
|
66 |
**Quick Start in Google Colab**
|
67 |
Try out this notebook [fLlama_Inference notebook](https://colab.research.google.com/drive/1Ow5cQ0JNv-vXsT-apCceH6Na3b4L7JyW?usp=sharing)
|
68 |
|
|
|
84 |
import json
|
85 |
|
86 |
# Define the roles and markers
|
|
|
87 |
B_FUNC, E_FUNC = "<FUNCTIONS>", "</FUNCTIONS>\n\n"
|
88 |
+
B_INST, E_INST = "[INST] ", " [/INST]" #Llama style
|
89 |
+
# B_INST, E_INST = "\n### Instruction:\n", "\n### Response:\n" #DeepSeek Coder Style
|
90 |
|
91 |
# Define the function metadata
|
92 |
function_metadata = {
|
|
|
106 |
|
107 |
# Format the function list and prompt
|
108 |
function_list = json.dumps(function_metadata, indent=4)
|
109 |
+
prompt = f"{B_FUNC}{function_list.strip()}{E_FUNC}{B_INST}{user_prompt.strip()}{E_INST}\n\n"
|
110 |
|
111 |
# Define the API endpoint
|
112 |
url = "http://localhost:8080/completion"
|
|
|
127 |
Example without a system message:
|
128 |
```
|
129 |
# Define the roles and markers
|
|
|
130 |
B_FUNC, E_FUNC = "<FUNCTIONS>", "</FUNCTIONS>\n\n"
|
131 |
+
B_INST, E_INST = "[INST] ", " [/INST]" #Llama style
|
132 |
+
# B_INST, E_INST = "\n### Instruction:\n", "\n### Response:\n" #DeepSeek Coder Style
|
133 |
|
134 |
functionList = {function_1_metadata}{function_2_metadata}...
|
135 |
user_prompt = '...'
|
136 |
|
137 |
# Format your prompt template
|
138 |
+
prompt = f"{B_FUNC}{functionList.strip()}{E_FUNC}{B_INST}{user_prompt.strip()}{E_INST}\n\n"
|
139 |
```
|
140 |
|
141 |
Example with a system message:
|
142 |
```
|
143 |
# Define the roles and markers
|
|
|
144 |
B_FUNC, E_FUNC = "<FUNCTIONS>", "</FUNCTIONS>\n\n"
|
145 |
+
B_INST, E_INST = "[INST] ", " [/INST]" #Llama style
|
146 |
+
# B_INST, E_INST = "\n### Instruction:\n", "\n### Response:\n" #DeepSeek Coder Style
|
147 |
B_SYS, E_SYS = "<<SYS>>\n", "\n<</SYS>>\n\n"
|
148 |
|
149 |
# assuming functionList is defined as above
|
|
|
151 |
user_prompt = '...'
|
152 |
|
153 |
# Format your prompt template
|
154 |
+
prompt = f"{B_FUNC}{functionList.strip()}{E_FUNC}{B_INST}{B_SYS}{system_prompt.strip()}{E_SYS}{user_prompt.strip()}{E_INST}\n\n"
|
155 |
|
156 |
```
|
157 |
Notice that the function block is placed at the very start of the sequence, before 'B_INST'.
|