Upload 2 files
Browse files- internal/types/monica.go +361 -0
- internal/types/openai.go +22 -0
internal/types/monica.go
ADDED
@@ -0,0 +1,361 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
package types
|
2 |
+
|
3 |
+
import (
|
4 |
+
"context"
|
5 |
+
"encoding/json"
|
6 |
+
"fmt"
|
7 |
+
"log"
|
8 |
+
"strings"
|
9 |
+
|
10 |
+
lop "github.com/samber/lo/parallel"
|
11 |
+
|
12 |
+
"github.com/google/uuid"
|
13 |
+
"github.com/sashabaranov/go-openai"
|
14 |
+
)
|
15 |
+
|
16 |
+
//const (
|
17 |
+
// MonicaModelGPT4o = "gpt-4o"
|
18 |
+
// MonicaModelGPT4oMini = "gpt-4o-mini"
|
19 |
+
// MonicaModelClaudeSonnet = "claude-3"
|
20 |
+
// MonicaModelClaudeHaiku = "claude-3.5-haiku"
|
21 |
+
// MonicaModelGemini2 = "gemini_2_0"
|
22 |
+
// MonicaModelO1Preview = "openai_o_1"
|
23 |
+
// MonicaModelO1Mini = "openai-o-1-mini"
|
24 |
+
//)
|
25 |
+
|
// Monica API endpoints used by this package.
const (
	BotChatURL    = "https://api.monica.im/api/custom_bot/chat"
	PreSignURL    = "https://api.monica.im/api/file_object/pre_sign_list_by_module"
	FileUploadURL = "https://api.monica.im/api/files/batch_create_llm_file"
	FileGetURL    = "https://api.monica.im/api/files/batch_get_file"
)
32 |
+
|
// Image upload constants.
const (
	MaxImageSize  = 10 * 1024 * 1024 // 10MB
	ImageModule   = "chat_bot"       // presumably the PreSignRequest.Module value for images — confirm at call site
	ImageLocation = "files"          // presumably the PreSignRequest.Location value for images — confirm at call site
)
39 |
+
|
// SupportedImageTypes lists the MIME types accepted for image upload.
var SupportedImageTypes = map[string]bool{
	"image/jpeg": true,
	"image/png":  true,
	"image/gif":  true,
	"image/webp": true,
}
47 |
+
|
// ChatGPTRequest is a minimal OpenAI-style chat completion request.
// NOTE(review): not referenced elsewhere in this file — ChatGPTToMonica takes
// openai.ChatCompletionRequest instead; confirm callers before removing.
type ChatGPTRequest struct {
	Model    string        `json:"model"`    // gpt-3.5-turbo, gpt-4, ...
	Messages []ChatMessage `json:"messages"` // conversation history
	Stream   bool          `json:"stream"`   // whether to stream the response
}
53 |
+
|
// ChatMessage is a single message in a ChatGPTRequest.
type ChatMessage struct {
	Role    string      `json:"role"`    // "system", "user", "assistant"
	Content interface{} `json:"content"` // either a plain string or a []MessageContent
}
58 |
+
|
// MessageContent is one part of a multi-part message body.
type MessageContent struct {
	Type     string `json:"type"`                // "text" or "image_url"
	Text     string `json:"text,omitempty"`      // text content (when Type == "text")
	ImageURL string `json:"image_url,omitempty"` // image URL (when Type == "image_url")
}
65 |
+
|
// MonicaRequest is the request format for Monica's custom-bot chat API.
// Note: the fields below are illustrative; adjust them to Monica's actual
// API requirements when integrating.
type MonicaRequest struct {
	TaskUID  string    `json:"task_uid"`  // "task:<uuid>", generated per request
	BotUID   string    `json:"bot_uid"`   // Monica bot identifier; see modelToBot
	Data     DataField `json:"data"`      // conversation payload
	Language string    `json:"language"`  // e.g. "auto"
	TaskType string    `json:"task_type"` // e.g. "chat"
	ToolData ToolData  `json:"tool_data"`
}
76 |
+
|
// DataField is the payload carried in a MonicaRequest body.
type DataField struct {
	ConversationID  string `json:"conversation_id"`    // "conv:<uuid>"
	PreParentItemID string `json:"pre_parent_item_id"` // ItemID of the last item in Items
	Items           []Item `json:"items"`              // conversation history chained via ParentItemID
	TriggerBy       string `json:"trigger_by"`
	UseModel        string `json:"use_model,omitempty"`
	IsIncognito     bool   `json:"is_incognito"`
	UseNewMemory    bool   `json:"use_new_memory"`
}
87 |
+
|
// Item is a single message node in a Monica conversation; items are chained
// through ParentItemID.
type Item struct {
	ConversationID string      `json:"conversation_id"`
	ParentItemID   string      `json:"parent_item_id,omitempty"` // preceding item's ItemID
	ItemID         string      `json:"item_id"`                  // "msg:<uuid>"
	ItemType       string      `json:"item_type"`                // "question" or "reply"
	Data           ItemContent `json:"data"`
}
95 |
+
|
// ItemContent is the message body of an Item.
type ItemContent struct {
	Type                   string     `json:"type"` // "text" or "file_with_text"
	Content                string     `json:"content"`
	MaxToken               int        `json:"max_token,omitempty"`
	IsIncognito            bool       `json:"is_incognito,omitempty"` // incognito mode
	FromTaskType           string     `json:"from_task_type,omitempty"`
	ManualWebSearchEnabled bool       `json:"manual_web_search_enabled,omitempty"` // web search
	UseModel               string     `json:"use_model,omitempty"`
	FileInfos              []FileInfo `json:"file_infos,omitempty"` // attached files/images
}
106 |
+
|
// ToolData configures bot tools; kept empty here as a placeholder.
type ToolData struct {
	SysSkillList []string `json:"sys_skill_list"`
}
111 |
+
|
// PreSignRequest asks Monica for pre-signed upload URLs for a batch of files.
type PreSignRequest struct {
	FilenameList []string `json:"filename_list"` // names of the files to upload
	Module       string   `json:"module"`        // e.g. ImageModule
	Location     string   `json:"location"`      // e.g. ImageLocation
	ObjID        string   `json:"obj_id"`
}
119 |
+
|
// PreSignResponse is the reply to a PreSignRequest. The URL lists are
// presumably index-aligned with the requested filename list — confirm
// against the Monica API.
type PreSignResponse struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
	Data struct {
		PreSignURLList []string `json:"pre_sign_url_list"` // upload destinations
		ObjectURLList  []string `json:"object_url_list"`   // stored object URLs
		CDNURLList     []string `json:"cdn_url_list"`      // public CDN URLs
	} `json:"data"`
}
130 |
+
|
// FileInfo describes an uploaded file (e.g. an image) attached to a message.
type FileInfo struct {
	URL          string                 `json:"url,omitempty"`
	FileURL      string                 `json:"file_url"`
	FileUID      string                 `json:"file_uid"`
	Parse        bool                   `json:"parse"`
	FileName     string                 `json:"file_name"`
	FileSize     int64                  `json:"file_size"`
	FileType     string                 `json:"file_type"` // MIME type
	FileExt      string                 `json:"file_ext"`
	FileTokens   int64                  `json:"file_tokens"`
	FileChunks   int64                  `json:"file_chunks"`
	ObjectURL    string                 `json:"object_url,omitempty"`
	//Embedding bool `json:"embedding"`
	FileMetaInfo map[string]interface{} `json:"file_meta_info,omitempty"`
	UseFullText  bool                   `json:"use_full_text"`
}
148 |
+
|
// FileUploadRequest registers a batch of uploaded files with Monica's LLM
// file service.
type FileUploadRequest struct {
	Data []FileInfo `json:"data"`
}
153 |
+
|
// FileUploadResponse is the reply to a FileUploadRequest.
type FileUploadResponse struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
	Data struct {
		Items []struct {
			FileName   string `json:"file_name"`
			FileType   string `json:"file_type"`
			FileSize   int64  `json:"file_size"`
			FileUID    string `json:"file_uid"`
			FileTokens int64  `json:"file_tokens"`
			FileChunks int64  `json:"file_chunks"`
			// remaining fields are not needed for now
		} `json:"items"`
	} `json:"data"`
}
170 |
+
|
// FileBatchGetResponse reports whether LLM processing (indexing) of uploaded
// files has completed.
type FileBatchGetResponse struct {
	Data struct {
		Items []struct {
			FileName     string `json:"file_name"`
			FileType     string `json:"file_type"`
			FileSize     int    `json:"file_size"`
			ObjectUrl    string `json:"object_url"`
			Url          string `json:"url"`
			FileMetaInfo struct {
			} `json:"file_meta_info"`
			DriveFileUid  string `json:"drive_file_uid"`
			FileUid       string `json:"file_uid"`
			IndexState    int    `json:"index_state"` // indexing status code — semantics not visible here; confirm against API
			IndexDesc     string `json:"index_desc"`
			ErrorMessage  string `json:"error_message"`
			FileTokens    int64  `json:"file_tokens"`
			FileChunks    int64  `json:"file_chunks"`
			IndexProgress int    `json:"index_progress"`
		} `json:"items"`
	} `json:"data"`
}
193 |
+
|
// OpenAIModel represents a model in the OpenAI API format.
type OpenAIModel struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	OwnedBy string `json:"owned_by"`
}

// OpenAIModelList represents the response format for the /v1/models endpoint.
type OpenAIModelList struct {
	Object string        `json:"object"`
	Data   []OpenAIModel `json:"data"`
}

// GetSupportedModels returns every model this bridge exposes, in OpenAI
// /v1/models response format. All entries report "monica" as the owner.
func GetSupportedModels() OpenAIModelList {
	// Only the IDs differ between entries, so keep them in a plain table and
	// stamp out the shared metadata in a loop.
	supportedIDs := []string{
		"gpt-4o-mini",
		"gpt-4o",
		"claude-3-5-sonnet",
		"claude-3-5-haiku",
		"gemini-2.0",
		"gemini-1.5",
		"o1-mini",
		"o1-preview",
		"deepseek-reasoner",
		"deepseek-chat",
	}

	entries := make([]OpenAIModel, 0, len(supportedIDs))
	for _, id := range supportedIDs {
		entries = append(entries, OpenAIModel{ID: id, Object: "model", OwnedBy: "monica"})
	}

	return OpenAIModelList{Object: "list", Data: entries}
}
227 |
+
|
228 |
+
// ChatGPTToMonica 将 ChatGPTRequest 转换为 MonicaRequest
|
229 |
+
func ChatGPTToMonica(chatReq openai.ChatCompletionRequest) (*MonicaRequest, error) {
|
230 |
+
if len(chatReq.Messages) == 0 {
|
231 |
+
return nil, fmt.Errorf("empty messages")
|
232 |
+
}
|
233 |
+
|
234 |
+
// 生成会话ID
|
235 |
+
conversationID := fmt.Sprintf("conv:%s", uuid.New().String())
|
236 |
+
|
237 |
+
// 转换消息
|
238 |
+
|
239 |
+
// 设置默认欢迎消息头,不加上就有几率去掉问题最后的十几个token,不清楚是不是bug
|
240 |
+
defaultItem := Item{
|
241 |
+
ItemID: fmt.Sprintf("msg:%s", uuid.New().String()),
|
242 |
+
ConversationID: conversationID,
|
243 |
+
ItemType: "reply",
|
244 |
+
Data: ItemContent{Type: "text", Content: "__RENDER_BOT_WELCOME_MSG__"},
|
245 |
+
}
|
246 |
+
var items = make([]Item, 1, len(chatReq.Messages))
|
247 |
+
items[0] = defaultItem
|
248 |
+
preItemID := defaultItem.ItemID
|
249 |
+
|
250 |
+
for _, msg := range chatReq.Messages {
|
251 |
+
if msg.Role == "system" {
|
252 |
+
// monica不支持设置prompt,所以直接跳过
|
253 |
+
continue
|
254 |
+
}
|
255 |
+
var msgContext string
|
256 |
+
var imgUrl []*openai.ChatMessageImageURL
|
257 |
+
if len(msg.MultiContent) > 0 { // 说明应该是多内容,可能是图片内容
|
258 |
+
for _, content := range msg.MultiContent {
|
259 |
+
switch content.Type {
|
260 |
+
case "text":
|
261 |
+
msgContext = content.Text
|
262 |
+
case "image_url":
|
263 |
+
imgUrl = append(imgUrl, content.ImageURL)
|
264 |
+
}
|
265 |
+
}
|
266 |
+
}
|
267 |
+
itemID := fmt.Sprintf("msg:%s", uuid.New().String())
|
268 |
+
itemType := "question"
|
269 |
+
if msg.Role == "assistant" {
|
270 |
+
itemType = "reply"
|
271 |
+
}
|
272 |
+
|
273 |
+
var content ItemContent
|
274 |
+
if len(imgUrl) > 0 {
|
275 |
+
ctx := context.Background()
|
276 |
+
fileIfoList := lop.Map(imgUrl, func(item *openai.ChatMessageImageURL, _ int) FileInfo {
|
277 |
+
f, err := UploadBase64Image(ctx, item.URL)
|
278 |
+
if err != nil {
|
279 |
+
log.Println(err)
|
280 |
+
return FileInfo{}
|
281 |
+
}
|
282 |
+
return *f
|
283 |
+
})
|
284 |
+
|
285 |
+
content = ItemContent{
|
286 |
+
Type: "file_with_text",
|
287 |
+
Content: msgContext,
|
288 |
+
FileInfos: fileIfoList,
|
289 |
+
IsIncognito: true,
|
290 |
+
}
|
291 |
+
} else {
|
292 |
+
content = ItemContent{
|
293 |
+
Type: "text",
|
294 |
+
Content: msg.Content,
|
295 |
+
IsIncognito: true,
|
296 |
+
}
|
297 |
+
}
|
298 |
+
|
299 |
+
item := Item{
|
300 |
+
ConversationID: conversationID,
|
301 |
+
ItemID: itemID,
|
302 |
+
ParentItemID: preItemID,
|
303 |
+
ItemType: itemType,
|
304 |
+
Data: content,
|
305 |
+
}
|
306 |
+
items = append(items, item)
|
307 |
+
preItemID = itemID
|
308 |
+
}
|
309 |
+
|
310 |
+
// 构建请求
|
311 |
+
mReq := &MonicaRequest{
|
312 |
+
TaskUID: fmt.Sprintf("task:%s", uuid.New().String()),
|
313 |
+
BotUID: modelToBot(chatReq.Model),
|
314 |
+
Data: DataField{
|
315 |
+
ConversationID: conversationID,
|
316 |
+
Items: items,
|
317 |
+
PreParentItemID: preItemID,
|
318 |
+
TriggerBy: "auto",
|
319 |
+
IsIncognito: true,
|
320 |
+
UseModel: "", //TODO 好像写啥都没影响
|
321 |
+
UseNewMemory: false,
|
322 |
+
},
|
323 |
+
Language: "auto",
|
324 |
+
TaskType: "chat",
|
325 |
+
}
|
326 |
+
|
327 |
+
indent, err := json.MarshalIndent(mReq, "", " ")
|
328 |
+
if err != nil {
|
329 |
+
return nil, err
|
330 |
+
}
|
331 |
+
log.Printf("send: \n%s\n", indent)
|
332 |
+
|
333 |
+
return mReq, nil
|
334 |
+
}
|
335 |
+
|
// modelToBot maps an OpenAI-style model name onto the corresponding Monica
// bot UID. Matching is first-match-wins, so the more specific prefixes
// (e.g. "gpt-4o-mini") are checked before their shorter counterparts
// ("gpt-4o"). Unrecognized names fall back to claude_3.5_sonnet.
func modelToBot(model string) string {
	rules := []struct {
		matches func(string) bool
		bot     string
	}{
		{func(m string) bool { return strings.HasPrefix(m, "gpt-4o-mini") }, "gpt_4_o_mini_chat"},
		{func(m string) bool { return strings.HasPrefix(m, "gpt-4o") }, "gpt_4_o_chat"},
		{func(m string) bool { return strings.HasPrefix(m, "claude-3-5-sonnet") }, "claude_3.5_sonnet"},
		// Any haiku variant, regardless of prefix.
		{func(m string) bool { return strings.Contains(m, "haiku") }, "claude_3.5_haiku"},
		{func(m string) bool { return strings.HasPrefix(m, "gemini-2") }, "gemini_2_0"},
		{func(m string) bool { return strings.HasPrefix(m, "gemini-1") }, "gemini_1_5"},
		{func(m string) bool { return strings.HasPrefix(m, "o1-mini") }, "openai_o_1_mini"},
		{func(m string) bool { return strings.HasPrefix(m, "o1-preview") }, "openai_o_1"},
		// DeepSeek names must match exactly.
		{func(m string) bool { return m == "deepseek-reasoner" }, "deepseek_reasoner"},
		{func(m string) bool { return m == "deepseek-chat" }, "deepseek_chat"},
	}

	for _, r := range rules {
		if r.matches(model) {
			return r.bot
		}
	}
	return "claude_3.5_sonnet"
}
internal/types/openai.go
ADDED
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
package types
|
2 |
+
|
3 |
+
import "github.com/sashabaranov/go-openai"
|
4 |
+
|
// ChatCompletionStreamResponse mirrors the OpenAI streaming chat-completion
// chunk format. It is declared locally so Choices can use the local
// ChatCompletionStreamChoice type; the remaining fields reuse go-openai types.
type ChatCompletionStreamResponse struct {
	ID                  string                       `json:"id"`
	Object              string                       `json:"object"` // e.g. "chat.completion.chunk" — confirm against emitter
	Created             int64                        `json:"created"`
	Model               string                       `json:"model"`
	Choices             []ChatCompletionStreamChoice `json:"choices"`
	SystemFingerprint   string                       `json:"system_fingerprint"`
	PromptAnnotations   []openai.PromptAnnotation    `json:"prompt_annotations,omitempty"`
	PromptFilterResults []openai.PromptFilterResult  `json:"prompt_filter_results,omitempty"`
	Usage               *openai.Usage                `json:"usage,omitempty"` // only present on the final chunk, presumably — verify
}
16 |
+
|
// ChatCompletionStreamChoice is a single choice within a streaming chunk.
// Note that FinishReason has no omitempty tag, so an empty value is emitted
// as "" rather than omitted.
type ChatCompletionStreamChoice struct {
	Index        int                                        `json:"index"`
	Delta        openai.ChatCompletionStreamChoiceDelta     `json:"delta"`
	Logprobs     *openai.ChatCompletionStreamChoiceLogprobs `json:"logprobs,omitempty"`
	FinishReason openai.FinishReason                        `json:"finish_reason"`
}