File size: 11,406 Bytes
9231019
 
 
 
dac1960
9231019
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
dac1960
9231019
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f6c3932
 
9231019
 
f6c3932
 
 
 
9231019
 
 
f6c3932
 
9231019
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
dac1960
 
 
 
 
9231019
 
 
 
 
 
 
 
 
 
f6c3932
 
 
 
9231019
 
f6c3932
9231019
f6c3932
 
 
9231019
 
 
 
 
f6c3932
 
9231019
 
 
 
f6c3932
 
 
 
9231019
f6c3932
9231019
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
package types

import (
	"context"

	"fmt"
	"log"
	"strings"

	lop "github.com/samber/lo/parallel"

	"github.com/google/uuid"
	"github.com/sashabaranov/go-openai"
)

//const (
//	MonicaModelGPT4o        = "gpt-4o"
//	MonicaModelGPT4oMini    = "gpt-4o-mini"
//	MonicaModelClaudeSonnet = "claude-3"
//	MonicaModelClaudeHaiku  = "claude-3.5-haiku"
//	MonicaModelGemini2      = "gemini_2_0"
//	MonicaModelO1Preview    = "openai_o_1"
//	MonicaModelO1Mini       = "openai-o-1-mini"
//)

// Monica API endpoints used by this proxy.
const (
	BotChatURL    = "https://api.monica.im/api/custom_bot/chat"                      // custom-bot chat (streaming) endpoint
	PreSignURL    = "https://api.monica.im/api/file_object/pre_sign_list_by_module"  // request pre-signed upload URLs
	FileUploadURL = "https://api.monica.im/api/files/batch_create_llm_file"          // register uploaded files for LLM use
	FileGetURL    = "https://api.monica.im/api/files/batch_get_file"                 // poll file indexing status
)

// Image-upload constants.
const (
	MaxImageSize  = 10 * 1024 * 1024 // maximum accepted image size: 10MB
	ImageModule   = "chat_bot"       // Monica module name used in pre-sign requests
	ImageLocation = "files"          // Monica storage location used in pre-sign requests
)

// SupportedImageTypes lists the image MIME types accepted for upload.
var SupportedImageTypes = map[string]bool{
	"image/jpeg": true,
	"image/png":  true,
	"image/gif":  true,
	"image/webp": true,
}

// ChatGPTRequest is an OpenAI-compatible chat completion request body.
type ChatGPTRequest struct {
	Model    string        `json:"model"`    // e.g. gpt-3.5-turbo, gpt-4, ...
	Messages []ChatMessage `json:"messages"` // conversation history
	Stream   bool          `json:"stream"`   // whether to stream the response
}

// ChatMessage is a single turn in an OpenAI-style conversation.
type ChatMessage struct {
	Role    string      `json:"role"`    // "system", "user", "assistant"
	Content interface{} `json:"content"` // either a plain string or a []MessageContent
}

// MessageContent is one part of a multi-part message body.
type MessageContent struct {
	Type     string `json:"type"`                // "text" or "image_url"
	Text     string `json:"text,omitempty"`      // text payload when Type == "text"
	ImageURL string `json:"image_url,omitempty"` // image URL when Type == "image_url"
}

// MonicaRequest is the request body for Monica's custom-bot chat API.

type MonicaRequest struct {
	TaskUID  string    `json:"task_uid"` // "task:<uuid>", fresh per request
	BotUID   string    `json:"bot_uid"`  // Monica bot identifier, see modelToBot
	Data     DataField `json:"data"`
	Language string    `json:"language"`
	TaskType string    `json:"task_type"`
	ToolData ToolData  `json:"tool_data"`
}

// DataField is the "data" payload inside a MonicaRequest body.
type DataField struct {
	ConversationID  string `json:"conversation_id"`    // "conv:<uuid>", shared by all items
	PreParentItemID string `json:"pre_parent_item_id"` // item ID the reply should attach to
	Items           []Item `json:"items"`              // full conversation history
	TriggerBy       string `json:"trigger_by"`
	UseModel        string `json:"use_model,omitempty"`
	IsIncognito     bool   `json:"is_incognito"`
	UseNewMemory    bool   `json:"use_new_memory"`
}

// Item is one message node in Monica's conversation tree; items are
// chained via ParentItemID.
type Item struct {
	ConversationID string      `json:"conversation_id"`
	ParentItemID   string      `json:"parent_item_id,omitempty"` // empty for the root item
	ItemID         string      `json:"item_id"`                  // "msg:<uuid>"
	ItemType       string      `json:"item_type"`                // "question" or "reply"
	Data           ItemContent `json:"data"`
}

// ItemContent is the payload of a conversation Item.
type ItemContent struct {
	Type                   string     `json:"type"` // "text" or "file_with_text"
	Content                string     `json:"content"`
	MaxToken               int        `json:"max_token,omitempty"`
	IsIncognito            bool       `json:"is_incognito,omitempty"` // incognito (no-history) mode
	FromTaskType           string     `json:"from_task_type,omitempty"`
	ManualWebSearchEnabled bool       `json:"manual_web_search_enabled,omitempty"` // web search toggle
	UseModel               string     `json:"use_model,omitempty"`
	FileInfos              []FileInfo `json:"file_infos,omitempty"` // attachments when Type == "file_with_text"
}

// ToolData carries Monica's tool/skill configuration; left empty here.
type ToolData struct {
	SysSkillList []string `json:"sys_skill_list"`
}

// PreSignRequest asks Monica for pre-signed upload URLs (sent to PreSignURL).
type PreSignRequest struct {
	FilenameList []string `json:"filename_list"` // names of files to upload
	Module       string   `json:"module"`        // e.g. ImageModule
	Location     string   `json:"location"`      // e.g. ImageLocation
	ObjID        string   `json:"obj_id"`
}

// PreSignResponse is Monica's answer to a PreSignRequest; the three lists
// are parallel, one entry per requested filename.
type PreSignResponse struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
	Data struct {
		PreSignURLList []string `json:"pre_sign_url_list"` // PUT targets for the raw bytes
		ObjectURLList  []string `json:"object_url_list"`   // stored object URLs
		CDNURLList     []string `json:"cdn_url_list"`      // public CDN URLs
	} `json:"data"`
}

// FileInfo describes an uploaded file as Monica tracks it; used both when
// registering uploads and when attaching files to a chat item.
type FileInfo struct {
	URL        string `json:"url,omitempty"`
	FileURL    string `json:"file_url"`
	FileUID    string `json:"file_uid"`
	Parse      bool   `json:"parse"`
	FileName   string `json:"file_name"`
	FileSize   int64  `json:"file_size"`
	FileType   string `json:"file_type"`
	FileExt    string `json:"file_ext"`
	FileTokens int64  `json:"file_tokens"`
	FileChunks int64  `json:"file_chunks"`
	ObjectURL  string `json:"object_url,omitempty"`
	//Embedding    bool                   `json:"embedding"`
	FileMetaInfo map[string]interface{} `json:"file_meta_info,omitempty"`
	UseFullText  bool                   `json:"use_full_text"`
}

// FileUploadRequest registers uploaded files with Monica (sent to FileUploadURL).
type FileUploadRequest struct {
	Data []FileInfo `json:"data"`
}

// FileUploadResponse is Monica's answer to a FileUploadRequest.
type FileUploadResponse struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
	Data struct {
		Items []struct {
			FileName   string `json:"file_name"`
			FileType   string `json:"file_type"`
			FileSize   int64  `json:"file_size"`
			FileUID    string `json:"file_uid"`
			FileTokens int64  `json:"file_tokens"`
			FileChunks int64  `json:"file_chunks"`
			// remaining response fields are not needed yet
		} `json:"items"`
	} `json:"data"`
}

// FileBatchGetResponse reports whether Monica has finished LLM-indexing a
// batch of files (response of FileGetURL).
type FileBatchGetResponse struct {
	Data struct {
		Items []struct {
			FileName     string `json:"file_name"`
			FileType     string `json:"file_type"`
			FileSize     int    `json:"file_size"`
			ObjectUrl    string `json:"object_url"`
			Url          string `json:"url"`
			FileMetaInfo struct {
			} `json:"file_meta_info"`
			DriveFileUid  string `json:"drive_file_uid"`
			FileUid       string `json:"file_uid"`
			IndexState    int    `json:"index_state"`    // indexing state code
			IndexDesc     string `json:"index_desc"`     // human-readable state
			ErrorMessage  string `json:"error_message"`
			FileTokens    int64  `json:"file_tokens"`
			FileChunks    int64  `json:"file_chunks"`
			IndexProgress int    `json:"index_progress"` // percentage — TODO confirm units
		} `json:"items"`
	} `json:"data"`
}

// OpenAIModel represents a model in the OpenAI API format.
type OpenAIModel struct {
	ID      string `json:"id"`
	Object  string `json:"object"` // always "model"
	OwnedBy string `json:"owned_by"`
}

// OpenAIModelList represents the response format for the /v1/models endpoint.
type OpenAIModelList struct {
	Object string        `json:"object"` // always "list"
	Data   []OpenAIModel `json:"data"`
}

// GetSupportedModels returns the catalog of models this proxy exposes,
// formatted as an OpenAI /v1/models response. Every model is reported as
// owned by "monica".
func GetSupportedModels() OpenAIModelList {
	ids := []string{
		"gpt-4o-mini",
		"gpt-4o",
		"claude-3-7-sonnet",
		"claude-3-7-sonnet-thinking",
		"claude-3-5-sonnet",
		"claude-3-5-haiku",
		"gemini-2.0-pro",
		"gemini-2.0-flash",
		"gemini-1.5-pro",
		"o3-mini",
		"o1-preview",
		"deepseek-reasoner",
		"deepseek-chat",
		"deepclaude",
		"sonar",
	}

	data := make([]OpenAIModel, 0, len(ids))
	for _, id := range ids {
		data = append(data, OpenAIModel{ID: id, Object: "model", OwnedBy: "monica"})
	}

	return OpenAIModelList{Object: "list", Data: data}
}

// ChatGPTToMonica converts an OpenAI-style chat completion request into
// Monica's custom-bot request format. It returns an error when the request
// carries no messages.
//
// System messages are dropped (Monica has no system-prompt slot). Image
// parts of multi-content messages are uploaded to Monica in parallel and
// attached as file infos on a "file_with_text" item.
func ChatGPTToMonica(chatReq openai.ChatCompletionRequest) (*MonicaRequest, error) {
	if len(chatReq.Messages) == 0 {
		return nil, fmt.Errorf("empty messages")
	}

	// Generate a fresh conversation ID for this request.
	conversationID := fmt.Sprintf("conv:%s", uuid.New().String())

	// Convert the messages into Monica items.

	// Seed the item chain with a default bot welcome message. Without it
	// Monica sometimes drops the last dozen-or-so tokens of the question;
	// unclear whether that is a Monica bug.
	defaultItem := Item{
		ItemID:         fmt.Sprintf("msg:%s", uuid.New().String()),
		ConversationID: conversationID,
		ItemType:       "reply",
		Data:           ItemContent{Type: "text", Content: "__RENDER_BOT_WELCOME_MSG__"},
	}
	var items = make([]Item, 1, len(chatReq.Messages))
	items[0] = defaultItem
	preItemID := defaultItem.ItemID

	for _, msg := range chatReq.Messages {
		if msg.Role == "system" {
			// Monica does not support a system prompt, so skip it entirely.
			continue
		}
		var msgContext string
		var imgUrl []*openai.ChatMessageImageURL
		if len(msg.MultiContent) > 0 { // multi-part content: may contain images
			for _, content := range msg.MultiContent {
				switch content.Type {
				case "text":
					msgContext = content.Text
				case "image_url":
					imgUrl = append(imgUrl, content.ImageURL)
				}
			}
		}
		itemID := fmt.Sprintf("msg:%s", uuid.New().String())
		itemType := "question"
		if msg.Role == "assistant" {
			itemType = "reply"
		}

		var content ItemContent
		if len(imgUrl) > 0 {
			// Upload all images in parallel, preserving order.
			// NOTE(review): a failed upload is logged and yields a
			// zero-value FileInfo that is still attached — consider
			// filtering failures out instead.
			ctx := context.Background()
			fileIfoList := lop.Map(imgUrl, func(item *openai.ChatMessageImageURL, _ int) FileInfo {
				f, err := UploadBase64Image(ctx, item.URL)
				if err != nil {
					log.Println(err)
					return FileInfo{}
				}
				return *f
			})

			content = ItemContent{
				Type:        "file_with_text",
				Content:     msgContext,
				FileInfos:   fileIfoList,
				IsIncognito: true,
			}
		} else {
			content = ItemContent{
				Type:        "text",
				Content:     msg.Content,
				IsIncognito: true,
			}
		}

		// Chain the item to the previous one via ParentItemID.
		item := Item{
			ConversationID: conversationID,
			ItemID:         itemID,
			ParentItemID:   preItemID,
			ItemType:       itemType,
			Data:           content,
		}
		items = append(items, item)
		preItemID = itemID
	}

	// Build the final request; the reply will attach to the last item.
	mReq := &MonicaRequest{
		TaskUID: fmt.Sprintf("task:%s", uuid.New().String()),
		BotUID:  modelToBot(chatReq.Model),
		Data: DataField{
			ConversationID:  conversationID,
			Items:           items,
			PreParentItemID: preItemID,
			TriggerBy:       "auto",
			IsIncognito:     true,
			UseModel:        "", // TODO: value appears to have no effect
			UseNewMemory:    false,
		},
		Language: "auto",
		TaskType: "chat",
	}

	// Debug dump of the outgoing request, kept for troubleshooting:
	// indent, err := json.MarshalIndent(mReq, "", "  ")
	// if err != nil {
	// 	return nil, err
	// }
	// log.Printf("send: \n%s\n", indent)

	return mReq, nil
}

// modelToBot maps an OpenAI-style model name to the Monica bot UID that
// serves it. Versioned names resolve via prefix matching; a few models are
// matched exactly. Unknown model names pass through unchanged.
func modelToBot(model string) string {
	// Models matched by exact name only.
	exact := map[string]string{
		"deepseek-reasoner": "deepseek_reasoner",
		"deepseek-chat":     "deepseek_chat",
		"deepclaude":        "deepclaude",
		"sonar":             "sonar",
	}
	if bot, ok := exact[model]; ok {
		return bot
	}

	// Prefix-matched models. Order matters: more specific prefixes must
	// precede their shorter counterparts (e.g. "gpt-4o-mini" before "gpt-4o").
	table := []struct {
		prefix string
		bot    string
	}{
		{"gpt-4o-mini", "gpt_4_o_mini_chat"},
		{"gpt-4o", "gpt_4_o_chat"},
		{"claude-3-7-sonnet-thinking", "claude_3_7_sonnet_think"},
		{"claude-3-7-sonnet", "claude_3_7_sonnet"},
		{"claude-3-5-sonnet", "claude_3.5_sonnet"},
		{"claude-3-5-haiku", "claude_3.5_haiku"},
		{"gemini-2.0-pro", "gemini_2_0_pro"},
		{"gemini-2.0-flash", "gemini_2_0"},
		{"gemini-1", "gemini_1_5"},
		{"o1-preview", "openai_o_1"},
		{"o3-mini", "openai_o_3_mini"},
	}
	for _, entry := range table {
		if strings.HasPrefix(model, entry.prefix) {
			return entry.bot
		}
	}

	return model
}