Kevin Hu committed · Commit 7d04573 · 1 parent: f586a68

Fix MiniMax bug (#1528)


### What problem does this PR solve?

#1353

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

Files changed (2):

1. api/db/init_data.py (+7 -12)
2. rag/llm/__init__.py (+1 -0)
api/db/init_data.py:

```diff
@@ -573,42 +573,35 @@ def init_llm_factory():
         # ------------------------ Minimax -----------------------
         {
             "fid": factory_infos[13]["name"],
-            "llm_name": "abab6.5-chat",
+            "llm_name": "abab6.5",
             "tags": "LLM,CHAT,8k",
             "max_tokens": 8192,
             "model_type": LLMType.CHAT.value
         },
         {
             "fid": factory_infos[13]["name"],
-            "llm_name": "abab6.5s-chat",
+            "llm_name": "abab6.5s",
             "tags": "LLM,CHAT,245k",
             "max_tokens": 245760,
             "model_type": LLMType.CHAT.value
         },
         {
             "fid": factory_infos[13]["name"],
-            "llm_name": "abab6.5t-chat",
+            "llm_name": "abab6.5t",
             "tags": "LLM,CHAT,8k",
             "max_tokens": 8192,
             "model_type": LLMType.CHAT.value
         },
         {
             "fid": factory_infos[13]["name"],
-            "llm_name": "abab6.5g-chat",
+            "llm_name": "abab6.5g",
             "tags": "LLM,CHAT,8k",
             "max_tokens": 8192,
             "model_type": LLMType.CHAT.value
         },
         {
             "fid": factory_infos[13]["name"],
-            "llm_name": "abab5.5-chat",
-            "tags": "LLM,CHAT,16k",
-            "max_tokens": 16384,
-            "model_type": LLMType.CHAT.value
-        },
-        {
-            "fid": factory_infos[13]["name"],
-            "llm_name": "abab5.5s-chat",
+            "llm_name": "abab5.5s",
             "tags": "LLM,CHAT,8k",
             "max_tokens": 8192,
             "model_type": LLMType.CHAT.value
@@ -987,6 +980,8 @@ def init_llm_factory():
             LLMFactoriesService.save(**info)
         except Exception as e:
             pass
+
+    LLMService.filter_delete([(LLM.fid == "MiniMax" or LLM.fid == "Minimax")])
     for info in llm_infos:
         try:
             LLMService.save(**info)
```
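A side note on the new `filter_delete` condition in the second hunk: it chains two peewee expressions with Python's `or`. A peewee `Expression` is truthy, so `or` short-circuits to its left operand and the condition collapses to `LLM.fid == "MiniMax"` alone. Below is a minimal, self-contained sketch of that behavior next to peewee's `|` idiom; the `LLM` model here is a stand-in for illustration, not ragflow's actual schema, and `filter_delete` is assumed to forward its conditions to a `DELETE ... WHERE` query:

```python
from peewee import SqliteDatabase, Model, CharField

db = SqliteDatabase(":memory:")

class LLM(Model):          # stand-in model, not the project's actual LLM table
    fid = CharField()

    class Meta:
        database = db

db.create_tables([LLM])
LLM.create(fid="MiniMax")
LLM.create(fid="Minimax")

# Python's `or` never reaches peewee: the left Expression is truthy,
# so the whole condition collapses to (LLM.fid == "MiniMax").
broken = (LLM.fid == "MiniMax" or LLM.fid == "Minimax")
print(LLM.delete().where(broken).execute())   # -> 1 (the "Minimax" row survives)

# peewee builds a SQL OR with the overloaded | operator; both sides are
# parenthesized because | binds more tightly than ==.
fixed = (LLM.fid == "MiniMax") | (LLM.fid == "Minimax")
print(LLM.delete().where(fixed).execute())    # -> 1 (deletes the leftover row)
```

An `LLM.fid.in_(["MiniMax", "Minimax"])` clause would express the same intent; either form removes both spellings before `LLMService.save` re-seeds the rows, which appears to be what this hunk is after.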
rag/llm/__init__.py:

```diff
@@ -61,6 +61,7 @@ ChatModel = {
     "VolcEngine": VolcEngineChat,
     "BaiChuan": BaiChuanChat,
     "MiniMax": MiniMaxChat,
+    "Minimax": MiniMaxChat,
     "Mistral": MistralChat,
     'Gemini' : GeminiChat,
     "Bedrock": BedrockChat,
```