Tuchuanhuhuhu
committed on
Commit · ba32a82
1 Parent(s): 8beda5f
chore: rename functions
Files changed:
- ChuanhuChatbot.py +1 -1
- modules/models/base_model.py +1 -1
- modules/utils.py +2 -2
ChuanhuChatbot.py
CHANGED
@@ -641,7 +641,7 @@ with gr.Blocks(theme=small_and_beautiful_theme) as demo:
 
     # S&L
     renameHistoryBtn.click(
-
+        rename_chat_history,
        [current_model, saveFileName, chatbot, user_name],
        [historySelectList],
        show_progress=True,
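
The positional arguments to Gradio's Button.click map to (fn, inputs, outputs), so after this hunk the rename button invokes the module-level rename_chat_history wrapper (added in modules/utils.py below) with the four listed components as inputs and writes the result into historySelectList. The same binding in keyword form, as a sketch that assumes the surrounding components are defined elsewhere in the Blocks layout:

# Keyword-argument rendering of the binding above (sketch only; the
# components are defined elsewhere inside the gr.Blocks context).
renameHistoryBtn.click(
    fn=rename_chat_history,
    inputs=[current_model, saveFileName, chatbot, user_name],
    outputs=[historySelectList],
    show_progress=True,
)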
modules/models/base_model.py
CHANGED
@@ -661,7 +661,7 @@ class BaseLLMModel:
             token_sum += sum(token_lst[: i + 1])
         return i18n("Token 计数: ") + f"{sum(token_lst)}" + i18n(",本次对话累计消耗了 ") + f"{token_sum} tokens"
 
-    def
+    def rename_chat_history(self, filename, chatbot, user_name):
         if filename == "":
             return gr.update(), gr.update()
         if not filename.endswith(".json"):
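
Only the signature and the first two guard clauses of the renamed BaseLLMModel method are visible in this hunk. Returning gr.update() with no arguments leaves the bound output components untouched, so submitting an empty filename is a no-op in the UI. A minimal sketch of how the method reads; everything after the .json check is an assumption, not the repository's actual code:

import gradio as gr

class BaseLLMModel:
    # Sketch: only the first four lines of the body mirror the hunk above.
    def rename_chat_history(self, filename, chatbot, user_name):
        if filename == "":
            # No new name given: leave both output components unchanged.
            return gr.update(), gr.update()
        if not filename.endswith(".json"):
            filename += ".json"  # assumed: normalize the history file extension
        # Assumed continuation: persist the history under the new name and
        # refresh the history selector; the real helpers are not shown here.
        ...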
modules/utils.py
CHANGED
@@ -68,8 +68,8 @@ def delete_last_conversation(current_model, *args):
 def set_system_prompt(current_model, *args):
     return current_model.set_system_prompt(*args)
 
-def
-    return current_model.
+def rename_chat_history(current_model, *args):
+    return current_model.rename_chat_history(*args)
 
 def export_markdown(current_model, *args):
     return current_model.export_markdown(*args)
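
All three hunks touch the same delegation chain: the click event passes the current model (held in a gr.State) plus the raw UI values to a thin module-level wrapper in modules/utils.py, which forwards them to the method of the same name on the model object, so the method, the wrapper, and the click binding have to be renamed together. A self-contained sketch of that pattern, using made-up component and method names rather than the project's own:

import gradio as gr

class DummyModel:
    # Stand-in for BaseLLMModel; only the delegation target matters here.
    def greet(self, name):
        return f"hello, {name}"

def greet(current_model, *args):
    # Same shape as the wrappers in modules/utils.py: the first input is the
    # model held in gr.State, the remaining inputs go straight to its method.
    return current_model.greet(*args)

with gr.Blocks() as demo:
    current_model = gr.State(DummyModel())
    name = gr.Textbox(label="name")
    out = gr.Textbox(label="out")
    gr.Button("Go").click(greet, [current_model, name], [out])

# demo.launch()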