Update app.py
app.py CHANGED
@@ -740,13 +740,13 @@ class ConversationManager:

 conversation_manager = ConversationManager()

-def chat_function(message
+def chat_function(message, history, conversation_id, num_results, max_chars, time_range, language, category, engines, safesearch, method, llm_temperature, model, use_pydf2):
     if not conversation_id:
         conversation_id = conversation_manager.create_conversation(message[:30] + "...")

     conversation = conversation_manager.get_conversation(conversation_id)
     if not conversation:
-        return "Error: Conversation not found", conversation_id
+        return "Error: Conversation not found", conversation_id, conversation_manager.get_conversation_list()

     conversation.add_message("user", message)


@@ -774,13 +774,16 @@ def chat_function(message: str, history: List[Tuple[str, str]], conversation_id:
     )

     conversation.add_message("assistant", response)
-
+    history.append((message, response))
+    return history, conversation_id, conversation_manager.get_conversation_list()

-def load_conversation(
+def load_conversation(evt: gr.SelectData):
+    conversation_id = evt.value['id']
     conversation = conversation_manager.get_conversation(conversation_id)
     if not conversation:
         return [], ""
-
+    history = [(msg["content"], msg["content"]) if msg["role"] == "user" else (None, msg["content"]) for msg in conversation.messages]
+    return history, conversation.title, conversation_id

 # Create the Gradio interface
 with gr.Blocks(theme=gr.themes.Soft()) as iface:

@@ -788,10 +791,11 @@ with gr.Blocks(theme=gr.themes.Soft()) as iface:

     with gr.Row():
         with gr.Column(scale=1):
-            conversation_list = gr.
-
+            conversation_list = gr.Dataframe(
+                headers=["id", "title"],
+                datatype=["str", "str"],
                 label="Conversations",
-
+                interactive=False
             )

         with gr.Column(scale=3):

@@ -817,30 +821,24 @@ with gr.Blocks(theme=gr.themes.Soft()) as iface:
             model = gr.Dropdown(["huggingface", "groq", "mistral"], value="mistral", label="LLM Model")
             use_pydf2 = gr.Checkbox(label="Use PyPDF2 for PDF scraping", value=False)

-    def
-        return
-
-    def bot(history, conversation_id, num_results, max_chars, time_range, language, category, engines, safesearch, method, llm_temperature, model, use_pydf2):
-        user_message = history[-1][0]
-        bot_message, new_conversation_id = chat_function(
+    def bot(user_message, history, conversation_id, num_results, max_chars, time_range, language, category, engines, safesearch, method, llm_temperature, model, use_pydf2):
+        return chat_function(
             user_message, history, conversation_id, num_results, max_chars, time_range,
             language, category, engines, safesearch, method, llm_temperature, model, use_pydf2
         )
-        history[-1][1] = bot_message
-        return history, new_conversation_id, conversation_manager.get_conversation_list()

-    send.click(
+    send.click(
         bot,
-        [chatbot, conversation_id, num_results, max_chars, time_range, language, category, engines, safesearch, method, llm_temperature, model, use_pydf2],
+        [msg, chatbot, conversation_id, num_results, max_chars, time_range, language, category, engines, safesearch, method, llm_temperature, model, use_pydf2],
         [chatbot, conversation_id, conversation_list]
     )

-    msg.submit(
+    msg.submit(
         bot,
-        [chatbot, conversation_id, num_results, max_chars, time_range, language, category, engines, safesearch, method, llm_temperature, model, use_pydf2],
+        [msg, chatbot, conversation_id, num_results, max_chars, time_range, language, category, engines, safesearch, method, llm_temperature, model, use_pydf2],
         [chatbot, conversation_id, conversation_list]
     )

-    conversation_list.select(load_conversation,
+    conversation_list.select(load_conversation, None, [chatbot, msg, conversation_id])

 iface.launch(share=True)
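For context, the new load_conversation handler relies on Gradio's event-data pattern: a handler that declares a gr.SelectData parameter gets it filled in automatically by the component's .select() listener, so no explicit inputs are required. A minimal, self-contained sketch of that pattern follows; the component and variable names (rows, on_select, table, status) are illustrative placeholders, not code from app.py.

import gradio as gr

# Toy stand-in for conversation_manager.get_conversation_list()
rows = [["abc123", "First chat"], ["def456", "Second chat"]]

def on_select(evt: gr.SelectData):
    # evt.index holds the (row, column) that was clicked; evt.value holds the cell value.
    row, _col = evt.index
    conversation_id, title = rows[row]
    return f"Loaded '{title}' ({conversation_id})"

with gr.Blocks() as demo:
    table = gr.Dataframe(value=rows, headers=["id", "title"], interactive=False, label="Conversations")
    status = gr.Textbox(label="Selection")
    # No inputs are passed: the SelectData argument is injected by the .select() event itself.
    table.select(on_select, None, status)

demo.launch()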
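Similarly, the send.click(...) and msg.submit(...) listeners follow the usual Gradio convention: the listed input components are passed to the callback positionally, and the values it returns are written back to the output components in order. A stripped-down sketch of that wiring, again with placeholder names (respond, chatbot, msg, send) rather than the real handler from app.py:

import gradio as gr

def respond(user_message, history):
    # Append the new exchange as a (user, bot) tuple and clear the textbox.
    history = history + [(user_message, f"Echo: {user_message}")]
    return history, ""

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Message")
    send = gr.Button("Send")

    # Inputs map to respond()'s parameters in order; the returned tuple fills the outputs in order.
    send.click(respond, [msg, chatbot], [chatbot, msg])
    msg.submit(respond, [msg, chatbot], [chatbot, msg])

demo.launch()

The commit applies the same shape with the full settings list (num_results, max_chars, and so on) as inputs and routes the refreshed conversation list into the conversation_list Dataframe as a third output.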