import os
from dotenv import load_dotenv
from langchain_community.vectorstores import Qdrant
from langchain_huggingface import HuggingFaceEmbeddings
from langchain.prompts import ChatPromptTemplate
from langchain.schema.runnable import RunnablePassthrough
from langchain.schema.output_parser import StrOutputParser
from qdrant_client import QdrantClient, models
from langchain_openai import ChatOpenAI
import gradio as gr
import logging
from typing import List, Tuple
from dataclasses import dataclass
from datetime import datetime
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from langchain_huggingface.llms import HuggingFacePipeline
import spaces
# [Previous imports and configurations remain the same]
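# ---------------------------------------------------------------------------
# NOTE: the original file elides its setup behind the comment above. The block
# below is only a minimal placeholder sketch of the pieces the interface code
# relies on (logger, chat_history, create_rag_chain), assuming a Qdrant-backed
# retriever, HuggingFace embeddings, and an OpenAI-compatible chat model. The
# model names, collection name, env vars, and prompt text are illustrative
# assumptions, not the original configuration.
# ---------------------------------------------------------------------------
load_dotenv()
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


@dataclass
class ChatMessage:
    role: str
    content: str
    timestamp: str


class ChatHistory:
    """In-memory conversation history shared across the Gradio callbacks."""

    def __init__(self):
        self.messages: List[ChatMessage] = []

    def add_message(self, role: str, content: str) -> None:
        self.messages.append(
            ChatMessage(role=role, content=content, timestamp=datetime.now().isoformat())
        )

    def get_formatted_history(self, max_messages: int = 10) -> str:
        return "\n".join(f"{m.role}: {m.content}" for m in self.messages[-max_messages:])

    def clear(self) -> None:
        self.messages = []


chat_history = ChatHistory()

# Assumed retrieval setup: an existing Qdrant collection plus an OpenAI-compatible model.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
qdrant_client = QdrantClient(url=os.getenv("QDRANT_URL"), api_key=os.getenv("QDRANT_API_KEY"))
vector_store = Qdrant(
    client=qdrant_client,
    collection_name=os.getenv("QDRANT_COLLECTION", "mawared_hr"),  # assumed collection name
    embeddings=embeddings,
)
retriever = vector_store.as_retriever(search_kwargs={"k": 5})

llm = ChatOpenAI(
    model=os.getenv("LLM_MODEL", "gpt-4o-mini"),  # assumed default model
    api_key=os.getenv("OPENAI_API_KEY"),
    temperature=0,
    streaming=True,
)

prompt = ChatPromptTemplate.from_template(
    "You are a helpful assistant for the Mawared HR system.\n\n"
    "Conversation so far:\n{chat_history}\n\n"
    "Context:\n{context}\n\n"
    "Question: {question}\nAnswer:"
)


def format_docs(docs):
    """Join retrieved documents into a single context string."""
    return "\n\n".join(doc.page_content for doc in docs)


def create_rag_chain(formatted_history: str):
    """Build a retrieval-augmented chain that streams string answer chunks."""
    return (
        {
            "context": retriever | format_docs,
            "question": RunnablePassthrough(),
            "chat_history": lambda _: formatted_history,
        }
        | prompt
        | llm
        | StrOutputParser()
    )
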
# Modified Gradio Interface
with gr.Blocks(theme=gr.themes.Soft()) as iface:
    gr.Image("Image.jpg", width=1200, height=300, show_label=False, show_download_button=False)
    gr.Markdown("# Mawared HR Assistant")
    gr.Markdown("Ask questions about the Mawared HR system, and this assistant will provide answers based on the available context and conversation history.")

    # Create a state to store the latest assistant response
    latest_response = gr.State("")

    with gr.Row():
        chatbot = gr.Chatbot(
            height=400,
            show_label=False,
            type="messages",
        )

    with gr.Row():
        # Copy button for the most recent assistant response
        copy_button = gr.Button("📋 Copy Last Response", visible=True)

    with gr.Row():
        question_input = gr.Textbox(
            label="Ask a question:",
            placeholder="Type your question here...",
            scale=25,
        )
        clear_button = gr.Button("Clear Chat", scale=1)
    def copy_last_response(history):
        if history:
            # Find the last assistant message
            for message in reversed(history):
                if message["role"] == "assistant":
                    return message["content"]
        return ""
    # Run the RAG chain for a question and append the turn to both histories
    def ask_question_gradio(question, history):
        try:
            # Add the user question to the persistent chat history
            chat_history.add_message("user", question)

            # Get the formatted history for the prompt
            formatted_history = chat_history.get_formatted_history()

            # Create a chain aware of the current chat history
            rag_chain = create_rag_chain(formatted_history)

            # Generate the response by accumulating streamed chunks
            response = ""
            for chunk in rag_chain.stream(question):
                response += chunk

            # Add the assistant response to the persistent chat history
            chat_history.add_message("assistant", response)

            # Update the Gradio chat history (messages format)
            history.append({"role": "user", "content": question})
            history.append({"role": "assistant", "content": response})

            return "", history
        except Exception as e:
            logger.error(f"Error during question processing: {e}")
            return "", history + [{"role": "assistant", "content": "An error occurred. Please try again later."}]

    def clear_chat():
        chat_history.clear()
        return [], ""
    # Connect the components
    question_input.submit(
        ask_question_gradio,
        inputs=[question_input, chatbot],
        outputs=[question_input, chatbot],
    )

    clear_button.click(
        clear_chat,
        outputs=[chatbot, question_input],
    )
    # Copy-button functionality: store the last assistant reply in the state,
    # then copy it to the clipboard on the client. The front-end js reads the
    # chat history directly because gr.State values are kept server-side.
    copy_button.click(
        copy_last_response,
        inputs=[chatbot],
        outputs=[latest_response],
    ).then(
        fn=None,
        inputs=[chatbot],
        outputs=None,
        js="""
        async (history) => {
            // Find the last assistant message in the messages-format history
            let last = "";
            for (let i = history.length - 1; i >= 0; i--) {
                if (history[i].role === "assistant") {
                    last = history[i].content;
                    break;
                }
            }
            await navigator.clipboard.writeText(last);

            // Optional: show a toast notification
            const toast = document.createElement('div');
            toast.textContent = 'Response copied to clipboard!';
            toast.style.position = 'fixed';
            toast.style.bottom = '20px';
            toast.style.right = '20px';
            toast.style.backgroundColor = '#4CAF50';
            toast.style.color = 'white';
            toast.style.padding = '15px';
            toast.style.borderRadius = '5px';
            toast.style.zIndex = '1000';
            document.body.appendChild(toast);
            setTimeout(() => toast.remove(), 2000);
        }
        """,
    )
# Launch the Gradio App
if __name__ == "__main__":
    iface.launch()