# Import Gradio for the UI, along with the FastAPI server and the RAG agent
import gradio as gr
from fastapi import FastAPI
from rag_app.agents.react_agent import agent_executor, llm
# The next two imports are used by the (currently disabled) sentiment check in infer()
from rag_app.chains import user_response_sentiment_prompt
from rag_app.utils.utils import extract_responses
from rag_app.loading_data.load_S3_vector_stores import get_chroma_vs
# need to import the qa!

app = FastAPI()
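# Assumption based on its module path: get_chroma_vs() fetches the Chroma
# vector store from S3 and prepares it for retrieval before requests are served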
get_chroma_vs()

if __name__ == "__main__":

    # Function to add a new input to the chat history
    def add_text(history, text):
        # Append the new text to the history with a placeholder for the response
        history = history + [(text, None)]
        return history, ""

    # Function representing the bot's response mechanism
    def bot(history):
        # Obtain the response from the 'infer' function using the latest input
        response = infer(history[-1][0], history)
        # Update the history with the agent's answer; agent_executor returns
        # its reply under the 'output' key
        history[-1][1] = response['output']
        return history

    # Function to infer the response using the RAG agent
    def infer(question, history):
        # Legacy QA-chain call, kept for reference (see the import TODO above):
        # result = qa({"query": question, "history": history, "question": question})

        # Optional sentiment check on the user's message (currently disabled):
        # try:
        #     data = user_sentiment_chain.invoke({"user_reponse": question})
        #     responses = extract_responses(data)
        #     if responses['AI'] == "1":
        #         pass  # e.g. flag the response and store it in a database
        # except Exception as e:
        #     raise e

        try:
            # Query the agent with the question and the running chat history
            result = agent_executor.invoke(
                {
                    "input": question,
                    "chat_history": history
                }
            )
            return result
        except Exception as e:
            # Surface the failure in the UI instead of crashing the app
            raise gr.Error(f"Model is overloaded or unavailable, please retry later! ({e})")

    # Log like/dislike feedback from the chat UI
    def vote(data: gr.LikeData):
        if data.liked:
            print("You upvoted this response: " + str(data.value))
        else:
            print("You downvoted this response: " + str(data.value))

    # CSS styling for the Gradio interface
    css = """
    #col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
    """

    # HTML content for the Gradio interface title
    title = """
    <div style="text-align:left;">
        <p>Hello, I'm BotTina 2.0, your intelligent AI assistant. I can help you explore Wuerttembergische Versicherung's products.</p>
    </div>
    """

    # Building the Gradio interface
    with gr.Blocks(theme=gr.themes.Soft()) as demo:
        with gr.Column(elem_id="col-container"):
            gr.HTML(title)  # Add the HTML title to the interface
            chatbot = gr.Chatbot([], elem_id="chatbot",
                                        label="BotTina 2.0",
                                        bubble_full_width=False,
                                        avatar_images=(None, "https://dacodi-production.s3.amazonaws.com/store/87bc00b6727589462954f2e3ff6f531c.png"),
                                        height=680,)  # Initialize the chatbot component
            chatbot.like(vote, None, None)
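            # .like passes a gr.LikeData payload (with .liked and .value) to
            # vote() whenever a user clicks the thumbs-up/down icons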
            clear = gr.Button("Clear")  # Add a button to clear the chat

            # Create a row for the question input
            with gr.Row():
                question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter ")

        # Define the action when the question is submitted
        question.submit(add_text, [chatbot, question], [chatbot, question], queue=False).then(
            bot, chatbot, chatbot
        )
        # Define the action for the clear button
        clear.click(lambda: None, None, chatbot, queue=False)

    # Launch the Gradio demo interface (this call blocks until the server exits)
    demo.queue().launch(share=False, debug=True)

    # Also mount the demo on the FastAPI app; note this line only runs after
    # launch() returns, and only when the script is executed directly
    app = gr.mount_gradio_app(app, demo, path="/")
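
    # A minimal sketch of serving the mounted app with an ASGI server instead
    # of demo.launch(), assuming this file is saved as app.py (hypothetical
    # name). Because mount_gradio_app is called inside the __main__ guard, it
    # would have to move to module level for this to take effect:
    #
    #   uvicorn app:app --host 0.0.0.0 --port 7860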