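"""Gradio chat front end for a Griptape Cloud candidate/resume-search Structure.

Per-session agents keep their conversation memory in Griptape Cloud threads and
run the search either through a StructureRunTool or by calling the Structure
run API directly.
"""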
import os
import time
from typing import Any
from urllib.parse import urljoin

import gradio as gr
import requests
from dotenv import load_dotenv
from griptape.structures import Agent
from griptape.tasks import PromptTask
from griptape.drivers import (
    GriptapeCloudStructureRunDriver,
    LocalStructureRunDriver,
    GriptapeCloudConversationMemoryDriver,
)
from griptape.memory.structure import ConversationMemory
from griptape.tools import StructureRunTool
from griptape.rules import Rule, Ruleset
from griptape.configs.drivers import AnthropicDriversConfig
from griptape.configs import Defaults


# Load environment variables
load_dotenv()

Defaults.drivers_config = AnthropicDriversConfig()


base_url = "https://cloud.griptape.ai"

headers_api = {
    "Authorization": f"Bearer {os.environ['GT_CLOUD_API_KEY']}",
    "Content-Type": "application/json",
}

# Cache of Griptape Cloud thread ids, keyed by Gradio session hash.
threads = {}

# custom_css = """
# #component-2 {
#     height: 75vh !important;
#     min-height: 600px !important;
# """


def create_thread_id(session_id: str) -> str:
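    """Create a Griptape Cloud thread for this session if needed and return its id."""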
    if session_id not in threads:
        params = {
            "name": session_id,
            "messages": [],
        }
        response = requests.post(
            url=urljoin(base_url, "/api/threads"), headers=headers_api, json=params
        )
        response.raise_for_status()
        thread_id = response.json()["thread_id"]
        threads[session_id] = thread_id
        return thread_id
    else:
        return threads[session_id]


# build_talk_agent (below) creates an agent that formulates a prompt to use as input for the query agent on Griptape Cloud.


# Gradio chat callback: append the user's message to the history and clear the input box.
def user(user_message, history):
    history.append([user_message, None])
    return "", history


# Gradio chat callback: run the agent on the latest user message and stream the
# reply back one character at a time for a typing effect.
def bot(history):
    response = send_message(history[-1][0])
    history[-1][1] = ""
    for character in response:
        history[-1][1] += character
        time.sleep(0.005)
        yield history


def create_prompt_task(session_id: str, message: str) -> PromptTask:
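    """Build the PromptTask that restructures the user's message into a query."""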
    return PromptTask(
        f"""
            Re-structure the values to form a query from the user's questions: '{message}' and the input value from the conversation memory. Leave out attributes that aren't important to the user: 
        """,
    )


def build_talk_agent(session_id: str, message: str) -> Agent:
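    """Build the local agent that helps the user formulate a query, backed by cloud conversation memory."""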

    create_thread_id(session_id)

    ruleset = Ruleset(
        name="Local Gradio Agent",
        rules=[
            Rule(
                value="You are responsible for structuring a user's questions into a specific format for a query."
            ),
            Rule(
                value="""You ask the user follow-up questions to fill in missing information for:
                years experience, 
                location, 
                role, 
                skills, 
                expected salary,
                availability,
                past companies,
                past projects,
                show reel details
                """
            ),
            Rule(
                value="Return the current query structure and any questions to fill in missing information."
            ),
        ],
    )
    return Agent(
        conversation_memory=ConversationMemory(
            conversation_memory_driver=GriptapeCloudConversationMemoryDriver(
                thread_id=threads[session_id],
            )
        ),
        tasks=[create_prompt_task(session_id, message)],
        rulesets=[ruleset],
    )


# Creates an agent for each run.
# Conversation memory lives in Griptape Cloud, in a thread keyed by the Gradio session_hash.
def build_agent(session_id: str, message: str, kbs: str) -> Agent:

    create_thread_id(session_id)

    ruleset = Ruleset(
        name="Local Gradio Agent",
        rules=[
            Rule(
                value="You are responsible for structuring a user's questions into a query and then querying."
            ),
            Rule(
                value="Only return the result of the query, do not provide additional commentary."
            ),
            Rule(value="Only perform one task at a time."),
            Rule(
                value="Do not perform the query unless the user has confirmed they are done with formulating."
            ),
            Rule(value="Only perform the query as one string argument."),
            Rule(
                value="If the user says they want to start over, then you must delete the conversation memory file."
            ),
            Rule(
                value="Do not ever search conversation memory for a formulated query instead of querying. Query every time."
            ),
        ],
    )

    query_client = StructureRunTool(
        name="QueryResumeSearcher",
        description=f"""Use it to search for a candidate with the query. Add each item in this list as separate arguments:{kbs}. Do not add any other arguments.""",
        structure_run_driver=GriptapeCloudStructureRunDriver(
            structure_id=os.getenv("GT_STRUCTURE_ID"),
            api_key=os.getenv("GT_CLOUD_API_KEY"),
            structure_run_wait_time_interval=3,
            structure_run_max_wait_time_attempts=30,
        ),
        # structure_run_driver = LocalStructureRunDriver(
        #     create_structure=create_structure
        # )
    )

    talk_client = StructureRunTool(
        name="FormulateQueryFromUser",
        description="Used to formulate a query from the user's input.",
        structure_run_driver=LocalStructureRunDriver(
            create_structure=lambda: build_talk_agent(session_id, message),
        ),
    )
    return Agent(
        conversation_memory=ConversationMemory(
            conversation_memory_driver=GriptapeCloudConversationMemoryDriver(
                thread_id=threads[session_id],
            )
        ),
        tools=[talk_client, query_client],
        rulesets=[ruleset],
    )


def send_message(message: str, history=None, knowledge_bases=None, request: gr.Request = None) -> Any:
    """Build a per-session agent and run it on the user's message."""
    # Key conversation memory by the Gradio session hash; fall back to a fixed
    # id when no request object is available (e.g. when called directly).
    session_hash = request.session_hash if request else "local-session"
    agent = build_agent(session_hash, message, str(knowledge_bases or []))
    response = agent.run(message)
    return response.output.value


def send_message_call(message: str, history, knowledge_bases) -> Any:
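    """Call the Griptape Cloud Structure run API directly and poll for its output."""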

    structure_id = os.getenv("GT_STRUCTURE_ID")
    api_key = os.getenv("GT_CLOUD_API_KEY")
    structure_url = f"https://cloud.griptape.ai/api/structures/{structure_id}/runs"
    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
    payload = {"args": [message, *knowledge_bases]}
    response = requests.post(structure_url, headers=headers, json=payload)
    if response.status_code == 201:
        data = response.json()
        structure_run_id = data["structure_run_id"]
        output = poll_structure(structure_run_id, headers)
        output = output["output_task_output"]["value"]
        output += f" \n UTC Timestamp: {data['created_at']}\n Structure ID: {structure_id} \n Run ID: {structure_run_id}"
        return output
    else:
        data = response.json()
        return (
            f"Assistant Call Failed due to these errors: \n {','.join(data.get('errors', [response.text]))} "
        )


def poll_for_events(offset: int, structure_run_id: str, headers: dict):
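    """Fetch a page of events for a structure run from the Griptape Cloud events API."""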
    url = f"https://cloud.griptape.ai/api/structure-runs/{structure_run_id}/events"
    response = requests.get(
        url=url, headers=headers, params={"offset": offset, "limit": 100}
    )
    response.raise_for_status()

    return response


def poll_structure(structure_run_id: str, headers: dict):
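    """Poll run events until a FinishStructureRunEvent arrives and return its payload."""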
    response = poll_for_events(0, structure_run_id, headers)
    events = response.json()["events"]
    offset = response.json()["next_offset"]
    not_finished = True
    output = ""
    while not_finished:
        time.sleep(0.5)
        for event in events:
            if event["type"] == "FinishStructureRunEvent":
                not_finished = False
                output = dict(event["payload"])
                break
        response = poll_for_events(offset, structure_run_id, headers)
        response.raise_for_status()
        events = response.json()["events"]
        offset = response.json()["next_offset"]
    return output


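# Gradio UI: a knowledge-base selector feeding a ChatInterface that calls the cloud Structure.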
with gr.Blocks() as demo:
    knowledge_bases = gr.CheckboxGroup(
        label="Select Knowledge Bases",
        choices=["skills", "demographics", "linked_in", "showreels"],
    )
    chatbot = gr.ChatInterface(
        fn=send_message_call,
        chatbot=gr.Chatbot(height=300),
        additional_inputs=knowledge_bases,
    )
demo.launch(auth=(os.environ.get("GRADIO_USERNAME"), os.environ.get("GRADIO_PASSWORD")))
# demo.launch()

# Set it back to empty when a session is done
# Is there a better way?
threads = {}