# Math Tutor: a Gradio chat app backed by the OpenAI Assistants API (beta threads/runs).
import os
import time

import gradio as gr
from openai import OpenAI
client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
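
# Create the tutor assistant once at startup; each chat session gets its own thread (see init_assistant).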
assistant2 = client.beta.assistants.create(
    name="Math Tutor",
    instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
    model="gpt-4-1106-preview",
)
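
# Thread for the current conversation; created lazily when the first message of a chat arrives.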
thread = None
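
# Debug helper: print a label followed by the raw API object.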
def show_json(label, obj):
    print(f"### {label}")
    print(obj)
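
# Reset the conversation: create a fresh thread for a new chat session.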
def init_assistant():
    global thread
    thread = client.beta.threads.create()
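
# Poll the run until it leaves the queued/in_progress states, then return the finished run.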
def wait_on_run(client, run, thread):
    while run.status == "queued" or run.status == "in_progress":
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id,
        )
        time.sleep(0.25)
    return run
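
# Collect all text content values from a messages list object.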
def extract_content_value(data):
    content_values = []
    for item in data.data:
        for content in item.content:
            if content.type == 'text':
                content_values.append(content.text.value)
    return content_values
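
# Gradio chat callback: forwards the user's message to the assistant's thread and returns the reply.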
def chat(message, history):
    # Gradio passes the history as (user, assistant) pairs; an empty history means a new conversation.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})
    if len(history_openai_format) == 1:
        # First message of this conversation: start a new thread.
        init_assistant()
    show_json("assistant", assistant2)
    show_json("thread", thread)

    # Append the user's message to the thread and run the assistant on it.
    message = client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content=message,
    )
    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant2.id,
    )
    run = wait_on_run(client, run, thread)

    # Messages are listed newest first by default, so index 0 holds the assistant's latest reply.
    messages = client.beta.threads.messages.list(thread_id=thread.id)
    return extract_content_value(messages)[0]
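
# Build and launch the Gradio chat UI.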
gr.ChatInterface(
    chat,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Ask Math Tutor any question", container=False, scale=7),
    title="Math Tutor",
    description="Ask the Math Tutor a question; answers are kept to a sentence or less.",
    theme="soft",
    examples=["I need to solve the equation `3x + 13 = 11`. Can you help me?"],
    cache_examples=False,
    retry_btn=None,
    undo_btn=None,
    clear_btn="Clear",
).launch()