import os

import gradio as gr
from langchain.schema import AIMessage, HumanMessage
from langchain_openai import ChatOpenAI
from pydantic import BaseModel, SecretStr


class APIKey(BaseModel):
    """Wraps the user-supplied OpenAI key so it is only handled as a SecretStr."""

    api_key: SecretStr


def set_api_key(api_key: SecretStr):
    # Export the key for the OpenAI client and build the chat model.
    os.environ["OPENAI_API_KEY"] = api_key.get_secret_value()
    llm = ChatOpenAI(temperature=1.0, model="gpt-3.5-turbo-0125")
    return llm


def predict(message, chat_history, api_key):
    # Validate the key, then convert Gradio's (human, ai) tuple history
    # into LangChain messages before invoking the model.
    api_key_model = APIKey(api_key=api_key)
    llm = set_api_key(api_key_model.api_key)

    history_langchain_format = []
    for human, ai in chat_history:
        history_langchain_format.append(HumanMessage(content=human))
        history_langchain_format.append(AIMessage(content=ai))
    history_langchain_format.append(HumanMessage(content=message))

    openai_response = llm.invoke(history_langchain_format)
    chat_history.append((message, openai_response.content))
    # Clear the input box and return the updated history to the Chatbot.
    return "", chat_history


with gr.Blocks() as demo:
    with gr.Row():
        api_key = gr.Textbox(
            label="Please enter your OpenAI API key",
            type="password",
            elem_id="lets-chat-langchain-oakey",
        )
    with gr.Row():
        msg = gr.Textbox(label="Please enter your message")
    with gr.Row():
        chatbot = gr.Chatbot(label="OpenAI Chatbot")
    with gr.Row():
        clear = gr.ClearButton([msg, chatbot])

    def respond(message, chat_history, api_key):
        return predict(message, chat_history, api_key)

    # Submitting either the API key box or the message box sends a chat turn.
    api_key.submit(respond, [msg, chatbot, api_key], [msg, chatbot])
    msg.submit(respond, [msg, chatbot, api_key], [msg, chatbot])

demo.launch()