import gradio as gr
import openai
import os

# Fetch your OpenAI API key from an environment variable
openai.api_key = os.getenv("OPENAI_API_KEY")
if openai.api_key is None:
    raise ValueError("OPENAI_API_KEY environment variable not set.")

def chat_with_gpt(system_message, user_message):
    # gpt-3.5-turbo is a chat model, so it is served by the ChatCompletion
    # endpoint and expects a list of role-tagged messages rather than a
    # single flat prompt string.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": system_message},
            {"role": "user", "content": user_message},
        ],
        temperature=0.5,
        max_tokens=500,
        frequency_penalty=0.0,
        presence_penalty=0.0,
    )
    return response.choices[0].message.content.strip()

# Define the interface
iface = gr.Interface(
    fn=chat_with_gpt,
    inputs=[
        gr.Textbox(lines=2, placeholder="System Message Here..."),
        gr.Textbox(lines=5, placeholder="Your Message Here..."),
    ],
    outputs="text",
    title="Chat with GPT-3.5",
    description="This Gradio app lets you chat with OpenAI's GPT-3.5 model. Enter a system message for initial context, and then chat as you would with a human.",
    theme="default",  # or "huggingface" for the Hugging Face theme
    allow_flagging="never",
).launch()
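The listing above uses the legacy module-level API from openai versions before 1.0. If the Space pins openai>=1.0, the same handler would be written against the explicit client interface instead; a minimal sketch under that assumption, keeping the same model, parameters, and OPENAI_API_KEY environment variable:

import os
from openai import OpenAI

# openai>=1.0 uses an explicit client object; it reads OPENAI_API_KEY from
# the environment by default, but passing it explicitly keeps the failure
# mode obvious if the variable is missing.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

def chat_with_gpt(system_message, user_message):
    # Chat models take a list of role-tagged messages rather than a flat prompt.
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": system_message},
            {"role": "user", "content": user_message},
        ],
        temperature=0.5,
        max_tokens=500,
    )
    return response.choices[0].message.content.strip()

When deploying on a Hugging Face Space, add OPENAI_API_KEY as a secret in the Space settings so it is exposed to the app as an environment variable at runtime, and list gradio and openai in requirements.txt so the Space builds with the versions your code expects.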