Tobias Geisler committed
Commit 2843934 · 1 Parent(s): e873f8b

update to use .env and gpt-3.5-turbo

Files changed (2)
  1. app.py +28 -38
  2. requirements.txt +3 -2
app.py CHANGED
@@ -1,45 +1,35 @@
 import gradio as gr
-from transformers import pipeline, set_seed
+import openai
+import os
 
-# Initialize the chat model pipeline
-chat = pipeline('text-generation', model='gpt-3.5-turbo', use_auth_token='Your_Hugging_Face_API_Token_Here')
+# Fetch your OpenAI API key from an environment variable
+openai.api_key = os.getenv("OPENAI_API_KEY")
 
-def chat_with_chatgpt(user_message, system_message, chat_history):
-    set_seed(42) # Optional: for consistent results
-
-    # Combine system message, chat history, and current user message for context
-    if system_message not in chat_history: # Include system message only at the beginning
-        input_text = f"{system_message}\n{chat_history} You: {user_message}"
-    else:
-        input_text = f"{chat_history} You: {user_message}"
-
-    # Generate response from ChatGPT
-    response = chat(input_text, max_length=1000)
-    generated_text = response[0]['generated_text']
-
-    # Extract only ChatGPT's latest response
-    new_response = generated_text[len(input_text):].strip()
-
-    # Update chat history
-    new_chat_history = f"{chat_history} You: {user_message}\nChatGPT: {new_response}\n"
-
-    return new_chat_history, new_chat_history # Return updated chat history for both display and state
+if openai.api_key is None:
+    raise ValueError("OPENAI_API_KEY environment variable not set.")
 
-# Create the Gradio interface
+def chat_with_gpt(system_message, user_message):
+    conversation = f"System: {system_message}\nUser: {user_message}"
+    response = openai.Completion.create(
+        model="gpt-3.5-turbo",
+        prompt=conversation,
+        temperature=0.5,
+        max_tokens=500,
+        frequency_penalty=0.0,
+        presence_penalty=0.0,
+    )
+    return response.choices[0].text.strip()
+
+# Define the interface
 iface = gr.Interface(
-    fn=chat_with_chatgpt,
+    fn=chat_with_gpt,
     inputs=[
-        gr.inputs.Textbox(label="Your Message"),
-        gr.inputs.Textbox(label="System Message (Enter only before starting the chat)", lines=2),
-        gr.State(label="Chat History")
-    ],
-    outputs=[
-        gr.outputs.Textbox(label="Chat History"),
-        gr.outputs.Textbox(label="New Chat History", visible=False)
+        gr.inputs.Textbox(lines=2, placeholder="System Message Here..."),
+        gr.inputs.Textbox(lines=5, placeholder="Your Message Here..."),
     ],
-    title="Chat with ChatGPT 3.5",
-    description="Start with a system message and then continue chatting like in ChatGPT.",
-)
-
-if __name__ == "__main__":
-    iface.launch()
+    outputs="text",
+    title="Chat with GPT-3.5",
+    description="This Gradio app lets you chat with OpenAI's GPT-3.5 model. Enter a system message for initial context, and then chat as you would with a human.",
+    theme="default", # or "huggingface" for Hugging Face theme
+    allow_flagging="never",
+).launch()
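
Note: as committed, chat_with_gpt sends gpt-3.5-turbo to the legacy openai.Completion.create endpoint, but gpt-3.5-turbo is a chat model served by the chat completions endpoint, so that call is normally rejected. Also, while the commit message mentions .env, the new code only reads OPENAI_API_KEY from the process environment; loading a .env file would typically need python-dotenv and a load_dotenv() call, which this commit does not add. Below is a minimal sketch of the same function against the chat endpoint of the pre-1.0 openai SDK; the parameter values are illustrative, not part of the commit. The gr.inputs.* shortcuts used above were later removed in Gradio 4.x in favour of gr.Textbox(...), so an unpinned gradio requirement may also need attention.

import openai

def chat_with_gpt(system_message, user_message):
    # Chat models such as gpt-3.5-turbo take a list of role-tagged messages
    # rather than a single prompt string.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": system_message},
            {"role": "user", "content": user_message},
        ],
        temperature=0.5,
        max_tokens=500,
    )
    # Return the assistant's reply text only.
    return response.choices[0].message["content"].strip()
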
requirements.txt CHANGED
@@ -1,2 +1,3 @@
-gradio
-transformers
+os
+openai
+gradio
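
Note: "os" is part of the Python standard library, not a pip-installable PyPI package, so installing from this requirements.txt will typically fail on that entry. A plausible replacement under this commit's setup (python-dotenv only if a .env file is actually loaded; entries illustrative, not part of the commit):

openai
gradio
python-dotenv
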