File size: 1,734 Bytes
2e9f49b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import gradio as gr
from transformers import pipeline, set_seed

# Initialize the text-generation pipeline.
# NOTE: 'gpt-3.5-turbo' is an OpenAI API model, not a Hugging Face Hub
# checkpoint — pipeline() cannot download it and raises at load time.
# Use an open causal-LM checkpoint from the Hub instead (swap 'gpt2' for
# any other Hub model id you have access to).
# `token` replaces the deprecated `use_auth_token` argument; a token is
# only required for gated/private models.
chat = pipeline('text-generation', model='gpt2', token='Your_Hugging_Face_API_Token_Here')

def chat_with_chatgpt(user_message, system_message, chat_history):
    """Generate one chat turn and append it to the running transcript.

    Args:
        user_message: The user's latest message.
        system_message: Instruction text prepended once, at the start of
            the conversation.
        chat_history: The transcript accumulated so far; ``None`` on the
            very first turn (gr.State's initial value).

    Returns:
        A ``(display_text, new_state)`` pair — the same updated transcript
        twice, once for the visible output and once for the state component.
    """
    set_seed(42)  # Optional: for consistent results

    # BUG FIX: gr.State passes None on the first turn; `None` would break
    # the `in` test below (TypeError) and leak "None" into the f-strings.
    chat_history = chat_history or ""

    # Combine system message, chat history, and current user message for
    # context; include the system message only at the beginning.
    if system_message and system_message not in chat_history:
        input_text = f"{system_message}\n{chat_history} You: {user_message}"
    else:
        input_text = f"{chat_history} You: {user_message}"

    # Generate a continuation. max_length counts prompt + new tokens, so a
    # very long history can exhaust the budget — TODO consider max_new_tokens.
    response = chat(input_text, max_length=1000)
    generated_text = response[0]['generated_text']

    # The pipeline echoes the prompt; keep only the newly generated tail.
    new_response = generated_text[len(input_text):].strip()

    # Append this exchange to the transcript.
    new_chat_history = f"{chat_history} You: {user_message}\nChatGPT: {new_response}\n"

    return new_chat_history, new_chat_history  # Return updated chat history for both display and state

# Create the Gradio interface.
# NOTE(review): the gr.inputs.*/gr.outputs.* namespaces were removed in
# Gradio 3.x+ — components are constructed directly (gr.Textbox). gr.State
# accepts no `label`/`visible` kwargs; the hidden-Textbox trick for carrying
# state is replaced by pairing a gr.State input with a gr.State output.
iface = gr.Interface(
    fn=chat_with_chatgpt,
    inputs=[
        gr.Textbox(label="Your Message"),
        gr.Textbox(label="System Message (Enter only before starting the chat)", lines=2),
        gr.State(value=""),  # conversation transcript carried between calls
    ],
    outputs=[
        gr.Textbox(label="Chat History"),
        gr.State(),  # receives the updated transcript for the next turn
    ],
    title="Chat with ChatGPT 3.5",
    description="Start with a system message and then continue chatting like in ChatGPT.",
)

if __name__ == "__main__":
    iface.launch()