import gradio as gr
from typing import List, Optional, Tuple
from langchain.chains import LLMChain
from langchain.llms import HuggingFaceHub
from langchain.memory import ConversationBufferWindowMemory
from langchain.prompts import PromptTemplate

# Set up the language model chain
template = (
    "Instructions: You are SplitticAI. You answer questions exactly like people ask them. "
    "You were made by SplitticHost. You impersonate yourself as an AI chatbot.\n\n"
    "{history}\nHuman: {human_input}\nSplitticAI:"
)
prompt = PromptTemplate(input_variables=["history", "human_input"], template=template)
llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature": 1e-10})
llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
    memory=ConversationBufferWindowMemory(k=2),
)

# Define the chat function
def chat(
    inp: str, history: Optional[List[Tuple[str, str]]], chain: Optional[LLMChain]
):
    history = history or []
    output = llm_chain.predict(human_input=inp)
    history.append((inp, output))
    return history, history

# Set up the Gradio interface
block = gr.Blocks(css=".gradio-container {background-color: lightgray}")

with block:
    with gr.Row():
        gr.Markdown("<h3><center>SplitticAI Chatbot</center></h3>")

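    # Chat transcript; displays the (user, bot) message pairs returned by chat()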
    chatbot = gr.Chatbot()

    with gr.Row():
        message = gr.Textbox(
            label="What's your question?",
            placeholder="What would you like to ask me?",
            lines=1,
        )
        submit = gr.Button(value="Send", variant="secondary").style(full_width=False)

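    # Clickable example questions that populate the message box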
    gr.Examples(
        examples=[
            "What is artificial intelligence?",
            "How does SplitticAI work?",
            "Can you tell me a joke?",
        ],
        inputs=message,
    )

    gr.HTML("Ask SplitticAI anything and get an answer!")

    gr.HTML(
        "<center>Powered by SplitticHost</center>"
    )

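    # Per-session state: the chat history and an (unused here) slot for a chain/agent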
    state = gr.State()
    agent_state = gr.State()

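    # Wire both the Send button and pressing Enter in the textbox to chat()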
    submit.click(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])
    message.submit(chat, inputs=[message, state, agent_state], outputs=[chatbot, state])

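# Start the Gradio app; debug=True prints errors and tracebacks to the console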
block.launch(debug=True)