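# Nightly build of the Gemini chat Space: a multimodal gr.ChatInterface backed by
# gemini-1.5-flash through the google-generativeai SDK. The nightly API key is read
# from the environment (e.g. a local .env file loaded via load_dotenv below).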
import os
import google.generativeai as genai
import gradio as gr
from dotenv import load_dotenv

load_dotenv()

GEMINI_API_KEY_NIGHTLY = os.getenv("GEMINI_API_KEY_NIGHTLY")
model_name = "gemini-1.5-flash"

TITLE_NIGHTLY = """<h1 align="center">🎮Chat with Gemini 1.5 Flash🔥 - Nightly</h1>"""
NOTICE_NIGHTLY = """
Notices 📜:
- This app is still in development (extremely unstable)
- Some features may not work as expected
- The chatbot supports text, documents, and images
"""
ERROR_NIGHTLY = """
Known errors ⚠️:
- Error when submitting messages with uploaded files. (**Fixed**)
"""

genai.configure(api_key=GEMINI_API_KEY_NIGHTLY)
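# All four safety categories are set to BLOCK_NONE below, so the SDK-side content
# filters are effectively disabled for this nightly build.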
model = genai.GenerativeModel(
    model_name,
    safety_settings=[
        {
            "category": "HARM_CATEGORY_HARASSMENT",
            "threshold": "BLOCK_NONE"
        },
        {
            "category": "HARM_CATEGORY_HATE_SPEECH",
            "threshold": "BLOCK_NONE"
        },
        {
            "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
            "threshold": "BLOCK_NONE"
        },
        {
            "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
            "threshold": "BLOCK_NONE"
        }
    ],
    generation_config={
        "temperature": 1,
        "top_p": 0.95,
        "top_k": 64,
        "max_output_tokens": 8192,
        "response_mime_type": "text/plain",
    }
)
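# A single module-level chat session holds the conversation history, so every call to
# chatbot_nightly() shares (and appends to) the same history.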
chat = model.start_chat(history=[])

def clear_chat_history():
    chat.history = []

def transform_history(history):
    new_history = []
    for user_msg, model_msg in history:
        new_history.append({"role": "user", "parts": [user_msg]})
        new_history.append({"role": "model", "parts": [model_msg]})
    return new_history
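
# Neither clear_chat_history() nor transform_history() is referenced elsewhere in this
# file; presumably the surrounding app wires them up. A hedged sketch of one possible
# use of transform_history, re-seeding the SDK chat from Gradio's (user, model) tuple
# history at the start of each turn (an assumption, not what chatbot_nightly does):
#
#     def chatbot_nightly(message, history):
#         chat.history = transform_history(history)
#         ...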

def chatbot_nightly(message, history):
    message_text = message["text"]
    message_files = message["files"]
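    # `message` is the dict produced by the multimodal textbox ({"text": ..., "files": [...]});
    # uploaded files are pushed to the Gemini Files API and sent alongside the text.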
    if message_files:
        image_uris = [genai.upload_file(path=file_path["path"]) for file_path in message_files]
        message_content = [message_text] + image_uris
    else:
        message_content = [message_text]

    response = chat.send_message(message_content, stream=True)
    response.resolve()

    return response.text
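
# The response is requested with stream=True but resolve() is called right away, so the
# UI receives the whole reply in one piece. A hedged alternative (not the original
# behaviour) would be to yield partial text so gr.ChatInterface renders it incrementally:
#
#     partial = ""
#     for chunk in chat.send_message(message_content, stream=True):
#         partial += chunk.text
#         yield partial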

gemini_chatbot_interface_nightly = gr.Chatbot(
    height=400,
    likeable=True,
    avatar_images=(
        None,
        "https://media.roboflow.com/spaces/gemini-icon.png"
    ),
    show_copy_button=True,
    show_share_button=True,
    render_markdown=True
)

clear_chat_button = gr.ClearButton(
    components=[gemini_chatbot_interface_nightly],
    value="🗑️ Clear"
)

gemini_chatbot_nightly = gr.ChatInterface(
    fn=chatbot_nightly,
    chatbot=gemini_chatbot_interface_nightly,
    multimodal=True,
    clear_btn=clear_chat_button
)
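
# TITLE_NIGHTLY, NOTICE_NIGHTLY, and ERROR_NIGHTLY are defined above but never used in
# this module; presumably a parent app imports this file and renders them around the
# interface. A minimal sketch for running the nightly interface on its own (an
# assumption, not part of the original file):
if __name__ == "__main__":
    gemini_chatbot_nightly.launch()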