import os

import google.generativeai as genai
import gradio as gr
from dotenv import load_dotenv

load_dotenv()

GEMINI_API_KEY_NIGHTLY = os.getenv("GEMINI_API_KEY_NIGHTLY")
model_name = "gemini-1.5-flash"
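# Assumed .env layout read by load_dotenv() above (the variable name matches the os.getenv call):
#   GEMINI_API_KEY_NIGHTLY=<your Google AI Studio API key>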

TITLE_NIGHTLY = """<h1 align="center">🎮Chat with Gemini 1.5🔥 -- Nightly</h1>"""
NOTICE_NIGHTLY = """
Notices 📜:
- This app is still in development (extremely unstable)
- Some features may not work as expected
- Currently the chatbot supports only text and images
"""
ERROR_NIGHTLY = """
Known errors ⚠️:
- Errors can occur when submitting messages that include uploaded files.
"""

def upload_to_gemini(path, mime_type=None):
    """Upload a local file to the Gemini Files API and return the resulting file handle."""
    file = genai.upload_file(path, mime_type=mime_type)
    print(f"Uploaded file '{file.display_name}' as: {file.uri}")
    return file

def transform_history(history):
    """Convert Gradio's (user, model) history pairs into the Gemini chat history format."""
    new_history = []
    for user_msg, model_msg in history:
        # File-upload turns can arrive as tuples and pending model replies as None,
        # so only plain text turns are replayed into the Gemini history.
        if isinstance(user_msg, str):
            new_history.append({"role": "user", "parts": [{"text": user_msg}]})
        if isinstance(model_msg, str):
            new_history.append({"role": "model", "parts": [{"text": model_msg}]})
    return new_history

def chatbot_stable(message, history):
    """Handle one multimodal Gradio turn: upload any attached files, then query Gemini."""
    message_text = message["text"]
    message_files = message["files"]
    print("Message text:", message_text)
    print("Message files:", message_files)
    if message_files:
        # Upload each attachment to the Gemini Files API and prepend the text part.
        uploaded_files = [upload_to_gemini(file_path["path"]) for file_path in message_files]
        message_content = [{"text": message_text}] + uploaded_files
    else:
        message_content = [{"text": message_text}]
    genai.configure(api_key=GEMINI_API_KEY_NIGHTLY)
    model = genai.GenerativeModel(
        model_name,
        # safety_settings=[
        #     {
        #         "category": "HARM_CATEGORY_HARASSMENT",
        #         "threshold": "BLOCK_NONE"
        #     },
        #     {
        #         "category": "HARM_CATEGORY_HATE_SPEECH",
        #         "threshold": "BLOCK_NONE"
        #     },
        #     {
        #         "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
        #         "threshold": "BLOCK_NONE"
        #     },
        #     {
        #         "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
        #         "threshold": "BLOCK_NONE"
        #     },
        # ],
        generation_config={
            "temperature": 1,
            "top_p": 0.95,
            "top_k": 64,
            "max_output_tokens": 8192,
            "response_mime_type": "text/plain",
        }
    )

    # Keep the chat session in a module-level handle, replay the prior turns,
    # then send the new message (text plus any uploaded files).
    global chat
    chat = model.start_chat(history=[])
    chat.history = transform_history(history)
    response = chat.send_message(message_content)
    response.resolve()  # ensure the response is fully materialized before reading .text

    return response.text

gemini_chatbot_interface_stable = gr.Chatbot(
    height=500,
    likeable=True,
    avatar_images=(
        None,
        "https://media.roboflow.com/spaces/gemini-icon.png"
    ),
    show_copy_button=True,
    show_share_button=True,
    render_markdown=True
)

gemini_chatbot_stable = gr.ChatInterface(
    fn=chatbot_stable,
    chatbot=gemini_chatbot_interface_stable,
    title="Gemini 1.5 Chatbot",
    multimodal=True
)
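
# This module defines the interface but presumably gets mounted by the Space's main app file;
# the guarded launch below is only an assumed convenience for running the file on its own.
if __name__ == "__main__":
    gemini_chatbot_stable.launch()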