# Google-AI-Playground / Tabs / Gemini_Chabot_Nightly.py
import os

import google.generativeai as genai
import gradio as gr
from dotenv import load_dotenv

# Load the Nightly-channel API key from a local .env file.
load_dotenv()
GEMINI_API_KEY_NIGHTLY = os.getenv("GEMINI_API_KEY_NIGHTLY")

model_name = "gemini-1.5-flash"
TITLE_NIGHTLY = """<h1 align="center">🎮Chat with Gemini 1.5🔥 -- Nightly</h1>"""
NOTICE_NIGHTLY = """
Notices 📜:
- This app is still in development (extremely unstable)
- Some features may not work as expected
- Currently the chatbot only supports text and images
"""
ERROR_NIGHTLY = """
Known errors ⚠️:
- Error when submitting messages that include uploaded files.
"""
def upload_to_gemini(path, mime_type=None):
    """Upload a local file to the Gemini Files API and return the resulting File handle."""
    file = genai.upload_file(path, mime_type=mime_type)
print(f"Uploaded file '{file.display_name}' as: {file.uri}")
return file
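# Illustrative usage (the file name "demo.png" is only an example, not part of the app):
#   demo_file = upload_to_gemini("demo.png", mime_type="image/png")
#   # The returned File object can then be passed as a content part in chat messages below.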
def transform_history(history):
    """Convert Gradio's list of (user, model) message tuples into Gemini chat history format."""
    new_history = []
for user_msg, model_msg in history:
new_history.append({"role": "user", "parts": [{"text": user_msg}]})
new_history.append({"role": "model", "parts": [{"text": model_msg}]})
return new_history
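# For reference, a single Gradio turn [("Hi", "Hello!")] becomes:
#   [{"role": "user", "parts": [{"text": "Hi"}]},
#    {"role": "model", "parts": [{"text": "Hello!"}]}]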
def chatbot_stable(message, history):
    """Handle one multimodal chat turn: upload any attached files, then query Gemini."""
    message_text = message["text"]
    message_files = message["files"]
    print("Message text:", message_text)
    print("Message files:", message_files)
    if message_files:
        # Upload each attachment so it can be sent to Gemini alongside the text prompt.
        image_uris = [upload_to_gemini(file_path["path"]) for file_path in message_files]
        message_content = [{"text": message_text}] + image_uris
else:
message_content = {"text": message_text}
genai.configure(api_key=GEMINI_API_KEY_NIGHTLY)
model = genai.GenerativeModel(
model_name,
# safety_settings=[
# {
# "category": "HARM_CATEGORY_HARASSMENT",
# "threshold": "BLOCK_NONE"
# },
# {
# "category": "HARM_CATEGORY_HATE_SPEECH",
# "threshold": "BLOCK_NONE"
# },
# {
# "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
# "threshold": "BLOCK_NONE"
# },
# {
# "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
# "threshold": "BLOCK_NONE"
# },
# ],
generation_config={
"temperature": 1,
"top_p": 0.95,
"top_k": 64,
"max_output_tokens": 8192,
"response_mime_type": "text/plain",
}
)
    # Rebuild the Gemini chat session from Gradio's history on every turn.
    global chat
    chat = model.start_chat(history=[])
    chat.history = transform_history(history)
response = chat.send_message(message_content)
response.resolve()
return response.text
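# Example call as Gradio's multimodal ChatInterface would make it (values are illustrative;
# the {"text": ..., "files": ...} shape is what multimodal=True passes to the handler):
#   reply = chatbot_stable({"text": "Hello, Gemini!", "files": []}, [])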
gemini_chatbot_interface_stable = gr.Chatbot(
height=500,
likeable=True,
avatar_images=(
None,
"https://media.roboflow.com/spaces/gemini-icon.png"
),
show_copy_button=True,
show_share_button=True,
render_markdown=True
)
gemini_chatbot_stable = gr.ChatInterface(
fn=chatbot_stable,
chatbot=gemini_chatbot_interface_stable,
title="Gemini 1.5 Chatbot",
multimodal=True
)
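# Minimal sketch for trying this tab on its own (assumption: run outside the main
# Playground app, which normally embeds these components); requires GEMINI_API_KEY_NIGHTLY.
if __name__ == "__main__":
    gemini_chatbot_stable.launch()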