Spaces:
Paused
Paused
File size: 2,975 Bytes
9fdb387 61e7204 79f77bb 9fdb387 f02a829 61e7204 f02a829 61e7204 9fdb387 61e7204 79f77bb 61e7204 f02a829 79f77bb 61e7204 ac85bcd 61e7204 f02a829 61e7204 f02a829 ac85bcd 9fdb387 61e7204 a05cc1e 7dfd8ee 46e96d2 a05cc1e 0efcae6 9fdb387 a05cc1e 9fdb387 61e7204 9fdb387 a05cc1e 9fdb387 a05cc1e c75a093 9fdb387 61e7204 a05cc1e 9fdb387 a05cc1e 61e7204 a05cc1e 0efcae6 61e7204 a05cc1e 0efcae6 9fdb387 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 |
import os
import google.generativeai as genai
import gradio as gr
from dotenv import load_dotenv
# Load GEMINI_API_KEY (and any other secrets) from a local .env file.
load_dotenv()
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
# Experimental Gemini 1.5 Flash snapshot used for the chat session below.
model_name = "gemini-1.5-flash-exp-0827"
# Static UI copy rendered by the Gradio front end.
# NOTE(review): the emoji in these strings look mojibake-encoded — verify the
# file's encoding against the deployed app before "fixing" them.
TITLE = """<h1 align="center">๐ฎChat with Gemini 1.5๐ฅ</h1>"""
NOTICE = """
**Notices** ๐:
- This app is still in development
- Some features may not work as expected
"""
ABOUT = """
**Updates (2024-8-28)** ๐: Upgrade model to SOTA Gemini 1.5 Flash Experimental 0827
**Info** ๐:
- Model: Gemini 1.5 Flash Experimental 0827
- Chat with Gemini 1.5 Flash model with images and documents
"""
ERRORS = """
Known errors โ ๏ธ:
"""
FUTURE_IMPLEMENTATIONS = """
Future features ๐:
- Select other Gemini / Gemma models
- More tools such as web search
"""
genai.configure(api_key=GEMINI_API_KEY)

# Every harm category is fully unblocked; all four entries share the same
# threshold, so they are generated from one list of category names.
_UNBLOCKED_CATEGORIES = (
    "HARM_CATEGORY_HARASSMENT",
    "HARM_CATEGORY_HATE_SPEECH",
    "HARM_CATEGORY_SEXUALLY_EXPLICIT",
    "HARM_CATEGORY_DANGEROUS_CONTENT",
)

# Single module-level model instance used by the whole app.
model = genai.GenerativeModel(
    model_name,
    safety_settings=[
        {"category": category, "threshold": "BLOCK_NONE"}
        for category in _UNBLOCKED_CATEGORIES
    ],
    generation_config={
        "temperature": 1,
        "top_p": 0.95,
        "top_k": 64,
        "max_output_tokens": 8192,
        "response_mime_type": "text/plain",
    },
)

# One shared chat session holds the conversation state for all handlers.
chat = model.start_chat(history=[])
def clear_chat_history():
    """Reset the shared module-level chat session by emptying its history."""
    chat.history = []
def undo_chat():
    """Drop the most recent user/model exchange from the shared chat session.

    ``ChatSession.rewind()`` removes and returns the last (sent, received)
    pair; the original code bound that pair to unused locals, so the return
    value is simply discarded here.
    """
    try:
        chat.rewind()
    except IndexError:
        # Presumably raised when the history is empty (nothing to undo) —
        # treat that as a no-op instead of crashing the UI callback.
        pass
def transform_history(history):
    """Convert Gradio-style ``(user, model)`` message pairs into the
    ``{"role": ..., "parts": [...]}`` dict format used by the Gemini API.

    Each input pair expands to two entries: one ``user`` role followed by
    one ``model`` role, preserving conversation order.
    """
    transformed = []
    for user_turn, model_turn in history:
        transformed.extend(
            (
                {"role": "user", "parts": [user_turn]},
                {"role": "model", "parts": [model_turn]},
            )
        )
    return transformed
def chatbot_stable(message, history):
    """Handle one multimodal chat turn against the shared Gemini session.

    Args:
        message: Gradio multimodal payload dict with "text" and "files" keys.
        history: Gradio chat history; unused — conversation state lives in
            the module-level ``chat`` session.

    Returns:
        The model's full response text for this turn.
    """
    message_text = message["text"]
    message_files = message["files"]
    # Upload attachments to the Gemini Files API. Entries may be dicts with
    # a "path" key (older Gradio MultimodalTextbox payloads) or plain path
    # strings (newer Gradio) — the original crashed on the latter.
    uploaded_files = []
    for file_entry in message_files or []:
        file_path = file_entry["path"] if isinstance(file_entry, dict) else file_entry
        uploaded_files.append(genai.upload_file(path=file_path))
    message_content = [message_text] + uploaded_files
    # The original requested stream=True and then immediately resolve()d the
    # full response, so nothing was ever streamed to the UI; a plain blocking
    # call is equivalent and simpler.
    response = chat.send_message(message_content)
    return response.text
# Chat display widget: fixed height, like/copy/share buttons enabled, model
# messages rendered as markdown with a Gemini avatar (user avatar left unset).
gemini_chatbot_interface = gr.Chatbot(
    height=400,
    likeable=True,
    avatar_images=(
        None,
        "https://media.roboflow.com/spaces/gemini-icon.png"
    ),
    show_copy_button=True,
    show_share_button=True,
    render_markdown=True
)
# Button that clears the chatbot component's displayed messages.
# NOTE(review): this clears the UI only — the server-side ``chat`` session
# history is reset separately by clear_chat_history(); confirm the two are
# actually wired together somewhere, as no event handler is visible here.
clear_chat_button = gr.ClearButton(
    components=[gemini_chatbot_interface],
    value="๐๏ธ Clear"
)
# Button intended to undo the last exchange (see undo_chat above).
undo_chat_button = gr.Button(
    value="โฉ๏ธ Undo"
)
# Assemble the multimodal ChatInterface from the handler and widgets above.
# Fix: the source ended with a stray " |" after the closing parenthesis
# (an extraction artifact) that made the file a syntax error.
# NOTE(review): no ``.launch()`` call is visible in this chunk — confirm the
# app is launched elsewhere (or by the hosting platform).
gemini_chatbot = gr.ChatInterface(
    fn=chatbot_stable,
    chatbot=gemini_chatbot_interface,
    multimodal=True,
    clear_btn=clear_chat_button,
    undo_btn=undo_chat_button
)