randydev committed on
Commit 7466b57 · verified · 1 Parent(s): 3ddfc83

Upload gemini.py

Files changed (1)
  1. akn/Gemini/gemini.py +264 -0
akn/Gemini/gemini.py ADDED
@@ -0,0 +1,264 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2020-2024 (c) Randy W @xtdevs, @xtsea
#
# from : https://github.com/TeamKillerX
# Channel : @RendyProjects
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import requests
import time
import json
import asyncio
import io
import os
import re
from PIL import Image

from pyrogram import *
from pyrogram import enums
from pyrogram import Client, filters
from pyrogram.types import *
from pyrogram.errors import *
from RyuzakiLib import FaceAI, FullStackDev, GeminiLatest, RendyDevChat
from config import *

from akn.utils.database import db
from akn.utils.logger import LOGS

import google.generativeai as genai
from google.api_core.exceptions import InvalidArgument

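# Annotation: GOOGLE_API_KEY and custom_loading are not defined in this module;
# they are assumed to be provided by the star import from config above.

# Helper that deletes a previously uploaded file from the Gemini Files API with
# a raw REST DELETE call; returns the response body, or None on a non-200 status.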
async def geni_files_delete(name: str):
    url = f"https://generativelanguage.googleapis.com/v1beta/{name}"
    params = {"key": GOOGLE_API_KEY}
    response = requests.delete(url, params=params)
    if response.status_code != 200:
        return None
    return response.text

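# Main chatbot handler: accepts incoming private messages (text, photo, video,
# audio, voice) from real users (no bots, no via_bot, no forwards) and routes
# each media type to the matching Gemini flow below.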
@Client.on_message(
    filters.incoming
    & (
        filters.text
        | filters.photo
        | filters.video
        | filters.audio
        | filters.voice
        | filters.regex(r"\b(Randy|Rendi)\b(.*)", flags=re.IGNORECASE)
    )
    & filters.private
    & ~filters.bot
    & ~filters.via_bot
    & ~filters.forwarded,
    group=2,
)
async def chatbot_talk(client: Client, message: Message):
    genai.configure(api_key=GOOGLE_API_KEY)
    if message.photo:
        await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_PHOTO)
        await asyncio.sleep(1.5)
        file_path = await message.download()
        caption = message.caption or "What's this?"
        x = GeminiLatest(api_keys=GOOGLE_API_KEY)
        if client.me.is_premium:
            ai_reply = await message.reply_text(f"{custom_loading}Processing...")
        else:
            ai_reply = await message.reply_text(f"Processing...")
        try:
            await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
            await asyncio.sleep(1.5)
            backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
            backup_chat.append({"role": "user", "parts": [{"text": caption}]})
            response_reads = x.get_response_image(caption, file_path)
            if len(response_reads) > 4096:
                with open("chat.txt", "w+", encoding="utf8") as out_file:
                    out_file.write(response_reads)
                await message.reply_document(
                    document="chat.txt",
                    disable_notification=True
                )
                await ai_reply.delete()
                os.remove("chat.txt")
            else:
                await ai_reply.edit_text(response_reads)
            backup_chat.append({"role": "model", "parts": [{"text": response_reads}]})
            await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
            await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
            os.remove(file_path)
            return
        except InvalidArgument as e:
            return await ai_reply.edit_text(f"Error: {e}")
        except Exception as e:
            return await ai_reply.edit_text(f"Error: {e}")

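    # Audio/voice flow: download the file, push it to the Gemini Files API with
    # genai.upload_file(), poll until it leaves the PROCESSING state, then send
    # it to gemini-1.5-flash together with the caption.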
    if message.audio or message.voice:
        await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_AUDIO)
        await asyncio.sleep(1.5)
        if client.me.is_premium:
            ai_reply = await message.reply_text(f"{custom_loading}Processing...")
        else:
            ai_reply = await message.reply_text(f"Processing...")
        if message.audio:
            audio_file_name = await message.download()
        if message.voice:
            audio_file_name = await message.download()
        caption = message.caption or "What's this?"
        model = genai.GenerativeModel(
            model_name="gemini-1.5-flash",
            safety_settings={
                genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
                genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
                genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
                genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
            }
        )
        backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
        backup_chat.append({"role": "user", "parts": [{"text": caption}]})
        if client.me.is_premium:
            await ai_reply.edit_text(f"{custom_loading}Uploading file..")
        else:
            await ai_reply.edit_text("Uploading file..")
        audio_file = genai.upload_file(path=audio_file_name)
        while audio_file.state.name == "PROCESSING":
            await asyncio.sleep(10)
            audio_file = genai.get_file(audio_file.name)
        if audio_file.state.name == "FAILED":
            return await ai_reply.edit_text(f"Error: {audio_file.state.name}")
        try:
            await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
            await asyncio.sleep(1.5)
            response = model.generate_content(
                [audio_file, caption],
                request_options={"timeout": 600}
            )
            if len(response.text) > 4096:
                with open("chat.txt", "w+", encoding="utf8") as out_file:
                    out_file.write(response.text)
                await message.reply_document(
                    document="chat.txt",
                    disable_notification=True
                )
                await ai_reply.delete()
                os.remove("chat.txt")
            else:
                await ai_reply.edit_text(response.text)
            backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
            await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
            await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
            audio_file.delete()
            os.remove(audio_file_name)
            return
        except InvalidArgument as e:
            return await ai_reply.edit_text(f"Error: {e}")
        except Exception as e:
            return await ai_reply.edit_text(f"Error: {e}")

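    # Video flow: same upload-and-poll pattern as the audio branch, but the file
    # is downloaded to a fixed "newvideo.mp4" name and gemini-1.5-pro is used.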
    if message.video:
        await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_VIDEO)
        await asyncio.sleep(1.5)
        if client.me.is_premium:
            ai_reply = await message.reply_text(f"{custom_loading}Processing...")
        else:
            ai_reply = await message.reply_text(f"Processing...")
        video_file_name = await message.download(file_name="newvideo.mp4")
        caption = message.caption or "What's this?"
        model = genai.GenerativeModel(
            model_name="gemini-1.5-pro",
            safety_settings={
                genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
                genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
                genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
                genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
            }
        )
        backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
        backup_chat.append({"role": "user", "parts": [{"text": caption}]})
        if client.me.is_premium:
            await ai_reply.edit_text(f"{custom_loading}Uploading file..")
        else:
            await ai_reply.edit_text("Uploading file..")
        video_file = genai.upload_file(path=video_file_name)
        while video_file.state.name == "PROCESSING":
            await asyncio.sleep(10)
            video_file = genai.get_file(video_file.name)
        if video_file.state.name == "FAILED":
            return await ai_reply.edit_text(f"Error: {video_file.state.name}")
        try:
            await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
            await asyncio.sleep(1.5)
            response = model.generate_content(
                [video_file, caption],
                request_options={"timeout": 600}
            )
            if len(response.text) > 4096:
                with open("chat.txt", "w+", encoding="utf8") as out_file:
                    out_file.write(response.text)
                await message.reply_document(
                    document="chat.txt",
                    disable_notification=True
                )
                await ai_reply.delete()
                os.remove("chat.txt")
            else:
                await ai_reply.edit_text(response.text)
            backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
            await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
            await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
            video_file.delete()
            os.remove(video_file_name)
            return
        except InvalidArgument as e:
            return await ai_reply.edit_text(f"Error: {e}")
        except Exception as e:
            return await ai_reply.edit_text(f"Error: {e}")

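    # Text flow: strip an optional "Randy"/"Rendi" trigger word, then reply via a
    # chat session seeded with the per-user history stored in the database; the
    # new user turn is appended to that history before send_message() runs.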
    if message.text:
        await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
        await asyncio.sleep(1.5)
        query = message.text.strip()
        match = re.search(r"\b(Randy|Rendi)\b(.*)", query, flags=re.IGNORECASE)
        if match:
            rest_of_sentence = match.group(2).strip()
            query_base = rest_of_sentence if rest_of_sentence else query
        else:
            query_base = query
        parts = query.split(maxsplit=1)
        command = parts[0].lower()
        pic_query = parts[1].strip() if len(parts) > 1 else ""
        try:
            model_flash = genai.GenerativeModel(
                model_name="gemini-1.5-flash"
            )
            backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
            backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
            chat_session = model_flash.start_chat(history=backup_chat)
            response_data = chat_session.send_message(query_base)
            output = response_data.text
            if len(output) > 4096:
                with open("chat.txt", "w+", encoding="utf8") as out_file:
                    out_file.write(output)
                await message.reply_document(
                    document="chat.txt",
                    disable_notification=True
                )
                os.remove("chat.txt")
            else:
                await message.reply_text(output)
            backup_chat.append({"role": "model", "parts": [{"text": output}]})
            await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
            await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
            return
        except Exception as e:
            return await message.reply_text(f"Error: {e}")