randydev committed on
Commit 27b6805 · verified · 1 Parent(s): bf482e3
Files changed (1)
  1. chatbot/plugins/chat.py +13 -293
chatbot/plugins/chat.py CHANGED
@@ -49,9 +49,9 @@ from database import db
 from database import users_collection
 from logger import LOGS

-import google.generativeai as genai
+from google import genai
+from google.genai import types as ty
 import akenoai.pyro_decorator as akeno
-from google.api_core.exceptions import InvalidArgument

 async def geni_files_delete(name: str):
     url = f"https://generativelanguage.googleapis.com/v1beta/{name}"
@@ -61,36 +61,13 @@ async def geni_files_delete(name: str):
         return None
     return response.text

-spam_chats = []
-
-DISABLE_COMMAND = [
-    "start",
-    "status",
-    "offchat",
-    "onchat",
-    "setmodel",
-]
-
-NOT_ALLOWED_NON_PROGRAMMER = [
-    466019692,  # @myexcid,
-    1423479724,  # tonic,
-    883761960,  # ari
-    6824458358,  # None
-    1982318761,  # paman
-    5575183435,  # suku
-    948247711,  # akay
-]

 GEMINI_START_TEXT = """
-Hey! {name}
-
-I am ready to be a gemini bot developer
-
-- Command: /onchat (pm or group)
-- Command: /offchat (pm or group)
-- Command: /setmodel to change model
+Hey! {name} Welcome to Gemini AI New
 """

+gen = genai.Client(api_key=GOOGLE_API_KEY)
+
 class TaskManager:
     def __init__(self):
         self.running_tasks = {}
@@ -200,113 +177,6 @@ async def cancel_(client, callback_query):
     else:
         await callback_query.edit_message_text("⚠️ No active task to cancel.")

-async def handle_photo(client, message):
-    chat_id = message.chat.id
-    async def process_photo():
-        buttons = [
-            [
-                InlineKeyboardButton(
-                    text="Cancel",
-                    callback_data="cancels"
-                )
-            ],
-        ]
-        try:
-            spam_chats.append(chat_id)
-            await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_PHOTO)
-            await asyncio.sleep(1.5)
-            ai_reply = await message.reply_text(
-                "Uploading file..",
-                reply_markup=InlineKeyboardMarkup(buttons)
-            )
-            seconds_time = time.time()
-            file_path = await message.download(
-                progress=progress,
-                progress_args=(ai_reply, seconds_time, "Uploading..."),
-            )
-            caption = message.caption or "What's this?"
-            x = GeminiLatest(api_keys=GOOGLE_API_KEY)
-            await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_PHOTO)
-            await asyncio.sleep(1.5)
-            backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
-            backup_chat.append({"role": "user", "parts": [{"text": caption}]})
-            response_reads = x.get_response_image(caption, file_path)
-            if len(response_reads) > 4096:
-                with open("chat.txt", "w+", encoding="utf8") as out_file:
-                    out_file.write(response_reads)
-                await message.reply_document(
-                    document="chat.txt",
-                    disable_notification=True
-                )
-                await ai_reply.delete()
-                os.remove("chat.txt")
-            else:
-                await ai_reply.edit_text(response_reads)
-            backup_chat.append({"role": "model", "parts": [{"text": response_reads}]})
-            await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-            await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
-            os.remove(file_path)
-            return
-        except asyncio.CancelledError:
-            await ai_reply.edit_text("⚠️ Photo processing was canceled.")
-        finally:
-            spam_chats.remove(chat_id)
-            await task_manager.cancel_task(chat_id)
-
-    await task_manager.add_task(chat_id, process_photo())
-
-async def handle_video(client, message, model_):
-    chat_id = message.chat.id
-    async def process_video():
-        buttons = [
-            [
-                InlineKeyboardButton(
-                    text="Cancel",
-                    callback_data="cancels"
-                )
-            ],
-        ]
-        try:
-            spam_chats.append(chat_id)
-            await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_VIDEO)
-            await asyncio.sleep(1.5)
-            ai_reply = await message.reply_text(
-                "Uploading file..",
-                reply_markup=InlineKeyboardMarkup(buttons)
-            )
-            seconds_time = time.time()
-            video_file_name = await message.download(
-                file_name="newvideo.mp4",
-                progress=progress,
-                progress_args=(ai_reply, seconds_time, "Uploading...")
-            )
-            caption = message.caption or "What's this?"
-            backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
-            backup_chat.append({"role": "user", "parts": [{"text": caption}]})
-            model = genai.GenerativeModel(model_name=model_)
-            video_file = genai.upload_file(path=video_file_name)
-            while video_file.state.name == "PROCESSING":
-                await asyncio.sleep(10)
-                video_file = genai.get_file(video_file.name)
-
-            if video_file.state.name == "FAILED":
-                return await ai_reply.edit_text(f"Error: {video_file.state.name}")
-
-            response = model.generate_content(
-                [video_file, caption],
-                request_options={"timeout": 600}
-            )
-            await ai_reply.edit_text(response.text)
-            backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
-            await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-        except asyncio.CancelledError:
-            await ai_reply.edit_text("⚠️ Video processing was canceled.")
-        finally:
-            spam_chats.remove(chat_id)
-            task_manager.cancel_task(chat_id)
-
-    await task_manager.add_task(chat_id, process_video())
-
 @Client.on_message(
     ~filters.scheduled
     & filters.command(["start"])
@@ -325,87 +195,14 @@ async def startbot(client: Client, message: Message):
                 text="Channel",
                 url='https://t.me/RendyProjects'
             ),
-        ],
-        [
-            InlineKeyboardButton(
-                text="Donate Via Web",
-                web_app=WebAppInfo(url="https://sociabuzz.com/randydev99/tribe")
-            )
         ]
     ]
     await message.reply_text(
         text=GEMINI_START_TEXT.format(name=message.from_user.mention),
         disable_web_page_preview=True,
-        effect_id=5104841245755180586,
         reply_markup=InlineKeyboardMarkup(buttons)
     )

-FREE_GEMINI_TEXT = """
-• User Free : {name}
-- Text: `{check_enable}`
-- Image: `{check_enable}`
-- Video: `{check_enable}`
-- Voice: `{check_enable}`
-- Document: `{check_status}`
-
-{info}
-"""
-
-@Client.on_message(
-    ~filters.scheduled
-    & filters.command(["status"])
-    & ~filters.forwarded
-)
-@akeno.LogChannel(channel_id="KillerXSupport", is_track=True)
-async def userstatus(client: Client, message: Message):
-    if message.from_user.id in NOT_ALLOWED_NON_PROGRAMMER:
-        return
-    is_check_plan = await db.is_gemini_plan(user_id=message.from_user.id)
-    chat_user = await db.get_chatbot(message.chat.id)
-    if not is_check_plan and not chat_user:
-        return await message.reply_text(
-            FREE_GEMINI_TEXT.format(
-                name=message.from_user.first_name,
-                check_enable="Unlimited" if chat_user else "Stopped",
-                check_status="Unlimited" if is_check_plan else "Stopped",
-                info="You need email business: [register](https://forms.gle/egRciGY39mmhNyScA)\nRemember: **can't ownership**"
-            ),
-            disable_web_page_preview=True
-        )
-    return await message.reply_text(
-        FREE_GEMINI_TEXT.format(
-            name=message.from_user.first_name,
-            check_enable="Unlimited" if chat_user else "Stopped",
-            check_status="Unlimited" if is_check_plan else "Stopped",
-            info="**All unlimited good!**"
-        )
-    )
-
-
-@Client.on_message(
-    ~filters.scheduled
-    & filters.command(["onchat"])
-    & ~filters.forwarded
-)
-async def addchatbot_user(client: Client, message: Message):
-    if message.from_user.id in NOT_ALLOWED_NON_PROGRAMMER:
-        return
-    await db.add_chatbot(message.chat.id, client.me.id)
-    await message.reply_text("Added chatbot user")
-
-
-@Client.on_message(
-    ~filters.scheduled
-    & filters.command(["offchat"])
-    & ~filters.forwarded
-)
-@akeno.LogChannel(channel_id="KillerXSupport", is_track=True)
-async def rmchatbot_user(client: Client, message: Message):
-    if message.from_user.id in NOT_ALLOWED_NON_PROGRAMMER:
-        return
-    await db.remove_chatbot(message.chat.id)
-    await message.reply_text("ok stopped gemini")
-
 @Client.on_message(
     filters.incoming
     & (
@@ -415,95 +212,21 @@ async def rmchatbot_user(client: Client, message: Message):
         | filters.audio
         | filters.voice
         | filters.document
-        | filters.regex(r"\b(Randy|Rendi)\b(.*)", flags=re.IGNORECASE)
     )
-    & (filters.private | filters.group)
-    & filters.reply
-    & ~filters.command(DISABLE_COMMAND)
-    & ~filters.bot
-    & ~filters.via_bot
-    & ~filters.forwarded,
-    group=2,
+    & filters.private
+    & ~filters.command(["start"])
+    & ~filters.forwarded
 )
-@akeno.ForceSubscribe(where_from="RendyProjects", owner_id="xtdevs")
 async def chatbot_talk(client: Client, message: Message):
-    if message.from_user.id in NOT_ALLOWED_NON_PROGRAMMER:
-        return
     user = await users_collection.find_one({"user_id": message.from_user.id})
-    model_ = user.get("model") if user else "gemini-1.5-pro"
-    genai.configure(api_key=GOOGLE_API_KEY)
+    model_ = user.get("model") if user else "gemini-2.0-flash-001"
     chat_user = await db.get_chatbot(message.chat.id)
     if not chat_user:
         return
     if message.reply_to_message and message.reply_to_message.from_user:
        if message.reply_to_message.from_user.id != client.me.id:
            return
-    if message.photo:
-        await handle_photo(client, message)
-    if message.audio or message.voice:
-        await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_AUDIO)
-        await asyncio.sleep(1.5)
-        if client.me.is_premium:
-            ai_reply = await message.reply_text(f"{custom_loading}Processing...")
-        else:
-            ai_reply = await message.reply_text(f"Processing...")
-        if message.audio:
-            audio_file_name = await message.download()
-        if message.voice:
-            audio_file_name = await message.download()
-        caption = message.caption or "What's this?"
-        model = genai.GenerativeModel(
-            model_name=model_,
-            safety_settings={
-                genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-            }
-        )
-        backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
-        backup_chat.append({"role": "user", "parts": [{"text": caption}]})
-        if client.me.is_premium:
-            await ai_reply.edit_text(f"{custom_loading}Uploading file..")
-        else:
-            await ai_reply.edit_text("Uploading file..")
-        audio_file = genai.upload_file(path=audio_file_name)
-        while audio_file.state.name == "PROCESSING":
-            await asyncio.sleep(10)
-            audio_file = genai.get_file(audio_file.name)
-        if audio_file.state.name == "FAILED":
-            return await ai_reply.edit_text(f"Error: {audio_file.state.name}")
-        try:
-            await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
-            await asyncio.sleep(1.5)
-            response = model.generate_content(
-                [audio_file, caption],
-                request_options={"timeout": 600}
-            )
-            if len(response.text) > 4096:
-                with open("chat.txt", "w+", encoding="utf8") as out_file:
-                    out_file.write(response.text)
-                await message.reply_document(
-                    document="chat.txt",
-                    disable_notification=True
-                )
-                await ai_reply.delete()
-                os.remove("chat.txt")
-            else:
-                await ai_reply.edit_text(response.text)
-            backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
-            await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-            await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
-            audio_file.delete()
-            os.remove(audio_file_name)
-            return
-        except InvalidArgument as e:
-            return await ai_reply.edit_text(f"Error: {e}")
-        except Exception as e:
-            return await ai_reply.edit_text(f"Error: {e}")
-
-    if message.video:
-        await handle_video(client, message, model_)
+
     if message.text:
         await client.send_chat_action(message.chat.id, enums.ChatAction.TYPING)
         await asyncio.sleep(1.5)
@@ -518,13 +241,10 @@ async def chatbot_talk(client: Client, message: Message):
         command = parts[0].lower()
         pic_query = parts[1].strip() if len(parts) > 1 else ""
         try:
-            model_flash = genai.GenerativeModel(
-                model_name=model_
-            )
             backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
             backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
-            chat_session = model_flash.start_chat(history=backup_chat)
-            response_data = chat_session.send_message(query_base)
+            chat_session = gen.aio.chats.create(model=model_, history=backup_chat)
+            response_data = await chat_session.send_message(query_base)
            output = response_data.text
            if len(output) > 4096:
                with open("chat.txt", "w+", encoding="utf8") as out_file:
@@ -541,4 +261,4 @@ async def chatbot_talk(client: Client, message: Message):
             await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
             return
         except Exception as e:
-            return await message.reply_text(f"Error: {e}")
+            return await message.reply_text(f"Error: {e}")
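Context for the change above: the commit swaps the legacy google.generativeai SDK for the new google-genai client (genai.Client), drops the photo/video/audio handlers and the /status, /onchat and /offchat commands, and narrows the handler to private text chats. The sketch below is illustrative only and not part of the commit: it assumes the google-genai package is installed, uses a placeholder API key and a made-up demo_history, and guesses that the otherwise-unused ty import could carry safety settings similar to the removed genai.types.HarmCategory block.

# Illustrative sketch — not part of the commit. Assumes `pip install google-genai`;
# the API key, demo_history, and safety-settings config below are placeholders.
import asyncio

from google import genai
from google.genai import types as ty


async def main() -> None:
    # genai.Client(...) replaces the old genai.configure(api_key=...) call.
    client = genai.Client(api_key="YOUR_GOOGLE_API_KEY")

    # The plugin stores history as {"role": ..., "parts": [{"text": ...}]} dicts
    # (backup_chat); typed Content objects are the explicit equivalent.
    demo_history = [
        ty.Content(role="user", parts=[ty.Part(text="Hi")]),
        ty.Content(role="model", parts=[ty.Part(text="Hello! How can I help?")]),
    ]

    # Async chat session, mirroring `gen.aio.chats.create(...)` in the diff.
    chat = client.aio.chats.create(
        model="gemini-2.0-flash-001",
        history=demo_history,
        config=ty.GenerateContentConfig(
            safety_settings=[
                ty.SafetySetting(
                    category="HARM_CATEGORY_HATE_SPEECH",
                    threshold="BLOCK_NONE",
                ),
            ]
        ),
    )

    # send_message is a coroutine on the async client, hence the `await`
    # the commit adds inside chatbot_talk.
    response = await chat.send_message("What did I just say?")
    print(response.text)


if __name__ == "__main__":
    asyncio.run(main())

On the old SDK this flow was model.start_chat(history=...) followed by a blocking send_message, which is exactly the pair of lines the last hunk replaces.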