randydev committed · verified
Commit d477ab1 · 1 Parent(s): ee0f981

Update chatbot/plugins/chat.py

Files changed (1)
chatbot/plugins/chat.py  +92 -37
chatbot/plugins/chat.py CHANGED
@@ -261,6 +261,62 @@ async def deletemydata(client, callback):
     else:
         await callback.answer(delm, True)
 
+@Client.on_callback_query(filters.regex("^genprompt_(\d+)$"))
+async def geminigen_prompt(client, callback):
+    user_id = int(callback.matches[0].group(1))
+    captions = None
+    file_path = "gemini-native-image.png"
+    try:
+        backup_chat = await db._get_chatbot_chat_from_db(user_id)
+        data = await db.backup_chatbot.find_one({"user_id": user_id})
+        if not data:
+            return await callback.answer("Can't found user", True)
+        get_response = data.get("prompt_image", None)
+        if not get_response:
+            return await callback.answer("Server busy try again later", True)
+        await callback.message.delete()
+
+        backup_chat.append({"role": "user", "parts": [{"text": get_response}]})
+        response = await gen.aio.models.generate_content(
+            model="gemini-2.0-flash-exp-image-generation",
+            contents=get_response,
+            config=ty.GenerateContentConfig(
+                response_modalities=['TEXT', 'IMAGE']
+            )
+        )
+
+        for part in response.candidates[0].content.parts:
+            if part.text is not None:
+                captions += part.text
+            elif part.inline_data is not None:
+                image = Image.open(BytesIO((part.inline_data.data)))
+                image.save(file_path)
+
+        keyboard_like = create_keyboard(user_id=user_id)
+        await callback.message.reply_photo(
+            file_path,
+            caption=captions,
+            reply_markup=keyboard_like
+        )
+        await db.backup_chatbot.update_one(
+            {"user_id": user_id},
+            {"$set": {"translate_text": captions}},
+            upsert=True
+        )
+        backup_chat.append({"role": "model", "parts": [{"text": captions}]})
+        await db._update_chatbot_chat_in_db(user_id, backup_chat)
+        await client.send_chat_action(callback.message.chat.id, enums.ChatAction.CANCEL)
+        return
+    except Exception as e:
+        LOGS.error(f"geminigen_prompt failed: {str(e)}")
+        await callback.message.reply_text("Server busy try again later")
+    finally:
+        if file_path:
+            try:
+                os.remove(file_path)
+            except:
+                pass
+
 @Client.on_callback_query(filters.regex("^fluxprompt_(\d+)$"))
 async def flux_prompt(client, callback):
     user_id = int(callback.matches[0].group(1))
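The new geminigen_prompt handler drives Gemini's native image output: it resends the stored prompt to gemini-2.0-flash-exp-image-generation with response_modalities=['TEXT', 'IMAGE'], then walks response.candidates[0].content.parts, concatenating text parts into a caption and decoding the inline image bytes with Pillow. A minimal standalone sketch of that pattern, assuming gen and ty in this plugin are the google-genai async client and its types module; the accumulator below starts as an empty string so += on text parts works (the handler above starts it at None):

import asyncio
from io import BytesIO

from google import genai
from google.genai import types
from PIL import Image

async def generate_image(prompt: str, file_path: str = "gemini-native-image.png") -> str:
    # Hypothetical standalone client; the plugin reuses its module-level `gen` client instead.
    client = genai.Client(api_key="YOUR_API_KEY")
    response = await client.aio.models.generate_content(
        model="gemini-2.0-flash-exp-image-generation",
        contents=prompt,
        config=types.GenerateContentConfig(response_modalities=["TEXT", "IMAGE"]),
    )
    caption = ""  # empty string, so text parts can be concatenated safely
    for part in response.candidates[0].content.parts:
        if part.text is not None:
            caption += part.text
        elif part.inline_data is not None:
            # inline_data.data carries the raw image bytes returned by the model
            Image.open(BytesIO(part.inline_data.data)).save(file_path)
    return caption

if __name__ == "__main__":
    print(asyncio.run(generate_image("a watercolor fox in a misty forest")))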
@@ -268,13 +324,14 @@ async def flux_prompt(client, callback):
     file_photo = None
     try:
         backup_chat = await db._get_chatbot_chat_from_db(user_id)
-        data = await db.backup_chatbot.find_one({"user_id": int(user_id)})
+        data = await db.backup_chatbot.find_one({"user_id": user_id})
         if not data:
             return await callback.answer("Can't found user", True)
-        get_response = data.get("prompt_flux", None)
+        get_response = data.get("prompt_image", None)
         if not get_response:
             return await callback.answer("Server busy try again later", True)
         await callback.message.delete()
+
         backup_chat.append({"role": "user", "parts": [{"text": get_response}]})
         response_js = await js.image.create(
             "black-forest-labs/flux-1-schnell",
@@ -287,7 +344,7 @@ async def flux_prompt(client, callback):
 
         file_photo = await gen.aio.files.upload(file=file_path)
         while file_photo.state.name == "PROCESSING":
-            await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_PHOTO)
+            await client.send_chat_action(callback.message.chat.id, enums.ChatAction.UPLOAD_PHOTO)
             await asyncio.sleep(10)
             file_photo = await gen.aio.files.get(name=file_photo.name)
         if file_photo.state.name == "FAILED":
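Before reusing the FLUX output in a follow-up Gemini request, flux_prompt uploads the PNG through the Files API and polls until it leaves the PROCESSING state, sending an UPLOAD_PHOTO chat action on each pass. A sketch of that wait loop in isolation, again assuming the google-genai async client:

import asyncio

from google import genai

async def wait_until_ready(path: str):
    client = genai.Client(api_key="YOUR_API_KEY")  # the plugin reuses its module-level `gen` client
    uploaded = await client.aio.files.upload(file=path)
    # Poll the Files API until processing finishes; each retry waits 10 seconds.
    while uploaded.state.name == "PROCESSING":
        await asyncio.sleep(10)
        uploaded = await client.aio.files.get(name=uploaded.name)
    if uploaded.state.name == "FAILED":
        raise RuntimeError("file processing failed")
    return uploaded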
@@ -311,9 +368,12 @@ async def flux_prompt(client, callback):
             upsert=True
         )
         backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
+        await db._update_chatbot_chat_in_db(user_id, backup_chat)
+        await client.send_chat_action(callback.message.chat.id, enums.ChatAction.CANCEL)
+        return
     except Exception as e:
         LOGS.error(f"flux_prompt failed: {str(e)}")
-        await callback.message.reply_text("Server error")
+        await callback.message.reply_text("Server busy try again later")
     finally:
         if file_path and file_photo:
             try:
@@ -346,7 +406,7 @@ async def remover_bg(client, callback):
         await callback.answer()
     except Exception as e:
         LOGS.error(f"remover_bg failed: {str(e)}")
-        await callback.answer("Server error", show_alert=True)
+        await callback.answer("Server busy try again later", show_alert=True)
 
 @Client.on_callback_query(filters.regex("^refreshch$"))
 async def reshch(client, callback):
@@ -602,7 +662,7 @@ async def chatbot_talk(client: Client, message: Message):
                 os.remove(file_path)
             except:
                 pass
-
+
     if re.findall(r"\b(pro:editimage)\b", caption, re.IGNORECASE):
         await db.backup_chatbot.update_one(
             {"user_id": message.from_user.id},
@@ -789,45 +849,40 @@ async def chatbot_talk(client: Client, message: Message):
 
     if re.findall(r"\b(image)\b", query_base, re.IGNORECASE):
         try:
-            backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
-            backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
-            response = await gen.aio.models.generate_content(
-                model="gemini-2.0-flash-exp-image-generation",
-                contents=query_base,
-                config=ty.GenerateContentConfig(
-                    response_modalities=['TEXT', 'IMAGE']
-                )
-            )
-            for part in response.candidates[0].content.parts:
-                if part.text is not None:
-                    captions += part.text
-                elif part.inline_data is not None:
-                    image = Image.open(BytesIO((part.inline_data.data)))
-                    image.save(file_path)
-            keyboard_like = create_keyboard(user_id=message.from_user.id)
-            await message.reply_photo(
-                file_path,
-                caption=captions,
-                reply_markup=keyboard_like
-            )
+            buttons = [
+                [
+                    InlineKeyboardButton(
+                        text="🎨 Gemini AI Generate Image",
+                        callback_data=f"genprompt_{message.from_user.id}"
+                    )
+                ],
+                [
+                    InlineKeyboardButton(
+                        text="🖌️ FLUX AI Generate Image",
+                        callback_data=f"fluxprompt_{message.from_user.id}"
+                    )
+                ]
+                [
+                    InlineKeyboardButton(
+                        text="❌ Cancel",
+                        callback_data="closedd"
+                    )
+                ]
+            ]
             await db.backup_chatbot.update_one(
                 {"user_id": message.from_user.id},
-                {"$set": {"translate_text": captions}},
+                {"$set": {"prompt_image": query_base}},
                 upsert=True
             )
-            backup_chat.append({"role": "model", "parts": [{"text": captions}]})
-            await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-            await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
+            await message.reply_text(
+                "Are you sure you want to prompt this Image generate?\n\n"
+                f"Your answer: `{query_base}`",
+                reply_markup=InlineKeyboardMarkup(buttons)
+            )
             return
         except Exception as e:
             LOGS.error(f"Error: Gemini Image: {str(e)}")
             return await message.reply_text("Server busy try again later")
-        finally:
-            if file_path:
-                try:
-                    os.remove(file_path)
-                except:
-                    pass
 
     if re.findall(r"\b(enabled:chatsystem)\b", query_base, re.IGNORECASE):
         await db.backup_chatbot.update_one(
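The confirmation keyboard is what ties this hunk to the two new handlers: callback_data values genprompt_<user_id> and fluxprompt_<user_id> are matched by the ^genprompt_(\d+)$ and ^fluxprompt_(\d+)$ filters, which expose the numeric id through callback.matches[0].group(1). Note that the second and third button rows above are juxtaposed with no comma between them, which Python parses as a subscript and raises a TypeError when the list is built. A minimal sketch of the intended wiring with the rows separated by commas, assuming only Pyrogram (the session and handler names here are hypothetical):

from pyrogram import Client, filters
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, Message

app = Client("my_session")  # hypothetical session name

@app.on_message(filters.private & filters.text)
async def ask_confirmation(client: Client, message: Message):
    buttons = [
        [InlineKeyboardButton("🎨 Gemini AI Generate Image",
                              callback_data=f"genprompt_{message.from_user.id}")],
        [InlineKeyboardButton("🖌️ FLUX AI Generate Image",
                              callback_data=f"fluxprompt_{message.from_user.id}")],  # comma between rows
        [InlineKeyboardButton("❌ Cancel", callback_data="closedd")],
    ]
    await message.reply_text(
        f"Generate an image for: `{message.text}`?",
        reply_markup=InlineKeyboardMarkup(buttons),
    )

@app.on_callback_query(filters.regex(r"^genprompt_(\d+)$"))
async def on_gen_prompt(client: Client, callback):
    user_id = int(callback.matches[0].group(1))  # id captured by the regex group
    await callback.answer(f"Generating for user {user_id}...")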
 