Commit 22933bb · verified · committed by API-XX · 1 Parent(s): aa287f3

Upload case.js

Files changed (1)
  1. case.js  +52 -30
case.js CHANGED
@@ -909,6 +909,12 @@ async function SPAMNOCLICK(isTarget) {
      })
    }
  }
+ const chatMemoryFile = './database/chat_memory.json';
+ const fs = require('fs');
+ if (!fs.existsSync(chatMemoryFile)) {
+   fs.writeFileSync(chatMemoryFile, JSON.stringify({}), 'utf8');
+ }
+
  try {
    switch (command) {
      case 'menu':
@@ -1725,39 +1731,56 @@ case 'update': {
        }
        break;
      }
-     case 'blueai': {
-       if (!q) {
-         return reply(`*Please provide a query.*\n\n*Example:* ${prefix + command} Hello, which model are you?`);
-       }
-
-       await loading();
-
-       try {
-         const prompt = "your model is based on gpt 5 mini trained by blue demon, Your purpose is to assist users with a wide range of tasks and queries effectively without too much talk,from now on u are Branded by this. Question: ";
-
-         const apiUrl = `https://api-lenwy.vercel.app/ai4chat?text=${encodeURIComponent(prompt)}${encodeURIComponent(q)}`;
-         const response = await fetch(apiUrl);
-         const res = await response.json();
-         if (res.status !== 200 || !res.data) {
-           return reply("Failed to process your request. Please try again later.");
-         }
-         const aiResponse = res.data;
-         await conn.sendMessage(from, {
-           image: {
-             url: 'https://huggingface.co/spaces/API-XX/TEST/resolve/main/Links/thumb.jpg',
-           },
-           caption: fontx(`*BLUE AI Response:*\n${aiResponse}\n> ${caption}`),
-         }, {
-           quoted: m
-         });
-
-       } catch (error) {
-         console.error("Error in BLUE Ai case:", error);
-         reply("An error occurred while processing your request. Please try again later.");
-       }
-
-       break;
-     }
+     case 'blueai':
+     case 'ai': {
+       if (!q) {
+         return reply(`*Please provide a query.*\n\n*Example:* ${prefix + command} Hello, which model are you?`);
+       }
+
+       await loading();
+       let user = m.sender;
+       let chatMemory = JSON.parse(fs.readFileSync(chatMemoryFile, 'utf8'));
+       if (!chatMemory[user]) {
+         chatMemory[user] = [];
+       }
+       if (chatMemory[user].length > 10) {
+         chatMemory[user].shift();
+       }
+       let conversationHistory = chatMemory[user].map(entry => `User: ${entry.user}\nAI: ${entry.ai}`).join("\n");
+       let prompt = `Your model is based on GPT-5 Mini, trained by Blue Demon. You are a concise AI assistant named "BLUE AI". Maintain a smooth conversation without excessive talk.
+
+ Conversation History:
+ ${conversationHistory}
+
+ User: ${q}
+ AI: `;
+
+       try {
+         let apiUrl = `https://api-lenwy.vercel.app/ai4chat?text=${encodeURIComponent(prompt)}`;
+         let response = await fetch(apiUrl);
+         let res = await response.json();
+
+         if (res.status !== 200 || !res.data) {
+           return reply("❌ Failed to process your request. Please try again later.");
+         }
+
+         let aiResponse = res.data;
+
+         chatMemory[user].push({ user: q, ai: aiResponse });
+         fs.writeFileSync(chatMemoryFile, JSON.stringify(chatMemory, null, 2), 'utf8');
+
+         await conn.sendMessage(from, {
+           image: { url: 'https://huggingface.co/spaces/API-XX/TEST/resolve/main/Links/thumb.jpg' },
+           caption: fontx(`💬 *BLUE AI Response:*\n${aiResponse}\n> ${caption}`)
+         }, { quoted: m });
+
+       } catch (error) {
+         console.error("Error in BLUE AI case:", error);
+         reply("❌ An error occurred while processing your request. Please try again later.");
+       }
+
+       break;
+     }
      case 'gemini': {
        if (!q) return reply(`*Please provide a query.*\n\n*Example:* ${prefix + command} Hello, which model are you?`);

@@ -1785,8 +1808,7 @@ case 'update': {
        }
        break;
      }
-     case 'gemini-pro':
-     case 'ai': {
+     case 'gemini-pro': {
        if (!q) {
          return reply(`*Please provide a query.*\n\n*Example:* ${prefix + command} Hello, what model are you?`);
        }
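
For reference, the new blueai/ai handler persists per-sender conversation history in ./database/chat_memory.json, keyed by m.sender, with each entry holding one user query and the model's reply. Below is a minimal sketch of the assumed file shape and of the trimming behavior; the remember() helper and the example JID are illustrative only and are not part of case.js.

// Assumed shape of ./database/chat_memory.json after a few exchanges
// ("123456789@s.whatsapp.net" is a made-up key; real keys come from m.sender):
// {
//   "123456789@s.whatsapp.net": [
//     { "user": "hello", "ai": "Hi! How can I help?" },
//     { "user": "what model are you?", "ai": "I am BLUE AI." }
//   ]
// }

const fs = require('fs');
const chatMemoryFile = './database/chat_memory.json';

// Illustrative helper (not in the commit): append one exchange for a sender and
// cap the stored history, mirroring the length > 10 check and shift() in case.js.
function remember(sender, userText, aiText, maxEntries = 10) {
  const memory = fs.existsSync(chatMemoryFile)
    ? JSON.parse(fs.readFileSync(chatMemoryFile, 'utf8'))
    : {};
  const history = memory[sender] || [];
  history.push({ user: userText, ai: aiText });
  while (history.length > maxEntries) history.shift(); // drop the oldest entries first
  memory[sender] = history;
  fs.writeFileSync(chatMemoryFile, JSON.stringify(memory, null, 2), 'utf8');
}

Note that the committed handler calls shift() at most once per message, before appending, so the stored history can drift slightly above ten entries; the loop above only illustrates the intended cap.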
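
The request flow keeps the same api-lenwy ai4chat endpoint as before, but now folds the branded instruction, the stored history, and the new question into a single URL-encoded text parameter. A standalone sketch of that call, assuming the endpoint keeps responding with { status, data } as the committed code expects (askBlueAI is an illustrative name, not a function in case.js):

// Sketch only: build the prompt the way the new handler does and fetch a reply.
// Requires Node 18+ for the global fetch.
async function askBlueAI(question, conversationHistory = '') {
  const prompt = `Your model is based on GPT-5 Mini, trained by Blue Demon. You are a concise AI assistant named "BLUE AI". Maintain a smooth conversation without excessive talk.

Conversation History:
${conversationHistory}

User: ${question}
AI: `;
  const apiUrl = `https://api-lenwy.vercel.app/ai4chat?text=${encodeURIComponent(prompt)}`;
  const res = await (await fetch(apiUrl)).json();
  if (res.status !== 200 || !res.data) {
    throw new Error('ai4chat request failed');
  }
  return res.data; // the reply text the bot forwards with the thumbnail caption
}

// Example usage:
// const reply = await askBlueAI('Hello, which model are you?');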