# backend/utils/llms.py
# NOTE: the following metadata was scraped from the hosting page and is kept
# here as a comment so the module remains valid Python:
#   uploader: akiko19191 — "Update utils/llms.py" — commit a0bae9e (verified)
#   links: raw / history / blame — size: 10.7 kB
import os
import helpers.helper as helper
import google.generativeai as genai
from g4f.client import Client
from litellm import completion
import random
from g4f.Provider import DeepInfraChat,Glider,Liaobots,Blackbox,ChatGptEs,LambdaChat,TypeGPT
# --- Provider credentials ----------------------------------------------------
# SECURITY(review): all API keys below are hard-coded and committed to the
# repository. They should be considered compromised, rotated, and loaded from
# the environment or a secret store instead of being embedded in source.
os.environ["OPENROUTER_API_KEY"] = "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"
os.environ["GROQ_API_KEY"] ="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"
# Key pools used for per-request rotation (see gpt4stream, which overwrites
# the env vars with random.choice / per-key retry loops).
gemini_api_keys=["AIzaSyB7yKIdfW7Umv62G47BCdJjoHTJ9TeiAko","AIzaSyDtP05TyoIy9j0uPL7_wLEhgQEE75AZQSc","AIzaSyDOyjfqFhHmGlGJ2raX82XWTtmMcZxRshs"]
groq_api_keys=["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","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","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"]
#[,"AIzaSyBPfR-HG_HeUgLF0LYW1XQgQUxFF6jF_0U","AIzaSyBz01gZCb9kzZF3lNHuwy_iajWhi9ivyDk"]]
# A Gemini key is chosen once at import time; gpt4stream may overwrite it.
os.environ["GEMINI_API_KEY"] =random.choice(gemini_api_keys)
os.environ["TOGETHERAI_API_KEY"] ="30bed0b842ed3268372d57f588c22452798e9af96aa8d3129ba745ef226282a8"
# --- Provider / model routing tables -----------------------------------------
# Restrict the DeepInfraChat g4f provider to the models we actually route to.
DeepInfraChat.models = ["google/gemma-3-27b-it","deepseek-ai/DeepSeek-R1-Turbo","Qwen/QwQ-32B","deepseek-ai/DeepSeek-R1","deepseek-ai/DeepSeek-V3-0324","meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8","meta-llama/Llama-4-Scout-17B-16E-Instruct","microsoft/Phi-4-multimodal-instruct"]
# Model names that are served directly through DeepInfraChat in gpt4stream.
deepinframodels=["meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8","microsoft/Phi-4-multimodal-instruct","google/gemma-3-27b-it","meta-llama/Llama-4-Scout-17B-16E-Instruct"]
# Ordered fallback tables: gpt4stream iterates these dicts and tries each
# g4f provider until one succeeds.  (Spelling "CORRESPONDANCE" kept as-is —
# it is part of the module's public surface.)
REASONING_CORRESPONDANCE = {"DeepSeek-R1-Glider":Glider, "DeepSeekR1-LAMBDA":LambdaChat,"DeepSeekR1":DeepInfraChat,"deepseek-slow":TypeGPT}
REASONING_QWQ = {"qwq-32b":DeepInfraChat}
CHAT_CORRESPONDANCE = {"DeepSeek-V3":DeepInfraChat}
# --- Client handles ----------------------------------------------------------
client = Client()
genai.configure(api_key="AIzaSyAQgAtQPpY0bQaCqCISGxeyF6tpDePx-Jg")
modell = genai.GenerativeModel('gemini-1.5-pro')
generation_config = {
"temperature": 1,
"top_p": 0.95,
"top_k": 40,
"max_output_tokens": 8192,
"response_mime_type": "text/plain",
}
model2flash = genai.GenerativeModel(
model_name="gemini-2.0-flash-thinking-exp",
generation_config=generation_config,
)
def clear():
    """Reset the shared stop flag and drain ``helper.q``.

    An ``"END"`` sentinel is enqueued first so any blocked consumer wakes up,
    then every pending item (sentinel included) is discarded, acknowledging
    each one with ``task_done()``.
    """
    helper.stopped = False
    helper.q.put_nowait("END")
    while True:
        if helper.q.empty():
            break
        try:
            helper.q.get(block=False)
        except Exception:
            # Queue raced to empty between the check and the get; retry.
            continue
        helper.q.task_done()
def gpt4(messages, model="gpt-4"):
    """Run a single non-streaming completion and return its text.

    NOTE: the *model* argument is accepted for interface compatibility only —
    every request is routed to ``"gemini/gemini-2.0-flash"`` via litellm.

    Parameters
    ----------
    messages : list[dict]
        Chat messages in OpenAI format.  A single-message list is forced to
        the ``"user"`` role in place before sending.
    model : str
        Ignored (see note above).
    """
    print(messages)
    if len(messages) == 1:
        messages[0]["role"] = "user"
    result = completion(model="gemini/gemini-2.0-flash", messages=messages)
    return str(result.choices[0].message.content)
def gpt4stream(messages, model, api_keys):
    """Stream a chat completion for *model* into the shared queue ``helper.q``.

    Each streamed delta is appended to an accumulator (``cunk``) and — subject
    to a code-fence heuristic — pushed onto ``helper.q`` for the consumer.
    When the stream finishes (or fails), the full accumulated text is enqueued
    once as ``"RESULT: <text>"``.  Setting ``helper.stopped`` aborts the
    stream via :func:`clear`.

    Parameters
    ----------
    messages : list[dict]
        Chat messages in OpenAI format.
    model : str
        Logical model name; selects one of the provider branches below.
    api_keys : object
        Unused; kept for interface compatibility with callers.
    """
    print(f"-------{model}--------")
    # Accumulated response text.  Initialised up front: previously the
    # deepinframodels branch never assigned it, so a request that failed
    # before the first chunk raised NameError on the final "RESULT: " put.
    cunk = ""

    if model in deepinframodels:
        try:
            response = client.chat.completions.create(
                provider=DeepInfraChat,
                model=model,
                messages=messages,
                stream=True
            )
            for part in response:
                cunk = cunk + (part.choices[0].delta.content or "")
                # NOTE(review): this condition is True unless BOTH "```json"
                # and "```" occur in the accumulator; it looks like it was
                # meant to suppress live streaming inside a JSON fence —
                # confirm intended logic before changing it.
                if "```json" not in cunk or "```" not in cunk:
                    helper.q.put_nowait(part.choices[0].delta.content or "")
                if helper.stopped:
                    clear()
                    break
        except Exception:
            pass  # best-effort: whatever accumulated is still reported below
        helper.q.put_nowait("RESULT: " + cunk)

    elif model == "DeepSeekR1-togetherAI":
        response = completion(model="together_ai/deepseek-ai/DeepSeek-R1", messages=messages, stream=True)
        for part in response:
            cunk = cunk + (part.choices[0].delta.content or "")
            # Suppress live streaming once a JSON fence has started.
            if "```json" not in cunk:
                helper.q.put_nowait(part.choices[0].delta.content or "")
            if helper.stopped:
                clear()
                break
        helper.q.put_nowait("RESULT: " + cunk)

    elif model == "DeepSeekV3-togetherAI":
        response = completion(model="together_ai/deepseek-ai/DeepSeek-V3", messages=messages, stream=True)
        for part in response:
            cunk = cunk + (part.choices[0].delta.content or "")
            if "```json" not in cunk:
                helper.q.put_nowait(part.choices[0].delta.content or "")
            if helper.stopped:
                clear()
                break
        helper.q.put_nowait("RESULT: " + cunk)

    elif model == "deepseek-r1-distill-llama-70b":
        # Rotate the Groq key per request.
        os.environ["GROQ_API_KEY"] = random.choice(groq_api_keys)
        response = completion(model="groq/deepseek-r1-distill-llama-70b", messages=messages, stream=True)
        for part in response:
            cunk = cunk + (part.choices[0].delta.content or "")
            if "```json" not in cunk:
                helper.q.put_nowait(part.choices[0].delta.content or "")
            if helper.stopped:
                clear()
                break
        helper.q.put_nowait("RESULT: " + cunk)

    elif model == "qwq-32b":
        os.environ["GROQ_API_KEY"] = random.choice(groq_api_keys)
        response = completion(model="groq/qwen-qwq-32b", messages=messages, stream=True)
        for part in response:
            cunk = cunk + (part.choices[0].delta.content or "")
            if "```json" not in cunk:
                helper.q.put_nowait(part.choices[0].delta.content or "")
            if helper.stopped:
                clear()
                break
        helper.q.put_nowait("RESULT: " + cunk)

    elif model == "llama-3.3-70b-versatile":
        response = completion(model="groq/llama-3.3-70b-versatile", messages=messages, stream=True)
        for part in response:
            cunk = cunk + (part.choices[0].delta.content or "")
            if "```json" not in cunk:
                helper.q.put_nowait(part.choices[0].delta.content or "")
            if helper.stopped:
                clear()
                break
        helper.q.put_nowait("RESULT: " + cunk)

    elif model == "gemini-2.0-flash-thinking-exp-01-21":
        # Try each Gemini key in turn; errors are surfaced to the consumer
        # queue before the next key is attempted.
        for key in gemini_api_keys:
            try:
                os.environ["GEMINI_API_KEY"] = key
                response = completion(model="gemini/gemini-2.0-flash-thinking-exp-01-21", messages=messages, stream=True)
                cunk = ""  # reset per attempt so a retry starts clean
                for part in response:
                    cunk = cunk + (part.choices[0].delta.content or "")
                    if "```json" not in cunk:
                        helper.q.put_nowait(part.choices[0].delta.content or "")
                    if helper.stopped:
                        clear()
                        break
                break  # success — stop trying further keys
            except Exception as e:
                helper.q.put_nowait(str(e))
        helper.q.put_nowait("RESULT: " + cunk)

    elif model == "gemini-2.5-pro-exp-03-25":
        for key in gemini_api_keys:
            try:
                os.environ["GEMINI_API_KEY"] = key
                response = completion(model="gemini/gemini-2.5-pro-exp-03-25", messages=messages, stream=True)
                cunk = ""  # reset per attempt so a retry starts clean
                for part in response:
                    cunk = cunk + (part.choices[0].delta.content or "")
                    if "```json" not in cunk:
                        helper.q.put_nowait(part.choices[0].delta.content or "")
                    if helper.stopped:
                        clear()
                        break
                break
            except Exception:
                # Was a bare `except:` — narrowed so SystemExit /
                # KeyboardInterrupt are no longer swallowed.
                pass
        helper.q.put_nowait("RESULT: " + cunk)

    elif model == "deepseek.r1" or model == "deepseek-chat":
        if "chat" in model:
            providers = CHAT_CORRESPONDANCE
            model_name = "deepseek-ai/DeepSeek-V3-0324"
        else:
            providers = REASONING_CORRESPONDANCE
            model_name = "deepseek-r1"
        # Walk the fallback table until one provider completes.  Note that
        # cunk deliberately carries over between providers, so partial output
        # from a failed provider is not discarded.
        for provider in providers:
            try:
                response = client.chat.completions.create(
                    provider=providers[provider],
                    model=model_name,
                    messages=messages,
                    stream=True
                )
                for part in response:
                    cunk = cunk + (part.choices[0].delta.content or "")
                    if helper.stopped:
                        clear()
                        break
                    if "```json" not in cunk or "```" not in cunk:
                        helper.q.put_nowait(part.choices[0].delta.content or "")
                break
            except Exception:
                pass  # fall through to the next provider
        print("STOPPING")
        helper.q.put_nowait("RESULT: " + cunk)

    elif model == "deepseek-reasoner":
        providers = REASONING_QWQ
        # The QwQ output is presented to the consumer as reasoning content.
        helper.q.put_nowait("<think>")
        for provider in providers:
            try:
                response = client.chat.completions.create(
                    provider=providers[provider],
                    model="Qwen/QwQ-32B",
                    messages=messages,
                    stream=True
                )
                for part in response:
                    cunk = cunk + (part.choices[0].delta.content or "")
                    if "```json" not in cunk or "```" not in cunk:
                        helper.q.put_nowait(part.choices[0].delta.content or "")
                    if helper.stopped:
                        clear()
                        break
                break
            except Exception:
                pass
        helper.q.put_nowait("RESULT: " + cunk)

    elif "DeepSeek" in model and "dev" in model:
        if "V3" in model:
            providers = CHAT_CORRESPONDANCE
        else:
            providers = REASONING_CORRESPONDANCE
        # Dev mode: accumulate silently (no per-chunk streaming), then report
        # the whole result at once.
        for provider in providers:
            try:
                response = client.chat.completions.create(
                    provider=providers[provider],
                    model="deepseek-r1",
                    messages=messages,
                    stream=True
                )
                for part in response:
                    cunk = cunk + (part.choices[0].delta.content or "")
                break
            except Exception:
                pass
        print("STOPPING")
        helper.q.put_nowait("RESULT: " + cunk)