import os
import random
import asyncio
import helpers.helper as helper
import google.generativeai as genai
from g4f.client import Client
from g4f.Provider import DeepInfraChat, Glider, Liaobots, Blackbox, ChatGptEs
from litellm import completion
os.environ["OPENROUTER_API_KEY"] = "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" | |
os.environ["GROQ_API_KEY"] ="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" | |
gemini_api_keys=["AIzaSyB7yKIdfW7Umv62G47BCdJjoHTJ9TeiAko","AIzaSyDtP05TyoIy9j0uPL7_wLEhgQEE75AZQSc","AIzaSyDOyjfqFhHmGlGJ2raX82XWTtmMcZxRshs"] | |
groq_api_keys=["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","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","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"] | |
#[,"AIzaSyBPfR-HG_HeUgLF0LYW1XQgQUxFF6jF_0U","AIzaSyBz01gZCb9kzZF3lNHuwy_iajWhi9ivyDk"]] | |
os.environ["GEMINI_API_KEY"] =random.choice(gemini_api_keys) | |
os.environ["TOGETHERAI_API_KEY"] ="30bed0b842ed3268372d57f588c22452798e9af96aa8d3129ba745ef226282a8" | |
DeepInfraChat.models = ["deepseek-ai/DeepSeek-R1-Turbo", "Qwen/QwQ-32B", "deepseek-ai/DeepSeek-R1"]

# Lookup tables mapping display names to g4f provider classes.
REASONING_CORRESPONDANCE = {"DeepSeek-R1-Glider": Glider, "DeepSeekR1": DeepInfraChat}
REASONING_QWQ = {"qwq-32b": DeepInfraChat}
CHAT_CORRESPONDANCE = {"DeepSeek-V3": DeepInfraChat, "DeepSeek-V3-Blackbox": Blackbox}

client = Client()
genai.configure(api_key="AIzaSyAQgAtQPpY0bQaCqCISGxeyF6tpDePx-Jg")
modell = genai.GenerativeModel("gemini-1.5-pro")
generation_config = {
    "temperature": 1,
    "top_p": 0.95,
    "top_k": 40,
    "max_output_tokens": 8192,
    "response_mime_type": "text/plain",
}

model2flash = genai.GenerativeModel(
    model_name="gemini-2.0-flash-thinking-exp",
    generation_config=generation_config,
)
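# A minimal sketch of calling the thinking model directly (an illustration,
# not original app code; it assumes the experimental model name is still
# served and the key configured above is valid):
#
#   resp = model2flash.generate_content("Explain step by step: why is the sky blue?")
#   print(resp.text)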
def gpt4(messages, model="gpt-4"):
    """Non-streaming completion. Note: the `model` argument is currently
    ignored; every request is routed to Gemini 2.0 Flash via litellm."""
    print(messages)
    if len(messages) == 1:
        messages[0]["role"] = "user"
    response = completion(
        model="gemini/gemini-2.0-flash",
        messages=messages,
    )
    return str(response.choices[0].message.content)
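# Example call (a sketch; assumes the GEMINI_API_KEY chosen above is valid):
#
#   reply = gpt4([{"role": "user", "content": "Say hello in one word."}])
#   print(reply)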
def gpt4stream(messages, model, api_keys):
    """Stream a completion for `model`, pushing text chunks onto helper.q.

    A final "RESULT: <full text>" (or "END") item signals completion.
    Note: the `api_keys` argument is currently unused; keys come from the
    module-level lists instead.
    """
    print(f"-------{model}--------")

    if model == "DeepSeekR1-togetherAI":
        response = completion(model="together_ai/deepseek-ai/DeepSeek-R1", messages=messages, stream=True)
        chunk = ""
        for part in response:
            chunk += part.choices[0].delta.content or ""
            # Stop live-streaming once a JSON code block starts; the full
            # text is delivered in the final RESULT message instead.
            if "```json" not in chunk:
                helper.q.put_nowait(part.choices[0].delta.content or "")
        helper.q.put_nowait("RESULT: " + chunk)
elif model == "DeepSeekV3-togetherAI": | |
response = completion(model="together_ai/deepseek-ai/DeepSeek-V3", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model=="deepseek-r1-distill-llama-70b": | |
os.environ["GROQ_API_KEY"] =random.choice(groq_api_keys) | |
response = completion(model="groq/deepseek-r1-distill-llama-70b", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
if "```json" in cunk: | |
helper.q.put_nowait("RESULT: "+cunk) | |
else: | |
helper.q.put_nowait("END") | |
elif model=="qwq-32b": | |
os.environ["GROQ_API_KEY"] =random.choice(groq_api_keys) | |
response = completion(model="groq/qwen-qwq-32b", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model=="llama-3.3-70b-versatile": | |
response = completion(model="groq/llama-3.3-70b-versatile", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
if "```json" in cunk: | |
helper.q.put_nowait("RESULT: "+cunk) | |
else: | |
helper.q.put_nowait("END") | |
elif model=="gemini-2.0-flash-thinking-exp-01-21": | |
for key in gemini_api_keys: | |
try: | |
os.environ["GEMINI_API_KEY"] =key | |
response = completion(model="gemini/gemini-2.0-flash-thinking-exp-01-21", messages=messages, stream=True) | |
cunk="" | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
break | |
except: | |
del response | |
pass | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model=="deepseek.r1": | |
cunk="" | |
if "V3" in model: | |
providers = CHAT_CORRESPONDANCE | |
model_name="deepseek-v3" | |
else: | |
providers = REASONING_CORRESPONDANCE | |
model_name="deepseek-r1" | |
for provider in providers: | |
try: | |
response = client.chat.completions.create( | |
provider=providers[provider], | |
model=model_name, | |
messages=messages, | |
stream=True | |
# Add any other necessary parameters | |
) | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk or "```" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
break | |
except Exception as e: | |
helper.q.put_nowait(str(e)) | |
pass | |
print("STOPPING") | |
helper.q.put_nowait("RESULT: "+cunk) | |
elif model=="deepseek-reasoner" : | |
cunk="" | |
providers=REASONING_QWQ | |
helper.q.put_nowait("<think>") | |
for provider in providers: | |
try: | |
response = client.chat.completions.create( | |
provider=providers[provider], | |
model="Qwen/QwQ-32B", | |
messages=messages, | |
stream=True | |
# Add any other necessary parameters | |
) | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
if "```json" not in cunk or "```" not in cunk: | |
helper.q.put_nowait(part.choices[0].delta.content or "") | |
break | |
except Exception as e: | |
pass | |
print("STOPPING") | |
if "```json" in cunk: | |
helper.q.put_nowait("RESULT: "+cunk) | |
else: | |
helper.q.put_nowait("END") | |
elif "DeepSeek" in model and "dev" in model: | |
cunk="" | |
if "V3" in model: | |
providers = CHAT_CORRESPONDANCE | |
else: | |
providers = REASONING_CORRESPONDANCE | |
for provider in providers: | |
try: | |
response = client.chat.completions.create( | |
provider=providers[provider], | |
model="deepseek-r1", | |
messages=messages, | |
stream=True | |
# Add any other necessary parameters | |
) | |
for part in response: | |
cunk=cunk+(part.choices[0].delta.content or "") | |
break | |
except Exception as e: | |
pass | |
print("STOPPING") | |
if "```json" in cunk: | |
helper.q.put_nowait("RESULT: "+cunk) | |
else: | |
helper.q.put_nowait("END") | |