Update utils/llms.py
utils/llms.py (+2 -2)
@@ -93,7 +93,7 @@ def gpt4stream(messages,model,api_keys):
             helper.q.put_nowait("RESULT: "+cunk)
         else:
             helper.q.put_nowait("END")
-    elif model=="
+    elif model=="qwq-32b":
         os.environ["GROQ_API_KEY"] =random.choice(groq_api_keys)
         response = completion(model="groq/qwen-qwq-32b", messages=messages, stream=True)

@@ -167,7 +167,7 @@ def gpt4stream(messages,model,api_keys):
         helper.q.put_nowait("RESULT: "+cunk)


-    elif model=="
+    elif model=="gemini-2.0-pro-exp-02-05" :
         cunk=""
         providers=REASONING_QWQ
         for provider in providers:
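
For readers skimming the change, here is a minimal, hypothetical sketch of how the two edited `elif` branches fit into the streaming dispatch. The key list, provider list, and result queue below are stand-ins for `groq_api_keys`, `REASONING_QWQ`, and `helper.q` defined elsewhere in utils/llms.py, and `completion` is assumed to be litellm's OpenAI-compatible streaming call; this is not the repo's exact code.

```python
# Hypothetical reconstruction of the dispatch around the two edited branches.
import os
import queue
import random

from litellm import completion  # assumed source of `completion`

groq_api_keys = ["gsk-..."]   # placeholder; real keys come from the api_keys argument
REASONING_QWQ = []            # placeholder; provider list defined elsewhere in the file
q = queue.Queue()             # stand-in for helper.q


def stream_model(messages, model):
    cunk = ""
    if model == "qwq-32b":
        # First hunk: route QwQ-32B through Groq with a randomly chosen key.
        os.environ["GROQ_API_KEY"] = random.choice(groq_api_keys)
        response = completion(model="groq/qwen-qwq-32b", messages=messages, stream=True)
        for chunk in response:
            # OpenAI-compatible streaming chunks; delta.content may be None.
            cunk += chunk.choices[0].delta.content or ""
        q.put_nowait("RESULT: " + cunk)
    elif model == "gemini-2.0-pro-exp-02-05":
        # Second hunk: this model name is served by iterating the REASONING_QWQ providers.
        for provider in REASONING_QWQ:
            pass  # provider-specific streaming, accumulated into cunk
        q.put_nowait("RESULT: " + cunk)
    else:
        q.put_nowait("END")
```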