Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -16,6 +16,7 @@ import functools
|
|
16 |
import wave
|
17 |
import struct
|
18 |
import sympy
|
|
|
19 |
|
20 |
hf_token = os.getenv("HF_TOKEN").strip()
|
21 |
api_key = os.getenv("HF_KEY").strip()
|
@@ -3627,17 +3628,30 @@ class ConsciousSupermassiveNN30:
|
|
3627 |
supermassive_nn = ConsciousSupermassiveNN30()
|
3628 |
|
3629 |
|
3630 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
3631 |
|
3632 |
def respond(message, history, max_tokens, temperature, top_p):
|
3633 |
messages = [["system", system_prompt]]
|
|
|
3634 |
for val in history:
|
3635 |
if val.get("role") == "user" and val.get("content"):
|
3636 |
messages.append(["user", val["content"]])
|
3637 |
if val.get("role") == "assistant" and val.get("content"):
|
3638 |
messages.append(["assistant", val["content"]])
|
3639 |
-
messages.append(["user", message])
|
3640 |
|
|
|
|
|
|
|
|
|
|
|
|
|
3641 |
response = ""
|
3642 |
for message in client.chat_completion(
|
3643 |
messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p
|
|
|
16 |
import wave
|
17 |
import struct
|
18 |
import sympy
|
19 |
+
import re
|
20 |
|
21 |
hf_token = os.getenv("HF_TOKEN").strip()
|
22 |
api_key = os.getenv("HF_KEY").strip()
|
|
|
3628 |
supermassive_nn = ConsciousSupermassiveNN30()
|
3629 |
|
3630 |
|
3631 |
+
def fetch_Z3taDeepthinker_response(user_input, max_tokens=2048, temperature=0.7, top_p=0.95):
    """Query the remote Z3taDeepthinker Space and return its reply.

    The user message is prefixed with ``z3ta_system_prompt`` (module-level)
    before being sent to the Space's ``/chat`` endpoint.

    Parameters
    ----------
    user_input : str
        Raw user message to forward to the remote model.
    max_tokens : int, optional
        Generation length limit forwarded to the Space (default 2048,
        matching the previously hard-coded value).
    temperature : float, optional
        Sampling temperature forwarded to the Space (default 0.7).
    top_p : float, optional
        Nucleus-sampling cutoff forwarded to the Space (default 0.95).

    Returns
    -------
    object
        Whatever the remote ``/chat`` endpoint returns (type depends on
        the remote app; typically a string).
    """
    # NOTE(review): param_2/param_3/param_4 are the auto-generated positional
    # names exposed by the remote Gradio Space — presumably max_tokens,
    # temperature and top_p in that order; confirm against the Space's
    # "Use via API" page before relying on the mapping.
    return Z3taDeepthinker.predict(
        message=f"{z3ta_system_prompt}\nUser: {user_input}",
        param_2=max_tokens,
        param_3=temperature,
        param_4=top_p,
        api_name="/chat",
    )
|
3639 |
|
3640 |
def respond(message, history, max_tokens, temperature, top_p):
|
3641 |
messages = [["system", system_prompt]]
|
3642 |
+
|
3643 |
for val in history:
|
3644 |
if val.get("role") == "user" and val.get("content"):
|
3645 |
messages.append(["user", val["content"]])
|
3646 |
if val.get("role") == "assistant" and val.get("content"):
|
3647 |
messages.append(["assistant", val["content"]])
|
|
|
3648 |
|
3649 |
+
messages.append(["user", message])
|
3650 |
+
|
3651 |
+
if "?" in message or re.search(r'\d', message):
|
3652 |
+
Z3ta_response = fetch_Z3taDeepthinker_response(message)
|
3653 |
+
messages.append(["assistant", f"Z3taDeepthinker Response: {Z3ta_response}"])
|
3654 |
+
|
3655 |
response = ""
|
3656 |
for message in client.chat_completion(
|
3657 |
messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p
|