Update app.py
removed some unnecessary packages.
app.py CHANGED
@@ -7,14 +7,7 @@ import time
 import tempfile
 from huggingface_hub import InferenceClient
 
-
-
-Complex = """## Best in Complex Question"""
-
-Detail = """## Best for Detailed Generation or Long Answers"""
-
-client1 = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
-#client1 = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
+client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
 system_instructions1 = "<s>[SYSTEM] You are AI assistant named DorjGPT, Keep conversation very short, clear, friendly and concise. The text provided is a request for a specific type of response from you, the virtual assistant. The request asks you to provide friendly responses as if super interlligent AI assistant. The expectation is that I will avoid introductions and start answering the query directly, elaborating on all aspects of the request. As an AI-powered assistant, your task is to generate responses that appear as if they are created by the user and not an AI-powered assistant. [USER]"
 
@@ -39,7 +32,7 @@ async def generate1(prompt, history=[], b=None):
     )
     #formatted_prompt = system_instructions1 + prompt + "[JARVIS]"
     formatted_prompt = format_prompt(f"{system_instructions1}, {prompt}", history) + "[DORJGPT]"
-    stream = client1.text_generation(
+    stream = client.text_generation(
         formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=True)
 
     output = ""
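For readers following the change, here is a minimal sketch of how the stream returned by client.text_generation(..., stream=True, details=True) is typically consumed to build the reply string. The helper name stream_answer, the max_new_tokens value, and the special-token filtering are illustrative assumptions, not code from this commit.

from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

def stream_answer(formatted_prompt: str) -> str:
    # Illustrative sketch: with stream=True and details=True, text_generation
    # yields TextGenerationStreamOutput objects whose .token.text holds each
    # newly generated piece of text.
    stream = client.text_generation(
        formatted_prompt,
        max_new_tokens=512,   # assumed value; the Space passes its own generate_kwargs
        stream=True,
        details=True,
        return_full_text=False,
    )
    output = ""
    for response in stream:
        # Skip special tokens such as </s> so they do not leak into the reply.
        if not response.token.special:
            output += response.token.text
    return output

The commit's generate1 appears to follow the same accumulation pattern, starting from the output = "" initialised right after the text_generation call.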