Spaces: Running on Zero

Update app.py

app.py CHANGED
@@ -119,7 +119,6 @@ with gr.Blocks() as demo:
 
     def user_fn(user_message, history: list):
         return "", history + [{"role": "user", "content": user_message}]
-
 
     def generate_fn(history: list):
         # continue from user input
@@ -131,17 +130,17 @@ with gr.Blocks() as demo:
 
         history.append({"role": "assistant", "content": ""})
         # history[-1]["content"] += "Generating with the given prefix...\n"
-        for history in model_fn(prefix):
+        for history in model_fn(prefix, history):
             yield history
 
     def continue_fn(history: list):
         # continue from the last model output
         prefix = history[-1]["content"]
-        for history in model_fn(prefix):
+        for history in model_fn(prefix, history):
             yield history
 
     @spaces.GPU
-    def model_fn(prefix):
+    def model_fn(prefix, history):
         queue = Queue(maxsize=10)
         class MyStreamer:
             def put(self, tokens):
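The change threads the chat history into model_fn, presumably so the streamed output can be appended to the open assistant turn and yielded straight back to the Chatbot. Below is a rough sketch, not this Space's actual code, of how a model_fn(prefix, history) with a queue-fed streamer is commonly wired up; the names model, tokenizer, and max_new_tokens=512 are assumptions that are not visible in this diff.

# Rough sketch only -- assumes a transformers `model` and `tokenizer` are
# loaded elsewhere in app.py; neither is shown in this diff.
from queue import Queue
from threading import Thread

import spaces


@spaces.GPU
def model_fn(prefix, history):
    queue = Queue(maxsize=10)

    class MyStreamer:
        # transformers calls put() once with the prompt ids, then with each
        # newly generated token, and end() when generation finishes.
        def put(self, tokens):
            if not hasattr(self, "_prompt_seen"):
                self._prompt_seen = True  # skip the prompt echo
                return
            queue.put(tokenizer.decode(tokens.reshape(-1), skip_special_tokens=True))

        def end(self):
            queue.put(None)  # sentinel: generation is done

    def generate():
        inputs = tokenizer(prefix, return_tensors="pt").to(model.device)
        model.generate(**inputs, streamer=MyStreamer(), max_new_tokens=512)

    # run generation in the background so chunks can be yielded as they arrive
    Thread(target=generate).start()

    while (chunk := queue.get()) is not None:
        history[-1]["content"] += chunk  # stream into the open assistant turn
        yield history

generate_fn and continue_fn then just iterate this generator and re-yield its output, which is exactly what the updated "for history in model_fn(prefix, history)" loops in the diff do.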