upd
app.py CHANGED
@@ -15,17 +15,6 @@ from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer
 model_chtoen = M2M100ForConditionalGeneration.from_pretrained("facebook/m2m100_418M")
 tokenizer_chtoen = M2M100Tokenizer.from_pretrained("facebook/m2m100_418M")
 
-#Streaming endpoint for OPENAI ChatGPT
-API_URL = "https://api.openai.com/v1/chat/completions"
-#Streaming endpoint for OPENCHATKIT
-API_URL_TGTHR = os.getenv('API_URL_TGTHR')
-
-openchat_preprompt = (
-    "\n<human>: Hi!\n<bot>: My name is Bot, model version is 0.15, part of an open-source kit for "
-    "fine-tuning new bots! I was created by Together, LAION, and Ontocord.ai and the open-source "
-    "community. I am not human, not evil and not alive, and thus have no thoughts and feelings, "
-    "but I am programmed to be helpful, polite, honest, and friendly.\n")
-
 #Predict function for CHATGPT
 def predict_chatgpt(inputs, top_p_chatgpt, temperature_chatgpt, openai_api_key, chat_counter_chatgpt, chatbot_chatgpt=[], history=[]):
     #Define payload and header for chatgpt API
@@ -189,16 +178,18 @@ def translate_Chinese_English(chinese_text):
     return trans_eng_text[0]
 
 """
-
-
-
-
-
-
-
-
-
-
+def predict(input, max_length, top_p, temperature, history=None):
+    if history is None:
+        history = []
+    for response, history in model.stream_chat(tokenizer, input, history, max_length=max_length, top_p=top_p,
+                                               temperature=temperature):
+        updates = []
+        for query, response in history:
+            updates.append(gr.update(visible=True, value="用户:" + query))
+            updates.append(gr.update(visible=True, value="ChatGLM-6B:" + response))
+        if len(updates) < MAX_BOXES:
+            updates = updates + [gr.Textbox.update(visible=False)] * (MAX_BOXES - len(updates))
+        yield [history] + updates
 """
 
 def reset_textbox():
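The code added between the triple quotes (still inactive in this commit) is a streaming handler for ChatGLM-6B: each yield from model.stream_chat refreshes a fixed bank of MAX_BOXES hidden textboxes with the running conversation. Below is a minimal, self-contained sketch of that wiring under stated assumptions: fake_stream_chat stands in for the real model.stream_chat(tokenizer, ...), the value MAX_BOXES = 10 and the Blocks layout are invented for the demo, and gr.update is used in place of the older gr.Textbox.update call seen in the diff.

import gradio as gr

MAX_BOXES = 10  # assumed value; the real constant is defined elsewhere in app.py

def fake_stream_chat(query, history):
    # Stand-in for ChatGLM-6B's model.stream_chat(tokenizer, ...):
    # yields a growing partial response so the UI can update as tokens arrive.
    partial = ""
    for token in ["Hello", ", ", "world", "!"]:
        partial += token
        yield partial, history + [(query, partial)]

def predict(user_input, history):
    # Mirrors the diffed generator: one (user, bot) textbox pair per turn,
    # padded with hidden boxes up to MAX_BOXES. Assumes at most MAX_BOXES // 2
    # turns, as the diffed code does.
    history = history or []
    for _, history in fake_stream_chat(user_input, history):
        updates = []
        for query, response in history:
            updates.append(gr.update(visible=True, value="用户:" + query))  # "用户" = "User"
            updates.append(gr.update(visible=True, value="ChatGLM-6B:" + response))
        if len(updates) < MAX_BOXES:
            updates += [gr.update(visible=False)] * (MAX_BOXES - len(updates))
        yield [history] + updates

with gr.Blocks() as demo:
    state = gr.State([])
    boxes = [gr.Textbox(visible=False) for _ in range(MAX_BOXES)]
    txt = gr.Textbox(label="Input")
    # Each yielded list maps onto [state] + boxes, one update per output component.
    txt.submit(predict, [txt, state], [state] + boxes)

demo.queue().launch()

Because predict is a generator, Gradio streams every yielded list of updates to the outputs in order, so the queue must be enabled for the partial responses to appear incrementally.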