Update app.py
app.py CHANGED
@@ -1,5 +1,3 @@
-
-import time
 import gradio as gr
 from huggingface_hub import InferenceClient
 from optimum.intel import OVModelForCausalLM
@@ -14,40 +12,20 @@ tokenizer = AutoTokenizer.from_pretrained(model_id)
 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
 
 def respond(message, history):
-    start_time = time.time()  # record the start time
-
     # combine the current message with the history
     input_text = message if not history else history[-1]["content"] + " " + message
     input_text = message
-    #
-
-
-
-
-    response = pipe(input_text, max_length=max_length, truncation=False, num_return_sequences=1)
-    reply = response[0]['generated_text']
-    output_text += reply
-
-    # check whether the reply contains the stop token or generation has ended
-    if stop_token in reply or len(output_text) >= max_length:
-        output_text = output_text.split(stop_token)[0]  # drop everything after the stop token
-        break
-
-    # update the input text and keep generating
-    input_text = reply
-
-    end_time = time.time()  # record the end time
-    duration = end_time - start_time  # compute the elapsed time
-
-    # print the elapsed time to the console
+    # get the model's response
+    response = pipe(input_text, max_length=1024, truncation=True, num_return_sequences=1)
+    reply = response[0]['generated_text']
+
+    # return in the new message format
     print(f"Message: {message}")
-    print(f"Reply: {
-
-
-    return output_text
-
+    print(f"Reply: {reply}")
+    return reply
+
 # set up the Gradio chat interface
-demo = gr.ChatInterface(fn=respond, title="Phi-3.5-mini-instruct-openvino", description="Phi-3.5-mini-instruct-openvino", type='messages')
+demo = gr.ChatInterface(fn=respond, title="Chat with Phi-3.5-mini-instruct-openvino", description="Chat with Phi-3.5-mini-instruct-openvino!", type='messages')
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
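The diff only shows the changed regions of app.py; the model and tokenizer setup between the imports and pipe = pipeline(...) is elided, apart from the tokenizer = AutoTokenizer.from_pretrained(model_id) line visible in the second hunk header. A minimal sketch of what that setup block plausibly looks like, assuming a model_id for an OpenVINO-exportable Phi-3.5-mini-instruct checkpoint (the actual id and loading options are not shown in this diff):

from optimum.intel import OVModelForCausalLM
from transformers import AutoTokenizer, pipeline

# Assumed model id; the real value is not visible in this diff.
model_id = "microsoft/Phi-3.5-mini-instruct"

# Load the causal LM through optimum-intel; export=True converts the
# transformers checkpoint to OpenVINO IR at load time.
model = OVModelForCausalLM.from_pretrained(model_id, export=True)
tokenizer = AutoTokenizer.from_pretrained(model_id)

# The OpenVINO model drops into the standard transformers text-generation
# pipeline, which is what the updated respond() calls.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

With that pipeline in place, the updated respond() generates a single completion per message (max_length=1024 with truncation enabled) and returns the generated text for gr.ChatInterface(..., type='messages') to append to the chat history; the removed stop_token/output_text bookkeeping and timing code are no longer needed.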