app.py
CHANGED
@@ -1,12 +1,18 @@
 import spaces
 import gradio as gr
-import transformers
+import torch
+from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
 import os
 
+if not os.path.exists("./phi-4"):
+    os.system('huggingface-cli download matteogeniaccio/phi-4 --local-dir ./phi-4 --include "phi-4/*"')
+
+import transformers
+
 # Initialize the pipeline
 pipeline = transformers.pipeline(
     "text-generation",
-    model="…",
+    model="./phi-4/phi-4",
     model_kwargs={"torch_dtype": "auto"},
     device_map="auto",
 )
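
For context, below is a minimal sketch of how the rest of app.py might call this pipeline from a Gradio handler on Spaces. The handler name, prompt format, and generation parameters are illustrative assumptions, not taken from the diff.

# Illustrative sketch (assumed, not part of the commit): a ZeroGPU-decorated chat
# handler that feeds chat-format messages to the text-generation pipeline above.
@spaces.GPU
def generate(message, history):
    # transformers text-generation pipelines accept a list of {"role", "content"}
    # messages and apply the model's chat template automatically.
    messages = [{"role": "user", "content": message}]
    outputs = pipeline(messages, max_new_tokens=512)
    # The pipeline returns the full conversation; the last message is the reply.
    return outputs[0]["generated_text"][-1]["content"]

demo = gr.ChatInterface(generate)
demo.launch()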