import os
import json

import gradio as gr
import requests
import torch
from peft import PeftModel
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

offload_folder = "offload/"
# If a stray file occupies the offload path, remove it so a directory can be created.
if os.path.isfile(offload_folder):
    os.remove(offload_folder)
# Directory used to offload model weights that do not fit in memory.
os.makedirs(offload_folder, exist_ok=True)

model_id = "deepseek-ai/deepseek-coder-1.3b-base"
lora_id = "Seunggg/lora-plant"

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)

# Load the base model, letting accelerate place layers across GPU/CPU/disk.
base = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",
    offload_folder=offload_folder,
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
    trust_remote_code=True,
)

# Attach the LoRA adapter on top of the base model.
model = PeftModel.from_pretrained(
    base,
    lora_id,
    device_map="auto",
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
)
model.eval()
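
# Optional sketch (not in the original app): merging the LoRA weights into the
# base model with PEFT's merge_and_unload() removes the adapter indirection at
# inference time. Whether this helps here is an assumption, especially with
# weights offloaded to disk, so it is left commented out.
# model = model.merge_and_unload()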

pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    device_map="auto",
    max_new_tokens=256,
)
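
# Assumption: with no sampling flags the pipeline decodes greedily. For more
# varied answers, generation kwargs could be passed per call, e.g.:
# pipe(prompt, do_sample=True, temperature=0.7, top_p=0.9)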

def get_sensor_data():
    try:
        sensor_response = requests.get("https://arduino-realtime.onrender.com/api/data", timeout=5)
        sensor_data = sensor_response.json().get("sensorData", None)
        return json.dumps(sensor_data, ensure_ascii=False, indent=2) if sensor_data else "暂无传感器数据"
    except Exception as e:
        return "⚠️ 获取失败:" + str(e)

def respond(user_input):
    # Show the latest sensor readings alongside the generated advice.
    sensor_display = get_sensor_data()
    if not user_input.strip():
        return sensor_display, "请输入植物相关的问题 😊"

    prompt = f"用户提问:{user_input}\n"
    try:
        sensor_response = requests.get("https://arduino-realtime.onrender.com/api/data", timeout=5)
        sensor_data = sensor_response.json().get("sensorData", None)
        if sensor_data:
            prompt += f"当前传感器数据:{json.dumps(sensor_data, ensure_ascii=False)}\n"
        prompt += "请用更人性化的语言生成建议,并推荐相关植物文献或资料。\n回答:"
        result = pipe(prompt)
        full_output = result[0]["generated_text"]
        # The pipeline echoes the prompt; strip it so only the new answer remains.
        answer = full_output.replace(prompt, "").strip()
    except Exception as e:
        answer = f"生成建议时出错:{str(e)}"
    return sensor_display, answer

# Unused helper; returns the value directly rather than via gr.Textbox.update,
# which newer Gradio releases removed.
def auto_update_sensor():
    return get_sensor_data()

with gr.Blocks() as demo:
    gr.Markdown("# 🌱 植物助手 - 实时联动版")
    sensor_box = gr.Textbox(label="🧪 当前传感器数据", lines=6, interactive=False)
    question = gr.Textbox(label="植物问题", lines=4, placeholder="请输入植物相关的问题 😊")
    answer_box = gr.Textbox(label="🤖 回答建议", lines=8, interactive=False)
    send_btn = gr.Button("发送")

    demo.load(fn=get_sensor_data, inputs=None, outputs=sensor_box, every=5)
    send_btn.click(fn=respond, inputs=question, outputs=[sensor_box, answer_box])
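
# Assumption: some Gradio versions require the queue for the `every=` polling
# above; enabling it explicitly is harmless where it is already the default.
demo.queue()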
demo.launch()