|
import json

import gradio as gr
import requests
import torch
from peft import PeftModel
from transformers import AutoTokenizer, AutoModelForCausalLM
|
|
|
# Hugging Face repo of the base causal LM (DeepSeek-Coder 1.3B).
model_id = "deepseek-ai/deepseek-coder-1.3b-base"

# LoRA adapter weights layered on top of the base model.
lora_id = "Seunggg/lora-plant"


# Tokenizer matching the base model; trust_remote_code runs the repo's own
# tokenizer code if the model ships custom classes.
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
|
|
|
|
|
# Half precision when a GPU is present, full fp32 otherwise.
_base_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

# Load the base causal LM, letting Accelerate place layers automatically
# ("auto") and spill anything that doesn't fit to the offload/ directory.
base = AutoModelForCausalLM.from_pretrained(
    model_id,
    trust_remote_code=True,
    device_map="auto",
    offload_folder="offload/",
    torch_dtype=_base_dtype,
)
|
|
|
|
|
# Wrap the base model with the LoRA adapter weights, mirroring the base
# model's precision and offload settings, then switch to inference mode.
_adapter_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
model = PeftModel.from_pretrained(
    base,
    lora_id,
    offload_folder="offload/",
    torch_dtype=_adapter_dtype,
)
model.eval()
|
|
|
|
|
from transformers import pipeline

# Text-generation pipeline over the LoRA-augmented model; capped at 256
# freshly generated tokens per request.
pipe = pipeline(
    task="text-generation",
    model=model,
    tokenizer=tokenizer,
    device_map="auto",
    max_new_tokens=256,
)
|
|
|
from ask_api import ask_with_sensor |
|
|
|
def respond(user_input, sensor_json=""):
    """Generate a plant-care suggestion for *user_input*.

    The Gradio interface supplies two positional arguments (one per input
    component), so this function must accept both; previously it took only
    one and every submission raised a TypeError. *sensor_json* is the
    optional manually-pasted JSON from the second textbox; when it is empty
    or unparseable, a live reading is fetched from the Render endpoint.
    Sensor data is best-effort: any failure to obtain it just leaves it out
    of the prompt.

    Returns the generated suggestion text, or an error message string.
    """
    if not user_input.strip():
        return "请输入植物相关的问题 😊"

    # Prefer user-supplied JSON over a network round-trip.
    sensor_data = None
    if sensor_json and sensor_json.strip():
        try:
            sensor_data = json.loads(sensor_json)
        except (json.JSONDecodeError, TypeError):
            sensor_data = None

    if sensor_data is None:
        # NOTE: `requests` was used here without ever being imported; the
        # resulting NameError was swallowed by a broad except, so sensor data
        # was silently always None. Fixed via the top-of-file import.
        try:
            sensor_response = requests.get(
                "https://arduino-realtime.onrender.com/api/data", timeout=5
            )
            sensor_data = sensor_response.json().get("sensorData", None)
        except (requests.RequestException, ValueError):
            sensor_data = None

    prompt = f"用户提问:{user_input}\n"
    if sensor_data:
        prompt += f"当前传感器数据:{json.dumps(sensor_data, ensure_ascii=False)}\n"
    prompt += "请用更人性化的语言生成建议,并推荐相关植物文献或资料。\n回答:"

    try:
        result = pipe(prompt)
        return result[0]["generated_text"]
    except Exception as e:  # surface generation failures in the UI instead of crashing
        return f"生成建议时出错:{str(e)}"
|
|
|
|
|
# Web UI: first box is the plant question, second is optional sensor JSON.
# Gradio passes one positional argument to `fn` per input component, so
# `fn` must accept exactly as many parameters as there are inputs here.
gr.Interface(
    fn=respond,
    inputs=[
        gr.Textbox(lines=4, label="植物问题"),
        gr.Textbox(lines=2, label="传感器数据 (JSON 格式)", placeholder='{"temperature": 25, "humidity": 60}')
    ],
    outputs="text",
    title="🌱 植物助手 - 本地 LoRA + Render 联动版",
    description="结合本地建议和传感器分析结果。"
).launch()
|
|
|
|
|
|