|
import gradio as gr |
|
from transformers import AutoTokenizer, AutoModelForCausalLM |
|
from peft import PeftModel |
|
import torch |
|
import gradio as gr |
|
|
|
# Hugging Face Hub identifiers: the base model and the LoRA adapter
# that fine-tunes it for plant-care advice.
model_id = "deepseek-ai/deepseek-coder-1.3b-base"

lora_id = "Seunggg/lora-plant"


# Tokenizer for the base model. trust_remote_code allows custom code
# shipped with the model repo to run — assumes the repo is trusted.
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)


# Load the base causal LM. device_map="auto" places layers across the
# available devices; weights that do not fit are offloaded to the
# "offload/" folder on disk. Use fp16 only when a GPU is present
# (fp16 inference on CPU is poorly supported).
base = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",
    offload_folder="offload/",
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
    trust_remote_code=True
)


# Attach the LoRA adapter weights on top of the base model.
model = PeftModel.from_pretrained(
    base,
    lora_id,
    offload_folder="offload/",
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32
)

# Inference-only mode (disables dropout etc.).
model.eval()
|
|
|
|
|
from transformers import pipeline

# Text-generation pipeline wrapping the LoRA-augmented model.
# max_new_tokens caps the length of each generated answer.
# NOTE(review): the model was already loaded with device_map="auto";
# passing device_map here again may be redundant or rejected by some
# transformers versions — confirm against the installed version.
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    device_map="auto",
    max_new_tokens=256
)
|
|
|
from ask_api import ask_with_sensor |
|
|
|
def respond(user_input, sensor_data_input):
    """Answer a plant-care question with a local LoRA suggestion plus a
    remote sensor-based analysis.

    Args:
        user_input: The user's plant question (free text).
        sensor_data_input: Optional sensor readings as a JSON object
            string, e.g. '{"temperature": 25, "humidity": 60}'.

    Returns:
        A formatted string combining the local model's suggestion and
        the result of ``ask_with_sensor``.
    """
    import json  # function-scope import keeps this fix self-contained

    if not user_input.strip():
        return "请输入植物相关的问题 :)"

    prompt = f"{user_input}\n请用人性化语言生成建议并推荐相关植物资料。\n回答:"
    # The pipeline echoes the prompt in "generated_text"; strip it so
    # only the newly generated answer remains.
    local_result = pipe(prompt)[0]["generated_text"].replace(prompt, "").strip()

    # SECURITY FIX: the original used eval() on user-supplied text,
    # which executes arbitrary code. Parse strictly as JSON instead,
    # and keep the original best-effort fallback to an empty dict on
    # malformed input (narrow exceptions, no bare except).
    try:
        sensor_data = json.loads(sensor_data_input) if sensor_data_input else {}
    except (ValueError, TypeError):
        sensor_data = {}
    if not isinstance(sensor_data, dict):
        # Valid JSON but not an object (e.g. a list or number) — treat
        # as missing, matching the "no usable sensor data" path.
        sensor_data = {}

    api_result = ask_with_sensor(user_input, sensor_data)

    return f"💡 本地建议:\n{local_result}\n\n🌐 传感器分析:\n{api_result}"
|
|
|
|
|
|
|
# Gradio UI: a free-text question box and an optional JSON sensor-data
# box, both routed into respond(); launch() starts the web app.
question_box = gr.Textbox(lines=4, label="植物问题")
sensor_box = gr.Textbox(
    lines=2,
    label="传感器数据 (JSON 格式)",
    placeholder='{"temperature": 25, "humidity": 60}',
)

demo = gr.Interface(
    fn=respond,
    inputs=[question_box, sensor_box],
    outputs="text",
    title="🌱 植物助手 - 本地 LoRA + Render 联动版",
    description="结合本地建议和传感器分析结果。",
)
demo.launch()
|
|
|
|
|
|