Seunggg committed
Commit ecec6e3 · verified · 1 Parent(s): fa83b7a

Update app.py

Files changed (1)
  1. app.py +39 -54
app.py CHANGED
@@ -4,25 +4,6 @@ from peft import PeftModel
 import torch
 import requests
 import json
-import shutil, os
-
-offload_folder = "offload/"
-
-# If it is a file, delete it
-if os.path.isfile(offload_folder):
-    os.remove(offload_folder)
-
-# If the directory does not exist, create it
-if not os.path.exists(offload_folder):
-    os.makedirs(offload_folder)
-
-
-offload_folder = "offload/"
-print(f"路径是否存在: {os.path.exists(offload_folder)}")
-print(f"是否是目录: {os.path.isdir(offload_folder)}")
-print(f"是否是文件: {os.path.isfile(offload_folder)}")
-
-os.makedirs(offload_folder, exist_ok=True)
 
 model_id = "deepseek-ai/deepseek-coder-1.3b-base"
 lora_id = "Seunggg/lora-plant"
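The block removed above only ensured that an offload/ directory existed (deleting a stray file of the same name first) and printed a few diagnostics. If offloading is ever brought back, a minimal sketch of the same bootstrap, not part of the commit, would be:

import os

offload_folder = "offload/"
# Remove a stray file with the same name, then create the directory idempotently.
if os.path.isfile(offload_folder):
    os.remove(offload_folder)
os.makedirs(offload_folder, exist_ok=True)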
@@ -31,20 +12,15 @@ tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
 
 base = AutoModelForCausalLM.from_pretrained(
     model_id,
-    device_map="auto",
-    offload_folder=offload_folder,
-    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
+    torch_dtype=torch.float32,  # Hugging Face Spaces generally uses float32
     trust_remote_code=True
 )
 
-
 model = PeftModel.from_pretrained(
     base,
     lora_id,
-    device_map="auto",
-    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32
+    torch_dtype=torch.float32
 )
-
 model.eval()
 
 from transformers import pipeline
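The commit pins torch_dtype to float32, which is the safe default on a CPU-only Hugging Face Space. A minimal sketch, not part of the commit, of keeping the hardware-aware dtype the old code used while still falling back to float32 on CPU:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

model_id = "deepseek-ai/deepseek-coder-1.3b-base"
lora_id = "Seunggg/lora-plant"

# float16 only when a GPU is available; float32 otherwise (e.g. CPU Spaces).
dtype = torch.float16 if torch.cuda.is_available() else torch.float32

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
base = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=dtype, trust_remote_code=True)
model = PeftModel.from_pretrained(base, lora_id, torch_dtype=dtype)
model.eval()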
@@ -52,48 +28,57 @@ pipe = pipeline(
     "text-generation",
     model=model,
     tokenizer=tokenizer,
-    device_map="auto",
     max_new_tokens=256
 )
 
 def get_sensor_data():
     try:
-        sensor_response = requests.get("https://arduino-realtime.onrender.com/api/data", timeout=5)
-        sensor_data = sensor_response.json().get("sensorData", None)
-        return json.dumps(sensor_data, ensure_ascii=False, indent=2) if sensor_data else "暂无传感器数据"
+        res = requests.get("https://arduino-realtime.onrender.com/api/data", timeout=5)
+        sensor_data = res.json().get("sensorData", None)
+        return sensor_data if sensor_data else {}
     except Exception as e:
-        return "⚠️ 获取失败:" + str(e)
+        return {"错误": str(e)}
+
+def sensor_display_text():
+    sensor_data = get_sensor_data()
+    return json.dumps(sensor_data, ensure_ascii=False, indent=2) if sensor_data else "暂无传感器数据"
 
-def respond(user_input):
-    sensor_display = get_sensor_data()
+def generate_answer(user_input):
     if not user_input.strip():
-        return sensor_display, "请输入植物相关的问题 😊"
-    prompt = f"用户提问:{user_input}\n"
+        return "请输入植物相关的问题 😊"
+    prompt = f"用户提问:{user_input}\n请用更人性化的语言生成建议,并推荐相关植物文献或资料。\n回答:"
     try:
-        sensor_response = requests.get("https://arduino-realtime.onrender.com/api/data", timeout=5)
-        sensor_data = sensor_response.json().get("sensorData", None)
-        if sensor_data:
-            prompt += f"当前传感器数据:{json.dumps(sensor_data, ensure_ascii=False)}\n"
-        prompt += "请用更人性化的语言生成建议,并推荐相关植物文献或资料。\n回答:"
         result = pipe(prompt)
-        full_output = result[0]["generated_text"]
-        answer = full_output.replace(prompt, "").strip()
+        output = result[0]["generated_text"]
+        return output.replace(prompt, "").strip()
     except Exception as e:
-        answer = f"生成建议时出错:{str(e)}"
-    return sensor_display, answer
-
-def auto_update_sensor():
-    return gr.Textbox.update(value=get_sensor_data())
+        return f"生成建议时出错:{str(e)}"
+
+def update_chart():
+    sensor_data = get_sensor_data()
+    if not sensor_data or "温度" not in sensor_data:
+        return gr.LinePlot.update(value=None)
+    return {
+        "data": [
+            {"x": [0], "y": [sensor_data.get("温度", 0)], "name": "温度"},
+            {"x": [0], "y": [sensor_data.get("湿度", 0)], "name": "湿度"}
+        ],
+        "layout": {"title": "实时传感器数据"}
+    }
 
 with gr.Blocks() as demo:
-    gr.Markdown("# 🌱 植物助手 - 实时联动版")
-
-    sensor_box = gr.Textbox(label="🧪 当前传感器数据", lines=6, interactive=False)
-    question = gr.Textbox(label="植物问题", lines=4, placeholder="请输入植物相关的问题 😊")
+    gr.Markdown("# 🌱 植物助手 - 实时传感器联动")
+
+    with gr.Row():
+        sensor_box = gr.Textbox(label="🧪 当前传感器数据", lines=6, interactive=False)
+        chart = gr.LinePlot(label="📈 实时数据图表", x="x", y="y", overlay=True)
+
+    question = gr.Textbox(label="🌿 植物问题", lines=4, placeholder="请输入植物相关的问题 😊")
    answer_box = gr.Textbox(label="🤖 回答建议", lines=8, interactive=False)
     send_btn = gr.Button("发送")
 
-    demo.load(fn=get_sensor_data, inputs=None, outputs=sensor_box, every=5)
-    send_btn.click(fn=respond, inputs=question, outputs=[sensor_box, answer_box])
+    demo.load(fn=sensor_display_text, inputs=None, outputs=sensor_box, every=5)
+    demo.load(fn=update_chart, inputs=None, outputs=chart, every=5)
+    send_btn.click(fn=generate_answer, inputs=question, outputs=answer_box)
 
-    demo.launch()
+demo.launch()
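One caveat about the new update_chart: gr.LinePlot is a native Gradio plot component that expects tabular data, typically a pandas DataFrame whose columns match the x/y names given to the component, so the Plotly-style {"data": ..., "layout": ...} dict it returns will most likely leave the chart empty. A sketch of a DataFrame-returning variant, not part of the commit, assuming Gradio 3.x (where gr.LinePlot.update() still exists, as used in the commit) and the same 温度/湿度 keys in the sensor payload:

import pandas as pd
import gradio as gr

def update_chart():
    sensor_data = get_sensor_data()  # same helper as in the commit
    if not sensor_data or "温度" not in sensor_data:
        return gr.LinePlot.update(value=None)  # clear the plot when there is no usable reading
    # Long-format frame: one row per series, so temperature and humidity share the plot.
    df = pd.DataFrame({
        "x": [0, 0],
        "y": [sensor_data.get("温度", 0), sensor_data.get("湿度", 0)],
        "series": ["温度", "湿度"],
    })
    return df

# Matching component declaration, one colour per series:
# chart = gr.LinePlot(label="📈 实时数据图表", x="x", y="y", color="series")

A single reading gives a one-point "line"; for a real time series the handler would need to append each poll (for example with a timestamp column) to a growing DataFrame instead of rebuilding it from one sample. In Gradio 4 the *.update() helpers were removed, so the empty case would typically return a fresh gr.LinePlot(value=None) instead.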