Seunggg committed
Commit fd45282 · verified · 1 Parent(s): cbdc373

Update app.py

Files changed (1)
  1. app.py +20 -41
app.py CHANGED
@@ -1,46 +1,25 @@
- import os, gradio as gr
- from openai import OpenAI

- # Initialize the DeepSeek client
- client = OpenAI(
-     api_key=os.getenv("DEEPSEEK_API_KEY"),
-     base_url="https://api.deepseek.com/v1"
- )

- def analyze_stream(temperature, light, soil_humidity):
-     """
-     Take three environmental readings and call the DeepSeek chat model for a streaming analysis.
-     Yield the generated tokens back to the frontend in real time.
-     """
-     messages = [
-         {"role": "system", "content": "You are a plant care assistant."},
-         {"role": "user", "content":
-             f"Temperature: {temperature}°C, Light: {light} lux, Soil Humidity: {soil_humidity}%."}
-     ]
-     stream = client.chat.completions.create(
-         model="deepseek-chat",
-         messages=messages,
-         temperature=0.7,
-         stream=True
-     )
-     partial = ""
-     for chunk in stream:
-         delta = chunk.choices[0].delta.content or ""
-         partial += delta
-         yield partial  # keep emitting the text accumulated so far

- # Define the Gradio interface
- demo = gr.Interface(
-     fn=analyze_stream,
-     inputs=[
-         gr.Number(label="Temperature (°C)"),
-         gr.Number(label="Light (lux)"),
-         gr.Number(label="Soil Humidity (%)")
-     ],
-     outputs=gr.Textbox(label="DeepSeek 分析结果"),
-     title="🌱 实时植物环境分析",
-     description="输入当前环境参数后,系统将流式返回 DeepSeek 的分析建议。",
  )

- # Enable Gradio's queue and streaming output
- demo.queue(default_concurrency_limit=2).launch(server_name="0.0.0.0")
 
+ import gradio as gr
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ import torch

+ model_id = "your-username/lora-plant-deepseek"
+
+ # Load the model and tokenizer
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
+ model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto")

+ def plant_ask(user_input):
+     prompt = f"用户提问:{user_input}\n请用人性化语言回答,并建议一些可查阅的植物文献资料。\n回答:"
+     inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
+     outputs = model.generate(**inputs, max_new_tokens=300)
+     return tokenizer.decode(outputs[0], skip_special_tokens=True)

+ iface = gr.Interface(
+     fn=plant_ask,
+     inputs="text",
+     outputs="text",
+     title="🌱 植物助手问答系统",
+     description="欢迎提问关于植物养护、生长环境、病虫害防治等问题,我会尽力给出人性化建议和文献推荐。",
  )

+ iface.launch()
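
Note: the pre-commit version streamed partial output to the UI and enabled Gradio's queue, while the committed plant_ask blocks until generation finishes and returns the full decoded sequence, which also echoes the prompt back to the user. If streaming is still wanted with the local transformers model, a minimal sketch along these lines could stand in for plant_ask; it reuses the same model_id placeholder and prompt string as above, and relies on transformers' TextIteratorStreamer to yield text while generate() runs in a background thread (skip_prompt=True drops the prompt echo). This is an illustrative sketch, not part of the commit.

from threading import Thread
from transformers import TextIteratorStreamer

def plant_ask_stream(user_input):
    # Same prompt format as the committed plant_ask
    prompt = f"用户提问:{user_input}\n请用人性化语言回答,并建议一些可查阅的植物文献资料。\n回答:"
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    # Stream decoded text as generate() produces it; skip the prompt and special tokens
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    Thread(target=model.generate, kwargs=dict(**inputs, streamer=streamer, max_new_tokens=300)).start()
    partial = ""
    for new_text in streamer:
        partial += new_text
        yield partial  # Gradio re-renders the growing text on each yield

# Generator functions need the queue, as in the pre-commit version
iface = gr.Interface(fn=plant_ask_stream, inputs="text", outputs="text")
iface.queue().launch()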