cody82 committed on
Commit
b10ba12
·
verified ·
1 Parent(s): 9c601ea

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -9
app.py CHANGED
@@ -1,20 +1,19 @@
1
  import torch
 
2
  from transformers import AutoModelForCausalLM, AutoTokenizer
3
 
4
- model_id = "cody82/unitrip" # или другой ID модели с Huggingface
5
-
6
- tokenizer = AutoTokenizer.from_pretrained(model_id) # без local_files_only
7
- model = AutoModelForCausalLM.from_pretrained(model_id) # без local_files_only
8
 
 
 
9
  device = "cuda" if torch.cuda.is_available() else "cpu"
10
  model.to(device)
11
 
12
  system_message = "Ты — умный помощник по Университету Иннополис."
13
 
14
- while True:
15
- user_input = input("🧑 Вопрос: ").strip()
16
- if user_input.lower() in ["exit", "выход", "quit"]:
17
- break
18
 
19
  prompt = f"{system_message}\nUser: {user_input}\nAssistant:"
20
 
@@ -32,4 +31,8 @@ while True:
32
 
33
  generated = outputs[0][inputs["input_ids"].shape[1]:]
34
  answer = tokenizer.decode(generated, skip_special_tokens=True)
35
- print(f"🤖 Ответ: {answer.strip()}\n")
 
 
 
 
 
1
  import torch
2
+ import gradio as gr
3
  from transformers import AutoModelForCausalLM, AutoTokenizer
4
 
5
+ model_id = "cody82/unitrip"
 
 
 
6
 
7
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
8
+ model = AutoModelForCausalLM.from_pretrained(model_id)
9
  device = "cuda" if torch.cuda.is_available() else "cpu"
10
  model.to(device)
11
 
12
  system_message = "Ты — умный помощник по Университету Иннополис."
13
 
14
+ def respond(user_input, history=None):
15
+ if history is None:
16
+ history = []
 
17
 
18
  prompt = f"{system_message}\nUser: {user_input}\nAssistant:"
19
 
 
31
 
32
  generated = outputs[0][inputs["input_ids"].shape[1]:]
33
  answer = tokenizer.decode(generated, skip_special_tokens=True)
34
+ history.append((user_input, answer.strip()))
35
+ return history, history
36
+
37
+ iface = gr.ChatInterface(fn=respond, title="Innopolis Q&A")
38
+ iface.launch()