DGameHF committed on
Commit 9266308 · verified · 1 Parent(s): f2d7640

Create app.py

Files changed (1)
  1. app.py +22 -0
app.py ADDED
@@ -0,0 +1,22 @@
+ import gradio as gr
+ import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ # Load the GPTQ-quantized model (a GPTQ backend such as optimum/auto-gptq must be installed)
+ model_name = "TheBloke/MythoMax-L2-13B-GPTQ"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto", torch_dtype=torch.float16)
+
+ def chat(message, history):
+     # Convert Gradio's default (user, assistant) history pairs into the
+     # role/content dicts that apply_chat_template expects
+     messages = []
+     for user_msg, bot_msg in history:
+         messages.append({"role": "user", "content": user_msg})
+         if bot_msg:
+             messages.append({"role": "assistant", "content": bot_msg})
+     messages.append({"role": "user", "content": message})
+
+     # Build the prompt from the tokenizer's chat template and generate a reply
+     input_text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+     inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
+     outputs = model.generate(**inputs, max_new_tokens=300)
+
+     # Decode only the newly generated tokens (everything after the prompt)
+     response = tokenizer.decode(outputs[0, inputs.input_ids.shape[1]:], skip_special_tokens=True)
+     return response
+
+ # Gradio UI
+ with gr.Blocks() as demo:
+     gr.Markdown("## MythoMax AI Chatbot 💬")
+     chatbox = gr.ChatInterface(chat)
+
+ demo.launch()
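
As a sanity check, the history-to-messages conversion inside chat() can be exercised on its own, without downloading the 13B checkpoint. A minimal sketch; build_messages is a hypothetical standalone copy of that logic and is not part of this commit:

def build_messages(message, history):
    # Mirror of the conversion in chat(): (user, assistant) pairs -> role/content dicts
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})
    return messages

print(build_messages("How are you?", [("Hi", "Hello! How can I help?")]))
# [{'role': 'user', 'content': 'Hi'},
#  {'role': 'assistant', 'content': 'Hello! How can I help?'},
#  {'role': 'user', 'content': 'How are you?'}]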