LeenAnabtawe committed
Commit 3964b4c · verified · 1 Parent(s): 1b11def

Create app.py

Files changed (1)
app.py +40 -0
app.py ADDED
@@ -0,0 +1,40 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+ import torch
+
+ # Four code-generation models
+ models = {
+     "CodeGen (Salesforce)": "Salesforce/codegen-2B-multi",
+     "StarCoder": "bigcode/starcoder",
+     "WizardCoder": "WizardLM/WizardCoder-1B-V1.0",
+     "Phind LLaMA": "Phind/phind-codeLlama-34b-v2"
+ }
+
+ # Load every tokenizer and model up front
+ loaded_models = {}
+ for name, model_id in models.items():
+     tokenizer = AutoTokenizer.from_pretrained(model_id)
+     model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", torch_dtype=torch.float16)
+     loaded_models[name] = (tokenizer, model)
+
+ # Code-generation function
+ def generate_code(prompt, model_name):
+     tokenizer, model = loaded_models[model_name]
+     inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
+     outputs = model.generate(**inputs, max_new_tokens=150)
+     code = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return code
+
+ # Gradio interface
+ demo = gr.Interface(
+     fn=generate_code,
+     inputs=[
+         gr.Textbox(lines=5, label="Describe the code (in English)"),
+         gr.Radio(choices=list(models.keys()), label="Choose the model")
+     ],
+     outputs=gr.Code(label="Generated code"),
+     title="Code Generation with AI Models",
+     description="Choose an AI model and enter a code description to have the code generated automatically"
+ )
+
+ demo.launch()
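
For reference, generate_code can be exercised directly before demo.launch() to confirm that a model loads and produces output; this is a minimal smoke-test sketch, not part of the commit, and the prompt below is illustrative only:

# Illustrative check (assumes the models above loaded successfully):
# call the same function Gradio wires to the UI, using the smallest model key.
print(generate_code(
    "Write a Python function that returns the n-th Fibonacci number",
    "WizardCoder"
))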