Spaces: Runtime error
4bit falcon
Browse files

app.py CHANGED
@@ -1,8 +1,10 @@
 import gradio as gr
-from transformers import
+from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
 
-# Load the model
-model_name = "
+# Load the compressed Falcon-7B-Instruct-GPTQ model
+model_name = "4bit/falcon-7b-instruct-GPTQ"
+
+# Load the tokenizer and model
 tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
 
@@ -40,9 +42,7 @@ def respond(
 
     yield response
 
-
-For more information about ChatInterface settings, see the Gradio documentation: https://www.gradio.app/docs/chatinterface
-"""
+# Create the user interface with Gradio
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
@@ -59,5 +59,6 @@ demo = gr.ChatInterface(
     ],
 )
 
+# Run the application
 if __name__ == "__main__":
     demo.launch()
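
For context, the model-loading change in the first hunk amounts to roughly the sketch below. This is a minimal reconstruction, not the Space's exact code: the diff only shows the import, the model name, and the two from_pretrained calls, so the device_map argument, the text-generation pipeline, and the note about the auto-gptq/optimum dependency are assumptions about what a GPTQ checkpoint typically needs.

# Minimal sketch of the updated model-loading section; assumptions are noted in comments.
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_name = "4bit/falcon-7b-instruct-GPTQ"  # 4-bit GPTQ build of Falcon-7B-Instruct

# The diff passes trust_remote_code=True, which Falcon's original release required.
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)

# Assumption: GPTQ checkpoints typically need the auto-gptq (or optimum) package
# installed and a GPU; device_map="auto" is not shown in the diff.
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    trust_remote_code=True,
    device_map="auto",
)

# The diff also imports `pipeline`; a text-generation pipeline is the assumed use.
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)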
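
The respond function and the additional_inputs list are mostly elided from the diff; only yield response and the gr.ChatInterface(respond, additional_inputs=[...]) skeleton are visible. Continuing the sketch above (reusing the `generator` pipeline defined there), a plausible shape for that part, with hypothetical parameter names, slider ranges, and prompt format, might look like this:

import gradio as gr

# Hypothetical streaming chat handler; only `yield response` and the ChatInterface
# skeleton appear in the diff, everything else here is an assumption.
def respond(message, history, max_new_tokens, temperature):
    # Fold the chat history into a single prompt (assumed format).
    prompt = ""
    for user_msg, bot_msg in history:
        prompt += f"User: {user_msg}\nAssistant: {bot_msg}\n"
    prompt += f"User: {message}\nAssistant:"

    output = generator(
        prompt,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        do_sample=True,
    )[0]["generated_text"]

    # Stream the reply back to the UI word by word, matching the generator-style
    # respond() that yields partial responses in the diff.
    response = ""
    for word in output[len(prompt):].split():
        response += word + " "
        yield response

# Create the user interface with Gradio (slider labels and ranges are assumptions).
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Slider(minimum=1, maximum=1024, value=256, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature"),
    ],
)

if __name__ == "__main__":
    demo.launch()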