faisalhr1997 committed on
Commit 3a09844 · 1 Parent(s): 2b5b254

Update app.py

Files changed (1)
  1. app.py +3 -47
app.py CHANGED
@@ -6,35 +6,7 @@ import psutil
 from ctransformers import AutoModelForCausalLM
 from huggingface_hub import hf_hub_download
 
-_ = """
-snapshot_download(
-    repo_id="TheBloke/Luna-AI-Llama2-Uncensored-GGML",
-    allow_patterns="luna-ai-llama2-uncensored.ggmlv3.q2_K.bin",
-    revision="main",
-    local_dir="models",
-    local_dir_use_symlinks=False,  # default "auto"
-)
-
-hf_hub_download(
-    repo_id=repo_id,
-    filename=model_filename,
-    local_dir=local_path,
-    local_dir_use_symlinks=True,
-)
-# """
-# 4.06G
-
-_ = """
-llm = AutoModelForCausalLM.from_pretrained(
-    "TheBloke/Luna-AI-Llama2-Uncensored-GGML",
-    model_file="luna-ai-llama2-uncensored.ggmlv3.q2_K.bin",
-    model_type="llama", gpu_layers=32, threads=2,
-)
-# """
-# _ = Path("models", "luna-ai-llama2-uncensored.ggmlv3.q2_K.bin").absolute().as_posix()
-# assert Path(_).exists(), f"{_} does not exist, perhaps snapshot_download failed?"
 
-# URL = "https://huggingface.co/TheBloke/falcon-7b-instruct-GGML/blob/main/falcon-7b-instruct.ggccv1.q4_1.bin"
 URL = "https://huggingface.co/TheBloke/Luna-AI-Llama2-Uncensored-GGML/resolve/main/luna-ai-llama2-uncensored.ggmlv3.q2_K.bin"
 repo_id = "/".join(urlparse(URL).path.strip("/").split("/")[:2])
 
@@ -50,9 +22,6 @@ _ = hf_hub_download(
 )
 
 llm = AutoModelForCausalLM.from_pretrained(
-    # repo_id,  # "TheBloke/falcon-7b-instruct-GGML",
-    # model_file=model_file,
-    # model_file=_,
     _,
     model_type="llama",
     threads=psutil.cpu_count(logical=False),
@@ -112,7 +81,7 @@ def chat():
 
     with gr.Row(elem_id="button_container"):
         with gr.Column():
-            retry_button = gr.Button("♻️ Retry last turn")
+            retry_button = gr.Button("♻️ Retry")
         with gr.Column():
            delete_turn_button = gr.Button("🧽 Delete last turn")
        with gr.Column():
@@ -120,15 +89,12 @@ def chat():
 
     gr.Examples(
         [
-            ["Hey Falcon! Any recommendations for my holidays in Abu Dhabi?"],
+            ["Hey! Any recommendations for my holidays"],
             ["What's the Everett interpretation of quantum mechanics?"],
             [
                 "Give me a list of the top 10 dive sites you would recommend around the world."
             ],
             ["Can you tell me more about deep-water soloing?"],
-            [
-                "Can you write a short tweet about the Apache 2.0 release of our latest AI model, Falcon LLM?"
-            ],
         ],
         inputs=inputs,
         label="Click on any example and press Enter in the input textbox!",
@@ -237,17 +203,7 @@ def get_demo():
     with gr.Row():
         with gr.Column():
             gr.Markdown(
-                """**Chat with [Falcon-7b-Instruct](https://huggingface.co/tiiuae/falcon-7b-instruct), brainstorm ideas, discuss your holiday plans, and more!**
-
-                ✨ This demo is powered by [Falcon-7B](https://huggingface.co/tiiuae/falcon-7b), finetuned on the [Baize](https://github.com/project-baize/baize-chatbot) dataset, and running with [Text Generation Inference](https://github.com/huggingface/text-generation-inference). [Falcon-7B](https://huggingface.co/tiiuae/falcon-7b) is a state-of-the-art large language model built by the [Technology Innovation Institute](https://www.tii.ae) in Abu Dhabi. It is trained on 1 trillion tokens (including [RefinedWeb](https://huggingface.co/datasets/tiiuae/falcon-refinedweb)) and available under the Apache 2.0 license. It currently holds the 🥇 1st place on the [🤗 Open LLM leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). This demo is made available by the [HuggingFace H4 team](https://huggingface.co/HuggingFaceH4).
-
-                🧪 This is only a **first experimental preview**: the [H4 team](https://huggingface.co/HuggingFaceH4) intends to provide increasingly capable versions of Falcon Chat in the future, based on improved datasets and RLHF/RLAIF.
-
-                👀 **Learn more about Falcon LLM:** [falconllm.tii.ae](https://falconllm.tii.ae/)
-
-                ➡️ **Intended Use**: this demo is intended to showcase an early finetuning of [Falcon-7B](https://huggingface.co/tiiuae/falcon-7b), to illustrate the impact (and limitations) of finetuning on a dataset of conversations and instructions. We encourage the community to further build upon the base model, and to create even better instruct/chat versions!
-
-                ⚠️ **Limitations**: the model can and will produce factually incorrect information, hallucinating facts and actions. As it has not undergone any advanced tuning/alignment, it can produce problematic outputs, especially if prompted to do so. Finally, this demo is limited to a session length of about 1,000 words.
+                """**Chat with [TheBloke/Luna-AI-Llama2-Uncensored-GGML](https://huggingface.co/TheBloke/Luna-AI-Llama2-Uncensored-GGML), brainstorm ideas, discuss your holiday plans, and more!**
                 """
             )
 
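For context, the loading path that app.py keeps after this commit can be summarized as the short sketch below. The URL, the repo_id parsing, and the from_pretrained arguments are taken from the diff above; the filename passed to hf_hub_download and the final generation call are assumptions for illustration, since those lines are not shown in the hunks.

# Minimal sketch of the retained download-and-load flow, assembled from the diff above.
from urllib.parse import urlparse

import psutil
from ctransformers import AutoModelForCausalLM
from huggingface_hub import hf_hub_download

URL = "https://huggingface.co/TheBloke/Luna-AI-Llama2-Uncensored-GGML/resolve/main/luna-ai-llama2-uncensored.ggmlv3.q2_K.bin"
repo_id = "/".join(urlparse(URL).path.strip("/").split("/")[:2])  # "TheBloke/Luna-AI-Llama2-Uncensored-GGML"

# Download the quantized GGML file from the Hub and keep the local path it returns.
model_path = hf_hub_download(
    repo_id=repo_id,
    filename=URL.rsplit("/", 1)[-1],  # assumed: the basename of URL
)

# Load the model with ctransformers, pinning threads to physical CPU cores.
llm = AutoModelForCausalLM.from_pretrained(
    model_path,
    model_type="llama",
    threads=psutil.cpu_count(logical=False),
)

print(llm("What is deep-water soloing?"))  # illustrative prompt, not part of the diff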