vilarin committed on
Commit e3939ea · verified · 1 Parent(s): 1fc0e10

Update app.py

Files changed (1)
  1. app.py +28 -11
app.py CHANGED
@@ -10,7 +10,7 @@ HF_TOKEN = os.environ.get("HF_TOKEN", None)
 MODEL = "LLaMAX/LLaMAX3-8B-Alpaca"
 RELATIVE_MODEL="LLaMAX/LLaMAX3-8B"
 
-TITLE = "<h1><center>LLaMAX3-8B-Translation</center></h1>"
+TITLE = "<h1><center>LLaMAX3-Translator</center></h1>"
 
 quantization_config = BitsAndBytesConfig(load_in_8bit=True)
 
@@ -34,11 +34,11 @@ def lang_detector(text):
     except Exception as e:
         return f"ERROR:{str(e)}"
 
-def Prompt_template(query, src_language, trg_language):
-    instruction = f'Translate the following sentences from {src_language} to {trg_language}.'
+def Prompt_template(inst, prompt, query, src_language, trg_language):
+    inst = inst.format(src_language=src_language, trg_language=trg_language)
+    instruction = f"`{inst}`"
     prompt = (
-        'Below is an instruction that describes a task, paired with an input that provides further context. '
-        'Write a response that appropriately completes the request.\n'
+        f'{prompt}'
         f'### Instruction:\n{instruction}\n'
         f'### Input:\n{query}\n### Response:'
     )
@@ -52,7 +52,9 @@ def chunk_text():
 def translate(
     source_text: str,
     source_lang: str,
-    target_lang: str,
+    target_lang: str,
+    inst: str,
+    prompt: str,
     max_length: int,
     temperature: float,
     top_p: float,
@@ -60,7 +62,7 @@ def translate(
 
     print(f'Text is - {source_text}')
 
-    prompt = Prompt_template(source_text, source_lang, target_lang)
+    prompt = Prompt_template(inst, prompt, source_text, source_lang, target_lang)
     input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(model.device)
 
     generate_kwargs = dict(
@@ -89,9 +91,11 @@ CSS = """
     visibility: hidden;
 }
 """
-DESCRIPTION = """
-LLaMAX is a language model with powerful multilingual capabilities without loss instruction-following capabilities.
+
+LICENSE = """
+MODEL: <a href="https://huggingface.co/LLaMAX/LLaMAX3-8B-Alpaca">LLaMAX3-8B-Alpaca</a>
 """
+
 LANG_LIST = ['Akrikaans', 'Amharic', 'Arabic', 'Armenian', 'Assamese', 'Asturian', 'Azerbaijani', \
     'Belarusian', 'Bengali', 'Bosnian', 'Bulgarian', 'Burmese', \
     'Catalan', 'Cebuano', 'Simplified Chinese', 'Traditional Chinese', 'Croatian', 'Czech', \
@@ -155,7 +159,20 @@ with gr.Blocks(theme="soft", css=CSS) as demo:
                 value=1.2,
                 label="Repetition penalty",
             )
-            gr.Markdown(DESCRIPTION)
+            with gr.Accordion("Advanced Options", open=False):
+                gr.Markdown(LICENSE)
+                inst = gr.Textbox(
+                    label="Instruction",
+                    value="Translate the following sentences from {src_language} to {trg_language}."
+                )
+                prompt = gr.Textbox(
+                    label="Prompt",
+                    value="""
+                    'Below is an instruction that describes a task, paired with an input that provides further context. '
+                    'Write a response that appropriately completes the request.\n'
+                    """
+                )
+
         with gr.Column(scale=4):
             source_text = gr.Textbox(
                 label="Source Text",
@@ -173,7 +190,7 @@ with gr.Blocks(theme="soft", css=CSS) as demo:
             clear = gr.ClearButton([source_text, output_text])
 
     source_text.change(lang_detector, source_text, source_lang)
-    submit.click(fn=translate, inputs=[source_text, source_lang, target_lang, max_length, temperature, top_p, rp], outputs=[output_text])
+    submit.click(fn=translate, inputs=[source_text, source_lang, target_lang, inst, prompt, max_length, temperature, top_p, rp], outputs=[output_text])
 
 
 if __name__ == "__main__":
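
The core behavioral change in this commit is that the Alpaca prompt is no longer hard-coded: Prompt_template now receives the instruction and preamble from the two new "Advanced Options" textboxes and fills in the {src_language}/{trg_language} placeholders itself. The sketch below is not part of the commit; it reproduces the new function in isolation, with the default textbox values (quotes and indentation from the UI default simplified), so the exact string handed to the tokenizer can be printed. The trailing return is assumed, since that line falls outside the diff hunk.

```python
# Standalone sketch of the reworked prompt assembly (not part of the commit).
# Defaults mirror the new gr.Textbox values added in this commit; the preamble's
# surrounding quotes/indentation from the UI default are simplified here.

def Prompt_template(inst, prompt, query, src_language, trg_language):
    # Fill the placeholders the user can edit in the "Instruction" textbox.
    inst = inst.format(src_language=src_language, trg_language=trg_language)
    # The commit wraps the filled-in instruction in backticks.
    instruction = f"`{inst}`"
    prompt = (
        f'{prompt}'
        f'### Instruction:\n{instruction}\n'
        f'### Input:\n{query}\n### Response:'
    )
    return prompt  # assumed: the return statement lies outside the diff hunk


if __name__ == "__main__":
    default_inst = "Translate the following sentences from {src_language} to {trg_language}."
    default_preamble = (
        "Below is an instruction that describes a task, paired with an input "
        "that provides further context. Write a response that appropriately "
        "completes the request.\n"
    )
    print(Prompt_template(default_inst, default_preamble, "Good morning!", "English", "German"))
```

Running it prints the Alpaca-style prompt with the instruction wrapped in backticks, which is the string translate now tokenizes via tokenizer(prompt, return_tensors="pt").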