Dorjzodovsuren committed on
Commit
e450c90
·
verified ·
1 Parent(s): a8011bd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -43
app.py CHANGED
@@ -1,59 +1,52 @@
1
- import os
2
- import re
3
- import gradio as gr
4
  import edge_tts
5
- import asyncio
6
- import time
7
  import tempfile
 
8
  from huggingface_hub import InferenceClient
9
- from deep_translator import GoogleTranslator
10
-
11
def get_translation(context_custom, language='en'):
    """Translate *context_custom* into *language* (default English).

    The source language is auto-detected by the Google Translate backend.
    """
    translator = GoogleTranslator(source='auto', target=language)
    return translator.translate(text=context_custom)
14
-
15
# Hugging Face Inference API client for Mixtral; the auth token is read
# from the TOKEN environment variable.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.getenv("TOKEN"))

# System prompt prepended to every conversation (Mixtral instruct format).
system_instructions1 = "<s>[SYSTEM] You are AI assistant named DorjGPT, Keep conversation very short, clear, friendly and concise. The text provided is a request for a specific type of response from you, the virtual assistant. The request asks you to provide friendly responses as if super interlligent AI assistant. The expectation is that I will avoid introductions and start answering the query directly, elaborating on all aspects of the request. As an AI-powered assistant, your task is to generate responses that appear as if they are created by the user and not an AI-powered assistant. [USER]"
 
 
 
18
 
19
  global history
20
  history = []
21
def format_prompt(message, history):
    """Assemble a Mixtral-instruct prompt.

    Starts from the module-level system prefix, replays each prior
    (user, assistant) turn in ``[INST] ... [/INST] ...</s>`` form, and
    appends the new *message* as the final instruction.
    """
    parts = [system_instructions1]
    for user_prompt, bot_response in history:
        parts.append(f"[INST] {user_prompt} [/INST] {bot_response}</s> ")
    parts.append(f"[INST] {message} [/INST]")
    return "".join(parts)
 
 
28
 
29
async def generate1(prompt, history=None, b=None):
    """Answer *prompt* with Mixtral, translate the reply to Mongolian, and
    yield the path of a temporary .wav file containing the spoken answer.

    Parameters
    ----------
    prompt : str
        User message in any language; it is auto-translated to English
        before being sent to the model.
    history : list | None
        Prior ``[user, assistant]`` turns. A fresh list is created per call
        when omitted. (The previous mutable default ``history=[]`` made one
        list shared and grown by every call — including calls from
        unrelated users of the web app.)
    b : unused
        Kept only for backward compatibility with existing callers.

    Yields
    ------
    str
        Path to a temporary .wav file (caller is responsible for cleanup,
        since the file is created with ``delete=False``).
    """
    if history is None:
        history = []

    generate_kwargs = dict(
        temperature=0.6,
        max_new_tokens=256,
        top_p=0.95,
        repetition_penalty=1,
        do_sample=True,
        seed=42,
    )

    # The model performs best in English: translate the prompt first.
    prompt_en = get_translation(prompt)
    formatted_prompt = format_prompt(f"{system_instructions1}, {prompt_en}", history) + "[DORJGPT]"
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=True)

    output = ""
    for response in stream:
        output += response.token.text
    # Strip the end-of-sequence marker once after streaming completes:
    # rescanning the growing string on every token was wasteful, and a
    # marker split across two tokens would have been missed mid-stream.
    output = output.replace("</s>", "")
    history.append([prompt, output])

    # Translate the answer back to Mongolian for speech synthesis.
    output_mn = get_translation(output, language="mn")

    communicate = edge_tts.Communicate(output_mn, voice="mn-MN-BataaNeural")
    with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
        tmp_path = tmp_file.name
        await communicate.save(tmp_path)
        yield tmp_path
56
 
 
57
  with gr.Blocks(theme="gradio/monochrome", title="Dorj Assistant") as demo:
58
  gr.HTML("""
59
  <h1 style="text-align: center; style="font-size: 3m;">
@@ -72,7 +65,7 @@ with gr.Blocks(theme="gradio/monochrome", title="Dorj Assistant") as demo:
72
  with gr.Tab():
73
  with gr.Row():
74
  translate_btn = gr.Button("Submit")
75
- translate_btn.click(fn=generate1, inputs=user_input,
76
  outputs=output_audio, api_name="translate")
77
 
78
  if __name__ == "__main__":
 
 
 
 
1
  import edge_tts
 
 
2
  import tempfile
3
+ import gradio as gr
4
  from huggingface_hub import InferenceClient
 
 
 
 
 
 
 
5
 
6
+ """
7
+ For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
8
+ """
9
+ client = InferenceClient("google/gemma-3-27b-it")
10
 
11
  global history
12
  history = []
13
async def respond(
    message,
    history=None,
    system_message="You are a Gemma, created by Google. You is a helpful assistant and always reply back in Mongolian, and only return Mongolian text.",
    max_tokens=512,
    temperature=0.001,
    top_p=0.95,
):
    """Chat with the model and speak the reply in Mongolian.

    Builds an OpenAI-style message list from *history* and *message*,
    streams a completion from the Inference API, synthesizes the full
    reply with edge-tts, and yields the path of the temporary .wav file.

    Parameters
    ----------
    message : str
        The new user message.
    history : list | None
        Prior ``[user, assistant]`` pairs; falsy entries are skipped.
        A fresh list is used per call when omitted (the previous mutable
        default ``history=[]`` was shared across all calls and users).
    system_message, max_tokens, temperature, top_p
        Generation settings forwarded to ``chat_completion``.

    Yields
    ------
    str
        Path to a temporary .wav file created with ``delete=False``
        (caller is responsible for cleanup).
    """
    if history is None:
        history = []

    messages = [{"role": "system", "content": system_message}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})

    response = ""
    # NOTE: the loop variable was previously also named `message`,
    # shadowing the function parameter; renamed to `chunk` for clarity.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        # Streaming chunks may carry no content (e.g. role-only or final
        # chunks); concatenating None to a str raises TypeError.
        if token:
            response += token

    communicate = edge_tts.Communicate(response, voice="mn-MN-BataaNeural")
    with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
        tmp_path = tmp_file.name
        await communicate.save(tmp_path)
        yield tmp_path
48
 
49
+
50
  with gr.Blocks(theme="gradio/monochrome", title="Dorj Assistant") as demo:
51
  gr.HTML("""
52
  <h1 style="text-align: center; style="font-size: 3m;">
 
65
  with gr.Tab():
66
  with gr.Row():
67
  translate_btn = gr.Button("Submit")
68
+ translate_btn.click(fn=respond, inputs=user_input,
69
  outputs=output_audio, api_name="translate")
70
 
71
  if __name__ == "__main__":