baptiste.bernard committed on
Commit
a602253
·
1 Parent(s): 0aecada

new theme and reading the file

Browse files
Files changed (1) hide show
  1. app.py +30 -21
app.py CHANGED
@@ -4,19 +4,18 @@ from dotenv import load_dotenv
4
  import gradio as gr
5
  from huggingface_hub import InferenceClient
6
  import chardet
 
7
 
8
  load_dotenv()
9
 
10
- # Récupérez le token à partir de la variable d'environnement
11
  hftoken = os.environ.get("HF_TOKEN")
12
 
13
  from huggingface_hub import login
 
14
 
15
- login(token = hftoken)
16
 
17
- # Utilisez le token pour initialiser le client
18
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=hftoken)
19
 
 
20
  file_content = None
21
 
22
  def respond(message, history, system_message, max_tokens, temperature, top_p, file=None):
@@ -35,13 +34,16 @@ def respond(message, history, system_message, max_tokens, temperature, top_p, fi
35
 
36
  if file:
37
  try:
38
- file_content = file.decode("utf-8")
39
- except UnicodeDecodeError:
40
- result = chardet.detect(file)
41
- encoding = result['encoding']
42
- file_content = file.decode(encoding, errors='ignore')
 
 
 
43
 
44
- if "contenu du fichier" in message.lower() and file_content:
45
  response += f"Contenu du fichier :\n{file_content}"
46
  yield response
47
  return
@@ -57,18 +59,25 @@ def respond(message, history, system_message, max_tokens, temperature, top_p, fi
57
  response += token
58
  yield response
59
 
60
- with gr.Blocks() as demo:
 
61
  gr.Image(value="logo-gaia.png", label="Logo")
62
- gr.ChatInterface(
63
- respond,
64
- additional_inputs=[
65
- gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
66
- gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
67
- gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
68
- gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
69
- gr.File(label="Télécharger un fichier", type="binary"),
70
- ],
71
- )
 
 
 
 
 
 
72
 
73
  if __name__ == "__main__":
74
  demo.launch()
 
import os
import re

import chardet
import gradio as gr
from dotenv import load_dotenv
from huggingface_hub import InferenceClient, login

# Load variables from a local .env file (no-op if the file is absent).
load_dotenv()

# Hugging Face API token; may be None when the HF_TOKEN variable is unset.
hftoken = os.environ.get("HF_TOKEN")

# Only authenticate when a token was actually provided: login(token=None)
# would fall back to cached credentials or an interactive prompt instead of
# using the configured token.
if hftoken:
    login(token=hftoken)

# Inference client for the chat model; the token is forwarded so gated
# models can be accessed.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=hftoken)

# Holds the decoded text of the most recently uploaded file
# (assigned inside respond()).
file_content = None
  def respond(message, history, system_message, max_tokens, temperature, top_p, file=None):
 
34
 
35
  if file:
36
  try:
37
+ if isinstance(file, bytes):
38
+ result = chardet.detect(file)
39
+ encoding = result['encoding']
40
+ file_content = file.decode(encoding, errors='ignore')
41
+ else:
42
+ file_content = file
43
+ except Exception as e:
44
+ file_content = f"Erreur de décodage du fichier : {e}"
45
 
46
+ if re.search(r"contenu du fichier|afficher le fichier|lire le fichier|voir le fichier| donnée du fichier", message.lower()) and file_content:
47
  response += f"Contenu du fichier :\n{file_content}"
48
  yield response
49
  return
 
59
  response += token
60
  yield response
61
 
62
# Application layout: a soft-themed Gradio Blocks page with the tuning
# controls in the left column and the chat interface in the right one.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# Chatbot Interface")
    gr.Image(value="logo-gaia.png", label="Logo")

    with gr.Row():
        # Left column: generation parameters and the file-upload widget.
        with gr.Column():
            gr.Markdown("## Paramètres")
            sys_msg = gr.Textbox(value="You are a friendly Chatbot.", label="System message")
            max_new_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
            temp = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
            nucleus_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
            uploaded_file = gr.File(label="Télécharger un fichier", type="binary")

        # Right column: the chat widget, wired to respond() with the
        # controls above passed through as extra inputs.
        with gr.Column():
            gr.Markdown("## Chat")
            gr.ChatInterface(
                respond,
                additional_inputs=[sys_msg, max_new_tokens, temp, nucleus_p, uploaded_file],
            )

# Start the web server only when this file is executed directly.
if __name__ == "__main__":
    demo.launch()