orrinin committed (verified)
Commit de693c7 · Parent(s): 840fc0f

Update app.py

Files changed (1)
  app.py (+23 -8)
app.py CHANGED
@@ -1,16 +1,17 @@
 #using codes from mistralai official cookbook
 import gradio as gr
-from mistralai.client import MistralClient
+from mistralai.async_client import MistralAsyncClient
 from mistralai.models.chat_completion import ChatMessage
 import numpy as np
 import PyPDF2
 import faiss
 import os
 import httpx
+import asyncio
 
 mistral_api_key = os.environ.get("API_KEY")
 
-cli = MistralClient(api_key = mistral_api_key)
+cli = MistralAsyncClient(api_key = mistral_api_key)
 
 def get_text_embedding(input: str):
     embeddings_batch_response = cli.embeddings(
@@ -61,12 +62,26 @@ def ask_mistral(message: str, history: list):
     messages.append(ChatMessage(role = "user", content = message["text"]))
 
     full_response = ""
-    for chunk in cli.chat_stream(model = "open-mistral-7b", messages = messages, max_tokens = 1024):
+
+    async_response = cli.chat_stream(
+        model = "open-mistral-7b",
+        messages = messages,
+        max_tokens = 1024
+    )
+
+    async for chunk in async_response:
         full_response += chunk.choices[0].delta.content
         yield full_response
 
-app = gr.ChatInterface(
-    fn = ask_mistral,
-    title = "Ask Mistral and talk to your PDFs",
-    multimodal = True)
-app.launch()
+chatbot = gr.Chatbot()
+
+with gr.Blocks(theme="soft") as demo:
+    gr.ChatInterface(
+        fn = ask_mistral,
+        title = "Ask Mistral and talk to your PDFs",
+        multimodal = True,
+        chatbot=chatbot,
+    )
+
+if __name__ == "__main__":
+    demo.queue(api_open=False).launch(show_api=False, share=False)
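
For readers following the change: the commit swaps the synchronous MistralClient for MistralAsyncClient and rewrites the streaming loop as an async for, so ask_mistral becomes an async generator that Gradio can stream partial responses from (the queue() call on launch is what lets Gradio serve generator outputs incrementally). Below is a minimal, self-contained sketch of the same pattern, assuming the legacy mistralai 0.x SDK used in this Space and an API_KEY environment variable; stream_answer and the standalone asyncio entry point are illustrative names, not part of the repository.

# Minimal sketch of the async streaming pattern introduced in this commit.
# Assumes the legacy mistralai 0.x SDK (MistralAsyncClient / ChatMessage) and
# an API_KEY environment variable; stream_answer is an illustrative name.
import os
import asyncio

from mistralai.async_client import MistralAsyncClient
from mistralai.models.chat_completion import ChatMessage

cli = MistralAsyncClient(api_key = os.environ.get("API_KEY"))

async def stream_answer(prompt: str):
    # chat_stream on the async client yields response chunks as they arrive;
    # each chunk carries the latest delta of the assistant message.
    messages = [ChatMessage(role = "user", content = prompt)]
    full_response = ""
    async for chunk in cli.chat_stream(
        model = "open-mistral-7b",
        messages = messages,
        max_tokens = 1024,
    ):
        delta = chunk.choices[0].delta.content or ""  # final chunk may carry no text
        full_response += delta
        yield full_response  # a UI such as gr.ChatInterface re-renders on every yield

async def main():
    async for partial in stream_answer("Summarise retrieval-augmented generation in one sentence."):
        print(partial)

if __name__ == "__main__":
    asyncio.run(main())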