orrinin committed (verified) · Commit dd3fe36 · 1 Parent(s): 6201abb

Update app.py

Files changed (1)
  1. app.py +9 -17
app.py CHANGED
@@ -1,26 +1,25 @@
 #using codes from mistralai official cookbook
 import gradio as gr
-from mistralai.async_client import MistralAsyncClient
+from mistralai.client import MistralClient
 from mistralai.models.chat_completion import ChatMessage
 import numpy as np
 import PyPDF2
 import faiss
 import os
-import httpx
-import asyncio
+
 
 mistral_api_key = os.environ.get("API_KEY")
 
-cli = MistralAsyncClient(api_key = mistral_api_key)
+cli = MistralClient(api_key = mistral_api_key)
 
-async def get_text_embedding(input: str):
+def get_text_embedding(input: str):
     embeddings_batch_response = cli.embeddings(
         model = "mistral-embed",
         input = input
     )
     return embeddings_batch_response.data[0].embedding
 
-async def rag_pdf(pdfs: list, question: str) -> str:
+def rag_pdf(pdfs: list, question: str) -> str:
     chunk_size = 4096
     chunks = []
     for pdf in pdfs:
@@ -34,11 +33,10 @@ async def rag_pdf(pdfs: list, question: str) -> str:
     question_embeddings = np.array([get_text_embedding(question)])
     D, I = index.search(question_embeddings, k = 4)
     retrieved_chunk = [chunks[i] for i in I.tolist()[0]]
-    print(retrieved_chunk)
     text_retrieved = "\n\n".join(retrieved_chunk)
     return text_retrieved
 
-async def ask_mistral(message: str, history: list):
+def ask_mistral(message: str, history: list):
     messages = []
     pdfs = message["files"]
     for couple in history:
@@ -58,23 +56,17 @@ async def ask_mistral(message: str, history: list):
             pdfs_extracted.append(txt)
 
         retrieved_text = rag_pdf(pdfs_extracted, message["text"])
-        print(retrieved_text)
         messages.append(ChatMessage(role = "user", content = retrieved_text + "\n\n" + message["text"]))
     else:
         messages.append(ChatMessage(role = "user", content = message["text"]))
 
     full_response = ""
-
-    async_response = cli.chat_stream(
-        model = "open-mistral-7b",
-        messages = messages,
-        max_tokens = 1024
-    )
-
-    async for chunk in async_response:
+    for chunk in cli.chat_stream(model = "open-mistral-7b", messages = messages, max_tokens = 1024):
         full_response += chunk.choices[0].delta.content
         yield full_response
 
+
+
 chatbot = gr.Chatbot()
 
 with gr.Blocks(theme="soft") as demo:
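
For context, the retrieval path that this commit converts from async to sync looks roughly like this end to end. This is a minimal sketch, assuming the mistralai 0.x SDK that the diff imports from; the FAISS index construction sits in an unchanged part of app.py and is not visible in the diff, so the IndexFlatL2 setup and the sample chunks below are assumptions, not the author's code.

# Minimal sketch of the synchronous retrieval path (assumptions noted above).
import os
import faiss
import numpy as np
from mistralai.client import MistralClient

cli = MistralClient(api_key = os.environ.get("API_KEY"))

def get_text_embedding(input: str):
    # Same embeddings call as in app.py, now a plain function instead of async.
    embeddings_batch_response = cli.embeddings(model = "mistral-embed", input = input)
    return embeddings_batch_response.data[0].embedding

# Hypothetical chunks standing in for the 4096-character PDF splits in app.py.
chunks = ["chunk one ...", "chunk two ...", "chunk three ...", "chunk four ..."]
text_embeddings = np.array([get_text_embedding(c) for c in chunks]).astype("float32")  # faiss expects float32

index = faiss.IndexFlatL2(text_embeddings.shape[1])  # assumed index type; not visible in the diff
index.add(text_embeddings)

question = "What is this document about?"
question_embeddings = np.array([get_text_embedding(question)]).astype("float32")
D, I = index.search(question_embeddings, k = 4)  # k = 4 as in app.py
retrieved_chunk = [chunks[i] for i in I.tolist()[0]]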
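
The core of the change is the streaming call: chat_stream on the synchronous MistralClient returns a regular iterator, so the old async for loop collapses into a plain for loop. Below is a minimal standalone version of the new streaming block, again assuming the 0.x SDK; the None-guard on delta.content is an extra safety check that is not in app.py.

import os
from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage

cli = MistralClient(api_key = os.environ.get("API_KEY"))
messages = [ChatMessage(role = "user", content = "Hello!")]  # hypothetical prompt

full_response = ""
# Same call and parameters as the + line in the diff; each chunk carries
# an incremental delta of the completion.
for chunk in cli.chat_stream(model = "open-mistral-7b", messages = messages, max_tokens = 1024):
    if chunk.choices[0].delta.content is not None:  # guard not present in app.py
        full_response += chunk.choices[0].delta.content
print(full_response)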