orrinin committed · verified
Commit b2c2e74 · 1 Parent(s): 1f54d7e

Update app.py

Files changed (1): app.py +33 -5

app.py CHANGED
@@ -1,11 +1,16 @@
 #using codes from mistralai official cookbook
 import gradio as gr
-from mistralai.client import MistralClient
-from mistralai.models.chat_completion import ChatMessage
+from llama_index.llms import MistralAI
 import numpy as np
 import PyPDF2
 import faiss
 import os
+from llama_index.core import SimpleDirectoryReader
+from llama_index.embeddings import MistralAIEmbedding
+from llama_index import ServiceContext
+from llama_index.core import VectorStoreIndex, StorageContext
+from llama_index.vector_stores.milvus import MilvusVectorStore
+import textwrap


 mistral_api_key = os.environ.get("API_KEY")
@@ -36,17 +41,40 @@ def rag_pdf(pdfs: list, question: str) -> str:
     text_retrieved = "\n\n".join(retrieved_chunk)
     return text_retrieved

+def load_doc(path_list):
+    documents = SimpleDirectoryReader(input_files=path_list).load_data()
+    print("Document ID:", documents[0].doc_id)
+    vector_store = MilvusVectorStore(uri="./milvus_demo.db", dim=1536, overwrite=True)
+    storage_context = StorageContext.from_defaults(vector_store=vector_store)
+    index = VectorStoreIndex.from_documents(documents, storage_context=storage_context)
+    return index
+
+
+
 def ask_mistral(message: str, history: list):
     messages = []
-    pdfs = message["files"]
+    docs = message["files"]
     for couple in history:
         if type(couple[0]) is tuple:
-            pdfs += couple[0]
+            docs += couple[0][0]
         else:
             messages.append(ChatMessage(role= "user", content = couple[0]))
             messages.append(ChatMessage(role= "assistant", content = couple[1]))
+    if docs:
+        print(docs)
+        index = load_doc(docs)
+        query_engine = index.as_query_engine()
+        response = query_engine.query(message["text"])
+
+        full_response = ""
+        for text in response.response_gen:
+            full_response += text
+            yield full_response
+
+
+

-    if pdfs:
+
         pdfs_extracted = []
         for pdf in pdfs:
             reader = PyPDF2.PdfReader(pdf)
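For reference, a minimal sketch of the indexing path that load_doc introduces. It assumes the llama-index 0.10+ split packages (llama-index-core, llama-index-llms-mistralai, llama-index-embeddings-mistralai, llama-index-vector-stores-milvus) and Milvus Lite as the local store; the model names and the 1024-dim setting for mistral-embed are assumptions, not taken from the commit.

# Sketch: build a Milvus-backed VectorStoreIndex over uploaded files with Mistral models.
# Package layout and model names below are assumptions (llama-index >= 0.10 split packages).
import os

from llama_index.core import Settings, SimpleDirectoryReader, StorageContext, VectorStoreIndex
from llama_index.embeddings.mistralai import MistralAIEmbedding
from llama_index.llms.mistralai import MistralAI
from llama_index.vector_stores.milvus import MilvusVectorStore

mistral_api_key = os.environ.get("API_KEY")

# Route both the LLM and the embeddings through Mistral instead of the library defaults.
Settings.llm = MistralAI(model="mistral-small-latest", api_key=mistral_api_key)
Settings.embed_model = MistralAIEmbedding(model_name="mistral-embed", api_key=mistral_api_key)


def load_doc(path_list: list) -> VectorStoreIndex:
    # Read the uploaded files from disk into llama_index Documents.
    documents = SimpleDirectoryReader(input_files=path_list).load_data()
    # Milvus Lite persists to a local file; dim must match the embedding size
    # (mistral-embed produces 1024-dimensional vectors).
    vector_store = MilvusVectorStore(uri="./milvus_demo.db", dim=1024, overwrite=True)
    storage_context = StorageContext.from_defaults(vector_store=vector_store)
    # Embed the documents and store the vectors in Milvus.
    return VectorStoreIndex.from_documents(documents, storage_context=storage_context)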
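And a sketch of the streaming half of ask_mistral as a Gradio multimodal chat handler, reusing the hypothetical load_doc above. The tuple check mirrors how the commit detects file uploads in the history; streaming=True is assumed so that response_gen exists on the query result.

# Sketch: stream a RAG answer from the Milvus-backed index back to Gradio.
# Assumes load_doc() from the sketch above and a gr.ChatInterface(multimodal=True)
# handler, where `message` is a dict like {"text": ..., "files": [path, ...]}.
def ask_mistral(message: dict, history: list):
    # Collect file paths from the current message and from earlier upload turns,
    # which Gradio stores in the history as (filepath,) tuples.
    docs = list(message["files"])
    for couple in history:
        if isinstance(couple[0], tuple):
            docs.append(couple[0][0])

    if docs:
        index = load_doc(docs)
        # streaming=True makes query() return a StreamingResponse whose
        # response_gen yields text deltas as they arrive.
        query_engine = index.as_query_engine(streaming=True)
        response = query_engine.query(message["text"])

        full_response = ""
        for token in response.response_gen:
            full_response += token
            yield full_response

Wired up with something like gr.ChatInterface(ask_mistral, multimodal=True), each yielded string progressively replaces the assistant reply in the UI.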