Asankhaya Sharma committed · Commit 5d1688a
Parent(s): cae23e1
removed

Files changed:
- brain.py +0 -40
- components_keys.py +0 -4
- explorer.py +0 -14
- files.py +0 -191
brain.py
DELETED
@@ -1,40 +0,0 @@
-import numpy as np
-import streamlit as st
-
-
-def brain(supabase):
-    ## List all documents
-    response = supabase.table("documents").select("name:metadata->>file_name, size:metadata->>file_size", count="exact").filter('metadata->>user', 'eq', st.session_state["username"]).execute()
-
-    documents = response.data  # Access the data from the response
-
-    # Convert each dictionary to a tuple of items, then to a set to remove duplicates, and then back to a dictionary
-    unique_data = [dict(t) for t in set(tuple(d.items()) for d in documents)]
-
-    # Sort the list of documents by size in decreasing order
-    unique_data.sort(key=lambda x: int(x['size']), reverse=True)
-
-    # Display some metrics at the top of the page
-    col1, col2 = st.columns(2)
-    col1.metric(label="Total Documents", value=len(unique_data))
-    col2.metric(label="Total Size (bytes)", value=sum(int(doc['size']) for doc in unique_data))
-
-    for document in unique_data:
-        # Create a unique key for each button by using the document name
-        button_key = f"delete_{document['name']}"
-
-        # Display the document name, size and the delete button on the same line
-        col1, col2, col3 = st.columns([3, 1, 1])
-        col1.markdown(f"**{document['name']}** ({document['size']} bytes)")
-
-        if col2.button('❌', key=button_key):
-            delete_document(supabase, document['name'])
-
-def delete_document(supabase, document_name):
-    # Delete the document from the database
-    response = supabase.table("documents").delete().match({"metadata->>file_name": document_name}).execute()
-    # Check if the deletion was successful
-    if len(response.data) > 0:
-        st.write(f"✂️ {document_name} was deleted.")
-    else:
-        st.write(f"❌ {document_name} was not deleted.")
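For orientation, a minimal sketch of how a view module like the one deleted above is typically wired into a Streamlit app. The entry-point code, secret names, and sign-in check here are assumptions for illustration, not part of this commit:

# Sketch (assumed wiring, not from this commit): calling brain(supabase)
# from the app entry point once a user is signed in.
import streamlit as st
from supabase import create_client

from brain import brain  # module removed in this commit

# Secret names below are assumptions for illustration.
supabase = create_client(st.secrets["supabase_url"], st.secrets["supabase_service_key"])

if st.session_state.get("username"):
    brain(supabase)  # lists the user's documents with per-document delete buttons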
components_keys.py
DELETED
@@ -1,4 +0,0 @@
-"""Store streamlit component keys"""
-
-class ComponentsKeys:
-    FILE_UPLOADER = "file_uploader"
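This constant is consumed by files.py later in this same diff; a minimal sketch of that usage pattern, which gives the uploader widget a stable, shared key:

# Sketch of the usage pattern from files.py below: a shared key keeps the
# uploader widget addressable from other modules (e.g. via st.session_state).
import streamlit as st
from components_keys import ComponentsKeys

files = st.file_uploader("**Upload a file**", key=ComponentsKeys.FILE_UPLOADER)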
explorer.py
DELETED
@@ -1,14 +0,0 @@
-import streamlit as st
-
-
-def view_document(supabase):
-    # Get the document from the database
-    response = supabase.table("documents").select("content").filter('metadata->>user', 'eq', st.session_state["username"]).execute()
-    # st.write("**This feature is in active development**")
-    # Display a list of elements from the documents
-    # If the user clicks on an element, display the content of the document
-    i = 0
-    for document in response.data:
-        i += 1
-        if st.button(document['content'][:50].replace("\n", " "), key = str(i)):
-            st.write(document['content'])
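The JSON-path filter used above (and in brain.py) can be written with supabase-py's `.eq()` shorthand as well; a minimal sketch, assuming the same `documents` table and `metadata` JSON column, with a hypothetical helper name:

# Sketch (assumption: same table/column names as above). 'metadata->>user'
# extracts the "user" key from the metadata JSON column as text, so only the
# signed-in user's rows come back.
import streamlit as st
from supabase.client import Client

def documents_for_current_user(supabase: Client) -> list[dict]:
    response = (
        supabase.table("documents")
        .select("content")
        .eq("metadata->>user", st.session_state["username"])
        .execute()
    )
    return response.data  # list of dicts, one per matching row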
files.py
DELETED
@@ -1,191 +0,0 @@
-import os
-from typing import (
-    Any,
-    Union,
-)
-import zipfile
-import streamlit as st
-from streamlit.runtime.uploaded_file_manager import (
-    UploadedFile,
-    UploadedFileRec,
-    UploadedFileManager,
-)
-from streamlit.runtime.scriptrunner import get_script_run_ctx
-from supabase.client import Client
-from langchain.vectorstores.supabase import SupabaseVectorStore
-from components_keys import ComponentsKeys
-from loaders.audio import process_audio
-from loaders.txt import process_txt
-from loaders.csv import process_csv
-from loaders.markdown import process_markdown
-from loaders.pdf import process_pdf
-from loaders.html import (
-    create_html_file,
-    delete_tempfile,
-    get_html,
-    process_html,
-)
-from loaders.powerpoint import process_powerpoint
-from loaders.docx import process_docx
-from utils import compute_sha1_from_content
-
-
-ctx = get_script_run_ctx()
-manager = UploadedFileManager()
-file_processors = {
-    ".txt": process_txt,
-    ".csv": process_csv,
-    ".md": process_markdown,
-    ".markdown": process_markdown,
-    ".m4a": process_audio,
-    ".mp3": process_audio,
-    ".webm": process_audio,
-    ".mp4": process_audio,
-    ".mpga": process_audio,
-    ".wav": process_audio,
-    ".mpeg": process_audio,
-    ".pdf": process_pdf,
-    ".html": process_html,
-    ".pptx": process_powerpoint,
-    ".docx": process_docx
-}
-
-def file_uploader(supabase, vector_store):
-    # Omit zip file support if the `st.secrets.self_hosted` != "true" because
-    # a zip file can consist of multiple files so the limit on 1 file uploaded
-    # at a time in the demo can be circumvented.
-    accepted_file_extensions = list(file_processors.keys())
-    accept_multiple_files = st.secrets.self_hosted == "true"
-    if accept_multiple_files:
-        accepted_file_extensions += [".zip"]
-
-    files = st.file_uploader(
-        "**Upload a file**",
-        accept_multiple_files=accept_multiple_files,
-        type=accepted_file_extensions,
-        key=ComponentsKeys.FILE_UPLOADER,
-    )
-    if st.secrets.self_hosted == "false":
-        st.markdown("**In demo mode, the max file size is 1MB**")
-    if st.button("Add to Database"):
-        # Single file upload
-        if isinstance(files, UploadedFile):
-            filter_file(files, supabase, vector_store)
-        # Multiple files upload
-        elif isinstance(files, list):
-            for file in files:
-                filter_file(file, supabase, vector_store)
-
-def file_already_exists(supabase, file):
-    file_sha1 = compute_sha1_from_content(file.getvalue())
-    response = supabase.table("documents").select("id").eq("metadata->>file_sha1", file_sha1).execute()
-    return len(response.data) > 0
-
-def file_to_uploaded_file(file: Any) -> Union[None, UploadedFile]:
-    """Convert a file to a streamlit `UploadedFile` object.
-
-    This allows us to unzip files and treat them the same way
-    streamlit treats files uploaded through the file uploader.
-
-    Parameters
-    ---------
-    file : Any
-        The file. Can be any file supported by this app.
-
-    Returns
-    -------
-    Union[None, UploadedFile]
-        The file converted to a streamlit `UploadedFile` object.
-        Returns `None` if the script context cannot be grabbed.
-    """
-
-    if ctx is None:
-        print("script context not found, skipping uploading file:", file.name)
-        return
-
-    file_extension = os.path.splitext(file.name)[-1]
-    file_name = file.name
-    file_data = file.read()
-    # The file manager will automatically assign an ID so pass `None`
-    # Reference: https://github.com/streamlit/streamlit/blob/9a6ce804b7977bdc1f18906d1672c45f9a9b3398/lib/streamlit/runtime/uploaded_file_manager.py#LL98C6-L98C6
-    uploaded_file_rec = UploadedFileRec(None, file_name, file_extension, file_data)
-    uploaded_file_rec = manager.add_file(
-        ctx.session_id,
-        ComponentsKeys.FILE_UPLOADER,
-        uploaded_file_rec,
-    )
-    return UploadedFile(uploaded_file_rec)
-
-def filter_zip_file(
-    file: UploadedFile,
-    supabase: Client,
-    vector_store: SupabaseVectorStore,
-) -> None:
-    """Unzip the zip file then filter each unzipped file.
-
-    Parameters
-    ----------
-    file : UploadedFile
-        The uploaded file from the file uploader.
-    supabase : Client
-        The supabase client.
-    vector_store : SupabaseVectorStore
-        The vector store in the database.
-    """
-
-    with zipfile.ZipFile(file, "r") as z:
-        unzipped_files = z.namelist()
-        for unzipped_file in unzipped_files:
-            with z.open(unzipped_file, "r") as f:
-                filter_file(f, supabase, vector_store)
-
-def filter_file(file, supabase, vector_store):
-    # Streamlit file uploads are of type `UploadedFile` which has the
-    # necessary methods and attributes for this app to work.
-    if not isinstance(file, UploadedFile):
-        file = file_to_uploaded_file(file)
-
-    file_extension = os.path.splitext(file.name)[-1]
-    if file_extension == ".zip":
-        filter_zip_file(file, supabase, vector_store)
-        return True
-
-    if file_already_exists(supabase, file):
-        st.write(f"😎 {file.name} is already in the database.")
-        return False
-
-    if file.size < 1:
-        st.write(f"💨 {file.name} is empty.")
-        return False
-
-    if file_extension in file_processors:
-        if st.secrets.self_hosted == "false":
-            file_processors[file_extension](vector_store, file, stats_db=supabase)
-        else:
-            file_processors[file_extension](vector_store, file, stats_db=None)
-        st.write(f"✅ {file.name} ")
-        return True
-
-    st.write(f"❌ {file.name} is not a valid file type.")
-    return False
-
-def url_uploader(supabase, vector_store):
-    url = st.text_area("**Add an url**",placeholder="https://meraGPT.com")
-    button = st.button("Add the URL to the database")
-
-    if button:
-        if not st.session_state["overused"]:
-            html = get_html(url)
-            if html:
-                st.write(f"Getting content ... {url} ")
-                try:
-                    file, temp_file_path = create_html_file(url, html)
-                except UnicodeEncodeError as e:
-                    st.write(f"❌ Error encoding character: {e}")
-                    file, temp_file_path = create_html_file(url, html)
-                ret = filter_file(file, supabase, vector_store)
-                delete_tempfile(temp_file_path, url, ret)
-            else:
-                st.write(f"❌ Failed to access to {url} .")
-        else:
-            st.write("You have reached your daily limit. Please come back later or self host the solution.")