Update app.py
app.py CHANGED
@@ -1,14 +1,14 @@
 import subprocess
 
-sub_p_res = subprocess.run(['pip', 'install', 'langchain', 'sentence-transformers', 'transformers', 'faiss-gpu', 'PyPDF2', 'torch','llama-cpp-python'], stdout=subprocess.PIPE).stdout.decode('utf-8')
-print("pip install downloded ", sub_p_res)
+#sub_p_res = subprocess.run(['pip', 'install', 'langchain', 'sentence-transformers', 'transformers', 'faiss-gpu', 'PyPDF2', 'torch','llama-cpp-python'], stdout=subprocess.PIPE).stdout.decode('utf-8') #<cc-cm>
+#print("pip install downloded ", sub_p_res)
 
 
-command = 'CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python'
+#command = 'CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python'
 
-sub_p_res = subprocess.run(command, shell=True, check=True)
+#sub_p_res = subprocess.run(command, shell=True, check=True)
 
-print("llama-cpp-python GPU downloaded ",sub_p_res)
+#print("llama-cpp-python GPU downloaded ",sub_p_res)
 
 
 from langchain.document_loaders.text import TextLoader
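
For reference, the lines being commented out implement a runtime dependency install. A minimal, self-contained sketch of that pattern, with the package list and the cuBLAS build flags taken from the diff above (the `#<cc-cm>` marker dropped, everything else kept as in the original code):

import subprocess

# Package list copied from the commented-out install line in app.py.
packages = [
    "langchain", "sentence-transformers", "transformers",
    "faiss-gpu", "PyPDF2", "torch", "llama-cpp-python",
]

# Install the dependencies at runtime and capture pip's output.
result = subprocess.run(["pip", "install", *packages], stdout=subprocess.PIPE)
print("pip install output:", result.stdout.decode("utf-8"))

# Rebuild llama-cpp-python against cuBLAS so inference can use the GPU;
# check=True raises CalledProcessError if the build fails.
command = 'CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python'
subprocess.run(command, shell=True, check=True)

On a Hugging Face Space these dependencies would more commonly be declared in requirements.txt, which is presumably why the runtime install is being disabled here rather than kept in app.py.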