Fecalisboa committed on
Commit
7341860
·
verified ·
1 Parent(s): ee565de

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -20
app.py CHANGED
@@ -1,5 +1,8 @@
1
  import gradio as gr
2
  import os
 
 
 
3
  from langchain_community.document_loaders import PyPDFLoader
4
  from langchain.text_splitter import RecursiveCharacterTextSplitter
5
  from langchain_community.vectorstores import Chroma
@@ -9,27 +12,7 @@ from langchain_community.llms import HuggingFacePipeline
9
  from langchain.chains import ConversationChain
10
  from langchain.memory import ConversationBufferMemory
11
  from langchain_community.llms import HuggingFaceEndpoint
12
- from huggingface_hub import login
13
-
14
- from pathlib import Path
15
- import chromadb
16
- from unidecode import unidecode
17
-
18
- from transformers import AutoTokenizer
19
- import transformers
20
  import torch
21
- import tqdm
22
- import accelerate
23
- import re
24
-
25
- from io import StringIO
26
- from typing import Any, Callable, List, Optional
27
-
28
- import pandas as pd
29
-
30
- # Obtenha o token da variável de ambiente
31
- api_token = os.getenv("HF_TOKEN")
32
-
33
 
34
  list_llm = ["meta-llama/Meta-Llama-3-8B-Instruct", "mistralai/Mistral-7B-Instruct-v0.3"]
35
  list_llm_simple = [os.path.basename(llm) for llm in list_llm]
 
1
  import gradio as gr
2
  import os
3
+ api_token = os.getenv("HF_TOKEN")
4
+
5
+ from langchain_community.vectorstores import FAISS
6
  from langchain_community.document_loaders import PyPDFLoader
7
  from langchain.text_splitter import RecursiveCharacterTextSplitter
8
  from langchain_community.vectorstores import Chroma
 
12
  from langchain.chains import ConversationChain
13
  from langchain.memory import ConversationBufferMemory
14
  from langchain_community.llms import HuggingFaceEndpoint
 
 
 
 
 
 
 
 
15
  import torch
 
 
 
 
 
 
 
 
 
 
 
 
16
 
17
  list_llm = ["meta-llama/Meta-Llama-3-8B-Instruct", "mistralai/Mistral-7B-Instruct-v0.3"]
18
  list_llm_simple = [os.path.basename(llm) for llm in list_llm]