Update app.py
app.py CHANGED
@@ -4,14 +4,18 @@ warnings.simplefilter(action='ignore', category=FutureWarning)
 import PyPDF2
 import gradio as gr
 from langchain.prompts import PromptTemplate
+from langchain.chains.summarize import load_summarize_chain
+from huggingface_hub import login
 from pathlib import Path
 from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
-from huggingface_hub import login
 from transformers import AutoTokenizer, AutoModelForSequenceClassification
 import torch
-import os
 
-
+huggingface_token = os.getenv('HUGGINGFACE_TOKEN')
+
+# Perform the Hugging Face login only if the token is available
+if huggingface_token:
+    login(token=huggingface_token)
 
 # Summarization model configuration
 llm = HuggingFaceEndpoint(
@@ -168,4 +172,4 @@ with gr.Blocks() as demo:
 )
 
 # Run the Gradio application
-demo.launch(share=True)
+demo.launch(share=True)
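For reference, the token-handling block this commit adds reads the token from the HUGGINGFACE_TOKEN environment variable and only calls login() when it is set. A minimal standalone sketch of that pattern (not the full app.py) is shown below; note that it still depends on the os module, which this same diff drops from the imports, so os has to remain importable elsewhere in the file.

import os
from huggingface_hub import login

# Read the token from the environment; os.getenv returns None when the variable is unset
huggingface_token = os.getenv('HUGGINGFACE_TOKEN')

# Log in only when a token is present, so the app can still start without one
if huggingface_token:
    login(token=huggingface_token)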
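The other new import, load_summarize_chain, appears in this diff only as an import; the change does not show how it is used. The sketch below is purely illustrative (the summarize_text helper and its arguments are assumptions, not code from app.py) of how such a chain is typically driven by the LLM configured in the hunk above:

from langchain_core.documents import Document
from langchain.chains.summarize import load_summarize_chain

def summarize_text(llm, text):
    # Hypothetical helper: `llm` is the HuggingFaceEndpoint/ChatHuggingFace model
    # from app.py and `text` is the text extracted from the uploaded PDF.
    chain = load_summarize_chain(llm, chain_type="stuff")
    result = chain.invoke({"input_documents": [Document(page_content=text)]})
    return result["output_text"]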