Adriiiii24 committed on
Commit 4e68eb9 · verified · 1 Parent(s): f00ce12

Update app.py

Files changed (1)
  1. app.py +16 -33
app.py CHANGED
@@ -1,41 +1,24 @@
- import gradio as gr
- from transformers import pipeline, T5Tokenizer, T5ForConditionalGeneration, AutoTokenizer, AutoModelForSeq2SeqLM
- import os
  import requests
+ import os

- # Load environment variable for Hugging Face API token
+ # Configure the Hugging Face token as an environment variable
  token = os.getenv("HF_TOKEN")
- headers = {"Authorization": f"Bearer {token}"}
-
- # Load summarization model and tokenizer
- tokenizer = T5Tokenizer.from_pretrained("sumedh/t5-base-amazonreviews", clean_up_tokenization_spaces=True)
- model = T5ForConditionalGeneration.from_pretrained("sumedh/t5-base-amazonreviews")
- summarizer = pipeline("summarization", model=model, tokenizer=tokenizer)
+ if not token:
+     raise ValueError("El token HF_TOKEN no se encontró en las variables de entorno. Asegúrate de configurarlo correctamente.")

- # Translation API details
+ # Header configuration for the translation API
  API_URL = "https://api-inference.huggingface.co/models/Helsinki-NLP/opus-mt-en-es"
+ headers = {"Authorization": f"Bearer {token}"}

- # Summarization and Translation Function
- def texto_sum(text):
-     # Summarize the input text
-     summary = summarizer(text, do_sample=False)[0]['summary_text']
-
-     # Translate summary using the Hugging Face API
-     response = requests.post(API_URL, headers=headers, json={"inputs": summary})
-     translation = response.json()
+ # Translation test to verify the token
+ def test_translation_api():
+     test_response = requests.post(API_URL, headers=headers, json={"inputs": "Hello, how are you?"})
+     response_data = test_response.json()

-     # Check if translation is successful
-     if 'error' in translation:
-         return f"Error in translation: {translation['error']}"
-
-     return translation[0]['translation_text']
-
- # Gradio interface
- demo = gr.Interface(
-     fn=texto_sum,
-     inputs=gr.Textbox(label="Texto a introducir:", placeholder="Introduce el texto a resumir aquí..."),
-     outputs="text"
- )
+     # Check whether the response contains an error
+     if 'error' in response_data:
+         raise ValueError(f"Error en la API de traducción: {response_data['error']}")
+     print("Token válido y API accesible. Respuesta de prueba:", response_data)

- # Launch the interface
- demo.launch()
+ # Run the token test
+ test_translation_api()