import os
import requests
import gradio as gr
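# Note: the access token is read from the environment; on Hugging Face Spaces
# this would typically be stored as a repository secret named TOKEN.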
api_token = os.environ.get("TOKEN")
API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
headers = {"Authorization": f"Bearer {api_token}"}
def query(payload):
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()
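# Note: on success the Inference API typically returns a list such as
# [{"generated_text": "..."}], while errors (including model cold starts) come
# back as a dict with an "error" key; analyze_sentiment checks for that below.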
def analyze_sentiment(text):
    output = query({
        # Llama 3 instruct-style prompt: system instruction, user message, then
        # the assistant header so the model completes with its own answer.
        "inputs": f'''<|begin_of_text|>
<|start_header_id|>system<|end_header_id|>
You are a feeling analyser: reply only "positive" if I am feeling positive and "negative" if I am feeling sad or bad.<|eot_id|>
<|start_header_id|>user<|end_header_id|>
{text}
<|eot_id|>
<|start_header_id|>assistant<|end_header_id|>''',
        # Generation parameters belong in the payload, not inside the prompt string.
        "parameters": {
            "max_new_tokens": 1,
            "return_full_text": False
        }
    })
    # Make sure the API output is handled correctly
    if isinstance(output, list) and len(output) > 0:
        return output[0].get('generated_text', 'Error: unexpected response')
    else:
        return "Error: unexpected response from the API"
demo = gr.Interface(
    fn=analyze_sentiment,
    inputs=["text"],
    outputs=["text"],
)

demo.launch()
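# Local usage (assuming this file is saved as app.py and a valid Hugging Face
# token is available; "hf_xxx" below is a placeholder):
#   TOKEN=hf_xxx python app.py
# then open the Gradio URL printed in the terminal.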