Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -1,26 +1,30 @@
-import torch
 import os
 import requests
-import spaces
 import gradio as gr
 
 api_token = os.environ.get("TOKEN")
 
 API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
 headers = {"Authorization": f"Bearer {api_token}"}
-@spaces.GPU
 
 def query(payload):
     response = requests.post(API_URL, headers=headers, json=payload)
     return response.json()
 
 def analyze_sentiment(text):
-
-
+    def query(payload):
+        response = requests.post(API_URL, headers=headers, json=payload)
+        return response.json()
+
     output = query({
-        "inputs":
+        "inputs": {
+            "system": "you only answer in bulgarian",
+            "user": "hello",
+        }
     })
 
+    print(output)
+
     # Assurez-vous de gérer correctement la sortie de l'API
     if isinstance(output, list) and len(output) > 0:
         return output[0].get('generated_text', 'Erreur: Réponse inattendue')
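One note on the new payload: the serverless Inference API's text-generation task expects "inputs" to be a single prompt string (optionally alongside a "parameters" object), so posting a nested {"system": ..., "user": ...} dict will usually come back as an error response rather than a list of generations, and analyze_sentiment then returns None. The inner def query also only shadows the module-level one, and the text argument coming from Gradio is never used. Below is a minimal sketch of how the call could be structured instead, keeping the commit's system instruction while passing the user's text through. The prompt wording, the max_new_tokens / return_full_text parameters, the error branch, and the gr.Interface wiring at the bottom are illustrative additions, not part of the commit; the real app.py may already create its interface below the lines shown in the diff.

import os
import requests
import gradio as gr

api_token = os.environ.get("TOKEN")

API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
headers = {"Authorization": f"Bearer {api_token}"}

def query(payload):
    # Call the serverless Inference API and return the decoded JSON body.
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()

def analyze_sentiment(text):
    # The text-generation task wants "inputs" as one prompt string, so the
    # commit's system instruction and the user's text are folded together here.
    prompt = (
        "You only answer in Bulgarian.\n\n"
        f"User: {text}\n"
        "Assistant:"
    )
    output = query({
        "inputs": prompt,
        # Assumed generation parameters, not taken from the commit.
        "parameters": {"max_new_tokens": 100, "return_full_text": False},
    })
    print(output)  # shows up in the Space logs while debugging

    # Make sure to handle the API output correctly: a list of generations on
    # success, a dict with an "error" key otherwise.
    if isinstance(output, list) and len(output) > 0:
        return output[0].get("generated_text", "Error: unexpected response")
    if isinstance(output, dict) and "error" in output:
        return f"Error: {output['error']}"
    return str(output)

# Hypothetical wiring, in case nothing further down in app.py does this already;
# the Space needs an interface to be built and launched somewhere in the file.
demo = gr.Interface(fn=analyze_sentiment, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()

Folding the system and user text into a plain prompt is a simplification; the Llama 3 Instruct chat template, with its special header tokens, would be the more faithful way to pass a system message to this model.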