alex-abb committed on
Commit
a06cfd4
·
verified ·
1 Parent(s): 7e23014

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -6
app.py CHANGED
@@ -1,26 +1,30 @@
1
- import torch
2
  import os
3
  import requests
4
- import spaces
5
  import gradio as gr
6
 
7
  api_token = os.environ.get("TOKEN")
8
 
9
  API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
10
  headers = {"Authorization": f"Bearer {api_token}"}
11
- @spaces.GPU
12
 
13
def query(payload):
    """Send *payload* to the Inference API endpoint and return its JSON body."""
    api_response = requests.post(API_URL, headers=headers, json=payload)
    return api_response.json()
16
 
17
def analyze_sentiment(text):
    """Ask the Llama-3 endpoint whether *text* reads as 'positive' or 'negative'.

    Returns the model's generated text; returns None implicitly when the
    API response does not have the expected list shape.
    """
    prompt = f"<|begin_of_text|><|start_header_id|>system<|end_header_id|>You're a sentiment analyzer. Your role is to evaluate the general feeling of the prompt. Answer only with 'positive' or 'negative'. Don't add any explanations. Here's the text to analyze (don't add any text) : {text}<|eot_id|><|start_header_id|>user<|end_header_id|>"

    output = query({"inputs": prompt})

    # Handle the API output carefully — it may be an error object rather
    # than the expected list of generations.
    if isinstance(output, list) and len(output) > 0:
        return output[0].get('generated_text', 'Erreur: Réponse inattendue')
 
 
1
  import os
2
  import requests
 
3
  import gradio as gr
4
 
5
# The Hugging Face API token is injected through the TOKEN environment
# variable (None when unset).
api_token = os.environ.get("TOKEN")

# Serverless Inference API endpoint for Meta-Llama-3-8B-Instruct.
API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
headers = {"Authorization": f"Bearer {api_token}"}
 
9
 
10
def query(payload):
    """POST *payload* to the HF Inference API and return the decoded JSON.

    A timeout is set because `requests` has no default timeout — without
    one, a stalled API call would hang the app indefinitely.
    """
    response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
    return response.json()
13
 
14
def analyze_sentiment(text):
    """Classify *text* as 'positive' or 'negative' via the Llama-3 endpoint.

    Returns the model's generated text, or the French error message when
    the API response does not have the expected list shape.
    """
    # Llama-3 chat template: the system turn carries the instructions and
    # embeds the text to classify (same prompt layout the app used before).
    prompt = f"<|begin_of_text|><|start_header_id|>system<|end_header_id|>You're a sentiment analyzer. Your role is to evaluate the general feeling of the prompt. Answer only with 'positive' or 'negative'. Don't add any explanations. Here's the text to analyze (don't add any text) : {text}<|eot_id|><|start_header_id|>user<|end_header_id|>"

    # Use the module-level query() helper (the previous inner re-definition
    # shadowed it to no effect). The Inference API text-generation payload
    # takes "inputs" as a string — the earlier hard-coded
    # {"system": ..., "user": "hello"} dict also ignored `text` entirely.
    output = query({"inputs": prompt})

    # Handle the API output carefully: on success it is a list of
    # generations; otherwise surface the error message explicitly instead
    # of falling through and returning None.
    if isinstance(output, list) and len(output) > 0:
        return output[0].get('generated_text', 'Erreur: Réponse inattendue')
    return 'Erreur: Réponse inattendue'