import requests
import os
import time
from gtts import gTTS


HF_API_KEY = os.getenv("HUGGING_FACE_API_KEY")
MISTRAL_API_KEY = os.getenv("MISTRAL_IA_API_KEY")


# Summarisation model (facebook/bart-large-cnn via the Hugging Face Inference API)
def query(payload, max_retries=3):
    headers = {"Authorization": f"Bearer {HF_API_KEY}"}
    API_URL_SUMMARISATION_MODEL = "https://api-inference.huggingface.co/models/facebook/bart-large-cnn"
    for _ in range(max_retries):
        response = requests.post(API_URL_SUMMARISATION_MODEL, headers=headers, json=payload)
        if response.status_code == 200:
            return response.json()[0]['summary_text']
        # The hosted model may still be loading or rate limited; wait and retry
        time.sleep(5)
    return "Failed to summarise text after multiple attempts"

def summarize_text(text):
    payload = {"inputs": text}
    return query(payload)

# Text generation model
def get_diagnosis(prediction, confidence):
    labels = ["glioma", "meningioma", "notumor", "pituitary"]
    result = labels[prediction]
    # The prompt is written in French; the generated diagnosis is translated fr -> es further down
    prompt = f'Tu es un médecin qui doit annoncer à un patient le type de tumeur qu\'il a. Le type de tumeur prédit est "{result}" avec une confiance de {confidence}. Veuillez fournir un diagnostic concis, précis et des conseils.'
    return prompt


def generate_text(input_text, max_retries=3):
    MISTRAL_API_URL = "https://api.mistral.ai/v1/chat/completions"
    
    headers = {
        "Authorization": f"Bearer {MISTRAL_API_KEY}",
        "Content-Type": "application/json"
    }
    
    payload = {
        "model": "mistral-small-latest",
        "messages": [{"role": "user", "content": input_text}]
    }

    for _ in range(max_retries):
        response = requests.post(MISTRAL_API_URL, headers=headers, json=payload)
        if response.status_code == 200:
            return response.json()['choices'][0]['message']['content']
        else:
            # Transient error (e.g. rate limiting): wait before retrying
            time.sleep(5)

    return "Failed to generate text after multiple attempts"

# Translation model
def translate_fr_to_es(input_text, max_retries=5):
    API_URL = "https://api-inference.huggingface.co/models/Helsinki-NLP/opus-mt-fr-es"
    headers = {"Authorization": f"Bearer {HF_API_KEY}"}
    payload = {"inputs": input_text}

    for _ in range(max_retries):
        response = requests.post(API_URL, headers=headers, json=payload)
        if response.status_code == 200:
            return response.json()[0]['translation_text']
        else:
            time.sleep(5)

    return "Une erreur s'est produite veuillez reessayer plus tard."

# Text to speech model
def text_to_speech(input_text):
    # The translated diagnosis is in Spanish, so speech is generated with lang="es"
    output_path = "text_to_speech.mp3"
    tts = gTTS(text=input_text, lang="es")
    tts.save(output_path)
    return output_path
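
# A minimal usage sketch, assuming HUGGING_FACE_API_KEY and MISTRAL_IA_API_KEY are set in the
# environment. The prediction index and confidence below are placeholders for whatever the
# upstream tumour classifier returns; the real app may wire these helpers together differently.
if __name__ == "__main__":
    prompt = get_diagnosis(2, 0.97)                   # 2 -> "notumor" (placeholder values)
    diagnosis_fr = generate_text(prompt)              # French diagnosis from Mistral
    diagnosis_es = translate_fr_to_es(diagnosis_fr)   # translate fr -> es
    audio_path = text_to_speech(diagnosis_es)         # Spanish speech with gTTS
    print(diagnosis_es)
    print(f"Audio saved to {audio_path}")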