import gradio as gr

# Custom CSS: give the whole app a soft pink-to-cyan gradient background.
css = """
.gradio-container {
    background: linear-gradient(to right, #FFDEE9, #B5FFFC);
}
"""

# Load Mistral-7B-Instruct from the Hugging Face Hub through Gradio's model
# loader; the returned object wraps the model's hosted inference API.
model = gr.load("models/mistralai/Mistral-7B-Instruct-v0.3")


def inference_fn(prompt):
    """
    Call the loaded model with the user's prompt.

    gr.load(...) returns a Gradio interface object, so we can call it like a
    function to get the model's response.
    """
    response = model(prompt)
    return response
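

# Optional sketch, not part of the original app: the hosted inference call can
# fail (model cold start, rate limits, network errors), so a wrapper such as
# the hypothetical safe_inference_fn below returns the error text to the UI
# instead of raising. It could be wired to the button in place of inference_fn.
def safe_inference_fn(prompt):
    """Call the model, surfacing any failure as text rather than an exception."""
    try:
        return model(prompt)
    except Exception as exc:  # broad catch: any error from the remote API call
        return f"Request failed: {exc}"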


with gr.Blocks(css=css) as demo:
    gr.Markdown("<h1 style='text-align: center;'>Hello and welcome to the consent chat</h1>")

    with gr.Row():
        user_input = gr.Textbox(label="Enter your message here:", lines=3)
        output = gr.Textbox(label="Response from the Mistral-7B-Instruct model:", lines=5)
    send_button = gr.Button("Send")

    # Send the prompt to the model when the button is clicked.
    send_button.click(fn=inference_fn, inputs=user_input, outputs=output)
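
    # Optional sketch, an addition rather than part of the original app: Gradio
    # Textbox components also expose a .submit() event, so wiring it to the same
    # function lets users press Enter to send their message.
    user_input.submit(fn=inference_fn, inputs=user_input, outputs=output)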


if __name__ == "__main__":
    demo.launch()