import gradio as gr
# Custom CSS for pastel gradient
css = """
.gradio-container {
    background: linear-gradient(to right, #FFDEE9, #B5FFFC);
}
"""
# Load the Mistral-7B-Instruct-v0.3 model via Gradio's load function
model = gr.load("models/mistralai/Mistral-7B-Instruct-v0.3")
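# Note (assumption): gr.load("models/...") is expected to serve the model through the
# Hugging Face Inference API rather than downloading weights locally; since
# Mistral-7B-Instruct-v0.3 is a gated model, an HF token may need to be configured
# in the Space for requests to succeed.
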
def inference_fn(prompt):
    """
    This function calls the loaded model with the user's prompt.
    gr.load(...) returns a Gradio interface object, so we can call it like a function.
    """
    # If the loaded model is a pipeline or interface, calling it directly returns the response.
    response = model(prompt)
    return response
with gr.Blocks(css=css) as demo:
    # Greeting at the top
    gr.Markdown("<h1 style='text-align: center;'>Bonjour Dans le chat du consentement</h1>")

    # Create the input/output layout
    with gr.Row():
        user_input = gr.Textbox(label="Entrez votre message ici:", lines=3)
        output = gr.Textbox(label="Réponse du Modèle Mistral-7B-Instruct:", lines=5)

    send_button = gr.Button("Envoyer")

    # Link the button to inference_fn
    send_button.click(fn=inference_fn, inputs=user_input, outputs=output)
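
# Optional: if responses from the hosted model are slow, calling demo.queue()
# before demo.launch() can help avoid request timeouts.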
# Launch the app
if __name__ == "__main__":
    demo.launch()