import os
import streamlit as st
from langchain_huggingface import HuggingFaceEndpoint
# --- Configuration ---------------------------------------------------------
# SECURITY NOTE(review): a hard-coded API token in source is a credential
# leak risk — prefer loading it from an environment variable or st.secrets.
sec_key = "YOUR_HUGGING_FACE_API_TOKEN_HERE"

# Export the token under the environment variable the Hugging Face /
# LangChain stack actually reads. The previous name, "m_token", was a
# custom name that no library consumes.
os.environ["HUGGINGFACEHUB_API_TOKEN"] = sec_key

# Repository ID of the Hugging Face model to query.
repo_id_mistral = "mistralai/Mistral-7B-Instruct-v0.3"
# --- Streamlit UI layout ---------------------------------------------------
# App title plus the three input widgets: the query text area, the sampling
# temperature slider, and the max-length slider. Help texts are hoisted into
# named constants so the widget calls stay compact.

_QUERY_HELP = (
    "\n"
    "**Enhanced Prompting Instructions:**\n"
    "- Be clear and specific about what you want to know.\n"
    "- Use natural language to describe your query.\n"
    "- If asking a question, ensure it is well-formed and unambiguous.\n"
    "- For best results, provide context or background information if relevant.\n"
)
_TEMPERATURE_HELP = (
    "\n"
    "**Temperature:**\n"
    "- Lower values (e.g., 0.1) make the output more deterministic and focused.\n"
    "- Higher values (e.g., 1.0) make the output more diverse and creative.\n"
)
_MAX_LENGTH_HELP = (
    "\n"
    "**Max Length:**\n"
    "- Controls the maximum number of tokens in the generated response.\n"
    "- Adjust based on the desired length of the response.\n"
)

st.title("🤖 Mistral-7B-Instruct-v0.3 تجربة نموذج 🧙")

# Free-form user query.
user_query = st.text_area("✨ Enter your magical query:", height=100, help=_QUERY_HELP)

# Sampling temperature: low = deterministic, high = creative.
temperature = st.slider(
    "Temperature", min_value=0.1, max_value=1.0, value=0.7, step=0.1, help=_TEMPERATURE_HELP
)

# Upper bound on generated-response length, in tokens.
max_length = st.slider(
    "Max Length", min_value=32, max_value=256, value=128, step=32, help=_MAX_LENGTH_HELP
)
# --- Query handling --------------------------------------------------------
# When "Cast Spell" is pressed, send the query to the model and render the
# response; empty queries get a gentle reminder instead.
if st.button("🪄 Cast Spell"):
    if user_query:
        # Rebuild the endpoint wrapper per request so the current slider
        # values are always in effect.
        # NOTE(review): `max_length` is not a declared HuggingFaceEndpoint
        # field; it is forwarded to the endpoint as a model kwarg. The native
        # field is `max_new_tokens` — confirm the deployed model honors it.
        llm_mistral = HuggingFaceEndpoint(
            repo_id=repo_id_mistral,
            max_length=max_length,
            temperature=temperature,
            # Documented auth parameter name; a bare `token=` kwarg is not a
            # declared field and would be misrouted into model_kwargs.
            huggingfacehub_api_token=sec_key,
        )
        try:
            # Remote inference call — can fail on network, auth, or rate
            # limits, so surface errors in the UI instead of a stack trace.
            response_mistral = llm_mistral.invoke(user_query)
        except Exception as err:
            st.error(f"⚠️ The spell fizzled: {err}")
        else:
            # Display the response.
            st.markdown("🔮 Response from Mistral-7B-Instruct-v0.3:", unsafe_allow_html=True)
            st.markdown(f"{response_mistral}", unsafe_allow_html=True)
            # Persist the exchange so it appears in the history scroll below.
            if 'history' not in st.session_state:
                st.session_state.history = []
            st.session_state.history.append((user_query, response_mistral))
    else:
        st.write("🚨 Please enter a query to cast your spell.")
# --- History controls ------------------------------------------------------
# "Clear History" wipes the stored (query, response) pairs; a confirmation
# banner is shown whenever the button fires.
if st.button("🗑️ Clear History"):
    if 'history' in st.session_state:
        st.session_state.history = []
    st.success("History cleared!")

# Render every past exchange, oldest first.
if 'history' in st.session_state:
    st.subheader("📜 Scroll of Spells Cast")
    for past_query, past_response in st.session_state.history:
        st.write(f"**Query:** {past_query}")
        st.markdown(
            f"**Response from Mistral-7B-Instruct-v0.3:** {past_response}",
            unsafe_allow_html=True,
        )
        st.write("---")