import streamlit as st
from queue import Queue
from langchain import HuggingFaceHub, PromptTemplate, LLMChain
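
# Note: these top-level imports match older langchain releases; in newer
# versions HuggingFaceHub lives in langchain_community.llms, and
# PromptTemplate / LLMChain under langchain.prompts / langchain.chains.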

# Set the title of the Streamlit app
st.title("Falcon QA Bot")

# Get the Hugging Face Hub API token from Streamlit secrets
huggingfacehub_api_token = st.secrets["hf_token"]
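# Locally, the secret lives in .streamlit/secrets.toml, e.g. (placeholder value):
# hf_token = "hf_..."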

# Set the repository ID for the Falcon model
repo_id = "tiiuae/falcon-7b-instruct"

# Initialize the Hugging Face Hub LLM (the LLMChain is built per request in chat())
llm = HuggingFaceHub(
    huggingfacehub_api_token=huggingfacehub_api_token,
    repo_id=repo_id,
    model_kwargs={"temperature": 0.2, "max_new_tokens": 2000},
)

# Define the template for the assistant's response
template = """
You are an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.
{question}
"""

# Create a queue to store user questions
queue = Queue()
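# Note: Streamlit re-runs this script on every interaction, so the queue is
# recreated on each rerun and only ever holds the current question; it does
# not persist waiting users across reruns.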

def chat(query):
    """
    Generates a response to the user's question using the LLMChain model.

    :param query: User's question.
    :type query: str
    :return: Response to the user's question.
    :rtype: str
    """
    # Create a prompt template with the question variable
    prompt = PromptTemplate(template=template, input_variables=["question"])

    # Create an LLMChain instance with the prompt and the Falcon model
    llm_chain = LLMChain(prompt=prompt, verbose=True, llm=llm)

    # Generate a response to the user's question
    result = llm_chain.predict(question=query)
    return result
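
# Example usage outside Streamlit (hypothetical question):
#     answer = chat("What is the Falcon model trained on?")
#     print(answer)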

def main():
    """
    Main function for the Streamlit app.
    """
    # Get the user's question from the input text box
    user_question = st.text_input("What do you want to ask about", placeholder="Input your question here")

    if user_question:
        # Add the user's question to the queue
        queue.put(user_question)

    # Check if there are any waiting users
    if not queue.empty():
        # Get the next user's question from the queue
        query = queue.get()

        # Generate a response to the user's question
        response = chat(query)

        # Display the response to the user; st.markdown supports rendering
        # raw HTML via the unsafe_allow_html flag
        st.markdown(response, unsafe_allow_html=True)

if __name__ == '__main__':
    main()
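
# To run locally (assuming the script is saved as app.py and streamlit,
# langchain, and huggingface_hub are installed):
#     streamlit run app.py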