# Falcon QA Bot — Streamlit app that answers user questions with the
# tiiuae/falcon-7b-instruct model via LangChain's HuggingFaceHub wrapper.
import streamlit as st
from queue import Queue
from langchain import HuggingFaceHub, PromptTemplate, LLMChain
# --- Module-level app setup (runs once per Streamlit script execution) ---

# Set the title of the Streamlit app.
st.title("Falcon QA Bot")

# Get the Hugging Face Hub API token from Streamlit secrets
# (configured in .streamlit/secrets.toml under the key "hf_token").
huggingfacehub_api_token = st.secrets["hf_token"]

# Set the repository ID for the Falcon model hosted on the Hugging Face Hub.
repo_id = "tiiuae/falcon-7b-instruct"

# Initialize the hosted-inference LLM client.
# temperature=0.2 keeps answers mostly deterministic; max_new_tokens caps
# the length of each generated reply.
llm = HuggingFaceHub(
    huggingfacehub_api_token=huggingfacehub_api_token,
    repo_id=repo_id,
    model_kwargs={"temperature": 0.2, "max_new_tokens": 2000}
)

# Prompt template for the assistant; {question} is filled in per request.
template = """
You are an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.
{question}
"""

# Queue of pending user questions.
# NOTE(review): module-level state is shared across Streamlit sessions —
# this queue is not per-user; verify this is intended.
queue = Queue()
def chat(query):
    """
    Generate a response to the user's question using the Falcon LLM.

    :param query: User's question.
    :type query: str
    :return: Response to the user's question.
    :rtype: str
    """
    # Build the prompt and chain once and cache them on the function object:
    # the original rebuilt a PromptTemplate and LLMChain on every call, which
    # is pure overhead since `template` and `llm` are module-level constants.
    chain = getattr(chat, "_chain", None)
    if chain is None:
        prompt = PromptTemplate(template=template, input_variables=["question"])
        chain = LLMChain(prompt=prompt, verbose=True, llm=llm)
        chat._chain = chain
    # predict() substitutes the question into the {question} slot and runs
    # the model, returning the generated text.
    return chain.predict(question=query)
def main():
    """
    Main function for the Streamlit app: read a question from the user,
    generate an answer, and display it.
    """
    # Get the user's question from the input text box.
    user_question = st.text_input(
        "What do you want to ask about",
        placeholder="Input your question here",
    )
    if user_question:
        # BUG FIX: the original pushed the question onto the module-level
        # `queue` and then popped the FRONT entry. Streamlit shares module
        # state across sessions, so a user could receive the answer to a
        # *different* user's earlier question. Each script rerun handles
        # exactly one question, so answer it directly.
        response = chat(user_question)
        # NOTE(review): unsafe_allow_html renders raw model output as HTML,
        # letting the LLM inject arbitrary markup into the page — consider
        # dropping this flag or sanitizing `response` first.
        st.write(response, unsafe_allow_html=True)


if __name__ == '__main__':
    main()