# Author: Nathan Butters
# Attempt 1 to make this work (commit 5b1c7ed)
import streamlit as st
import logging
from huggingface_hub import InferenceClient
from helpers.systemPrompts import base, tutor
import os

# Module-level logger for the chat helpers below.
logger = logging.getLogger(__name__)

# Hugging Face Inference API token read from the 'hf_api' environment
# variable; None when unset (API calls will then fail to authenticate).
api_key = os.environ.get('hf_api')

# Shared inference client used by every chat function in this module.
client = InferenceClient(api_key=api_key)
def hf_stream(model_name: str, messages: list):
    """Stream a chat completion from *model_name* for *messages*.

    Args:
        model_name: Hugging Face model id to query.
        messages: Chat history as a list of ``{"role", "content"}`` dicts.

    Yields:
        str: Each content delta as it arrives, suitable for feeding
        directly to ``st.write_stream``.
    """
    stream = client.chat.completions.create(
        model=model_name,
        messages=messages,
        max_tokens=1000,
        stream=True,
    )
    for chunk in stream:
        # BUG FIX: the original body was a bare
        # `chunk.choices[0].delta.content, end=""` — a leftover print()
        # fragment that is a SyntaxError and discarded every delta.
        content = chunk.choices[0].delta.content
        if content:  # skip keep-alive chunks with no content
            yield content
def hf_generator(model, prompt, data=None):
    """Stream a (optionally multimodal) chat completion as text deltas.

    Args:
        model: Hugging Face model id to query.
        prompt: Either the user's text (str) — combined with *data* into a
            single multimodal user message — or an already-built list of
            chat message dicts, which is forwarded as-is.
        data: Optional image URL to attach when *prompt* is a str.

    Yields:
        str: Content deltas, suitable for ``st.write_stream``.

    Notes:
        BUG FIX: previously this required three arguments (basicChat and
        guidedMM passed only two → TypeError) and returned a single
        ChatCompletionMessage object, which is not a text stream and broke
        every ``st.write_stream(hf_generator(...))`` call site. It now
        defaults *data* to None and streams plain text.
    """
    if isinstance(prompt, str):
        content = [{"type": "text", "text": prompt}]
        if data is not None:
            content.append({"type": "image_url", "image_url": {"url": data}})
        messages = [{"role": "user", "content": content}]
    else:
        # Caller supplied a prepared message list (see basicChat/guidedMM).
        messages = prompt
    completion = client.chat.completions.create(
        model=model,
        messages=messages,
        max_tokens=500,
        stream=True,
    )
    for chunk in completion:
        piece = chunk.choices[0].delta.content
        if piece:  # skip keep-alive chunks with no content
            yield piece
def basicChat():
    """Run one text-only chat turn with the model in ``st.session_state.model``.

    Reads user input from ``st.chat_input``, appends it to
    ``st.session_state.messages``, streams the assistant's reply into the
    chat container, and appends the reply to the history.
    """
    # Accept user input and then write the response.
    if prompt := st.chat_input("How may I help you learn math today?"):
        # Add user message to chat history.
        st.session_state.messages.append({"role": "user", "content": prompt})
        logger.info(st.session_state.messages[-1])
        # Display user message in chat message container.
        with st.chat_message("user"):
            st.markdown(prompt)
        with st.chat_message(st.session_state.model):
            # Strip any extra keys (e.g. "images") so the API payload is
            # a clean list of {"role", "content"} dicts.
            history = [
                {"role": m["role"], "content": m["content"]}
                for m in st.session_state.messages
            ]
            logger.info(f"Message to {st.session_state.model}: {history}")
            # BUG FIX: previously called hf_generator with two arguments
            # (its signature required three) and handed st.write_stream a
            # non-stream return value. Stream directly from the client.
            stream = client.chat.completions.create(
                model=st.session_state.model,
                messages=history,
                max_tokens=1000,
                stream=True,
            )
            response = st.write_stream(
                (chunk.choices[0].delta.content or "" for chunk in stream)
            )
        st.session_state.messages.append({"role": "assistant", "content": response})
        logger.info(st.session_state.messages[-1])
def mmChat(data):
    """Run one multimodal chat turn: the user's text plus an image.

    Args:
        data: Image URL (or data URL) to send alongside the user's text.
            Stored in the history under an "images" key for later display.
    """
    if prompt := st.chat_input("How may I help you learn math today?"):
        # Add user message (with its image) to chat history.
        st.session_state.messages.append(
            {"role": "user", "content": prompt, "images": [data]}
        )
        logger.info(st.session_state.messages[-1])
        # Display user message in chat message container.
        with st.chat_message("user"):
            st.markdown(prompt)
        with st.chat_message(st.session_state.model):
            logger.info(f"Message to {st.session_state.model}: {st.session_state.messages[-1]}")
            # BUG FIX: hf_generator returned a single message object, which
            # st.write_stream cannot consume; stream text deltas instead.
            stream = client.chat.completions.create(
                model=st.session_state.model,
                messages=[{
                    "role": "user",
                    "content": [
                        {"type": "text", "text": prompt},
                        {"type": "image_url", "image_url": {"url": data}},
                    ],
                }],
                max_tokens=500,
                stream=True,
            )
            response = st.write_stream(
                (chunk.choices[0].delta.content or "" for chunk in stream)
            )
        st.session_state.messages.append({"role": "assistant", "content": response})
        logger.info(st.session_state.messages[-1])
def guidedMM(sysChoice: str, data):
    """Run one multimodal chat turn guided by a selectable system prompt.

    Args:
        sysChoice: "Tutor" selects the ``tutor`` system prompt; any other
            value falls back to ``base``.
        data: Image URL (or data URL) to send alongside the user's text.
    """
    system = tutor if sysChoice == "Tutor" else base
    if prompt := st.chat_input("How may I help you learn math today?"):
        user_message = {
            "role": "user",
            "content": [
                {
                    "type": "text",
                    "text": prompt,
                },
                {
                    "type": "image_url",
                    "image_url": {
                        "url": data,
                    },
                },
            ],
        }
        # BUG FIX: the original appended a one-element LIST containing the
        # message dict, corrupting the history's list-of-dicts shape.
        st.session_state.messages.append(user_message)
        logger.info(st.session_state.messages[-2:])
        # Display user message in chat message container.
        with st.chat_message("user"):
            st.markdown(prompt)
        with st.chat_message(st.session_state.model):
            logger.info(f"Message to {st.session_state.model}: {st.session_state.messages[-1]}")
            # BUG FIX: `system` was selected but never sent to the model,
            # and hf_generator was called with the wrong arity. Send the
            # system prompt explicitly and stream text deltas.
            stream = client.chat.completions.create(
                model=st.session_state.model,
                messages=[{"role": "system", "content": system}, user_message],
                max_tokens=500,
                stream=True,
            )
            response = st.write_stream(
                (chunk.choices[0].delta.content or "" for chunk in stream)
            )
        st.session_state.messages.append({"role": "assistant", "content": response})
        logger.info(st.session_state.messages[-1])