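# Streamlit front end for the form-filling assistant: the user describes a request,
# an LLM maps the description onto a fixed questionnaire, follow-up questions cover
# anything the parser could not extract, work categories are derived, and a
# pre-filled PDF form is produced for download.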
import os
import uuid
from pathlib import Path

import streamlit as st

from llm_manager.llm_parser import LlmParser
from prompts.prompts_manager import PromptsManager
from repository.repository import get_repository
from repository.repository_abc import ModelRoles, Model
from form.form import build_form_data_from_answers, write_pdf_form

user_msg = ("Please describe what you need to do. "
            "To get the best results try to answer all the following questions:")


def check_for_missing_answers(parsed_questions: dict[int, str | None]) -> list[int]:
    # Indices of the questions the LLM left unanswered (parsed as None).
    return [k for k in parsed_questions if parsed_questions[k] is None]


def use_streamlit():
    pm = PromptsManager()
    help_ = f"{user_msg}\n\n" + '\n'.join(pm.questions)
    repository = get_repository("ondemand",
                                Model("ondemand-gpt-3.5-turbo",
                                      ModelRoles("system", "user", "assistant")))
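
    # Step 0 (no "step" in session_state yet): collect the free-text request and an
    # optional signature image, then advance to "parsing_answers" on submit.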
    if not st.session_state.get("step"):
        with st.form("Please describe your request"):
            user_input = st.text_area("Your input", height=700, label_visibility="hidden",
                                      placeholder=help_, help=help_)
            signature = st.file_uploader("Your signature", key="file_upload")
            st.session_state["signature"] = signature
            st.session_state["session_id"] = str(uuid.uuid4())
            button = st.form_submit_button()
            if button:
                # Alternative local backend (Intel NPU + Llama 3), kept commented out for reference:
                llama3 = "meta-llama/Meta-Llama-3-8B-Instruct"
                # repository = get_repository("intel_npu",
                #                             Model(llama3, ModelRoles("system", "user", "assistant")),
                #                             pm.system_prompt, Path("llm_log.txt"))
                st.session_state["step"] = "parsing_answers"
if st.session_state.get("step") == "parsing_answers": | |
with st.status("initialising LLM"): | |
repository.init() | |
with st.status("waiting for LLM"): | |
answer = repository.send_prompt(pm.verify_user_input_prompt(user_input)) | |
with st.status("Checking for missing answers"): | |
st.session_state["answers"] = LlmParser.parse_verification_prompt_answers(answer['content']) | |
st.session_state["missing_answers"] = check_for_missing_answers(st.session_state["answers"]) | |
if not st.session_state.get("missing_answers"): | |
st.session_state["step"] = "check_category" | |
else: | |
st.session_state["step"] = "ask_again" | |
if st.session_state.get("step") == "ask_again": | |
with st.form("form1"): | |
for ma in st.session_state["missing_answers"]: | |
st.text_input(pm.questions[ma].lower(), key=ma) | |
submitted = st.form_submit_button("Submit answers") | |
if submitted: | |
st.session_state["step"] = "check_category" | |
for ma in st.session_state["missing_answers"]: | |
st.session_state["answers"][ma] = st.session_state[ma] | |
if st.session_state.get("step") == "check_category": | |
with st.status("finding the work categories applicable to your work"): | |
answer = repository.send_prompt(pm.get_work_category(st.session_state["answers"][1])) | |
categories = LlmParser.parse_get_categories_answer(answer['content']) | |
with st.status("categories found, creating PDF form"): | |
form_filename = f"{st.session_state['session_id']}_form.pdf" | |
st.session_state["form_filename"] = form_filename | |
form_data = build_form_data_from_answers(st.session_state["answers"], categories, | |
st.session_state.get("signature")) | |
write_pdf_form(form_data, Path(form_filename)) | |
st.session_state["step"] = "form_created" | |
if st.session_state.get("step") == "form_created": | |
with open(Path(st.session_state["form_filename"]), "rb") as form: | |
st.download_button("download form", form.read(), mime="application/pdf") | |
start_over_button = st.button("Start over") | |
if start_over_button: | |
del st.session_state["step"] | |
os.unlink(st.session_state["form_filename"]) | |
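

# Entry point: launch with Streamlit, e.g. `streamlit run app.py`
# (the file name is an assumption; substitute the actual name of this module).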
use_streamlit()