Commit · d568cb6
Parent(s): e7cf363
Updated README.md, removed unnecessary prompt
Files changed:
- README.md +26 -0
- app.py +0 -1
- prompts/prompts_manager.py +1 -1
README.md
CHANGED
@@ -11,3 +11,29 @@ license: mit
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+# Easy Dubai Asset Management form compilation
+
+Who doesn't like to fill in a form for every little thing? **Me**,
+so I decided that the best use for the thousand-dollar GPUs in data
+centers was to help me do just that!
+
+Basically, if you live in a community managed by Dubai Asset Management,
+you have to fill in a form for every kind of work you
+want to do in your unit, including furniture delivery.
+
+With this ChatGPT 3.5 tool you can just explain in natural language
+what you want to do and it should fill in the form for you
+automatically. If you upload an image of your signature, it
+will use it to sign the form.
+
+## Other reasons why this is interesting
+In this case I am using an LLM so that a computer can understand
+natural language. For example, if you write that the
+work you need to do will take place in two days, it
+should correctly calculate the date and pass it down
+in a way that the rest of my application can parse, which I think
+is quite neat.
+
+## Why is the code related to the Intel NPU all commented out?
+As of today HF cannot build the wheel: I tried to upload my
+wheel, but I built it on Windows and I guess HF uses Linux.
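The relative-date behaviour the README describes ("in two days" becomes a concrete date the rest of the application can parse) is the easiest part to illustrate. The snippet below is a minimal sketch under assumed conventions, not the Space's actual code: it assumes the LLM is instructed to reply with a bare number of days, and the helper name `resolve_relative_date` is made up for the example.

```python
# Illustrative sketch only: turning a relative date ("the work will take
# place in two days") into an ISO date string downstream code can parse.
# The prompt convention (LLM replies with a bare number of days) and the
# helper name are assumptions, not the implementation used in this Space.
from datetime import date, timedelta


def resolve_relative_date(days_from_now: int, today: date | None = None) -> str:
    """Return the ISO-8601 date that is `days_from_now` days after `today`."""
    today = today or date.today()
    return (today + timedelta(days=days_from_now)).isoformat()


llm_reply = "2"  # hypothetical model output for "in two days"
print(resolve_relative_date(int(llm_reply), today=date(2024, 5, 12)))  # 2024-05-14
```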
app.py
CHANGED
@@ -38,7 +38,6 @@ def use_streamlit():
     with st.status("initialising LLM"):
         repository.init()
     with st.status("waiting for LLM"):
-        repository.send_prompt(pm.ingest_user_answers(user_input))
         answer = repository.send_prompt(pm.verify_user_input_prompt(user_input))
     with st.status("Checking for missing answers"):
         st.session_state["answers"] = LlmParser.parse_verification_prompt_answers(answer['content'])
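After this change the "waiting for LLM" block sends a single prompt and parses its 'content' field. The sketch below mirrors that flow with stand-in stubs; the real Repository, PromptsManager and LlmParser classes live elsewhere in this repo, so their behaviour here is assumed purely for illustration.

```python
# Stand-in sketch of the simplified flow: one prompt is sent, and the
# 'content' field of the reply is parsed into answers. The stub classes
# are assumptions for illustration, not the project's real Repository,
# PromptsManager or LlmParser implementations.

class StubRepository:
    def init(self) -> None:
        pass  # the real class would load / connect to the model here

    def send_prompt(self, prompt: str) -> dict:
        # answer['content'] is indexed in the diff, so a dict with that key is assumed
        return {"content": "1. furniture delivery\n2. 2024-05-14"}


class StubLlmParser:
    @staticmethod
    def parse_verification_prompt_answers(content: str) -> list[str]:
        return [line.split(". ", 1)[1] for line in content.splitlines()]


repository = StubRepository()
repository.init()
answer = repository.send_prompt("verification prompt built from the user's description")
answers = StubLlmParser.parse_verification_prompt_answers(answer["content"])
print(answers)  # ['furniture delivery', '2024-05-14']
```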
prompts/prompts_manager.py
CHANGED
@@ -28,7 +28,7 @@ class PromptsManager:
                 f"{self.verification_prompt}")
 
     def get_work_category(self, work_description: str) -> str:
-        return (f"The work to do is {work_description}. Given the following categories {
+        return (f"The work to do is {work_description}. Given the following categories {', '.join(self.work_categories.values())} "
                 f"which ones are the most relevant? Only return the categories, separated by a semicolon")
 
     def ingest_user_answers(self, user_prompt: str) -> str:
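The corrected line is easier to read outside the diff. Here is a small self-contained sketch of the fixed get_work_category prompt; the work_categories values are invented placeholders, and only the join over self.work_categories.values() and the prompt wording come from the commit itself.

```python
# Self-contained sketch of the corrected prompt construction. The category
# names below are placeholders; only the method name, the join over
# self.work_categories.values() and the prompt wording come from the commit.

class PromptsManagerSketch:
    def __init__(self) -> None:
        self.work_categories = {
            "a": "minor works",          # placeholder values
            "b": "furniture delivery",
            "c": "major works",
        }

    def get_work_category(self, work_description: str) -> str:
        return (f"The work to do is {work_description}. Given the following categories "
                f"{', '.join(self.work_categories.values())} "
                f"which ones are the most relevant? Only return the categories, separated by a semicolon")


pm = PromptsManagerSketch()
print(pm.get_work_category("deliver a new sofa"))
# The work to do is deliver a new sofa. Given the following categories
# minor works, furniture delivery, major works which ones are the most
# relevant? Only return the categories, separated by a semicolon
```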