Update app.py

app.py CHANGED
@@ -31,21 +31,6 @@ options = {
 
 st.set_page_config(page_title="BonsiAI", page_icon="🤖")
 
-def gen_augmented_prompt(prompt, top_k) :
-    links = ""
-    persist_directory1 = './DB_Decreti'
-    embedding = HuggingFaceEmbeddings(model_name="sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2")
-    db = Chroma(persist_directory=persist_directory1, embedding_function=embedding)
-    context = db.similarity_search(prompt, k=top_k)
-    print(context)
-    generated_prompt = f"""
-    A PARTIRE DAL SEGUENTE CONTESTO: {context},
-
-    ----
-    RISPONDI ALLA SEGUENTE RICHIESTA: {prompt}
-    """
-    return generated_prompt, links
-
 def init_state() :
     if "messages" not in st.session_state:
         st.session_state.messages = []

@@ -103,6 +88,7 @@ def chat_box() :
         st.markdown(message["content"])
 
 def formattaPrompt(prompt, systemRole, systemStyle, instruction):
+    #Attenzione! Il testo generato deve essere lungo {st.session_state.max_tokens*2} CARATTERI
     if instruction.startswith("http"):
         try:
             with st.spinner("Ricerca in Drive...") :

@@ -116,7 +102,7 @@ def formattaPrompt(prompt, systemRole, systemStyle, instruction):
     "input": {{
         "role": "system",
         "content": "{systemRole}",
-        "style": "{systemStyle}
+        "style": "{systemStyle}"
     }},
     "messages": [
     {{

@@ -132,14 +118,29 @@ def formattaPrompt(prompt, systemRole, systemStyle, instruction):
     '''
     return input_text
 
+def gen_augmented_prompt(prompt, top_k) :
+    links = ""
+    persist_directory1 = './DB_Decreti'
+    embedding = HuggingFaceEmbeddings(model_name="sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2")
+    db = Chroma(persist_directory=persist_directory1, embedding_function=embedding)
+    context = db.similarity_search(prompt, k=top_k)
+    generated_prompt = f"""
+    A PARTIRE DAL SEGUENTE CONTESTO: {context},
+
+    ----
+    RISPONDI ALLA SEGUENTE RICHIESTA: {prompt}
+    """
+    return generated_prompt, links
+
 def generate_chat_stream(prompt) :
     links = []
-
-
+    prompt_originale = prompt
+    prompt = formattaPrompt(prompt, st.session_state.systemRole, st.session_state.systemStyle, st.session_state.instruction)
     if st.session_state.rag_enabled :
         with st.spinner("Ricerca nei documenti...."):
             time.sleep(1)
-            prompt, links = gen_augmented_prompt(prompt=
+            prompt, links = gen_augmented_prompt(prompt=prompt_originale, top_k=st.session_state.top_k)
+            print(prompt)
     with st.spinner("Generazione in corso...") :
         time.sleep(1)
         chat_stream = chat(prompt, st.session_state.history,chat_client=CHAT_BOTS[st.session_state.chat_bot] ,
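The re-added gen_augmented_prompt only opens an existing Chroma collection persisted at ./DB_Decreti; this commit does not show how that collection is built. Below is a minimal sketch of one way it could be created, assuming the same langchain Chroma/HuggingFaceEmbeddings stack that app.py uses at query time (the import paths, the PyPDFLoader source file, and the chunking parameters are illustrative assumptions, not taken from this Space):

# Sketch only: one possible way to build the persisted ./DB_Decreti collection that
# gen_augmented_prompt() reopens. Import paths, the PDF loader and the chunk sizes
# are assumptions, not taken from this Space.
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma
from langchain.text_splitter import RecursiveCharacterTextSplitter

# Same multilingual model used at query time, so the stored vectors are comparable.
embedding = HuggingFaceEmbeddings(
    model_name="sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
)

docs = PyPDFLoader("decreto_esempio.pdf").load()  # hypothetical source document
chunks = RecursiveCharacterTextSplitter(
    chunk_size=1000, chunk_overlap=100
).split_documents(docs)

# With persist_directory set, recent chromadb versions write the collection to disk
# automatically; app.py later reopens it with Chroma(persist_directory='./DB_Decreti', ...).
db = Chroma.from_documents(chunks, embedding, persist_directory="./DB_Decreti")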
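For a quick manual check of the retrieval step outside the chat flow (for example in a scratch script or a temporary block at the bottom of app.py), the function can be called directly; the query text and top_k value below are placeholders:

# Manual check of gen_augmented_prompt (placeholder query; assumes ./DB_Decreti exists
# and the HuggingFaceEmbeddings/Chroma dependencies of app.py are installed).
augmented, links = gen_augmented_prompt("example question about the decrees", top_k=3)
print(links)      # always "" in the current implementation
print(augmented)  # the retrieved context followed by the original request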