import gradio as gr
from bedrock_client import bedrock_llm
from langchain.schema import SystemMessage, HumanMessage, AIMessage
import os
from distutils.util import strtobool
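# Feature flag: when true, the chat box also accepts file/image uploads (ChatInterface multimodal mode).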
MULTIMODAL = os.environ.get("MULTIMODAL", "false")
# convert common truthy/falsy strings ("true"/"false", "1"/"0", "yes"/"no") to bool
try:
    MULTIMODAL = bool(strtobool(MULTIMODAL))
except ValueError:
    # unrecognized value: warn and fall back to the default
    print(f"Invalid MULTIMODAL value {MULTIMODAL!r}; falling back to False. Use true/false, 1/0 or yes/no.")
    MULTIMODAL = False
AUTHS = [(os.environ.get('USER'), os.environ.get('PW'))]
SYSTEM_PROMPT = os.environ.get('SYSTEM_PROMPT', '')
def chat(message, history):
    """Stream the assistant's reply for the latest user message, given the prior chat history."""
    # 1) start with the system prompt
    history_langchain_format: list = [SystemMessage(content=SYSTEM_PROMPT)]
    # 2) replay the user/assistant turns
    for msg in history:
        if msg["role"] == "user":
            history_langchain_format.append(HumanMessage(content=msg["content"]))
        elif msg["role"] == "assistant":
            history_langchain_format.append(AIMessage(content=msg["content"]))
    # 3) append the new user message
    history_langchain_format.append(HumanMessage(content=message))
    # 4) stream the answer, yielding the accumulated text so the UI updates progressively
    stream = bedrock_llm.stream(history_langchain_format)
    full = next(stream)
    yield full.content
    for chunk in stream:
        full += chunk
        yield full.content
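# Build the Gradio UI: custom CSS, blue/yellow theme, logo header, auto-reload snippet and the chat widget.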
with gr.Blocks(css_paths=["static/deval.css"], theme=gr.themes.Default(primary_hue="blue", secondary_hue="yellow")) as demo:
    # -- Logo + Header + Logout ------------------------------------------------
    with gr.Row():
        with gr.Column(scale=1):
            gr.Image(
                value="static/logo.png",
                height=50,
                show_label=False,
                interactive=False,
                show_download_button=False,
                show_fullscreen_button=False,
                elem_id="logo-primary",  # matches the CSS above
            )
        with gr.Column(scale=10):
            gr.Markdown(
                "# DEvalBot\n\n"
                "**Hinweis:** Bitte gebe keine vertraulichen Informationen ein. "
                "Dazu zΓ€hlen u.a. sensible personenbezogene Daten, institutsinterne "
                "Informationen oder Dokumente, unverΓΆffentlichte Berichtsinhalte, "
                "vertrauliche Informationen oder Dokumente externer Organisationen "
                "sowie sensible erhobene Daten (wie etwa Interviewtranskripte).",
                elem_id="header-text",
            )
    # inject auto-reload script
    gr.HTML(
        """
        <script>
        // Reload the page after 1 second (1,000 ms)
        setTimeout(() => {
            window.location.reload();
        }, 1000);
        </script>
        """
    )
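    # Chat widget backed by the streaming `chat` generator; history is kept as role/content messages.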
    gr.ChatInterface(
        chat,
        type="messages",
        multimodal=MULTIMODAL,
        editable=True,
        concurrency_limit=20,
        save_history=True,
    )
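# Enable request queuing and launch the app; ssr_mode=False disables server-side rendering.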
demo.queue().launch(ssr_mode=False)