File size: 2,677 Bytes
dc84669
f904c62
 
dc84669
f904c62
dc84669
 
a3dead0
 
 
 
 
 
 
 
 
f904c62
a3dead0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
dc84669
 
f904c62
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e4f94d7
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import gradio as gr
from bedrock_client import claude_llm
from utils import load_users

# Credentials for the app's login screen, loaded from user.csv.
# NOTE(review): load_users is defined in utils — presumably it returns a
# structure gr.Blocks.launch(auth=...) accepts (list of (user, pass) tuples
# or a callable); confirm against utils.load_users.
AUTHS = load_users('user.csv')


# System prompt prepended to every prompt sent to Claude (see chat() below).
# The text is a runtime string and intentionally stays in German: it tells the
# model it is "DevalBot", an assistant of the German Institute for Development
# Evaluation (DEval), that it should answer in German by default, and that it
# can help with statistics/programming in Stata and R.
SYSTEM_PROMPT = (
    "Du bist DevalBot, ein konversationeller Assistent des Deutschen Evaluierungsinstituts "
    "für Entwicklungsbewertung (DEval). DEval bietet staatlichen und zivilgesellschaftlichen "
    "Organisationen in der Entwicklungszusammenarbeit unabhängige und wissenschaftlich fundierte "
    "Evaluierungen. Deine Hauptsprache ist Deutsch; antworte daher standardmäßig auf Deutsch. "
    "Du kannst zudem bei statistischen Analysen und Programmierung in Stata und R unterstützen."
)

def chat(user_message, history):
    """Stream a reply from Claude, yielding the updated UI history per token.

    Parameters
    ----------
    user_message : str | None
        The newly submitted message. Blank or whitespace-only input is
        ignored (the generator then yields nothing).
    history : list[dict]
        Prior turns in Gradio "messages" format: {"role": ..., "content": ...}.

    Yields
    ------
    list[dict]
        The conversation shown in the UI, including the partially (and
        finally fully) streamed assistant reply.
    """
    # Guard clause: nothing to do for empty submissions.
    if not (user_message and user_message.strip()):
        return

    # Conversation as the UI will display it, with the new user turn added.
    shown = history + [{"role": "user", "content": user_message}]

    # Flatten the system prompt + transcript into one text prompt for Claude.
    lines = [SYSTEM_PROMPT]
    # Role names are capitalized ("User:", "Assistant:") for readability.
    lines.extend(f"{turn['role'].capitalize()}: {turn['content']}" for turn in history)
    lines.append(f"User: {user_message}")
    lines.append("Assistant:")         # cue for Claude to continue from here
    prompt = "\n".join(lines)

    # Stream tokens, re-yielding the growing assistant message each time.
    answer = ""
    for piece in claude_llm.stream(prompt):
        answer += piece
        yield shown + [{"role": "assistant", "content": answer}]

    # Emit the completed conversation one last time.
    shown.append({"role": "assistant", "content": answer})
    yield shown


# ── UI layout: logo header, auto-reload script, chat panel, launch ──
with gr.Blocks(
    css_paths=["static/deval.css"],
    theme=gr.themes.Default(primary_hue="blue", secondary_hue="yellow"),
) as demo:
    # Static, non-interactive logo in the header.
    gr.Image(
        value="static/logo.png",
        show_label=False,
        interactive=False,
        show_download_button=False,
        show_fullscreen_button=False,
        elem_id="logo-primary",    # styled via static/deval.css
    )

    #logout_btn = gr.Button("Logout", elem_id="logout-btn")

    # Periodic client-side page reload.
    # BUG FIX: the original passed 1000 ms (one reload per second — unusable)
    # while its own comment said "(300 000 ms)". Use the 5-minute interval
    # the comment specified.
    gr.HTML(
        """
        <script>
          // Reload the page after 5 minutes (300 000 ms)
          setTimeout(() => {
            window.location.reload();
          }, 300000);
        </script>
        """
    )

    # Chat panel backed by the streaming `chat` generator defined above.
    gr.ChatInterface(
        chat,
        type="messages",
        editable=True,
        concurrency_limit=200,
        save_history=True,
    )


# queue() enables streaming/concurrency; AUTHS gates access via the login screen.
demo.queue().launch(auth=AUTHS, share=True, ssr_mode=False)