# app.py on Hugging Face Space
import gradio as gr
import requests # Used for making HTTP requests to your backend
import os

# --- IMPORTANT ---
# Replace this with your actual Cloudflare Worker URL after deployment.
# You can also set this as a Hugging Face Space secret if you prefer.
BACKEND_API_URL = os.getenv("BACKEND_API_URL", "https://your-worker-name.your-username.workers.dev")
# Example: https://actor-llm-deepseek-backend.your-username.workers.dev
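
# The Worker is assumed to accept the JSON payload built in get_actor_advice()
# below and to return JSON of the form {"response": "<advice text>"}.
# This request/response contract is an assumption of this sketch; adjust the
# key names if your deployed Worker uses different fields.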

# Store conversation history for context
conversation_history = []
current_script_in_session = ""
current_character_in_session = ""
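
# NOTE: module-level globals are shared by every visitor to the Space, so
# concurrent users would overwrite each other's context. Gradio's gr.State
# could hold per-session history instead; the global approach is kept here
# for simplicity.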

def get_actor_advice(user_query, script_input, character_name_input):
    global conversation_history, current_script_in_session, current_character_in_session

    # 1. Check if script or character changed to reset context
    if script_input != current_script_in_session or character_name_input != current_character_in_session:
        if conversation_history: # Only warn when there is prior context to discard
            gr.Warning("Script or character changed! Conversation context has been reset.")
        conversation_history = [] # Reset history
        current_script_in_session = script_input
        current_character_in_session = character_name_input

    # 2. Prepare payload for the Cloudflare Worker
    payload = {
        "userQuery": user_query,
        "scriptContent": script_input,
        "characterName": character_name_input,
        "conversationHistory": conversation_history # Send current history
    }

    headers = {"Content-Type": "application/json"}

    try:
        # 3. Make HTTP POST request to your Cloudflare Worker
        # (a timeout keeps the UI from hanging if the Worker is slow or unreachable)
        response = requests.post(BACKEND_API_URL, json=payload, headers=headers, timeout=60)
        response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)

        response_data = response.json()
        llm_response = response_data.get("response", "No advice received.")

        # 4. Update conversation history with user query and LLM response
        conversation_history.append({"role": "user", "content": user_query})
        conversation_history.append({"role": "assistant", "content": llm_response})
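
        # Optional: cap the stored history so the payload sent to the Worker
        # doesn't grow without bound over a long session (the 20-message limit
        # is an illustrative value, not something the backend requires).
        if len(conversation_history) > 20:
            conversation_history = conversation_history[-20:]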

        return llm_response
    except requests.exceptions.RequestException as e:
        print(f"Error communicating with backend: {e}")
        return f"Error connecting to the backend. Please ensure the backend is deployed and accessible. Details: {e}"
    except Exception as e:
        print(f"An unexpected error occurred: {e}")
        return f"An unexpected error occurred: {e}"

# --- Frontend UI with Gradio ---
with gr.Blocks() as demo:
    gr.Markdown("# Actor's LLM Assistant")
    gr.Markdown("Enter your script and ask for acting advice for your character. The AI will remember past queries in the current session.")

    with gr.Row():
        with gr.Column():
            script_input = gr.Textbox(
                label="Paste Your Script Here",
                lines=10,
                placeholder="[Scene: A dimly lit stage...]\nANNA: (Whispering) 'I can't believe this...'"
            )
            character_name_input = gr.Textbox(
                label="Your Character's Name",
                placeholder="e.g., Anna"
            )
            # Photo customization placeholder (as discussed, for UI or future multimodal)
            photo_upload = gr.Image(
                label="Upload Actor Photo (for UI personalization)",
                type="pil", # Pillow image object
                sources=["upload"],
                interactive=True
            )
            gr.Markdown("*(Note: Photo customization is for UI personalization. The LLM itself currently processes text only.)*")

        with gr.Column():
            query_input = gr.Textbox(
                label="Ask for Acting Advice",
                placeholder="e.g., How should Anna deliver her line 'I can't believe this...' to convey despair?",
                lines=3
            )
            submit_btn = gr.Button("Get Advice")
            output_text = gr.Textbox(label="LLM Advice", lines=7)

    submit_btn.click(
        fn=get_actor_advice,
        inputs=[query_input, script_input, character_name_input],
        outputs=output_text
    )

    gr.Markdown("---")
    gr.Markdown("Powered by DeepSeek LLMs, Hugging Face, and Cloudflare.")

demo.launch() # share=True is unnecessary on Hugging Face Spaces, which already host the app publicly
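
# For local testing outside the Space, point the app at your deployed Worker via
# the environment variable read above (URL below is the same placeholder as in
# BACKEND_API_URL, not a real endpoint):
#   BACKEND_API_URL=https://your-worker-name.your-username.workers.dev python app.py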