Update app.py
app.py CHANGED
@@ -1,201 +1,48 @@
- [content of removed lines 1-48 not shown in this view]
-        model_locator = None
-        if model_name == "meta-llama/Llama-3.3-70B-Instruct":
-            model_locator = self.page.locator('::-p-aria(View details for meta-llama/Llama-3.3-70B-Instruct)')
-        elif model_name == "Qwen/Qwen2.5-72B-Instruct":
-            model_locator = self.page.locator('::-p-aria(View details for Qwen/Qwen2.5-72B-Instruct)')
-        elif model_name == "CohereForAI/c4ai-command-r-plus-08-2024":
-            model_locator = self.page.locator('::-p-aria(View details for CohereForAI/c4ai-command-r-plus-08-2024)')
-        elif model_name == "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B":
-            model_locator = self.page.locator('::-p-aria(View details for deepseek-ai/DeepSeek-R1-Distill-Qwen-32B)')
-        elif model_name == "nvidia/Llama-3.1-Nemotron-70B-Instruct-HF":
-            model_locator = self.page.locator('::-p-aria(View details for nvidia/Llama-3.1-Nemotron-70B-Instruct-HF)')
-        elif model_name == "Qwen/QwQ-32B-Preview":
-            model_locator = self.page.locator('::-p-aria(View details for Qwen/QwQ-32B-Preview)')
-        elif model_name == "Qwen/Qwen2.5-Coder-32B-Instruct":
-            model_locator = self.page.locator('::-p-aria(View details for Qwen/Qwen2.5-Coder-32B-Instruct)')
-        elif model_name == "meta-llama/Llama-3.2-11B-Vision-Instruct":
-            model_locator = self.page.locator('::-p-aria(View details for meta-llama/Llama-3.2-11B-Vision-Instruct)')
-        elif model_name == "NousResearch/Hermes-3-Llama-3.1-8B":
-            model_locator = self.page.locator('::-p-aria(View details for NousResearch/Hermes-3-Llama-3.1-8B)')
-        elif model_name == "mistralai/Mistral-Nemo-Instruct-2407":
-            model_locator = self.page.locator('::-p-aria(View details for mistralai/Mistral-Nemo-Instruct-2407)')
-        elif model_name == "microsoft/Phi-3.5-mini-instruct":
-            model_locator = self.page.locator('::-p-aria(View details for microsoft/Phi-3.5-mini-instruct)')
-        else:
-            raise ValueError(f"Model '{model_name}' not found in the predefined list.")
-
-
-        if model_locator:
-            await model_locator.click()
-            self.current_model = model_name
-        else:
-            raise ValueError(f"Model '{model_name}' selection failed.")
-
-    async def set_system_prompt(self, system_prompt):
-        """Sets the system prompt."""
-        await self.page.locator("div.p-3 path").click()
-        await self.page.locator('::-p-aria(Custom system prompt)').fill(system_prompt)
-        await self.page.locator('::-p-aria(New chat)').click()
-
-
-    async def toggle_web_search(self, enable: bool):
-        """Enable or disable web search."""
-        await self.page.locator('::-p-aria(Search)').click() #open tools
-        code_executor_button = self.page.locator('::-p-aria(Code Executor)')
-
-        if enable:
-            # Check if already enabled
-            if "bg-gray-100" not in await code_executor_button.get_attribute("class"):
-                await code_executor_button.click() # Enable
-        else:
-            # Check if already disabled
-            if "bg-gray-100" in await code_executor_button.get_attribute("class"):
-                await code_executor_button.click() #click to open
-                await self.page.locator('::-p-aria(Deactivate)').click() # Disable
-        await self.page.locator('div:nth-of-type(2) > div > div > button path').click() #close tools
-
-
-
-    async def send_message(self, message: str, expect_response: bool = True, timeout: float = 60.0):
-        """Sends a message and optionally waits for a response."""
-
-        self.conversation_history.append({"role": "user", "content": message})
-        await self.page.locator('::-p-aria(Ask anything)').fill(message)
-        await self.page.locator('::-p-aria(Send message)').click()
-
-        if expect_response:
-            try:
-                # Wait for a new message bubble to appear. This is a more robust way
-                # to detect a response than just waiting a fixed amount of time.
-                await self.page.locator('.group.self-end').last.wait_for(timeout=timeout * 1000) # milliseconds
-                response_elements = await self.page.locator('.group.self-end > .markdown').all()
-                response = await response_elements[-1].inner_text() # Get the last response
-                self.conversation_history.append({"role": "assistant", "content": response})
-                return response
-
-            except Exception as e:
-                print(f"Error waiting for response: {e}")
-                return None # Or raise the exception, depending on your needs
-        else:
-            return None
-
-
-
-    async def new_chat(self):
-        """Starts a new chat."""
-        await self.page.locator('::-p-aria(New chat)').click()
-        self.conversation_history = [] # Clear history for the new chat
-
-
-    async def chat(self, message: str, model_name: str = None, system_prompt: str = None, web_search: bool = False, timeout: float = 60.0):
-        """
-        Combined method for setting parameters and sending a message.
-
-        Args:
-            message: The user's message.
-            model_name: The name of the model to use.
-            system_prompt: The system prompt to set.
-            web_search: Whether to enable web search.
-            timeout: Timeout for waiting for a response, in seconds.
-
-        Returns:
-            The model's response, or None if no response is received.
-        """
-        if model_name and model_name != self.current_model:
-            await self.new_chat() # Start a new chat when changing models
-            await self.select_model(model_name)
-
-        if system_prompt is not None: # Allow empty string "" as a valid prompt
-            await self.set_system_prompt(system_prompt)
-
-        if web_search is not None:
-            await self.toggle_web_search(web_search)
-
-        return await self.send_message(message, timeout=timeout)
-
-
-async def main():
-    # --- Example Usage ---
-    chat_api = HuggingChatAPI(headless=False) # Set headless=False for debugging
-    try:
-        await chat_api.start()
-
-        # Example 1: Simple chat with default model
-        response1 = await chat_api.chat("What is the capital of France?")
-        print(f"Response 1: {response1}")
-
-        # Example 2: Chat with a specific model and system prompt
-        response2 = await chat_api.chat("Explain the theory of relativity.",
-                                        model_name="Qwen/Qwen2.5-72B-Instruct",
-                                        system_prompt="You are a helpful and concise assistant.")
-        print(f"Response 2: {response2}")
-
-        # Example 3: Enable web search
-        response3 = await chat_api.chat("What is the latest news on AI?",
-                                        model_name="CohereForAI/c4ai-command-r-plus-08-2024",
-                                        web_search=True)
-        print(f"Response 3: {response3}")
-
-        # Example 4: Continue the first conversation
-        response4 = await chat_api.chat("And what about Germany?") # No model/prompt change
-        print(f"Response 4: {response4}")
-
-        # Example 5: Get conversation history
-        print("\nConversation History:")
-        for message in chat_api.conversation_history:
-            print(f"- {message['role']}: {message['content']}")
-
-
-    finally:
-        await chat_api.stop()
-
-if __name__ == "__main__":
-    asyncio.run(main())
+FROM python:3.11-slim AS base
+
+WORKDIR /app
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    wget \
+    unzip \
+    libx11-xcb1 \
+    libxcomposite1 \
+    libxcursor1 \
+    libxdamage1 \
+    libxi6 \
+    libxtst6 \
+    libnss3 \
+    libcups2 \
+    libxss1 \
+    libxrandr2 \
+    libasound2 \
+    libatk1.0-0 \
+    libatk-bridge2.0-0 \
+    libcairo2 \
+    libgdk-pixbuf2.0-0 \
+    libgtk-3-0 \
+    libpango-1.0-0 \
+    libpangocairo-1.0-0 \
+    libx11-6 \
+    xvfb \
+    && rm -rf /var/lib/apt/lists/*
+
+COPY . /app/
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+ENV PLAYWRIGHT_BROWSERS_PATH=/ms-playwright
+RUN mkdir -p $PLAYWRIGHT_BROWSERS_PATH && playwright install chromium --with-deps
+
+RUN useradd -m -u 1000 user
+RUN mkdir /data
+RUN chown -R 1000:1000 /app /data
+
+COPY entrypoint.sh /app/entrypoint.sh
+RUN chmod +x /app/entrypoint.sh
+
+USER user
+ENV HOME=/home/user \
+    PATH=/home/user/.local/bin:$PATH
+
+ENTRYPOINT ["xvfb-run", "/app/entrypoint.sh"]
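
For local testing of the image this commit builds, a build-and-run sequence along these lines should work; the image tag and the host-side data directory are illustrative placeholders, not part of the commit:

# Build from the repository root (tag name is just an example)
docker build -t huggingchat-space .
# Run the container; mount a host folder onto /data, which the image creates and chowns to the runtime user
docker run --rm -it -v "$(pwd)/data:/data" huggingchat-space

The ENTRYPOINT wraps entrypoint.sh (copied above, contents not shown in this diff) in xvfb-run, so the Chromium that Playwright installs has a virtual X display to run against even though the base image ships no display server.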