File size: 10,213 Bytes
0085f6d 6ab8a0e 0085f6d f1158e0 0085f6d 6ab8a0e 0085f6d 6ab8a0e 0085f6d b62e354 8c7f4ad 0085f6d 8c7f4ad 0085f6d 8c7f4ad 0085f6d f1158e0 6ab8a0e f1158e0 0085f6d 6ab8a0e 0085f6d b62e354 0085f6d 8c7f4ad 0085f6d b62e354 0085f6d 8c7f4ad b62e354 0085f6d b62e354 0085f6d |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 |
import os
import re
from http import HTTPStatus
from typing import Dict, List, Optional, Tuple
import base64
import anthropic
from functools import partial
import gradio as gr
import modelscope_studio.components.base as ms
import modelscope_studio.components.legacy as legacy
import modelscope_studio.components.antd as antd
from config import DEMO_LIST, SystemPrompt
# NOTE: add below the import statements at the top of the file
def get_image_base64(image_path):
    """Return the contents of the file at *image_path* as a base64 string."""
    with open(image_path, "rb") as fh:
        payload = fh.read()
    return base64.b64encode(payload).decode()
# API key is read from the environment; the Anthropic client is created once
# at import time and shared by all requests.
# NOTE(review): if ANTHROPIC_API_KEY is unset this is None and API calls will
# fail later at request time — confirm deployment always sets it.
YOUR_API_TOKEN = os.getenv('ANTHROPIC_API_KEY')
client = anthropic.Anthropic(api_key=YOUR_API_TOKEN)
class Role:
    """Chat-role name constants used when building message dictionaries."""

    SYSTEM = "system"
    USER = "user"
    ASSISTANT = "assistant"
# A conversation history is a list of (user_message, assistant_message) pairs.
History = List[Tuple[str, str]]
# Messages in the {'role': ..., 'content': ...} chat format.
Messages = List[Dict[str, str]]
def history_to_messages(history: History, system: str) -> Messages:
    """Convert (user, assistant) turn pairs into a role/content message list.

    The returned list starts with the system prompt, followed by alternating
    user and assistant messages in conversation order.
    """
    messages = [{'role': Role.SYSTEM, 'content': system}]
    for user_turn, assistant_turn in history:
        messages.append({'role': Role.USER, 'content': user_turn})
        messages.append({'role': Role.ASSISTANT, 'content': assistant_turn})
    return messages
def messages_to_history(messages: Messages) -> History:
    """Rebuild (user, assistant) turn pairs from a flat message list.

    The first entry must be the system message; it is dropped, and the rest
    are paired up as alternating user/assistant turns.
    """
    assert messages[0]['role'] == Role.SYSTEM
    pairs = []
    for user_msg, assistant_msg in zip(messages[1::2], messages[2::2]):
        pairs.append([user_msg['content'], assistant_msg['content']])
    return pairs
def remove_code_block(text):
    """Extract the body of the first ```html fenced block in *text*.

    Falls back to the whole text (whitespace-stripped) when no such fence
    is present.
    """
    fenced = re.search(r'```html\n(.+?)\n```', text, re.DOTALL)
    if fenced is None:
        return text.strip()
    return fenced.group(1).strip()
def history_render(history: History):
    """Open the history drawer and pass the current history through unchanged."""
    drawer_update = gr.update(open=True)
    return drawer_update, history
def clear_history():
    """Reset the conversation history to an empty list."""
    return []
def send_to_sandbox(code):
    """Wrap an HTML document in a sandboxed iframe using a base64 data URI."""
    b64_html = base64.b64encode(code.encode('utf-8')).decode('utf-8')
    src = f"data:text/html;charset=utf-8;base64,{b64_html}"
    return f'<iframe src="{src}" width="100%" height="920px"></iframe>'
# Memo of demo descriptions already looked up, keyed by card index.
DEMO_CACHE = {}


def demo_card_click(e: gr.EventData):
    """Return the description of the clicked demo card.

    The card index is read from the event payload; descriptions are memoized
    in DEMO_CACHE after the first lookup.
    """
    idx = e._data['component']['index']
    if idx not in DEMO_CACHE:
        DEMO_CACHE[idx] = DEMO_LIST[idx]['description']
    return DEMO_CACHE[idx]
with gr.Blocks(css_paths="app.css") as demo:
    # Per-session state: conversation history and system-prompt settings.
    history = gr.State([])
    setting = gr.State({
        "system": SystemPrompt,
    })

    def generation_code(query: Optional[str], _setting: Dict[str, str], _history: Optional[History]):
        """Generate HTML code with Claude, streaming partial output to the UI.

        Args:
            query: The user's request text (``None`` is treated as empty).
            _setting: Settings dict; ``_setting['system']`` is the system prompt.
            _history: Prior (user, assistant) turns, or ``None``.

        Yields:
            ``[code_output, history, sandbox, state_tab, code_drawer]`` update
            lists: first a loading placeholder, then incremental text while
            streaming, and finally the rendered sandbox with updated history.

        Raises:
            ValueError: If the Claude API call fails for any reason.
        """
        if query is None:
            query = ''
        if _history is None:
            _history = []
        messages = history_to_messages(_history, _setting['system'])
        system_message = messages[0]['content']

        # Anthropic takes the system prompt as a separate argument, so any
        # leftover "system" role in the message list is downgraded to "user".
        claude_messages = [
            {"role": msg["role"] if msg["role"] != "system" else "user",
             "content": msg["content"]}
            for msg in messages[1:] + [{'role': Role.USER, 'content': query}]
        ]

        try:
            # Show an intermediate loading state.
            yield [
                "Generating code...",             # code_output
                _history,                         # history
                None,                             # sandbox
                gr.update(active_key="loading"),  # state_tab
                gr.update(open=True)              # code_drawer
            ]

            # Use a streaming response.
            with client.messages.stream(
                model="claude-3-5-sonnet-20241022",
                max_tokens=7800,
                system=system_message,
                messages=claude_messages
            ) as stream:
                collected_content = ""
                for chunk in stream:
                    if chunk.type == "content_block_delta":
                        collected_content += chunk.delta.text
                        yield [
                            collected_content,                # code_output
                            _history,                         # history
                            None,                             # sandbox (not generated yet)
                            gr.update(active_key="loading"),  # state_tab
                            gr.update(open=True)              # code_drawer
                        ]

            # Return the final result.
            _history = messages_to_history([
                {'role': Role.SYSTEM, 'content': system_message}
            ] + claude_messages + [{
                'role': Role.ASSISTANT,
                'content': collected_content
            }])
            yield [
                collected_content,                                      # code_output
                _history,                                               # history
                send_to_sandbox(remove_code_block(collected_content)),  # sandbox
                gr.update(active_key="render"),                         # state_tab
                gr.update(open=True)                                    # code_drawer
            ]
        except Exception as e:
            raise ValueError(f'Error calling Claude API: {str(e)}')

    with ms.Application() as app:
        with antd.ConfigProvider():
            # Row for the main content.
            with antd.Row(gutter=[32, 12]) as layout:
                # Left panel.
                with antd.Col(span=24, md=8):
                    with antd.Flex(vertical=True, gap="middle", wrap=True):
                        # NOTE(review): the heading text below appears
                        # mis-encoded (mojibake) in the source and is kept
                        # verbatim — confirm the intended text. The closing
                        # tag was fixed from </h2> to </h1>.
                        header = gr.HTML(f"""
                            <div class="left_header">
                             <img src="data:image/gif;base64,{get_image_base64('mouse.gif')}" width="360px" />
                             <h1 style="font-size: 20px;">AI ์ฝ๋ฉ ์ฝํ์ผ๋ฟ: MOUSE(WEB)</h1>
                            </div>
                        """)
                        input_text = antd.InputTextarea(
                            size="large",
                            allow_clear=True,
                            placeholder="Please enter what kind of application you want"
                        )
                        btn = antd.Button("send", type="primary", size="large")
                        clear_btn = antd.Button("clear history", type="default", size="large")

                    # Modal and Drawer components.
                    with antd.Modal(open=False, title="set system Prompt", width="800px") as system_prompt_modal:
                        systemPromptInput = antd.InputTextarea(
                            SystemPrompt, auto_size=True)
                    with antd.Drawer(open=False, title="code", placement="left", width="750px") as code_drawer:
                        code_output = legacy.Markdown()
                    with antd.Drawer(open=False, title="history", placement="left", width="900px") as history_drawer:
                        history_output = legacy.Chatbot(show_label=False, flushing=False, height=960,
                                                        elem_classes="history_chatbot")

                # Right panel.
                with antd.Col(span=24, md=16):
                    with ms.Div(elem_classes="right_panel"):
                        # NOTE(review): the button labels below contain
                        # mis-encoded emoji from the source; kept verbatim.
                        with antd.Flex(gap="small", elem_classes="setting-buttons"):
                            settingPromptBtn = antd.Button(
                                "โ๏ธ set system Prompt", type="default")
                            codeBtn = antd.Button("๐งโ๐ป view code", type="default")
                            historyBtn = antd.Button("๐ history", type="default")
                        gr.HTML('<div class="render_header"><span class="header_btn"></span><span class="header_btn"></span><span class="header_btn"></span></div>')
                        # Three mutually exclusive views; the tab bar itself
                        # is hidden and the active tab is driven from Python.
                        with antd.Tabs(active_key="empty", render_tab_bar="() => null") as state_tab:
                            with antd.Tabs.Item(key="empty"):
                                empty = antd.Empty(description="empty input", elem_classes="right_content")
                            with antd.Tabs.Item(key="loading"):
                                loading = antd.Spin(True, tip="coding...", size="large", elem_classes="right_content")
                            with antd.Tabs.Item(key="render"):
                                sandbox = gr.HTML(elem_classes="html_content")

            # Add the examples section as a separate full-width Row (bottom).
            with antd.Row(gutter=[0, 24], elem_classes="examples-section"):
                with antd.Col(span=24):
                    antd.Divider("examples")
                    with antd.Row(gutter=[16, 16]):
                        with ms.Each(DEMO_LIST):
                            with antd.Col(span=8):  # three cards per row (24 / 3 = 8)
                                with antd.Card(hoverable=True, as_item="card") as demoCard:
                                    antd.CardMeta()
                                demoCard.click(demo_card_click, outputs=[input_text])

            # Button event handlers.
            settingPromptBtn.click(lambda: gr.update(open=True),
                                   inputs=[], outputs=[system_prompt_modal])
            system_prompt_modal.ok(
                lambda prompt: ({"system": prompt}, gr.update(open=False)),
                inputs=[systemPromptInput], outputs=[setting, system_prompt_modal])
            system_prompt_modal.cancel(lambda: gr.update(open=False),
                                       outputs=[system_prompt_modal])

            codeBtn.click(lambda: gr.update(open=True),
                          inputs=[], outputs=[code_drawer])
            code_drawer.close(lambda: gr.update(open=False),
                              inputs=[], outputs=[code_drawer])

            historyBtn.click(history_render, inputs=[history],
                             outputs=[history_drawer, history_output])
            history_drawer.close(lambda: gr.update(open=False),
                                 inputs=[], outputs=[history_drawer])

            btn.click(
                generation_code,
                inputs=[input_text, setting, history],
                outputs=[code_output, history, sandbox, state_tab, code_drawer]
            )

            clear_btn.click(clear_history, inputs=[], outputs=[history])

if __name__ == "__main__":
    demo.queue(default_concurrency_limit=20).launch(ssr_mode=False)