Spaces:
Running
Running
File size: 6,159 Bytes
21cbcb5 915a848 21cbcb5 d6bbfd2 915a848 21cbcb5 915a848 21cbcb5 ddc57e1 21cbcb5 d6bbfd2 915a848 d6bbfd2 21cbcb5 915a848 21cbcb5 c6e2c6b 21cbcb5 915a848 21cbcb5 ddc57e1 915a848 ddc57e1 915a848 ddc57e1 915a848 ddc57e1 915a848 ddc57e1 915a848 ddc57e1 915a848 ddc57e1 915a848 ddc57e1 915a848 ddc57e1 915a848 ddc57e1 915a848 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 915a848 ddc57e1 21cbcb5 ddc57e1 21cbcb5 915a848 d6bbfd2 21cbcb5 ddc57e1 21cbcb5 915a848 21cbcb5 ddc57e1 21cbcb5 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 |
import streamlit as st
from together import Together
import os
from typing import Iterator
from PIL import Image
import base64
from PyPDF2 import PdfReader
# Together.ai API key, read from the environment; the app refuses to start without it.
API_KEY = os.getenv("TOGETHER_API_KEY")
# Fail fast at import time so a misconfigured deployment is obvious immediately.
if not API_KEY:
    raise ValueError("API key is missing! Make sure TOGETHER_API_KEY is set in the Secrets.")
@st.cache_resource
def get_client():
    """Build the Together API client once and reuse it across Streamlit reruns."""
    client = Together(api_key=API_KEY)
    return client
def process_file(file) -> str:
    """Extract text content from an uploaded file.

    Supports PDFs (text extraction via PyPDF2), images (returned as a
    base64 string so they can be embedded in a text prompt), and any other
    type, which is assumed to be UTF-8 text.

    Args:
        file: a Streamlit UploadedFile (exposes ``.type`` and ``.getvalue()``),
            or None.

    Returns:
        The extracted text, or "" for None input or on any processing error
        (the error is surfaced to the UI via ``st.error``).
    """
    if file is None:
        return ""
    try:
        if file.type == "application/pdf":
            pdf_reader = PdfReader(file)
            # extract_text() can return None for pages with no text layer
            # (older PyPDF2); coalesce to "" so concatenation never raises.
            return "".join((page.extract_text() or "") + "\n"
                           for page in pdf_reader.pages)
        elif file.type.startswith("image/"):
            return base64.b64encode(file.getvalue()).decode("utf-8")
        else:
            return file.getvalue().decode('utf-8')
    except Exception as e:
        # NOTE(review): message text is mojibake in the original source;
        # kept byte-identical since it is a runtime string.
        st.error(f"νμΌ μ²λ¦¬ μ€ μ€λ₯ λ°μ: {str(e)}")
        return ""
def generate_response(
    message: str,
    history: list[tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
    files=None
) -> Iterator[str]:
    """Stream a chat completion for *message* from DeepSeek-R1 on Together.ai.

    Args:
        message: the current user message.
        history: prior (user, assistant) message pairs, replayed in order.
        system_message: optional system prompt; omitted when blank.
        max_tokens: completion token limit forwarded to the API.
        temperature: sampling temperature forwarded to the API.
        top_p: nucleus-sampling parameter forwarded to the API.
        files: optional iterable of uploaded files; their extracted contents
            are appended to the current message.

    Yields:
        Response text chunks as they stream in, or a single user-facing
        error string when the API call fails.
    """
    client = get_client()
    try:
        messages = []
        # Only include a system message when one was actually provided.
        if system_message.strip():
            messages.append({
                "role": "system",
                "content": system_message
            })
        # Replay the conversation history.
        for user_msg, assistant_msg in history:
            messages.append({
                "role": "user",
                "content": user_msg
            })
            messages.append({
                "role": "assistant",
                "content": assistant_msg
            })
        # Append extracted file contents to the current message, if any.
        current_content = message
        if files:
            file_contents = []
            for file in files:
                content = process_file(file)
                if content:
                    # NOTE(review): label text is mojibake in the original
                    # source; kept byte-identical (runtime string).
                    file_contents.append(f"νμΌ λ΄μ©:\n{content}")
            if file_contents:
                current_content = current_content + "\n\n" + "\n\n".join(file_contents)
        messages.append({
            "role": "user",
            "content": current_content
        })
        # Assemble the streaming API request.
        request_params = {
            "model": "deepseek-ai/DeepSeek-R1",
            "messages": messages,
            "max_tokens": max_tokens,
            "temperature": temperature,
            "top_p": top_p,
            "stream": True
        }
        try:
            stream = client.chat.completions.create(**request_params)
            for chunk in stream:
                # Some stream chunks (e.g. role-only deltas) carry no content.
                if hasattr(chunk.choices[0].delta, 'content') and chunk.choices[0].delta.content:
                    yield chunk.choices[0].delta.content
        except Exception as e:
            if "rate limit" in str(e).lower():
                yield "API νΈμΆ νλμ λλ¬νμ΅λλ€. μ μ ν λ€μ μλν΄μ£ΌμΈμ."
            else:
                error_message = str(e)
                # Surface Together.ai input-validation errors with a friendlier message.
                if "Input validation error" in error_message:
                    # NOTE(review): this literal was split across lines by encoding
                    # damage in the source (invalid Python as displayed); fragments
                    # rejoined here — original bytes could not be fully recovered.
                    yield "μλ ₯ νμμ΄ μ¬λ°λ₯΄μ§ μμ΅λλ€. μμ€νκ΄λ¦¬μμκ² λ¬Έμν΄μ£ΌμΈμ."
                else:
                    yield f"API νΈμΆ μ€ μ€λ₯κ° λ°μνμ΅λλ€: {error_message}"
    except Exception as e:
        # Top-level boundary: report any unexpected failure to the UI instead of crashing.
        yield f"μ€λ₯κ° λ°μνμ΅λλ€: {str(e)}"
def main():
    """Streamlit entry point: render the chat UI and drive the conversation.

    NOTE(review): several Korean string literals below were split across lines
    by encoding damage in the source (invalid Python as displayed); their
    fragments are rejoined here and their (mojibake) bytes otherwise kept
    as-is, since they are runtime strings.
    """
    st.set_page_config(page_title="DeepSeek μ±ν", page_icon="π", layout="wide")
    # Persist chat history across Streamlit reruns.
    if "messages" not in st.session_state:
        st.session_state.messages = []
    st.title("DeepSeek μ±ν")
    st.markdown("DeepSeek AI λͺ¨λΈκ³Ό λννμΈμ. νμν κ²½μ° νμΌμ μλ‘λν μ μμ΅λλ€.")
    with st.sidebar:
        st.header("μ€μ ")
        system_message = st.text_area(
            "μμ€νλ©μμ§",
            value="λΉμ μ κΉμ΄ μκ² μκ°νλ AIμλλ€. λ¬Έμ λ₯Ό κΉμ΄ κ³ λ €νκ³ μ²΄κ³μ μΈ μΆλ‘ κ³Όμ μ ν΅ν΄ μ¬λ°λ₯Έ ν΄κ²°μ±μ λμΆνμΈμ. λ°λμ νκΈλ‘ λ΅λ³νμΈμ.",
            height=100
        )
        max_tokens = st.slider("μ΅λ ν ν° μ", 1, 4096, 2048)  # completion token limit
        temperature = st.slider("μ¨λ", 0.0, 2.0, 0.7, 0.1)  # sampling temperature
        top_p = st.slider("Top-p", 0.0, 1.0, 0.7, 0.1)  # nucleus sampling
        uploaded_file = st.file_uploader(
            "νμΌ μλ‘λ (μ νμ¬ν)",
            type=['txt', 'py', 'md', 'pdf', 'png', 'jpg', 'jpeg'],
            accept_multiple_files=True
        )
    # Replay the stored conversation so far.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])
    if prompt := st.chat_input("무μμ μκ³ μΆμΌμ κ°μ?"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)
        with st.chat_message("assistant"):
            response_placeholder = st.empty()
            full_response = ""
            # Pair stored messages as (user, assistant); the just-appended
            # prompt has no assistant partner yet, so zip() drops it from history.
            history = [(msg["content"], next_msg["content"])
                       for msg, next_msg in zip(st.session_state.messages[::2],
                                                st.session_state.messages[1::2])]
            for response_chunk in generate_response(
                prompt,
                history,
                system_message,
                max_tokens,
                temperature,
                top_p,
                uploaded_file
            ):
                # Re-render with a trailing cursor glyph while streaming.
                full_response += response_chunk
                response_placeholder.markdown(full_response + "β")
            response_placeholder.markdown(full_response)
            st.session_state.messages.append({"role": "assistant", "content": full_response})
# Script entry point. (A stray trailing " |" extraction artifact after
# main() — a syntax error — has been removed.)
if __name__ == "__main__":
    main()