Spaces:
Running
Running
File size: 6,118 Bytes
21cbcb5 915a848 21cbcb5 d6bbfd2 915a848 21cbcb5 915a848 21cbcb5 ddc57e1 21cbcb5 d6bbfd2 915a848 d6bbfd2 21cbcb5 915a848 21cbcb5 a7c8ed0 21cbcb5 c6e2c6b 21cbcb5 915a848 21cbcb5 ddc57e1 915a848 a7c8ed0 ddc57e1 a7c8ed0 915a848 ddc57e1 915a848 ddc57e1 a7c8ed0 915a848 a7c8ed0 915a848 ddc57e1 915a848 ddc57e1 915a848 a7c8ed0 915a848 21cbcb5 a7c8ed0 915a848 21cbcb5 915a848 21cbcb5 a7c8ed0 21cbcb5 a7c8ed0 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 915a848 ddc57e1 21cbcb5 a7c8ed0 21cbcb5 915a848 d6bbfd2 21cbcb5 a7c8ed0 21cbcb5 ddc57e1 21cbcb5 a7c8ed0 915a848 a7c8ed0 21cbcb5 a7c8ed0 21cbcb5 ddc57e1 21cbcb5 a7c8ed0 21cbcb5 a7c8ed0 21cbcb5 a7c8ed0 21cbcb5 a7c8ed0 21cbcb5 a7c8ed0 21cbcb5 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 |
import streamlit as st
from together import Together
import os
from typing import Iterator
from PIL import Image
import base64
from PyPDF2 import PdfReader
# Read the Together API key from the environment and fail fast at import
# time, so the app never starts without credentials.
API_KEY = os.environ.get("TOGETHER_API_KEY")
if not API_KEY:
    raise ValueError("API key is missing! Make sure TOGETHER_API_KEY is set in the Secrets.")
@st.cache_resource
def get_client():
    """Return a Together API client, cached once per server process."""
    client = Together(api_key=API_KEY)
    return client
def process_file(file) -> str:
    """Extract the textual content of one uploaded file.

    PDF files are converted to text page by page, images are returned
    base64-encoded, and any other file is decoded as UTF-8 text.
    Returns "" when no file is given or when processing fails (the
    error is shown in the UI via st.error instead of raising).
    """
    if file is None:
        return ""
    try:
        if file.type == "application/pdf":
            pages = []
            for page in PdfReader(file).pages:
                # extract_text() returns None for pages without a text
                # layer (e.g. scanned images) — treat those as empty.
                pages.append((page.extract_text() or "") + "\n")
            return "".join(pages)
        elif file.type.startswith("image/"):
            # Images are forwarded base64-encoded rather than as raw bytes.
            return base64.b64encode(file.getvalue()).decode("utf-8")
        else:
            return file.getvalue().decode('utf-8')
    except Exception as e:
        st.error(f"νμΌ μ²λ¦¬ μ€ μ€λ₯ λ°μ: {str(e)}")
        return ""
def generate_response(
    message: str,
    history: list[dict],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
    files=None
) -> Iterator[str]:
    """Stream a chat completion from the DeepSeek model.

    Builds the API message list — optional system prompt, prior
    conversation turns, then the current user message with any
    uploaded-file contents appended — and yields response text chunks
    as they arrive. On error, details are surfaced via st.error and a
    user-facing fallback message is yielded instead of raising.
    """
    client = get_client()
    try:
        # Assemble the chat message array for the API call.
        messages = []
        # Optional system prompt goes first.
        if system_message.strip():
            messages.append({
                "role": "system",
                "content": system_message
            })
        # Prior conversation turns, in order.
        messages.extend(history)
        # Current user message, with any uploaded-file contents appended.
        current_content = message
        if files:
            file_contents = []
            for file in files:
                content = process_file(file)
                if content:
                    file_contents.append(f"νμΌ λ΄μ©:\n{content}")
            if file_contents:
                current_content = current_content + "\n\n" + "\n\n".join(file_contents)
        messages.append({
            "role": "user",
            "content": current_content
        })
        # Streamed API request.
        try:
            stream = client.chat.completions.create(
                model="deepseek-ai/DeepSeek-R1",
                messages=messages,
                max_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p,
                stream=True
            )
            for chunk in stream:
                if hasattr(chunk.choices[0].delta, 'content') and chunk.choices[0].delta.content:
                    yield chunk.choices[0].delta.content
        except Exception as e:
            if "rate limit" in str(e).lower():
                yield "API νΈμΆ νλμ λλ¬νμ΅λλ€. μ μ ν λ€μ μλν΄μ£ΌμΈμ."
            else:
                st.error(f"API μ€λ₯ μμΈ: {str(e)}")  # surface details for debugging
                yield "μ£μ‘ν©λλ€. μ μ ν λ€μ μλν΄μ£ΌμΈμ."
    except Exception as e:
        st.error(f"μ 체 μ€λ₯ μμΈ: {str(e)}")  # surface details for debugging
        yield "μ€λ₯κ° λ°μνμ΅λλ€. μ μ ν λ€μ μλν΄μ£ΌμΈμ."
def main():
    """Streamlit entry point: render the chat UI and drive the conversation."""
    # NOTE(review): the Korean UI strings in this function were
    # mojibake-corrupted upstream (some bytes were lost, breaking several
    # literals across lines). The fragments have been re-joined so the file
    # parses again — restore the original Korean text from version control.
    st.set_page_config(page_title="DeepSeek μ±ν", page_icon="π", layout="wide")

    # Initialize session state on first load.
    if "messages" not in st.session_state:
        st.session_state.messages = []
    if "conversation_history" not in st.session_state:
        # Separate store for the history passed to the model.
        st.session_state.conversation_history = []

    st.title("DeepSeek μ±ν")
    st.markdown("DeepSeek AI λͺ¨λΈκ³Ό λννμΈμ. νμν κ²½μ° νμΌμ μ λ‘λν μ μμ΅λλ€.")

    # Sidebar: model settings and file upload.
    with st.sidebar:
        st.header("μ€μ ")
        system_message = st.text_area(
            "μμ€ν λ©μμ§",
            value="λΉμ μ κΉμ΄ μκ² μκ°νλ AIμ λλ€. λ¬Έμ λ₯Ό κΉμ΄ κ³ λ €νκ³ μ²΄κ³μ μΈ μΆλ‘  κ³Όμ μ ν΅ν΄ μ¬λ°λ₯Έ ν΄κ²°μ± μ λμΆνμΈμ. λ°λμ νκΈλ‘ λ΅λ³νμΈμ.",
            height=100
        )
        max_tokens = st.slider("μ΅λ ν ν° μ", 1, 4096, 2048)
        temperature = st.slider("μ¨λ", 0.0, 2.0, 0.7, 0.1)
        top_p = st.slider("Top-p", 0.0, 1.0, 0.7, 0.1)
        uploaded_file = st.file_uploader(
            "νμΌ μ λ‘λ (μ νμ¬ν)",
            type=['txt', 'py', 'md', 'pdf', 'png', 'jpg', 'jpeg'],
            accept_multiple_files=True
        )

    # Replay the conversation so far.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Chat input box at the bottom of the page.
    if prompt := st.chat_input("무μμ μκ³  μΆμΌμ κ°μ?"):
        # Record and display the user turn.
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.session_state.conversation_history.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        # Stream the assistant reply into a live placeholder.
        with st.chat_message("assistant"):
            response_placeholder = st.empty()
            full_response = ""
            for response_chunk in generate_response(
                prompt,
                st.session_state.conversation_history,
                system_message,
                max_tokens,
                temperature,
                top_p,
                uploaded_file
            ):
                full_response += response_chunk
                # Trailing marker acts as a typing cursor while streaming.
                response_placeholder.markdown(full_response + "β")
            response_placeholder.markdown(full_response)

        # Persist the assistant turn in both stores.
        st.session_state.messages.append({"role": "assistant", "content": full_response})
        st.session_state.conversation_history.append({"role": "assistant", "content": full_response})


if __name__ == "__main__":
    main()