# deepseek_r1_API / app.py — Streamlit chat front-end for DeepSeek-R1 via the Together API.
# (Hugging Face Spaces page header residue: fantos, "Update app.py", commit 915a848, 5.49 kB —
#  commented out so the file is valid Python.)
import streamlit as st
from together import Together
import os
from typing import Iterator
from PIL import Image
import base64
from PyPDF2 import PdfReader
API_KEY = os.getenv("TOGETHER_API_KEY")
if not API_KEY:
raise ValueError("API key is missing! Make sure TOGETHER_API_KEY is set in the Secrets.")
# Initialize the Together client
@st.cache_resource
def get_client():
return Together(api_key=API_KEY)
def process_file(file) -> str:
    """Extract a text representation from an uploaded file.

    PDFs are converted to plain text page by page, images are returned
    base64-encoded (so they can be inlined into a prompt), and anything
    else is decoded as UTF-8 text.

    Args:
        file: A Streamlit UploadedFile (or None).

    Returns:
        The extracted content, or "" when file is None or processing fails
        (the error is surfaced to the user via st.error).
    """
    if file is None:
        return ""
    try:
        if file.type == "application/pdf":
            pdf_reader = PdfReader(file)
            # extract_text() may return None for pages without a text layer;
            # guard with `or ""` so concatenation cannot raise TypeError.
            return "".join(
                (page.extract_text() or "") + "\n" for page in pdf_reader.pages
            )
        elif file.type.startswith("image/"):
            return base64.b64encode(file.getvalue()).decode("utf-8")
        else:
            return file.getvalue().decode('utf-8')
    except Exception as e:
        st.error(f"파일 처리 쀑 였λ₯˜ λ°œμƒ: {str(e)}")
        return ""
def generate_response(
    message: str,
    history: list[tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
    files=None
) -> Iterator[str]:
    """Stream a DeepSeek-R1 reply for *message*, given prior turns and optional files.

    Yields response text chunks as they arrive; API failures are yielded as
    user-facing Korean error strings rather than raised.
    """
    client = get_client()
    try:
        # Assemble the transcript: system prompt first, then alternating
        # user/assistant turns from history.
        messages = [{"role": "system", "content": system_message}]
        for user_turn, assistant_turn in history:
            messages.append({"role": "user", "content": user_turn})
            messages.append({"role": "assistant", "content": assistant_turn})

        # The new user turn carries the prompt plus any uploaded-file contents.
        parts = [message]
        if files:
            attachments = [
                f"파일 λ‚΄μš©:\n{content}"
                for content in (process_file(f) for f in files)
                if content
            ]
            if attachments:
                parts.append("\n\n".join(attachments))
        messages.append({"role": "user", "content": "\n\n".join(parts)})

        try:
            stream = client.chat.completions.create(
                model="deepseek-ai/DeepSeek-R1",
                messages=messages,
                max_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p,
                stream=True
            )
            for chunk in stream:
                delta = chunk.choices[0].delta.content if chunk.choices else None
                if delta:
                    yield delta
        except Exception as e:
            # Surface API-level failures as chat output instead of crashing the UI.
            if "rate limit" in str(e).lower():
                yield "μ£„μ†‘ν•©λ‹ˆλ‹€. API 호좜 ν•œλ„μ— λ„λ‹¬ν–ˆμŠ΅λ‹ˆλ‹€. μž μ‹œ ν›„ λ‹€μ‹œ μ‹œλ„ν•΄μ£Όμ„Έμš”."
            else:
                yield f"API 호좜 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}"
    except Exception as e:
        yield f"였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}"
def main():
    """Streamlit entry point: render sidebar settings and the streaming chat UI."""
    st.set_page_config(page_title="DeepSeek μ±„νŒ…", page_icon="πŸ’­", layout="wide")

    if "messages" not in st.session_state:
        st.session_state.messages = []

    st.title("DeepSeek μ±„νŒ…")
    st.markdown("DeepSeek AI λͺ¨λΈκ³Ό λŒ€ν™”ν•˜μ„Έμš”. ν•„μš”ν•œ 경우 νŒŒμΌμ„ μ—…λ‘œλ“œν•  수 μžˆμŠ΅λ‹ˆλ‹€.")

    with st.sidebar:
        st.header("μ„€μ •")
        system_message = st.text_area(
            "μ‹œμŠ€ν…œ λ©”μ‹œμ§€",
            value="당신은 깊이 있게 μƒκ°ν•˜λŠ” AIμž…λ‹ˆλ‹€. 문제λ₯Ό 깊이 κ³ λ €ν•˜κ³  체계적인 μΆ”λ‘  과정을 톡해 μ˜¬λ°”λ₯Έ 해결책을 λ„μΆœν•˜κΈ° μœ„ν•΄ 맀우 κΈ΄ 사고 체인을 μ‚¬μš©ν•  수 μžˆμŠ΅λ‹ˆλ‹€. λ°˜λ“œμ‹œ ν•œκΈ€λ‘œ λ‹΅λ³€ν•˜μ„Έμš”.",
            height=100
        )
        max_tokens = st.slider("μ΅œλŒ€ 토큰 수", 1, 8192, 8192)
        # BUG FIX: the default value (0.0) was below the previous minimum (0.1),
        # which makes st.slider raise StreamlitAPIException at render time.
        # Allow 0.0 so a fully deterministic temperature remains selectable.
        temperature = st.slider("μ˜¨λ„", 0.0, 4.0, 0.0, 0.1)
        top_p = st.slider("Top-p", 0.1, 1.0, 0.95, 0.05)
        # accept_multiple_files=True means this is a *list* of UploadedFile.
        uploaded_files = st.file_uploader(
            "파일 μ—…λ‘œλ“œ (선택사항)",
            type=['txt', 'py', 'md', 'pdf', 'png', 'jpg', 'jpeg'],
            accept_multiple_files=True
        )

    # Replay the conversation so far.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.write(message["content"])

    if prompt := st.chat_input("무엇을 μ•Œκ³  μ‹ΆμœΌμ‹ κ°€μš”?"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.write(prompt)

        with st.chat_message("assistant"):
            response_placeholder = st.empty()
            full_response = ""
            # Pair stored messages into (user, assistant) turns. zip() stops at
            # the shorter slice, so the just-appended prompt (which has no
            # assistant reply yet) is excluded from history.
            history = [(msg["content"], next_msg["content"])
                       for msg, next_msg in zip(st.session_state.messages[::2], st.session_state.messages[1::2])]
            for response_chunk in generate_response(
                prompt,
                history,
                system_message,
                max_tokens,
                temperature,
                top_p,
                uploaded_files
            ):
                full_response += response_chunk
                # Trailing "β–Œ" acts as a typing cursor while streaming.
                response_placeholder.markdown(full_response + "β–Œ")
            response_placeholder.markdown(full_response)
        st.session_state.messages.append({"role": "assistant", "content": full_response})
if __name__ == "__main__":
main()