Spaces:
Running
Running
File size: 5,487 Bytes
21cbcb5 915a848 21cbcb5 d6bbfd2 915a848 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 d6bbfd2 21cbcb5 d6bbfd2 915a848 d6bbfd2 21cbcb5 915a848 21cbcb5 c6e2c6b 21cbcb5 915a848 21cbcb5 915a848 d6bbfd2 915a848 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 915a848 21cbcb5 915a848 d6bbfd2 21cbcb5 915a848 21cbcb5 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 |
import streamlit as st
from together import Together
import os
from typing import Iterator
from PIL import Image
import base64
from PyPDF2 import PdfReader
# Read the Together API key from the environment and fail fast when it
# is missing, so the app does not start in a half-configured state.
API_KEY = os.getenv("TOGETHER_API_KEY")
if API_KEY is None or API_KEY == "":
    raise ValueError("API key is missing! Make sure TOGETHER_API_KEY is set in the Secrets.")
# Initialize the Together client
@st.cache_resource
def get_client():
    """Build the Together API client once and reuse it across reruns."""
    client = Together(api_key=API_KEY)
    return client
def process_file(file) -> str:
    """Return the content of an uploaded file as a string.

    PDFs are converted to plain text page by page, images are base64-encoded,
    and anything else is decoded as UTF-8 text. Returns "" when no file is
    given or when processing fails (the error is surfaced via st.error).
    """
    if file is None:
        return ""
    try:
        if file.type == "application/pdf":
            text = ""
            pdf_reader = PdfReader(file)
            for page in pdf_reader.pages:
                # BUG FIX: extract_text() returns None for pages without a
                # text layer (e.g. scanned pages); `None + "\n"` would raise
                # TypeError and discard the whole document.
                text += (page.extract_text() or "") + "\n"
            return text
        elif file.type.startswith("image/"):
            # Images are passed along base64-encoded so they can be embedded
            # in a text message payload.
            return base64.b64encode(file.getvalue()).decode("utf-8")
        else:
            # Fall back to treating the upload as UTF-8 text.
            return file.getvalue().decode('utf-8')
    except Exception as e:
        st.error(f"νμΌ μ²λ¦¬ μ€ μ€λ₯ λ°μ: {str(e)}")
        return ""
def generate_response(
    message: str,
    history: list[tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
    files=None
) -> Iterator[str]:
    """Stream the model's reply as text chunks.

    Builds the chat transcript (system prompt, prior turns, current message
    plus any uploaded file contents) and yields streamed completion deltas.
    API errors are yielded as human-readable messages instead of raised.
    """
    client = get_client()
    try:
        # Assemble the transcript: system prompt first, then prior turns.
        messages = [{"role": "system", "content": system_message}]
        for user_turn, assistant_turn in history:
            messages.append({"role": "user", "content": user_turn})
            messages.append({"role": "assistant", "content": assistant_turn})

        # Fold any uploaded file contents into the current user message.
        current_content = message
        if files:
            attachments = [
                f"νμΌ λ΄μ©:\n{text}"
                for text in (process_file(f) for f in files)
                if text
            ]
            if attachments:
                current_content = "\n\n".join([current_content] + attachments)
        messages.append({"role": "user", "content": current_content})

        # Call the API and relay streamed deltas to the caller.
        try:
            stream = client.chat.completions.create(
                model="deepseek-ai/DeepSeek-R1",
                messages=messages,
                max_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p,
                stream=True
            )
            for chunk in stream:
                delta = chunk.choices[0].delta.content if chunk.choices else None
                if delta:
                    yield delta
        except Exception as e:
            if "rate limit" in str(e).lower():
                yield "μ£μ‘ν©λλ€. API νΈμΆ νλμ λλ¬νμ΅λλ€. μ μ ν λ€μ μλν΄μ£ΌμΈμ."
            else:
                yield f"API νΈμΆ μ€ μ€λ₯κ° λ°μνμ΅λλ€: {str(e)}"
    except Exception as e:
        yield f"μ€λ₯κ° λ°μνμ΅λλ€: {str(e)}"
def main():
    """Run the Streamlit chat UI for the DeepSeek model."""
    st.set_page_config(page_title="DeepSeek μ±ν", page_icon="π", layout="wide")

    # Persist the conversation across Streamlit reruns.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    st.title("DeepSeek μ±ν")
    st.markdown("DeepSeek AI λͺ¨λΈκ³Ό λννμΈμ. νμν κ²½μ° νμΌμ μλ‘λν μ μμ΅λλ€.")

    # Sidebar: generation settings and optional file attachments.
    with st.sidebar:
        st.header("μ€μ ")
        system_message = st.text_area(
            "μμ€ν λ©μμ§",
            value="λΉμ μ κΉμ΄ μκ² μκ°νλ AIμλλ€. λ¬Έμ λ₯Ό κΉμ΄ κ³ λ €νκ³ μ²΄κ³μ μΈ μΆλ‘ κ³Όμ μ ν΅ν΄ μ¬λ°λ₯Έ ν΄κ²°μ±μ λμΆνκΈ° μν΄ λ§€μ° κΈ΄ μ¬κ³ 체μΈμ μ¬μ©ν μ μμ΅λλ€. λ°λμ νκΈλ‘ λ΅λ³νμΈμ.",
            height=100
        )
        max_tokens = st.slider("μ΅λ ν ν° μ", 1, 8192, 8192)
        # BUG FIX: the default value must lie inside [min, max]. The original
        # range started at 0.1 with a default of 0.0, which makes st.slider
        # raise StreamlitAPIException at render time; the range now starts
        # at 0.0 so the intended default is valid.
        temperature = st.slider("μ¨λ", 0.0, 4.0, 0.0, 0.1)
        top_p = st.slider("Top-p", 0.1, 1.0, 0.95, 0.05)
        uploaded_files = st.file_uploader(
            "νμΌ μλ‘λ (μ νμ¬ν)",
            type=['txt', 'py', 'md', 'pdf', 'png', 'jpg', 'jpeg'],
            accept_multiple_files=True
        )

    # Replay the stored conversation.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.write(message["content"])

    if prompt := st.chat_input("무μμ μκ³ μΆμΌμ κ°μ?"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.write(prompt)

        with st.chat_message("assistant"):
            response_placeholder = st.empty()
            full_response = ""
            # Pair stored messages into (user, assistant) turns. The freshly
            # appended prompt has no assistant reply yet, so zip() drops it
            # and it is passed separately as the current message.
            history = [
                (user_msg["content"], assistant_msg["content"])
                for user_msg, assistant_msg in zip(
                    st.session_state.messages[::2],
                    st.session_state.messages[1::2],
                )
            ]
            for response_chunk in generate_response(
                prompt,
                history,
                system_message,
                max_tokens,
                temperature,
                top_p,
                uploaded_files
            ):
                full_response += response_chunk
                # Show a cursor glyph while the response streams in.
                response_placeholder.markdown(full_response + "β")
            response_placeholder.markdown(full_response)
            st.session_state.messages.append({"role": "assistant", "content": full_response})


if __name__ == "__main__":
    main()