File size: 5,487 Bytes
21cbcb5
915a848
21cbcb5
 
d6bbfd2
 
915a848
21cbcb5
 
 
 
 
915a848
21cbcb5
 
915a848
21cbcb5
 
 
 
 
d6bbfd2
21cbcb5
d6bbfd2
 
 
 
915a848
d6bbfd2
 
 
 
 
21cbcb5
915a848
 
21cbcb5
 
 
 
 
 
 
 
c6e2c6b
21cbcb5
 
915a848
21cbcb5
915a848
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d6bbfd2
 
 
 
 
 
 
 
915a848
 
 
 
 
 
 
 
 
 
 
21cbcb5
915a848
 
21cbcb5
915a848
21cbcb5
 
 
 
915a848
 
21cbcb5
 
915a848
21cbcb5
915a848
 
21cbcb5
 
915a848
 
 
21cbcb5
915a848
 
d6bbfd2
21cbcb5
 
 
 
 
 
915a848
21cbcb5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
import streamlit as st
from together import Together
import os
from typing import Iterator
from PIL import Image
import base64
from PyPDF2 import PdfReader

# Read the Together API key from the environment; fail fast (empty string
# counts as missing) so the app never starts half-configured.
API_KEY = os.environ.get("TOGETHER_API_KEY")
if not API_KEY:
    raise ValueError("API key is missing! Make sure TOGETHER_API_KEY is set in the Secrets.")

# Build the Together API client once and reuse it across Streamlit reruns.
@st.cache_resource
def get_client():
    """Return the shared Together client (cached by Streamlit)."""
    client = Together(api_key=API_KEY)
    return client

def process_file(file) -> str:
    """Extract textual content from an uploaded Streamlit file.

    Behavior by MIME type:
      - PDF: concatenated text of all pages, one newline appended per page.
      - image/*: base64-encoded raw bytes (utf-8 string).
      - anything else: raw bytes decoded as UTF-8.

    Returns "" when `file` is None or when processing fails (the error is
    shown to the user via st.error rather than raised).
    """
    if file is None:
        return ""

    try:
        if file.type == "application/pdf":
            pdf_reader = PdfReader(file)
            # BUG FIX: extract_text() may return None for image-only or
            # empty pages; the old `text += page.extract_text() + "\n"`
            # raised TypeError there. Coerce None to "".
            return "".join(
                (page.extract_text() or "") + "\n" for page in pdf_reader.pages
            )
        elif file.type.startswith("image/"):
            return base64.b64encode(file.getvalue()).decode("utf-8")
        else:
            return file.getvalue().decode('utf-8')
    except Exception as e:
        # Best-effort: surface the problem in the UI and degrade to no content.
        st.error(f"파일 처리 중 오류 발생: {str(e)}")
        return ""

def generate_response(
    message: str,
    history: list[tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
    files=None
) -> Iterator[str]:
    """Stream a chat completion from the DeepSeek model.

    Yields response text chunks as they arrive. On any failure this yields
    a single user-facing (Korean) error message instead of raising, so the
    UI loop never sees an exception.
    """
    client = get_client()

    try:
        # Flatten the (user, assistant) history into the chat message format.
        messages = [{"role": "system", "content": system_message}]
        for past_user, past_assistant in history:
            messages.append({"role": "user", "content": past_user})
            messages.append({"role": "assistant", "content": past_assistant})

        # Attach any uploaded file contents to the current user turn.
        current_content = message
        if files:
            attachments = [
                f"파일 내용:\n{text}"
                for text in (process_file(one_file) for one_file in files)
                if text
            ]
            if attachments:
                current_content = "\n\n".join([current_content] + attachments)

        messages.append({"role": "user", "content": current_content})

        # Call the API in streaming mode and relay each non-empty delta.
        try:
            stream = client.chat.completions.create(
                model="deepseek-ai/DeepSeek-R1",
                messages=messages,
                max_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p,
                stream=True
            )

            for chunk in stream:
                delta = chunk.choices[0].delta.content if chunk.choices else None
                if delta:
                    yield delta

        except Exception as api_error:
            # Distinguish rate limiting from other API failures.
            if "rate limit" in str(api_error).lower():
                yield "죄송합니다. API 호출 한도에 도달했습니다. 잠시 후 다시 시도해주세요."
            else:
                yield f"API 호출 중 오류가 발생했습니다: {str(api_error)}"

    except Exception as outer_error:
        yield f"오류가 발생했습니다: {str(outer_error)}"

def main():
    """Streamlit entry point: render the chat UI and stream model replies."""
    st.set_page_config(page_title="DeepSeek 채팅", page_icon="💭", layout="wide")

    # Conversation state survives Streamlit reruns via session_state.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    st.title("DeepSeek 채팅")
    st.markdown("DeepSeek AI 모델과 대화하세요. 필요한 경우 파일을 업로드할 수 있습니다.")

    with st.sidebar:
        st.header("설정")
        system_message = st.text_area(
            "시스템 메시지",
            value="당신은 깊이 있게 생각하는 AI입니다. 문제를 깊이 고려하고 체계적인 추론 과정을 통해 올바른 해결책을 도출하기 위해 매우 긴 사고 체인을 사용할 수 있습니다. 반드시 한글로 답변하세요.",
            height=100
        )
        max_tokens = st.slider("최대 토큰 수", 1, 8192, 8192)
        # BUG FIX: the default value (0.0) was below the old minimum (0.1),
        # which makes st.slider raise StreamlitAPIException at render time.
        # Widen the minimum to 0.0 so the default is valid.
        temperature = st.slider("온도", 0.0, 4.0, 0.0, 0.1)
        top_p = st.slider("Top-p", 0.1, 1.0, 0.95, 0.05)
        uploaded_file = st.file_uploader(
            "파일 업로드 (선택사항)",
            type=['txt', 'py', 'md', 'pdf', 'png', 'jpg', 'jpeg'],
            accept_multiple_files=True
        )

    # Replay the stored conversation on every rerun.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.write(message["content"])

    if prompt := st.chat_input("무엇을 알고 싶으신가요?"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.write(prompt)

        with st.chat_message("assistant"):
            response_placeholder = st.empty()
            full_response = ""

            # Pair stored messages as (user, assistant) turns; zip truncates
            # the just-appended current prompt, which has no reply yet.
            history = [(msg["content"], next_msg["content"])
                      for msg, next_msg in zip(st.session_state.messages[::2], st.session_state.messages[1::2])]

            # Stream the reply, showing a cursor while chunks arrive.
            for response_chunk in generate_response(
                prompt,
                history,
                system_message,
                max_tokens,
                temperature,
                top_p,
                uploaded_file
            ):
                full_response += response_chunk
                response_placeholder.markdown(full_response + "▌")

            response_placeholder.markdown(full_response)

        st.session_state.messages.append({"role": "assistant", "content": full_response})

# Run the app only when executed directly (not when imported as a module).
if __name__ == "__main__":
    main()