File size: 6,159 Bytes
21cbcb5
915a848
21cbcb5
 
d6bbfd2
 
915a848
21cbcb5
 
 
 
 
 
 
915a848
21cbcb5
 
 
 
ddc57e1
21cbcb5
d6bbfd2
 
 
 
915a848
d6bbfd2
 
 
 
 
21cbcb5
915a848
 
21cbcb5
 
 
 
 
 
 
 
c6e2c6b
21cbcb5
 
915a848
21cbcb5
ddc57e1
 
915a848
ddc57e1
 
 
 
 
 
 
 
915a848
ddc57e1
 
 
 
 
 
 
 
915a848
ddc57e1
915a848
 
 
 
 
 
 
 
 
 
ddc57e1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
915a848
ddc57e1
915a848
ddc57e1
915a848
 
ddc57e1
915a848
 
 
 
ddc57e1
915a848
ddc57e1
 
 
 
 
 
915a848
21cbcb5
915a848
 
21cbcb5
915a848
21cbcb5
 
 
 
915a848
 
21cbcb5
 
915a848
21cbcb5
915a848
ddc57e1
21cbcb5
 
ddc57e1
 
 
21cbcb5
915a848
 
d6bbfd2
21cbcb5
 
 
 
ddc57e1
21cbcb5
915a848
21cbcb5
 
ddc57e1
21cbcb5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
import streamlit as st
from together import Together
import os
from typing import Iterator
from PIL import Image
import base64
from PyPDF2 import PdfReader

# Read the Together AI key from the environment; fail fast at import time
# so the app never starts without credentials.
API_KEY = os.getenv("TOGETHER_API_KEY")
if not API_KEY:
    raise ValueError("API key is missing! Make sure TOGETHER_API_KEY is set in the Secrets.")

@st.cache_resource
def get_client():
    """Return a process-wide Together API client, cached by Streamlit across reruns."""
    return Together(api_key=API_KEY)

def process_file(file) -> str:
    """Extract a text payload from an uploaded file.

    PDFs are converted to plain text page by page, images are returned as a
    base64 string, and anything else is decoded as UTF-8 text.

    Args:
        file: A Streamlit ``UploadedFile`` (exposes ``type`` and
            ``getvalue()``) or ``None``.

    Returns:
        The extracted text (or base64 payload), or ``""`` for ``None`` input
        and on any processing error (the error is shown via ``st.error``).
    """
    if file is None:
        return ""

    try:
        if file.type == "application/pdf":
            text = ""
            pdf_reader = PdfReader(file)
            for page in pdf_reader.pages:
                # extract_text() may return None for pages without a text
                # layer; substitute "" so concatenation never raises.
                text += (page.extract_text() or "") + "\n"
            return text
        elif file.type.startswith("image/"):
            # Images are forwarded as base64 so they can be embedded in a
            # plain-text prompt.
            return base64.b64encode(file.getvalue()).decode("utf-8")
        else:
            return file.getvalue().decode("utf-8")
    except Exception as e:
        st.error(f"파일 처리 중 오류 발생: {str(e)}")
        return ""

def _build_messages(
    message: str,
    history: list[tuple[str, str]],
    system_message: str,
    files=None,
) -> list[dict]:
    """Assemble the chat message list for the API request.

    Prepends the system message (when non-empty), replays the
    (user, assistant) history pairs, then appends the current user message
    with any uploaded file contents attached.
    """
    messages = []

    # Only include a system turn when there is actual content.
    if system_message and system_message.strip():
        messages.append({"role": "system", "content": system_message})

    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})

    # Attach extracted file contents to the current user message.
    current_content = message
    if files:
        file_contents = []
        for file in files:
            content = process_file(file)
            if content:
                file_contents.append(f"파일 내용:\n{content}")
        if file_contents:
            current_content = current_content + "\n\n" + "\n\n".join(file_contents)

    messages.append({"role": "user", "content": current_content})
    return messages

def generate_response(
    message: str,
    history: list[tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
    files=None
) -> Iterator[str]:
    """Stream a model reply for *message*, yielding text chunks.

    Errors are yielded as user-facing strings instead of raised, so the UI
    renders them like a normal assistant reply.

    Args:
        message: Current user message.
        history: Prior (user, assistant) turn pairs.
        system_message: Optional system prompt; skipped when blank.
        max_tokens / temperature / top_p: Sampling parameters.
        files: Optional iterable of uploaded files to attach.

    Yields:
        Response text chunks, or a localized error message.
    """
    client = get_client()

    try:
        messages = _build_messages(message, history, system_message, files)

        request_params = {
            "model": "deepseek-ai/DeepSeek-R1",
            "messages": messages,
            "max_tokens": max_tokens,
            "temperature": temperature,
            "top_p": top_p,
            "stream": True,
        }

        try:
            stream = client.chat.completions.create(**request_params)
            for chunk in stream:
                delta = chunk.choices[0].delta
                # Some stream chunks carry no content (e.g. role-only deltas).
                if getattr(delta, "content", None):
                    yield delta.content
        except Exception as e:
            error_message = str(e)
            if "rate limit" in error_message.lower():
                yield "API 호출 한도에 도달했습니다. 잠시 후 다시 시도해주세요."
            elif "Input validation error" in error_message:
                # Together.ai reports malformed requests with this marker.
                yield "입력 형식이 올바르지 않습니다. 시스템 관리자에게 문의해주세요."
            else:
                yield f"API 호출 중 오류가 발생했습니다: {error_message}"

    except Exception as e:
        yield f"오류가 발생했습니다: {str(e)}"

def main():
    st.set_page_config(page_title="DeepSeek μ±„νŒ…", page_icon="πŸ’­", layout="wide")
    
    if "messages" not in st.session_state:
        st.session_state.messages = []

    st.title("DeepSeek μ±„νŒ…")
    st.markdown("DeepSeek AI λͺ¨λΈκ³Ό λŒ€ν™”ν•˜μ„Έμš”. ν•„μš”ν•œ 경우 νŒŒμΌμ„ μ—…λ‘œλ“œν•  수 μžˆμŠ΅λ‹ˆλ‹€.")

    with st.sidebar:
        st.header("μ„€μ •")
        system_message = st.text_area(
            "μ‹œμŠ€ν…œ λ©”μ‹œμ§€",
            value="당신은 깊이 있게 μƒκ°ν•˜λŠ” AIμž…λ‹ˆλ‹€. 문제λ₯Ό 깊이 κ³ λ €ν•˜κ³  체계적인 μΆ”λ‘  과정을 톡해 μ˜¬λ°”λ₯Έ 해결책을 λ„μΆœν•˜μ„Έμš”. λ°˜λ“œμ‹œ ν•œκΈ€λ‘œ λ‹΅λ³€ν•˜μ„Έμš”.",
            height=100
        )
        max_tokens = st.slider("μ΅œλŒ€ 토큰 수", 1, 4096, 2048)  # 토큰 μ œν•œ μ‘°μ •
        temperature = st.slider("μ˜¨λ„", 0.0, 2.0, 0.7, 0.1)    # μ˜¨λ„ λ²”μœ„ μ‘°μ •
        top_p = st.slider("Top-p", 0.0, 1.0, 0.7, 0.1)        # top_p λ²”μœ„ μ‘°μ •
        uploaded_file = st.file_uploader(
            "파일 μ—…λ‘œλ“œ (선택사항)",
            type=['txt', 'py', 'md', 'pdf', 'png', 'jpg', 'jpeg'],
            accept_multiple_files=True
        )

    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    if prompt := st.chat_input("무엇을 μ•Œκ³  μ‹ΆμœΌμ‹ κ°€μš”?"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            response_placeholder = st.empty()
            full_response = ""
            
            history = [(msg["content"], next_msg["content"]) 
                      for msg, next_msg in zip(st.session_state.messages[::2], st.session_state.messages[1::2])]
            
            for response_chunk in generate_response(
                prompt,
                history,
                system_message,
                max_tokens,
                temperature,
                top_p,
                uploaded_file
            ):
                full_response += response_chunk
                response_placeholder.markdown(full_response + "β–Œ")
            
            response_placeholder.markdown(full_response)
        
        st.session_state.messages.append({"role": "assistant", "content": full_response})

# Script entry point (run via `streamlit run <this file>`).
if __name__ == "__main__":
    main()