	Update app.py
app.py CHANGED
@@ -96,169 +96,70 @@ def format_history(history):
             formatted_history.append({"role": "assistant", "content": assistant_msg})
     return formatted_history
 
+# Revised system prompt
 def chat(message, history, uploaded_file, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
-    system_prefix = """
-1. Overall structure and composition
-2. Key content and pattern analysis
-3. Data characteristics and meaning
-   - For datasets: Column meanings, data types, value distributions
-   - For text/code: Structural features, main patterns
-4. Potential applications
-5. Data quality and areas for improvement
-
-        if file_type == "error":
-            return "", [{"role": "user", "content": message}, {"role": "assistant", "content": content}]
-
-        file_summary = analyze_file_content(content, file_type)
-
-        if file_type in ['parquet', 'csv']:
-            system_message += f"\n\nFile Content:\n```markdown\n{content}\n```"
-        else:
-            system_message += f"\n\nFile Content:\n```\n{content}\n```"
-
-        if message == "Starting file analysis...":
-            message = f"""[Structure Analysis] {file_summary}
-
-Please provide detailed analysis from these perspectives:
-1. Overall file structure and format
-2. Key content and component analysis
-3. Data/content characteristics and patterns
-4. Quality and completeness evaluation
-5. Suggested improvements
-6. Practical applications and recommendations"""
-
-    # Convert history to message format
-    if history is not None:
-        for item in history:
-            if isinstance(item, dict):
-                messages.append(item)
-            elif isinstance(item, (list, tuple)) and len(item) == 2:
-                messages.append({"role": "user", "content": item[0]})
-                if item[1]:
-                    messages.append({"role": "assistant", "content": item[1]})
-
-    messages.append({"role": "user", "content": message})
-
-    try:
-        client = get_client()
-        partial_message = ""
-        current_history = []
-
-        for msg in client.chat_completion(
-            messages,
-            max_tokens=max_tokens,
-            stream=True,
-            temperature=temperature,
-            top_p=top_p,
-        ):
-            token = msg.choices[0].delta.get('content', None)
-            if token:
-                partial_message += token
-                current_history = [
-                    {"role": "user", "content": message},
-                    {"role": "assistant", "content": partial_message}
-                ]
-                yield "", current_history
-
-    except Exception as e:
-        error_msg = f"Inference error: {str(e)}"
-        error_history = [
-            {"role": "user", "content": message},
-            {"role": "assistant", "content": error_msg}
-        ]
-        yield "", error_history
-
-css = """
-footer {visibility: hidden}
-"""
-
-with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css, title="EveryChat") as demo:
+    system_prefix = """I am your friendly and knowledgeable AI assistant. I communicate according to the following principles:
+
+1. Converse with a friendly, empathetic attitude
+2. Provide clear, easy-to-understand explanations
+3. Grasp the intent of each question precisely and tailor the answer to it
+4. Refer to the uploaded file's contents when needed to give concrete help
+5. Add value to the conversation through additional insights and suggestions
+
+I always respond politely and kindly, adding concrete examples or explanations
+where needed to aid understanding."""
+
+# Korean localization of UI text
+with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="AI Assistant") as demo:
     gr.HTML(
         """
         <div style="text-align: center; max-width: 800px; margin: 0 auto;">
-            <h1 style="font-size: 3em; font-weight: 600; margin: 0.5em;">
-            <h3 style="font-size: 1.2em; margin: 1em;"
+            <h1 style="font-size: 3em; font-weight: 600; margin: 0.5em;">AI Assistant</h1>
+            <h3 style="font-size: 1.2em; margin: 1em;">Your reliable conversation partner</h3>
         </div>
         """
     )
-
+
     with gr.Row():
         with gr.Column(scale=2):
             chatbot = gr.Chatbot(
                 height=600,
-                label="
+                label="Chat window",
                 type="messages"
             )
             msg = gr.Textbox(
-                label="
+                label="Message input",
                 show_label=False,
-                placeholder="
+                placeholder="Ask me anything...",
                 container=False
             )
             with gr.Row():
-                clear = gr.ClearButton([msg, chatbot])
-                send = gr.Button("
+                clear = gr.ClearButton([msg, chatbot], value="Clear conversation")
+                send = gr.Button("Send")
 
         with gr.Column(scale=1):
-            gr.Markdown("### 
+            gr.Markdown("### File upload\nSupported formats: text, code, CSV, and Parquet files")
             file_upload = gr.File(
-                label="
+                label="Select a file",
                 file_types=["text", ".csv", ".parquet"],
                 type="filepath"
             )
 
-            with gr.Accordion("
-                system_message = gr.Textbox(label="
-                max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="
-                temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="
-                top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="
-
-    # Event bindings
-    msg.submit(
-        chat,
-        inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
-        outputs=[msg, chatbot],
-        queue=True
-    ).then(
-        lambda: gr.update(interactive=True),
-        None,
-        [msg]
-    )
-
-    send.click(
-        chat,
-        inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
-        outputs=[msg, chatbot],
-        queue=True
-    ).then(
-        lambda: gr.update(interactive=True),
-        None,
-        [msg]
-    )
-
-    # Auto-analysis on file upload
-    file_upload.change(
-        chat,
-        inputs=[gr.Textbox(value="Starting file analysis..."), chatbot, file_upload, system_message, max_tokens, temperature, top_p],
-        outputs=[msg, chatbot],
-        queue=True
-    )
+            with gr.Accordion("Advanced settings", open=False):
+                system_message = gr.Textbox(label="System message", value="")
+                max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max tokens")
+                temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Creativity level")
+                top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Response diversity")
 
-    #
+    # Revised example questions
     gr.Examples(
         examples=[
-            ["
-            ["
-            ["
-            ["
-            ["
-            ["
+            ["Hello! What kind of help do you need?"],
+            ["Could you explain this in more detail?"],
+            ["Could you explain it in a way that's easy for me to understand?"],
+            ["How could I actually put this to use?"],
+            ["Do you have any additional advice?"],
+            ["I have a few more questions; may I ask them?"],
         ],
         inputs=msg,
    )
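
For reference, the streaming loop removed in this hunk follows huggingface_hub's InferenceClient.chat_completion(..., stream=True) API. The sketch below shows that pattern in isolation and is illustrative only: the model id and the helper name stream_reply are assumptions, not taken from the repo (in app.py the client comes from get_client(), which is defined outside this hunk). Note that recent huggingface_hub releases return each stream delta as an object, so the token is read with delta.content rather than the removed code's delta.get('content', None).

from huggingface_hub import InferenceClient

def stream_reply(message, messages, max_tokens=4000, temperature=0.7, top_p=0.9):
    # Hypothetical model id; app.py builds its client via get_client() instead.
    client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
    partial = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        # Newer huggingface_hub versions return dataclasses here, so the delta
        # text is an attribute rather than a dict entry.
        token = chunk.choices[0].delta.content
        if token:
            partial += token
            # Yield the same ("", history) shape the removed chat() used, so the
            # generator can drive gr.Chatbot(type="messages") via outputs=[msg, chatbot].
            yield "", [
                {"role": "user", "content": message},
                {"role": "assistant", "content": partial},
            ]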