	Update app.py
app.py CHANGED
@@ -1,4 +1,3 @@
-
 import streamlit as st
 import requests
 import logging
@@ -21,15 +20,10 @@ if "messages" not in st.session_state:
 # Sidebar configuration
 with st.sidebar:
     st.header("Model Configuration")
-    # st.markdown("[Get HuggingFace Token](https://huggingface.co/settings/tokens)")
 
     # Dropdown to select model
     model_options = [
         "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
-     #   "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
-     #   "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
-     #   "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
-     #   "deepseek-ai/DeepSeek-R1-Distill-Llama-8B"
     ]
     selected_model = st.selectbox("Select Model", model_options, index=0)
 
@@ -82,7 +76,7 @@ if prompt := st.chat_input("Type your message..."):
         with st.spinner("Generating response..."):
             # Prepare the payload for the API
             payload = {
-                "inputs": prompt,
+                "inputs": f"{system_message}\n{prompt}",
                 "parameters": {
                     "max_new_tokens": max_tokens,
                     "temperature": temperature,
@@ -114,4 +108,3 @@ if prompt := st.chat_input("Type your message..."):
     except Exception as e:
         logger.error(f"Application Error: {str(e)}", exc_info=True)
         st.error(f"Application Error: {str(e)}")
-
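For context, a minimal sketch of how the payload changed in this commit might be sent to the Hugging Face Inference API with `requests`. The variables `system_message`, `selected_model`, `max_tokens`, and `temperature` come from the diff above; the helper name `query_model`, the endpoint URL pattern, and the `hf_token` parameter are assumptions for illustration, not shown in this commit.

```python
import requests

def query_model(selected_model: str, system_message: str, prompt: str,
                max_tokens: int, temperature: float, hf_token: str) -> dict:
    # Hypothetical endpoint pattern for the hosted Inference API.
    api_url = f"https://api-inference.huggingface.co/models/{selected_model}"
    headers = {"Authorization": f"Bearer {hf_token}"}
    payload = {
        # As of this commit, the system message is prepended to the user prompt.
        "inputs": f"{system_message}\n{prompt}",
        "parameters": {
            "max_new_tokens": max_tokens,
            "temperature": temperature,
        },
    }
    response = requests.post(api_url, headers=headers, json=payload, timeout=60)
    response.raise_for_status()
    return response.json()
```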
