ruslanmv committed
Commit 649e09f · verified · 1 Parent(s): 10b9770

Update app.py

Files changed (1)
  1. app.py +23 -16
app.py CHANGED
@@ -55,7 +55,11 @@ def query(payload, api_url):
     logger.info(f"Sending request to {api_url} with payload: {payload}")
     response = requests.post(api_url, headers=headers, json=payload)
     logger.info(f"Received response: {response.status_code}, {response.text}")
-    return response.json()
+    try:
+        return response.json()
+    except requests.exceptions.JSONDecodeError:
+        logger.error(f"Failed to decode JSON response: {response.text}")
+        return None
 
 # Chat interface
 st.title("🤖 DeepSeek Chatbot")
@@ -76,11 +80,10 @@ if prompt := st.chat_input("Type your message..."):
     try:
         with st.spinner("Generating response..."):
             # Prepare the payload for the API
+            # Combine system message and user input into a single prompt
+            full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"
             payload = {
-                "inputs": {
-                    "system_message": system_message,
-                    "user_message": prompt
-                },
+                "inputs": full_prompt,
                 "parameters": {
                     "max_new_tokens": max_tokens,
                     "temperature": temperature,
@@ -92,22 +95,26 @@ if prompt := st.chat_input("Type your message..."):
             # Dynamically construct the API URL based on the selected model
             api_url = f"https://api-inference.huggingface.co/models/{selected_model}"
             logger.info(f"Selected model: {selected_model}, API URL: {api_url}")
-            print("payload:",payload)
+            print("payload",payload)
             # Query the Hugging Face API using the selected model
             output = query(payload, api_url)
 
             # Handle API response
-            if isinstance(output, list) and len(output) > 0 and 'generated_text' in output[0]:
-                assistant_response = output[0]['generated_text']
-                logger.info(f"Generated response: {assistant_response}")
-
-                with st.chat_message("assistant"):
-                    st.markdown(assistant_response)
-
-                st.session_state.messages.append({"role": "assistant", "content": assistant_response})
+            if output is not None and isinstance(output, list) and len(output) > 0:
+                if 'generated_text' in output[0]:
+                    assistant_response = output[0]['generated_text']
+                    logger.info(f"Generated response: {assistant_response}")
+
+                    with st.chat_message("assistant"):
+                        st.markdown(assistant_response)
+
+                    st.session_state.messages.append({"role": "assistant", "content": assistant_response})
+                else:
+                    logger.error(f"Unexpected API response structure: {output}")
+                    st.error("Error: Unexpected response from the model. Please try again.")
             else:
-                logger.error(f"Unexpected API response: {output}")
-                st.error("Error: Unable to generate a response. Please try again.")
+                logger.error(f"Empty or invalid API response: {output}")
+                st.error("Error: Unable to generate a response. Please check the model and try again.")
 
         except Exception as e:
             logger.error(f"Application Error: {str(e)}", exc_info=True)