import os

import requests
import streamlit as st

from models import get_hf_api

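# Assumptions about the two external pieces this script relies on:
# - get_hf_api() (defined in the accompanying models.py, not shown here) is
#   expected to return the Inference API URL for the chosen model, e.g.
#   "https://api-inference.huggingface.co/models/<model-id>".
# - HF_TOKEN must be available to st.secrets, for example via
#   .streamlit/secrets.toml locally or a secret configured on the hosting
#   platform.
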
API_URL = get_hf_api()
headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}

st.set_page_config(
    page_title="DeepSeek Chatbot - ruslanmv.com",
    page_icon="🤖",
    layout="centered"
)

# Keep the conversation history across Streamlit reruns
if "messages" not in st.session_state:
    st.session_state.messages = []

# Sidebar: model configuration controls
with st.sidebar:
    st.header("Model Configuration")
    st.markdown("[Get HuggingFace Token](https://huggingface.co/settings/tokens)")

    system_message = st.text_area(
        "System Message",
        value="You are a friendly Chatbot created by ruslanmv.com",
        height=100
    )

    max_tokens = st.slider(
        "Max Tokens",
        1, 4000, 512
    )

    temperature = st.slider(
        "Temperature",
        0.1, 4.0, 0.7
    )

    top_p = st.slider(
        "Top-p",
        0.1, 1.0, 0.9
    )

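# The slider values above are passed straight through as generation parameters
# in the API request below; temperature values well above 1.0 make the
# sampling increasingly random.
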
st.title("🤖 DeepSeek Chatbot")
st.caption("Powered by Hugging Face Inference API - Configure in sidebar")

# Replay the stored conversation on each rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

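# Handle new user input: store it, call the Inference API, and render the
# reply. The request below follows the Hugging Face text-generation
# convention (a JSON body with "inputs" and "parameters"); the response
# handling expects a list of {"generated_text": ...} objects.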
if prompt := st.chat_input("Type your message..."):
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("user"):
        st.markdown(prompt)

    try:
        with st.spinner("Generating response..."):
            # Fold the system message and the user turn into a single prompt
            full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"

            response = requests.post(
                API_URL,
                headers=headers,
                json={
                    "inputs": full_prompt,
                    "parameters": {
                        "max_new_tokens": max_tokens,
                        "temperature": temperature,
                        "top_p": top_p,
                        "return_full_text": False
                    }
                }
            )

            # Error paths leave assistant_response unset; the reply is only
            # rendered and stored when a response was actually produced.
            assistant_response = None

            if response.status_code != 200:
                error_msg = response.json().get('error', 'Unknown API error')
                st.error(f"API Error: {error_msg}")
                if "loading" in error_msg.lower():
                    st.info("Please wait a moment and try again. The model might be loading.")
            else:
                result = response.json()

                if isinstance(result, list):
                    assistant_response = result[0].get('generated_text', 'No response generated')

                    # Keep only the text after the final "Assistant:" marker
                    if "Assistant:" in assistant_response:
                        assistant_response = assistant_response.split("Assistant:")[-1].strip()
                elif isinstance(result, dict) and 'error' in result:
                    st.error(f"API Error: {result['error']}")
                else:
                    st.error("Unexpected response format from API")

        if assistant_response is not None:
            with st.chat_message("assistant"):
                st.markdown(assistant_response)

            st.session_state.messages.append({"role": "assistant", "content": assistant_response})

    except Exception as e:
        st.error(f"Application Error: {str(e)}")
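
# Usage note: run the app with `streamlit run app.py` (the filename is an
# assumption) after installing streamlit and requests and putting HF_TOKEN in
# .streamlit/secrets.toml. A minimal models.py compatible with the import at
# the top could look like the sketch below; this is an assumption, since the
# real models.py is not shown in this section, and the model id is only an
# example:
#
#   def get_hf_api() -> str:
#       model_id = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"  # example only
#       return f"https://api-inference.huggingface.co/models/{model_id}"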