# app.py
import requests
import streamlit as st
from models import get_hf_api

# Configure API
# HF_TOKEN must be provided via Streamlit secrets
# (.streamlit/secrets.toml locally, or the Space's secrets settings on Hugging Face).
API_URL = get_hf_api()
headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}

# Page configuration
st.set_page_config(
    page_title="DeepSeek Chatbot - ruslanmv.com",
    page_icon="🤖",
    layout="centered"
)

# Initialize session state for chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Sidebar configuration
with st.sidebar:
    st.header("Model Configuration")
    st.markdown("[Get HuggingFace Token](https://huggingface.co/settings/tokens)")
    
    system_message = st.text_area(
        "System Message",
        value="You are a friendly Chatbot created by ruslanmv.com",
        height=100
    )
    
    max_tokens = st.slider(
        "Max Tokens",
        1, 4000, 512
    )
    
    temperature = st.slider(
        "Temperature",
        0.1, 4.0, 0.7
    )
    
    top_p = st.slider(
        "Top-p",
        0.1, 1.0, 0.9
    )

# Chat interface
st.title("🤖 DeepSeek Chatbot")
st.caption("Powered by Hugging Face Inference API - Configure in sidebar")

# Display chat history
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Handle input
if prompt := st.chat_input("Type your message..."):
    st.session_state.messages.append({"role": "user", "content": prompt})
    
    with st.chat_message("user"):
        st.markdown(prompt)

    try:
        with st.spinner("Generating response..."):
            full_prompt = f"{system_message}\n\nUser: {prompt}\nAssistant:"
            
            response = requests.post(
                API_URL,
                headers=headers,
                json={
                    "inputs": full_prompt,
                    "parameters": {
                        "max_new_tokens": max_tokens,
                        "temperature": temperature,
                        "top_p": top_p,
                        "return_full_text": False
                    }
                }
            ).json()
            
            # The Inference API returns a list of generations on success and a
            # dict (typically with an "error" key, e.g. while the model is
            # still loading), so guard the response shape before indexing.
            if isinstance(response, list) and len(response) > 0:
                assistant_response = response[0].get('generated_text', '')
            else:
                assistant_response = "Error: Unexpected API response"

        with st.chat_message("assistant"):
            st.markdown(assistant_response)
        
        st.session_state.messages.append({"role": "assistant", "content": assistant_response})

    except Exception as e:
        st.error(f"API Error: {str(e)}")