import streamlit as st
import requests
import json

st.title("DeepSeek-R1-Distill-Qwen-32B")

# The Inference API endpoint for your model
API_URL = "https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"

# If your model is public, you can often omit the token.
# If it is private or rate-limited, you need to provide a token:
#   headers = {"Authorization": "Bearer YOUR_HF_INFERENCE_API_TOKEN"}
headers = {}
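# A minimal sketch (assumption, not part of the original app): if you keep the
# token in Streamlit secrets (.streamlit/secrets.toml with a key named
# HF_API_TOKEN, where the key name is a placeholder of your choosing), you
# could build the header like this instead:
#   headers = {"Authorization": f"Bearer {st.secrets['HF_API_TOKEN']}"}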

def query_hf_api(prompt: str):
    """
    Sends a JSON payload to the HF Inference API.
    """
    payload = {"inputs": prompt}
    response = requests.post(API_URL, headers=headers, data=json.dumps(payload))
    return response.json()

# Simple text box for user input
user_input = st.text_input("Enter your prompt", value="Hello, how are you?")

# Generate button
if st.button("Generate"):
    with st.spinner("Generating..."):
        result = query_hf_api(user_input)
    st.write("**API Response:**")
    st.json(result)
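    # A minimal sketch of response handling, assuming the text-generation
    # endpoint returns a list like [{"generated_text": "..."}]. If the payload
    # has a different shape (e.g. an {"error": ...} dict), this step is skipped
    # and only the raw JSON above is shown.
    if (
        isinstance(result, list)
        and result
        and isinstance(result[0], dict)
        and "generated_text" in result[0]
    ):
        st.write("**Generated text:**")
        st.write(result[0]["generated_text"])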