# Hugging Face Space (page-scrape header): Spaces — status: Running
import streamlit as st
import requests
import json
import os

st.title("DeepSeek-R1-Distill-Qwen-32B")

# The Inference API endpoint for your model.
API_URL = "https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"

# If your model is public, you can often omit the token.
# If it is private or rate-limited, supply one via the HF_API_TOKEN
# environment variable. With no token set, requests are sent anonymously
# (identical to the previous hard-coded empty headers).
_hf_token = os.environ.get("HF_API_TOKEN")
headers = {"Authorization": f"Bearer {_hf_token}"} if _hf_token else {}
def query_hf_api(prompt: str):
    """Send *prompt* to the HF Inference API and return the decoded JSON.

    Uses ``json=payload`` so requests serializes the body AND sets the
    ``Content-Type: application/json`` header (the previous
    ``data=json.dumps(payload)`` sent no content type), and a timeout so
    the Streamlit app cannot hang forever on a stalled connection.

    Returns the API's decoded JSON (dict or list). On a network error,
    an HTTP error status, or a non-JSON response body (e.g. an HTML 503
    page), returns an ``{"error": ...}`` dict so the caller can render
    the failure instead of crashing.
    """
    payload = {"inputs": prompt}
    try:
        response = requests.post(API_URL, headers=headers, json=payload, timeout=60)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as exc:
        return {"error": str(exc)}
    except ValueError as exc:
        # Body was not valid JSON (json.JSONDecodeError subclasses ValueError).
        return {"error": f"Non-JSON response from API: {exc}"}
# --- Demo UI ---------------------------------------------------------------
# One prompt box plus a Generate button; the raw API reply is rendered below.
user_input = st.text_input("Enter your prompt", value="Hello, how are you?")

if st.button("Generate"):
    with st.spinner("Generating..."):
        result = query_hf_api(user_input)
        st.write("**API Response:**")
        st.json(result)