import gradio as gr
from huggingface_hub import login, logout, whoami

# Custom CSS for background gradient
custom_css = """
body, .gradio-container {
    background: linear-gradient(135deg, #1e3c72, #2a5298);
    color: white;
}
.sidebar {
    background: rgba(255, 255, 255, 0.1) !important;
    border-radius: 10px;
    padding: 20px;
    margin: 10px;
}
.sidebar .markdown {
    color: white !important;
}
"""
# Function to handle login
def handle_login(token):
    try:
        # Attempt to log in with the provided token
        login(token=token, add_to_git_credential=False)
        user_info = whoami()
        return f"Logged in as: {user_info['name']}"
    except Exception as e:
        # Handle login failure
        logout()  # Ensure the user is logged out if login fails
        return f"Login failed: {str(e)}"

# Function to check if the user is logged in
def is_logged_in():
    try:
        # Check if the user is authenticated
        whoami()
        return True
    except Exception:
        return False

# Function to restrict access to the app
def restricted_functionality(prompt):
    if not is_logged_in():
        return "Please log in to use this feature."
    # Simulate a model response (see the inference sketch below for a real replacement)
    return f"Model response to: {prompt}"
# Gradio interface
with gr.Blocks(css=custom_css) as demo:
    with gr.Sidebar():
        gr.Markdown("# Inference Provider")
        gr.Markdown("This Space showcases the deepseek-ai/DeepSeek-Coder-V2-Lite-Instruct model, served by the Nebius API. Sign in with your Hugging Face account to use this API.")
        token_input = gr.Textbox(label="Hugging Face Token", type="password")
        login_button = gr.Button("Sign in")
        login_status = gr.Markdown("")

    # Main app functionality
    with gr.Column(visible=False) as main_interface:  # Hidden until the user logs in
        prompt = gr.Textbox(label="Your Prompt")
        output = gr.Textbox(label="Model Response")
        generate_button = gr.Button("Generate")

    # Load the model
    model_interface = gr.load("models/deepseek-ai/DeepSeek-Coder-V2-Lite-Instruct", provider="nebius")

    # Handle login
    def update_interface(token):
        login_result = handle_login(token)
        if "Logged in as:" in login_result:
            return {main_interface: gr.update(visible=True), login_status: login_result}
        else:
            return {main_interface: gr.update(visible=False), login_status: login_result}

    login_button.click(update_interface, inputs=token_input, outputs=[main_interface, login_status])

    # Handle text generation (restricted to logged-in users)
    generate_button.click(restricted_functionality, inputs=prompt, outputs=output)

# Launch the app
demo.launch()
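
# Note: on Hugging Face Spaces, launch() needs no arguments; for a local run, explicit
# host/port can be passed instead (illustrative values, not part of the original app):
# demo.launch(server_name="0.0.0.0", server_port=7860)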