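"""Gradio app that gates the download of a technical interview exercise.

Whitelisted Hugging Face users log in via OAuth, each download is logged to a
Hub dataset, and the exercise file is then served back to the user.
"""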

import os
from datetime import datetime

import gradio as gr
from datasets import Dataset, concatenate_datasets, load_dataset
from huggingface_hub import hf_hub_download, whoami

# Dataset repo that holds both the exercise file and the download log.
DATASET_NAME = "andito/technical_interview_internship_2025"
# Token used to push the updated download log (needs write access to the dataset).
TOKEN = os.environ.get("HF_TOKEN")
# Filename of the exercise file inside the dataset repo.
EXERCISE_URL = os.environ.get("EXERCISE")
# Comma-separated usernames allowed to download the exercise; default to an
# empty string so a missing WHITELIST variable does not raise at import time.
whitelist = os.environ.get("WHITELIST", "").split(",")

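
# Pull the exercise file from the dataset repo into the working directory so it
# can be handed to the gr.File component below.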
def fetch_exercise_file():
    return hf_hub_download(
        repo_id=DATASET_NAME,
        filename=EXERCISE_URL,
        repo_type="dataset",
        local_dir=".",
    )

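
# Check that the logged-in user is whitelisted, append a row to the download
# log on the Hub, and return a status message plus the file to serve.
# "README.md" acts as a placeholder file when the request is rejected.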
def log_to_hf_dataset(oauth_token: gr.OAuthToken | None):
    if oauth_token is None:
        return "You have to be logged in.", "README.md"

    username = whoami(token=oauth_token.token)["name"]
    if username not in whitelist:
        return "You are not authorized to download the exercise.", "README.md"

    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    # Append the new log entry to the existing log and push it back to the Hub.
    new_entry = Dataset.from_dict({
        "username": [username],
        "timestamp": [timestamp],
        "ip_address": ["egg"],  # placeholder value; no real IP is recorded
    })
    dataset = load_dataset(DATASET_NAME, split="train")
    updated_dataset = concatenate_datasets([dataset, new_entry])
    updated_dataset.push_to_hub(DATASET_NAME, token=TOKEN)

    local_file_path = fetch_exercise_file()
    return "Thank you! Your download is ready.", local_file_path

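
# Minimal UI: login button, download button, a status text box, and the file
# component that receives the exercise once the checks pass.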
with gr.Blocks() as demo:
    gr.Markdown("You must be logged in to download the exercise.")
    gr.LoginButton(min_width=250)
    download_button = gr.Button("Download Exercise")
    output = gr.Text()
    file = gr.File(label="Download your exercise file")

    # The gr.OAuthToken argument is injected automatically by Gradio, so no
    # explicit inputs are passed here.
    download_button.click(log_to_hf_dataset, inputs=[], outputs=[output, file])


demo.launch()