# app.py – encoder-only demo for bert-beatrix-2048
# ------------------------------------------------
# launch:  python app.py  →  http://localhost:7860

import json, re, sys
from pathlib import Path, PurePosixPath  # ← PurePosixPath import

import gradio as gr
import spaces
import torch
from huggingface_hub import snapshot_download

from bert_handler import create_handler_from_checkpoint

# ------------------------------------------------------------------
# 0.  Download & patch config.json ---------------------------------
# ------------------------------------------------------------------
REPO_ID    = "AbstractPhil/bert-beatrix-2048"
LOCAL_CKPT = "bert-beatrix-2048"  # cache dir name

snapshot_download(
    repo_id=REPO_ID,
    revision="main",
    local_dir=LOCAL_CKPT,
    local_dir_use_symlinks=False,
)

cfg_path = Path(LOCAL_CKPT) / "config.json"
with cfg_path.open() as f:
    cfg = json.load(f)

auto_map = cfg.get("auto_map", {})
patched  = False
for k, v in auto_map.items():
    # Hub auto_map entries arrive as "repo--module.Class"; drop the
    # leading "repo--" so the loader resolves the module from the local
    # checkpoint directory instead of re-fetching the remote repo.
    if "--" in v:
        auto_map[k] = PurePosixPath(v.split("--", 1)[1]).as_posix()
        patched = True

if patched:
    cfg["auto_map"] = auto_map
    with cfg_path.open("w") as f:
        json.dump(cfg, f, indent=2)
    print("🛠️  Patched config.json → auto_map now points to local modules")

# ------------------------------------------------------------------
# 1.  Load model / tokenizer ---------------------------------------
# ------------------------------------------------------------------
handler, full_model, tokenizer = create_handler_from_checkpoint(LOCAL_CKPT)
full_model = full_model.eval().cuda()

# expose the sub-modules needed for encoder-only forward passes
encoder    = full_model.bert.encoder
embeddings = full_model.bert.embeddings
emb_ln     = full_model.bert.emb_ln
emb_drop   = full_model.bert.emb_drop

# ------------------------------------------------------------------
# 2.  Symbolic roles ------------------------------------------------
# ------------------------------------------------------------------
SYMBOLIC_ROLES = [
    "", "", "", "", "", "", "", "", "", "", "", "",
    "", "", "", "", "", "", "", "", "", "", "", "