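"""Brand Analyzer: a Gradio app that queries the Neuronpedia "search-with-topk"
API for the interpretable features activated by each token of the input text,
and embeds the Neuronpedia dashboard for a selected feature."""
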
import requests
import gradio as gr
from enum import Enum
class Model(Enum):
    GEMMA = "gemma-2-2b"
    GPT2 = "gpt2-small"

MODEL_CONFIGS = {
    Model.GEMMA: "20-gemmascope-res-16k",
    Model.GPT2: "9-res-jb"
}
def get_features(text: str, model: Model):
    url = "https://www.neuronpedia.org/api/search-with-topk"
    payload = {
        "modelId": model.value,
        "text": text,
        "layer": MODEL_CONFIGS[model]
    }
    try:
        response = requests.post(url, headers={"Content-Type": "application/json"}, json=payload)
        response.raise_for_status()
        return response.json()
    except Exception:
        # On any request/HTTP error, return None and let the caller handle it.
        return None

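# get_features returns Neuronpedia's JSON payload; based on how analyze_text
# parses it below, the relevant shape is roughly:
#   {"results": [{"token": "...",
#                 "top_features": [{"feature_index": 0, "activation_value": 0.0}, ...]}, ...]}
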
def create_dashboard(feature_id: int, model: Model) -> str:
    model_path = model.value.lower()
    layer_name = MODEL_CONFIGS[model].lower()
    return f"""
    <div class="dashboard-container p-4">
        <h3 class="text-lg font-semibold mb-4">Feature {feature_id} Dashboard</h3>
        <iframe
            src="https://www.neuronpedia.org/{model_path}/{layer_name}/{feature_id}?embed=true&embedexplanation=true&embedplots=true&embedtest=true&height=300"
            width="100%"
            height="600"
            frameborder="0"
            class="rounded-lg"
        ></iframe>
    </div>
    """

def handle_feature_click(feature_id: int, model: Model):
    return create_dashboard(feature_id, model)

def analyze_text(text: str, selected_model: str):
    # The UI labels the Gemma model "Gemini" and GPT-2 "OpenAI".
    model = Model.GEMMA if selected_model == "Gemini" else Model.GPT2
    if not text:
        return [], ""
    features_data = get_features(text, model)
    if not features_data:
        return [], ""
    features = []
    first_feature_id = None
    for result in features_data['results']:
        # Skip the beginning-of-sequence token.
        if result['token'] == '<bos>':
            continue
        token = result['token']
        token_features = []
        # Keep the three strongest features for each token.
        for feature in result['top_features'][:3]:
            feature_id = feature['feature_index']
            if first_feature_id is None:
                first_feature_id = feature_id
            token_features.append({
                "token": token,
                "id": feature_id,
                "activation": feature['activation_value']
            })
        features.append({"token": token, "features": token_features})
    # Show the dashboard of the first feature found, if any.
    return features, create_dashboard(first_feature_id, model) if first_feature_id is not None else ""

css = """
@import url('https://fonts.googleapis.com/css2?family=Open+Sans:wght@300;400;600;700&display=swap');
body { font-family: 'Open Sans', sans-serif !important; }
.dashboard-container {
    border: 1px solid #e0e5ff;
    border-radius: 8px;
    background-color: #ffffff;
}
.token-header {
    font-size: 1.25rem;
    font-weight: 600;
    margin-top: 1rem;
    margin-bottom: 0.5rem;
}
.feature-button {
    display: inline-block;
    margin: 0.25rem;
    padding: 0.5rem 1rem;
    background-color: #f3f4f6;
    border: 1px solid #e5e7eb;
    border-radius: 0.375rem;
    font-size: 0.875rem;
}
.feature-button:hover {
    background-color: #e5e7eb;
}
.model-selector {
    display: flex;
    gap: 1rem;
    margin-bottom: 1rem;
}
"""
with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
gr.Markdown("# Brand Analyzer", elem_classes="text-2xl font-bold mb-2")
gr.Markdown("*Analyze text using interpretable neural features*", elem_classes="text-gray-600 mb-6")
features_state = gr.State([])
selected_model = gr.State("Gemini") # Default to Gemini
with gr.Row(elem_classes="model-selector"):
gemini_btn = gr.Button("🧬 Gemini", variant="primary" if selected_model.value == "Gemini" else "secondary")
openai_btn = gr.Button("🤖 OpenAI", variant="secondary")
with gr.Row():
with gr.Column(scale=1):
input_text = gr.Textbox(
lines=5,
placeholder="Enter text to analyze...",
label="Input Text"
)
analyze_btn = gr.Button("Analyze Features", variant="primary")
gr.Examples(
examples=["WordLift", "Think Different", "Just Do It"],
inputs=input_text
)
with gr.Column(scale=2):
@gr.render(inputs=[features_state, selected_model])
def render_features(features, current_model):
if not features:
return
model = Model.GEMMA if current_model == "Gemini" else Model.GPT2
for token_group in features:
gr.Markdown(f"### {token_group['token']}")
with gr.Row():
for feature in token_group['features']:
btn = gr.Button(
f"Feature {feature['id']} (Activation: {feature['activation']:.2f})",
elem_classes=["feature-button"]
)
btn.click(
fn=lambda fid=feature['id']: handle_feature_click(fid, model),
outputs=dashboard
)
dashboard = gr.HTML()
def update_model(new_model):
return new_model
gemini_btn.click(
fn=lambda: update_model("Gemini"),
outputs=selected_model,
queue=False
)
openai_btn.click(
fn=lambda: update_model("OpenAI"),
outputs=selected_model,
queue=False
)
analyze_btn.click(
fn=analyze_text,
inputs=[input_text, selected_model],
outputs=[features_state, dashboard]
)
if __name__ == "__main__":
    demo.launch(share=False)