# model-editing / app.py
# Author: Charles Lin
import copy
import importlib
import time

import pandas as pd
import streamlit as st
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

import algs
import config
EDIT_ALGS = [
    "MEND: Model editor networks using gradient decomposition",
    "SERAC: Semi-parametric editing with a retrieval-augmented counterfactual model",
    "ENN: Editable neural networks",
    "KE: KnowledgeEditor",
    "FT: Fine-tuning",
    "LU: Lookup Cache",
]
def reset():
    """Clear all edits and generations, then rebuild the editable model for the selected algorithm."""
    st.session_state.edits.drop(st.session_state.edits.index, inplace=True)
    st.session_state.model_outputs.drop(st.session_state.model_outputs.index, inplace=True)

    selected_alg = st.session_state.alg_selector
    with st.spinner("Loading model..."):
        # Each algorithm's abbreviation (the part before ":") doubles as its module name in
        # `algs/`, its class name, and the prefix of its config object in config.py.
        alg_abbrv = selected_alg[:selected_alg.index(":")]
        alg_module = importlib.import_module(f"algs.{alg_abbrv.lower()}")
        alg_class = getattr(alg_module, alg_abbrv.upper())
        cfg = getattr(config, f"{alg_abbrv.lower()}_config")
        st.session_state.editable_model = alg_class(
            st.session_state.model,
            cfg,
            lambda: copy.deepcopy(st.session_state.model),
        ).eval()
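# For reference: the dynamic loading above assumes each algorithm lives in `algs/<abbrv>.py`,
# exposes a class named `<ABBRV>` constructed as `AlgClass(model, cfg, model_constructor)`,
# and has a matching `<abbrv>_config` object in config.py. A hypothetical skeleton that would
# satisfy this loader (illustrative only, not the project's actual classes):
#
#   # algs/lu.py
#   import torch.nn as nn
#
#   class LU(nn.Module):
#       def __init__(self, model, cfg, model_constructor):
#           super().__init__()
#           self.model = model
#           self.cfg = cfg
#           self.model_constructor = model_constructor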
def apply_edit():
    # `edit_input` and `edit_label` hold the current values of the edit text inputs below.
    st.session_state.edits.loc[len(st.session_state.edits)] = [str(edit_input), str(edit_label)]
    # TODO: actually apply the edit to st.session_state.editable_model.
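# A hypothetical sketch of what applying the edit could look like. It assumes the editable
# model exposes an `edit(...)` method returning an updated model; the method name and
# signature are assumptions for illustration, not the actual `algs` API:
#
#   tokenizer = st.session_state.tokenizer
#   inputs = tokenizer(str(edit_input), return_tensors="pt")
#   labels = tokenizer(str(edit_label), return_tensors="pt").input_ids
#   st.session_state.editable_model = st.session_state.editable_model.edit(
#       input_ids=inputs.input_ids,
#       attention_mask=inputs.attention_mask,
#       labels=labels,
#   )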
def sample_model():
    input_str = str(test_input)
    model_output = "blah blah blah"  # TODO: actually sample the (edited) model.
    n_edits = len(st.session_state.edits)
    alg_name = st.session_state.alg_selector
    alg_abbrv = alg_name[:alg_name.index(":")]
    st.session_state.model_outputs.loc[len(st.session_state.model_outputs)] = [
        input_str,
        model_output,
        n_edits,
        alg_abbrv,
    ]
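# A minimal sketch of real generation with the loaded T5 model. Whether the edited model
# exposes a standard HF `generate` interface is an assumption about the local `algs`
# wrapper classes; the helper below is illustrative only and is not wired into the UI.
def _generate_answer(input_str):
    use_edited = len(st.session_state.edits) > 0 and st.session_state.editable_model is not None
    model = st.session_state.editable_model if use_edited else st.session_state.model
    inputs = st.session_state.tokenizer(input_str, return_tensors="pt")
    output_ids = model.generate(**inputs, max_length=32)
    return st.session_state.tokenizer.decode(output_ids[0], skip_special_tokens=True)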
################################
#### Backend initialization ####
################################
if "init" not in st.session_state:
    # Streamlit reruns this script on every interaction, so the DataFrames and models are
    # created once and kept in session state.
    st.session_state.edits = pd.DataFrame([], columns=["Edit input", "Edit label"])
    st.session_state.model_outputs = pd.DataFrame([], columns=["Input", "Output", "N edits", "Alg"])
    st.session_state.init = True

    with st.spinner("Loading model..."):
        st.session_state.tokenizer = AutoTokenizer.from_pretrained("google/t5-large-ssm-nq")
        st.session_state.model = AutoModelForSeq2SeqLM.from_pretrained("google/t5-large-ssm-nq").eval()
        st.session_state.editable_model = None
########################
#### Interface code ####
########################
st.title("Language Model Editing")
st.markdown("**Note: this HF space is currently under development and doesn't actually work yet!**")
st.markdown("The goal of this demo is to give you a sense of the *abilities* and *limitations* of existing methods for **editing** pre-trained language models. **Model editing** algorithms use a single input-output pair to update a pre-trained model's behavior for that input (and ideally, related inputs).")
st.markdown("This demo uses a [T5-large](https://huggingface.co/google/t5-large-ssm-nq) model fine-tuned on [Natural Questions](https://arxiv.org/pdf/2002.08910.pdf) as the base pre-trained model.")
st.write("You can choose from a variety of algorithms for model editing in the dropdown below. At the bottom of the page, you can query the model for whatever input you want before/after editing.")
st.markdown("***")
col1, col2 = st.columns([5, 1])
with col1:
    alg_selector = st.selectbox("Editing algorithm:", EDIT_ALGS, key="alg_selector", on_change=reset)
with col2:
    st.text("ㅤ")  # Invisible filler character, used to vertically align the button with the selectbox.
    st.button("Clear edits", on_click=reset)

st.write("Edits applied so far:")
st.table(st.session_state.edits)
col1, col2, col3 = st.columns([3, 2, 1])
with col1:
    edit_input = st.text_input("Edit input:", placeholder="e.g., 'What is the tallest mountain on Earth?'")
with col2:
    edit_label = st.text_input("Edit target:", placeholder="e.g., 'Denali'")
with col3:
    st.text("ㅤ")  # Invisible filler for vertical alignment.
    edit_button = st.button("Apply edit", on_click=apply_edit)

st.markdown("***")
if len(st.session_state.edits) == 0:
    title = "Input to sample from *unedited* model:"
else:
    title = "Input to sample from *edited* model:"

col1, col2 = st.columns([5, 1])
with col1:
    test_input = st.text_input(title, placeholder="e.g., 'What is the earth's tallest mountain?'")
with col2:
    st.text("ㅤ")  # Invisible filler for vertical alignment.
    generate_button = st.button("Generate", on_click=sample_model)

st.write("Model generation history:")
st.table(st.session_state.model_outputs)