Update app.py
app.py CHANGED
@@ -9,10 +9,16 @@ import streamlit as st
 REPO_NAME = 'schuler/experimental-JP47D20'
 
 # Load tokenizer and model
+@st.cache_resource(show_spinner="Loading model...")
+def load_model(repo_name):
+    tokenizer = AutoTokenizer.from_pretrained(repo_name, trust_remote_code=True)
+    generator_conf = GenerationConfig.from_pretrained(repo_name)
+    model = AutoModelForCausalLM.from_pretrained(repo_name, trust_remote_code=True)
+    return tokenizer, generator_conf, model
+
+tokenizer, generator_conf, model = load_model(REPO_NAME)
+
 try:
-    tokenizer = AutoTokenizer.from_pretrained(REPO_NAME, trust_remote_code=True)
-    generator_conf = GenerationConfig.from_pretrained(REPO_NAME)
-    model = AutoModelForCausalLM.from_pretrained(REPO_NAME, trust_remote_code=True)
     generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
 except Exception as e:
     st.error(f"Failed to load model: {str(e)}")
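
The change moves the heavy from_pretrained calls into a function decorated with st.cache_resource, Streamlit's cache for unserializable global objects such as models and tokenizers, so they are loaded once per process and reused on every script rerun instead of being reloaded on each user interaction; the show_spinner string is what users see during the first load. Below is a minimal sketch of how the cached loader reads in context, assuming the usual transformers imports at the top of app.py; the prompt/button UI at the end is illustrative only and is not part of this commit.

import streamlit as st
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    GenerationConfig,
    pipeline,
)

REPO_NAME = 'schuler/experimental-JP47D20'

# Load tokenizer and model
@st.cache_resource(show_spinner="Loading model...")
def load_model(repo_name):
    # Runs once per process; Streamlit returns the cached objects on reruns.
    tokenizer = AutoTokenizer.from_pretrained(repo_name, trust_remote_code=True)
    generator_conf = GenerationConfig.from_pretrained(repo_name)
    model = AutoModelForCausalLM.from_pretrained(repo_name, trust_remote_code=True)
    return tokenizer, generator_conf, model

tokenizer, generator_conf, model = load_model(REPO_NAME)

try:
    generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
except Exception as e:
    st.error(f"Failed to load model: {str(e)}")

# Hypothetical UI wiring (not in the diff): reruns triggered by these widgets
# no longer reload the model; only the pipeline call below executes again.
prompt = st.text_input("Prompt", "Hello")
if st.button("Generate"):
    result = generator(prompt, max_new_tokens=64)
    st.write(result[0]["generated_text"])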