Spaces:
Configuration error
Configuration error
Update app.py
Browse files
app.py
CHANGED
@@ -25,7 +25,7 @@ def load_stories():
|
|
25 |
# Load system prompts from Google Sheets
|
26 |
def load_prompts():
|
27 |
prompts_data = prompts_sheet.get_all_values()
|
28 |
-
prompts = [prompt[0] for prompt in prompts_data if prompt[0] != "System Prompt"] # Skip header row
|
29 |
return prompts
|
30 |
|
31 |
# Load available stories and prompts
|
@@ -73,7 +73,7 @@ chat_history = []
|
|
73 |
def interact(user_input, history):
|
74 |
global tokenizer, model
|
75 |
try:
|
76 |
-
if tokenizer is None or model is None:
|
77 |
raise ValueError("Tokenizer or model is not initialized.")
|
78 |
|
79 |
messages = history + [{"role": "user", "content": user_input}]
|
@@ -97,7 +97,7 @@ def interact(user_input, history):
|
|
97 |
formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
|
98 |
return "", formatted_history, history
|
99 |
except Exception as e:
|
100 |
-
if torch.cuda.is_available():
|
101 |
torch.cuda.empty_cache()
|
102 |
print(f"Error during interaction: {e}")
|
103 |
raise gr.Error(f"An error occurred during interaction: {str(e)}")
|
|
|
25 |
# Load system prompts from Google Sheets
|
26 |
def load_prompts():
|
27 |
prompts_data = prompts_sheet.get_all_values()
|
28 |
+
prompts = [prompt[0] for prompt in prompts_data[1:] if prompt[0] != "System Prompt"] # Skip header row
|
29 |
return prompts
|
30 |
|
31 |
# Load available stories and prompts
|
|
|
73 |
def interact(user_input, history):
|
74 |
global tokenizer, model
|
75 |
try:
|
76 |
+
if tokenizer is None or model is None:
|
77 |
raise ValueError("Tokenizer or model is not initialized.")
|
78 |
|
79 |
messages = history + [{"role": "user", "content": user_input}]
|
|
|
97 |
formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
|
98 |
return "", formatted_history, history
|
99 |
except Exception as e:
|
100 |
+
if torch.cuda.is_available():
|
101 |
torch.cuda.empty_cache()
|
102 |
print(f"Error during interaction: {e}")
|
103 |
raise gr.Error(f"An error occurred during interaction: {str(e)}")
|