Spaces:
Configuration error
Update app.py
Browse files
app.py
CHANGED
|
@@ -108,14 +108,17 @@ def interact(user_input, history):
|
|
| 108 |
formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
|
| 109 |
return "", formatted_history, history
|
| 110 |
except Exception as e:
|
| 111 |
-
if torch.cuda.
|
| 112 |
torch.cuda.empty_cache()
|
| 113 |
print(f"Error during interaction: {e}")
|
| 114 |
raise gr.Error(f"An error occurred during interaction: {str(e)}")
|
| 115 |
|
| 116 |
# Function to send selected story and initial message
|
| 117 |
def send_selected_story(title, model_name, system_prompt):
|
| 118 |
-
global chat_history
|
|
|
|
|
|
|
|
|
|
| 119 |
data = [] # Reset data for new story
|
| 120 |
interaction_count = 0 # Reset interaction count
|
| 121 |
tokenizer, model = load_model(model_name)
|
|
|
|
| 108 |
formatted_history = [(entry["content"], None) if entry["role"] == "user" else (None, entry["content"]) for entry in history if entry["role"] in ["user", "assistant"]]
|
| 109 |
return "", formatted_history, history
|
| 110 |
except Exception as e:
|
| 111 |
+
if torch.cuda.is_available():
|
| 112 |
torch.cuda.empty_cache()
|
| 113 |
print(f"Error during interaction: {e}")
|
| 114 |
raise gr.Error(f"An error occurred during interaction: {str(e)}")
|
| 115 |
|
| 116 |
# Function to send selected story and initial message
|
| 117 |
def send_selected_story(title, model_name, system_prompt):
|
| 118 |
+
global chat_history
|
| 119 |
+
global selected_story
|
| 120 |
+
global data
|
| 121 |
+
global interaction_count
|
| 122 |
data = [] # Reset data for new story
|
| 123 |
interaction_count = 0 # Reset interaction count
|
| 124 |
tokenizer, model = load_model(model_name)
|