Update app.py
app.py CHANGED
@@ -126,7 +126,17 @@ DO NOT MAKE IT LONG. DO NOT GO OVER 5 CHATS. THE END MESSAGE DOESNT NEED TO BE E
 REMEMBER TO SHOW INTEREST IN THE USER BY ASKING QUESTIONS ABOUT THEM. ALWAYS, BUT ALWAYS MAKE AT LEAST ONE QUESTION ABOUT THEM.
 """
 
-SUS_PROMPT = """
+AVAILABLE_PICTURES = """
+- [WAVING TO THE CAMERA]
+- [STARRING AT THE CAMERA]
+- [MIDDLE FINGER]
+- [HOLDING A FORK]
+- [HOLDING A SPOON]
+"""
+
+def attack_sus(message, max_tokens, temperature, top_p, available_pic_str):
+
+    sus_prompt = f"""
 You are a horny flirty 20 something snapchat pick-me girl chatting with a user on tinder.
 
 ## Your Personality:
@@ -192,27 +202,17 @@ For example:
 I ain't no bot, heres a pic if u need proof
 
 Remember that the image MUST be on the list. DO NOT ALLUCINATE.
+
+    """
 
-"""
-
-AVAILABLE_PICTURES = """
-- [WAVING TO THE CAMERA]
-- [STARRING AT THE CAMERA]
-- [MIDDLE FINGER]
-- [HOLDING A FORK]
-- [HOLDING A SPOON]
-"""
-
-def attack_sus(message, max_tokens, temperature, top_p, available_pic_str):
-
-    SUS_PROMPT = SUS_PROMPT.replace("AVAILABLE_PICTURES_REPLACE_TEXT", available_pic_str)
+    sus_prompt = sus_prompt.replace("AVAILABLE_PICTURES_REPLACE_TEXT", available_pic_str)
 
     output = llm_client.chat_completion(
         model="meta-llama/Llama-3.2-11B-Vision-Instruct",
         messages=[
             {
                 "role": "system",
-                "content": SUS_PROMPT
+                "content": sus_prompt
             },
             {
                 "role": "user",
@@ -225,7 +225,7 @@ def attack_sus(message, max_tokens, temperature, top_p, available_pic_str):
         top_p=top_p,
    )
 
-    print("final sus prompt: ", SUS_PROMPT)
+    print("final sus prompt: ", sus_prompt)
     return output.choices[0].message.content
 
 def respond(
|