Spaces: Running on Zero
Update app.py
app.py CHANGED

@@ -81,7 +81,7 @@ tokenizer.tokenizer_legacy=False
 model = AutoModelForCausalLM.from_pretrained(checkpoint, device_map='balanced')
 #model = torch.compile(model)
 
-def filter_text(text):
+def filter_text(text,phraseC):
     """Filters out the text up to and including 'Rewritten Prompt:'."""
     phrase = "Rewritten Prompt:"
     phraseB = "rewritten text:"
@@ -94,6 +94,7 @@ def filter_text(text):
     match = re.search(pattern, filtered_text, flags=re.DOTALL)
     if match:
         filtered_text = match.group(2)
+        filtered_text = re.sub(phraseC, "", filtered_text, flags=re.DOTALL)  # Replaces the matched pattern with an empty string
         return filtered_text
     else:
         return filtered_text
@@ -148,7 +149,7 @@ def infer(
     enhanced_prompt = tokenizer.decode(outputs[0], skip_special_tokens=True)
     print('-- generated prompt --')
     print(enhanced_prompt)
-    enhanced_prompt = filter_text(enhanced_prompt)
+    enhanced_prompt = filter_text(enhanced_prompt,prompt)
     print('-- filtered prompt --')
     print(enhanced_prompt)
     print('-- generating image --')