Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -421,7 +421,7 @@ def generate_30(
     timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
     filename= f'rv_IP_{timestamp}.png'
     print("-- using image file --")
-    captions = caption
+    captions = caption+caption_2
     captions = flatten_and_stringify(captions)
     captions = " ".join(captions)
     print(captions)
@@ -433,13 +433,14 @@ def generate_30(
     del processor5
     gc.collect()
     torch.cuda.empty_cache()
-    expanded = expand_prompt(captions)
-    new_prompt = prompt+' '+expanded
+    #expanded = expand_prompt(captions)
+    new_prompt = captions #prompt+' '+expanded
     print("-- ------------ --")
     print("-- FINAL PROMPT --")
     print(new_prompt)
     print("-- FINAL PROMPT --")
     print("-- ------------ --")
+
     global model
     global txt_tokenizer
     del model
@@ -571,7 +572,7 @@ def generate_60(
     timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
     filename= f'rv_IP_{timestamp}.png'
     print("-- using image file --")
-    captions = caption
+    captions = caption+caption_2
     captions = flatten_and_stringify(captions)
     captions = " ".join(captions)
     print(captions)
@@ -583,8 +584,8 @@ def generate_60(
     del processor5
     gc.collect()
     torch.cuda.empty_cache()
-    expanded = expand_prompt(captions)
-    new_prompt = prompt+' '+expanded
+    #expanded = expand_prompt(captions)
+    new_prompt = captions #prompt+' '+expanded
     print("-- ------------ --")
     print("-- FINAL PROMPT --")
     print(new_prompt)
@@ -722,7 +723,7 @@ def generate_90(
     timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
     filename= f'rv_IP_{timestamp}.png'
     print("-- using image file --")
-    captions = caption
+    captions = caption+caption_2
     captions = flatten_and_stringify(captions)
     captions = " ".join(captions)
     print(captions)
@@ -734,8 +735,8 @@ def generate_90(
     del processor5
     gc.collect()
     torch.cuda.empty_cache()
-    expanded = expand_prompt(captions)
-    new_prompt = prompt+' '+expanded
+    #expanded = expand_prompt(captions)
+    new_prompt = captions #prompt+' '+expanded
     print("-- ------------ --")
     print("-- FINAL PROMPT --")
     print(new_prompt)
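
The commit makes the same edit in generate_30, generate_60, and generate_90: the captions from both input images are concatenated, and the expand_prompt() rewriting step is commented out so the joined captions are used directly as the final prompt. Below is a minimal sketch of the resulting prompt-building flow; it assumes caption and caption_2 are lists of caption strings, and flatten_and_stringify here is a stand-in written for illustration, not the app's actual helper.

# Sketch only: flatten_and_stringify is a stand-in for the app's own helper.
def flatten_and_stringify(items):
    # Recursively flatten nested lists/tuples and coerce every element to str.
    out = []
    for item in (items if isinstance(items, (list, tuple)) else [items]):
        if isinstance(item, (list, tuple)):
            out.extend(flatten_and_stringify(item))
        else:
            out.append(str(item))
    return out

def build_final_prompt(caption, caption_2):
    # Behaviour after this commit: combine both captions, skip expand_prompt().
    captions = caption + caption_2            # both assumed to be lists
    captions = flatten_and_stringify(captions)
    captions = " ".join(captions)
    new_prompt = captions                     # previously: prompt + ' ' + expand_prompt(captions)
    return new_prompt

# Example:
# build_final_prompt(["a red sports car"], ["parked on a rainy street"])
# -> "a red sports car parked on a rainy street"

One effect of bypassing expand_prompt() is that the final prompt stays literal to the image captions instead of being rewritten by the prompt-expansion model.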
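
The unchanged context lines also show how the Space frees GPU memory between stages: the captioning processor is deleted, the garbage collector is run, and the CUDA cache is emptied before the next model is loaded. A generic sketch of that pattern follows; the Linear layer is only a placeholder for a large model object, not anything from the app.

import gc
import torch

# Placeholder stand-in for a large model object (the app uses real HF models).
device = "cuda" if torch.cuda.is_available() else "cpu"
captioner = torch.nn.Linear(4096, 4096).to(device)

# ... captioning work would happen here ...

# Free the memory before loading the next model, mirroring the diff's context lines.
del captioner
gc.collect()
if torch.cuda.is_available():
    torch.cuda.empty_cache()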