Update app.py
app.py
CHANGED
@@ -9,67 +9,78 @@ from image_gen_aux.utils import load_image
 # They did great work, and I was happy to see them also use my models :) I thought I'd duplicate it and extend it.
 # It basically made me get a Pro account so I can make a Zero GPU Space, and made me upload a lot of my models to Hugging Face so I can use them here ;)
 
-# My models
+# My models, alphabetically sorted
 MODELS = {
+    "1xDeH264_realplksr": "Phips/1xDeH264_realplksr",
+    "1xDeJPG_HAT": "Phips/1xDeJPG_HAT",
+    "1xDeJPG_OmniSR": "Phips/1xDeJPG_OmniSR",
+    "1xDeJPG_realplksr_otf": "Phips/1xDeJPG_realplksr_otf",
+    "1xDeJPG_SRFormer_light": "Phips/1xDeJPG_SRFormer_light",
+    "1xDeNoise_realplksr_otf": "Phips/1xDeNoise_realplksr_otf",
+    "1xExposureCorrection_compact": "Phips/1xExposureCorrection_compact",
+    "1xOverExposureCorrection_compact": "Phips/1xOverExposureCorrection_compact",
+    "1xUnderExposureCorrection_compact": "Phips/1xUnderExposureCorrection_compact",
+    "2xAoMR_mosr": "Phips/2xAoMR_mosr",
+    "2xEvangelion_compact": "Phips/2xEvangelion_compact",
+    "2xEvangelion_dat2": "Phips/2xEvangelion_dat2",
+    "2xEvangelion_omnisr": "Phips/2xEvangelion_omnisr",
+    "2xHFA2k_compact_multijpg": "Phips/2xHFA2k_compact_multijpg",
+    "2xHFA2k_LUDVAE_compact": "Phips/2xHFA2k_LUDVAE_compact",
+    "2xHFA2k_LUDVAE_SPAN": "Phips/2xHFA2k_LUDVAE_SPAN",
+    "2xHFA2kAVCCompact": "Phips/2xHFA2kAVCCompact",
+    "2xHFA2kCompact": "Phips/2xHFA2kCompact",
+    "2xHFA2kOmniSR": "Phips/2xHFA2kOmniSR",
+    "2xHFA2kReal-CUGAN": "Phips/2xHFA2kReal-CUGAN",
+    "2xHFA2kShallowESRGAN": "Phips/2xHFA2kShallowESRGAN",
+    "2xHFA2kSPAN": "Phips/2xHFA2kSPAN",
+    "2xHFA2kSwinIR-S": "Phips/2xHFA2kSwinIR-S",
+    "2xNomosUni_compact_multijpg": "Phips/2xNomosUni_compact_multijpg",
+    "2xNomosUni_compact_multijpg_ldl": "Phips/2xNomosUni_compact_multijpg_ldl",
+    "2xNomosUni_compact_otf_medium": "Phips/2xNomosUni_compact_otf_medium",
+    "2xNomosUni_esrgan_multijpg": "Phips/2xNomosUni_esrgan_multijpg",
+    "2xNomosUni_span_multijpg": "Phips/2xNomosUni_span_multijpg",
+    "2xNomosUni_span_multijpg_ldl": "Phips/2xNomosUni_span_multijpg_ldl",
+    "4x4xTextures_GTAV_rgt-s": "Phips/4xTextures_GTAV_rgt-s",
+    "4xArtFaces_realplksr_dysample": "Phips/4xArtFaces_realplksr_dysample",
+    "4xFaceUpDAT": "Phips/4xFaceUpDAT",
+    "4xFaceUpLDAT": "Phips/4xFaceUpLDAT",
+    "4xFaceUpSharpDAT": "Phips/4xFaceUpSharpDAT",
+    "4xFaceUpSharpLDAT": "Phips/4xFaceUpSharpLDAT",
+    "4xFFHQDAT": "Phips/4xFFHQDAT",
+    "4xFFHQLDAT": "Phips/4xFFHQLDAT",
+    "4xHFA2k_ludvae_realplksr_dysample": "Phips/4xHFA2k_ludvae_realplksr_dysample",
+    "4xLexicaDAT2_otf": "Phips/4xLexicaDAT2_otf",
+    "4xLSDIRDAT": "Phips/4xLSDIRDAT",
+    "4xNature_realplksr_dysample": "Phips/4xNature_realplksr_dysample",
+    "4xNomos2_hq_atd": "Phips/4xNomos2_hq_atd",
+    "4xNomos2_hq_dat2": "Phips/4xNomos2_hq_dat2",
+    "4xNomos2_hq_drct-l": "Phips/4xNomos2_hq_drct-l",
+    "4xNomos2_hq_mosr": "Phips/4xNomos2_hq_mosr",
+    "4xNomos2_otf_esrgan": "Phips/4xNomos2_otf_esrgan",
+    "4xNomos2_realplksr_dysample": "Phips/4xNomos2_realplksr_dysample",
+    "4xNomos8k_atd_jpg": "Phips/4xNomos8k_atd_jpg",
+    "4xNomos8kDAT": "Phips/4xNomos8kDAT",
+    "4xNomos8kHAT-L_bokeh_jpg": "Phips/4xNomos8kHAT-L_bokeh_jpg",
+    "4xNomos8kHAT-L_otf": "Phips/4xNomos8kHAT-L_otf",
+    "4xNomos8kSCHAT-L": "Phips/4xNomos8kSCHAT-L",
+    "4xNomosUni_rgt_multijpg": "Phips/4xNomosUni_rgt_multijpg",
+    "4xNomosUni_rgt_s_multijpg": "Phips/4xNomosUni_rgt_s_multijpg",
+    "4xNomosUni_span_multijpg": "Phips/4xNomosUni_span_multijpg",
+    "4xNomosUniDAT2_box": "Phips/4xNomosUniDAT2_box",
+    "4xNomosUniDAT2_multijpg_ldl": "Phips/4xNomosUniDAT2_multijpg_ldl",
+    "4xNomosUniDAT2_multijpg_ldl_sharp": "Phips/4xNomosUniDAT2_multijpg_ldl_sharp",
+    "4xNomosUniDAT_bokeh_jpg": "Phips/4xNomosUniDAT_bokeh_jpg",
+    "4xNomosUniDAT_otf": "Phips/4xNomosUniDAT_otf",
+    "4xNomosWebPhoto_atd": "Phips/4xNomosWebPhoto_atd",
+    "4xNomosWebPhoto_esrgan": "Phips/4xNomosWebPhoto_esrgan",
+    "4xNomosWebPhoto_RealPLKSR": "Phips/4xNomosWebPhoto_RealPLKSR",
+    "4xReal_SSDIR_DAT_GAN": "Phips/4xReal_SSDIR_DAT_GAN",
+    "4xRealWebPhoto_v3_atd": "Phips/4xRealWebPhoto_v3_atd",
+    "4xRealWebPhoto_v4_dat2": "Phips/4xRealWebPhoto_v4_dat2",
+    "4xRealWebPhoto_v4_drct-l": "Phips/4xRealWebPhoto_v4_drct-l",
+    "4xSSDIRDAT": "Phips/4xSSDIRDAT",
+    "4xTextureDAT2_otf": "Phips/4xTextureDAT2_otf",
+    "4xTextures_GTAV_rgt-s_dither": "Phips/4xTextures_GTAV_rgt-s_dither",
 }
 
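For context, here is a minimal sketch of how a registry like MODELS is typically consumed: the dropdown key resolves to a Hugging Face repo id, which is then handed to the upscaler loader. UpscaleWithModel and its from_pretrained method are assumptions inferred from the image_gen_aux import in the hunk header above, and load_upscaler is a hypothetical helper; none of this appears in the diff itself.

from image_gen_aux import UpscaleWithModel  # assumed loader class, not shown in this diff

def load_upscaler(model_name: str, device: str = "cuda"):
    """Hypothetical helper: map a dropdown key from MODELS to its repo id and load that model."""
    repo_id = MODELS[model_name]  # e.g. "Phips/4xNomosWebPhoto_RealPLKSR"
    return UpscaleWithModel.from_pretrained(repo_id).to(device)

upscaler = load_upscaler("4xNomosWebPhoto_RealPLKSR")  # the dropdown's default value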
@@ -90,7 +101,7 @@ def clear_result():
 title = """<h1 align="center">Image Upscaler</h1>
 <div align="center">Use this Space to upscale your images, makes use of the
 <a href="https://github.com/asomoza/image_gen_aux">Image Generation Auxiliary Tools</a> library. <br>
-This space makes use of <a href="https://github.com/Phhofm/models">my self trained models</a> and
+This space makes use of <a href="https://github.com/Phhofm/models">my self trained models</a> and tiles at 1024x1024<br>
 Here is an <a href="https://huggingface.co/spaces/Phips/Upscaler/resolve/main/input_example1.png">example input image</a> you can use to try it out.</div>
 """
 
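The added title line advertises tiling at 1024x1024, i.e. the image is upscaled tile by tile to bound GPU memory use. Below is a hedged sketch of what such a call could look like with image_gen_aux; the keyword names tiling, tile_width, and tile_height are assumptions and do not appear in this diff.

from image_gen_aux import UpscaleWithModel  # assumed API, as in the sketch above
from image_gen_aux.utils import load_image

upscaler = UpscaleWithModel.from_pretrained("Phips/4xNomosWebPhoto_RealPLKSR").to("cuda")
image = load_image("input_example1.png")  # the example input linked in the title

# Keyword names are assumptions; the diff only states that the Space tiles at 1024x1024.
result = upscaler(image, tiling=True, tile_width=1024, tile_height=1024)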
@@ -103,7 +114,7 @@ with gr.Blocks() as demo:
         model_selection = gr.Dropdown(
             choices=list(MODELS.keys()),
             value="4xNomosWebPhoto_RealPLKSR",
-            label="Model",
+            label="Model (alphabetically sorted)",
         )
 
         run_button = gr.Button("Upscale")
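For reference, a minimal sketch of how the dropdown and button in this hunk are typically wired in Gradio Blocks. Only clear_result, the title string, and the widgets shown above appear in the diff; the upscale handler, the image components, and the surrounding layout are assumptions.

import gradio as gr

def upscale(image, model_name):  # hypothetical handler name
    # On a Zero GPU Space this handler would typically also carry the @spaces.GPU decorator.
    upscaler = load_upscaler(model_name)  # see the loading sketch above
    return upscaler(image, tiling=True, tile_width=1024, tile_height=1024)

with gr.Blocks() as demo:
    gr.HTML(title)  # the title string from the hunk above
    with gr.Row():
        input_image = gr.Image(type="pil", label="Input")
        result = gr.Image(label="Result")
    model_selection = gr.Dropdown(
        choices=list(MODELS.keys()),
        value="4xNomosWebPhoto_RealPLKSR",
        label="Model (alphabetically sorted)",
    )
    run_button = gr.Button("Upscale")
    # Run the handler on click, passing the input image and the selected model key.
    run_button.click(fn=upscale, inputs=[input_image, model_selection], outputs=result)

demo.launch()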