Merge pull request #3 from andreped/demo
- .github/workflows/linting.yml +2 -2
- demo/app.py +1 -2
- demo/src/convert.py +1 -5
- demo/src/gui.py +7 -25
- demo/src/inference.py +2 -10
.github/workflows/linting.yml
CHANGED
@@ -14,10 +14,10 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - uses: actions/checkout@v1
-      - name: Set up Python 3.
+      - name: Set up Python 3.10
        uses: actions/setup-python@v2
        with:
-          python-version: 3.
+          python-version: "3.10"

      - name: Install lint dependencies
        run: pip install wheel setuptools black==22.3.0 isort==5.10.1 flake8==4.0.1
demo/app.py
CHANGED
@@ -16,8 +16,7 @@ def main():
         "--share",
         type=int,
         default=0,
-        help="Whether to enable the app to be accessible online"
-        "-> setups a public link which requires internet access.",
+        help="Whether to enable the app to be accessible online" "-> setups a public link which requires internet access.",
     )
     args = parser.parse_args()

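For context, the joined help string lives inside an argparse definition in main(); below is a minimal, hypothetical sketch of the surrounding block. Only the --share argument itself is taken from the diff, the rest is an assumption.

# Hypothetical reconstruction around the --share argument; not part of the PR.
import argparse


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--share",
        type=int,
        default=0,
        help="Whether to enable the app to be accessible online" "-> setups a public link which requires internet access.",
    )
    args = parser.parse_args()
    print(bool(args.share))  # presumably forwarded to the web UI as its share flag


if __name__ == "__main__":
    main()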
demo/src/convert.py
CHANGED
@@ -28,8 +28,4 @@ def nifti_to_obj(path, output="prediction.obj"):
             thefile.write("vn {0} {1} {2}\n".format(item[0], item[1], item[2]))

         for item in faces:
-            thefile.write(
-                "f {0}//{0} {1}//{1} {2}//{2}\n".format(
-                    item[0], item[1], item[2]
-                )
-            )
+            thefile.write("f {0}//{0} {1}//{1} {2}//{2}\n".format(item[0], item[1], item[2]))
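The joined write call emits standard Wavefront OBJ face records, where each v//vn pair references a vertex index and a normal index. A tiny standalone illustration (the format string is taken verbatim from the diff; the sample face is made up):

# Illustration only: format one OBJ face record the same way the diff does.
item = (1, 2, 3)  # made-up face; OBJ vertex/normal indices are 1-based
line = "f {0}//{0} {1}//{1} {2}//{2}\n".format(item[0], item[1], item[2])
print(line, end="")  # -> f 1//1 2//2 3//3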
demo/src/gui.py
CHANGED
@@ -92,11 +92,7 @@ class WebUI:

     def get_img_pred_pair(self, k):
         k = int(k)
-        out = gr.AnnotatedImage(
-            self.combine_ct_and_seg(self.images[k], self.pred_images[k]),
-            visible=True,
-            elem_id="model-2d",
-        ).style(
+        out = gr.AnnotatedImage(self.combine_ct_and_seg(self.images[k], self.pred_images[k]), visible=True, elem_id="model-2d",).style(
             color_map={self.class_name: "#ffae00"},
             height=512,
             width=512,
@@ -141,17 +137,11 @@ class WebUI:
                     [sidebar_left, sidebar_state],
                 )

-                btn_clear_logs = gr.Button(
-                    "Clear logs", elem_id="logs-button"
-                )
+                btn_clear_logs = gr.Button("Clear logs", elem_id="logs-button")
                 btn_clear_logs.click(flush_logs, [], [])

-                file_output = gr.File(
-                    file_count="single", elem_id="upload"
-                )
-                file_output.upload(
-                    self.upload_file, file_output, file_output
-                )
+                file_output = gr.File(file_count="single", elem_id="upload")
+                file_output.upload(self.upload_file, file_output, file_output)

                 model_selector = gr.Dropdown(
                     list(self.class_names.keys()),
@@ -167,11 +157,7 @@ class WebUI:
                 )

                 with gr.Column(scale=0.2, min_width=150):
-                    run_btn = gr.Button(
-                        "Run analysis",
-                        variant="primary",
-                        elem_id="run-button",
-                    ).style(
+                    run_btn = gr.Button("Run analysis", variant="primary", elem_id="run-button",).style(
                         full_width=False,
                         size="lg",
                     )
@@ -203,9 +189,7 @@ class WebUI:
             with gr.Box():
                 with gr.Column():
                     # create dummy image to be replaced by loaded images
-                    t = gr.AnnotatedImage(
-                        visible=True, elem_id="model-2d"
-                    ).style(
+                    t = gr.AnnotatedImage(visible=True, elem_id="model-2d").style(
                         color_map={self.class_name: "#ffae00"},
                         height=512,
                         width=512,
@@ -226,6 +210,4 @@ class WebUI:
         # https://gradio.app/sharing-your-app/
         # inference times > 60 seconds -> need queue():
         # https://github.com/tloen/alpaca-lora/issues/60#issuecomment-1510006062
-        demo.queue().launch(
-            server_name="0.0.0.0", server_port=7860, share=self.share
-        )
+        demo.queue().launch(server_name="0.0.0.0", server_port=7860, share=self.share)
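All gui.py hunks are pure black-style line joins around the Gradio 3.x widgets the demo uses (gr.AnnotatedImage(...).style(...), gr.Button, gr.File, demo.queue().launch(...)). A rough, hypothetical sketch of how these pieces typically fit together in a Blocks app; widget arguments mirror the diff, while the placeholder data and the "tumor" label are assumptions standing in for self.class_name:

# Hypothetical minimal Gradio 3.x sketch, not taken from the PR: shows the
# AnnotatedImage value format and the queue().launch() pattern the diff touches.
import numpy as np
import gradio as gr

image = np.zeros((128, 128, 3), dtype=np.uint8)  # placeholder for a CT slice
mask = np.zeros((128, 128), dtype=np.uint8)
mask[32:96, 32:96] = 1  # placeholder for the model prediction

with gr.Blocks() as demo:
    # value is (base image, [(mask or bbox, label), ...]); color_map keys
    # must match the annotation labels
    view = gr.AnnotatedImage(
        (image, [(mask, "tumor")]),
        visible=True,
        elem_id="model-2d",
    ).style(
        color_map={"tumor": "#ffae00"},
        height=512,
        width=512,
    )

if __name__ == "__main__":
    # inference > 60 seconds needs queue(); share=True would create a public link
    demo.queue().launch(server_name="0.0.0.0", server_port=7860, share=False)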
demo/src/inference.py
CHANGED
@@ -61,9 +61,7 @@ def run_model(
         os.path.join(model_path, task, "pipeline.json"),
     )
     rads_config.add_section("Runtime")
-    rads_config.set(
-        "Runtime", "reconstruction_method", "thresholding"
-    )  # thresholding, probabilities
+    rads_config.set("Runtime", "reconstruction_method", "thresholding")  # thresholding, probabilities
     rads_config.set("Runtime", "reconstruction_order", "resample_first")
     rads_config.set("Runtime", "use_preprocessed_data", "False")

@@ -77,13 +75,7 @@ def run_model(

     # rename and move final result
     os.rename(
-        "./result/prediction-"
-        + splits[0]
-        + "/T0/"
-        + splits[0]
-        + "-t1gd_annotation-"
-        + name
-        + ".nii.gz",
+        "./result/prediction-" + splits[0] + "/T0/" + splits[0] + "-t1gd_annotation-" + name + ".nii.gz",
         "./prediction.nii.gz",
     )
     # Clean-up
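rads_config follows the standard configparser pattern (add_section/set, then write the file out for the downstream pipeline). A small self-contained illustration with the same section and option names as the diff; the output filename and the surrounding pipeline are assumptions:

# Standalone configparser illustration; section/option names mirror the diff,
# the output path is an assumption.
from configparser import ConfigParser

rads_config = ConfigParser()
rads_config.add_section("Runtime")
rads_config.set("Runtime", "reconstruction_method", "thresholding")  # thresholding, probabilities
rads_config.set("Runtime", "reconstruction_order", "resample_first")
rads_config.set("Runtime", "use_preprocessed_data", "False")

with open("rads_config.ini", "w") as f:
    rads_config.write(f)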