Spaces:
Sleeping
Sleeping
File size: 8,996 Bytes
8e07496 869a62d 0aeb285 869a62d edc96fb 869a62d edc96fb a3e3b6e 4acce98 edc96fb b7846fd edc96fb b7846fd edc96fb 869a62d 97a468f edc96fb 97a468f edc96fb 97a468f b7846fd edc96fb 869a62d b7846fd edc96fb 869a62d b7846fd edc96fb b7846fd edc96fb 869a62d b7846fd edc96fb b7846fd edc96fb 97a468f edc96fb 97a468f edc96fb 97a468f edc96fb 97a468f edc96fb 869a62d edc96fb b7846fd edc96fb 869a62d fbb58ea edc96fb 869a62d fbb58ea edc96fb 869a62d b7846fd edc96fb b7846fd edc96fb a3e3b6e edc96fb 813981a b7846fd edc96fb a3e3b6e 291eaa0 a3e3b6e 869a62d 291eaa0 97a468f b7846fd edc96fb 5689d49 5459696 a70d772 135c49f df06f36 edc96fb b7846fd edc96fb 291eaa0 edc96fb 97a468f edc96fb b7846fd edc96fb 97a468f edc96fb b7846fd edc96fb df06f36 edc96fb 952dd6b edc96fb 97a468f 869a62d edc96fb b7846fd edc96fb 869a62d edc96fb 3f3122f edc96fb 3f3122f edc96fb 97a468f 1253c31 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 |
# app.py
import torch
import gradio as gr
import os
import requests
import argparse
from libra.eval import libra_eval
from libra.eval.run_libra import load_model
# Hugging Face Hub model identifiers offered in the UI dropdown by default.
# Extra paths can be added at launch time via the --model-path CLI flag.
DEFAULT_MODEL_PATHS = ["X-iZhang/libra-v1.0-7b",
                       "X-iZhang/libra-Llama-3.2-3B-Instruct"]
def get_model_short_name(model_path: str) -> str:
    """Return the final path component of *model_path* (e.g. the repo name)."""
    trimmed = model_path.rstrip("/")
    return trimmed.rsplit("/", 1)[-1]
loaded_models = {} # {model_key: reuse_model_object}
def generate_radiology_description(
    selected_model_name: str,
    current_img_data,
    prior_img_data,
    use_no_prior: bool,
    prompt: str,
    temperature: float,
    top_p: float,
    num_beams: int,
    max_new_tokens: int,
    model_paths_dict: dict
) -> str:
    """Run Libra inference on a (current, prior) radiology image pair.

    Args:
        selected_model_name: Short model name chosen in the UI dropdown.
        current_img_data: Filepath of the current study image (Gradio "filepath").
        prior_img_data: Filepath of the prior study image; may be None/empty.
        use_no_prior: If True, reuse the current image as a dummy prior.
        prompt: Clinical prompt forwarded to the model.
        temperature: Sampling temperature for decoding.
        top_p: Nucleus-sampling threshold.
        num_beams: Beam-search width.
        max_new_tokens: Generation length cap.
        model_paths_dict: Maps short model names to full model paths.

    Returns:
        The generated findings text, or a human-readable "Error: ..." /
        inference-failure message (the UI simply displays whatever is returned).
    """
    # Validate the inputs first so a doomed request never loads a model.
    if not current_img_data:
        return "Error: Please select or upload the Current Image."
    if use_no_prior:
        # Libra always consumes two images; duplicate the current one.
        prior_img_data = current_img_data
    elif not prior_img_data:
        return "Error: Please select or upload the Prior Image, or check 'Without Prior Image'."

    # Report unknown model names in the UI instead of raising KeyError.
    real_model_path = model_paths_dict.get(selected_model_name)
    if real_model_path is None:
        return f"Error: Unknown model '{selected_model_name}'."

    # Load lazily and cache so switching back to a model is instant.
    reuse_model = loaded_models.get(selected_model_name)
    if reuse_model is None:
        reuse_model = load_model(real_model_path)
        loaded_models[selected_model_name] = reuse_model

    try:
        output = libra_eval(
            libra_model=reuse_model,
            image_file=[current_img_data, prior_img_data],
            query=prompt,
            temperature=temperature,
            top_p=top_p,
            num_beams=num_beams,
            length_penalty=1.0,
            num_return_sequences=1,
            conv_mode="libra_v1",
            max_new_tokens=max_new_tokens
        )
        return output
    except Exception as e:
        # Surface inference failures to the UI instead of crashing the app.
        return f"An error occurred during model inference: {str(e)}"
def main():
    """Parse CLI args, build the Gradio UI for Libra, and launch the demo."""
    # Resolve the bundled example images relative to this file so the demo
    # works regardless of the current working directory.
    # NOTE: the example file on disk is named "curent.jpg" (sic) — the path
    # string must match the actual filename.
    cur_dir = os.path.abspath(os.path.dirname(__file__))
    example_current_path = os.path.abspath(os.path.join(cur_dir, "examples", "curent.jpg"))
    example_prior_path = os.path.abspath(os.path.join(cur_dir, "examples", "prior.jpg"))
    IMAGE_EXAMPLES = [
        [example_current_path],
        [example_prior_path]
    ]

    parser = argparse.ArgumentParser(description="Demo for Radiology Image Description Generator (Local Examples)")
    parser.add_argument(
        "--model-path",
        type=str,
        nargs="+",
        default=DEFAULT_MODEL_PATHS,
        help="User-specified model path. If not provided, only default model is shown."
    )
    args = parser.parse_args()
    # nargs="+" yields a list, but guard anyway in case of programmatic use.
    cmd_model_paths = args.model_path if isinstance(args.model_path, list) else [args.model_path]

    # Map short UI names -> full model paths: defaults first, then any extra
    # user-supplied paths (skipping duplicates of the defaults).
    model_paths_dict = {get_model_short_name(p): p for p in DEFAULT_MODEL_PATHS}
    for model_path in cmd_model_paths:
        if model_path not in DEFAULT_MODEL_PATHS:
            model_paths_dict[get_model_short_name(model_path)] = model_path

    default_ui_model = get_model_short_name(DEFAULT_MODEL_PATHS[0])

    with gr.Blocks(title="Libra: Radiology Analysis with Direct URL Examples") as demo:
        gr.Markdown("""
## 🩻 Libra: Leveraging Temporal Images for Biomedical Radiology Analysis
[Project Page](https://x-izhang.github.io/Libra_v1.0/) | [Paper](https://arxiv.org/abs/2411.19378) | [Code](https://github.com/X-iZhang/Libra) | [Model](https://huggingface.co/X-iZhang/libra-v1.0-7b)
**🚨 Performance Warning**
The demo is currently running on **CPU**, and a single inference takes approximately **500 seconds**.
To achieve optimal performance and significantly reduce inference time, **GPU** is required for effective operation.
For more details, please refer to the [launch demo locally](https://github.com/X-iZhang/Libra#gradio-web-ui).
""")
        model_dropdown = gr.Dropdown(
            label="Select Model",
            choices=list(model_paths_dict.keys()),
            value=default_ui_model,
            interactive=True
        )
        prompt_input = gr.Textbox(
            label="Clinical Prompt",
            value="Provide a detailed description of the findings in the radiology image.",
            lines=2,
            info=(
                "If clinical instructions are available, include them after the default prompt. "
                "For example: “Provide a detailed description of the findings in the radiology image. "
                "Following clinical context: Indication: chest pain, History: ...”"
            )
        )
        with gr.Row():
            with gr.Column():
                gr.Markdown("### Current Image")
                current_img = gr.Image(
                    label="Drop Or Upload Current Image",
                    type="filepath",
                    interactive=True
                )
                gr.Examples(
                    examples=IMAGE_EXAMPLES,
                    inputs=current_img,
                    label="Example Current Images"
                )
            with gr.Column():
                gr.Markdown("### Prior Image")
                prior_img = gr.Image(
                    label="Drop Or Upload Prior Image",
                    type="filepath",
                    interactive=True
                )
                with gr.Row():
                    gr.Examples(
                        examples=IMAGE_EXAMPLES,
                        inputs=prior_img,
                        label="Example Prior Images"
                    )
                    without_prior_checkbox = gr.Checkbox(
                        label="Without Prior Image",
                        value=False,
                        info="If checked, the current image will be used as the dummy prior image in the Libra model."
                    )
        with gr.Accordion("Parameters Settings", open=False):
            temperature_slider = gr.Slider(
                label="Temperature",
                minimum=0.1, maximum=1.0, step=0.1, value=0.9
            )
            top_p_slider = gr.Slider(
                label="Top P",
                minimum=0.1, maximum=1.0, step=0.1, value=0.8
            )
            num_beams_slider = gr.Slider(
                label="Number of Beams",
                minimum=1, maximum=20, step=1, value=1
            )
            max_tokens_slider = gr.Slider(
                label="Max output tokens",
                minimum=10, maximum=4096, step=10, value=25
            )
        output_text = gr.Textbox(
            label="Generated Findings Section",
            lines=5
        )
        generate_button = gr.Button("Generate Findings Description")
        # The lambda closes over model_paths_dict so the UI callback signature
        # stays limited to the Gradio input components.
        generate_button.click(
            fn=lambda model_name, c_img, p_img, no_prior, prompt, temp, top_p, beams, tokens: generate_radiology_description(
                model_name,
                c_img,
                p_img,
                no_prior,
                prompt,
                temp,
                top_p,
                beams,
                tokens,
                model_paths_dict
            ),
            inputs=[
                model_dropdown,         # model_name
                current_img,            # c_img
                prior_img,              # p_img
                without_prior_checkbox, # no_prior
                prompt_input,           # prompt
                temperature_slider,     # temp
                top_p_slider,           # top_p
                num_beams_slider,       # beams
                max_tokens_slider       # tokens
            ],
            outputs=output_text
        )
        gr.Markdown("""
### Terms of Use
The service is a research preview intended for non-commercial use only, subject to the model [License](https://github.com/facebookresearch/llama/blob/main/MODEL_CARD.md) of LLaMA.
By accessing or using this demo, you acknowledge and agree to the following:
- **Research & Non-Commercial Purposes**: This demo is provided solely for research and demonstration. It must not be used for commercial activities or profit-driven endeavors.
- **Not Medical Advice**: All generated content is experimental and must not replace professional medical judgment.
- **Content Moderation**: While we apply basic safety checks, the system may still produce inaccurate or offensive outputs.
- **Responsible Use**: Do not use this demo for any illegal, harmful, hateful, violent, or sexual purposes.
By continuing to use this service, you confirm your acceptance of these terms. If you do not agree, please discontinue use immediately.
""")
    demo.launch(share=True)
# Script entry point. (A stray trailing "|" scrape artifact after main()
# has been removed — it would be a syntax error in the real file.)
if __name__ == "__main__":
    main()