import gradio as gr
from transformers import T5Tokenizer, T5ForConditionalGeneration

# Load the T5-small checkpoint and its tokenizer once at startup so every
# request reuses the same in-memory model. Note that T5Tokenizer requires
# the sentencepiece package to be installed.
model = T5ForConditionalGeneration.from_pretrained('t5-small')
tokenizer = T5Tokenizer.from_pretrained('t5-small')


def generate_clinical_report(input_text):
    """
    Generate a clinical report from the input text using the T5 model.
    """
    try:
        # T5 is trained with task prefixes; "summarize: " selects summarization.
        # Inputs longer than 512 tokens are truncated to fit the model.
        input_ids = tokenizer.encode(
            "summarize: " + input_text,
            return_tensors="pt",
            max_length=512,
            truncation=True,
        )

        # Beam search with n-gram blocking; bad_words_ids bans the first
        # sub-token of common URL fragments so the report avoids links.
        outputs = model.generate(
            input_ids,
            max_length=256,
            num_beams=4,
            no_repeat_ngram_size=3,
            length_penalty=2.0,
            early_stopping=True,
            bad_words_ids=[[tokenizer.encode(word, add_special_tokens=False)[0]]
                           for word in ['http', 'www', '.com', '.org']]
        )

        # Decode the best beam, dropping special tokens such as <pad> and </s>.
        return tokenizer.decode(outputs[0], skip_special_tokens=True)
    except Exception as e:
        print(f"Error generating report: {str(e)}")
        return f"Error: {str(e)}"


demo = gr.Interface(
    fn=generate_clinical_report,
    inputs=gr.Textbox(
        lines=8,
        placeholder="Enter clinical notes here...",
        label="Clinical Notes"
    ),
    outputs=gr.Textbox(
        lines=8,
        label="Generated Clinical Report"
    ),
    title="Clinical Report Generator",
    description="Generate professional clinical reports from clinical notes using a T5 model.",
    allow_flagging="never"
)


if __name__ == "__main__":
    # concurrency_count is the Gradio 3.x queue argument (renamed to
    # default_concurrency_limit in Gradio 4.x).
    demo.queue(concurrency_count=1)
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        root_path="",
        show_api=True,
        allowed_paths=None,
        quiet=True
    )