import gradio as gr
import numpy as np
import random

from diffusers import DiffusionPipeline
import torch

# Run on the GPU when available; bfloat16 keeps memory use down on CUDA,
# while CPU inference falls back to float32.
device = "cuda" if torch.cuda.is_available() else "cpu"

if torch.cuda.is_available():
    torch_dtype = torch.bfloat16
else:
    torch_dtype = torch.float32

MAX_SEED = np.iinfo(np.int32).max  # largest value the seed slider can take
MAX_IMAGE_SIZE = 512  # generated images are fixed at 512x512


def infer(
    model_repo_id,
    prompt,
    negative_prompt,
    seed,
    randomize_seed,
    guidance_scale,
    num_inference_steps,
    width=MAX_IMAGE_SIZE,
    height=MAX_IMAGE_SIZE,
    progress=gr.Progress(track_tqdm=True),
):
    """Generate one image from `prompt` with the pipeline selected in the UI.

    Returns the image together with the seed that was actually used, so the
    seed slider can be updated when "Randomize seed" is enabled.
    """
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)

    generator = torch.Generator().manual_seed(seed)

    # The pipeline is loaded on every call so the model dropdown can switch
    # checkpoints; weights are cached locally after the first download.
    pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
    pipe = pipe.to(device)

    image = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        generator=generator,
    ).images[0]

    return image, seed


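# Example prompts offered below the prompt box: chest X-ray report text
# prefixed with the "mimic:" tag.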
examples = [
    "mimic: PA view, FINAL REPORT\n PA AND LATERAL CHEST OF ___\n \n COMPARISON: ___ radiograph.\n \n FINDINGS: Cardiac silhouette is upper limits of normal in size and\n accompanied by pulmonary vascular congestion and a basilar predominant\n interstitial abnormality which most likely represents interstitial edema. \n Small bilateral pleural effusions are present, left greater than right, with\n interval decrease in size since the prior radiograph. There is also improving\n aeration in the left retrocardiac region, likely resolving atelectasis.",
    "mimic: AP view, small left-sided pleural effusion, discrete density overlying the postero-lateral rib, supraclavicular catheter at the cavoatrial junction, new opacity within the left lower lobe and lingula",
    "mimic: AP view, FINAL REPORT\n EXAMINATION: CHEST (PORTABLE AP)\n \n INDICATION: ___ year old woman with ?TIA // r/o acute CP process r/o\n acute CP process\n \n IMPRESSION: \n \n In comparison with the study of ___, there again are low lung\n volumes with elevation of the right hemidiaphragmatic contour. Cardiac\n silhouette is at the upper limits of normal or mildly enlarged. No evidence of\n acute pneumonia or vascular congestion.\n \n Right IJ catheter extends to the mid to lower portion of the SVC.",
]

css = """
#col-container {
    margin: 0 auto;
    max-width: 640px;
}
"""

with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.Markdown(" # SD Demo")
        gr.Markdown(" ## Inference Settings:")
        gr.Markdown(" **roentgen**: Guidance Scale: `4`")
        gr.Markdown(" **sd2-findings**: Guidance Scale: `7.5`")

        with gr.Row():
            model_repo_id = gr.Dropdown(
                choices=["Cylumn/roentgen", "Cylumn/sd2-findings"],
                label="Select Model",
                value="Cylumn/sd2-findings",
            )

        with gr.Row():
            prompt = gr.Text(
                label="Prompt",
                show_label=False,
                max_lines=1,
                placeholder="Enter your prompt",
                container=False,
            )

            run_button = gr.Button("Run", scale=0, variant="primary")

        result = gr.Image(label="Result", show_label=False)

        with gr.Accordion("Advanced Settings", open=False):
            negative_prompt = gr.Text(
                label="Negative prompt",
                max_lines=1,
                placeholder="Enter a negative prompt",
                visible=False,
            )

            seed = gr.Slider(
                label="Seed",
                minimum=0,
                maximum=MAX_SEED,
                step=1,
                value=0,
            )

            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
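            # Width and height sliders are not exposed here; infer() defaults
            # both dimensions to MAX_IMAGE_SIZE (512).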
            with gr.Row():
                guidance_scale = gr.Slider(
                    label="Guidance scale",
                    minimum=0,
                    maximum=10.0,
                    step=0.5,
                    value=7.5,
                )

                num_inference_steps = gr.Slider(
                    label="Number of inference steps",
                    minimum=25,
                    maximum=100,
                    step=5,
                    value=35,
                )

        gr.Examples(examples=examples, inputs=[prompt])
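    # Generate when the Run button is clicked or the prompt is submitted with Enter.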
    gr.on(
        triggers=[run_button.click, prompt.submit],
        fn=infer,
        inputs=[
            model_repo_id,
            prompt,
            negative_prompt,
            seed,
            randomize_seed,
            guidance_scale,
            num_inference_steps,
        ],
        outputs=[result, seed],
    )

if __name__ == "__main__":
    demo.launch()