Spaces: Running on Zero
fixed image slider, and relative imports
- app.py +4 -5
- debug_upscaled_input.png +3 -0
- requirements.txt +2 -1
app.py CHANGED

@@ -1,5 +1,4 @@
 import gradio as gr
-from gradio_imageslider import ImageSlider  # Replaces gr.ImageCompare
 import torch
 import yaml
 import numpy as np

@@ -492,7 +491,7 @@ def superres_preview_preprocess(pil_image, resolution=768):
 # Dynamically load examples from demo_images directory
 example_list_inp = []
 example_list_sr = []
-demo_images_dir = os.path.join(project_root, "
+demo_images_dir = os.path.join(project_root, "demo_images")

 if os.path.exists(demo_images_dir):
     filenames = sorted(os.listdir(demo_images_dir))

@@ -652,7 +651,7 @@ Use the slider to compare the low resolution input image with the super-resolved

 with gr.Column(scale=1):  # Output column
     output_image_display = gr.Image(type="pil", label="Result")
-    sr_compare_display = ImageSlider(label="Super-Resolution: Input vs Output", visible=False
+    sr_compare_display = gr.ImageSlider(label="Super-Resolution: Input vs Output", visible=False)

@@ -667,7 +666,7 @@ Use the slider to compare the low resolution input image with the super-resolved
 sr_scale_slider: gr.update(visible=is_super_resolution),
 downscale_input: gr.update(visible=is_super_resolution),
 output_image_display: gr.update(visible=is_inpainting),
-sr_compare_display: gr.update(visible=is_super_resolution
+sr_compare_display: gr.update(visible=is_super_resolution),
 downscale_input: gr.update(visible=is_super_resolution),
 }

@@ -687,7 +686,7 @@ Use the slider to compare the low resolution input image with the super-resolved
 lr_image_for_sr, prompt_text, fixed_seed_value, use_random_seed,
 guidance_scale, num_steps, sr_scale_factor_value, downscale_input
 )
-return result_images[1], gr.update(value=result_images
+return result_images[1], gr.update(value=result_images), seed_val
 else:
     raise gr.Error("Unsupported task.")
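Pulled together, the pattern this commit lands looks roughly like the sketch below. It is a minimal, hypothetical reconstruction rather than the Space's actual app.py: the gr.Radio task selector, the run_super_resolution stub, the gr.Examples wiring, and the derivation of project_root from __file__ are assumptions; the parts taken from the diff are the gr.ImageSlider component, the gr.update visibility toggling, and the handler returning the result image, the (input, output) pair for the slider, and the seed.

import os

import gradio as gr


def run_super_resolution(lr_image, scale=4):
    # Hypothetical stand-in for the Space's model call; it only illustrates
    # the (input, output) pair and seed value the handler is expected to return.
    sr_image = lr_image.resize((lr_image.width * scale, lr_image.height * scale))
    return (lr_image, sr_image), 42


# Resolve the examples directory relative to this file rather than the current
# working directory (assumed definition; the diff only shows the os.path.join call).
project_root = os.path.dirname(os.path.abspath(__file__))
demo_images_dir = os.path.join(project_root, "demo_images")
example_list_sr = []
if os.path.exists(demo_images_dir):
    example_list_sr = [os.path.join(demo_images_dir, f) for f in sorted(os.listdir(demo_images_dir))]

with gr.Blocks() as demo:
    task = gr.Radio(["Inpainting", "Super-Resolution"], value="Inpainting", label="Task")
    input_image = gr.Image(type="pil", label="Input")
    with gr.Column(scale=1):  # Output column
        output_image_display = gr.Image(type="pil", label="Result")
        # Hidden until the super-resolution task is selected.
        sr_compare_display = gr.ImageSlider(label="Super-Resolution: Input vs Output", visible=False)
    seed_display = gr.Number(label="Seed used")

    if example_list_sr:
        gr.Examples(examples=example_list_sr, inputs=input_image)

    def toggle_outputs(selected_task):
        is_super_resolution = selected_task == "Super-Resolution"
        return {
            output_image_display: gr.update(visible=not is_super_resolution),
            sr_compare_display: gr.update(visible=is_super_resolution),
        }

    def run(selected_task, pil_image):
        if selected_task == "Super-Resolution":
            result_images, seed_val = run_super_resolution(pil_image)
            # The plain gr.Image gets only the result; the slider gets the full
            # (input, output) pair so the two can be compared interactively.
            return result_images[1], gr.update(value=result_images), seed_val
        raise gr.Error("Unsupported task.")

    task.change(toggle_outputs, task, [output_image_display, sr_compare_display])
    gr.Button("Run").click(run, [task, input_image], [output_image_display, sr_compare_display, seed_display])

demo.launch()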
debug_upscaled_input.png ADDED
Git LFS Details
requirements.txt CHANGED

@@ -15,4 +15,5 @@ opencv-python
 sentencepiece
 protobuf
 accelerate
-gradio
+gradio
+gradio_imageslider
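Note that requirements.txt still lists the standalone gradio_imageslider package even though the updated app.py drops its import in favor of Gradio's built-in gr.ImageSlider. If the Space ever needs to run on a Gradio release without the built-in component, a guarded import along these lines is one option; this is a suggestion, not part of the commit:

import gradio as gr

try:
    # Use the built-in component when the installed Gradio provides it.
    ImageSlider = gr.ImageSlider
except AttributeError:
    # Fall back to the standalone package listed in requirements.txt.
    from gradio_imageslider import ImageSlider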