add
- App_main.py +1 -1
- examples/AID_bridge_19_HR.png +0 -0
- examples/AID_bridge_19_LR.png +0 -0
- examples/AID_commercial_32_HR.png +0 -0
- examples/AID_commercial_32_LR.png +0 -0
- examples/AID_parking_60_HR.png +0 -0
- examples/AID_parking_60_LR.png +0 -0
- examples/AID_school_161_HR.png +0 -0
- examples/AID_school_161_LR.png +0 -0
- examples/UC_airplane00_HR.png +0 -0
- examples/UC_airplane00_LR.png +0 -0
- examples/UC_airplane95_HR.png +0 -0
- examples/UC_airplane95_LR.png +0 -0
- examples/UC_freeway35_HR.png +0 -0
- examples/UC_freeway35_LR.png +0 -0
- examples/UC_storagetanks54_HR.png +0 -0
- examples/UC_storagetanks54_LR.png +0 -0
- examples/resize.py +6 -6
App_main.py
CHANGED
@@ -94,7 +94,7 @@ with gr.Blocks() as demo:
     image_output = gr.outputs.Image(label='SR Result', type='numpy')
     with gr.Row():
         checkpoint = gr.inputs.Radio(['UC', 'AID'], label='Checkpoint')
-        scale = gr.Slider(1,
+        scale = gr.Slider(1, 10, value=4.0, step=0.1, label='scale')

     io = gr.Interface(fn=sr_func,
                       inputs=[image_input,
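For context, the sketch below shows how a slider configured this way plugs into a Gradio interface. It is a minimal stand-in, not the app's code: the sr_func body (a plain bicubic upscale), the 'LR Input' label, and the use of the newer gr.Image/gr.Radio component names (App_main.py uses the older gr.inputs/gr.outputs aliases) are all assumptions for illustration.

# Minimal sketch; sr_func and component labels are placeholders, not App_main.py's model code.
import gradio as gr
import numpy as np
from PIL import Image

def sr_func(image: np.ndarray, checkpoint: str, scale: float) -> np.ndarray:
    # Placeholder "super-resolution": a plain bicubic upscale by `scale`.
    # The real app would load the selected 'UC' or 'AID' checkpoint and run its model.
    h, w = image.shape[:2]
    out = Image.fromarray(image).resize((int(w * scale), int(h * scale)), Image.BICUBIC)
    return np.array(out)

io = gr.Interface(
    fn=sr_func,
    inputs=[
        gr.Image(label='LR Input', type='numpy'),
        gr.Radio(['UC', 'AID'], label='Checkpoint'),
        # Same slider configuration as the line added in this commit:
        gr.Slider(1, 10, value=4.0, step=0.1, label='scale'),
    ],
    outputs=gr.Image(label='SR Result', type='numpy'),
)

if __name__ == '__main__':
    io.launch()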
examples/AID_bridge_19_HR.png
CHANGED
examples/AID_bridge_19_LR.png
CHANGED
examples/AID_commercial_32_HR.png
CHANGED
examples/AID_commercial_32_LR.png
CHANGED
examples/AID_parking_60_HR.png
CHANGED
examples/AID_parking_60_LR.png
CHANGED
examples/AID_school_161_HR.png
CHANGED
examples/AID_school_161_LR.png
CHANGED
examples/UC_airplane00_HR.png
CHANGED
examples/UC_airplane00_LR.png
CHANGED
examples/UC_airplane95_HR.png
CHANGED
examples/UC_airplane95_LR.png
CHANGED
examples/UC_freeway35_HR.png
CHANGED
examples/UC_freeway35_LR.png
CHANGED
examples/UC_storagetanks54_HR.png
CHANGED
examples/UC_storagetanks54_LR.png
CHANGED
examples/resize.py
CHANGED
@@ -7,14 +7,14 @@ from torchvision.transforms import InterpolationMode

 patch_size = 48

-for file in glob.glob("*.
+for file in glob.glob("*.jpg"):
     img = transforms.ToTensor()(Image.open(file).convert('RGB')) * 255
     img_lr = transforms.Resize(patch_size, InterpolationMode.BICUBIC)(
-        transforms.CenterCrop(
+        transforms.CenterCrop(8 * patch_size)(img))

-    img_hr = transforms.CenterCrop(
+    img_hr = transforms.CenterCrop(8 * patch_size)(img)

-    cv2.imwrite(f'
-    print(f'
-    cv2.imwrite(f'
+    cv2.imwrite(f'AID_{file.split(".")[0]}_LR.png', img_lr.permute((1, 2, 0)).numpy())
+    print(f'AID_{file.split(".")[0]}_LR.png')
+    cv2.imwrite(f'AID_{file.split(".")[0]}_HR.png', img_hr.permute((1, 2, 0)).numpy())

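Pieced together, examples/resize.py after this commit is short; the sketch below reconstructs it with the imports its visible lines imply (glob, cv2, numpy, PIL, torchvision). The import block, the clamp/uint8 cast, and the RGB-to-BGR conversion before cv2.imwrite are assumptions added so the sketch runs and writes correct colors; the committed script passes the RGB float array to cv2.imwrite directly.

# Sketch of examples/resize.py as of this commit; the imports and the
# clamp/uint8/BGR handling are assumptions, not lines from the repository.
import glob

import cv2
import numpy as np
from PIL import Image
from torchvision import transforms
from torchvision.transforms import InterpolationMode

patch_size = 48

for file in glob.glob("*.jpg"):
    # Load as RGB, convert to a CHW float tensor, and scale to the 0-255 range.
    img = transforms.ToTensor()(Image.open(file).convert('RGB')) * 255
    # LR patch: center-crop to 8*patch_size, then bicubic-downscale to patch_size.
    img_lr = transforms.Resize(patch_size, InterpolationMode.BICUBIC)(
        transforms.CenterCrop(8 * patch_size)(img))
    # HR patch: the same center crop, kept at full resolution.
    img_hr = transforms.CenterCrop(8 * patch_size)(img)

    # Assumed handling: bicubic resizing can overshoot slightly, so clamp before the
    # uint8 cast, and convert RGB to the BGR order cv2.imwrite expects.
    lr = cv2.cvtColor(img_lr.clamp(0, 255).permute((1, 2, 0)).numpy().astype(np.uint8),
                      cv2.COLOR_RGB2BGR)
    hr = cv2.cvtColor(img_hr.permute((1, 2, 0)).numpy().astype(np.uint8),
                      cv2.COLOR_RGB2BGR)
    cv2.imwrite(f'AID_{file.split(".")[0]}_LR.png', lr)
    print(f'AID_{file.split(".")[0]}_LR.png')
    cv2.imwrite(f'AID_{file.split(".")[0]}_HR.png', hr)

With patch_size = 48 and a center crop of 8 * patch_size, each source image yields a 384x384 HR patch and a 48x48 bicubic LR patch, i.e. an 8x scale pair.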