Fabrice-TIERCELIN committed
Commit • 74dd986
1 Parent(s): 56f2c0e

A function that does nothing

Files changed: gradio_demo.py (+5 -0)
gradio_demo.py  CHANGED

@@ -16,9 +16,11 @@ import time
 import spaces
 from huggingface_hub import hf_hub_download
 
+hf_hub_download(repo_id="laion/CLIP-ViT-bigG-14-laion2B-39B-b160k", filename="open_clip_pytorch_model.bin", local_dir="laion_CLIP-ViT-bigG-14-laion2B-39B-b160k")
 hf_hub_download(repo_id="camenduru/SUPIR", filename="sd_xl_base_1.0_0.9vae.safetensors", local_dir="yushan777_SUPIR")
 hf_hub_download(repo_id="camenduru/SUPIR", filename="SUPIR-v0F.ckpt", local_dir="yushan777_SUPIR")
 hf_hub_download(repo_id="camenduru/SUPIR", filename="SUPIR-v0Q.ckpt", local_dir="yushan777_SUPIR")
+hf_hub_download(repo_id="RunDiffusion/Juggernaut-XL-Lightning", filename="Juggernaut_RunDiffusionPhoto2_Lightning_4Steps.safetensors", local_dir="RunDiffusion_Juggernaut-XL-Lightning")
 
 parser = argparse.ArgumentParser()
 parser.add_argument("--opt", type=str, default='options/SUPIR_v0.yaml')
@@ -67,6 +69,9 @@ if torch.cuda.device_count() > 0:
 
 @spaces.GPU(duration=120)
 def stage1_process(input_image, gamma_correction):
+    return None
+
+def stage1_process2(input_image, gamma_correction):
     if torch.cuda.device_count() == 0:
         gr.Warning('Set this space to GPU config to make it work.')
         return None
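For context, the file follows the usual ZeroGPU Space setup: model weights are fetched at import time with hf_hub_download, and each GPU-bound Gradio handler is wrapped in @spaces.GPU so the Space only holds a device for the duration of a call. Below is a minimal, hypothetical sketch of that pattern, not code from this commit; the restore handler and the Blocks wiring are placeholders, and only the hf_hub_download call mirrors a line from the diff above.

# Minimal sketch (not part of this commit) of the download-then-guard pattern
# used in gradio_demo.py. The handler and UI wiring below are hypothetical.
import gradio as gr
import spaces
import torch
from huggingface_hub import hf_hub_download

# Fetch a checkpoint once at startup; the file is cached under local_dir
# and the local path is returned.
ckpt_path = hf_hub_download(
    repo_id="camenduru/SUPIR",
    filename="SUPIR-v0Q.ckpt",
    local_dir="yushan777_SUPIR",
)

@spaces.GPU(duration=120)  # ZeroGPU: borrow a GPU for up to 120 s per call
def restore(input_image):
    if torch.cuda.device_count() == 0:
        # Same guard as stage1_process2: warn instead of crashing on CPU-only hardware.
        gr.Warning('Set this space to GPU config to make it work.')
        return None
    # Real processing would load ckpt_path and run the model here.
    return input_image

with gr.Blocks() as demo:
    inp = gr.Image()
    out = gr.Image()
    gr.Button("Run").click(restore, inputs=inp, outputs=out)

demo.launch()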