Fabrice-TIERCELIN committed
Commit 0d06c7a
1 Parent(s): 04586ec

Your computer must not enter into standby mode

Files changed (1)
  1. app.py +7 -7
app.py CHANGED
@@ -1,6 +1,3 @@
-from diffusers import AutoPipelineForImage2Image
-from PIL import Image, ImageFilter
-
 import gradio as gr
 import numpy as np
 import time
@@ -9,11 +6,15 @@ import random
 import imageio
 import torch
 
+from diffusers import AutoPipelineForImage2Image
+from PIL import Image, ImageFilter
+
 max_64_bit_int = 2**63 - 1
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 floatType = torch.float16 if torch.cuda.is_available() else torch.float32
 variant = "fp16" if torch.cuda.is_available() else None
+
 pipe = AutoPipelineForImage2Image.from_pretrained("stabilityai/sdxl-turbo", torch_dtype = floatType, variant = variant)
 pipe = pipe.to(device)
 
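For reference, this hunk only moves the diffusers and PIL imports below the other imports; the pipeline setup itself is untouched. A minimal standalone sketch of the same loading pattern (assuming only that torch and diffusers are installed, and reusing the stabilityai/sdxl-turbo checkpoint named above):

import torch
from diffusers import AutoPipelineForImage2Image

# Use half precision and the "fp16" weight variant only when a CUDA GPU is
# available; on CPU, fall back to float32 and the default weight files.
device = "cuda" if torch.cuda.is_available() else "cpu"
floatType = torch.float16 if torch.cuda.is_available() else torch.float32
variant = "fp16" if torch.cuda.is_available() else None

# AutoPipelineForImage2Image resolves the matching image-to-image pipeline
# class for the checkpoint (an SDXL-Turbo model in this app).
pipe = AutoPipelineForImage2Image.from_pretrained("stabilityai/sdxl-turbo", torch_dtype = floatType, variant = variant)
pipe = pipe.to(device)

Loading the fp16 variant roughly halves the weight download and GPU memory use, while the float32/None fallback keeps the Space usable on CPU.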
 
@@ -46,7 +47,7 @@ def check(
     if prompt is None or prompt == "":
         raise gr.Error("Please provide a prompt input.")
 
-def inpaint(
+def redraw(
     source_img,
     prompt,
     negative_prompt,
@@ -229,7 +230,7 @@ with gr.Blocks() as interface:
         denoising_steps,
         seed,
         debug_mode
-    ], outputs = [], queue = False, show_progress = False).success(inpaint, inputs = [
+    ], outputs = [], queue = False, show_progress = False).success(redraw, inputs = [
         source_img,
         prompt,
         negative_prompt,
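The renamed callback is wired through Gradio's event chain: the click event first runs the lightweight check function, and .success() runs redraw only if check finished without raising gr.Error. A minimal sketch of that pattern, with hypothetical component names standing in for the app's full input list:

import gradio as gr

def check(prompt):
    # Validation only: raising gr.Error aborts the chain and shows the
    # message in the UI, so the expensive generation step never starts.
    if prompt is None or prompt == "":
        raise gr.Error("Please provide a prompt input.")

def redraw(prompt):
    # Placeholder standing in for the real image-to-image generation.
    return "would redraw using prompt: " + prompt

with gr.Blocks() as interface:
    prompt = gr.Textbox(label = "Prompt")
    result = gr.Textbox(label = "Result")
    submit = gr.Button("Redraw")

    # queue = False lets the quick validation run immediately; .success()
    # only fires redraw when check() did not raise.
    submit.click(check, inputs = [prompt], outputs = [], queue = False,
                 show_progress = False).success(redraw, inputs = [prompt],
                                                outputs = [result])

interface.launch()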
@@ -247,6 +248,7 @@ with gr.Blocks() as interface:
     ], scroll_to_output = True)
 
     gr.Examples(
+        fn = redraw,
         inputs = [
             source_img,
             prompt,
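Passing fn = redraw to gr.Examples ties the example rows to the callback: when example caching or run-on-click is enabled, Gradio feeds each row to redraw, so every row has to line up positionally with the inputs list. That is presumably why the next two hunks drop randomize_seed and its True value from the example data. A rough sketch of the mechanism, with abbreviated, hypothetical inputs and example values:

import gradio as gr

def redraw(prompt, seed):
    # Placeholder standing in for the real generation function.
    return "prompt = " + prompt + ", seed = " + str(seed)

with gr.Blocks() as interface:
    prompt = gr.Textbox(label = "Prompt")
    seed = gr.Number(label = "Seed")
    result = gr.Textbox(label = "Result")

    # Each example row must match `inputs`, which in turn must match
    # redraw's signature whenever fn is used to compute outputs.
    gr.Examples(
        fn = redraw,
        examples = [["A photorealistic cat", 42]],
        inputs = [prompt, seed],
        outputs = [result],
        cache_examples = False,  # set True (or run_on_click = True) to have Gradio actually call redraw
    )

interface.launch()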
@@ -256,7 +258,6 @@ with gr.Blocks() as interface:
             image_guidance_scale,
             strength,
             denoising_steps,
-            randomize_seed,
             seed,
             debug_mode
         ],
@@ -275,7 +276,6 @@ with gr.Blocks() as interface:
             1.1,
             0.6,
             1000,
-            True,
             42,
             False
         ],