Falln87 committed · verified
Commit 322bd58 · 1 Parent(s): fc3a1ae

Update app.py

Files changed (1): app.py (+2 -47)
app.py CHANGED
@@ -8,58 +8,13 @@ from diffusers import StableDiffusion3Pipeline
import os
print(os.getenv('HF_TOKEN'))

- #Hardware Selection
- device = "cuda" if torch.cuda.is_available() else "cpu"
-
- # GPU support
- if torch.cuda.is_available():
-     torch.cuda.max_memory_allocated(device=device)
-     pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3-medium-diffusers", token=HF_TOKEN, torch_dtype=torch.float16)
-     pipe.enable_xformers_memory_efficient_attention()
-     pipe = pipe.to(device)
-
- # CPU Support
- else:
-     pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3-medium-diffusers", torch_dtype=torch.float16)
-     pipe = pipe.to(device)
-
- MAX_SEED = np.iinfo(np.int32).max
- MAX_IMAGE_SIZE = 2048
-
- def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
-
-     if randomize_seed:
-         seed = random.randint(0, MAX_SEED)
-
-     generator = torch.Generator().manual_seed(seed)
-
-     image = pipe(
-         prompt = prompt,
-         negative_prompt = negative_prompt,
-         guidance_scale = guidance_scale,
-         num_inference_steps = num_inference_steps,
-         width = width,
-         height = height,
-         generator = generator
-     ).images[0]
-
-     return image
-
- css="""
- #col-container {
-     margin: 0 auto;
-     max-width: 520px;
- }
- """
+ gr.load("models/stabilityai/stable-diffusion-3-diffusers")


with gr.Blocks(css=css) as demo:

    with gr.Column(elem_id="col-container"):
-         gr.Markdown(f"""
-         # FallnAI Text2Image
-         """)
-
+
        with gr.Row():

            prompt = gr.Text(
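For context, the removed block loaded StableDiffusion3Pipeline locally and exposed it through a custom infer() function, while the single added line delegates that work to Gradio's gr.load(), which builds a ready-made demo around a Hub model. Below is a minimal self-contained sketch of that pattern, not the repo's exact file; the Gradio 4.x hf_token argument, reading the token from the HF_TOKEN environment variable (as the old code did), and the explicit launch() call are assumptions.

import os
import gradio as gr

# gr.load() wraps a Hugging Face Hub model with an auto-generated demo,
# replacing the manual pipeline setup and infer() function removed above.
demo = gr.load(
    "models/stabilityai/stable-diffusion-3-diffusers",  # repo id as written in the commit
    hf_token=os.getenv("HF_TOKEN"),  # assumption: SD3 is gated, so a token may be needed
)

demo.launch()

Note that the hunk shown keeps the later gr.Blocks(css=css) block while removing the css definition, so css would still have to be defined elsewhere in app.py for the committed file to run.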