Falln87 committed
Commit 44594ab · verified · 1 Parent(s): 898a621

Update app.py

Files changed (1)
  1. app.py +9 -1
app.py CHANGED
@@ -4,13 +4,21 @@ import random
import torch
from diffusers import StableDiffusion3Pipeline

+ # Access Secrets, token authentication for gated models
+ import os
+ HF_TOKEN = os.getenv('HF_TOKEN')
+
+ # Hardware Selection
device = "cuda" if torch.cuda.is_available() else "cpu"

+ # GPU support
if torch.cuda.is_available():
    torch.cuda.max_memory_allocated(device=device)
-   pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3-medium-diffusers", torch_dtype=torch.float16)
+   pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3-medium-diffusers", token=HF_TOKEN, torch_dtype=torch.float16)
    pipe.enable_xformers_memory_efficient_attention()
    pipe = pipe.to(device)
+
+ # CPU Support
else:
    pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3-medium-diffusers", torch_dtype=torch.float16)
    pipe = pipe.to(device)
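
For reference, a minimal self-contained sketch of the pattern this commit sets up: read the HF_TOKEN Space secret, authenticate against the Hub, and load the gated SD3 checkpoint on whatever hardware is available. This is a sketch under assumptions, not the Space's exact app.py: it authenticates once with huggingface_hub.login() instead of passing token= to from_pretrained, falls back to float32 on CPU, and only enables xformers attention when the package is actually available.

import os

import torch
from diffusers import StableDiffusion3Pipeline
from huggingface_hub import login

# Authenticate once with the Space secret so every Hub download can access
# gated repositories; skip silently if the secret is not set.
hf_token = os.getenv("HF_TOKEN")
if hf_token:
    login(token=hf_token)

# Pick the device and a matching dtype: fp16 keeps GPU memory in check,
# while CPU inference is more reliable in fp32.
device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if device == "cuda" else torch.float32

pipe = StableDiffusion3Pipeline.from_pretrained(
    "stabilityai/stable-diffusion-3-medium-diffusers",
    torch_dtype=dtype,
)
pipe = pipe.to(device)

if device == "cuda":
    # Memory-efficient attention is optional; ignore it if xformers is absent.
    try:
        pipe.enable_xformers_memory_efficient_attention()
    except Exception:
        pass

Keeping the token in a local variable and handing it to login() avoids threading token= through every from_pretrained call, and the same pipe object is produced on either device, mirroring the GPU/CPU branching in the diff above.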