NIRVANALAN committed on
Commit
1786dd6
1 Parent(s): 76e9ff7
ldm/modules/attention.py CHANGED
@@ -17,7 +17,8 @@ from xformers.ops import MemoryEfficientAttentionFlashAttentionOp
 try:
     from apex.normalization import FusedRMSNorm as RMSNorm
 except:
-    from dit.norm import RMSNorm
+    # from dit.norm import RMSNorm
+    from diffusers.models.normalization import RMSNorm
 
 
 def exists(val):
vit/vision_transformer.py CHANGED
@@ -37,7 +37,8 @@ from pdb import set_trace as st
 try:
     from apex.normalization import FusedRMSNorm as RMSNorm
 except:
-    from dit.norm import RMSNorm
+    # from dit.norm import RMSNorm
+    from diffusers.models.normalization import RMSNorm
 
 # from apex.normalization import FusedLayerNorm as LayerNorm
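
Both hunks make the same change: when apex's FusedRMSNorm is not installed, the fallback RMSNorm now comes from diffusers instead of the repo-local dit.norm module. Below is a minimal, self-contained sketch of the resulting import chain plus a quick shape check. It assumes a diffusers version that ships models.normalization.RMSNorm (roughly >= 0.21) and that its constructor accepts (dim, eps); the tensor sizes are illustrative only, not taken from the repository.

# Sketch of the fallback chain introduced by this commit (not the full module).
import torch

try:
    # Preferred path: fused CUDA kernel from NVIDIA apex, if available.
    from apex.normalization import FusedRMSNorm as RMSNorm
except ImportError:
    # Fallback after this commit: pure-PyTorch RMSNorm shipped with diffusers
    # (previously the local dit.norm implementation was imported here).
    from diffusers.models.normalization import RMSNorm

# Smoke test: normalize the last dimension of a dummy activation.
# The (dim, eps) call signature is an assumption about the diffusers class.
norm = RMSNorm(64, eps=1e-6)
x = torch.randn(2, 16, 64)
print(norm(x).shape)  # expected: torch.Size([2, 16, 64])

Because both classes are bound to the same RMSNorm name, downstream code in attention.py and vision_transformer.py is unaffected regardless of which import succeeds.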