刘虹雨 committed on
Commit
57da35d
·
1 Parent(s): 427d074

update code

Browse files
DiT_VAE/diffusion/model/nets/PixArt_blocks.py CHANGED
@@ -49,7 +49,7 @@ class MultiHeadCrossAttention(nn.Module):
49
  k, v = kv.unbind(2)
50
  attn_bias = None
51
  if mask is not None:
52
- attn_bias = xformers.ops.fmha.BlockDiagonalMask.from_seqlens([N] * B, mask)
53
  x = xformers.ops.memory_efficient_attention(q, k, v, p=self.attn_drop.p, attn_bias=attn_bias)
54
  x = x.view(B, -1, C)
55
  x = self.proj(x)
 
49
  k, v = kv.unbind(2)
50
  attn_bias = None
51
  if mask is not None:
52
+ attn_bias = xformers.ops.fmha.attn_bias.BlockDiagonalMask.from_seqlens([N] * B, mask)
53
  x = xformers.ops.memory_efficient_attention(q, k, v, p=self.attn_drop.p, attn_bias=attn_bias)
54
  x = x.view(B, -1, C)
55
  x = self.proj(x)