Missed this one.

This commit is contained in:
comfyanonymous 2023-12-05 12:48:41 -05:00
parent 9b655d4fd7
commit 1bbd65ab30

View File

@@ -384,7 +384,7 @@ class BasicTransformerBlock(nn.Module):
self.is_res = inner_dim == dim
if self.ff_in:
-            self.norm_in = nn.LayerNorm(dim, dtype=dtype, device=device)
+            self.norm_in = operations.LayerNorm(dim, dtype=dtype, device=device)
self.ff_in = FeedForward(dim, dim_out=inner_dim, dropout=dropout, glu=gated_ff, dtype=dtype, device=device, operations=operations)
self.disable_self_attn = disable_self_attn