From bd8ffd36bf1f42ece520fe6391fe680a23304c38 Mon Sep 17 00:00:00 2001
From: Lincoln Stein
Date: Tue, 18 Apr 2023 19:20:38 -0400
Subject: [PATCH] bump to diffusers 0.15.1, remove dangling module

---
 .../stable_diffusion/diffusion/cross_attention_control.py | 4 ----
 pyproject.toml                                             | 2 +-
 2 files changed, 1 insertion(+), 5 deletions(-)

diff --git a/invokeai/backend/stable_diffusion/diffusion/cross_attention_control.py b/invokeai/backend/stable_diffusion/diffusion/cross_attention_control.py
index 2d87527518..d6c90503fe 100644
--- a/invokeai/backend/stable_diffusion/diffusion/cross_attention_control.py
+++ b/invokeai/backend/stable_diffusion/diffusion/cross_attention_control.py
@@ -11,7 +11,6 @@ import psutil
 import torch
 from compel.cross_attention_control import Arguments
 from diffusers.models.attention_processor import AttentionProcessor
-from diffusers.models.unet_2d_condition import UNet2DConditionModel
 from torch import nn
 
 from ...util import torch_dtype
@@ -408,12 +407,9 @@ def override_cross_attention(model, context: Context, is_running_diffusers=False
 def get_cross_attention_modules(
     model, which: CrossAttentionType
 ) -> list[tuple[str, InvokeAICrossAttentionMixin]]:
-    from ldm.modules.attention import CrossAttention  # avoid circular import - TODO: rename as in diffusers?
 
     cross_attention_class: type = (
         InvokeAIDiffusersCrossAttention
-        if isinstance(model, UNet2DConditionModel)
-        else CrossAttention
     )
     which_attn = "attn1" if which is CrossAttentionType.SELF else "attn2"
     attention_module_tuples = [
diff --git a/pyproject.toml b/pyproject.toml
index 465deb8b25..af197507ef 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -40,7 +40,7 @@ dependencies = [
     "clip_anytorch",  # replacing "clip @ https://github.com/openai/CLIP/archive/eaa22acb90a5876642d0507623e859909230a52d.zip",
     "compel==1.0.5",
     "datasets",
-    "diffusers[torch]==0.15.*",
+    "diffusers[torch]==0.15.1",
     "dnspython==2.2.1",
     "einops",
     "eventlet",