Commit 0071478

yiyixuxu authored (with co-author)
allow attention processors to have different signatures (#6915)
add

Co-authored-by: yiyixuxu <yixu310@gmail,com>
1 parent 7c8cab3 commit 0071478

File tree: 1 file changed (+10 −0 lines)


src/diffusers/models/attention_processor.py

Lines changed: 10 additions, 0 deletions
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import inspect
 from importlib import import_module
 from typing import Callable, Optional, Union

@@ -509,6 +510,15 @@ def forward(
         # The `Attention` class can call different attention processors / attention functions
         # here we simply pass along all tensors to the selected processor class
         # For standard processors that are defined here, `**cross_attention_kwargs` is empty
+
+        attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys())
+        unused_kwargs = [k for k, _ in cross_attention_kwargs.items() if k not in attn_parameters]
+        if len(unused_kwargs) > 0:
+            logger.warning(
+                f"cross_attention_kwargs {unused_kwargs} are not expected by {self.processor.__class__.__name__} and will be ignored."
+            )
+        cross_attention_kwargs = {k: w for k, w in cross_attention_kwargs.items() if k in attn_parameters}
+
         return self.processor(
             self,
             hidden_states,
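For context, the guard added above can be sketched in isolation: inspect.signature exposes the parameter names a callable accepts, so keyword arguments the processor cannot handle can be warned about and dropped before dispatch, instead of raising a TypeError. The following is a minimal standalone sketch of that pattern; LegacyProcessor and call_processor are hypothetical stand-ins for illustration, not diffusers classes.

    import inspect
    import logging

    logger = logging.getLogger(__name__)


    class LegacyProcessor:
        # Hypothetical processor whose __call__ does not accept `scale`.
        def __call__(self, hidden_states):
            return hidden_states


    def call_processor(processor, hidden_states, **cross_attention_kwargs):
        # Parameter names the processor's __call__ actually accepts
        # (`self` is excluded because __call__ is a bound method here).
        attn_parameters = set(inspect.signature(processor.__call__).parameters.keys())
        # Warn about kwargs the processor cannot handle, then drop them.
        unused_kwargs = [k for k in cross_attention_kwargs if k not in attn_parameters]
        if unused_kwargs:
            logger.warning(
                f"cross_attention_kwargs {unused_kwargs} are not expected by "
                f"{processor.__class__.__name__} and will be ignored."
            )
        cross_attention_kwargs = {
            k: v for k, v in cross_attention_kwargs.items() if k in attn_parameters
        }
        return processor(hidden_states, **cross_attention_kwargs)


    # Passing an unsupported kwarg no longer raises TypeError; it is filtered out
    # with a warning, and the processor runs with the kwargs it does accept.
    out = call_processor(LegacyProcessor(), hidden_states=[1.0, 2.0], scale=0.5)

One consequence of matching on exact parameter names is that a processor declaring **kwargs exposes only the name "kwargs" in its signature, so specific keys would still be filtered out under this scheme.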

Comments (0)