Skip to content

Commit 3b558e0

Browse files
committed
Comment out the untraceable log warning for PEFT.
1 parent d76426d commit 3b558e0

File tree

1 file changed

+7
-7
lines changed

1 file changed

+7
-7
lines changed

src/diffusers/models/transformers/transformer_sd3.py

+7-7
Original file line numberDiff line numberDiff line change
@@ -278,13 +278,13 @@ def forward(
278278
else:
279279
lora_scale = 1.0
280280

281-
if USE_PEFT_BACKEND:
282-
# weight the lora layers by setting `lora_scale` for each PEFT layer
283-
scale_lora_layers(self, lora_scale)
284-
else:
285-
logger.warning(
286-
"Passing `scale` via `joint_attention_kwargs` when not using the PEFT backend is ineffective."
287-
)
281+
# if USE_PEFT_BACKEND:
282+
# # weight the lora layers by setting `lora_scale` for each PEFT layer
283+
# scale_lora_layers(self, lora_scale)
284+
# else:
285+
# logger.warning(
286+
# "Passing `scale` via `joint_attention_kwargs` when not using the PEFT backend is ineffective."
287+
# )
288288

289289
height, width = hidden_states.shape[-2:]
290290

0 commit comments

Comments (0)