Commit ab81a51

Kevin Turner committed

WIP!: …they weren't in diffusers format…

1 parent: 0ea5581

File tree

1 file changed (+5, -6 lines)


invokeai/backend/patches/lora_conversions/flux_aitoolkit_lora_conversion_utils.py

Lines changed: 5 additions & 6 deletions
@@ -5,9 +5,8 @@
 import torch
 
 from invokeai.backend.patches.layers.base_layer_patch import BaseLayerPatch
-from invokeai.backend.patches.lora_conversions.flux_diffusers_lora_conversion_utils import (
-    lora_layers_from_flux_diffusers_grouped_state_dict,
-)
+from invokeai.backend.patches.layers.utils import any_lora_layer_from_state_dict
+from invokeai.backend.patches.lora_conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX
 from invokeai.backend.patches.model_patch_raw import ModelPatchRaw
 
 
@@ -34,8 +33,8 @@ def lora_model_from_flux_aitoolkit_state_dict(state_dict: dict[str, torch.Tensor
         else:
             raise ValueError(f"Layer '{layer_name}' does not match the expected pattern for FLUX LoRA weights.")
 
-    layers: dict[str, BaseLayerPatch] = lora_layers_from_flux_diffusers_grouped_state_dict(
-        transformer_grouped_sd, alpha=None
-    )
+    layers: dict[str, BaseLayerPatch] = {}
+    for layer_key, layer_state_dict in transformer_grouped_sd.items():
+        layers[FLUX_LORA_TRANSFORMER_PREFIX + layer_key] = any_lora_layer_from_state_dict(layer_state_dict)
 
     return ModelPatchRaw(layers=layers)
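
For context, a minimal sketch of the conversion path this commit switches to: instead of routing through the diffusers-format helper, each per-layer state dict in the grouped dict is converted directly with any_lora_layer_from_state_dict and stored under its layer key prefixed with FLUX_LORA_TRANSFORMER_PREFIX. The layer key, tensor names, and shapes below are illustrative assumptions, not values from this commit.

# Sketch only: the grouped-dict contents here (layer key, "lora_up"/"lora_down"
# tensor names, shapes) are assumed for illustration, not taken from the commit.
import torch

from invokeai.backend.patches.layers.utils import any_lora_layer_from_state_dict
from invokeai.backend.patches.lora_conversions.flux_lora_constants import FLUX_LORA_TRANSFORMER_PREFIX

# Hypothetical grouped state dict: one entry per transformer submodule.
transformer_grouped_sd = {
    "double_blocks.0.img_attn.qkv": {
        "lora_up.weight": torch.zeros(9216, 16),    # (out_features, rank)
        "lora_down.weight": torch.zeros(16, 3072),  # (rank, in_features)
    },
}

# Mirrors the loop added in this commit: convert each layer's state dict into a
# patch layer and key it with the transformer prefix.
layers = {
    FLUX_LORA_TRANSFORMER_PREFIX + layer_key: any_lora_layer_from_state_dict(layer_sd)
    for layer_key, layer_sd in transformer_grouped_sd.items()
}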
