
Commit

thank you linter
AUTOMATIC1111 committed Jul 13, 2023
1 parent 6c5f83b commit dc39061
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions extensions-builtin/Lora/lora.py
@@ -229,9 +229,9 @@ def load_lora(name, lora_on_disk):
         elif type(sd_module) == torch.nn.Conv2d and weight.shape[2:] == (3, 3):
             module = torch.nn.Conv2d(weight.shape[1], weight.shape[0], (3, 3), bias=False)
         else:
-            print(f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}')
+            print(f'Lora layer {key_lora} matched a layer with unsupported type: {type(sd_module).__name__}')
             continue
-            raise AssertionError(f"Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}")
+            raise AssertionError(f"Lora layer {key_lora} matched a layer with unsupported type: {type(sd_module).__name__}")

         with torch.no_grad():
             module.weight.copy_(weight)
@@ -243,7 +243,7 @@ def load_lora(name, lora_on_disk):
         elif lora_key == "lora_down.weight":
             lora_module.down = module
         else:
-            raise AssertionError(f"Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha")
+            raise AssertionError(f"Bad Lora layer name: {key_lora} - must end in lora_up.weight, lora_down.weight or alpha")

     if keys_failed_to_match:
         print(f"Failed to match keys when loading Lora {lora_on_disk.filename}: {keys_failed_to_match}")
