-
Notifications
You must be signed in to change notification settings. Fork count: 6.4k
[LoRA] make set_adapters() method more robust. #9535
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 6 commits
f20c022
1fa581c
7bd6dd9
cf15c35
b0dc60d
b5eed9a
33bee5d
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -929,12 +929,24 @@ def test_simple_inference_with_text_denoiser_multi_adapter(self): | |
|
|
||
| pipe.set_adapters("adapter-1") | ||
| output_adapter_1 = pipe(**inputs, generator=torch.manual_seed(0))[0] | ||
| self.assertFalse( | ||
| np.allclose(output_no_lora, output_adapter_1, atol=1e-3, rtol=1e-3), | ||
| "Adapter outputs should be different.", | ||
| ) | ||
|
|
||
| pipe.set_adapters("adapter-2") | ||
| output_adapter_2 = pipe(**inputs, generator=torch.manual_seed(0))[0] | ||
| self.assertFalse( | ||
| np.allclose(output_no_lora, output_adapter_2, atol=1e-3, rtol=1e-3), | ||
| "Adapter outputs should be different.", | ||
| ) | ||
|
|
||
| pipe.set_adapters(["adapter-1", "adapter-2"]) | ||
| output_adapter_mixed = pipe(**inputs, generator=torch.manual_seed(0))[0] | ||
| self.assertFalse( | ||
| np.allclose(output_no_lora, output_adapter_mixed, atol=1e-3, rtol=1e-3), | ||
| "Adapter outputs should be different.", | ||
| ) | ||
|
|
||
| # Fuse and unfuse should lead to the same results | ||
| self.assertFalse( | ||
|
|
@@ -960,6 +972,38 @@ def test_simple_inference_with_text_denoiser_multi_adapter(self): | |
| "output with no lora and output with lora disabled should give same results", | ||
| ) | ||
|
|
||
| def test_wrong_adapter_name_raises_error(self): | ||
| scheduler_cls = self.scheduler_classes[0] | ||
| components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls) | ||
| pipe = self.pipeline_class(**components) | ||
| pipe = pipe.to(torch_device) | ||
| pipe.set_progress_bar_config(disable=None) | ||
| _, _, inputs = self.get_dummy_inputs(with_generator=False) | ||
|
|
||
| if "text_encoder" in self.pipeline_class._lora_loadable_modules: | ||
| pipe.text_encoder.add_adapter(text_lora_config, "adapter-1") | ||
| self.assertTrue(check_if_lora_correctly_set(pipe.text_encoder), "Lora not correctly set in text encoder") | ||
|
|
||
| denoiser = pipe.transformer if self.unet_kwargs is None else pipe.unet | ||
| denoiser.add_adapter(denoiser_lora_config, "adapter-1") | ||
| self.assertTrue(check_if_lora_correctly_set(denoiser), "Lora not correctly set in denoiser.") | ||
|
|
||
| if self.has_two_text_encoders or self.has_three_text_encoders: | ||
| if "text_encoder_2" in self.pipeline_class._lora_loadable_modules: | ||
| pipe.text_encoder_2.add_adapter(text_lora_config, "adapter-1") | ||
| self.assertTrue( | ||
| check_if_lora_correctly_set(pipe.text_encoder_2), "Lora not correctly set in text encoder 2" | ||
| ) | ||
|
|
||
| with self.assertRaises(ValueError) as err_context: | ||
|
Reviewer comment: "Could use [suggestion truncated in page capture]." Author reply: "We just prefer to do a bit more explicitly across the codebase (which is what I have followed here)." (Hidden-comment boilerplate — "There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more." — appeared once per comment in the original page.)
||
| pipe.set_adapters("test") | ||
|
|
||
| self.assertTrue("not in the list of present adapters" in str(err_context.exception)) | ||
|
|
||
| # test this works. | ||
| pipe.set_adapters("adapter-1") | ||
| _ = pipe(**inputs, generator=torch.manual_seed(0))[0] | ||
|
|
||
| def test_simple_inference_with_text_denoiser_block_scale(self): | ||
| """ | ||
| Tests a simple inference with lora attached to text encoder and unet, attaches | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.