1 parent a812fb6 commit ef95907
tests/models/test_models_unet_3d_condition.py
@@ -261,7 +261,7 @@ def test_lora_save_load(self):
         with torch.no_grad():
             new_sample = new_model(**inputs_dict, cross_attention_kwargs={"scale": 0.5}).sample
 
-        assert (sample - new_sample).abs().max() < 5e-4
+        assert (sample - new_sample).abs().max() < 1e-3
 
         # LoRA and no LoRA should NOT be the same
         assert (sample - old_sample).abs().max() > 1e-4
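For context, the sketch below illustrates the tolerance pattern this change loosens; it is not the diffusers test itself, and the tensor shape, the offsets, and the max_abs_diff helper are hypothetical stand-ins. The idea: the output of a reloaded LoRA model must match the original within a maximum absolute difference (relaxed here from 5e-4 to 1e-3), while still differing from the no-LoRA output by more than 1e-4.

import torch

def max_abs_diff(a: torch.Tensor, b: torch.Tensor) -> float:
    # Largest element-wise absolute difference between two tensors.
    return (a - b).abs().max().item()

# Hypothetical stand-ins for the three outputs compared in the test.
sample = torch.randn(1, 4, 8, 32, 32)   # LoRA output with scale=0.5
new_sample = sample + 2e-4              # output after saving and reloading the LoRA weights
old_sample = sample + 1e-3              # output without LoRA applied

# The reloaded LoRA output must stay within the loosened tolerance of 1e-3 ...
assert max_abs_diff(sample, new_sample) < 1e-3
# ... while the no-LoRA output must differ by more than 1e-4.
assert max_abs_diff(sample, old_sample) > 1e-4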