Skip to content

Commit e747fe4

Browse files
authored
Fix wrapped transformer config access in Flux2 Klein training (#13219)
1 parent 46bd005 commit e747fe4

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

examples/dreambooth/train_dreambooth_lora_flux2_klein.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1715,7 +1715,7 @@ def get_sigmas(timesteps, n_dim=4, dtype=torch.float32):
                 packed_noisy_model_input = Flux2KleinPipeline._pack_latents(noisy_model_input)

                 # handle guidance
-                if transformer.config.guidance_embeds:
+                if unwrap_model(transformer).config.guidance_embeds:
                     guidance = torch.full([1], args.guidance_scale, device=accelerator.device)
                     guidance = guidance.expand(model_input.shape[0])
                 else:

examples/dreambooth/train_dreambooth_lora_flux2_klein_img2img.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1682,7 +1682,7 @@ def get_sigmas(timesteps, n_dim=4, dtype=torch.float32):
                 model_input_ids = torch.cat([model_input_ids, cond_model_input_ids], dim=1)

                 # handle guidance
-                if transformer.config.guidance_embeds:
+                if unwrap_model(transformer).config.guidance_embeds:
                     guidance = torch.full([1], args.guidance_scale, device=accelerator.device)
                     guidance = guidance.expand(model_input.shape[0])
                 else:

0 commit comments

Comments (0)