jimmycarter committed on
Commit
a23a8bc
1 Parent(s): 7d024b3

Upload 2 files

Browse files
Files changed (1) hide show
  1. pipeline.py +4 -4
pipeline.py CHANGED
@@ -1617,12 +1617,12 @@ class CustomPipeline(DiffusionPipeline, SD3LoraLoaderMixin):
1617
  pooled_prompt_embeds_input = torch.cat([negative_pooled_prompt_embeds, pooled_prompt_embeds], dim=0)
1618
 
1619
  # # Concatenate text IDs if they are used
1620
- # if text_ids is not None and negative_text_ids is not None:
1621
- # text_ids_input = torch.cat([negative_text_ids, text_ids], dim=0)
1622
 
1623
  # Concatenate latent image IDs if they are used
1624
- # if latent_image_ids is not None:
1625
- # latent_image_ids_input = torch.cat([latent_image_ids, latent_image_ids], dim=0)
1626
 
1627
  # Concatenate prompt masks if they are used
1628
  if prompt_mask is not None and negative_mask is not None:
 
1617
  pooled_prompt_embeds_input = torch.cat([negative_pooled_prompt_embeds, pooled_prompt_embeds], dim=0)
1618
 
1619
  # # Concatenate text IDs if they are used
1620
+ if text_ids is not None and negative_text_ids is not None:
1621
+ text_ids_input = torch.cat([negative_text_ids, text_ids], dim=0)
1622
 
1623
  # Concatenate latent image IDs if they are used
1624
+ if latent_image_ids is not None:
1625
+ latent_image_ids_input = torch.cat([latent_image_ids, latent_image_ids], dim=0)
1626
 
1627
  # Concatenate prompt masks if they are used
1628
  if prompt_mask is not None and negative_mask is not None: