Update inference_manager.py
inference_manager.py (+2 -2)
```diff
@@ -472,7 +472,7 @@ class ModelManager:
             raise Exception(f"invalid model_id {model_id}")
         if not model.ip_adapter_faceid_pipeline:
             raise Exception(f"model does not support ip adapter")
-
+        ip_model = model.ip_adapter_faceid_pipeline
         cfg = model.cfg
         p = inference_params.get("prompt")
         negative_prompt = inference_params.get("negative_prompt", cfg.get("negative_prompt", ""))
@@ -510,7 +510,7 @@ class ModelManager:
         prompt_str = cfg.get("prompt", "{prompt}").replace("{prompt}", p)
         generator = torch.Generator(pipe.device).manual_seed(seed)
         print(f"generate: p={p}, np={np}, steps={steps}, guidance_scale={guidance_scale}, size={width},{height}, seed={seed}")
-        images =
+        images = ip_model.generate(
             prompt=prompt_str,
             negative_prompt=negative_prompt,
             faceid_embeds=average_embedding,
```
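For context, here is a hedged sketch of how an IP-Adapter FaceID pipeline like the one stored in `model.ip_adapter_faceid_pipeline` is typically built and invoked, following the tencent-ailab/IP-Adapter examples. The base model id, checkpoint filename, prompts, and the random `average_embedding` placeholder are assumptions for illustration, not taken from this repository.

```python
import torch
from diffusers import StableDiffusionPipeline
from ip_adapter.ip_adapter_faceid import IPAdapterFaceID

# Base SD 1.5-family pipeline; the model id is a placeholder.
pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
)

# Wrap the pipeline with the FaceID adapter; the .bin checkpoint
# comes from the IP-Adapter-FaceID release.
ip_model = IPAdapterFaceID(pipe, "ip-adapter-faceid_sd15.bin", "cuda")

# Stand-in for the averaged InsightFace embedding the manager passes
# in as average_embedding: a (1, 512) face-identity tensor.
average_embedding = torch.randn(1, 512)

images = ip_model.generate(
    prompt="a photo of a person, best quality",
    negative_prompt="lowres, blurry, worst quality",
    faceid_embeds=average_embedding,
    num_samples=1,
    width=512,
    height=512,
    num_inference_steps=30,
    seed=42,
)
```

Binding the wrapper to a local `ip_model` right after the capability check, as the patch does, gives the previously dangling `images =` assignment a valid receiver and keeps the attribute read in one place.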