fix issue with Styles when Hires prompt is used #15269

Merged · 1 commit · Mar 16, 2024
31 changes: 20 additions & 11 deletions modules/infotext_utils.py
@@ -265,17 +265,6 @@ def parse_generation_parameters(x: str, skip_fields: list[str] | None = None):
         else:
             prompt += ("" if prompt == "" else "\n") + line

-    if shared.opts.infotext_styles != "Ignore":
-        found_styles, prompt, negative_prompt = shared.prompt_styles.extract_styles_from_prompt(prompt, negative_prompt)
-
-        if shared.opts.infotext_styles == "Apply":
-            res["Styles array"] = found_styles
-        elif shared.opts.infotext_styles == "Apply if any" and found_styles:
-            res["Styles array"] = found_styles
-
-    res["Prompt"] = prompt
-    res["Negative prompt"] = negative_prompt
-
     for k, v in re_param.findall(lastline):
         try:
             if v[0] == '"' and v[-1] == '"':
@@ -290,6 +279,26 @@ def parse_generation_parameters(x: str, skip_fields: list[str] | None = None):
         except Exception:
             print(f"Error parsing \"{k}: {v}\"")

+    # Extract styles from prompt
+    if shared.opts.infotext_styles != "Ignore":
+        found_styles, prompt_no_styles, negative_prompt_no_styles = shared.prompt_styles.extract_styles_from_prompt(prompt, negative_prompt)
+
+        same_hr_styles = True
+        if ("Hires prompt" in res or "Hires negative prompt" in res) and (infotext_ver > infotext_versions.v180_hr_styles if (infotext_ver := infotext_versions.parse_version(res.get("Version"))) else True):
+            hr_prompt, hr_negative_prompt = res.get("Hires prompt", prompt), res.get("Hires negative prompt", negative_prompt)
+            hr_found_styles, hr_prompt_no_styles, hr_negative_prompt_no_styles = shared.prompt_styles.extract_styles_from_prompt(hr_prompt, hr_negative_prompt)
+            if same_hr_styles := found_styles == hr_found_styles:
+                res["Hires prompt"] = '' if hr_prompt_no_styles == prompt_no_styles else hr_prompt_no_styles
+                res['Hires negative prompt'] = '' if hr_negative_prompt_no_styles == negative_prompt_no_styles else hr_negative_prompt_no_styles
+
+        if same_hr_styles:
+            prompt, negative_prompt = prompt_no_styles, negative_prompt_no_styles
+            if (shared.opts.infotext_styles == "Apply if any" and found_styles) or shared.opts.infotext_styles == "Apply":
+                res['Styles array'] = found_styles
+
+    res["Prompt"] = prompt
+    res["Negative prompt"] = negative_prompt
+
     # Missing CLIP skip means it was set to 1 (the default)
     if "Clip skip" not in res:
         res["Clip skip"] = "1"
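Read on its own, the new block above does the following: parse the key/value parameters first, then extract styles, and only strip styles out of the prompts (and report them once in "Styles array") when the main prompt and the hires prompt contain the same styles. A minimal self-contained sketch of that idea follows; the stub extract_styles_from_prompt and the sample prompts are invented for illustration, while in the webui the real work is done by shared.prompt_styles.extract_styles_from_prompt.

```python
# Hypothetical stand-in for shared.prompt_styles.extract_styles_from_prompt:
# strip any known style text from the end of the prompts, return style names.
def extract_styles_from_prompt(prompt, negative_prompt, styles):
    found = []
    for name, (pos, neg) in styles.items():
        if prompt.endswith(pos) and negative_prompt.endswith(neg):
            prompt = prompt[:len(prompt) - len(pos)].rstrip(", ")
            negative_prompt = negative_prompt[:len(negative_prompt) - len(neg)].rstrip(", ")
            found.append(name)
    return found, prompt, negative_prompt


styles = {"cinematic": ("cinematic lighting", "blurry")}  # made-up style definition

prompt = "a castle, cinematic lighting"
negative_prompt = "ugly, blurry"
hr_prompt = "a castle, highly detailed, cinematic lighting"
hr_negative_prompt = "ugly, blurry"

found, prompt_no_styles, negative_no_styles = extract_styles_from_prompt(prompt, negative_prompt, styles)
hr_found, hr_prompt_no_styles, hr_negative_no_styles = extract_styles_from_prompt(hr_prompt, hr_negative_prompt, styles)

# Only when both passes find the same styles is it safe to strip them and report
# them once; an empty "Hires prompt" means "same as the main prompt".
if found == hr_found:
    print("Styles array:", found)           # ['cinematic']
    print("Prompt:", prompt_no_styles)       # a castle
    print("Hires prompt:", "" if hr_prompt_no_styles == prompt_no_styles else hr_prompt_no_styles)  # a castle, highly detailed
```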
1 change: 1 addition & 0 deletions modules/infotext_versions.py
@@ -6,6 +6,7 @@
 v160 = version.parse("1.6.0")
 v170_tsnr = version.parse("v1.7.0-225")
 v180 = version.parse("1.8.0")
+v180_hr_styles = version.parse("1.8.0-136") # todo: change to the actual version number after merge


 def parse_version(text):
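The new constant gates the parser change above on the Version field recorded in the infotext: hires styles are only compared for infotexts from builds newer than v180_hr_styles, or with no parsable Version at all. A rough standalone sketch of that gate, assuming the same packaging-based comparison; the simplified parse_version below is a stand-in for infotext_versions.parse_version, and the sample version strings are made up.

```python
from packaging import version

v180_hr_styles = version.parse("1.8.0-136")  # placeholder value from this PR

def parse_version(text):
    # Simplified stand-in for infotext_versions.parse_version: returns None for
    # a missing or unparsable "Version" field.
    if text is None:
        return None
    try:
        return version.parse(text)
    except version.InvalidVersion:
        return None

for version_text in ["1.8.0", "1.9.0-42", None]:
    infotext_ver = parse_version(version_text)
    # Mirrors the condition in parse_generation_parameters: compare hires styles
    # only for builds newer than v180_hr_styles, or when no Version is recorded.
    compare_hr_styles = infotext_ver > v180_hr_styles if infotext_ver else True
    print(version_text, "->", compare_hr_styles)
# 1.8.0 -> False, 1.9.0-42 -> True, None -> True
```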
15 changes: 8 additions & 7 deletions modules/processing.py
@@ -702,7 +702,7 @@ def program_version():
     return res


-def create_infotext(p, all_prompts, all_seeds, all_subseeds, comments=None, iteration=0, position_in_batch=0, use_main_prompt=False, index=None, all_negative_prompts=None):
+def create_infotext(p, all_prompts, all_seeds, all_subseeds, comments=None, iteration=0, position_in_batch=0, use_main_prompt=False, index=None, all_negative_prompts=None, all_hr_prompts=None, all_hr_negative_prompts=None):
     if index is None:
         index = position_in_batch + iteration * p.batch_size

@@ -745,11 +745,18 @@ def create_infotext(p, all_prompts, all_seeds, all_subseeds, comments=None, iter
"RNG": opts.randn_source if opts.randn_source != "GPU" else None,
"NGMS": None if p.s_min_uncond == 0 else p.s_min_uncond,
"Tiling": "True" if p.tiling else None,
"Hires prompt": None, # This is set later, insert here to keep order
"Hires negative prompt": None, # This is set later, insert here to keep order
**p.extra_generation_params,
"Version": program_version() if opts.add_version_to_infotext else None,
"User": p.user if opts.add_user_name_to_info else None,
}

if all_hr_prompts := all_hr_prompts or getattr(p, 'all_hr_prompts', None):
generation_params['Hires prompt'] = all_hr_prompts[index] if all_hr_prompts[index] != all_prompts[index] else None
if all_hr_negative_prompts := all_hr_negative_prompts or getattr(p, 'all_hr_negative_prompts', None):
generation_params['Hires negative prompt'] = all_hr_negative_prompts[index] if all_hr_negative_prompts[index] != all_negative_prompts[index] else None

generation_params_text = ", ".join([k if k == v else f'{k}: {infotext_utils.quote(v)}' for k, v in generation_params.items() if v is not None])

prompt_text = p.main_prompt if use_main_prompt else all_prompts[index]
@@ -1194,12 +1201,6 @@ def init(self, all_prompts, all_seeds, all_subseeds):
         if self.hr_sampler_name is not None and self.hr_sampler_name != self.sampler_name:
             self.extra_generation_params["Hires sampler"] = self.hr_sampler_name

-        if tuple(self.hr_prompt) != tuple(self.prompt):
-            self.extra_generation_params["Hires prompt"] = self.hr_prompt
-
-        if tuple(self.hr_negative_prompt) != tuple(self.negative_prompt):
-            self.extra_generation_params["Hires negative prompt"] = self.hr_negative_prompt
-
         self.latent_scale_mode = shared.latent_upscale_modes.get(self.hr_upscaler, None) if self.hr_upscaler is not None else shared.latent_upscale_modes.get(shared.latent_upscale_default_mode, "nearest")
         if self.enable_hr and self.latent_scale_mode is None:
             if not any(x.name == self.hr_upscaler for x in shared.sd_upscalers):
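Taken together, the processing.py changes stop pushing the hires prompts through extra_generation_params in init(); instead create_infotext receives (or reads from the processing object) the per-image hires prompt lists and writes "Hires prompt" / "Hires negative prompt" only when they differ from the main prompts, with None placeholders keeping the key order stable. A standalone sketch of that selection step with invented sample data; the helper function is hypothetical, since in the webui this logic sits inline in create_infotext.

```python
# Hypothetical helper mirroring the new lines in create_infotext; in the webui
# the dict below is part of the much larger generation_params dict.
def hires_infotext_fields(index, all_prompts, all_negative_prompts,
                          all_hr_prompts, all_hr_negative_prompts):
    # Placeholders are inserted first so the keys keep their position in the
    # final parameter string even though they are filled in later.
    params = {"Hires prompt": None, "Hires negative prompt": None}

    if all_hr_prompts:
        params["Hires prompt"] = all_hr_prompts[index] if all_hr_prompts[index] != all_prompts[index] else None
    if all_hr_negative_prompts:
        params["Hires negative prompt"] = all_hr_negative_prompts[index] if all_hr_negative_prompts[index] != all_negative_prompts[index] else None

    # None-valued entries are dropped when the infotext string is assembled.
    return {k: v for k, v in params.items() if v is not None}


print(hires_infotext_fields(0, ["a castle"], ["ugly"],
                            ["a castle, highly detailed"], ["ugly"]))
# -> {'Hires prompt': 'a castle, highly detailed'}
```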