Skip to content

Commit

Permalink
feat: add clip skip handling (lllyasviel#2999)
Browse files Browse the repository at this point in the history
  • Loading branch information
mashb1t authored and csokun committed Jun 16, 2024
1 parent 6e8093f commit fa48e0f
Show file tree
Hide file tree
Showing 6 changed files with 41 additions and 12 deletions.
1 change: 1 addition & 0 deletions language/en.json
Original file line number Diff line number Diff line change
Expand Up @@ -320,6 +320,7 @@
"vae": "vae",
"CFG Mimicking from TSNR": "CFG Mimicking from TSNR",
"Enabling Fooocus's implementation of CFG mimicking for TSNR (effective when real CFG > mimicked CFG).": "Enabling Fooocus's implementation of CFG mimicking for TSNR (effective when real CFG > mimicked CFG).",
"CLIP Skip": "CLIP Skip",
"Sampler": "Sampler",
"dpmpp_2m_sde_gpu": "dpmpp_2m_sde_gpu",
"Only effective in non-inpaint mode.": "Only effective in non-inpaint mode.",
Expand Down
6 changes: 6 additions & 0 deletions modules/async_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,7 @@ def handler(async_task):
adm_scaler_negative = args.pop()
adm_scaler_end = args.pop()
adaptive_cfg = args.pop()
clip_skip = args.pop()
sampler_name = args.pop()
scheduler_name = args.pop()
vae_name = args.pop()
Expand Down Expand Up @@ -297,6 +298,7 @@ def handler(async_task):
adm_scaler_end = 0.0

print(f'[Parameters] Adaptive CFG = {adaptive_cfg}')
print(f'[Parameters] CLIP Skip = {clip_skip}')
print(f'[Parameters] Sharpness = {sharpness}')
print(f'[Parameters] ControlNet Softness = {controlnet_softness}')
print(f'[Parameters] ADM Scale = '
Expand Down Expand Up @@ -466,6 +468,8 @@ def handler(async_task):
loras=loras, base_model_additional_loras=base_model_additional_loras,
use_synthetic_refiner=use_synthetic_refiner, vae_name=vae_name)

pipeline.set_clip_skip(clip_skip)

progressbar(async_task, 3, 'Processing prompts ...')
tasks = []

Expand Down Expand Up @@ -924,6 +928,8 @@ def callback(step, x0, x, total_steps, y):
d.append(
('CFG Mimicking from TSNR', 'adaptive_cfg', modules.patch.patch_settings[pid].adaptive_cfg))

if clip_skip > 1:
d.append(('CLIP Skip', 'clip_skip', clip_skip))
d.append(('Sampler', 'sampler', sampler_name))
d.append(('Scheduler', 'scheduler', scheduler_name))
d.append(('VAE', 'vae', vae_name))
Expand Down
6 changes: 6 additions & 0 deletions modules/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -434,6 +434,11 @@ def init_temp_path(path: str | None, default_path: str) -> str:
default_value=7.0,
validator=lambda x: isinstance(x, numbers.Number)
)
# Default CLIP skip value surfaced in the UI (1 = use the last CLIP layer,
# i.e. skipping disabled). Only validated as being a number here; the
# slider in webui.py further constrains it to integers in [1, 10].
default_clip_skip = get_config_item_or_set_default(
    key='default_clip_skip',
    default_value=1,
    validator=lambda x: isinstance(x, numbers.Number)
)
default_overwrite_step = get_config_item_or_set_default(
key='default_overwrite_step',
default_value=-1,
Expand Down Expand Up @@ -488,6 +493,7 @@ def init_temp_path(path: str | None, default_path: str) -> str:
"default_cfg_scale": "guidance_scale",
"default_sample_sharpness": "sharpness",
"default_cfg_tsnr": "adaptive_cfg",
"default_clip_skip": "clip_skip",
"default_sampler": "sampler",
"default_scheduler": "scheduler",
"default_overwrite_step": "steps",
Expand Down
11 changes: 11 additions & 0 deletions modules/default_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,17 @@ def clip_encode(texts, pool_top_k=1):
return [[torch.cat(cond_list, dim=1), {"pooled_output": pooled_acc}]]


@torch.no_grad()
@torch.inference_mode()
def set_clip_skip(clip_skip: int):
    """Apply a CLIP-skip setting to the currently loaded CLIP model.

    Args:
        clip_skip: 1-based number of CLIP layers to skip; 1 keeps the
            default (last) layer, i.e. effectively disables skipping.
    """
    global final_clip

    # No pipeline loaded yet -> nothing to configure.
    if final_clip is None:
        return

    # clip_layer presumably expects a negative index counted back from the
    # final layer (ComfyUI convention) — abs() guards against callers
    # passing an already-negated value. TODO(review): confirm semantics.
    final_clip.clip_layer(-abs(clip_skip))

@torch.no_grad()
@torch.inference_mode()
def clear_all_caches():
Expand Down
18 changes: 10 additions & 8 deletions modules/meta_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,16 +34,17 @@ def load_parameter_button_click(raw_metadata: dict | str, is_generating: bool):
get_list('styles', 'Styles', loaded_parameter_dict, results)
get_str('performance', 'Performance', loaded_parameter_dict, results)
get_steps('steps', 'Steps', loaded_parameter_dict, results)
get_float('overwrite_switch', 'Overwrite Switch', loaded_parameter_dict, results)
get_number('overwrite_switch', 'Overwrite Switch', loaded_parameter_dict, results)
get_resolution('resolution', 'Resolution', loaded_parameter_dict, results)
get_float('guidance_scale', 'Guidance Scale', loaded_parameter_dict, results)
get_float('sharpness', 'Sharpness', loaded_parameter_dict, results)
get_number('guidance_scale', 'Guidance Scale', loaded_parameter_dict, results)
get_number('sharpness', 'Sharpness', loaded_parameter_dict, results)
get_adm_guidance('adm_guidance', 'ADM Guidance', loaded_parameter_dict, results)
get_str('refiner_swap_method', 'Refiner Swap Method', loaded_parameter_dict, results)
get_float('adaptive_cfg', 'CFG Mimicking from TSNR', loaded_parameter_dict, results)
get_number('adaptive_cfg', 'CFG Mimicking from TSNR', loaded_parameter_dict, results)
get_number('clip_skip', 'CLIP Skip', loaded_parameter_dict, results, cast_type=int)
get_str('base_model', 'Base Model', loaded_parameter_dict, results)
get_str('refiner_model', 'Refiner Model', loaded_parameter_dict, results)
get_float('refiner_switch', 'Refiner Switch', loaded_parameter_dict, results)
get_number('refiner_switch', 'Refiner Switch', loaded_parameter_dict, results)
get_str('sampler', 'Sampler', loaded_parameter_dict, results)
get_str('scheduler', 'Scheduler', loaded_parameter_dict, results)
get_str('vae', 'VAE', loaded_parameter_dict, results)
Expand Down Expand Up @@ -83,11 +84,11 @@ def get_list(key: str, fallback: str | None, source_dict: dict, results: list, d
results.append(gr.update())


def get_float(key: str, fallback: str | None, source_dict: dict, results: list, default=None):
def get_number(key: str, fallback: str | None, source_dict: dict, results: list, default=None, cast_type=float):
try:
h = source_dict.get(key, source_dict.get(fallback, default))
assert h is not None
h = float(h)
h = cast_type(h)
results.append(h)
except:
results.append(gr.update())
Expand Down Expand Up @@ -314,6 +315,7 @@ def get_scheme(self) -> MetadataScheme:
'adm_guidance': 'ADM Guidance',
'refiner_swap_method': 'Refiner Swap Method',
'adaptive_cfg': 'Adaptive CFG',
'clip_skip': 'Clip skip',
'overwrite_switch': 'Overwrite Switch',
'freeu': 'FreeU',
'base_model': 'Model',
Expand Down Expand Up @@ -458,7 +460,7 @@ def parse_string(self, metadata: dict) -> str:
self.fooocus_to_a1111['refiner_model_hash']: self.refiner_model_hash
}

for key in ['adaptive_cfg', 'overwrite_switch', 'refiner_swap_method', 'freeu']:
for key in ['adaptive_cfg', 'clip_skip', 'overwrite_switch', 'refiner_swap_method', 'freeu']:
if key in data:
generation_params[self.fooocus_to_a1111[key]] = data[key]

Expand Down
11 changes: 7 additions & 4 deletions webui.py
Original file line number Diff line number Diff line change
Expand Up @@ -412,6 +412,9 @@ def update_history_link():
value=modules.config.default_cfg_tsnr,
info='Enabling Fooocus\'s implementation of CFG mimicking for TSNR '
'(effective when real CFG > mimicked CFG).')
clip_skip = gr.Slider(label='CLIP Skip', minimum=1, maximum=10, step=1,
value=modules.config.default_clip_skip,
info='Bypass CLIP layers to avoid overfitting (use 1 to disable).')
sampler_name = gr.Dropdown(label='Sampler', choices=flags.sampler_list,
value=modules.config.default_sampler)
scheduler_name = gr.Dropdown(label='Scheduler', choices=flags.scheduler_list,
Expand Down Expand Up @@ -576,9 +579,9 @@ def refresh_files_clicked():
load_data_outputs = [advanced_checkbox, image_number, prompt, negative_prompt, style_selections,
performance_selection, overwrite_step, overwrite_switch, aspect_ratios_selection,
overwrite_width, overwrite_height, guidance_scale, sharpness, adm_scaler_positive,
adm_scaler_negative, adm_scaler_end, refiner_swap_method, adaptive_cfg, base_model,
refiner_model, refiner_switch, sampler_name, scheduler_name, vae_name, seed_random,
image_seed, generate_button, load_parameter_button] + freeu_ctrls + lora_ctrls
adm_scaler_negative, adm_scaler_end, refiner_swap_method, adaptive_cfg, clip_skip,
base_model, refiner_model, refiner_switch, sampler_name, scheduler_name, vae_name,
seed_random, image_seed, generate_button, load_parameter_button] + freeu_ctrls + lora_ctrls

if not args_manager.args.disable_preset_selection:
def preset_selection_change(preset, is_generating):
Expand Down Expand Up @@ -663,7 +666,7 @@ def inpaint_mode_change(mode):
ctrls += [uov_method, uov_input_image]
ctrls += [outpaint_selections, inpaint_input_image, inpaint_additional_prompt, inpaint_mask_image]
ctrls += [disable_preview, disable_intermediate_results, disable_seed_increment, black_out_nsfw]
ctrls += [adm_scaler_positive, adm_scaler_negative, adm_scaler_end, adaptive_cfg]
ctrls += [adm_scaler_positive, adm_scaler_negative, adm_scaler_end, adaptive_cfg, clip_skip]
ctrls += [sampler_name, scheduler_name, vae_name]
ctrls += [overwrite_step, overwrite_switch, overwrite_width, overwrite_height, overwrite_vary_strength]
ctrls += [overwrite_upscale_strength, mixing_image_prompt_and_vary_upscale, mixing_image_prompt_and_inpaint]
Expand Down

0 comments on commit fa48e0f

Please sign in to comment.