Added checks for `layers_to_transform` and `layers_pattern` in LoRA #2159

Merged · 10 commits · Oct 29, 2024
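In short: each affected tuner config now rejects `layers_pattern` without `layers_to_transform` in its `__post_init__`. A minimal sketch of the resulting behavior (values are illustrative; assumes a PEFT build that includes this PR):

```python
from peft import LoraConfig

# Fine: both given together.
LoraConfig(target_modules=["q_proj"], layers_to_transform=[0, 1], layers_pattern="layers")

# Fine: neither given.
LoraConfig(target_modules=["q_proj"])

# Raises ValueError after this PR: a pattern without layers to transform.
LoraConfig(target_modules=["q_proj"], layers_pattern="layers")
```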
9 changes: 6 additions & 3 deletions src/peft/tuners/adalora/config.py
@@ -68,10 +68,13 @@ def __post_init__(self):
        if isinstance(self.target_modules, str) and self.layers_to_transform is not None:
            raise ValueError("`layers_to_transform` cannot be used when `target_modules` is a str.")

        # if target_modules is a regex expression, then layers_pattern should be None
        if isinstance(self.target_modules, str) and self.layers_pattern is not None:
            raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")
        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
Review comment (maintainer):

I think these checks should be adjusted like so:

Suggested change:
- if (self.layers_pattern is not None) and (self.layers_to_transform is None):
+ if self.layers_pattern and not self.layers_to_transform:

Why?

  1. If users pass `layers_pattern=[]`, `layers_to_transform=None`, we should not raise.
  2. If users pass `layers_pattern=["foo"]`, `layers_to_transform=[]`, we should raise.

Right now, we don't do that because of the strict None check.
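A small self-contained sketch (plain Python, not the PEFT code itself) of the two cases the strict `None` check gets wrong:

```python
def strict_check(layers_pattern, layers_to_transform):
    # Check as written in this PR: only a literal None counts as "not specified".
    return (layers_pattern is not None) and (layers_to_transform is None)

def truthy_check(layers_pattern, layers_to_transform):
    # Suggested variant: empty containers also count as "not specified".
    return bool(layers_pattern and not layers_to_transform)

# Case 1: layers_pattern=[], layers_to_transform=None -- should NOT raise.
assert strict_check([], None) is True    # would raise (too strict)
assert truthy_check([], None) is False   # no error, as desired

# Case 2: layers_pattern=["foo"], layers_to_transform=[] -- SHOULD raise.
assert strict_check(["foo"], []) is False  # silently passes (check missed)
assert truthy_check(["foo"], []) is True   # raises, as desired
```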

raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")

        # check for layers_to_transform and layers_pattern
        if (self.layers_to_transform is not None) and (self.layers_pattern is None):
            raise ValueError("When `layers_to_transform` is specified, `layers_pattern` must also be specified.")
        # Check if 'r' has been set to a non-default value
        if self.r != 8:  # 8 is the default value for 'r' in LoraConfig
            warnings.warn(
3 changes: 3 additions & 0 deletions src/peft/tuners/boft/config.py
@@ -144,6 +144,9 @@ def __post_init__(self):
        self.exclude_modules = (
            set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
        )
        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")
        if self.boft_block_size == 0 and self.boft_block_num == 0:
            raise ValueError(
                f"Either `boft_block_size` or `boft_block_num` must be non-zero. Currently, boft_block_size = {self.boft_block_size} and boft_block_num = {self.boft_block_num}."
3 changes: 3 additions & 0 deletions src/peft/tuners/fourierft/config.py
@@ -197,3 +197,6 @@ def __post_init__(self):
        # if target_modules is a regex expression, then layers_pattern should be None
        if isinstance(self.target_modules, str) and self.layers_pattern is not None:
            raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")
        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")
4 changes: 4 additions & 0 deletions src/peft/tuners/hra/config.py
@@ -127,3 +127,7 @@ def __post_init__(self):
        # if target_modules is a regex expression, then layers_pattern should be None
        if isinstance(self.target_modules, str) and self.layers_pattern is not None:
            raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")

        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")
3 changes: 3 additions & 0 deletions src/peft/tuners/loha/config.py
@@ -131,3 +131,6 @@ def __post_init__(self):
        self.exclude_modules = (
            set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
        )
        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")
3 changes: 3 additions & 0 deletions src/peft/tuners/lokr/config.py
@@ -140,3 +140,6 @@ def __post_init__(self):
        self.exclude_modules = (
            set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
        )
        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")
5 changes: 5 additions & 0 deletions src/peft/tuners/lora/config.py
@@ -338,6 +338,7 @@ def __post_init__(self):
        self.exclude_modules = (
            set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
        )

        # if target_modules is a regex expression, then layers_to_transform should be None
        if isinstance(self.target_modules, str) and self.layers_to_transform is not None:
            raise ValueError("`layers_to_transform` cannot be used when `target_modules` is a str.")
@@ -346,6 +347,10 @@ def __post_init__(self):
        if isinstance(self.target_modules, str) and self.layers_pattern is not None:
            raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")

        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")

        if self.use_dora and self.megatron_config:
            raise ValueError("DoRA does not support megatron_core, please set `use_dora=False`.")

3 changes: 3 additions & 0 deletions src/peft/tuners/oft/config.py
@@ -174,6 +174,9 @@ def __post_init__(self):
        self.exclude_modules = (
            set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
        )
        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")
        if self.r == 0 and self.oft_block_size == 0:
            raise ValueError(
                f"Either `r` or `oft_block_size` must be non-zero. Currently, r = {self.r} and oft_block_size = {self.oft_block_size}."
3 changes: 3 additions & 0 deletions src/peft/tuners/vblora/config.py
@@ -188,3 +188,6 @@ def __post_init__(self):
        self.exclude_modules = (
            set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
        )
        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")
4 changes: 3 additions & 1 deletion src/peft/tuners/vera/config.py
@@ -148,7 +148,9 @@ def __post_init__(self):
        self.target_modules = (
            set(self.target_modules) if isinstance(self.target_modules, list) else self.target_modules
        )

        # check for layers_to_transform and layers_pattern
        if (self.layers_pattern is not None) and (self.layers_to_transform is None):
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")
        if not self.save_projection:
            warnings.warn(
                "Specified to not save vera_A and vera_B within the state dictionary, instead they will be restored "
27 changes: 27 additions & 0 deletions tests/test_config.py
@@ -351,3 +351,30 @@ def test_from_pretrained_sanity_check(self, config_class, tmp_path):
msg = f"The config that is trying to be loaded is not a valid {config_class.__name__} config"
with pytest.raises(TypeError, match=msg):
config_class.from_pretrained(tmp_path)

def test_lora_config_layers_to_transform_validation(self):
"""Test that specifying layers_pattern without layers_to_transform raises an error"""
with pytest.raises(
ValueError, match="When `layers_pattern` is specified, `layers_to_transform` must also be specified."
):
LoraConfig(r=8, lora_alpha=16, target_modules=["query", "value"], layers_pattern="model.layers")

# Test that specifying both layers_to_transform and layers_pattern works fine
config = LoraConfig(
r=8,
lora_alpha=16,
target_modules=["query", "value"],
layers_to_transform=[0, 1, 2],
layers_pattern="model.layers",
)
assert config.layers_to_transform == [0, 1, 2]
assert config.layers_pattern == "model.layers"

# Test that not specifying either works fine
config = LoraConfig(
r=8,
lora_alpha=16,
target_modules=["query", "value"],
)
assert config.layers_to_transform is None
assert config.layers_pattern is None