Commit 3c7b6e7

Breakout fix of #2481 for #2477, patch release prep (#2497)
This change is a breakout of the changes in PR #2481 to form a patch release. There are no additional tests, but the HF_HUB_OFFLINE feature was merged to improve the CI experience.

* Testing common uses situational HF_HUB_OFFLINE (#2490)

Employ offline mode when the model has already been accessed once from the Hub, in order to speed up the CI and make the process less prone to rate limiting. The idea is that once a context has been entered for a specific model id, we can assume the model is cached locally and set HF_HUB_OFFLINE=1 for that context. This PR applies the concept to testing_common, which already covers a big chunk of the tests and probably offers the biggest gain for the amount of change.

We already saw that the assumption does not always hold: in the prompt tuning tests (_test_prepare_input_for_generation) there is a case where the tokenizer is not used for model X on the first run but is used on the second run - since the Hub is set to offline by then, the tokenizer's from_pretrained call fails. This problem is alleviated by adding the tokenizer name to the model id used as the cache identifier.

(cherry picked from commit 1083964)

(Removed delete adapter tests)
1 parent c42eb22 commit 3c7b6e7
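For illustration only, a minimal sketch of the offline-caching idea described in the commit message; the helper name hub_offline_once, the _seen registry, and the cache_id convention (model id plus tokenizer name) are assumptions, not the actual test utilities from #2490:

import os
from contextlib import contextmanager

# cache identifiers (e.g. model id plus tokenizer name) that were already fetched once
_seen = set()

@contextmanager
def hub_offline_once(cache_id):
    """Run the wrapped block with HF_HUB_OFFLINE=1 if cache_id was already downloaded before."""
    previous = os.environ.get("HF_HUB_OFFLINE")
    if cache_id in _seen:
        # assume everything for this cache_id is already in the local cache
        os.environ["HF_HUB_OFFLINE"] = "1"
    try:
        yield
    finally:
        _seen.add(cache_id)
        # restore whatever was set before; note that huggingface_hub reads this flag at
        # import time, so a real helper may need to patch library constants instead
        if previous is None:
            os.environ.pop("HF_HUB_OFFLINE", None)
        else:
            os.environ["HF_HUB_OFFLINE"] = previous

# usage sketch: with hub_offline_once(model_id + "-" + tokenizer_name): load model and tokenizer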

5 files changed: +879 / -762 lines


setup.py

Lines changed: 1 addition & 1 deletion

@@ -15,7 +15,7 @@
 from setuptools import find_packages, setup
 
 
-VERSION = "0.15.1"
+VERSION = "0.15.2"
 
 extras = {}
 extras["quality"] = [

src/peft/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "0.15.1"
+__version__ = "0.15.2"
 
 from .auto import (
     MODEL_TYPE_TO_PEFT_MODEL_MAPPING,

src/peft/peft_model.py

Lines changed: 14 additions & 12 deletions

@@ -953,7 +953,7 @@ def set_additional_trainable_modules(self, peft_config, adapter_name):
             else:
                 self.modules_to_save.update(peft_config.modules_to_save)
             # this may add a new ModulesToSaveWrapper
-            _set_trainable(self, adapter_name, module_names=peft_config.modules_to_save)
+            _set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))
 
         if getattr(peft_config, "trainable_token_indices", None) is not None:
             if isinstance(peft_config.trainable_token_indices, dict):
@@ -1497,13 +1497,14 @@ def __init__(
         else:
             peft_config.modules_to_save.extend(classifier_module_names)
 
-        for name, _ in self.base_model.named_children():
-            if any(module_name in name for module_name in self.modules_to_save):
-                self.cls_layer_name = name
-                break
+        if self.modules_to_save:
+            for name, _ in self.base_model.named_children():
+                if any(module_name in name for module_name in self.modules_to_save):
+                    self.cls_layer_name = name
+                    break
 
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
-        _set_trainable(self, adapter_name, module_names=peft_config.modules_to_save)
+        _set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))
 
     def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
@@ -2288,13 +2289,14 @@ def __init__(
         else:
            peft_config.modules_to_save.extend(classifier_module_names)
 
-        for name, _ in self.base_model.named_children():
-            if any(module_name in name for module_name in self.modules_to_save):
-                self.cls_layer_name = name
-                break
+        if self.modules_to_save is not None:
+            for name, _ in self.base_model.named_children():
+                if any(module_name in name for module_name in self.modules_to_save):
+                    self.cls_layer_name = name
+                    break
 
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
-        _set_trainable(self, adapter_name, module_names=peft_config.modules_to_save)
+        _set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))
 
     def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
@@ -2515,7 +2517,7 @@ def __init__(
                 break
 
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
-        _set_trainable(self, adapter_name, module_names=peft_config.modules_to_save)
+        _set_trainable(self, adapter_name, module_names=getattr(peft_config, "modules_to_save", None))
 
     def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """

src/peft/utils/other.py

Lines changed: 3 additions & 0 deletions

@@ -749,6 +749,9 @@ def _set_trainable(
     if wrapper_cls is None:
         wrapper_cls = ModulesToSaveWrapper
 
+    if module_names is None:
+        return
+
     trainable_modules = []
     found_modules = set()
     # disable removal of duplicates to support targeting tied weights
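Taken together, the pattern in this patch is simply to read modules_to_save defensively and treat a missing or None value as a no-op. A tiny standalone illustration (DummyConfig and collect_module_names are made-up names, not PEFT API):

from dataclasses import dataclass

@dataclass
class DummyConfig:
    # stand-in for a config class that never defines modules_to_save
    task_type: str = "SEQ_CLS"

def collect_module_names(config):
    # getattr with a None default tolerates the missing attribute ...
    module_names = getattr(config, "modules_to_save", None)
    if module_names is None:
        # ... and the early return mirrors the new no-op branch in _set_trainable
        return []
    return list(module_names)

assert collect_module_names(DummyConfig()) == []  # missing attribute -> safe no-op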
